Actual source code: ex1.c

petsc-master 2017-11-16
static const char help[] = "Test star forest communication (PetscSF)\n\n";

/*T
    Description: A star forest is a simple tree with one root and zero or more leaves.
    Many common communication patterns can be expressed as updates of rootdata using leafdata and vice-versa.
    This example creates a star forest, communicates values using the graph (see options for types of communication), views the graph, then destroys it.
T*/
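
/*
   For orientation (an illustrative sketch, not part of this example): each leaf
   is described by a PetscSFNode giving the owning rank and the root's index on
   that rank. On each of two processes, a single leaf referencing root 0 of the
   other process could be set up as

     PetscSFNode remote;
     remote.rank  = 1 - rank;   (the owning process)
     remote.index = 0;          (the root's index on that process)
     PetscSFSetGraph(sf,1,1,NULL,PETSC_COPY_VALUES,&remote,PETSC_COPY_VALUES);
*/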

/*
  Include petscsf.h so we can use PetscSF objects. Note that this automatically
  includes petscsys.h.
*/
#include <petscsf.h>
#include <petscviewer.h>

int main(int argc,char **argv)
{
  PetscErrorCode ierr;
  PetscInt       i,nroots,nrootsalloc,nleaves,nleavesalloc,*mine,stride;
  PetscSFNode    *remote;
  PetscMPIInt    rank,size;
  PetscSF        sf;
  PetscBool      test_bcast,test_reduce,test_degree,test_fetchandop,test_gather,test_scatter,test_embed,test_invert,test_sf_distribute;
  MPI_Op         mop=MPI_OP_NULL; /* initialize to prevent compiler warnings with cxx_quad build */
  char           opstring[256];
  PetscBool      strflg;

  ierr = PetscInitialize(&argc,&argv,(char*)0,help);if (ierr) return ierr;
  MPI_Comm_rank(PETSC_COMM_WORLD,&rank);
  MPI_Comm_size(PETSC_COMM_WORLD,&size);

  PetscOptionsBegin(PETSC_COMM_WORLD,"","PetscSF Test Options","none");
  test_bcast      = PETSC_FALSE;
  PetscOptionsBool("-test_bcast","Test broadcast","",test_bcast,&test_bcast,NULL);
  test_reduce     = PETSC_FALSE;
  PetscOptionsBool("-test_reduce","Test reduction","",test_reduce,&test_reduce,NULL);
  mop             = MPIU_SUM;
  PetscStrcpy(opstring,"sum");
  PetscOptionsString("-test_op","Designate which MPI_Op to use","",opstring,opstring,256,NULL);
  PetscStrcmp("sum",opstring,&strflg);
  if (strflg) mop = MPIU_SUM;
  PetscStrcmp("prod",opstring,&strflg);
  if (strflg) mop = MPI_PROD;
  PetscStrcmp("max",opstring,&strflg);
  if (strflg) mop = MPI_MAX;
  PetscStrcmp("min",opstring,&strflg);
  if (strflg) mop = MPI_MIN;
  PetscStrcmp("land",opstring,&strflg);
  if (strflg) mop = MPI_LAND;
  PetscStrcmp("band",opstring,&strflg);
  if (strflg) mop = MPI_BAND;
  PetscStrcmp("lor",opstring,&strflg);
  if (strflg) mop = MPI_LOR;
  PetscStrcmp("bor",opstring,&strflg);
  if (strflg) mop = MPI_BOR;
  PetscStrcmp("lxor",opstring,&strflg);
  if (strflg) mop = MPI_LXOR;
  PetscStrcmp("bxor",opstring,&strflg);
  if (strflg) mop = MPI_BXOR;
  test_degree     = PETSC_FALSE;
  PetscOptionsBool("-test_degree","Test computation of vertex degree","",test_degree,&test_degree,NULL);
  test_fetchandop = PETSC_FALSE;
  PetscOptionsBool("-test_fetchandop","Test atomic Fetch-And-Op","",test_fetchandop,&test_fetchandop,NULL);
  test_gather     = PETSC_FALSE;
  PetscOptionsBool("-test_gather","Test point gather","",test_gather,&test_gather,NULL);
  test_scatter    = PETSC_FALSE;
  PetscOptionsBool("-test_scatter","Test point scatter","",test_scatter,&test_scatter,NULL);
  test_embed      = PETSC_FALSE;
  PetscOptionsBool("-test_embed","Test point embed","",test_embed,&test_embed,NULL);
  test_invert     = PETSC_FALSE;
  PetscOptionsBool("-test_invert","Test point invert","",test_invert,&test_invert,NULL);
  stride          = 1;
  PetscOptionsInt("-stride","Stride for leaf and root data","",stride,&stride,NULL);
  test_sf_distribute = PETSC_FALSE;
  PetscOptionsBool("-test_sf_distribute","Create an SF that 'distributes' to each process, like an alltoall","",test_sf_distribute,&test_sf_distribute,NULL);
  PetscOptionsEnd();

  if (test_sf_distribute) {
    nroots       = size;
    nrootsalloc  = size;
    nleaves      = size;
    nleavesalloc = size;
    mine         = NULL;
    PetscMalloc1(nleaves,&remote);
    for (i=0; i<size; i++) {
      remote[i].rank  = i;
      remote[i].index = rank;
    }
  } else {
    nroots       = 2 + (PetscInt)(rank == 0);
    nrootsalloc  = nroots * stride;
    nleaves      = 2 + (PetscInt)(rank > 0);
    nleavesalloc = nleaves * stride;
    mine         = NULL;
    if (stride > 1) {
      PetscInt i;

      PetscMalloc1(nleaves,&mine);
      for (i = 0; i < nleaves; i++) {
        mine[i] = stride * i;
      }
    }
    PetscMalloc1(nleaves,&remote);
    /* Left periodic neighbor */
    remote[0].rank  = (rank+size-1)%size;
    remote[0].index = 1 * stride;
    /* Right periodic neighbor */
    remote[1].rank  = (rank+1)%size;
    remote[1].index = 0 * stride;
    if (rank > 0) {               /* All ranks > 0 also reference rank 0, root index 2*stride */
      remote[2].rank  = 0;
      remote[2].index = 2 * stride;
    }
  }
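
  /* For example, with 3 processes and -stride 1 the graph built above is:
       rank 0: leaf 0 -> (rank 2, root 1), leaf 1 -> (rank 1, root 0)
       rank 1: leaf 0 -> (rank 0, root 1), leaf 1 -> (rank 2, root 0), leaf 2 -> (rank 0, root 2)
       rank 2: leaf 0 -> (rank 1, root 1), leaf 1 -> (rank 0, root 0), leaf 2 -> (rank 0, root 2) */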

  /* Create a star forest for communication. When the leaf space is dense (stride 1), mine is NULL;
   * otherwise mine lists the strided local leaf offsets. */
  PetscSFCreate(PETSC_COMM_WORLD,&sf);
  PetscSFSetFromOptions(sf);
  PetscSFSetGraph(sf,nrootsalloc,nleaves,mine,PETSC_OWN_POINTER,remote,PETSC_OWN_POINTER);
  PetscSFSetUp(sf);

  /* View graph, mostly useful for debugging purposes. */
  PetscViewerPushFormat(PETSC_VIEWER_STDOUT_WORLD,PETSC_VIEWER_ASCII_INFO_DETAIL);
  PetscSFView(sf,PETSC_VIEWER_STDOUT_WORLD);
  PetscViewerPopFormat(PETSC_VIEWER_STDOUT_WORLD);


  if (test_bcast) {             /* broadcast rootdata into leafdata */
    PetscInt *rootdata,*leafdata;
    /* Allocate space for send and receive buffers. This example communicates PetscInt, but other types, including
     * user-defined structures, could also be used. */
    PetscMalloc2(nrootsalloc,&rootdata,nleavesalloc,&leafdata);
    /* Set rootdata buffer to be broadcast */
    for (i=0; i<nrootsalloc; i++) rootdata[i] = -1;
    for (i=0; i<nroots; i++) rootdata[i*stride] = 100*(rank+1) + i;
    /* Initialize local buffer; these values are never used. */
    for (i=0; i<nleavesalloc; i++) leafdata[i] = -1;
    /* Broadcast entries from rootdata to leafdata. Computation or other communication can be performed between the begin and end calls. */
    PetscSFBcastBegin(sf,MPIU_INT,rootdata,leafdata);
    PetscSFBcastEnd(sf,MPIU_INT,rootdata,leafdata);
    PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD,"## Bcast Rootdata\n");
    PetscIntView(nrootsalloc,rootdata,PETSC_VIEWER_STDOUT_WORLD);
    PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD,"## Bcast Leafdata\n");
    PetscIntView(nleavesalloc,leafdata,PETSC_VIEWER_STDOUT_WORLD);
    PetscFree2(rootdata,leafdata);
  }
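
  /* With 3 processes and -stride 1, for instance, rank 1 sets its rootdata to {200,201}
   * and after the broadcast its leafdata is {101,300,102}: the values at root 1 of
   * rank 0, root 0 of rank 2, and root 2 of rank 0, respectively. */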

  if (test_reduce) {            /* Reduce leafdata into rootdata */
    PetscInt *rootdata,*leafdata;
    PetscMalloc2(nrootsalloc,&rootdata,nleavesalloc,&leafdata);
    /* Initialize rootdata buffer in which the result of the reduction will appear. */
    for (i=0; i<nrootsalloc; i++) rootdata[i] = -1;
    for (i=0; i<nroots; i++) rootdata[i*stride] = 100*(rank+1) + i;
    /* Set leaf values to reduce. */
    for (i=0; i<nleavesalloc; i++) leafdata[i] = -1;
    for (i=0; i<nleaves; i++) leafdata[i*stride] = 1000*(rank+1) + 10*i;
    PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD,"## Pre-Reduce Rootdata\n");
    PetscIntView(nrootsalloc,rootdata,PETSC_VIEWER_STDOUT_WORLD);
    /* Perform reduction. Computation or other communication can be performed between the begin and end calls.
     * This example sums the values, but other MPI_Ops can be used (e.g., MPI_MAX, MPI_PROD). */
    PetscSFReduceBegin(sf,MPIU_INT,leafdata,rootdata,mop);
    PetscSFReduceEnd(sf,MPIU_INT,leafdata,rootdata,mop);
    PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD,"## Reduce Leafdata\n");
    PetscIntView(nleavesalloc,leafdata,PETSC_VIEWER_STDOUT_WORLD);
    PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD,"## Reduce Rootdata\n");
    PetscIntView(nrootsalloc,rootdata,PETSC_VIEWER_STDOUT_WORLD);
    PetscFree2(rootdata,leafdata);
  }
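
  /* Each root combines its pre-reduce value with the contributions of every leaf
   * that references it. With 3 processes, -stride 1, and the default sum, root 2
   * on rank 0 starts at 102 and receives leaf 2 of ranks 1 and 2 (2020 and 3020),
   * ending at 5142. */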

  if (test_degree) {
    const PetscInt *degree;
    PetscSFComputeDegreeBegin(sf,&degree);
    PetscSFComputeDegreeEnd(sf,&degree);
    PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD,"## Root degrees\n");
    PetscIntView(nrootsalloc,degree,PETSC_VIEWER_STDOUT_WORLD);
  }
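
  /* In the default graph, the roots at offsets 0 and stride on every rank have
   * degree 1 (each is referenced by exactly one ring neighbor), the root at
   * offset 2*stride on rank 0 has degree size-1 (every other rank references it),
   * and padding entries introduced by -stride have degree 0. */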

  if (test_fetchandop) {
    /* Cannot use text compare here because token ordering is not deterministic */
    PetscInt *leafdata,*leafupdate,*rootdata;
    PetscMalloc3(nleavesalloc,&leafdata,nleavesalloc,&leafupdate,nrootsalloc,&rootdata);
    for (i=0; i<nleavesalloc; i++) leafdata[i] = -1;
    for (i=0; i<nleaves; i++) leafdata[i*stride] = 1;
    for (i=0; i<nrootsalloc; i++) rootdata[i] = -1;
    for (i=0; i<nroots; i++) rootdata[i*stride] = 0;
    PetscSFFetchAndOpBegin(sf,MPIU_INT,rootdata,leafdata,leafupdate,mop);
    PetscSFFetchAndOpEnd(sf,MPIU_INT,rootdata,leafdata,leafupdate,mop);
    PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD,"## Rootdata (sum of 1 from each leaf)\n");
    PetscIntView(nrootsalloc,rootdata,PETSC_VIEWER_STDOUT_WORLD);
    PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD,"## Leafupdate (value at roots prior to my atomic update)\n");
    PetscIntView(nleavesalloc,leafupdate,PETSC_VIEWER_STDOUT_WORLD);
    PetscFree3(leafdata,leafupdate,rootdata);
  }
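
  /* With the default sum, each root finishes at its degree (every referencing leaf
   * adds 1 to an initial 0), and each leafupdate entry holds the root's value just
   * before that leaf's atomic update; for root 2 on rank 0 the values observed by
   * its size-1 leaves form some permutation of 0..size-2, depending on arrival
   * order. */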

  if (test_gather) {
    const PetscInt *degree;
    PetscInt       inedges,*indata,*outdata;
    PetscSFComputeDegreeBegin(sf,&degree);
    PetscSFComputeDegreeEnd(sf,&degree);
    for (i=0,inedges=0; i<nrootsalloc; i++) inedges += degree[i];
    PetscMalloc2(inedges,&indata,nleavesalloc,&outdata);
    for (i=0; i<nleavesalloc; i++) outdata[i] = -1;
    for (i=0; i<nleaves; i++) outdata[i*stride] = 1000*(rank+1) + i;
    PetscSFGatherBegin(sf,MPIU_INT,outdata,indata);
    PetscSFGatherEnd(sf,MPIU_INT,outdata,indata);
    PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD,"## Gathered data at multi-roots from leaves\n");
    PetscIntView(inedges,indata,PETSC_VIEWER_STDOUT_WORLD);
    PetscFree2(indata,outdata);
  }
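
  /* Gather keeps one entry per graph edge rather than combining them with an
   * MPI_Op: root i owns degree[i] consecutive slots of indata, so indata holds
   * inedges = sum of all degree[i] values in total. */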

  if (test_scatter) {
    const PetscInt *degree;
    PetscInt       j,count,inedges,*indata,*outdata;
    PetscSFComputeDegreeBegin(sf,&degree);
    PetscSFComputeDegreeEnd(sf,&degree);
    for (i=0,inedges=0; i<nrootsalloc; i++) inedges += degree[i];
    PetscMalloc2(inedges,&indata,nleavesalloc,&outdata);
    for (i=0; i<nleavesalloc; i++) outdata[i] = -1;
    for (i=0,count=0; i<nrootsalloc; i++) {
      for (j=0; j<degree[i]; j++) indata[count++] = 1000*(rank+1) + 100*i + j;
    }
    PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD,"## Data at multi-roots, to scatter to leaves\n");
    PetscIntView(inedges,indata,PETSC_VIEWER_STDOUT_WORLD);

    PetscSFScatterBegin(sf,MPIU_INT,indata,outdata);
    PetscSFScatterEnd(sf,MPIU_INT,indata,outdata);
    PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD,"## Scattered data at leaves\n");
    PetscIntView(nleavesalloc,outdata,PETSC_VIEWER_STDOUT_WORLD);
    PetscFree2(indata,outdata);
  }
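
  /* Scatter is the inverse of gather: the degree[i] slots belonging to root i are
   * sent to that root's leaves one slot per leaf, so different leaves of the same
   * root can receive different values, unlike broadcast. */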

  if (test_embed) {
    const PetscInt nroots = 1 + (PetscInt) !rank;
    PetscInt       selected[2];
    PetscSF        esf;

    selected[0] = stride;
    selected[1] = 2*stride;
    PetscSFCreateEmbeddedSF(sf,nroots,selected,&esf);
    PetscSFSetUp(esf);
    PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD,"## Embedded PetscSF\n");
    PetscViewerPushFormat(PETSC_VIEWER_STDOUT_WORLD,PETSC_VIEWER_ASCII_INFO_DETAIL);
    PetscSFView(esf,PETSC_VIEWER_STDOUT_WORLD);
    PetscViewerPopFormat(PETSC_VIEWER_STDOUT_WORLD);
    PetscSFDestroy(&esf);
  }
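
  /* The embedded SF retains only the edges whose root appears in selected[] (here
   * the roots at offsets stride and 2*stride), so communication on esf touches
   * just that subset of the original graph. */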

  if (test_invert) {
    PetscSF msf,imsf;
    PetscSFGetMultiSF(sf,&msf);
    PetscSFCreateInverseSF(msf,&imsf);
    PetscSFSetUp(msf);
    PetscSFSetUp(imsf);
    PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD,"## Multi-SF\n");
    PetscSFView(msf,PETSC_VIEWER_STDOUT_WORLD);
    PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD,"## Inverse of Multi-SF\n");
    PetscSFView(imsf,PETSC_VIEWER_STDOUT_WORLD);
    PetscSFDestroy(&imsf);
  }
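
  /* The multi-SF expands each root into one vertex per referencing leaf (the same
   * space gather/scatter use above), so every vertex has degree 1 and the graph
   * can be inverted; the inverse SF communicates in the opposite direction, with
   * the original leaves acting as roots. */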

  /* Clean storage for star forest. */
  PetscSFDestroy(&sf);
  ierr = PetscFinalize();
  return ierr;
}

/*TEST
  test:
    suffix: 8
    nsize: 3
    args: -test_bcast -test_sf_distribute -sf_type window
  test:
    suffix: 8_basic
    nsize: 3
    args: -test_bcast -test_sf_distribute -sf_type basic
TEST*/
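
/*
   To run one of the configurations above by hand (assuming a typical PETSc build
   with MPI available), something like:

     mpiexec -n 3 ./ex1 -test_bcast -test_sf_distribute -sf_type basic
*/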