Actual source code: da1.c
1: #define PETSCDM_DLL
2: /*
3: Code for manipulating distributed regular 1d arrays in parallel.
4: This file was created by Peter Mell 6/30/95
5: */
7: #include "private/daimpl.h"
9: const char *DAPeriodicTypes[] = {"NONPERIODIC","XPERIODIC","YPERIODIC","XYPERIODIC",
10: "XYZPERIODIC","XZPERIODIC","YZPERIODIC","ZPERIODIC","XYZGHOSTED","DAPeriodicType","DA_",0};
14: PetscErrorCode DAView_1d(DA da,PetscViewer viewer)
15: {
17: PetscMPIInt rank;
18: PetscTruth iascii,isdraw;
21: MPI_Comm_rank(((PetscObject)da)->comm,&rank);
23: PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&iascii);
24: PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_DRAW,&isdraw);
25: if (iascii) {
26: PetscViewerFormat format;
28: PetscViewerGetFormat(viewer, &format);
29: if (format != PETSC_VIEWER_ASCII_VTK && format != PETSC_VIEWER_ASCII_VTK_CELL) {
30: DALocalInfo info;
31: DAGetLocalInfo(da,&info);
32: PetscViewerASCIISynchronizedPrintf(viewer,"Processor [%d] M %D m %D w %D s %D\n",rank,da->M,
33: da->m,da->w,da->s);
34: PetscViewerASCIISynchronizedPrintf(viewer,"X range of indices: %D %D\n",info.xs,info.xs+info.xm);
35: PetscViewerFlush(viewer);
36: }
37: } else if (isdraw) {
38: PetscDraw draw;
39: double ymin = -1,ymax = 1,xmin = -1,xmax = da->M,x;
40: PetscInt base;
41: char node[10];
42: PetscTruth isnull;
44: PetscViewerDrawGetDraw(viewer,0,&draw);
45: PetscDrawIsNull(draw,&isnull); if (isnull) return(0);
47: PetscDrawSetCoordinates(draw,xmin,ymin,xmax,ymax);
48: PetscDrawSynchronizedClear(draw);
50: /* first processor draws all node lines */
51: if (!rank) {
52: PetscInt xmin_tmp;
53: ymin = 0.0; ymax = 0.3;
54:
55: /* ADIC doesn't like doubles in a for loop */
56: for (xmin_tmp =0; xmin_tmp < da->M; xmin_tmp++) {
57: PetscDrawLine(draw,(double)xmin_tmp,ymin,(double)xmin_tmp,ymax,PETSC_DRAW_BLACK);
58: }
60: xmin = 0.0; xmax = da->M - 1;
61: PetscDrawLine(draw,xmin,ymin,xmax,ymin,PETSC_DRAW_BLACK);
62: PetscDrawLine(draw,xmin,ymax,xmax,ymax,PETSC_DRAW_BLACK);
63: }
65: PetscDrawSynchronizedFlush(draw);
66: PetscDrawPause(draw);
68: /* draw my box */
69: ymin = 0; ymax = 0.3; xmin = da->xs / da->w; xmax = (da->xe / da->w) - 1;
70: PetscDrawLine(draw,xmin,ymin,xmax,ymin,PETSC_DRAW_RED);
71: PetscDrawLine(draw,xmin,ymin,xmin,ymax,PETSC_DRAW_RED);
72: PetscDrawLine(draw,xmin,ymax,xmax,ymax,PETSC_DRAW_RED);
73: PetscDrawLine(draw,xmax,ymin,xmax,ymax,PETSC_DRAW_RED);
75: /* Put in index numbers */
76: base = da->base / da->w;
77: for (x=xmin; x<=xmax; x++) {
78: sprintf(node,"%d",(int)base++);
79: PetscDrawString(draw,x,ymin,PETSC_DRAW_RED,node);
80: }
82: PetscDrawSynchronizedFlush(draw);
83: PetscDrawPause(draw);
84: } else {
85: SETERRQ1(PETSC_ERR_SUP,"Viewer type %s not supported for DA 1d",((PetscObject)viewer)->type_name);
86: }
87: return(0);
88: }
92: /*
93: Processes command line options to determine if/how a DA
94: is to be viewed. Called by DACreateXX()
95: */
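/* For example, running an application that creates a DA with the option -da_view prints each
   process's ownership range to stdout, while -da_view_draw draws the distribution (by default
   in an X window). */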
96: PetscErrorCode DAView_Private(DA da)
97: {
99: PetscTruth flg1 = PETSC_FALSE;
100: PetscViewer view;
103: PetscOptionsBegin(((PetscObject)da)->comm,((PetscObject)da)->prefix,"DA viewing options","DA");
104: PetscOptionsTruth("-da_view","Print information about the DA's distribution","DAView",PETSC_FALSE,&flg1,PETSC_NULL);
105: if (flg1) {
106: PetscViewerASCIIGetStdout(((PetscObject)da)->comm,&view);
107: DAView(da,view);
108: }
109: flg1 = PETSC_FALSE;
110: PetscOptionsTruth("-da_view_draw","Draw how the DA is distributed","DAView",PETSC_FALSE,&flg1,PETSC_NULL);
111: if (flg1) {DAView(da,PETSC_VIEWER_DRAW_(((PetscObject)da)->comm));}
112: PetscOptionsEnd();
113: return(0);
114: }
119: PetscErrorCode DACreate_1D(DA da)
120: {
121: const PetscInt dim = da->dim;
122: const PetscInt M = da->M;
123: const PetscInt dof = da->w;
124: const PetscInt s = da->s;
125: const PetscInt sDist = s*dof; /* absolute stencil distance */
126: const PetscInt *lx = da->lx;
127: const DAPeriodicType wrap = da->wrap;
128: MPI_Comm comm;
129: Vec local, global;
130: VecScatter ltog, gtol;
131: IS to, from;
132: PetscTruth flg1 = PETSC_FALSE, flg2 = PETSC_FALSE;
133: PetscMPIInt rank, size;
134: PetscInt i,*idx,nn,left,xs,xe,x,Xs,Xe,start,end,m;
135: PetscErrorCode ierr;
138: if (dim != PETSC_DECIDE && dim != 1) SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"Dimension should be 1: %D",dim);
139: if (dof < 1) SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"Must have 1 or more degrees of freedom per node: %D",dof);
140: if (s < 0) SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"Stencil width cannot be negative: %D",s);
142: da->dim = 1;
143: PetscMalloc(dof*sizeof(char*),&da->fieldname);
144: PetscMemzero(da->fieldname,dof*sizeof(char*));
145: PetscObjectGetComm((PetscObject) da, &comm);
146: MPI_Comm_size(comm,&size);
147: MPI_Comm_rank(comm,&rank);
149: da->m = size;
150: m = da->m;
152: if (s > 0) {
153: /* if not communicating data (s = 0) it is acceptable for some processes to have no nodes */
154: if (M < m) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"More processors than data points! %D %D",m,M);
155: if ((M-1) < s) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Array is too small for stencil! %D %D",M-1,s);
156: }
157: /*
158: Determine locally owned region
159: xs is the first local node number, x is the number of local nodes
160: */
161: if (!lx) {
162: PetscOptionsGetTruth(PETSC_NULL,"-da_partition_blockcomm",&flg1,PETSC_NULL);
163: PetscOptionsGetTruth(PETSC_NULL,"-da_partition_nodes_at_end",&flg2,PETSC_NULL);
164: if (flg1) { /* Block Comm type Distribution */
165: xs = rank*M/m;
166: x = (rank + 1)*M/m - xs;
167: } else if (flg2) { /* The leftover nodes are evenly distributed across the last processes */
168: x = (M + rank)/m;
169: if (M/m == x) { xs = rank*x; }
170: else { xs = rank*(x-1) + (M+rank)%(x*m); }
171: } else { /* The leftover nodes are evenly distributed across the first M%m processes */
172: /* Regular PETSc Distribution */
173: x = M/m + ((M % m) > rank);
174: if (rank >= (M % m)) {xs = (rank * (PetscInt)(M/m) + M % m);}
175: else {xs = rank * (PetscInt)(M/m) + rank;}
176: }
177: } else {
178: x = lx[rank];
179: xs = 0;
180: for (i=0; i<rank; i++) {
181: xs += lx[i];
182: }
183: /* verify that data user provided is consistent */
184: left = xs;
185: for (i=rank; i<size; i++) {
186: left += lx[i];
187: }
188: if (left != M) {
189: SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Sum of lx across processors not equal to M %D %D",left,M);
190: }
191: }
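/* Example: with M = 11 nodes on m = 4 processes and no user-supplied lx, the default
   (regular PETSc) distribution above gives
       rank:  0  1  2  3
       x   :  3  3  3  2
       xs  :  0  3  6  9
   i.e. the first M%m = 3 processes each receive one extra node. */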
193: /* From now on x,xs,xe,Xs,Xe are exact locations in the array, i.e. node counts scaled by dof */
194: x *= dof;
195: xs *= dof;
196: xe = xs + x;
198: /* determine ghost region */
199: if (wrap == DA_XPERIODIC || wrap == DA_XYZGHOSTED) {
200: Xs = xs - sDist;
201: Xe = xe + sDist;
202: } else {
203: if ((xs-sDist) >= 0) Xs = xs-sDist; else Xs = 0;
204: if ((xe+sDist) <= M*dof) Xe = xe+sDist; else Xe = M*dof;
205: }
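/* Example (continuing M = 11, m = 4, dof = 1, s = 1): rank 1 owns xs = 3 .. xe = 6 and its
   ghost region is Xs = 2 .. Xe = 7; on rank 0 the non-periodic case clips the region to
   Xs = 0, whereas with DA_XPERIODIC Xs = -1 and the wrapped global index for that ghost
   point is filled in when idx is built below. */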
207: /* allocate the base parallel and sequential vectors */
208: da->Nlocal = x;
209: VecCreateMPIWithArray(comm,da->Nlocal,PETSC_DECIDE,0,&global);
210: VecSetBlockSize(global,dof);
211: da->nlocal = (Xe-Xs);
212: VecCreateSeqWithArray(PETSC_COMM_SELF,da->nlocal,0,&local);
213: VecSetBlockSize(local,dof);
214:
215: /* Create Local to Global Vector Scatter Context */
216: /* local to global inserts non-ghost point region into global */
217: VecGetOwnershipRange(global,&start,&end);
218: ISCreateStride(comm,x,start,1,&to);
219: ISCreateStride(comm,x,xs-Xs,1,&from);
220: VecScatterCreate(local,from,global,to,&ltog);
221: PetscLogObjectParent(da,ltog);
222: ISDestroy(from);
223: ISDestroy(to);
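/* The local-to-global scatter copies the x owned entries, which sit at offset xs-Xs inside
   the ghosted local vector, into this process's block [start,end) of the global vector;
   ghost slots do not participate. */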
225: /* Create Global to Local Vector Scatter Context */
226: /* global to local must retrieve ghost points */
227: if (wrap == DA_XYZGHOSTED) {
228: if (size == 1) {
229: ISCreateStride(comm,(xe-xs),sDist,1,&to);
230: } else if (!rank) {
231: ISCreateStride(comm,(Xe-xs),sDist,1,&to);
232: } else if (rank == size-1) {
233: ISCreateStride(comm,(xe-Xs),0,1,&to);
234: } else {
235: ISCreateStride(comm,(Xe-Xs),0,1,&to);
236: }
237: } else {
238: ISCreateStride(comm,(Xe-Xs),0,1,&to);
239: }
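/* Destination indices in the ghosted local vector: with DA_XYZGHOSTED the ghost slots that
   lie beyond the physical ends of the domain receive no data, so rank 0 (and a single
   process) starts filling at offset sDist and the last rank stops before its right-hand
   ghost slots; in every other case the full ghosted range starting at 0 is filled. */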
240:
241: PetscMalloc((x+2*sDist)*sizeof(PetscInt),&idx);
242: PetscLogObjectMemory(da,(x+2*sDist)*sizeof(PetscInt));
244: nn = 0;
245: if (wrap == DA_XPERIODIC) { /* Handle all cases with wrap first */
247: for (i=0; i<sDist; i++) { /* Left ghost points */
248: if ((xs-sDist+i)>=0) { idx[nn++] = xs-sDist+i;}
249: else { idx[nn++] = M*dof+(xs-sDist+i);}
250: }
252: for (i=0; i<x; i++) { idx [nn++] = xs + i;} /* Non-ghost points */
253:
254: for (i=0; i<sDist; i++) { /* Right ghost points */
255: if ((xe+i)<M*dof) { idx [nn++] = xe+i; }
256: else { idx [nn++] = (xe+i) - M*dof;}
257: }
258: } else if (wrap == DA_XYZGHOSTED) {
260: if (sDist <= xs) {for (i=0; i<sDist; i++) {idx[nn++] = xs - sDist + i;}}
262: for (i=0; i<x; i++) { idx [nn++] = xs + i;}
263:
264: if ((xe+sDist)<=M*dof) {for (i=0; i<sDist; i++) {idx[nn++]=xe+i;}}
266: } else { /* Now do all cases with no wrapping */
268: if (sDist <= xs) {for (i=0; i<sDist; i++) {idx[nn++] = xs - sDist + i;}}
269: else {for (i=0; i<xs; i++) {idx[nn++] = i;}}
271: for (i=0; i<x; i++) { idx [nn++] = xs + i;}
272:
273: if ((xe+sDist)<=M*dof) {for (i=0; i<sDist; i++) {idx[nn++]=xe+i;}}
274: else {for (i=xe; i<(M*dof); i++) {idx[nn++]=i;}}
275: }
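/* Example (M = 11, m = 4, dof = 1, s = 1): rank 1 gathers global indices
   idx = {2, 3,4,5, 6}  (one left ghost, three owned entries, one right ghost);
   with DA_XPERIODIC rank 0 gathers idx = {10, 0,1,2, 3}, the left ghost wrapping around
   to the last global entry. */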
277: ISCreateGeneral(comm,nn,idx,&from);
278: VecScatterCreate(global,from,local,to,&gtol);
279: PetscLogObjectParent(da,to);
280: PetscLogObjectParent(da,from);
281: PetscLogObjectParent(da,gtol);
282: ISDestroy(to);
283: ISDestroy(from);
284: VecDestroy(local);
285: VecDestroy(global);
287: da->xs = xs; da->xe = xe; da->ys = 0; da->ye = 1; da->zs = 0; da->ze = 1;
288: da->Xs = Xs; da->Xe = Xe; da->Ys = 0; da->Ye = 1; da->Zs = 0; da->Ze = 1;
290: da->gtol = gtol;
291: da->ltog = ltog;
292: da->base = xs;
293: da->ops->view = DAView_1d;
295: /*
296: Set the local to global ordering in the global vector; this allows use
297: of VecSetValuesLocal().
298: */
299: if (wrap == DA_XYZGHOSTED) {
300: PetscInt *tmpidx;
301: if (size == 1) {
302: PetscMalloc((nn+2*sDist)*sizeof(PetscInt),&tmpidx);
303: for (i=0; i<sDist; i++) tmpidx[i] = -1;
304: PetscMemcpy(tmpidx+sDist,idx,nn*sizeof(PetscInt));
305: for (i=nn+sDist; i<nn+2*sDist; i++) tmpidx[i] = -1;
306: PetscFree(idx);
307: idx = tmpidx;
308: nn += 2*sDist;
309: } else if (!rank) { /* must prepend -1 markers for ghost locations that have no global value */
310: PetscMalloc((nn+sDist)*sizeof(PetscInt),&tmpidx);
311: for (i=0; i<sDist; i++) tmpidx[i] = -1;
312: PetscMemcpy(tmpidx+sDist,idx,nn*sizeof(PetscInt));
313: PetscFree(idx);
314: idx = tmpidx;
315: nn += sDist;
316: } else if (rank == size-1) { /* must append -1 markers for ghost locations that have no global value */
317: PetscMalloc((nn+sDist)*sizeof(PetscInt),&tmpidx);
318: PetscMemcpy(tmpidx,idx,nn*sizeof(PetscInt));
319: for (i=nn; i<nn+sDist; i++) tmpidx[i] = -1;
320: PetscFree(idx);
321: idx = tmpidx;
322: nn += sDist;
323: }
324: }
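/* At this point idx has one entry per slot of the ghosted local vector; the -1 entries
   (DA_XYZGHOSTED case) mark ghost slots that lie outside the global vector, so values set
   there through VecSetValuesLocal() are simply dropped, since negative indices are ignored. */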
325: ISLocalToGlobalMappingCreateNC(comm,nn,idx,&da->ltogmap);
326: ISLocalToGlobalMappingBlock(da->ltogmap,da->w,&da->ltogmapb);
327: PetscLogObjectParent(da,da->ltogmap);
329: da->idx = idx;
330: da->Nl = nn;
332: PetscPublishAll(da);
333: return(0);
334: }
339: /*@C
340: DACreate1d - Creates an object that will manage the communication of one-dimensional
341: regular array data that is distributed across some processors.
343: Collective on MPI_Comm
345: Input Parameters:
346: + comm - MPI communicator
347: . wrap - type of periodicity the array should have, if any. Use
348: either DA_NONPERIODIC or DA_XPERIODIC
349: . M - global dimension of the array (use -M to indicate that it may be set to a different value
350: from the command line with -da_grid_x <M>)
351: . dof - number of degrees of freedom per node
352: . s - stencil width
353: - lx - array containing the number of nodes in the X direction on each processor,
354: or PETSC_NULL. If non-null, it must have one entry per process in comm.
356: Output Parameter:
357: . da - the resulting distributed array object
359: Options Database Keys:
360: + -da_view - Calls DAView() at the conclusion of DACreate1d()
361: . -da_grid_x <nx> - number of grid points in the x direction (may be set if M < 0)
362: - -da_refine_x - refinement factor
364: Level: beginner
366: Notes:
367: The array data itself is NOT stored in the DA; it is stored in Vec objects.
368: The appropriate vector objects can be obtained with calls to DACreateGlobalVector()
369: and DACreateLocalVector(), with calls to VecDuplicate() if more are needed.
372: .keywords: distributed array, create, one-dimensional
374: .seealso: DADestroy(), DAView(), DACreate2d(), DACreate3d(), DAGlobalToLocalBegin(), DASetRefinementFactor(),
375: DAGlobalToLocalEnd(), DALocalToGlobal(), DALocalToLocalBegin(), DALocalToLocalEnd(), DAGetRefinementFactor(),
376: DAGetInfo(), DACreateGlobalVector(), DACreateLocalVector(), DACreateNaturalVector(), DALoad(), DAView(), DAGetOwnershipRanges()
378: @*/
379: PetscErrorCode DACreate1d(MPI_Comm comm, DAPeriodicType wrap, PetscInt M, PetscInt dof, PetscInt s, const PetscInt lx[], DA *da)
380: {
382: PetscMPIInt size;
385: DACreate(comm, da);
386: DASetDim(*da, 1);
387: DASetSizes(*da, M, PETSC_DECIDE, PETSC_DECIDE);
388: MPI_Comm_size(comm, &size);
389: DASetNumProcs(*da, size, PETSC_DECIDE, PETSC_DECIDE);
390: DASetPeriodicity(*da, wrap);
391: DASetDof(*da, dof);
392: DASetStencilWidth(*da, s);
393: DASetVertexDivision(*da, lx, PETSC_NULL, PETSC_NULL);
394: /* This violates the behavior for other classes, but right now users expect negative dimensions to be handled this way */
395: DASetFromOptions(*da);
396: DASetType(*da, DA1D);
397: return(0);
398: }
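A minimal usage sketch (a separate illustrative program, not part of da1.c; the grid size,
degrees of freedom, stencil width, initial value, and choice of viewer are arbitrary, and the
pre-DMDA DA interface used throughout this file is assumed):

#include "petscda.h"

int main(int argc,char **argv)
{
  DA             da;
  Vec            global,local;
  PetscErrorCode ierr;

  ierr = PetscInitialize(&argc,&argv,(char*)0,(char*)0);CHKERRQ(ierr);
  /* 1d DA: 64 grid points, 1 degree of freedom per node, stencil width 1, default layout */
  ierr = DACreate1d(PETSC_COMM_WORLD,DA_NONPERIODIC,64,1,1,PETSC_NULL,&da);CHKERRQ(ierr);
  ierr = DACreateGlobalVector(da,&global);CHKERRQ(ierr);
  ierr = DACreateLocalVector(da,&local);CHKERRQ(ierr);
  ierr = VecSet(global,1.0);CHKERRQ(ierr);
  /* fill the owned and ghost entries of the local vector from the global vector */
  ierr = DAGlobalToLocalBegin(da,global,INSERT_VALUES,local);CHKERRQ(ierr);
  ierr = DAGlobalToLocalEnd(da,global,INSERT_VALUES,local);CHKERRQ(ierr);
  /* print the per-process ownership ranges (handled by DAView_1d above) */
  ierr = DAView(da,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
  ierr = VecDestroy(local);CHKERRQ(ierr);
  ierr = VecDestroy(global);CHKERRQ(ierr);
  ierr = DADestroy(da);CHKERRQ(ierr);
  ierr = PetscFinalize();CHKERRQ(ierr);
  return 0;
}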