Actual source code: dagtol.c

petsc-master 2018-07-19
/*
  Code for manipulating distributed regular arrays in parallel.
*/

#include <petsc/private/dmdaimpl.h>

/* Begins the scatter of values from a DMDA global vector into a local (ghosted) vector */
PetscErrorCode  DMGlobalToLocalBegin_DA(DM da,Vec g,InsertMode mode,Vec l)
{
  DM_DA          *dd = (DM_DA*)da->data;

  VecScatterBegin(dd->gtol,g,l,mode,SCATTER_FORWARD);
  return(0);
}

/* Completes the scatter begun with DMGlobalToLocalBegin_DA() */
PetscErrorCode  DMGlobalToLocalEnd_DA(DM da,Vec g,InsertMode mode,Vec l)
{
  DM_DA          *dd = (DM_DA*)da->data;

  VecScatterEnd(dd->gtol,g,l,mode,SCATTER_FORWARD);
  return(0);
}
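/*
   Usage sketch (illustrative, not part of this source file): the two routines
   above implement DMGlobalToLocalBegin()/DMGlobalToLocalEnd() for DMDA. A
   typical ghost-point update looks like the following; the grid size, dof,
   and stencil width are arbitrary assumptions, and error checking is elided
   in the same stripped style as the rest of this listing.
*/
PetscErrorCode GhostUpdateSketch(void)
{
  DM  da;
  Vec g,l;

  /* hypothetical 8x8 grid, 1 dof, stencil width 1 */
  DMDACreate2d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DMDA_STENCIL_STAR,8,8,PETSC_DECIDE,PETSC_DECIDE,1,1,NULL,NULL,&da);
  DMSetUp(da);
  DMCreateGlobalVector(da,&g);
  DMCreateLocalVector(da,&l);
  VecSet(g,1.0);
  /* scatter owned values plus ghost points into the local vector */
  DMGlobalToLocalBegin(da,g,INSERT_VALUES,l);
  DMGlobalToLocalEnd(da,g,INSERT_VALUES,l);
  VecDestroy(&l);
  VecDestroy(&g);
  DMDestroy(&da);
  return(0);
}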

/* Begins the scatter of values from a DMDA local (ghosted) vector back into the global vector */
PetscErrorCode  DMLocalToGlobalBegin_DA(DM da,Vec l,InsertMode mode,Vec g)
{
  DM_DA          *dd = (DM_DA*)da->data;

  if (mode == ADD_VALUES) {
    VecScatterBegin(dd->gtol,l,g,ADD_VALUES,SCATTER_REVERSE);
  } else if (mode == INSERT_VALUES) {
    if (dd->bx != DM_BOUNDARY_GHOSTED && dd->bx != DM_BOUNDARY_NONE && dd->s > 0 && dd->m == 1) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Available only for boundary none or with parallelism in x direction");
    if (dd->by != DM_BOUNDARY_GHOSTED && dd->by != DM_BOUNDARY_NONE && dd->s > 0 && dd->n == 1) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Available only for boundary none or with parallelism in y direction");
    if (dd->bz != DM_BOUNDARY_GHOSTED && dd->bz != DM_BOUNDARY_NONE && dd->s > 0 && dd->p == 1) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Available only for boundary none or with parallelism in z direction");
    VecScatterBegin(dd->gtol,l,g,INSERT_VALUES,SCATTER_REVERSE_LOCAL);
  } else SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Not yet implemented");
  return(0);
}

/* Completes the scatter begun with DMLocalToGlobalBegin_DA() */
PetscErrorCode  DMLocalToGlobalEnd_DA(DM da,Vec l,InsertMode mode,Vec g)
{
  DM_DA          *dd = (DM_DA*)da->data;

  if (mode == ADD_VALUES) {
    VecScatterEnd(dd->gtol,l,g,ADD_VALUES,SCATTER_REVERSE);
  } else if (mode == INSERT_VALUES) {
    VecScatterEnd(dd->gtol,l,g,INSERT_VALUES,SCATTER_REVERSE_LOCAL);
  } else SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Not yet implemented");
  return(0);
}
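/*
   Usage sketch (illustrative): accumulating ghosted local contributions back
   into the global vector with ADD_VALUES, the usual assembly pattern. The
   da, l, and g arguments are assumed to come from a setup like the sketch
   above.
*/
PetscErrorCode LocalToGlobalAddSketch(DM da,Vec l,Vec g)
{
  VecSet(g,0.0);
  /* each process contributes its local entries, including ghost points;
     overlapping ghost contributions are summed on the owning process */
  DMLocalToGlobalBegin(da,l,ADD_VALUES,g);
  DMLocalToGlobalEnd(da,l,ADD_VALUES,g);
  return(0);
}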

extern PetscErrorCode DMDAGetNatural_Private(DM,PetscInt*,IS*);
/*
   DMDAGlobalToNatural_Create - Creates the global to natural scatter object

   Collective on DMDA

   Input Parameter:
.  da - the distributed array context

   Level: developer

   Notes:
   This is an internal routine called by DMDAGlobalToNaturalBegin() to
   create the scatter context.

.keywords: distributed array, global to natural, create

.seealso: DMDAGlobalToNaturalBegin(), DMDAGlobalToNaturalEnd(), DMLocalToGlobalBegin(), DMDACreate2d(),
          DMGlobalToLocalBegin(), DMGlobalToLocalEnd(), DMDACreateNaturalVector()
*/
PetscErrorCode DMDAGlobalToNatural_Create(DM da)
{
  PetscInt       m,start,Nlocal;
  IS             from,to;
  Vec            global;
  DM_DA          *dd = (DM_DA*)da->data;

  if (!dd->natural) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ORDER,"Natural layout vector not yet created; cannot scatter into it");

  /* create the scatter context */
  VecGetLocalSize(dd->natural,&m);
  VecGetOwnershipRange(dd->natural,&start,NULL);

  DMDAGetNatural_Private(da,&Nlocal,&to);
  if (Nlocal != m) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Internal error: Nlocal %D local vector size %D",Nlocal,m);
  ISCreateStride(PetscObjectComm((PetscObject)da),m,start,1,&from);
  VecCreateMPIWithArray(PetscObjectComm((PetscObject)da),dd->w,dd->Nlocal,PETSC_DETERMINE,0,&global);
  VecScatterCreate(global,from,dd->natural,to,&dd->gton);
  VecDestroy(&global);
  ISDestroy(&from);
  ISDestroy(&to);
  return(0);
}

/*@
   DMDAGlobalToNaturalBegin - Maps values from the global vector to a global vector
   in the "natural" grid ordering. Must be followed by
   DMDAGlobalToNaturalEnd() to complete the exchange.

   Neighbor-wise Collective on DMDA

   Input Parameters:
+  da - the distributed array context
.  g - the global vector
-  mode - one of INSERT_VALUES or ADD_VALUES

   Output Parameter:
.  n - the global vector in the natural ordering

   Level: advanced

   Notes:
   The global and natural vectors used here need not be the same as those
   obtained from DMCreateGlobalVector() and DMDACreateNaturalVector(), BUT they
   must have the same parallel data layout; they could, for example, be
   obtained with VecDuplicate() from the DMDA originating vectors.

   You must call DMDACreateNaturalVector() before using this routine.

.keywords: distributed array, global to natural, begin

.seealso: DMDAGlobalToNaturalEnd(), DMLocalToGlobalBegin(), DMDACreate2d(),
          DMGlobalToLocalBegin(), DMGlobalToLocalEnd(), DMDACreateNaturalVector()

@*/
PetscErrorCode  DMDAGlobalToNaturalBegin(DM da,Vec g,InsertMode mode,Vec n)
{
  DM_DA          *dd = (DM_DA*)da->data;

  if (!dd->gton) {
    /* create the scatter context */
    DMDAGlobalToNatural_Create(da);
  }
  VecScatterBegin(dd->gton,g,n,mode,SCATTER_FORWARD);
  return(0);
}

/*@
   DMDAGlobalToNaturalEnd - Maps values from the global vector to a global vector
   in the natural ordering. Must be preceded by DMDAGlobalToNaturalBegin().

   Neighbor-wise Collective on DMDA

   Input Parameters:
+  da - the distributed array context
.  g - the global vector
-  mode - one of INSERT_VALUES or ADD_VALUES

   Output Parameter:
.  n - the global vector in the natural ordering

   Level: advanced

   Notes:
   The global and natural vectors used here need not be the same as those
   obtained from DMCreateGlobalVector() and DMDACreateNaturalVector(), BUT they
   must have the same parallel data layout; they could, for example, be
   obtained with VecDuplicate() from the DMDA originating vectors.

.keywords: distributed array, global to natural, end

.seealso: DMDAGlobalToNaturalBegin(), DMLocalToGlobalBegin(), DMDACreate2d(),
          DMGlobalToLocalBegin(), DMGlobalToLocalEnd(), DMDACreateNaturalVector()

@*/
PetscErrorCode  DMDAGlobalToNaturalEnd(DM da,Vec g,InsertMode mode,Vec n)
{
  DM_DA          *dd = (DM_DA*)da->data;

  VecScatterEnd(dd->gton,g,n,mode,SCATTER_FORWARD);
  return(0);
}
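/*
   Usage sketch (illustrative): moving a DMDA global vector into the natural
   (lexicographic) ordering, e.g. before writing output that should be
   independent of the parallel decomposition. Per the notes above,
   DMDACreateNaturalVector() must be called before the first scatter.
*/
PetscErrorCode GlobalToNaturalSketch(DM da,Vec g)
{
  Vec n;

  DMDACreateNaturalVector(da,&n);
  DMDAGlobalToNaturalBegin(da,g,INSERT_VALUES,n);
  DMDAGlobalToNaturalEnd(da,g,INSERT_VALUES,n);
  /* ... use n, e.g. VecView(n,PETSC_VIEWER_STDOUT_WORLD) ... */
  VecDestroy(&n);
  return(0);
}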

/*@
   DMDANaturalToGlobalBegin - Maps values from a global vector in the "natural" ordering
   to a global vector in the PETSc DMDA grid ordering. Must be followed by
   DMDANaturalToGlobalEnd() to complete the exchange.

   Neighbor-wise Collective on DMDA

   Input Parameters:
+  da - the distributed array context
.  n - the global vector in the natural ordering
-  mode - one of INSERT_VALUES or ADD_VALUES

   Output Parameter:
.  g - the global vector in the PETSc DMDA ordering

   Level: advanced

   Notes:
   The global and natural vectors used here need not be the same as those
   obtained from DMCreateGlobalVector() and DMDACreateNaturalVector(), BUT they
   must have the same parallel data layout; they could, for example, be
   obtained with VecDuplicate() from the DMDA originating vectors.

.keywords: distributed array, natural to global, begin

.seealso: DMDAGlobalToNaturalEnd(), DMDAGlobalToNaturalBegin(), DMLocalToGlobalBegin(), DMDACreate2d(),
          DMGlobalToLocalBegin(), DMGlobalToLocalEnd(), DMDACreateNaturalVector()

@*/
PetscErrorCode  DMDANaturalToGlobalBegin(DM da,Vec n,InsertMode mode,Vec g)
{
  DM_DA          *dd = (DM_DA*)da->data;

  if (!dd->gton) {
    /* create the scatter context */
    DMDAGlobalToNatural_Create(da);
  }
  VecScatterBegin(dd->gton,n,g,mode,SCATTER_REVERSE);
  return(0);
}

/*@
   DMDANaturalToGlobalEnd - Maps values from the natural ordering global vector
   to a global vector in the PETSc DMDA ordering. Must be preceded by DMDANaturalToGlobalBegin().

   Neighbor-wise Collective on DMDA

   Input Parameters:
+  da - the distributed array context
.  n - the global vector in the natural ordering
-  mode - one of INSERT_VALUES or ADD_VALUES

   Output Parameter:
.  g - the global vector in the PETSc DMDA ordering

   Level: advanced

   Notes:
   The global and natural vectors used here need not be the same as those
   obtained from DMCreateGlobalVector() and DMDACreateNaturalVector(), BUT they
   must have the same parallel data layout; they could, for example, be
   obtained with VecDuplicate() from the DMDA originating vectors.

.keywords: distributed array, natural to global, end

.seealso: DMDAGlobalToNaturalBegin(), DMDAGlobalToNaturalEnd(), DMLocalToGlobalBegin(), DMDACreate2d(),
          DMGlobalToLocalBegin(), DMGlobalToLocalEnd(), DMDACreateNaturalVector()

@*/
PetscErrorCode  DMDANaturalToGlobalEnd(DM da,Vec n,InsertMode mode,Vec g)
{
  DM_DA          *dd = (DM_DA*)da->data;

  VecScatterEnd(dd->gton,n,g,mode,SCATTER_REVERSE);
  return(0);
}
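/*
   Usage sketch (illustrative): the reverse direction, e.g. after reading a
   vector stored in the natural ordering from a file, scatter it back into
   the PETSc DMDA ordering.
*/
PetscErrorCode NaturalToGlobalSketch(DM da,Vec n,Vec g)
{
  DMDANaturalToGlobalBegin(da,n,INSERT_VALUES,g);
  DMDANaturalToGlobalEnd(da,n,INSERT_VALUES,g);
  return(0);
}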