Actual source code: dagtol.c

  1: /*
  2:   Code for manipulating distributed regular arrays in parallel.
  3: */

  5: #include <petsc/private/dmdaimpl.h>

  7: PetscErrorCode DMGlobalToLocalBegin_DA(DM da, Vec g, InsertMode mode, Vec l)
  8: {
  9:   DM_DA *dd = (DM_DA *)da->data;

 11:   PetscFunctionBegin;
 15:   PetscCall(VecScatterBegin(dd->gtol, g, l, mode, SCATTER_FORWARD));
 16:   PetscFunctionReturn(PETSC_SUCCESS);
 17: }

 19: PetscErrorCode DMGlobalToLocalEnd_DA(DM da, Vec g, InsertMode mode, Vec l)
 20: {
 21:   DM_DA *dd = (DM_DA *)da->data;

 23:   PetscFunctionBegin;
 27:   PetscCall(VecScatterEnd(dd->gtol, g, l, mode, SCATTER_FORWARD));
 28:   PetscFunctionReturn(PETSC_SUCCESS);
 29: }
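/* Illustrative sketch, not part of the original dagtol.c: the two routines
   above implement the DMDA side of the public DMGlobalToLocalBegin() /
   DMGlobalToLocalEnd() interface. A typical ghost-point update from user
   code looks like the following; the grid size and the names
   GhostUpdateSketch, da, g, and l are examples only. */
static PetscErrorCode GhostUpdateSketch(void)
{
  DM  da;
  Vec g, l;

  PetscFunctionBegin;
  PetscCall(DMDACreate1d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, 128, 1, 1, NULL, &da));
  PetscCall(DMSetUp(da));
  PetscCall(DMCreateGlobalVector(da, &g));
  PetscCall(DMCreateLocalVector(da, &l));
  PetscCall(VecSet(g, 1.0));
  /* these dispatch to DMGlobalToLocalBegin_DA()/DMGlobalToLocalEnd_DA() above */
  PetscCall(DMGlobalToLocalBegin(da, g, INSERT_VALUES, l));
  PetscCall(DMGlobalToLocalEnd(da, g, INSERT_VALUES, l));
  PetscCall(VecDestroy(&l));
  PetscCall(VecDestroy(&g));
  PetscCall(DMDestroy(&da));
  PetscFunctionReturn(PETSC_SUCCESS);
}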

 31: PetscErrorCode DMLocalToGlobalBegin_DA(DM da, Vec l, InsertMode mode, Vec g)
 32: {
 33:   DM_DA *dd = (DM_DA *)da->data;

 35:   PetscFunctionBegin;
 39:   if (mode == ADD_VALUES) {
 40:     PetscCall(VecScatterBegin(dd->gtol, l, g, ADD_VALUES, SCATTER_REVERSE));
 41:   } else if (mode == INSERT_VALUES) {
 42:     PetscCheck(dd->bx == DM_BOUNDARY_GHOSTED || dd->bx == DM_BOUNDARY_NONE || dd->s <= 0 || dd->m != 1, PetscObjectComm((PetscObject)da), PETSC_ERR_SUP, "Available only for boundary none or with parallelism in x direction");
 43:     PetscCheck(dd->by == DM_BOUNDARY_GHOSTED || dd->by == DM_BOUNDARY_NONE || dd->s <= 0 || dd->n != 1, PetscObjectComm((PetscObject)da), PETSC_ERR_SUP, "Available only for boundary none or with parallelism in y direction");
 44:     PetscCheck(dd->bz == DM_BOUNDARY_GHOSTED || dd->bz == DM_BOUNDARY_NONE || dd->s <= 0 || dd->p != 1, PetscObjectComm((PetscObject)da), PETSC_ERR_SUP, "Available only for boundary none or with parallelism in z direction");
 45:     PetscCall(VecScatterBegin(dd->gtol, l, g, INSERT_VALUES, SCATTER_REVERSE_LOCAL));
 46:   } else SETERRQ(PetscObjectComm((PetscObject)da), PETSC_ERR_SUP, "Not yet implemented");
 47:   PetscFunctionReturn(PETSC_SUCCESS);
 48: }

 50: PetscErrorCode DMLocalToGlobalEnd_DA(DM da, Vec l, InsertMode mode, Vec g)
 51: {
 52:   DM_DA *dd = (DM_DA *)da->data;

 54:   PetscFunctionBegin;
 58:   if (mode == ADD_VALUES) {
 59:     PetscCall(VecScatterEnd(dd->gtol, l, g, ADD_VALUES, SCATTER_REVERSE));
 60:   } else if (mode == INSERT_VALUES) {
 61:     PetscCall(VecScatterEnd(dd->gtol, l, g, INSERT_VALUES, SCATTER_REVERSE_LOCAL));
 62:   } else SETERRQ(PetscObjectComm((PetscObject)da), PETSC_ERR_SUP, "Not yet implemented");
 63:   PetscFunctionReturn(PETSC_SUCCESS);
 64: }
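/* Illustrative sketch, not part of the original dagtol.c: the ADD_VALUES
   branch above is the usual path when ghosted local contributions (for
   example from element-wise assembly) are summed back into the global
   vector. The names AddBackSketch, da, l, and g are examples only. */
static PetscErrorCode AddBackSketch(DM da, Vec l, Vec g)
{
  PetscFunctionBegin;
  /* overlapping ghost contributions from neighboring processes are summed;
     this is why INSERT_VALUES is restricted by the PetscCheck()s above */
  PetscCall(DMLocalToGlobalBegin(da, l, ADD_VALUES, g));
  PetscCall(DMLocalToGlobalEnd(da, l, ADD_VALUES, g));
  PetscFunctionReturn(PETSC_SUCCESS);
}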

 66: /*
 67:   DMDAGlobalToNatural_Create - Create the global to natural scatter object

 69:   Collective

 71:   Input Parameter:
 72: . da - the `DMDA` context

 74:   Level: developer

 76:   Note:
 77:   This is an internal routine called by `DMDAGlobalToNaturalBegin()` and
 78:   `DMDANaturalToGlobalBegin()` to create the scatter context.

 80: .seealso: [](sec_struct), `DM`, `DMDA`, `DMDAGlobalToNaturalBegin()`, `DMDAGlobalToNaturalEnd()`, `DMLocalToGlobalBegin()`, `DMDACreate2d()`,
 81:           `DMGlobalToLocalBegin()`, `DMGlobalToLocalEnd()`, `DMDACreateNaturalVector()`
 82: */
 83: static PetscErrorCode DMDAGlobalToNatural_Create(DM da)
 84: {
 85:   PetscInt m, start, Nlocal;
 86:   IS       from, to;
 87:   Vec      global;
 88:   DM_DA   *dd = (DM_DA *)da->data;

 90:   PetscFunctionBegin;
 92:   PetscCheck(dd->natural, PetscObjectComm((PetscObject)da), PETSC_ERR_ORDER, "Natural layout vector not yet created; cannot scatter into it");

 94:   /* create the scatter context */
 95:   PetscCall(VecGetLocalSize(dd->natural, &m));
 96:   PetscCall(VecGetOwnershipRange(dd->natural, &start, NULL));

 98:   PetscCall(DMDAGetNatural_Private(da, &Nlocal, &to));
 99:   PetscCheck(Nlocal == m, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Internal error: Nlocal %" PetscInt_FMT " local vector size %" PetscInt_FMT, Nlocal, m);
100:   PetscCall(ISCreateStride(PetscObjectComm((PetscObject)da), m, start, 1, &from));
101:   PetscCall(VecCreateMPIWithArray(PetscObjectComm((PetscObject)da), dd->w, dd->Nlocal, PETSC_DETERMINE, NULL, &global));
102:   PetscCall(VecScatterCreate(global, from, dd->natural, to, &dd->gton));
103:   PetscCall(VecDestroy(&global));
104:   PetscCall(ISDestroy(&from));
105:   PetscCall(ISDestroy(&to));
106:   PetscFunctionReturn(PETSC_SUCCESS);
107: }
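/* Illustrative sketch, not part of the original dagtol.c: the same
   IS + VecScatterCreate() pattern used above, reduced to a self-contained
   example that copies one MPI vector into another of the same layout. Both
   index sets here are identity strides; DMDAGlobalToNatural_Create() instead
   takes the natural-ordering IS from DMDAGetNatural_Private(). The names
   ScatterSketch, src, dst, nloc, and rstart are examples only. */
static PetscErrorCode ScatterSketch(MPI_Comm comm)
{
  Vec        src, dst;
  IS         from, to;
  VecScatter sc;
  PetscInt   nloc = 4, rstart;

  PetscFunctionBegin;
  PetscCall(VecCreateMPI(comm, nloc, PETSC_DETERMINE, &src));
  PetscCall(VecCreateMPI(comm, nloc, PETSC_DETERMINE, &dst));
  /* src and dst share the same layout, so one ownership range serves both */
  PetscCall(VecGetOwnershipRange(src, &rstart, NULL));
  PetscCall(ISCreateStride(comm, nloc, rstart, 1, &from));
  PetscCall(ISCreateStride(comm, nloc, rstart, 1, &to));
  PetscCall(VecScatterCreate(src, from, dst, to, &sc));
  PetscCall(VecScatterBegin(sc, src, dst, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(sc, src, dst, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterDestroy(&sc));
  PetscCall(ISDestroy(&from));
  PetscCall(ISDestroy(&to));
  PetscCall(VecDestroy(&src));
  PetscCall(VecDestroy(&dst));
  PetscFunctionReturn(PETSC_SUCCESS);
}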

109: /*@
110:   DMDAGlobalToNaturalBegin - Maps values from the global vector obtained with `DMCreateGlobalVector()` to a global vector
111:   in the "natural" grid ordering. Must be followed by
112:   `DMDAGlobalToNaturalEnd()` to complete the exchange.

114:   Neighbor-wise Collective

116:   Input Parameters:
117: + da   - the `DMDA` context
118: . g    - the global vector, see `DMCreateGlobalVector()`
119: - mode - one of `INSERT_VALUES` or `ADD_VALUES`

121:   Output Parameter:
122: . n - the natural ordering values, see `DMDACreateNaturalVector()`

124:   Level: advanced

126:   Notes:
127:   The global and natural vectors used here need not be the same as those
128:   obtained from `DMCreateGlobalVector()` and `DMDACreateNaturalVector()`, BUT they
129:   must have the same parallel data layout; they could, for example, be
130:   obtained with `VecDuplicate()` from the `DMDA` originating vectors.

132:   You must call `DMDACreateNaturalVector()` before using this routine

134: .seealso: [](sec_struct), `DM`, `DMDA`, `DMDAGlobalToNaturalEnd()`, `DMLocalToGlobalBegin()`, `DMDACreate2d()`,
135:           `DMGlobalToLocalBegin()`, `DMGlobalToLocalEnd()`, `DMDACreateNaturalVector()`
136: @*/
137: PetscErrorCode DMDAGlobalToNaturalBegin(DM da, Vec g, InsertMode mode, Vec n)
138: {
139:   DM_DA *dd = (DM_DA *)da->data;

141:   PetscFunctionBegin;
145:   if (!dd->gton) {
146:     /* create the scatter context */
147:     PetscCall(DMDAGlobalToNatural_Create(da));
148:   }
149:   PetscCall(VecScatterBegin(dd->gton, g, n, mode, SCATTER_FORWARD));
150:   PetscFunctionReturn(PETSC_SUCCESS);
151: }

153: /*@
154:   DMDAGlobalToNaturalEnd - Maps values from the global vector obtained with `DMCreateGlobalVector()` to a global vector
155:   in the natural ordering. Must be preceded by `DMDAGlobalToNaturalBegin()`.

157:   Neighbor-wise Collective

159:   Input Parameters:
160: + da   - the `DMDA` context
161: . g    - the global vector, see `DMCreateGlobalVector()`
162: - mode - one of `INSERT_VALUES` or `ADD_VALUES`

164:   Output Parameter:
165: . n - the global values in the natural ordering, see `DMDACreateNaturalVector()`

167:   Level: advanced

169:   Note:
170:   The global and natural vectors used here need not be the same as those
171:   obtained from `DMCreateGlobalVector()` and `DMDACreateNaturalVector()`, BUT they
172:   must have the same parallel data layout; they could, for example, be
173:   obtained with `VecDuplicate()` from the `DMDA` originating vectors.

175: .seealso: [](sec_struct), `DM`, `DMDA`, `DMDAGlobalToNaturalBegin()`, `DMLocalToGlobalBegin()`, `DMDACreate2d()`,
176:           `DMGlobalToLocalBegin()`, `DMGlobalToLocalEnd()`, `DMDACreateNaturalVector()`
177: @*/
178: PetscErrorCode DMDAGlobalToNaturalEnd(DM da, Vec g, InsertMode mode, Vec n)
179: {
180:   DM_DA *dd = (DM_DA *)da->data;

182:   PetscFunctionBegin;
186:   PetscCall(VecScatterEnd(dd->gton, g, n, mode, SCATTER_FORWARD));
187:   PetscFunctionReturn(PETSC_SUCCESS);
188: }
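/* Illustrative sketch, not part of the original dagtol.c: the documented
   calling sequence for DMDAGlobalToNaturalBegin()/DMDAGlobalToNaturalEnd().
   DMDACreateNaturalVector() must be called first so that dd->natural exists
   when the scatter context is built lazily on the first Begin call. The
   names NaturalOrderSketch, da, g, and nat are examples only. */
static PetscErrorCode NaturalOrderSketch(DM da, Vec g)
{
  Vec nat;

  PetscFunctionBegin;
  PetscCall(DMDACreateNaturalVector(da, &nat));
  PetscCall(DMDAGlobalToNaturalBegin(da, g, INSERT_VALUES, nat));
  PetscCall(DMDAGlobalToNaturalEnd(da, g, INSERT_VALUES, nat));
  /* nat now holds the values of g in natural (i,j,k) ordering, e.g. for I/O */
  PetscCall(VecDestroy(&nat));
  PetscFunctionReturn(PETSC_SUCCESS);
}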

190: /*@
191:   DMDANaturalToGlobalBegin - Maps values from a global vector in the "natural" ordering
192:   to a global vector in the PETSc `DMDA` grid ordering. Must be followed by
193:   `DMDANaturalToGlobalEnd()` to complete the exchange.

195:   Neighbor-wise Collective

197:   Input Parameters:
198: + da   - the `DMDA` context
 199: . n    - the global vector in a natural ordering, see `DMDACreateNaturalVector()`
200: - mode - one of `INSERT_VALUES` or `ADD_VALUES`

202:   Output Parameter:
 203: . g - the values in the `DMDA` ordering

205:   Level: advanced

207:   Note:
208:   The global and natural vectors used here need not be the same as those
209:   obtained from `DMCreateGlobalVector()` and `DMDACreateNaturalVector()`, BUT they
210:   must have the same parallel data layout; they could, for example, be
211:   obtained with `VecDuplicate()` from the `DMDA` originating vectors.

213: .seealso: [](sec_struct), `DM`, `DMDA`, `DMDAGlobalToNaturalEnd()`, `DMDAGlobalToNaturalBegin()`, `DMLocalToGlobalBegin()`, `DMDACreate2d()`,
214:           `DMGlobalToLocalBegin()`, `DMGlobalToLocalEnd()`, `DMDACreateNaturalVector()`
215: @*/
216: PetscErrorCode DMDANaturalToGlobalBegin(DM da, Vec n, InsertMode mode, Vec g)
217: {
218:   DM_DA *dd = (DM_DA *)da->data;

220:   PetscFunctionBegin;
224:   if (!dd->gton) {
225:     /* create the scatter context */
226:     PetscCall(DMDAGlobalToNatural_Create(da));
227:   }
228:   PetscCall(VecScatterBegin(dd->gton, n, g, mode, SCATTER_REVERSE));
229:   PetscFunctionReturn(PETSC_SUCCESS);
230: }

232: /*@
233:   DMDANaturalToGlobalEnd - Maps values from the natural ordering global vector
234:   to a global vector in the PETSc `DMDA` ordering. Must be preceded by `DMDANaturalToGlobalBegin()`.

236:   Neighbor-wise Collective

238:   Input Parameters:
239: + da   - the `DMDA` context
 240: . n    - the global vector in a natural ordering
241: - mode - one of `INSERT_VALUES` or `ADD_VALUES`

243:   Output Parameter:
 244: . g - the global values in the PETSc `DMDA` ordering

246:   Level: advanced

248:   Note:
 249:   The global and natural vectors used here need not be the same as those
250:   obtained from `DMCreateGlobalVector()` and `DMDACreateNaturalVector()`, BUT they
251:   must have the same parallel data layout; they could, for example, be
252:   obtained with `VecDuplicate()` from the `DMDA` originating vectors.

254: .seealso: [](sec_struct), `DM`, `DMDA`, `DMDAGlobalToNaturalBegin()`, `DMDAGlobalToNaturalEnd()`, `DMLocalToGlobalBegin()`, `DMDACreate2d()`,
255:           `DMGlobalToLocalBegin()`, `DMGlobalToLocalEnd()`, `DMDACreateNaturalVector()`
256: @*/
257: PetscErrorCode DMDANaturalToGlobalEnd(DM da, Vec n, InsertMode mode, Vec g)
258: {
259:   DM_DA *dd = (DM_DA *)da->data;

261:   PetscFunctionBegin;
265:   PetscCall(VecScatterEnd(dd->gton, n, g, mode, SCATTER_REVERSE));
266:   PetscFunctionReturn(PETSC_SUCCESS);
267: }
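/* Illustrative sketch, not part of the original dagtol.c: the reverse map,
   for example after reading a field stored on disk in natural ordering back
   into the PETSc DMDA ordering. The names NaturalToGlobalSketch, da, nat,
   and g are examples only. */
static PetscErrorCode NaturalToGlobalSketch(DM da, Vec nat, Vec g)
{
  PetscFunctionBegin;
  PetscCall(DMDANaturalToGlobalBegin(da, nat, INSERT_VALUES, g));
  PetscCall(DMDANaturalToGlobalEnd(da, nat, INSERT_VALUES, g));
  PetscFunctionReturn(PETSC_SUCCESS);
}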