Actual source code: pvecimpl.h

#pragma once

#include <../src/vec/vec/impls/dvecimpl.h>

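/* Per-rank header exchanged during VecAssemblyBegin/End_MPI(): the insert mode for a batch
   of stashed off-process entries plus the counts of unblocked and blocked entries in the
   accompanying data message */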
typedef struct {
  PetscInt insertmode; /* InsertMode (INSERT_VALUES or ADD_VALUES) for the entries in this message */
  PetscInt count;      /* number of unblocked entries */
  PetscInt bcount;     /* number of blocked entries */
} VecAssemblyHeader;

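/* Per-rank frame of stashed data matching a VecAssemblyHeader: index and value arrays for
   the unblocked and blocked entries, plus flags recording whether the corresponding sends
   are still pending */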
typedef struct {
  PetscInt    *ints;     /* indices of unblocked entries */
  PetscInt    *intb;     /* indices of blocked entries */
  PetscScalar *scalars;  /* values of unblocked entries */
  PetscScalar *scalarb;  /* values of blocked entries */
  char         pendings; /* nonzero while the unblocked send is still pending */
  char         pendingb; /* nonzero while the blocked send is still pending */
} VecAssemblyFrame;

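/* Implementation data for standard parallel (MPI) vectors: the common VECHEADER fields,
   ghost-point support, communication state for VecAssemblyBegin/End_MPI(), and state for
   COO-based assembly with VecSetValuesCOO() */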
typedef struct {
  VECHEADER
  PetscInt   nghost;      /* number of ghost points on this process */
  Vec        localrep;    /* local representation of vector */
  VecScatter localupdate; /* scatter to update ghost values */

  PetscBool          assembly_subset;     /* Subsequent assemblies will set a subset (perhaps equal) of the off-process entries set on the first assembly */
  PetscBool          first_assembly_done; /* Has the first assembly been done? */
  PetscBool          use_status;          /* Use MPI_Status to determine the number of items in each message */
  PetscMPIInt        nsendranks;
  PetscMPIInt        nrecvranks;
  PetscMPIInt       *sendranks;
  PetscMPIInt       *recvranks;
  VecAssemblyHeader *sendhdr, *recvhdr;
  VecAssemblyFrame  *sendptrs; /* pointers to the main messages */
  MPI_Request       *sendreqs;
  MPI_Request       *recvreqs;
  PetscSegBuffer     segrecvint;
  PetscSegBuffer     segrecvscalar;
  PetscSegBuffer     segrecvframe;
#if defined(PETSC_HAVE_NVSHMEM)
  PetscBool use_nvshmem; /* Try to use NVSHMEM in communication of, for example, VecNorm */
#endif

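  /* COO assembly works in two parts. Entries destined for local rows are folded in directly:
     jmap1[] and perm1[] map each vector entry to its (possibly repeated) slots in the COO
     arrays. Entries destined for remote rows are packed into sendbuf[] via Cperm[], exchanged
     through coo_sf, and the received values are folded in via imap2[], jmap2[] and perm2[] */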
  /* COO fields, assuming m is the vector's local size */
  PetscCount  coo_n; /* number of entries passed to VecSetPreallocationCOO() */
  PetscCount  tot1;  /* Total number of local entries in the COO arrays */
  PetscCount *jmap1; /* [m+1]: the i-th entry of the vector has jmap1[i+1]-jmap1[i] repeats in the COO arrays */
  PetscCount *perm1; /* [tot1]: permutation array for local entries */

  PetscCount  nnz2;  /* Number of unique entries in recvbuf */
  PetscCount *imap2; /* [nnz2]: the i-th unique entry in recvbuf is the imap2[i]-th entry of the vector */
  PetscCount *jmap2; /* [nnz2+1]: the i-th unique entry in recvbuf has jmap2[i+1]-jmap2[i] repeats in recvbuf */
  PetscCount *perm2; /* [recvlen]: permutation array for received entries */

  PetscSF      coo_sf;
  PetscCount   sendlen, recvlen;  /* Lengths (in units of PetscScalar) of send/recvbuf */
  PetscCount  *Cperm;             /* [sendlen]: permutation array to fill sendbuf[]. 'C' for communication */
  PetscScalar *sendbuf, *recvbuf; /* Buffers for remote values in VecSetValuesCOO() */
} Vec_MPI;

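/* Implementations of Vec operations for the VECMPI type; most are installed in the vector's
   function table when the type is created */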
PETSC_INTERN PetscErrorCode VecMTDot_MPI(Vec, PetscInt, const Vec[], PetscScalar *);
PETSC_INTERN PetscErrorCode VecView_MPI_Binary(Vec, PetscViewer);
PETSC_INTERN PetscErrorCode VecView_MPI_Draw_LG(Vec, PetscViewer);
PETSC_INTERN PetscErrorCode VecView_MPI_Socket(Vec, PetscViewer);
PETSC_INTERN PetscErrorCode VecView_MPI_HDF5(Vec, PetscViewer);
PETSC_INTERN PetscErrorCode VecView_MPI_ADIOS(Vec, PetscViewer);
PETSC_EXTERN PetscErrorCode VecView_MPI(Vec, PetscViewer);
PETSC_INTERN PetscErrorCode VecGetSize_MPI(Vec, PetscInt *);
PETSC_INTERN PetscErrorCode VecGetValues_MPI(Vec, PetscInt, const PetscInt[], PetscScalar[]);
PETSC_INTERN PetscErrorCode VecSetValues_MPI(Vec, PetscInt, const PetscInt[], const PetscScalar[], InsertMode);
PETSC_INTERN PetscErrorCode VecSetValuesBlocked_MPI(Vec, PetscInt, const PetscInt[], const PetscScalar[], InsertMode);
PETSC_INTERN PetscErrorCode VecAssemblyBegin_MPI(Vec);
PETSC_INTERN PetscErrorCode VecAssemblyEnd_MPI(Vec);
PETSC_INTERN PetscErrorCode VecAssemblyReset_MPI(Vec);
PETSC_EXTERN PetscErrorCode VecCreate_MPI(Vec);

PETSC_INTERN PetscErrorCode VecDuplicate_MPI(Vec, Vec *);
PETSC_INTERN PetscErrorCode VecSetPreallocationCOO_MPI(Vec, PetscCount, const PetscInt[]);
PETSC_INTERN PetscErrorCode VecSetValuesCOO_MPI(Vec, const PetscScalar[], InsertMode);

PETSC_SINGLE_LIBRARY_INTERN PetscErrorCode VecDot_MPI(Vec, Vec, PetscScalar *);
PETSC_SINGLE_LIBRARY_INTERN PetscErrorCode VecMDot_MPI(Vec, PetscInt, const Vec[], PetscScalar *);
PETSC_SINGLE_LIBRARY_INTERN PetscErrorCode VecTDot_MPI(Vec, Vec, PetscScalar *);
PETSC_SINGLE_LIBRARY_INTERN PetscErrorCode VecNorm_MPI(Vec, NormType, PetscReal *);
PETSC_SINGLE_LIBRARY_INTERN PetscErrorCode VecMax_MPI(Vec, PetscInt *, PetscReal *);
PETSC_SINGLE_LIBRARY_INTERN PetscErrorCode VecMin_MPI(Vec, PetscInt *, PetscReal *);
PETSC_SINGLE_LIBRARY_INTERN PetscErrorCode VecPlaceArray_MPI(Vec, const PetscScalar *);
PETSC_SINGLE_LIBRARY_INTERN PetscErrorCode VecResetArray_MPI(Vec);
PETSC_SINGLE_LIBRARY_INTERN PetscErrorCode VecDestroy_MPI(Vec);
PETSC_SINGLE_LIBRARY_INTERN PetscErrorCode VecCreate_MPI_Private(Vec, PetscBool, PetscInt, const PetscScalar[]);

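/* The inline "_Default" helpers below share one pattern: compute the local result with a
   sequential kernel passed in as a callback, then combine the partial results across the
   vector's communicator with as few MPI_Allreduce calls as possible. A backend would
   typically wrap them as in this hypothetical sketch (VecDot_Seq comes from dvecimpl.h):

     PetscErrorCode VecDot_MPI(Vec x, Vec y, PetscScalar *z)
     {
       PetscFunctionBegin;
       PetscCall(VecXDot_MPI_Default(x, y, z, VecDot_Seq));
       PetscFunctionReturn(PETSC_SUCCESS);
     }

   The multi-dot variant below reduces all nv partial results in a single message */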
static inline PetscErrorCode VecMXDot_MPI_Default(Vec xin, PetscInt nv, const Vec y[], PetscScalar *z, PetscErrorCode (*VecMXDot_SeqFn)(Vec, PetscInt, const Vec[], PetscScalar *))
{
  PetscFunctionBegin;
  PetscCall(VecMXDot_SeqFn(xin, nv, y, z));
  PetscCall(MPIU_Allreduce(MPI_IN_PLACE, z, nv, MPIU_SCALAR, MPIU_SUM, PetscObjectComm((PetscObject)xin)));
  PetscFunctionReturn(PETSC_SUCCESS);
}

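/* Single dot product: the local partial result is summed in place across the communicator */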
static inline PetscErrorCode VecXDot_MPI_Default(Vec xin, Vec yin, PetscScalar *z, PetscErrorCode (*VecXDot_SeqFn)(Vec, Vec, PetscScalar *))
{
  PetscFunctionBegin;
  PetscCall(VecXDot_SeqFn(xin, yin, z));
  PetscCall(MPIU_Allreduce(MPI_IN_PLACE, z, 1, MPIU_SCALAR, MPIU_SUM, PetscObjectComm((PetscObject)xin)));
  PetscFunctionReturn(PETSC_SUCCESS);
}

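/* Min/max with optional location: when the caller wants the index, a (value, index) pair is
   reduced with the location-aware op ops[0] (in the style of MPI_MAXLOC/MPI_MINLOC) after
   shifting the local index by rstart to make it global; otherwise only the value is reduced
   with ops[1]. Under MPIUNI there is a single process, so the local result is already global */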
static inline PetscErrorCode VecMinMax_MPI_Default(Vec xin, PetscInt *idx, PetscReal *z, PetscErrorCode (*VecMinMax_SeqFn)(Vec, PetscInt *, PetscReal *), const MPI_Op ops[2])
{
  PetscFunctionBegin;
  /* Find the local max or min */
  PetscCall(VecMinMax_SeqFn(xin, idx, z));
  if (PetscDefined(HAVE_MPIUNI)) PetscFunctionReturn(PETSC_SUCCESS);
  /* Find the global max or min */
  if (idx) {
    struct {
      PetscReal v;
      PetscInt  i;
    } in = {*z, *idx + xin->map->rstart};

    PetscCall(MPIU_Allreduce(MPI_IN_PLACE, &in, 1, MPIU_REAL_INT, ops[0], PetscObjectComm((PetscObject)xin)));
    *z   = in.v;
    *idx = in.i;
  } else {
    /* User does not need idx */
    PetscCall(MPIU_Allreduce(MPI_IN_PLACE, z, 1, MPIU_REAL, ops[1], PetscObjectComm((PetscObject)xin)));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

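/* Fuses the reductions for VecDotNorm2(): the local dot product and squared norm travel in
   one buffer so a single MPI_Allreduce covers both, halving the latency of separate
   VecDot() and VecNorm() calls */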
static inline PetscErrorCode VecDotNorm2_MPI_Default(Vec s, Vec t, PetscScalar *dp, PetscScalar *nm, PetscErrorCode (*VecDotNorm2_SeqFn)(Vec, Vec, PetscScalar *, PetscScalar *))
{
  PetscFunctionBegin;
  PetscCall(VecDotNorm2_SeqFn(s, t, dp, nm));
  {
    PetscScalar sum[] = {*dp, *nm};

    PetscCall(MPIU_Allreduce(MPI_IN_PLACE, &sum, 2, MPIU_SCALAR, MPIU_SUM, PetscObjectComm((PetscObject)s)));
    *dp = sum[0];
    *nm = sum[1];
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

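/* Global norms from local ones: NORM_1 sums and NORM_INFINITY maximizes the local results
   directly, while the 2 norm variants are squared locally, summed, and square-rooted
   afterwards; NORM_1_AND_2 reduces both slots in one call */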
static inline PetscErrorCode VecNorm_MPI_Default(Vec xin, NormType type, PetscReal *z, PetscErrorCode (*VecNorm_SeqFn)(Vec, NormType, PetscReal *))
{
  PetscMPIInt zn = 1;
  MPI_Op      op = MPIU_SUM;

  PetscFunctionBegin;
  PetscCall(VecNorm_SeqFn(xin, type, z));
  switch (type) {
  case NORM_1_AND_2:
    // the 2 norm needs to be squared here before being summed; NORM_2 stores the norm in the
    // first slot, while NORM_1_AND_2 stores it in the second
    z[1] *= z[1];
    zn = 2;
    break;
  case NORM_2:
  case NORM_FROBENIUS:
    z[0] *= z[0]; /* fall through */
  case NORM_1:
    break;
  case NORM_INFINITY:
    op = MPIU_MAX;
    break;
  }
  PetscCall(MPIU_Allreduce(MPI_IN_PLACE, z, zn, MPIU_REAL, op, PetscObjectComm((PetscObject)xin)));
  /* the 2 norm variants were summed as squares, so take the square root of the proper slot */
  if (type == NORM_2 || type == NORM_FROBENIUS || type == NORM_1_AND_2) z[type == NORM_1_AND_2] = PetscSqrtReal(z[type == NORM_1_AND_2]);
  PetscFunctionReturn(PETSC_SUCCESS);
}

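/* Reduces the three weighted error norms together with the counts of contributing entries:
   the counts are temporarily stored as PetscReal so that, for NORM_2, all six values travel
   in a single sum reduction */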
static inline PetscErrorCode VecErrorWeightedNorms_MPI_Default(Vec U, Vec Y, Vec E, NormType wnormtype, PetscReal atol, Vec vatol, PetscReal rtol, Vec vrtol, PetscReal ignore_max, PetscReal *norm, PetscInt *norm_loc, PetscReal *norma, PetscInt *norma_loc, PetscReal *normr, PetscInt *normr_loc, PetscErrorCode (*SeqFn)(Vec, Vec, Vec, NormType, PetscReal, Vec, PetscReal, Vec, PetscReal, PetscReal *, PetscInt *, PetscReal *, PetscInt *, PetscReal *, PetscInt *))
{
  PetscReal loc[6];

  PetscFunctionBegin;
  PetscCall(SeqFn(U, Y, E, wnormtype, atol, vatol, rtol, vrtol, ignore_max, norm, norm_loc, norma, norma_loc, normr, normr_loc));
  if (wnormtype == NORM_2) {
    loc[0] = PetscSqr(*norm);
    loc[1] = PetscSqr(*norma);
    loc[2] = PetscSqr(*normr);
  } else {
    loc[0] = *norm;
    loc[1] = *norma;
    loc[2] = *normr;
  }
  loc[3] = (PetscReal)*norm_loc;
  loc[4] = (PetscReal)*norma_loc;
  loc[5] = (PetscReal)*normr_loc;
  if (wnormtype == NORM_2) {
    /* all six entries reduce with a sum, so one reduction suffices */
    PetscCall(MPIU_Allreduce(MPI_IN_PLACE, loc, 6, MPIU_REAL, MPIU_SUM, PetscObjectComm((PetscObject)U)));
  } else {
    /* the norms reduce with a max but the counts reduce with a sum, so two reductions are needed */
    PetscCall(MPIU_Allreduce(MPI_IN_PLACE, loc, 3, MPIU_REAL, MPIU_MAX, PetscObjectComm((PetscObject)U)));
    PetscCall(MPIU_Allreduce(MPI_IN_PLACE, loc + 3, 3, MPIU_REAL, MPIU_SUM, PetscObjectComm((PetscObject)U)));
  }
  if (wnormtype == NORM_2) {
    *norm  = PetscSqrtReal(loc[0]);
    *norma = PetscSqrtReal(loc[1]);
    *normr = PetscSqrtReal(loc[2]);
  } else {
    *norm  = loc[0];
    *norma = loc[1];
    *normr = loc[2];
  }
  *norm_loc  = (PetscInt)loc[3];
  *norma_loc = (PetscInt)loc[4];
  *normr_loc = (PetscInt)loc[5];
  PetscFunctionReturn(PETSC_SUCCESS);
}