Actual source code: pdvec.c

petsc-master 2019-09-15

  2: /*
  3:      Code for some of the parallel vector primitives.
  4: */
  5: #include <../src/vec/vec/impls/mpi/pvecimpl.h>
  6: #include <petsc/private/viewerimpl.h>
  7: #include <petsc/private/viewerhdf5impl.h>
  8: #include <petsc/private/glvisviewerimpl.h>
  9: #include <petsc/private/glvisvecimpl.h>

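/*
   VecDestroy_MPI - Frees the storage of the MPI vector implementation:
   the array allocated by PETSc (if any), the local representation and
   its update scatter (if present), pending assembly state, and the two
   stashes; the block stash is destroyed first so the tags are freed in
   the proper order.
*/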
 11: PetscErrorCode VecDestroy_MPI(Vec v)
 12: {
 13:   Vec_MPI        *x = (Vec_MPI*)v->data;

 17: #if defined(PETSC_USE_LOG)
 18:   PetscLogObjectState((PetscObject)v,"Length=%D",v->map->N);
 19: #endif
 20:   if (!x) return(0);
 21:   PetscFree(x->array_allocated);

 23:   /* Destroy local representation of vector if it exists */
 24:   if (x->localrep) {
 25:     VecDestroy(&x->localrep);
 26:     VecScatterDestroy(&x->localupdate);
 27:   }
 28:   VecAssemblyReset_MPI(v);

 30:   /* Destroy the stashes: note the order, so that the tags are freed properly */
 31:   VecStashDestroy_Private(&v->bstash);
 32:   VecStashDestroy_Private(&v->stash);
 33:   PetscFree(v->data);
 34:   return(0);
 35: }

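/*
   VecView_MPI_ASCII - ASCII viewing of a parallel vector. Rank 0 prints
   its own values first, then receives each remaining rank's local array
   in rank order (one message per rank, matched on the viewer's tag) and
   prints it in the requested format; all other ranks simply send their
   local arrays to rank 0.
*/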
 37: PetscErrorCode VecView_MPI_ASCII(Vec xin,PetscViewer viewer)
 38: {
 39:   PetscErrorCode    ierr;
 40:   PetscInt          i,work = xin->map->n,cnt,len,nLen;
 41:   PetscMPIInt       j,n = 0,size,rank,tag = ((PetscObject)viewer)->tag;
 42:   MPI_Status        status;
 43:   PetscScalar       *values;
 44:   const PetscScalar *xarray;
 45:   const char        *name;
 46:   PetscViewerFormat format;

 49:   MPI_Comm_size(PetscObjectComm((PetscObject)xin),&size);
 50:   PetscViewerGetFormat(viewer,&format);
 51:   if (format == PETSC_VIEWER_LOAD_BALANCE) {
 52:     PetscInt nmax = 0,nmin = xin->map->n,navg;
 53:     for (i=0; i<(PetscInt)size; i++) {
 54:       nmax = PetscMax(nmax,xin->map->range[i+1] - xin->map->range[i]);
 55:       nmin = PetscMin(nmin,xin->map->range[i+1] - xin->map->range[i]);
 56:     }
 57:     navg = xin->map->N/size;
 58:     PetscViewerASCIIPrintf(viewer,"  Load Balance - Local vector size Min %D  avg %D  max %D\n",nmin,navg,nmax);
 59:     return(0);
 60:   }

 62:   VecGetArrayRead(xin,&xarray);
 63:   /* determine the maximum message length that will arrive */
 64:   MPI_Comm_rank(PetscObjectComm((PetscObject)xin),&rank);
 65:   MPI_Reduce(&work,&len,1,MPIU_INT,MPI_MAX,0,PetscObjectComm((PetscObject)xin));
 66:  if (format == PETSC_VIEWER_ASCII_GLVIS) { rank = 0; len = 0; } /* no parallel distributed write support from GLVis */
 67:   if (!rank) {
 68:     PetscMalloc1(len,&values);
 69:     /*
 70:         MATLAB format and ASCII format are very similar except
 71:         MATLAB uses %18.16e format while ASCII uses %g
 72:     */
 73:     if (format == PETSC_VIEWER_ASCII_MATLAB) {
 74:       PetscObjectGetName((PetscObject)xin,&name);
 75:       PetscViewerASCIIPrintf(viewer,"%s = [\n",name);
 76:       for (i=0; i<xin->map->n; i++) {
 77: #if defined(PETSC_USE_COMPLEX)
 78:         if (PetscImaginaryPart(xarray[i]) > 0.0) {
 79:           PetscViewerASCIIPrintf(viewer,"%18.16e + %18.16ei\n",(double)PetscRealPart(xarray[i]),(double)PetscImaginaryPart(xarray[i]));
 80:         } else if (PetscImaginaryPart(xarray[i]) < 0.0) {
 81:           PetscViewerASCIIPrintf(viewer,"%18.16e - %18.16ei\n",(double)PetscRealPart(xarray[i]),-(double)PetscImaginaryPart(xarray[i]));
 82:         } else {
 83:           PetscViewerASCIIPrintf(viewer,"%18.16e\n",(double)PetscRealPart(xarray[i]));
 84:         }
 85: #else
 86:         PetscViewerASCIIPrintf(viewer,"%18.16e\n",(double)xarray[i]);
 87: #endif
 88:       }
 89:       /* receive and print messages */
 90:       for (j=1; j<size; j++) {
 91:         MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,PetscObjectComm((PetscObject)xin),&status);
 92:         MPI_Get_count(&status,MPIU_SCALAR,&n);
 93:         for (i=0; i<n; i++) {
 94: #if defined(PETSC_USE_COMPLEX)
 95:           if (PetscImaginaryPart(values[i]) > 0.0) {
 96:             PetscViewerASCIIPrintf(viewer,"%18.16e + %18.16ei\n",(double)PetscRealPart(values[i]),(double)PetscImaginaryPart(values[i]));
 97:           } else if (PetscImaginaryPart(values[i]) < 0.0) {
 98:             PetscViewerASCIIPrintf(viewer,"%18.16e - %18.16ei\n",(double)PetscRealPart(values[i]),-(double)PetscImaginaryPart(values[i]));
 99:           } else {
100:             PetscViewerASCIIPrintf(viewer,"%18.16e\n",(double)PetscRealPart(values[i]));
101:           }
102: #else
103:           PetscViewerASCIIPrintf(viewer,"%18.16e\n",(double)values[i]);
104: #endif
105:         }
106:       }
107:       PetscViewerASCIIPrintf(viewer,"];\n");

109:     } else if (format == PETSC_VIEWER_ASCII_SYMMODU) {
110:       for (i=0; i<xin->map->n; i++) {
111: #if defined(PETSC_USE_COMPLEX)
112:         PetscViewerASCIIPrintf(viewer,"%18.16e %18.16e\n",(double)PetscRealPart(xarray[i]),(double)PetscImaginaryPart(xarray[i]));
113: #else
114:         PetscViewerASCIIPrintf(viewer,"%18.16e\n",(double)xarray[i]);
115: #endif
116:       }
117:       /* receive and print messages */
118:       for (j=1; j<size; j++) {
119:         MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,PetscObjectComm((PetscObject)xin),&status);
120:         MPI_Get_count(&status,MPIU_SCALAR,&n);
121:         for (i=0; i<n; i++) {
122: #if defined(PETSC_USE_COMPLEX)
123:           PetscViewerASCIIPrintf(viewer,"%18.16e %18.16e\n",(double)PetscRealPart(values[i]),(double)PetscImaginaryPart(values[i]));
124: #else
125:           PetscViewerASCIIPrintf(viewer,"%18.16e\n",(double)values[i]);
126: #endif
127:         }
128:       }
129:     } else if (format == PETSC_VIEWER_ASCII_VTK || format == PETSC_VIEWER_ASCII_VTK_CELL) {
130:       /*
131:         state 0: No header has been output
132:         state 1: Only POINT_DATA has been output
133:         state 2: Only CELL_DATA has been output
134:         state 3: Output both, POINT_DATA last
135:         state 4: Output both, CELL_DATA last
136:       */
137:       static PetscInt stateId     = -1;
138:       int             outputState = 0;
139:       int             doOutput    = 0;
140:       PetscBool       hasState;
141:       PetscInt        bs, b;

143:       if (stateId < 0) {
144:         PetscObjectComposedDataRegister(&stateId);
145:       }
146:       PetscObjectComposedDataGetInt((PetscObject) viewer, stateId, outputState, hasState);
147:       if (!hasState) outputState = 0;

149:       PetscObjectGetName((PetscObject)xin,&name);
150:       VecGetLocalSize(xin, &nLen);
151:       PetscMPIIntCast(nLen,&n);
152:       VecGetBlockSize(xin, &bs);
153:       if (format == PETSC_VIEWER_ASCII_VTK) {
154:         if (outputState == 0) {
155:           outputState = 1;
156:           doOutput    = 1;
157:         } else if (outputState == 1) doOutput = 0;
158:         else if (outputState == 2) {
159:           outputState = 3;
160:           doOutput    = 1;
161:         } else if (outputState == 3) doOutput = 0;
162:         else if (outputState == 4) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE, "Tried to output POINT_DATA again after intervening CELL_DATA");

164:         if (doOutput) {
165:           PetscViewerASCIIPrintf(viewer, "POINT_DATA %d\n", xin->map->N/bs);
166:         }
167:       } else {
168:         if (outputState == 0) {
169:           outputState = 2;
170:           doOutput    = 1;
171:         } else if (outputState == 1) {
172:           outputState = 4;
173:           doOutput    = 1;
174:         } else if (outputState == 2) doOutput = 0;
175:         else if (outputState == 3) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE, "Tried to output CELL_DATA again after intervening POINT_DATA");
176:         else if (outputState == 4) doOutput = 0;

178:         if (doOutput) {
179:           PetscViewerASCIIPrintf(viewer, "CELL_DATA %d\n", xin->map->N/bs);
180:         }
181:       }
182:       PetscObjectComposedDataSetInt((PetscObject) viewer, stateId, outputState);
183:       if (name) {
184:         if (bs == 3) {
185:           PetscViewerASCIIPrintf(viewer, "VECTORS %s double\n", name);
186:         } else {
187:           PetscViewerASCIIPrintf(viewer, "SCALARS %s double %d\n", name, bs);
188:         }
189:       } else {
190:         PetscViewerASCIIPrintf(viewer, "SCALARS scalars double %d\n", bs);
191:       }
192:       if (bs != 3) {
193:         PetscViewerASCIIPrintf(viewer, "LOOKUP_TABLE default\n");
194:       }
195:       for (i=0; i<n/bs; i++) {
196:         for (b=0; b<bs; b++) {
197:           if (b > 0) {
198:             PetscViewerASCIIPrintf(viewer," ");
199:           }
200:           PetscViewerASCIIPrintf(viewer,"%g",(double)PetscRealPart(xarray[i*bs+b]));
201:         }
202:         PetscViewerASCIIPrintf(viewer,"\n");
203:       }
204:       for (j=1; j<size; j++) {
205:         MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,PetscObjectComm((PetscObject)xin),&status);
206:         MPI_Get_count(&status,MPIU_SCALAR,&n);
207:         for (i=0; i<n/bs; i++) {
208:           for (b=0; b<bs; b++) {
209:             if (b > 0) {
210:               PetscViewerASCIIPrintf(viewer," ");
211:             }
212:             PetscViewerASCIIPrintf(viewer,"%g",(double)PetscRealPart(values[i*bs+b]));
213:           }
214:           PetscViewerASCIIPrintf(viewer,"\n");
215:         }
216:       }
217:     } else if (format == PETSC_VIEWER_ASCII_VTK_COORDS) {
218:       PetscInt bs, b;

220:       VecGetLocalSize(xin, &nLen);
221:       PetscMPIIntCast(nLen,&n);
222:       VecGetBlockSize(xin, &bs);
223:       if ((bs < 1) || (bs > 3)) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE, "VTK can only handle up to 3D objects, but vector dimension is %d", bs);

225:       for (i=0; i<n/bs; i++) {
226:         for (b=0; b<bs; b++) {
227:           if (b > 0) {
228:             PetscViewerASCIIPrintf(viewer," ");
229:           }
230:           PetscViewerASCIIPrintf(viewer,"%g",(double)PetscRealPart(xarray[i*bs+b]));
231:         }
232:         for (b=bs; b<3; b++) {
233:           PetscViewerASCIIPrintf(viewer," 0.0");
234:         }
235:         PetscViewerASCIIPrintf(viewer,"\n");
236:       }
237:       for (j=1; j<size; j++) {
238:         MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,PetscObjectComm((PetscObject)xin),&status);
239:         MPI_Get_count(&status,MPIU_SCALAR,&n);
240:         for (i=0; i<n/bs; i++) {
241:           for (b=0; b<bs; b++) {
242:             if (b > 0) {
243:               PetscViewerASCIIPrintf(viewer," ");
244:             }
245:             PetscViewerASCIIPrintf(viewer,"%g",(double)PetscRealPart(values[i*bs+b]));
246:           }
247:           for (b=bs; b<3; b++) {
248:             PetscViewerASCIIPrintf(viewer," 0.0");
249:           }
250:           PetscViewerASCIIPrintf(viewer,"\n");
251:         }
252:       }
253:     } else if (format == PETSC_VIEWER_ASCII_PCICE) {
254:       PetscInt bs, b, vertexCount = 1;

256:       VecGetLocalSize(xin, &nLen);
257:       PetscMPIIntCast(nLen,&n);
258:       VecGetBlockSize(xin, &bs);
259:       if ((bs < 1) || (bs > 3)) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE, "PCICE can only handle up to 3D objects, but vector dimension is %d", bs);

261:       PetscViewerASCIIPrintf(viewer,"%D\n", xin->map->N/bs);
262:       for (i=0; i<n/bs; i++) {
263:         PetscViewerASCIIPrintf(viewer,"%7D   ", vertexCount++);
264:         for (b=0; b<bs; b++) {
265:           if (b > 0) {
266:             PetscViewerASCIIPrintf(viewer," ");
267:           }
268: #if !defined(PETSC_USE_COMPLEX)
269:           PetscViewerASCIIPrintf(viewer,"% 12.5E",(double)xarray[i*bs+b]);
270: #endif
271:         }
272:         PetscViewerASCIIPrintf(viewer,"\n");
273:       }
274:       for (j=1; j<size; j++) {
275:         MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,PetscObjectComm((PetscObject)xin),&status);
276:         MPI_Get_count(&status,MPIU_SCALAR,&n);
277:         for (i=0; i<n/bs; i++) {
278:           PetscViewerASCIIPrintf(viewer,"%7D   ", vertexCount++);
279:           for (b=0; b<bs; b++) {
280:             if (b > 0) {
281:               PetscViewerASCIIPrintf(viewer," ");
282:             }
283: #if !defined(PETSC_USE_COMPLEX)
284:             PetscViewerASCIIPrintf(viewer,"% 12.5E",(double)values[i*bs+b]);
285: #endif
286:           }
287:           PetscViewerASCIIPrintf(viewer,"\n");
288:         }
289:       }
290:     } else if (format == PETSC_VIEWER_ASCII_GLVIS) {
291:       /* GLVis ASCII visualization/dump: this function mimics mfem::GridFunction::Save() */
292:       const PetscScalar       *array;
293:       PetscInt                i,n,vdim, ordering = 1; /* mfem::FiniteElementSpace::Ordering::byVDIM */
294:       PetscContainer          glvis_container;
295:       PetscViewerGLVisVecInfo glvis_vec_info;
296:       PetscViewerGLVisInfo    glvis_info;
297:       PetscErrorCode          ierr;

299:       /* mfem::FiniteElementSpace::Save() */
300:       VecGetBlockSize(xin,&vdim);
301:       PetscViewerASCIIPrintf(viewer,"FiniteElementSpace\n");
302:       PetscObjectQuery((PetscObject)xin,"_glvis_info_container",(PetscObject*)&glvis_container);
303:       if (!glvis_container) SETERRQ(PetscObjectComm((PetscObject)xin),PETSC_ERR_PLIB,"Missing GLVis container");
304:       PetscContainerGetPointer(glvis_container,(void**)&glvis_vec_info);
305:       PetscViewerASCIIPrintf(viewer,"%s\n",glvis_vec_info->fec_type);
306:       PetscViewerASCIIPrintf(viewer,"VDim: %d\n",vdim);
307:       PetscViewerASCIIPrintf(viewer,"Ordering: %d\n",ordering);
308:       PetscViewerASCIIPrintf(viewer,"\n");
309:       /* mfem::Vector::Print() */
310:       PetscObjectQuery((PetscObject)viewer,"_glvis_info_container",(PetscObject*)&glvis_container);
311:       if (!glvis_container) SETERRQ(PetscObjectComm((PetscObject)viewer),PETSC_ERR_PLIB,"Missing GLVis container");
312:       PetscContainerGetPointer(glvis_container,(void**)&glvis_info);
313:       if (glvis_info->enabled) {
314:         VecGetLocalSize(xin,&n);
315:         VecGetArrayRead(xin,&array);
316:         for (i=0;i<n;i++) {
317:           PetscViewerASCIIPrintf(viewer,glvis_info->fmt,(double)PetscRealPart(array[i]));
318:           PetscViewerASCIIPrintf(viewer,"\n");
319:         }
320:         VecRestoreArrayRead(xin,&array);
321:       }
322:     } else if (format == PETSC_VIEWER_ASCII_INFO || format == PETSC_VIEWER_ASCII_INFO_DETAIL) {
323:       /* No info */
324:     } else {
325:       if (format != PETSC_VIEWER_ASCII_COMMON) {PetscViewerASCIIPrintf(viewer,"Process [%d]\n",rank);}
326:       cnt = 0;
327:       for (i=0; i<xin->map->n; i++) {
328:         if (format == PETSC_VIEWER_ASCII_INDEX) {
329:           PetscViewerASCIIPrintf(viewer,"%D: ",cnt++);
330:         }
331: #if defined(PETSC_USE_COMPLEX)
332:         if (PetscImaginaryPart(xarray[i]) > 0.0) {
333:           PetscViewerASCIIPrintf(viewer,"%g + %g i\n",(double)PetscRealPart(xarray[i]),(double)PetscImaginaryPart(xarray[i]));
334:         } else if (PetscImaginaryPart(xarray[i]) < 0.0) {
335:           PetscViewerASCIIPrintf(viewer,"%g - %g i\n",(double)PetscRealPart(xarray[i]),-(double)PetscImaginaryPart(xarray[i]));
336:         } else {
337:           PetscViewerASCIIPrintf(viewer,"%g\n",(double)PetscRealPart(xarray[i]));
338:         }
339: #else
340:         PetscViewerASCIIPrintf(viewer,"%g\n",(double)xarray[i]);
341: #endif
342:       }
343:       /* receive and print messages */
344:       for (j=1; j<size; j++) {
345:         MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,PetscObjectComm((PetscObject)xin),&status);
346:         MPI_Get_count(&status,MPIU_SCALAR,&n);
347:         if (format != PETSC_VIEWER_ASCII_COMMON) {
348:           PetscViewerASCIIPrintf(viewer,"Process [%d]\n",j);
349:         }
350:         for (i=0; i<n; i++) {
351:           if (format == PETSC_VIEWER_ASCII_INDEX) {
352:             PetscViewerASCIIPrintf(viewer,"%D: ",cnt++);
353:           }
354: #if defined(PETSC_USE_COMPLEX)
355:           if (PetscImaginaryPart(values[i]) > 0.0) {
356:             PetscViewerASCIIPrintf(viewer,"%g + %g i\n",(double)PetscRealPart(values[i]),(double)PetscImaginaryPart(values[i]));
357:           } else if (PetscImaginaryPart(values[i]) < 0.0) {
358:             PetscViewerASCIIPrintf(viewer,"%g - %g i\n",(double)PetscRealPart(values[i]),-(double)PetscImaginaryPart(values[i]));
359:           } else {
360:             PetscViewerASCIIPrintf(viewer,"%g\n",(double)PetscRealPart(values[i]));
361:           }
362: #else
363:           PetscViewerASCIIPrintf(viewer,"%g\n",(double)values[i]);
364: #endif
365:         }
366:       }
367:     }
368:     PetscFree(values);
369:   } else {
370:     if (format == PETSC_VIEWER_ASCII_INFO || format == PETSC_VIEWER_ASCII_INFO_DETAIL) {
371:       /* Rank 0 is not trying to receive anything, so don't send anything */
372:     } else {
373:       if (format == PETSC_VIEWER_ASCII_MATLAB || format == PETSC_VIEWER_ASCII_VTK || format == PETSC_VIEWER_ASCII_VTK_CELL) {
374:         /* this may be a collective operation so make sure everyone calls it */
375:         PetscObjectGetName((PetscObject)xin,&name);
376:       }
377:       MPI_Send((void*)xarray,xin->map->n,MPIU_SCALAR,0,tag,PetscObjectComm((PetscObject)xin));
378:     }
379:   }
380:   PetscViewerFlush(viewer);
381:   VecRestoreArrayRead(xin,&xarray);
382:   return(0);
383: }

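/*
   VecView_MPI_Binary - Binary viewing of a parallel vector, either by
   funneling all data through rank 0 under flow control or, when the
   viewer has MPI-IO enabled, by a collective MPI_File write.

   A minimal usage sketch (assuming a configured PETSc program with a
   vector x; error checking omitted for brevity):

     PetscViewer bv;
     PetscViewerBinaryOpen(PETSC_COMM_WORLD,"x.dat",FILE_MODE_WRITE,&bv);
     VecView(x,bv);
     PetscViewerDestroy(&bv);
*/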
385: PetscErrorCode VecView_MPI_Binary(Vec xin,PetscViewer viewer)
386: {
387:   PetscErrorCode    ierr;
388:   PetscMPIInt       rank,size,mesgsize,tag = ((PetscObject)viewer)->tag, mesglen;
389:   PetscInt          len,n,j,tr[2];
390:   int               fdes;
391:   MPI_Status        status;
392:   PetscScalar       *values;
393:   const PetscScalar *xarray;
394:   FILE              *file;
395: #if defined(PETSC_HAVE_MPIIO)
396:   PetscBool         isMPIIO;
397: #endif
398:   PetscBool         skipHeader;
399:   PetscInt          message_count,flowcontrolcount;
400:   PetscViewerFormat format;

403:   VecGetArrayRead(xin,&xarray);
404:   PetscViewerBinaryGetDescriptor(viewer,&fdes);
405:   PetscViewerBinaryGetSkipHeader(viewer,&skipHeader);

407:   /* determine the maximum message length that will arrive */
408:   MPI_Comm_rank(PetscObjectComm((PetscObject)xin),&rank);
409:   MPI_Comm_size(PetscObjectComm((PetscObject)xin),&size);

411:   if (!skipHeader) {
412:     tr[0] = VEC_FILE_CLASSID;
413:     tr[1] = xin->map->N;
414:     PetscViewerBinaryWrite(viewer,tr,2,PETSC_INT,PETSC_FALSE);
415:   }

417: #if defined(PETSC_HAVE_MPIIO)
418:   PetscViewerBinaryGetUseMPIIO(viewer,&isMPIIO);
419:   if (!isMPIIO) {
420: #endif
421:     PetscViewerFlowControlStart(viewer,&message_count,&flowcontrolcount);
422:     if (!rank) {
423:       PetscBinaryWrite(fdes,(void*)xarray,xin->map->n,PETSC_SCALAR,PETSC_FALSE);

425:       len = 0;
426:       for (j=1; j<size; j++) len = PetscMax(len,xin->map->range[j+1]-xin->map->range[j]);
427:       PetscMalloc1(len,&values);
428:       PetscMPIIntCast(len,&mesgsize);
429:       /* receive and save messages */
430:       for (j=1; j<size; j++) {
431:         PetscViewerFlowControlStepMaster(viewer,j,&message_count,flowcontrolcount);
432:         MPI_Recv(values,mesgsize,MPIU_SCALAR,j,tag,PetscObjectComm((PetscObject)xin),&status);
433:         MPI_Get_count(&status,MPIU_SCALAR,&mesglen);
434:         n    = (PetscInt)mesglen;
435:         PetscBinaryWrite(fdes,values,n,PETSC_SCALAR,PETSC_TRUE);
436:       }
437:       PetscViewerFlowControlEndMaster(viewer,&message_count);
438:       PetscFree(values);
439:     } else {
440:       PetscViewerFlowControlStepWorker(viewer,rank,&message_count);
441:       PetscMPIIntCast(xin->map->n,&mesgsize);
442:       MPI_Send((void*)xarray,mesgsize,MPIU_SCALAR,0,tag,PetscObjectComm((PetscObject)xin));
443:       PetscViewerFlowControlEndWorker(viewer,&message_count);
444:     }
445:     PetscViewerGetFormat(viewer,&format);
446:     if (format == PETSC_VIEWER_BINARY_MATLAB) {
447:       MPI_Comm   comm;
448:       FILE       *info;
449:       const char *name;

451:       PetscObjectGetName((PetscObject)xin,&name);
452:       PetscObjectGetComm((PetscObject)viewer,&comm);
453:       PetscViewerBinaryGetInfoPointer(viewer,&info);
454:       PetscFPrintf(comm,info,"#--- begin code written by PetscViewerBinary for MATLAB format ---#\n");
455:       PetscFPrintf(comm,info,"#$$ Set.%s = PetscBinaryRead(fd);\n",name);
456:       PetscFPrintf(comm,info,"#--- end code written by PetscViewerBinary for MATLAB format ---#\n\n");
457:     }
458: #if defined(PETSC_HAVE_MPIIO)
459:   } else {
460:     MPI_Offset   off;
461:     MPI_File     mfdes;
462:     PetscMPIInt  lsize;

464:     PetscMPIIntCast(xin->map->n,&lsize);
465:     PetscViewerBinaryGetMPIIODescriptor(viewer,&mfdes);
466:     PetscViewerBinaryGetMPIIOOffset(viewer,&off);
467:     off += xin->map->rstart*sizeof(PetscScalar); /* off is MPI_Offset, not PetscMPIInt */
468:     MPI_File_set_view(mfdes,off,MPIU_SCALAR,MPIU_SCALAR,(char*)"native",MPI_INFO_NULL);
469:     MPIU_File_write_all(mfdes,(void*)xarray,lsize,MPIU_SCALAR,MPI_STATUS_IGNORE);
470:     PetscViewerBinaryAddMPIIOOffset(viewer,xin->map->N*sizeof(PetscScalar));
471:   }
472: #endif

474:   VecRestoreArrayRead(xin,&xarray);
475:   if (!rank) {
476:     PetscViewerBinaryGetInfoPointer(viewer,&file);
477:     if (file) {
478:       if (((PetscObject)xin)->prefix) {
479:         PetscFPrintf(PETSC_COMM_SELF,file,"-%svecload_block_size %D\n",((PetscObject)xin)->prefix,PetscAbs(xin->map->bs));
480:       } else {
481:         PetscFPrintf(PETSC_COMM_SELF,file,"-vecload_block_size %D\n",PetscAbs(xin->map->bs));
482:       }
483:     }
484:   }
485:   return(0);
486: }

488: #include <petscdraw.h>
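/*
   VecView_MPI_Draw_LG - Draws the vector as a line graph. The entries
   are gathered onto rank 0 with MPI_Gatherv and handed to a PetscDrawLG
   object, with the global indices as x coordinates.
*/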
489: PetscErrorCode VecView_MPI_Draw_LG(Vec xin,PetscViewer viewer)
490: {
491:   PetscDraw         draw;
492:   PetscBool         isnull;
493:   PetscDrawLG       lg;
494:   PetscMPIInt       i,size,rank,n,N,*lens = NULL,*disp = NULL;
495:   PetscReal         *values, *xx = NULL,*yy = NULL;
496:   const PetscScalar *xarray;
497:   int               colors[] = {PETSC_DRAW_RED};
498:   PetscErrorCode    ierr;

501:   PetscViewerDrawGetDraw(viewer,0,&draw);
502:   PetscDrawIsNull(draw,&isnull);
503:   if (isnull) return(0);
504:   MPI_Comm_rank(PetscObjectComm((PetscObject)xin),&rank);
505:   MPI_Comm_size(PetscObjectComm((PetscObject)xin),&size);
506:   PetscMPIIntCast(xin->map->n,&n);
507:   PetscMPIIntCast(xin->map->N,&N);

509:   VecGetArrayRead(xin,&xarray);
510: #if defined(PETSC_USE_COMPLEX)
511:   PetscMalloc1(n+1,&values);
512:   for (i=0; i<n; i++) values[i] = PetscRealPart(xarray[i]);
513: #else
514:   values = (PetscReal*)xarray;
515: #endif
516:   if (!rank) {
517:     PetscMalloc2(N,&xx,N,&yy);
518:     for (i=0; i<N; i++) xx[i] = (PetscReal)i;
519:     PetscMalloc2(size,&lens,size,&disp);
520:     for (i=0; i<size; i++) lens[i] = (PetscMPIInt)xin->map->range[i+1] - (PetscMPIInt)xin->map->range[i];
521:     for (i=0; i<size; i++) disp[i] = (PetscMPIInt)xin->map->range[i];
522:   }
523:   MPI_Gatherv(values,n,MPIU_REAL,yy,lens,disp,MPIU_REAL,0,PetscObjectComm((PetscObject)xin));
524:   PetscFree2(lens,disp);
525: #if defined(PETSC_USE_COMPLEX)
526:   PetscFree(values);
527: #endif
528:   VecRestoreArrayRead(xin,&xarray);

530:   PetscViewerDrawGetDrawLG(viewer,0,&lg);
531:   PetscDrawLGReset(lg);
532:   PetscDrawLGSetDimension(lg,1);
533:   PetscDrawLGSetColors(lg,colors);
534:   if (!rank) {
535:     PetscDrawLGAddPoints(lg,N,&xx,&yy);
536:     PetscFree2(xx,yy);
537:   }
538:   PetscDrawLGDraw(lg);
539:   PetscDrawLGSave(lg);
540:   return(0);
541: }

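/*
   VecView_MPI_Draw - Draws the vector entries as a connected line. Each
   rank passes its last entry to the rank on its right so the receiver
   can draw the segment that crosses the ownership boundary.
*/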
543: PetscErrorCode  VecView_MPI_Draw(Vec xin,PetscViewer viewer)
544: {
545:   PetscErrorCode    ierr;
546:   PetscMPIInt       rank,size,tag = ((PetscObject)viewer)->tag;
547:   PetscInt          i,start,end;
548:   MPI_Status        status;
549:   PetscReal         min,max,tmp = 0.0;
550:   PetscDraw         draw;
551:   PetscBool         isnull;
552:   PetscDrawAxis     axis;
553:   const PetscScalar *xarray;

556:   PetscViewerDrawGetDraw(viewer,0,&draw);
557:   PetscDrawIsNull(draw,&isnull);
558:   if (isnull) return(0);
559:   MPI_Comm_size(PetscObjectComm((PetscObject)xin),&size);
560:   MPI_Comm_rank(PetscObjectComm((PetscObject)xin),&rank);

562:   VecMin(xin,NULL,&min);
563:   VecMax(xin,NULL,&max);
564:   if (min == max) {
565:     min -= 1.e-5;
566:     max += 1.e-5;
567:   }

569:   PetscDrawCheckResizedWindow(draw);
570:   PetscDrawClear(draw);

572:   PetscDrawAxisCreate(draw,&axis);
573:   PetscDrawAxisSetLimits(axis,0.0,(PetscReal)xin->map->N,min,max);
574:   PetscDrawAxisDraw(axis);
575:   PetscDrawAxisDestroy(&axis);

577:   /* draw local part of vector */
578:   VecGetArrayRead(xin,&xarray);
579:   VecGetOwnershipRange(xin,&start,&end);
580:   if (rank < size-1) { /* send value to right */
581:     MPI_Send((void*)&xarray[xin->map->n-1],1,MPIU_REAL,rank+1,tag,PetscObjectComm((PetscObject)xin));
582:   }
 583:   if (rank) { /* receive value from left */
584:     MPI_Recv(&tmp,1,MPIU_REAL,rank-1,tag,PetscObjectComm((PetscObject)xin),&status);
585:   }
586:   PetscDrawCollectiveBegin(draw);
587:   if (rank) {
588:     PetscDrawLine(draw,(PetscReal)start-1,tmp,(PetscReal)start,PetscRealPart(xarray[0]),PETSC_DRAW_RED);
589:   }
590:   for (i=1; i<xin->map->n; i++) {
591:     PetscDrawLine(draw,(PetscReal)(i-1+start),PetscRealPart(xarray[i-1]),(PetscReal)(i+start),PetscRealPart(xarray[i]),PETSC_DRAW_RED);
592:   }
593:   PetscDrawCollectiveEnd(draw);
594:   VecRestoreArrayRead(xin,&xarray);

596:   PetscDrawFlush(draw);
597:   PetscDrawPause(draw);
598:   PetscDrawSave(draw);
599:   return(0);
600: }

602: #if defined(PETSC_HAVE_MATLAB_ENGINE)
603: PetscErrorCode VecView_MPI_Matlab(Vec xin,PetscViewer viewer)
604: {
605:   PetscErrorCode    ierr;
606:   PetscMPIInt       rank,size,*lens;
607:   PetscInt          i,N = xin->map->N;
608:   const PetscScalar *xarray;
609:   PetscScalar       *xx;

612:   VecGetArrayRead(xin,&xarray);
613:   MPI_Comm_rank(PetscObjectComm((PetscObject)xin),&rank);
614:   MPI_Comm_size(PetscObjectComm((PetscObject)xin),&size);
615:   if (!rank) {
616:     PetscMalloc1(N,&xx);
617:     PetscMalloc1(size,&lens);
618:     for (i=0; i<size; i++) lens[i] = xin->map->range[i+1] - xin->map->range[i];

620:     MPI_Gatherv((void*)xarray,xin->map->n,MPIU_SCALAR,xx,lens,xin->map->range,MPIU_SCALAR,0,PetscObjectComm((PetscObject)xin));
621:     PetscFree(lens);

623:     PetscObjectName((PetscObject)xin);
624:     PetscViewerMatlabPutArray(viewer,N,1,xx,((PetscObject)xin)->name);

626:     PetscFree(xx);
627:   } else {
628:     MPI_Gatherv((void*)xarray,xin->map->n,MPIU_SCALAR,0,0,0,MPIU_SCALAR,0,PetscObjectComm((PetscObject)xin));
629:   }
630:   VecRestoreArrayRead(xin,&xarray);
631:   return(0);
632: }
633: #endif

635: #if defined(PETSC_HAVE_ADIOS)
636: #include <adios.h>
637: #include <adios_read.h>
638: #include <petsc/private/vieweradiosimpl.h>
639: #include <petsc/private/viewerimpl.h>

641: PetscErrorCode VecView_MPI_ADIOS(Vec xin, PetscViewer viewer)
642: {
643:   PetscViewer_ADIOS *adios = (PetscViewer_ADIOS*)viewer->data;
644:   PetscErrorCode    ierr;
645:   const char        *vecname;
646:   int64_t           id;
647:   PetscInt          n,N,rstart;
648:   const PetscScalar *array;
649:   char              nglobalname[16],nlocalname[16],coffset[16];

652:   PetscObjectGetName((PetscObject) xin, &vecname);

654:   VecGetLocalSize(xin,&n);
655:   VecGetSize(xin,&N);
656:   VecGetOwnershipRange(xin,&rstart,NULL);

658:   sprintf(nlocalname,"%d",(int)n);
659:   sprintf(nglobalname,"%d",(int)N);
660:   sprintf(coffset,"%d",(int)rstart);
661:   id   = adios_define_var(Petsc_adios_group,vecname,"",adios_double,nlocalname,nglobalname,coffset);
662:   VecGetArrayRead(xin,&array);
663:   adios_write_byid(adios->adios_handle,id,array);
664:   VecRestoreArrayRead(xin,&array);

666:   return(0);
667: }
668: #endif

670: #if defined(PETSC_HAVE_ADIOS2)
671: #include <adios2_c.h>
672: #include <petsc/private/vieweradios2impl.h>
673: #include <petsc/private/viewerimpl.h>

675: PetscErrorCode VecView_MPI_ADIOS2(Vec xin, PetscViewer viewer)
676: {
677:   PetscErrorCode     ierr;
678:   PetscViewer_ADIOS2 *adios2 = (PetscViewer_ADIOS2*)viewer->data;
679:   PetscInt           n,N,rstart;
680:   const char         *vecname;
681:   const PetscScalar  *array;

684:   PetscObjectGetName((PetscObject) xin, &vecname);
685:   VecGetLocalSize(xin,&n);
686:   VecGetSize(xin,&N);
687:   VecGetOwnershipRange(xin,&rstart,NULL);

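  /* Note: in this snapshot the array is obtained and immediately
     restored without an ADIOS2 write call; the body is effectively a
     placeholder. */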
689:   VecGetArrayRead(xin,&array);
690:   VecRestoreArrayRead(xin,&array);
691:   return(0);
692: }
693: #endif

695: #if defined(PETSC_HAVE_HDF5)
696: PetscErrorCode VecView_MPI_HDF5(Vec xin, PetscViewer viewer)
697: {
698:   PetscViewer_HDF5  *hdf5 = (PetscViewer_HDF5*) viewer->data;
699:   /* TODO: It looks like we can remove the H5Sclose(filespace) and H5Dget_space(dset_id). Why do we do this? */
700:   hid_t             filespace; /* file dataspace identifier */
701:   hid_t             chunkspace; /* chunk dataset property identifier */
702:   hid_t             dset_id;   /* dataset identifier */
703:   hid_t             memspace;  /* memory dataspace identifier */
704:   hid_t             file_id;
705:   hid_t             group;
706:   hid_t             memscalartype; /* scalar type for mem (H5T_NATIVE_FLOAT or H5T_NATIVE_DOUBLE) */
707:   hid_t             filescalartype; /* scalar type for file (H5T_NATIVE_FLOAT or H5T_NATIVE_DOUBLE) */
708:   PetscInt          bs = PetscAbs(xin->map->bs);
709:   hsize_t           dim;
710:   hsize_t           maxDims[4], dims[4], chunkDims[4], count[4],offset[4];
711:   PetscInt          timestep;
712:   PetscInt          low;
713:   hsize_t           chunksize;
714:   const PetscScalar *x;
715:   const char        *vecname;
716:   PetscErrorCode    ierr;
717:   PetscBool         dim2;
718:   PetscBool         spoutput;

721:   PetscViewerHDF5OpenGroup(viewer, &file_id, &group);
722:   PetscViewerHDF5GetTimestep(viewer, &timestep);
723:   PetscViewerHDF5GetBaseDimension2(viewer,&dim2);
724:   PetscViewerHDF5GetSPOutput(viewer,&spoutput);

726:   /* Create the dataspace for the dataset.
727:    *
728:    * dims - holds the current dimensions of the dataset
729:    *
730:    * maxDims - holds the maximum dimensions of the dataset (unlimited
731:    * for the number of time steps with the current dimensions for the
732:    * other dimensions; so only additional time steps can be added).
733:    *
734:    * chunkDims - holds the size of a single time step (required to
735:    * permit extending dataset).
736:    */
737:   dim = 0;
738:   chunksize = 1;
739:   if (timestep >= 0) {
740:     dims[dim]      = timestep+1;
741:     maxDims[dim]   = H5S_UNLIMITED;
742:     chunkDims[dim] = 1;
743:     ++dim;
744:   }
745:   PetscHDF5IntCast(xin->map->N/bs,dims + dim);

747:   maxDims[dim]   = dims[dim];
748:   chunkDims[dim] = dims[dim];
749:   chunksize      *= chunkDims[dim];
750:   ++dim;
751:   if (bs > 1 || dim2) {
752:     dims[dim]      = bs;
753:     maxDims[dim]   = dims[dim];
754:     chunkDims[dim] = dims[dim];
755:     chunksize      *= chunkDims[dim];
756:     ++dim;
757:   }
758: #if defined(PETSC_USE_COMPLEX)
759:   dims[dim]      = 2;
760:   maxDims[dim]   = dims[dim];
761:   chunkDims[dim] = dims[dim];
762:   chunksize      *= chunkDims[dim];
 763:   /* HDF5 chunk sizes must be smaller than the 32-bit int maximum */
 764:   if (chunksize > PETSC_HDF5_INT_MAX/64) {
765:     if (bs > 1 || dim2) {
766:       if (chunkDims[dim-2] > (PetscInt)PetscSqrtReal((PetscReal)(PETSC_HDF5_INT_MAX/128))) {
767:         chunkDims[dim-2] = (PetscInt)PetscSqrtReal((PetscReal)(PETSC_HDF5_INT_MAX/128));
768:       } if (chunkDims[dim-1] > (PetscInt)PetscSqrtReal((PetscReal)(PETSC_HDF5_INT_MAX/128))) {
769:         chunkDims[dim-1] = (PetscInt)PetscSqrtReal((PetscReal)(PETSC_HDF5_INT_MAX/128));
770:       }
771:     } else {
772:       chunkDims[dim-1] = PETSC_HDF5_INT_MAX/128;
773:     }
774:   }
775:   ++dim;
776: #else
 777:   /* HDF5 chunk sizes must be smaller than the 32-bit int maximum */
778:   if (chunksize > PETSC_HDF5_INT_MAX/64) {
779:     if (bs > 1 || dim2) {
780:       if (chunkDims[dim-2] > (PetscInt)PetscSqrtReal((PetscReal)(PETSC_HDF5_INT_MAX/64))) {
781:         chunkDims[dim-2] = (PetscInt)PetscSqrtReal((PetscReal)(PETSC_HDF5_INT_MAX/64));
782:       } if (chunkDims[dim-1] > (PetscInt)PetscSqrtReal((PetscReal)(PETSC_HDF5_INT_MAX/64))) {
783:         chunkDims[dim-1] = (PetscInt)PetscSqrtReal((PetscReal)(PETSC_HDF5_INT_MAX/64));
784:       }
785:     } else {
786:       chunkDims[dim-1] = PETSC_HDF5_INT_MAX/64;
787:     }
788:   }
789: #endif


792:   PetscStackCallHDF5Return(filespace,H5Screate_simple,(dim, dims, maxDims));

794: #if defined(PETSC_USE_REAL_SINGLE)
795:   memscalartype = H5T_NATIVE_FLOAT;
796:   filescalartype = H5T_NATIVE_FLOAT;
797: #elif defined(PETSC_USE_REAL___FLOAT128)
798: #error "HDF5 output with 128 bit floats not supported."
799: #elif defined(PETSC_USE_REAL___FP16)
800: #error "HDF5 output with 16 bit floats not supported."
801: #else
802:   memscalartype = H5T_NATIVE_DOUBLE;
803:   if (spoutput == PETSC_TRUE) filescalartype = H5T_NATIVE_FLOAT;
804:   else filescalartype = H5T_NATIVE_DOUBLE;
805: #endif

807:   /* Create the dataset with default properties and close filespace */
808:   PetscObjectGetName((PetscObject) xin, &vecname);
809:   if (!H5Lexists(group, vecname, H5P_DEFAULT)) {
810:     /* Create chunk */
811:     PetscStackCallHDF5Return(chunkspace,H5Pcreate,(H5P_DATASET_CREATE));
812:     PetscStackCallHDF5(H5Pset_chunk,(chunkspace, dim, chunkDims));

814:     PetscStackCallHDF5Return(dset_id,H5Dcreate2,(group, vecname, filescalartype, filespace, H5P_DEFAULT, chunkspace, H5P_DEFAULT));
815:     PetscStackCallHDF5(H5Pclose,(chunkspace));
816:   } else {
817:     PetscStackCallHDF5Return(dset_id,H5Dopen2,(group, vecname, H5P_DEFAULT));
818:     PetscStackCallHDF5(H5Dset_extent,(dset_id, dims));
819:   }
820:   PetscStackCallHDF5(H5Sclose,(filespace));

822:   /* Each process defines a dataset and writes it to the hyperslab in the file */
823:   dim = 0;
824:   if (timestep >= 0) {
825:     count[dim] = 1;
826:     ++dim;
827:   }
828:   PetscHDF5IntCast(xin->map->n/bs,count + dim);
829:   ++dim;
830:   if (bs > 1 || dim2) {
831:     count[dim] = bs;
832:     ++dim;
833:   }
834: #if defined(PETSC_USE_COMPLEX)
835:   count[dim] = 2;
836:   ++dim;
837: #endif
838:   if (xin->map->n > 0) {
839:     PetscStackCallHDF5Return(memspace,H5Screate_simple,(dim, count, NULL));
840:   } else {
841:     /* Can't create dataspace with zero for any dimension, so create null dataspace. */
842:     PetscStackCallHDF5Return(memspace,H5Screate,(H5S_NULL));
843:   }

845:   /* Select hyperslab in the file */
846:   VecGetOwnershipRange(xin, &low, NULL);
847:   dim  = 0;
848:   if (timestep >= 0) {
849:     offset[dim] = timestep;
850:     ++dim;
851:   }
852:   PetscHDF5IntCast(low/bs,offset + dim);
853:   ++dim;
854:   if (bs > 1 || dim2) {
855:     offset[dim] = 0;
856:     ++dim;
857:   }
858: #if defined(PETSC_USE_COMPLEX)
859:   offset[dim] = 0;
860:   ++dim;
861: #endif
862:   if (xin->map->n > 0) {
863:     PetscStackCallHDF5Return(filespace,H5Dget_space,(dset_id));
864:     PetscStackCallHDF5(H5Sselect_hyperslab,(filespace, H5S_SELECT_SET, offset, NULL, count, NULL));
865:   } else {
866:     /* Create null filespace to match null memspace. */
867:     PetscStackCallHDF5Return(filespace,H5Screate,(H5S_NULL));
868:   }

870:   VecGetArrayRead(xin, &x);
871:   PetscStackCallHDF5(H5Dwrite,(dset_id, memscalartype, memspace, filespace, hdf5->dxpl_id, x));
872:   PetscStackCallHDF5(H5Fflush,(file_id, H5F_SCOPE_GLOBAL));
873:   VecRestoreArrayRead(xin, &x);

875:   /* Close/release resources */
876:   PetscStackCallHDF5(H5Gclose,(group));
877:   PetscStackCallHDF5(H5Sclose,(filespace));
878:   PetscStackCallHDF5(H5Sclose,(memspace));
879:   PetscStackCallHDF5(H5Dclose,(dset_id));

881: #if defined(PETSC_USE_COMPLEX)
882:   {
883:     PetscBool   tru = PETSC_TRUE;
884:     PetscViewerHDF5WriteObjectAttribute(viewer,(PetscObject)xin,"complex",PETSC_BOOL,&tru);
885:   }
886: #endif
887:   PetscInfo1(xin,"Wrote Vec object with name %s\n",vecname);
888:   return(0);
889: }
890: #endif

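/*
   VecView_MPI - Dispatches to the viewer-specific routine above based
   on the type of the viewer (ASCII, binary, draw, plus the optional
   Mathematica, HDF5, ADIOS, ADIOS2, MATLAB, and GLVis backends).

   A minimal usage sketch (assuming a standard PETSc main; error
   checking omitted for brevity):

     Vec x;
     VecCreateMPI(PETSC_COMM_WORLD,PETSC_DECIDE,100,&x);
     VecSet(x,1.0);
     PetscViewerPushFormat(PETSC_VIEWER_STDOUT_WORLD,PETSC_VIEWER_ASCII_MATLAB);
     VecView(x,PETSC_VIEWER_STDOUT_WORLD);
     PetscViewerPopFormat(PETSC_VIEWER_STDOUT_WORLD);
     VecDestroy(&x);
*/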
892: PETSC_EXTERN PetscErrorCode VecView_MPI(Vec xin,PetscViewer viewer)
893: {
895:   PetscBool      iascii,isbinary,isdraw;
896: #if defined(PETSC_HAVE_MATHEMATICA)
897:   PetscBool      ismathematica;
898: #endif
899: #if defined(PETSC_HAVE_HDF5)
900:   PetscBool      ishdf5;
901: #endif
902: #if defined(PETSC_HAVE_MATLAB_ENGINE)
903:   PetscBool      ismatlab;
904: #endif
905: #if defined(PETSC_HAVE_ADIOS)
906:   PetscBool      isadios;
907: #endif
908: #if defined(PETSC_HAVE_ADIOS2)
909:   PetscBool      isadios2;
910: #endif
911:   PetscBool      isglvis;

914:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
915:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);
916:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw);
917: #if defined(PETSC_HAVE_MATHEMATICA)
918:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERMATHEMATICA,&ismathematica);
919: #endif
920: #if defined(PETSC_HAVE_HDF5)
921:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERHDF5,&ishdf5);
922: #endif
923: #if defined(PETSC_HAVE_MATLAB_ENGINE)
924:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERMATLAB,&ismatlab);
925: #endif
926:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERGLVIS,&isglvis);
927: #if defined(PETSC_HAVE_ADIOS)
928:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERADIOS,&isadios);
929: #endif
930: #if defined(PETSC_HAVE_ADIOS2)
931:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERADIOS2,&isadios2);
932: #endif
933:   if (iascii) {
934:     VecView_MPI_ASCII(xin,viewer);
935:   } else if (isbinary) {
936:     VecView_MPI_Binary(xin,viewer);
937:   } else if (isdraw) {
938:     PetscViewerFormat format;
939:     PetscViewerGetFormat(viewer,&format);
940:     if (format == PETSC_VIEWER_DRAW_LG) {
941:       VecView_MPI_Draw_LG(xin,viewer);
942:     } else {
943:       VecView_MPI_Draw(xin,viewer);
944:     }
945: #if defined(PETSC_HAVE_MATHEMATICA)
946:   } else if (ismathematica) {
947:     PetscViewerMathematicaPutVector(viewer,xin);
948: #endif
949: #if defined(PETSC_HAVE_HDF5)
950:   } else if (ishdf5) {
951:     VecView_MPI_HDF5(xin,viewer);
952: #endif
953: #if defined(PETSC_HAVE_ADIOS)
954:   } else if (isadios) {
955:     VecView_MPI_ADIOS(xin,viewer);
956: #endif
957: #if defined(PETSC_HAVE_ADIOS2)
958:   } else if (isadios2) {
959:     VecView_MPI_ADIOS2(xin,viewer);
960: #endif
961: #if defined(PETSC_HAVE_MATLAB_ENGINE)
962:   } else if (ismatlab) {
963:     VecView_MPI_Matlab(xin,viewer);
964: #endif
965:   } else if (isglvis) {
966:     VecView_GLVis(xin,viewer);
967:   }
968:   return(0);
969: }

971: PetscErrorCode VecGetSize_MPI(Vec xin,PetscInt *N)
972: {
974:   *N = xin->map->N;
975:   return(0);
976: }

978: PetscErrorCode VecGetValues_MPI(Vec xin,PetscInt ni,const PetscInt ix[],PetscScalar y[])
979: {
980:   const PetscScalar *xx;
981:   PetscInt          i,tmp,start = xin->map->range[xin->stash.rank];
982:   PetscErrorCode    ierr;

985:   VecGetArrayRead(xin,&xx);
986:   for (i=0; i<ni; i++) {
987:     if (xin->stash.ignorenegidx && ix[i] < 0) continue;
988:     tmp = ix[i] - start;
989: #if defined(PETSC_USE_DEBUG)
990:     if (tmp < 0 || tmp >= xin->map->n) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Can only get local values, trying %D",ix[i]);
991: #endif
992:     y[i] = xx[tmp];
993:   }
994:   VecRestoreArrayRead(xin,&xx);
995:   return(0);
996: }

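/*
   VecSetValues_MPI - Inserts or adds entries by global index. Locally
   owned entries are written directly into the array; off-process
   entries are cached in the stash until VecAssemblyBegin/End.

   A minimal sketch of the off-process path (assuming x is distributed
   over at least two ranks, N is its global size, and rank is this
   process's rank in the vector's communicator; error checking omitted):

     if (!rank) {
       PetscInt    i = N-1;   /* owned by the last rank */
       PetscScalar v = 3.0;
       VecSetValues(x,1,&i,&v,INSERT_VALUES);
     }
     VecAssemblyBegin(x);
     VecAssemblyEnd(x);
*/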
998: PetscErrorCode VecSetValues_MPI(Vec xin,PetscInt ni,const PetscInt ix[],const PetscScalar y[],InsertMode addv)
999: {
1001:   PetscMPIInt    rank    = xin->stash.rank;
1002:   PetscInt       *owners = xin->map->range,start = owners[rank];
1003:   PetscInt       end     = owners[rank+1],i,row;
1004:   PetscScalar    *xx;

1007: #if defined(PETSC_USE_DEBUG)
1008:   if (xin->stash.insertmode == INSERT_VALUES && addv == ADD_VALUES) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"You have already inserted values; you cannot now add");
1009:   else if (xin->stash.insertmode == ADD_VALUES && addv == INSERT_VALUES) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"You have already added values; you cannot now insert");
1010: #endif
1011:   VecGetArray(xin,&xx);
1012:   xin->stash.insertmode = addv;

1014:   if (addv == INSERT_VALUES) {
1015:     for (i=0; i<ni; i++) {
1016:       if (xin->stash.ignorenegidx && ix[i] < 0) continue;
1017: #if defined(PETSC_USE_DEBUG)
1018:       if (ix[i] < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Out of range index value %D cannot be negative",ix[i]);
1019: #endif
1020:       if ((row = ix[i]) >= start && row < end) {
1021:         xx[row-start] = y[i];
1022:       } else if (!xin->stash.donotstash) {
1023: #if defined(PETSC_USE_DEBUG)
1024:         if (ix[i] >= xin->map->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Out of range index value %D maximum %D",ix[i],xin->map->N);
1025: #endif
1026:         VecStashValue_Private(&xin->stash,row,y[i]);
1027:       }
1028:     }
1029:   } else {
1030:     for (i=0; i<ni; i++) {
1031:       if (xin->stash.ignorenegidx && ix[i] < 0) continue;
1032: #if defined(PETSC_USE_DEBUG)
1033:       if (ix[i] < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Out of range index value %D cannot be negative",ix[i]);
1034: #endif
1035:       if ((row = ix[i]) >= start && row < end) {
1036:         xx[row-start] += y[i];
1037:       } else if (!xin->stash.donotstash) {
1038: #if defined(PETSC_USE_DEBUG)
1039:         if (ix[i] >= xin->map->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Out of range index value %D maximum %D",ix[i],xin->map->N);
1040: #endif
1041:         VecStashValue_Private(&xin->stash,row,y[i]);
1042:       }
1043:     }
1044:   }
1045:   VecRestoreArray(xin,&xx);
1046:   return(0);
1047: }

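/*
   VecSetValuesBlocked_MPI - Like VecSetValues_MPI, but each index
   addresses a block of bs consecutive entries; off-process blocks go
   to the block stash.

   A minimal sketch (assuming a block size of 2 was set on x with
   VecSetBlockSize before assembly; error checking omitted):

     PetscInt    ib   = 0;             /* block index, not entry index */
     PetscScalar v[2] = {1.0,2.0};
     VecSetValuesBlocked(x,1,&ib,v,ADD_VALUES);
     VecAssemblyBegin(x);
     VecAssemblyEnd(x);
*/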
1049: PetscErrorCode VecSetValuesBlocked_MPI(Vec xin,PetscInt ni,const PetscInt ix[],const PetscScalar yin[],InsertMode addv)
1050: {
1051:   PetscMPIInt    rank    = xin->stash.rank;
1052:   PetscInt       *owners = xin->map->range,start = owners[rank];
1054:   PetscInt       end = owners[rank+1],i,row,bs = PetscAbs(xin->map->bs),j;
1055:   PetscScalar    *xx,*y = (PetscScalar*)yin;

1058:   VecGetArray(xin,&xx);
1059: #if defined(PETSC_USE_DEBUG)
1060:   if (xin->stash.insertmode == INSERT_VALUES && addv == ADD_VALUES) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"You have already inserted values; you cannot now add");
1061:   else if (xin->stash.insertmode == ADD_VALUES && addv == INSERT_VALUES) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"You have already added values; you cannot now insert");
1062: #endif
1063:   xin->stash.insertmode = addv;

1065:   if (addv == INSERT_VALUES) {
1066:     for (i=0; i<ni; i++) {
1067:       if ((row = bs*ix[i]) >= start && row < end) {
1068:         for (j=0; j<bs; j++) xx[row-start+j] = y[j];
1069:       } else if (!xin->stash.donotstash) {
1070:         if (ix[i] < 0) { y += bs; continue; }
1071: #if defined(PETSC_USE_DEBUG)
1072:         if (ix[i] >= xin->map->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Out of range index value %D max %D",ix[i],xin->map->N);
1073: #endif
1074:         VecStashValuesBlocked_Private(&xin->bstash,ix[i],y);
1075:       }
1076:       y += bs;
1077:     }
1078:   } else {
1079:     for (i=0; i<ni; i++) {
1080:       if ((row = bs*ix[i]) >= start && row < end) {
1081:         for (j=0; j<bs; j++) xx[row-start+j] += y[j];
1082:       } else if (!xin->stash.donotstash) {
1083:         if (ix[i] < 0) { y += bs; continue; }
1084: #if defined(PETSC_USE_DEBUG)
1085:         if (ix[i] >= xin->map->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Out of range index value %D max %D",ix[i],xin->map->N);
1086: #endif
1087:         VecStashValuesBlocked_Private(&xin->bstash,ix[i],y);
1088:       }
1089:       y += bs;
1090:     }
1091:   }
1092:   VecRestoreArray(xin,&xx);
1093:   return(0);
1094: }

1096: /*
1097:    Since nsends or nreceives may be zero we add 1 in certain mallocs
1098:    to make sure we never malloc an empty one.
1099: */
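/*
   VecAssemblyBegin_MPI - Verifies with an allreduce that all ranks used
   a consistent InsertMode (erroring if some inserted while others
   added) and then begins scattering both the scalar and the block
   stashes to the owning ranks.
*/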
1100: PetscErrorCode VecAssemblyBegin_MPI(Vec xin)
1101: {
1103:   PetscInt       *owners = xin->map->range,*bowners,i,bs,nstash,reallocs;
1104:   PetscMPIInt    size;
1105:   InsertMode     addv;
1106:   MPI_Comm       comm;

1109:   PetscObjectGetComm((PetscObject)xin,&comm);
1110:   if (xin->stash.donotstash) return(0);

1112:   MPIU_Allreduce((PetscEnum*)&xin->stash.insertmode,(PetscEnum*)&addv,1,MPIU_ENUM,MPI_BOR,comm);
1113:   if (addv == (ADD_VALUES|INSERT_VALUES)) SETERRQ(comm,PETSC_ERR_ARG_NOTSAMETYPE,"Some processors inserted values while others added");
1114:   xin->stash.insertmode = addv; /* in case this processor had no cache */
1115:   xin->bstash.insertmode = addv; /* Block stash implicitly tracks InsertMode of scalar stash */

1117:   VecGetBlockSize(xin,&bs);
1118:   MPI_Comm_size(PetscObjectComm((PetscObject)xin),&size);
1119:   if (!xin->bstash.bowners && xin->map->bs != -1) {
1120:     PetscMalloc1(size+1,&bowners);
1121:     for (i=0; i<size+1; i++) bowners[i] = owners[i]/bs;
1122:     xin->bstash.bowners = bowners;
1123:   } else bowners = xin->bstash.bowners;

1125:   VecStashScatterBegin_Private(&xin->stash,owners);
1126:   VecStashScatterBegin_Private(&xin->bstash,bowners);
1127:   VecStashGetInfo_Private(&xin->stash,&nstash,&reallocs);
1128:   PetscInfo2(xin,"Stash has %D entries, uses %D mallocs.\n",nstash,reallocs);
1129:   VecStashGetInfo_Private(&xin->bstash,&nstash,&reallocs);
1130:   PetscInfo2(xin,"Block-Stash has %D entries, uses %D mallocs.\n",nstash,reallocs);
1131:   return(0);
1132: }

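/*
   VecAssemblyEnd_MPI - Drains the incoming stash messages, applying
   each received (row, value) pair to the local array with the agreed
   InsertMode, first for the scalar stash and then for the block stash,
   and finally resets the insert mode to NOT_SET_VALUES.
*/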
1134: PetscErrorCode VecAssemblyEnd_MPI(Vec vec)
1135: {
1137:   PetscInt       base,i,j,*row,flg,bs;
1138:   PetscMPIInt    n;
1139:   PetscScalar    *val,*vv,*array,*xarray;

1142:   if (!vec->stash.donotstash) {
1143:     VecGetArray(vec,&xarray);
1144:     VecGetBlockSize(vec,&bs);
1145:     base = vec->map->range[vec->stash.rank];

1147:     /* Process the stash */
1148:     while (1) {
1149:       VecStashScatterGetMesg_Private(&vec->stash,&n,&row,&val,&flg);
1150:       if (!flg) break;
1151:       if (vec->stash.insertmode == ADD_VALUES) {
1152:         for (i=0; i<n; i++) xarray[row[i] - base] += val[i];
1153:       } else if (vec->stash.insertmode == INSERT_VALUES) {
1154:         for (i=0; i<n; i++) xarray[row[i] - base] = val[i];
1155:       } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_CORRUPT,"Insert mode is not set correctly; corrupted vector");
1156:     }
1157:     VecStashScatterEnd_Private(&vec->stash);

1159:     /* now process the block-stash */
1160:     while (1) {
1161:       VecStashScatterGetMesg_Private(&vec->bstash,&n,&row,&val,&flg);
1162:       if (!flg) break;
1163:       for (i=0; i<n; i++) {
1164:         array = xarray+row[i]*bs-base;
1165:         vv    = val+i*bs;
1166:         if (vec->stash.insertmode == ADD_VALUES) {
1167:           for (j=0; j<bs; j++) array[j] += vv[j];
1168:         } else if (vec->stash.insertmode == INSERT_VALUES) {
1169:           for (j=0; j<bs; j++) array[j] = vv[j];
1170:         } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_CORRUPT,"Insert mode is not set correctly; corrupted vector");
1171:       }
1172:     }
1173:     VecStashScatterEnd_Private(&vec->bstash);
1174:     VecRestoreArray(vec,&xarray);
1175:   }
1176:   vec->stash.insertmode = NOT_SET_VALUES;
1177:   return(0);
1178: }