Actual source code: pdvec.c

petsc-3.3-p5 2012-12-01
  2: /*
  3:      Code for some of the parallel vector primitives.
  4: */
  5: #include <../src/vec/vec/impls/mpi/pvecimpl.h>   /*I  "petscvec.h"   I*/

  9: PetscErrorCode VecDestroy_MPI(Vec v)
 10: {
 11:   Vec_MPI        *x = (Vec_MPI*)v->data;

 15: #if defined(PETSC_USE_LOG)
 16:   PetscLogObjectState((PetscObject)v,"Length=%D",v->map->N);
 17: #endif
 18:   if (!x) return(0);
 19:   PetscFree(x->array_allocated);

 21:   /* Destroy local representation of vector if it exists */
 22:   if (x->localrep) {
 23:     VecDestroy(&x->localrep);
 24:     VecScatterDestroy(&x->localupdate);
 25:   }
 26:   /* Destroy the stashes: note the order, so that the tags are freed properly */
 27:   VecStashDestroy_Private(&v->bstash);
 28:   VecStashDestroy_Private(&v->stash);
 29:   PetscFree(v->data);
 30:   return(0);
 31: }
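
/*
   Editor's usage sketch (not part of the original pdvec.c): the lifecycle that ends in
   VecDestroy_MPI() above.  For a vector of type VECMPI, VecDestroy() dispatches to
   VecDestroy_MPI(), which frees the local array, the optional local representation and
   its update scatter, and both stashes.  Error checking is omitted to match the style
   of this listing; a real program should test the return code of every call.

     #include <petscvec.h>

     int main(int argc,char **argv)
     {
       Vec x;

       PetscInitialize(&argc,&argv,PETSC_NULL,PETSC_NULL);
       VecCreateMPI(PETSC_COMM_WORLD,PETSC_DECIDE,100,&x);
       VecSet(x,1.0);
       VecView(x,PETSC_VIEWER_STDOUT_WORLD);
       VecDestroy(&x);
       PetscFinalize();
       return 0;
     }
*/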

 35: PetscErrorCode VecView_MPI_ASCII(Vec xin,PetscViewer viewer)
 36: {
 37:   PetscErrorCode    ierr;
 38:   PetscInt          i,work = xin->map->n,cnt,len,nLen;
 39:   PetscMPIInt       j,n = 0,size,rank,tag = ((PetscObject)viewer)->tag;
 40:   MPI_Status        status;
 41:   PetscScalar       *values;
 42:   const PetscScalar *xarray;
 43:   const char        *name;
 44:   PetscViewerFormat format;

 47:   VecGetArrayRead(xin,&xarray);
 48:   /* determine maximum message to arrive */
 49:   MPI_Comm_rank(((PetscObject)xin)->comm,&rank);
 50:   MPI_Reduce(&work,&len,1,MPIU_INT,MPI_MAX,0,((PetscObject)xin)->comm);
 51:   MPI_Comm_size(((PetscObject)xin)->comm,&size);

 53:   if (!rank) {
 54:     PetscMalloc(len*sizeof(PetscScalar),&values);
 55:     PetscViewerGetFormat(viewer,&format);
 56:     /*
 57:         MATLAB format and ASCII format are very similar except
 58:         MATLAB uses %18.16e format while ASCII uses %g
 59:     */
 60:     if (format == PETSC_VIEWER_ASCII_MATLAB) {
 61:       PetscObjectGetName((PetscObject)xin,&name);
 62:       PetscViewerASCIIPrintf(viewer,"%s = [\n",name);
 63:       for (i=0; i<xin->map->n; i++) {
 64: #if defined(PETSC_USE_COMPLEX)
 65:         if (PetscImaginaryPart(xarray[i]) > 0.0) {
 66:           PetscViewerASCIIPrintf(viewer,"%18.16e + %18.16ei\n",PetscRealPart(xarray[i]),PetscImaginaryPart(xarray[i]));
 67:         } else if (PetscImaginaryPart(xarray[i]) < 0.0) {
 68:           PetscViewerASCIIPrintf(viewer,"%18.16e - %18.16ei\n",PetscRealPart(xarray[i]),-PetscImaginaryPart(xarray[i]));
 69:         } else {
 70:           PetscViewerASCIIPrintf(viewer,"%18.16e\n",PetscRealPart(xarray[i]));
 71:         }
 72: #else
 73:         PetscViewerASCIIPrintf(viewer,"%18.16e\n",xarray[i]);
 74: #endif
 75:       }
 76:       /* receive and print messages */
 77:       for (j=1; j<size; j++) {
 78:         MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,((PetscObject)xin)->comm,&status);
 79:         MPI_Get_count(&status,MPIU_SCALAR,&n);
 80:         for (i=0; i<n; i++) {
 81: #if defined(PETSC_USE_COMPLEX)
 82:           if (PetscImaginaryPart(values[i]) > 0.0) {
 83:             PetscViewerASCIIPrintf(viewer,"%18.16e + %18.16e i\n",PetscRealPart(values[i]),PetscImaginaryPart(values[i]));
 84:           } else if (PetscImaginaryPart(values[i]) < 0.0) {
 85:             PetscViewerASCIIPrintf(viewer,"%18.16e - %18.16e i\n",PetscRealPart(values[i]),-PetscImaginaryPart(values[i]));
 86:           } else {
 87:             PetscViewerASCIIPrintf(viewer,"%18.16e\n",PetscRealPart(values[i]));
 88:           }
 89: #else
 90:           PetscViewerASCIIPrintf(viewer,"%18.16e\n",values[i]);
 91: #endif
 92:         }
 93:       }
 94:       PetscViewerASCIIPrintf(viewer,"];\n");

 96:     } else if (format == PETSC_VIEWER_ASCII_SYMMODU) {
 97:       for (i=0; i<xin->map->n; i++) {
 98: #if defined(PETSC_USE_COMPLEX)
 99:         PetscViewerASCIIPrintf(viewer,"%18.16e %18.16e\n",PetscRealPart(xarray[i]),PetscImaginaryPart(xarray[i]));
100: #else
101:         PetscViewerASCIIPrintf(viewer,"%18.16e\n",xarray[i]);
102: #endif
103:       }
104:       /* receive and print messages */
105:       for (j=1; j<size; j++) {
106:         MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,((PetscObject)xin)->comm,&status);
107:         MPI_Get_count(&status,MPIU_SCALAR,&n);
108:         for (i=0; i<n; i++) {
109: #if defined(PETSC_USE_COMPLEX)
110:           PetscViewerASCIIPrintf(viewer,"%18.16e %18.16e\n",PetscRealPart(values[i]),PetscImaginaryPart(values[i]));
111: #else
112:           PetscViewerASCIIPrintf(viewer,"%18.16e\n",values[i]);
113: #endif
114:         }
115:       }
116:     } else if (format == PETSC_VIEWER_ASCII_VTK || format == PETSC_VIEWER_ASCII_VTK_CELL) {
117:       /*
118:         state 0: No header has been output
119:         state 1: Only POINT_DATA has been output
120:         state 2: Only CELL_DATA has been output
121:         state 3: Output both, POINT_DATA last
122:         state 4: Output both, CELL_DATA last
123:       */
124:       static PetscInt stateId = -1;
125:       int outputState = 0;
126:       PetscBool  hasState;
127:       int doOutput = 0;
128:       PetscInt bs, b;

130:       if (stateId < 0) {
131:         PetscObjectComposedDataRegister(&stateId);
132:       }
133:       PetscObjectComposedDataGetInt((PetscObject) viewer, stateId, outputState, hasState);
134:       if (!hasState) {
135:         outputState = 0;
136:       }
137:       PetscObjectGetName((PetscObject) xin, &name);
138:       VecGetLocalSize(xin, &nLen);
139:       n    = PetscMPIIntCast(nLen);
140:       VecGetBlockSize(xin, &bs);
141:       if (format == PETSC_VIEWER_ASCII_VTK) {
142:         if (outputState == 0) {
143:           outputState = 1;
144:           doOutput = 1;
145:         } else if (outputState == 1) {
146:           doOutput = 0;
147:         } else if (outputState == 2) {
148:           outputState = 3;
149:           doOutput = 1;
150:         } else if (outputState == 3) {
151:           doOutput = 0;
152:         } else if (outputState == 4) {
153:           SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE, "Tried to output POINT_DATA again after intervening CELL_DATA");
154:         }
155:         if (doOutput) {
156:           PetscViewerASCIIPrintf(viewer, "POINT_DATA %d\n", xin->map->N/bs);
157:         }
158:       } else {
159:         if (outputState == 0) {
160:           outputState = 2;
161:           doOutput = 1;
162:         } else if (outputState == 1) {
163:           outputState = 4;
164:           doOutput = 1;
165:         } else if (outputState == 2) {
166:           doOutput = 0;
167:         } else if (outputState == 3) {
168:           SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE, "Tried to output CELL_DATA again after intervening POINT_DATA");
169:         } else if (outputState == 4) {
170:           doOutput = 0;
171:         }
172:         if (doOutput) {
173:           PetscViewerASCIIPrintf(viewer, "CELL_DATA %d\n", xin->map->N/bs);
174:         }
175:       }
176:       PetscObjectComposedDataSetInt((PetscObject) viewer, stateId, outputState);
177:       if (name) {
178:         if (bs == 3) {
179:           PetscViewerASCIIPrintf(viewer, "VECTORS %s double\n", name);
180:         } else {
181:           PetscViewerASCIIPrintf(viewer, "SCALARS %s double %d\n", name, bs);
182:         }
183:       } else {
184:         PetscViewerASCIIPrintf(viewer, "SCALARS scalars double %d\n", bs);
185:       }
186:       if (bs != 3) {
187:         PetscViewerASCIIPrintf(viewer, "LOOKUP_TABLE default\n");
188:       }
189:       for (i=0; i<n/bs; i++) {
190:         for (b=0; b<bs; b++) {
191:           if (b > 0) {
192:             PetscViewerASCIIPrintf(viewer," ");
193:           }
194:           PetscViewerASCIIPrintf(viewer,"%G",PetscRealPart(xarray[i*bs+b]));
195:         }
196:         PetscViewerASCIIPrintf(viewer,"\n");
197:       }
198:       for (j=1; j<size; j++) {
199:         MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,((PetscObject)xin)->comm,&status);
200:         MPI_Get_count(&status,MPIU_SCALAR,&n);
201:         for (i=0; i<n/bs; i++) {
202:           for (b=0; b<bs; b++) {
203:             if (b > 0) {
204:               PetscViewerASCIIPrintf(viewer," ");
205:             }
206:             PetscViewerASCIIPrintf(viewer,"%G",PetscRealPart(values[i*bs+b]));
207:           }
208:           PetscViewerASCIIPrintf(viewer,"\n");
209:         }
210:       }
211:     } else if (format == PETSC_VIEWER_ASCII_VTK_COORDS) {
212:       PetscInt bs, b;

214:       VecGetLocalSize(xin, &nLen);
215:       n    = PetscMPIIntCast(nLen);
216:       VecGetBlockSize(xin, &bs);
217:       if ((bs < 1) || (bs > 3)) {
218:         SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE, "VTK can only handle 3D objects, but vector dimension is %d", bs);
219:       }
220:       for (i=0; i<n/bs; i++) {
221:         for (b=0; b<bs; b++) {
222:           if (b > 0) {
223:             PetscViewerASCIIPrintf(viewer," ");
224:           }
225:           PetscViewerASCIIPrintf(viewer,"%G",PetscRealPart(xarray[i*bs+b]));
226:         }
227:         for (b=bs; b<3; b++) {
228:           PetscViewerASCIIPrintf(viewer," 0.0");
229:         }
230:         PetscViewerASCIIPrintf(viewer,"\n");
231:       }
232:       for (j=1; j<size; j++) {
233:         MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,((PetscObject)xin)->comm,&status);
234:         MPI_Get_count(&status,MPIU_SCALAR,&n);
235:         for (i=0; i<n/bs; i++) {
236:           for (b=0; b<bs; b++) {
237:             if (b > 0) {
238:               PetscViewerASCIIPrintf(viewer," ");
239:             }
240:             PetscViewerASCIIPrintf(viewer,"%G",PetscRealPart(values[i*bs+b]));
241:           }
242:           for (b=bs; b<3; b++) {
243:             PetscViewerASCIIPrintf(viewer," 0.0");
244:           }
245:           PetscViewerASCIIPrintf(viewer,"\n");
246:         }
247:       }
248:     } else if (format == PETSC_VIEWER_ASCII_PCICE) {
249:       PetscInt bs, b, vertexCount = 1;

251:       VecGetLocalSize(xin, &nLen);
252:       n    = PetscMPIIntCast(nLen);
253:       VecGetBlockSize(xin, &bs);
254:       if ((bs < 1) || (bs > 3)) {
255:         SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE, "PCICE can only handle up to 3D objects, but vector dimension is %d", bs);
256:       }
257:       PetscViewerASCIIPrintf(viewer,"%D\n", xin->map->N/bs);
258:       for (i=0; i<n/bs; i++) {
259:         PetscViewerASCIIPrintf(viewer,"%7D   ", vertexCount++);
260:         for (b=0; b<bs; b++) {
261:           if (b > 0) {
262:             PetscViewerASCIIPrintf(viewer," ");
263:           }
264: #if !defined(PETSC_USE_COMPLEX)
265:           PetscViewerASCIIPrintf(viewer,"% 12.5E",xarray[i*bs+b]);
266: #endif
267:         }
268:         PetscViewerASCIIPrintf(viewer,"\n");
269:       }
270:       for (j=1; j<size; j++) {
271:         MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,((PetscObject)xin)->comm,&status);
272:         MPI_Get_count(&status,MPIU_SCALAR,&n);
273:         for (i=0; i<n/bs; i++) {
274:           PetscViewerASCIIPrintf(viewer,"%7D   ", vertexCount++);
275:           for (b=0; b<bs; b++) {
276:             if (b > 0) {
277:               PetscViewerASCIIPrintf(viewer," ");
278:             }
279: #if !defined(PETSC_USE_COMPLEX)
280:             PetscViewerASCIIPrintf(viewer,"% 12.5E",values[i*bs+b]);
281: #endif
282:           }
283:           PetscViewerASCIIPrintf(viewer,"\n");
284:         }
285:       }
286:     } else {
287:       PetscObjectPrintClassNamePrefixType((PetscObject)xin,viewer,"Vector Object");
288:       if (format != PETSC_VIEWER_ASCII_COMMON) {PetscViewerASCIIPrintf(viewer,"Process [%d]\n",rank);}
289:       cnt = 0;
290:       for (i=0; i<xin->map->n; i++) {
291:         if (format == PETSC_VIEWER_ASCII_INDEX) {
292:           PetscViewerASCIIPrintf(viewer,"%D: ",cnt++);
293:         }
294: #if defined(PETSC_USE_COMPLEX)
295:         if (PetscImaginaryPart(xarray[i]) > 0.0) {
296:           PetscViewerASCIIPrintf(viewer,"%G + %G i\n",PetscRealPart(xarray[i]),PetscImaginaryPart(xarray[i]));
297:         } else if (PetscImaginaryPart(xarray[i]) < 0.0) {
298:           PetscViewerASCIIPrintf(viewer,"%G - %G i\n",PetscRealPart(xarray[i]),-PetscImaginaryPart(xarray[i]));
299:         } else {
300:           PetscViewerASCIIPrintf(viewer,"%G\n",PetscRealPart(xarray[i]));
301:         }
302: #else
303:         PetscViewerASCIIPrintf(viewer,"%G\n",xarray[i]);
304: #endif
305:       }
306:       /* receive and print messages */
307:       for (j=1; j<size; j++) {
308:         MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,((PetscObject)xin)->comm,&status);
309:         MPI_Get_count(&status,MPIU_SCALAR,&n);
310:         if (format != PETSC_VIEWER_ASCII_COMMON) {
311:           PetscViewerASCIIPrintf(viewer,"Process [%d]\n",j);
312:         }
313:         for (i=0; i<n; i++) {
314:           if (format == PETSC_VIEWER_ASCII_INDEX) {
315:             PetscViewerASCIIPrintf(viewer,"%D: ",cnt++);
316:           }
317: #if defined(PETSC_USE_COMPLEX)
318:           if (PetscImaginaryPart(values[i]) > 0.0) {
319:             PetscViewerASCIIPrintf(viewer,"%G + %G i\n",PetscRealPart(values[i]),PetscImaginaryPart(values[i]));
320:           } else if (PetscImaginaryPart(values[i]) < 0.0) {
321:             PetscViewerASCIIPrintf(viewer,"%G - %G i\n",PetscRealPart(values[i]),-PetscImaginaryPart(values[i]));
322:           } else {
323:             PetscViewerASCIIPrintf(viewer,"%G\n",PetscRealPart(values[i]));
324:           }
325: #else
326:           PetscViewerASCIIPrintf(viewer,"%G\n",values[i]);
327: #endif
328:         }
329:       }
330:     }
331:     PetscFree(values);
332:   } else {
333:     PetscViewerGetFormat(viewer,&format);
334:     if (format == PETSC_VIEWER_ASCII_MATLAB) {
335:       /* this may be a collective operation so make sure everyone calls it */
336:       PetscObjectGetName((PetscObject)xin,&name);
337:     }
338:     /* send values */
339:     MPI_Send((void*)xarray,xin->map->n,MPIU_SCALAR,0,tag,((PetscObject)xin)->comm);
340:   }
341:   PetscViewerFlush(viewer);
342:   VecRestoreArrayRead(xin,&xarray);
343:   return(0);
344: }
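
/*
   Editor's usage sketch (not part of the original pdvec.c): driving the
   PETSC_VIEWER_ASCII_MATLAB branch of VecView_MPI_ASCII() above.  Assumes x is an
   already assembled parallel vector; the file name "x.m" is arbitrary and error
   checking is omitted.  The name set with PetscObjectSetName() becomes the MATLAB
   variable name.

     PetscViewer viewer;

     PetscObjectSetName((PetscObject)x,"x");
     PetscViewerASCIIOpen(PETSC_COMM_WORLD,"x.m",&viewer);
     PetscViewerSetFormat(viewer,PETSC_VIEWER_ASCII_MATLAB);
     VecView(x,viewer);
     PetscViewerDestroy(&viewer);
*/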

348: PetscErrorCode VecView_MPI_Binary(Vec xin,PetscViewer viewer)
349: {
350:   PetscErrorCode    ierr;
351:   PetscMPIInt       rank,size,mesgsize,tag = ((PetscObject)viewer)->tag, mesglen;
352:   PetscInt          len,n = xin->map->n,j,tr[2];
353:   int               fdes;
354:   MPI_Status        status;
355:   PetscScalar       *values;
356:   const PetscScalar *xarray;
357:   FILE              *file;
358: #if defined(PETSC_HAVE_MPIIO)
359:   PetscBool         isMPIIO;
360: #endif
361:   PetscBool         skipHeader;
362:   PetscInt          message_count,flowcontrolcount;

365:   VecGetArrayRead(xin,&xarray);
366:   PetscViewerBinaryGetDescriptor(viewer,&fdes);
367:   PetscViewerBinaryGetSkipHeader(viewer,&skipHeader);

369:   /* determine maximum message to arrive */
370:   MPI_Comm_rank(((PetscObject)xin)->comm,&rank);
371:   MPI_Comm_size(((PetscObject)xin)->comm,&size);

373:   if (!skipHeader) {
374:     tr[0] = VEC_FILE_CLASSID;
375:     tr[1] = xin->map->N;
376:     PetscViewerBinaryWrite(viewer,tr,2,PETSC_INT,PETSC_FALSE);
377:   }

379: #if defined(PETSC_HAVE_MPIIO)
380:   PetscViewerBinaryGetMPIIO(viewer,&isMPIIO);
381:   if (!isMPIIO) {
382: #endif
383:     PetscViewerFlowControlStart(viewer,&message_count,&flowcontrolcount);
384:     if (!rank) {
385:       PetscBinaryWrite(fdes,(void*)xarray,xin->map->n,PETSC_SCALAR,PETSC_FALSE);

387:       len = 0;
388:       for (j=1; j<size; j++) len = PetscMax(len,xin->map->range[j+1]-xin->map->range[j]);
389:       PetscMalloc(len*sizeof(PetscScalar),&values);
390:       mesgsize = PetscMPIIntCast(len);
391:       /* receive and save messages */
392:       for (j=1; j<size; j++) {
393:         PetscViewerFlowControlStepMaster(viewer,j,message_count,flowcontrolcount);
394:         MPI_Recv(values,mesgsize,MPIU_SCALAR,j,tag,((PetscObject)xin)->comm,&status);
395:         MPI_Get_count(&status,MPIU_SCALAR,&mesglen);
396:         n = (PetscInt)mesglen;
397:         PetscBinaryWrite(fdes,values,n,PETSC_SCALAR,PETSC_TRUE);
398:       }
399:       PetscViewerFlowControlEndMaster(viewer,message_count);
400:       PetscFree(values);
401:     } else {
402:       PetscViewerFlowControlStepWorker(viewer,rank,message_count);
403:       mesgsize = PetscMPIIntCast(xin->map->n);
404:       MPI_Send((void*)xarray,mesgsize,MPIU_SCALAR,0,tag,((PetscObject)xin)->comm);
405:       PetscViewerFlowControlEndWorker(viewer,message_count);
406:     }
407: #if defined(PETSC_HAVE_MPIIO)
408:   } else {
409:     MPI_Offset   off;
410:     MPI_File     mfdes;
411:     PetscMPIInt  gsizes[1],lsizes[1],lstarts[1];
412:     MPI_Datatype view;

414:     gsizes[0]  = PetscMPIIntCast(xin->map->N);
415:     lsizes[0]  = PetscMPIIntCast(n);
416:     lstarts[0] = PetscMPIIntCast(xin->map->rstart);
417:     MPI_Type_create_subarray(1,gsizes,lsizes,lstarts,MPI_ORDER_FORTRAN,MPIU_SCALAR,&view);
418:     MPI_Type_commit(&view);

420:     PetscViewerBinaryGetMPIIODescriptor(viewer,&mfdes);
421:     PetscViewerBinaryGetMPIIOOffset(viewer,&off);
422:     MPIU_File_write_all(mfdes,(void*)xarray,lsizes[0],MPIU_SCALAR,MPI_STATUS_IGNORE);
423:     PetscViewerBinaryAddMPIIOOffset(viewer,xin->map->N*sizeof(PetscScalar));
424:     MPI_Type_free(&view);
425:   }
426: #endif

428:   VecRestoreArrayRead(xin,&xarray);
429:   if (!rank) {
430:     PetscViewerBinaryGetInfoPointer(viewer,&file);
431:     if (file) {
432:       if (((PetscObject)xin)->prefix) {
433:         PetscFPrintf(PETSC_COMM_SELF,file,"-%svecload_block_size %D\n",((PetscObject)xin)->prefix,xin->map->bs);
434:       } else {
435:         PetscFPrintf(PETSC_COMM_SELF,file,"-vecload_block_size %D\n",xin->map->bs);
436:       }
437:     }
438:   }
439:   return(0);
440: }
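
/*
   Editor's usage sketch (not part of the original pdvec.c): writing a parallel vector
   with the binary viewer, which calls VecView_MPI_Binary() above, and reading it back
   with VecLoad().  Assumes x is an assembled parallel vector; "x.dat" is an arbitrary
   file name and error checking is omitted.  Whether the MPI-IO branch is taken depends
   only on how the viewer is configured, not on this calling code.

     PetscViewer viewer;
     Vec         y;

     PetscViewerBinaryOpen(PETSC_COMM_WORLD,"x.dat",FILE_MODE_WRITE,&viewer);
     VecView(x,viewer);
     PetscViewerDestroy(&viewer);

     PetscViewerBinaryOpen(PETSC_COMM_WORLD,"x.dat",FILE_MODE_READ,&viewer);
     VecCreate(PETSC_COMM_WORLD,&y);
     VecLoad(y,viewer);
     PetscViewerDestroy(&viewer);
*/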

444: PetscErrorCode VecView_MPI_Draw_LG(Vec xin,PetscViewer viewer)
445: {
446:   PetscDraw         draw;
447:   PetscBool         isnull;
448:   PetscErrorCode    ierr;

450: #if defined(PETSC_USE_64BIT_INDICES)
452:   PetscViewerDrawGetDraw(viewer,0,&draw);
453:   PetscDrawIsNull(draw,&isnull);
454:   if (isnull) return(0);
455:   SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Not supported with 64 bit integers");
456: #else
457:   PetscMPIInt       size,rank;
458:   int               i,N = xin->map->N,*lens;
459:   PetscReal         *xx,*yy;
460:   PetscDrawLG       lg;
461:   const PetscScalar *xarray;

464:   PetscViewerDrawGetDraw(viewer,0,&draw);
465:   PetscDrawIsNull(draw,&isnull);
466:   if (isnull) return(0);

468:   VecGetArrayRead(xin,&xarray);
469:   PetscViewerDrawGetDrawLG(viewer,0,&lg);
470:   PetscDrawCheckResizedWindow(draw);
471:   MPI_Comm_rank(((PetscObject)xin)->comm,&rank);
472:   MPI_Comm_size(((PetscObject)xin)->comm,&size);
473:   if (!rank) {
474:     PetscDrawLGReset(lg);
475:     PetscMalloc(2*(N+1)*sizeof(PetscReal),&xx);
476:     for (i=0; i<N; i++) {xx[i] = (PetscReal) i;}
477:     yy   = xx + N;
478:     PetscMalloc(size*sizeof(PetscInt),&lens);
479:     for (i=0; i<size; i++) {
480:       lens[i] = xin->map->range[i+1] - xin->map->range[i];
481:     }
482: #if !defined(PETSC_USE_COMPLEX)
483:     MPI_Gatherv((void*)xarray,xin->map->n,MPIU_REAL,yy,lens,xin->map->range,MPIU_REAL,0,((PetscObject)xin)->comm);
484: #else
485:     {
486:       PetscReal *xr;
487:       PetscMalloc((xin->map->n+1)*sizeof(PetscReal),&xr);
488:       for (i=0; i<xin->map->n; i++) {
489:         xr[i] = PetscRealPart(xarray[i]);
490:       }
491:       MPI_Gatherv(xr,xin->map->n,MPIU_REAL,yy,lens,xin->map->range,MPIU_REAL,0,((PetscObject)xin)->comm);
492:       PetscFree(xr);
493:     }
494: #endif
495:     PetscFree(lens);
496:     PetscDrawLGAddPoints(lg,N,&xx,&yy);
497:     PetscFree(xx);
498:   } else {
499: #if !defined(PETSC_USE_COMPLEX)
500:     MPI_Gatherv((void*)xarray,xin->map->n,MPIU_REAL,0,0,0,MPIU_REAL,0,((PetscObject)xin)->comm);
501: #else
502:     {
503:       PetscReal *xr;
504:       PetscMalloc((xin->map->n+1)*sizeof(PetscReal),&xr);
505:       for (i=0; i<xin->map->n; i++) {
506:         xr[i] = PetscRealPart(xarray[i]);
507:       }
508:       MPI_Gatherv(xr,xin->map->n,MPIU_REAL,0,0,0,MPIU_REAL,0,((PetscObject)xin)->comm);
509:       PetscFree(xr);
510:     }
511: #endif
512:   }
513:   PetscDrawLGDraw(lg);
514:   PetscDrawSynchronizedFlush(draw);
515:   VecRestoreArrayRead(xin,&xarray);
516: #endif
517:   return(0);
518: }

522: PetscErrorCode  VecView_MPI_Draw(Vec xin,PetscViewer viewer)
523: {
524:   PetscErrorCode    ierr;
525:   PetscMPIInt       rank,size,tag = ((PetscObject)viewer)->tag;
526:   PetscInt          i,start,end;
527:   MPI_Status        status;
528:   PetscReal         coors[4],ymin,ymax,xmin,xmax,tmp;
529:   PetscDraw         draw;
530:   PetscBool         isnull;
531:   PetscDrawAxis     axis;
532:   const PetscScalar *xarray;

535:   PetscViewerDrawGetDraw(viewer,0,&draw);
536:   PetscDrawIsNull(draw,&isnull); if (isnull) return(0);

538:   VecGetArrayRead(xin,&xarray);
539:   PetscDrawCheckResizedWindow(draw);
540:   xmin = 1.e20; xmax = -1.e20;
541:   for (i=0; i<xin->map->n; i++) {
542: #if defined(PETSC_USE_COMPLEX)
543:     if (PetscRealPart(xarray[i]) < xmin) xmin = PetscRealPart(xarray[i]);
544:     if (PetscRealPart(xarray[i]) > xmax) xmax = PetscRealPart(xarray[i]);
545: #else
546:     if (xarray[i] < xmin) xmin = xarray[i];
547:     if (xarray[i] > xmax) xmax = xarray[i];
548: #endif
549:   }
550:   if (xmin + 1.e-10 > xmax) {
551:     xmin -= 1.e-5;
552:     xmax += 1.e-5;
553:   }
554:   MPI_Reduce(&xmin,&ymin,1,MPIU_REAL,MPIU_MIN,0,((PetscObject)xin)->comm);
555:   MPI_Reduce(&xmax,&ymax,1,MPIU_REAL,MPIU_MAX,0,((PetscObject)xin)->comm);
556:   MPI_Comm_size(((PetscObject)xin)->comm,&size);
557:   MPI_Comm_rank(((PetscObject)xin)->comm,&rank);
558:   PetscDrawAxisCreate(draw,&axis);
559:   PetscLogObjectParent(draw,axis);
560:   if (!rank) {
561:     PetscDrawClear(draw);
562:     PetscDrawFlush(draw);
563:     PetscDrawAxisSetLimits(axis,0.0,(double)xin->map->N,ymin,ymax);
564:     PetscDrawAxisDraw(axis);
565:     PetscDrawGetCoordinates(draw,coors,coors+1,coors+2,coors+3);
566:   }
567:   PetscDrawAxisDestroy(&axis);
568:   MPI_Bcast(coors,4,MPIU_REAL,0,((PetscObject)xin)->comm);
569:   if (rank) {PetscDrawSetCoordinates(draw,coors[0],coors[1],coors[2],coors[3]);}
570:   /* draw local part of vector */
571:   VecGetOwnershipRange(xin,&start,&end);
572:   if (rank < size-1) { /*send value to right */
573:     MPI_Send((void*)&xarray[xin->map->n-1],1,MPIU_REAL,rank+1,tag,((PetscObject)xin)->comm);
574:   }
575:   for (i=1; i<xin->map->n; i++) {
576: #if !defined(PETSC_USE_COMPLEX)
577:     PetscDrawLine(draw,(PetscReal)(i-1+start),xarray[i-1],(PetscReal)(i+start),
578:                    xarray[i],PETSC_DRAW_RED);
579: #else
580:     PetscDrawLine(draw,(PetscReal)(i-1+start),PetscRealPart(xarray[i-1]),(PetscReal)(i+start),
581:                    PetscRealPart(xarray[i]),PETSC_DRAW_RED);
582: #endif
583:   }
584:   if (rank) { /* receive value from right */
585:     MPI_Recv(&tmp,1,MPIU_REAL,rank-1,tag,((PetscObject)xin)->comm,&status);
586: #if !defined(PETSC_USE_COMPLEX)
587:     PetscDrawLine(draw,(PetscReal)start-1,tmp,(PetscReal)start,xarray[0],PETSC_DRAW_RED);
588: #else
589:     PetscDrawLine(draw,(PetscReal)start-1,tmp,(PetscReal)start,PetscRealPart(xarray[0]),PETSC_DRAW_RED);
590: #endif
591:   }
592:   PetscDrawSynchronizedFlush(draw);
593:   PetscDrawPause(draw);
594:   VecRestoreArrayRead(xin,&xarray);
595:   return(0);
596: }
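
/*
   Editor's usage sketch (not part of the original pdvec.c): viewing a parallel vector
   in an X window.  A draw viewer with the default format reaches VecView_MPI_Draw()
   above; setting the PETSC_VIEWER_DRAW_LG format makes VecView_MPI() call
   VecView_MPI_Draw_LG() instead.  Assumes x is an assembled parallel vector and that
   PETSc was built with X support; error checking is omitted.

     PetscViewer viewer;

     PetscViewerDrawOpen(PETSC_COMM_WORLD,PETSC_NULL,"Vec x",PETSC_DECIDE,PETSC_DECIDE,300,300,&viewer);
     VecView(x,viewer);
     PetscViewerSetFormat(viewer,PETSC_VIEWER_DRAW_LG);
     VecView(x,viewer);
     PetscViewerDestroy(&viewer);
*/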

598: #if defined(PETSC_HAVE_MATLAB_ENGINE)
601: PetscErrorCode VecView_MPI_Matlab(Vec xin,PetscViewer viewer)
602: {
603:   PetscErrorCode    ierr;
604:   PetscMPIInt       rank,size,*lens;
605:   PetscInt          i,N = xin->map->N;
606:   const PetscScalar *xarray;
607:   PetscScalar       *xx;

610:   VecGetArrayRead(xin,&xarray);
611:   MPI_Comm_rank(((PetscObject)xin)->comm,&rank);
612:   MPI_Comm_size(((PetscObject)xin)->comm,&size);
613:   if (!rank) {
614:     PetscMalloc(N*sizeof(PetscScalar),&xx);
615:     PetscMalloc(size*sizeof(PetscMPIInt),&lens);
616:     for (i=0; i<size; i++) {
617:       lens[i] = xin->map->range[i+1] - xin->map->range[i];
618:     }
619:     MPI_Gatherv((void*)xarray,xin->map->n,MPIU_SCALAR,xx,lens,xin->map->range,MPIU_SCALAR,0,((PetscObject)xin)->comm);
620:     PetscFree(lens);

622:     PetscObjectName((PetscObject)xin);
623:     PetscViewerMatlabPutArray(viewer,N,1,xx,((PetscObject)xin)->name);

625:     PetscFree(xx);
626:   } else {
627:     MPI_Gatherv((void*)xarray,xin->map->n,MPIU_SCALAR,0,0,0,MPIU_SCALAR,0,((PetscObject)xin)->comm);
628:   }
629:   VecRestoreArrayRead(xin,&xarray);
630:   return(0);
631: }
632: #endif

634: #if defined(PETSC_HAVE_HDF5)
637: PetscErrorCode VecView_MPI_HDF5(Vec xin, PetscViewer viewer)
638: {
639:   /* TODO: It looks like we can remove the H5Sclose(filespace) and H5Dget_space(dset_id). Why do we do this? */
640:   hid_t             filespace; /* file dataspace identifier */
641:   hid_t             chunkspace; /* chunk dataset property identifier */
 642:   hid_t             plist_id;  /* property list identifier */
643:   hid_t             dset_id;   /* dataset identifier */
644:   hid_t             memspace;  /* memory dataspace identifier */
645:   hid_t             file_id;
646:   hid_t             group;
647:   hid_t             scalartype; /* scalar type (H5T_NATIVE_FLOAT or H5T_NATIVE_DOUBLE) */
648:   herr_t            status;
649:   PetscInt          bs = xin->map->bs > 0 ? xin->map->bs : 1;
 650:   hsize_t           dim;
651:   hsize_t           maxDims[4], dims[4], chunkDims[4], count[4],offset[4];
652:   PetscInt          timestep;
653:   PetscInt          low;
654:   const PetscScalar *x;
655:   const char        *vecname;
656:   PetscErrorCode    ierr;

659:   PetscViewerHDF5OpenGroup(viewer, &file_id, &group);
660:   PetscViewerHDF5GetTimestep(viewer, &timestep);

662:   /* Create the dataspace for the dataset.
663:    *
664:    * dims - holds the current dimensions of the dataset
665:    *
 666:    * maxDims - holds the maximum dimensions of the dataset (unlimited
 667:    * in the number of time steps, with the current sizes for the other
 668:    * dimensions, so only additional time steps can be added).
669:    *
670:    * chunkDims - holds the size of a single time step (required to
671:    * permit extending dataset).
672:    */
673:   dim  = 0;
674:   if (timestep >= 0) {
675:     dims[dim]    = timestep+1;
676:     maxDims[dim] = H5S_UNLIMITED;
677:     chunkDims[dim] = 1;
678:     ++dim;
679:   }
680:   dims[dim]    = PetscHDF5IntCast(xin->map->N)/bs;
681:   maxDims[dim] = dims[dim];
682:   chunkDims[dim] = dims[dim];
683:   ++dim;
684:   if (bs >= 1) {
685:     dims[dim]    = bs;
686:     maxDims[dim] = dims[dim];
687:     chunkDims[dim] = dims[dim];
688:     ++dim;
689:   }
690: #if defined(PETSC_USE_COMPLEX)
691:   dims[dim]    = 2;
692:   maxDims[dim] = dims[dim];
693:   chunkDims[dim] = dims[dim];
694:   ++dim;
695: #endif
 697:   filespace = H5Screate_simple(dim, dims, maxDims);
698:   if (filespace == -1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"Cannot H5Screate_simple()");

700: #if defined(PETSC_USE_REAL_SINGLE)
701:   scalartype = H5T_NATIVE_FLOAT;
702: #elif defined(PETSC_USE_REAL___FLOAT128)
703: #error "HDF5 output with 128 bit floats not supported."
704: #else
705:   scalartype = H5T_NATIVE_DOUBLE;
706: #endif

708:   /* Create the dataset with default properties and close filespace */
709:   PetscObjectGetName((PetscObject) xin, &vecname);
710:   if (!H5Lexists(group, vecname, H5P_DEFAULT)) {
711:     /* Create chunk */
712:     chunkspace = H5Pcreate(H5P_DATASET_CREATE);
713:     if (chunkspace == -1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"Cannot H5Pcreate()");
714:     status = H5Pset_chunk(chunkspace, dim, chunkDims); CHKERRQ(status);

716: #if (H5_VERS_MAJOR * 10000 + H5_VERS_MINOR * 100 + H5_VERS_RELEASE >= 10800)
717:     dset_id = H5Dcreate2(group, vecname, scalartype, filespace, H5P_DEFAULT, chunkspace, H5P_DEFAULT);
718: #else
719:     dset_id = H5Dcreate(group, vecname, scalartype, filespace, H5P_DEFAULT);
720: #endif
721:     if (dset_id == -1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"Cannot H5Dcreate2()");
722:     status = H5Pclose(chunkspace);CHKERRQ(status);
723:   } else {
724:     dset_id = H5Dopen2(group, vecname, H5P_DEFAULT);
725:     status = H5Dset_extent(dset_id, dims);CHKERRQ(status);
726:   }
727:   status = H5Sclose(filespace);CHKERRQ(status);

729:   /* Each process defines a dataset and writes it to the hyperslab in the file */
730:   dim = 0;
731:   if (timestep >= 0) {
732:     count[dim] = 1;
733:     ++dim;
734:   }
735:   count[dim] = PetscHDF5IntCast(xin->map->n)/bs;
736:   ++dim;
737:   if (bs >= 1) {
738:     count[dim] = bs;
739:     ++dim;
740:   }
741: #if defined(PETSC_USE_COMPLEX)
742:   count[dim] = 2;
743:   ++dim;
744: #endif
745:   if (xin->map->n > 0) {
746:     memspace = H5Screate_simple(dim, count, NULL);
747:     if (memspace == -1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"Cannot H5Screate_simple()");
748:   } else {
749:     /* Can't create dataspace with zero for any dimension, so create null dataspace. */
750:     memspace = H5Screate(H5S_NULL);
751:     if (memspace == -1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"Cannot H5Screate()");
752:   }

754:   /* Select hyperslab in the file */
755:   VecGetOwnershipRange(xin, &low, PETSC_NULL);
756:   dim = 0;
757:   if (timestep >= 0) {
758:     offset[dim] = timestep;
759:     ++dim;
760:   }
761:   offset[dim] = PetscHDF5IntCast(low/bs);
762:   ++dim;
763:   if (bs >= 1) {
764:     offset[dim] = 0;
765:     ++dim;
766:   }
767: #if defined(PETSC_USE_COMPLEX)
768:   offset[dim] = 0;
769:   ++dim;
770: #endif
771:   if (xin->map->n > 0) {
772:     filespace = H5Dget_space(dset_id);
773:     if (filespace == -1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"Cannot H5Dget_space()");
774:     status = H5Sselect_hyperslab(filespace, H5S_SELECT_SET, offset, NULL, count, NULL);CHKERRQ(status);
775:   } else {
776:     /* Create null filespace to match null memspace. */
777:     filespace = H5Screate(H5S_NULL);
778:     if (filespace == -1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"Cannot H5Screate(H5S_NULL)");
779:   }

781:   /* Create property list for collective dataset write */
782:   plist_id = H5Pcreate(H5P_DATASET_XFER);
783:   if (plist_id == -1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"Cannot H5Pcreate()");
784: #if defined(PETSC_HAVE_H5PSET_FAPL_MPIO)
785:   status = H5Pset_dxpl_mpio(plist_id, H5FD_MPIO_COLLECTIVE);CHKERRQ(status);
786: #endif
787:   /* To write dataset independently use H5Pset_dxpl_mpio(plist_id, H5FD_MPIO_INDEPENDENT) */

789:   VecGetArrayRead(xin, &x);
790:   status = H5Dwrite(dset_id, scalartype, memspace, filespace, plist_id, x);CHKERRQ(status);
791:   status = H5Fflush(file_id, H5F_SCOPE_GLOBAL);CHKERRQ(status);
792:   VecRestoreArrayRead(xin, &x);

794:   /* Close/release resources */
795:   if (group != file_id) {
796:     status = H5Gclose(group);CHKERRQ(status);
797:   }
798:   status = H5Pclose(plist_id);CHKERRQ(status);
799:   status = H5Sclose(filespace);CHKERRQ(status);
800:   status = H5Sclose(memspace);CHKERRQ(status);
801:   status = H5Dclose(dset_id);CHKERRQ(status);
802:   PetscInfo1(xin,"Wrote Vec object with name %s\n",vecname);
803:   return(0);
804: }
805: #endif
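
/*
   Editor's usage sketch (not part of the original pdvec.c): writing a vector to an
   HDF5 file through VecView_MPI_HDF5() above.  Requires a PETSc build configured with
   HDF5; the dataset is named after the vector, the file name "x.h5" is arbitrary, and
   error checking is omitted.

     PetscViewer viewer;

     PetscObjectSetName((PetscObject)x,"pressure");
     PetscViewerHDF5Open(PETSC_COMM_WORLD,"x.h5",FILE_MODE_WRITE,&viewer);
     VecView(x,viewer);
     PetscViewerDestroy(&viewer);
*/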

809: PetscErrorCode VecView_MPI(Vec xin,PetscViewer viewer)
810: {
812:   PetscBool      iascii,isbinary,isdraw;
813: #if defined(PETSC_HAVE_MATHEMATICA)
814:   PetscBool      ismathematica;
815: #endif
816: #if defined(PETSC_HAVE_HDF5)
817:   PetscBool      ishdf5;
818: #endif
819: #if defined(PETSC_HAVE_MATLAB_ENGINE) && !defined(PETSC_USE_COMPLEX) && !defined(PETSC_USE_REAL_SINGLE) && !defined(PETSC_USE_REAL___FLOAT128)
820:   PetscBool      ismatlab;
821: #endif

824:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
825:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);
826:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw);
827: #if defined(PETSC_HAVE_MATHEMATICA)
828:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERMATHEMATICA,&ismathematica);
829: #endif
830: #if defined(PETSC_HAVE_HDF5)
831:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERHDF5,&ishdf5);
832: #endif
833: #if defined(PETSC_HAVE_MATLAB_ENGINE) && !defined(PETSC_USE_COMPLEX) && !defined(PETSC_USE_REAL_SINGLE) && !defined(PETSC_USE_REAL___FLOAT128)
834:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERMATLAB,&ismatlab);
835: #endif
836:   if (iascii){
837:     VecView_MPI_ASCII(xin,viewer);
838:   } else if (isbinary) {
839:     VecView_MPI_Binary(xin,viewer);
840:   } else if (isdraw) {
841:     PetscViewerFormat format;

843:     PetscViewerGetFormat(viewer,&format);
844:     if (format == PETSC_VIEWER_DRAW_LG) {
845:       VecView_MPI_Draw_LG(xin,viewer);
846:     } else {
847:       VecView_MPI_Draw(xin,viewer);
848:     }
849: #if defined(PETSC_HAVE_MATHEMATICA)
850:   } else if (ismathematica) {
851:     PetscViewerMathematicaPutVector(viewer,xin);
852: #endif
853: #if defined(PETSC_HAVE_HDF5)
854:   } else if (ishdf5) {
855:     VecView_MPI_HDF5(xin,viewer);
856: #endif
857: #if defined(PETSC_HAVE_MATLAB_ENGINE) && !defined(PETSC_USE_COMPLEX) && !defined(PETSC_USE_REAL_SINGLE) && !defined(PETSC_USE_REAL___FLOAT128)
858:   } else if (ismatlab) {
859:     VecView_MPI_Matlab(xin,viewer);
860: #endif
861:   } else SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Viewer type %s not supported for this object",((PetscObject)viewer)->type_name);
862:   return(0);
863: }

867: PetscErrorCode VecGetSize_MPI(Vec xin,PetscInt *N)
868: {
870:   *N = xin->map->N;
871:   return(0);
872: }
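
/*
   Editor's note (not part of the original pdvec.c): for a VECMPI vector, VecGetSize()
   resolves to VecGetSize_MPI() above and returns the global length xin->map->N, while
   VecGetLocalSize() returns the locally owned length xin->map->n.  A minimal sketch,
   assuming x is a parallel vector:

     PetscInt N,n;

     VecGetSize(x,&N);
     VecGetLocalSize(x,&n);
*/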

876: PetscErrorCode VecGetValues_MPI(Vec xin,PetscInt ni,const PetscInt ix[],PetscScalar y[])
877: {
878:   const PetscScalar *xx;
879:   PetscInt          i,tmp,start = xin->map->range[xin->stash.rank];
880:   PetscErrorCode    ierr;

883:   VecGetArrayRead(xin,&xx);
884:   for (i=0; i<ni; i++) {
885:     if (xin->stash.ignorenegidx && ix[i] < 0) continue;
886:     tmp = ix[i] - start;
887: #if defined(PETSC_USE_DEBUG)
888:     if (tmp < 0 || tmp >= xin->map->n) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Can only get local values, trying %D",ix[i]);
889: #endif
890:     y[i] = xx[tmp];
891:   }
892:   VecRestoreArrayRead(xin,&xx);
893:   return(0);
894: }
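
/*
   Editor's usage sketch (not part of the original pdvec.c): VecGetValues() on a
   parallel vector can only return locally owned entries, as enforced by the range
   check in VecGetValues_MPI() above, so indices are taken from this process's
   ownership range.  Assumes x is an assembled parallel vector with at least two local
   entries; error checking is omitted.

     PetscInt    start,end,ix[2];
     PetscScalar y[2];

     VecGetOwnershipRange(x,&start,&end);
     ix[0] = start;
     ix[1] = start+1;
     VecGetValues(x,2,ix,y);
*/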

898: PetscErrorCode VecSetValues_MPI(Vec xin,PetscInt ni,const PetscInt ix[],const PetscScalar y[],InsertMode addv)
899: {
901:   PetscMPIInt    rank = xin->stash.rank;
902:   PetscInt       *owners = xin->map->range,start = owners[rank];
903:   PetscInt       end = owners[rank+1],i,row;
904:   PetscScalar    *xx;

907: #if defined(PETSC_USE_DEBUG)
908:   if (xin->stash.insertmode == INSERT_VALUES && addv == ADD_VALUES) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"You have already inserted values; you cannot now add");
909:   else if (xin->stash.insertmode == ADD_VALUES && addv == INSERT_VALUES) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"You have already added values; you cannot now insert");
910: #endif
911:   VecGetArray(xin,&xx);
912:   xin->stash.insertmode = addv;

914:   if (addv == INSERT_VALUES) {
915:     for (i=0; i<ni; i++) {
916:       if (xin->stash.ignorenegidx && ix[i] < 0) continue;
917: #if defined(PETSC_USE_DEBUG)
918:       if (ix[i] < 0) {
919:         SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Out of range index value %D cannot be negative",ix[i]);
920:       }
921: #endif
922:       if ((row = ix[i]) >= start && row < end) {
923:         xx[row-start] = y[i];
924:       } else if (!xin->stash.donotstash) {
925: #if defined(PETSC_USE_DEBUG)
926:         if (ix[i] >= xin->map->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Out of range index value %D maximum %D",ix[i],xin->map->N);
927: #endif
928:         VecStashValue_Private(&xin->stash,row,y[i]);
929:       }
930:     }
931:   } else {
932:     for (i=0; i<ni; i++) {
933:       if (xin->stash.ignorenegidx && ix[i] < 0) continue;
934: #if defined(PETSC_USE_DEBUG)
935:       if (ix[i] < 0) {
936:         SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Out of range index value %D cannot be negative",ix[i]);
937:       }
938: #endif
939:       if ((row = ix[i]) >= start && row < end) {
940:         xx[row-start] += y[i];
941:       } else if (!xin->stash.donotstash) {
942: #if defined(PETSC_USE_DEBUG)
 943:         if (ix[i] >= xin->map->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Out of range index value %D maximum %D",ix[i],xin->map->N);
944: #endif
945:         VecStashValue_Private(&xin->stash,row,y[i]);
946:       }
947:     }
948:   }
949:   VecRestoreArray(xin,&xx);
950:   return(0);
951: }
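
/*
   Editor's usage sketch (not part of the original pdvec.c): VecSetValues() may
   reference entries owned by other processes; VecSetValues_MPI() above places such
   entries in the stash, and they are communicated during VecAssemblyBegin()/
   VecAssemblyEnd().  Assumes x is a parallel vector of global length at least 100;
   the indices and values are arbitrary and error checking is omitted.

     PetscMPIInt rank;
     PetscInt    ix[2] = {0, 99};
     PetscScalar y[2]  = {1.0, 2.0};

     MPI_Comm_rank(PETSC_COMM_WORLD,&rank);
     if (!rank) VecSetValues(x,2,ix,y,ADD_VALUES);
     VecAssemblyBegin(x);
     VecAssemblyEnd(x);
*/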

955: PetscErrorCode VecSetValuesBlocked_MPI(Vec xin,PetscInt ni,const PetscInt ix[],const PetscScalar yin[],InsertMode addv)
956: {
957:   PetscMPIInt    rank = xin->stash.rank;
958:   PetscInt       *owners = xin->map->range,start = owners[rank];
960:   PetscInt       end = owners[rank+1],i,row,bs = xin->map->bs,j;
961:   PetscScalar    *xx,*y = (PetscScalar*)yin;

964:   VecGetArray(xin,&xx);
965: #if defined(PETSC_USE_DEBUG)
966:   if (xin->stash.insertmode == INSERT_VALUES && addv == ADD_VALUES) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"You have already inserted values; you cannot now add");
967:   else if (xin->stash.insertmode == ADD_VALUES && addv == INSERT_VALUES) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"You have already added values; you cannot now insert");
968: #endif
969:   xin->stash.insertmode = addv;

971:   if (addv == INSERT_VALUES) {
972:     for (i=0; i<ni; i++) {
973:       if ((row = bs*ix[i]) >= start && row < end) {
974:         for (j=0; j<bs; j++) {
975:           xx[row-start+j] = y[j];
976:         }
977:       } else if (!xin->stash.donotstash) {
978:         if (ix[i] < 0) continue;
979: #if defined(PETSC_USE_DEBUG)
980:         if (ix[i] >= xin->map->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Out of range index value %D max %D",ix[i],xin->map->N);
981: #endif
982:         VecStashValuesBlocked_Private(&xin->bstash,ix[i],y);
983:       }
984:       y += bs;
985:     }
986:   } else {
987:     for (i=0; i<ni; i++) {
988:       if ((row = bs*ix[i]) >= start && row < end) {
989:         for (j=0; j<bs; j++) {
990:           xx[row-start+j] += y[j];
991:         }
992:       } else if (!xin->stash.donotstash) {
993:         if (ix[i] < 0) continue;
994: #if defined(PETSC_USE_DEBUG)
 995:         if (ix[i] >= xin->map->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Out of range index value %D max %D",ix[i],xin->map->N);
996: #endif
997:         VecStashValuesBlocked_Private(&xin->bstash,ix[i],y);
998:       }
999:       y += bs;
1000:     }
1001:   }
1002:   VecRestoreArray(xin,&xx);
1003:   return(0);
1004: }
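
/*
   Editor's usage sketch (not part of the original pdvec.c): blocked insertion handled
   by VecSetValuesBlocked_MPI() above.  Each index addresses a block of bs consecutive
   entries, so one call sets bs values; block index 4 with bs = 3 covers entries 12-14.
   The sizes are illustrative and error checking is omitted.

     Vec         x;
     PetscInt    bs = 3,ib = 4;
     PetscScalar vals[3] = {1.0, 2.0, 3.0};

     VecCreate(PETSC_COMM_WORLD,&x);
     VecSetSizes(x,PETSC_DECIDE,30);
     VecSetBlockSize(x,bs);
     VecSetFromOptions(x);
     VecSetValuesBlocked(x,1,&ib,vals,INSERT_VALUES);
     VecAssemblyBegin(x);
     VecAssemblyEnd(x);
     VecDestroy(&x);
*/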

1006: /*
1007:    Since nsends or nreceives may be zero, we add 1 in certain mallocs
1008: to make sure we never malloc an empty one.
1009: */
1012: PetscErrorCode VecAssemblyBegin_MPI(Vec xin)
1013: {
1015:   PetscInt       *owners = xin->map->range,*bowners,i,bs,nstash,reallocs;
1016:   PetscMPIInt    size;
1017:   InsertMode     addv;
1018:   MPI_Comm       comm = ((PetscObject)xin)->comm;

1021:   if (xin->stash.donotstash) {
1022:     return(0);
1023:   }

1025:   MPI_Allreduce(&xin->stash.insertmode,&addv,1,MPI_INT,MPI_BOR,comm);
1026:   if (addv == (ADD_VALUES|INSERT_VALUES)) SETERRQ(comm,PETSC_ERR_ARG_NOTSAMETYPE,"Some processors inserted values while others added");
1027:   xin->stash.insertmode = addv; /* in case this processor had no cache */

1029:   bs = xin->map->bs;
1030:   MPI_Comm_size(((PetscObject)xin)->comm,&size);
1031:   if (!xin->bstash.bowners && xin->map->bs != -1) {
1032:     PetscMalloc((size+1)*sizeof(PetscInt),&bowners);
1033:     for (i=0; i<size+1; i++){ bowners[i] = owners[i]/bs;}
1034:     xin->bstash.bowners = bowners;
1035:   } else {
1036:     bowners = xin->bstash.bowners;
1037:   }
1038:   VecStashScatterBegin_Private(&xin->stash,owners);
1039:   VecStashScatterBegin_Private(&xin->bstash,bowners);
1040:   VecStashGetInfo_Private(&xin->stash,&nstash,&reallocs);
1041:   PetscInfo2(xin,"Stash has %D entries, uses %D mallocs.\n",nstash,reallocs);
1042:   VecStashGetInfo_Private(&xin->bstash,&nstash,&reallocs);
1043:   PetscInfo2(xin,"Block-Stash has %D entries, uses %D mallocs.\n",nstash,reallocs);

1045:   return(0);
1046: }

1050: PetscErrorCode VecAssemblyEnd_MPI(Vec vec)
1051: {
1053:   PetscInt       base,i,j,*row,flg,bs;
1054:   PetscMPIInt    n;
1055:   PetscScalar    *val,*vv,*array,*xarray;

1058:   if (!vec->stash.donotstash) {
1059:     VecGetArray(vec,&xarray);
1060:     base = vec->map->range[vec->stash.rank];
1061:     bs   = vec->map->bs;

1063:     /* Process the stash */
1064:     while (1) {
1065:       VecStashScatterGetMesg_Private(&vec->stash,&n,&row,&val,&flg);
1066:       if (!flg) break;
1067:       if (vec->stash.insertmode == ADD_VALUES) {
1068:         for (i=0; i<n; i++) { xarray[row[i] - base] += val[i]; }
1069:       } else if (vec->stash.insertmode == INSERT_VALUES) {
1070:         for (i=0; i<n; i++) { xarray[row[i] - base] = val[i]; }
1071:       } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_CORRUPT,"Insert mode is not set correctly; corrupted vector");
1072:     }
1073:     VecStashScatterEnd_Private(&vec->stash);

1075:     /* now process the block-stash */
1076:     while (1) {
1077:       VecStashScatterGetMesg_Private(&vec->bstash,&n,&row,&val,&flg);
1078:       if (!flg) break;
1079:       for (i=0; i<n; i++) {
1080:         array = xarray+row[i]*bs-base;
1081:         vv    = val+i*bs;
1082:         if (vec->stash.insertmode == ADD_VALUES) {
1083:           for (j=0; j<bs; j++) { array[j] += vv[j];}
1084:         } else if (vec->stash.insertmode == INSERT_VALUES) {
1085:           for (j=0; j<bs; j++) { array[j] = vv[j]; }
1086:         } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_CORRUPT,"Insert mode is not set correctly; corrupted vector");
1087:       }
1088:     }
1089:     VecStashScatterEnd_Private(&vec->bstash);
1090:     VecRestoreArray(vec,&xarray);
1091:   }
1092:   vec->stash.insertmode = NOT_SET_VALUES;
1093:   return(0);
1094: }
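
/*
   Editor's note with a sketch (not part of the original pdvec.c): VecAssemblyBegin()
   and VecAssemblyEnd() are collective, so after a round of VecSetValues() or
   VecSetValuesBlocked() every process must call them, even a process that set no
   values; otherwise the MPI_Allreduce in VecAssemblyBegin_MPI() and the stash
   scatters cannot complete.  Here have_entries, ni, ix and y are placeholders and
   error checking is omitted.

     if (have_entries) {
       VecSetValues(x,ni,ix,y,INSERT_VALUES);
     }
     VecAssemblyBegin(x);
     VecAssemblyEnd(x);
*/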