/*
   pdvec.c (petsc-3.14.3, 2021-01-09): code for some of the parallel vector primitives.
*/
#include <../src/vec/vec/impls/mpi/pvecimpl.h>
#include <petsc/private/viewerimpl.h>
#include <petsc/private/viewerhdf5impl.h>
#include <petsc/private/glvisviewerimpl.h>
#include <petsc/private/glvisvecimpl.h>

PetscErrorCode VecDestroy_MPI(Vec v)
{
  Vec_MPI *x = (Vec_MPI*)v->data;

#if defined(PETSC_USE_LOG)
  PetscLogObjectState((PetscObject)v,"Length=%D",v->map->N);
#endif
  if (!x) return(0);
  PetscFree(x->array_allocated);

  /* Destroy local representation of vector if it exists */
  if (x->localrep) {
    VecDestroy(&x->localrep);
    VecScatterDestroy(&x->localupdate);
  }
  VecAssemblyReset_MPI(v);

  /* Destroy the stashes: note the order - so that the tags are freed properly */
  VecStashDestroy_Private(&v->bstash);
  VecStashDestroy_Private(&v->stash);
  PetscFree(v->data);
  return(0);
}
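
/*
   Usage sketch (not part of this file; standard PETSc API assumed): the
   viewers below are normally reached through VecView(). For example, a
   MATLAB-format ASCII dump of a parallel vector:

     PetscViewer viewer;
     PetscViewerASCIIOpen(PETSC_COMM_WORLD,"x.m",&viewer);
     PetscViewerPushFormat(viewer,PETSC_VIEWER_ASCII_MATLAB);
     VecView(x,viewer);
     PetscViewerPopFormat(viewer);
     PetscViewerDestroy(&viewer);
*/
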
PetscErrorCode VecView_MPI_ASCII(Vec xin,PetscViewer viewer)
{
  PetscErrorCode    ierr;
  PetscInt          i,work = xin->map->n,cnt,len,nLen;
  PetscMPIInt       j,n = 0,size,rank,tag = ((PetscObject)viewer)->tag;
  MPI_Status        status;
  PetscScalar       *values;
  const PetscScalar *xarray;
  const char        *name;
  PetscViewerFormat format;

  MPI_Comm_size(PetscObjectComm((PetscObject)xin),&size);
  PetscViewerGetFormat(viewer,&format);
  if (format == PETSC_VIEWER_LOAD_BALANCE) {
    PetscInt nmax = 0,nmin = xin->map->n,navg;
    for (i=0; i<(PetscInt)size; i++) {
      nmax = PetscMax(nmax,xin->map->range[i+1] - xin->map->range[i]);
      nmin = PetscMin(nmin,xin->map->range[i+1] - xin->map->range[i]);
    }
    navg = xin->map->N/size;
    PetscViewerASCIIPrintf(viewer," Load Balance - Local vector size Min %D avg %D max %D\n",nmin,navg,nmax);
    return(0);
  }

  VecGetArrayRead(xin,&xarray);
  /* determine maximum message to arrive */
  MPI_Comm_rank(PetscObjectComm((PetscObject)xin),&rank);
  MPI_Reduce(&work,&len,1,MPIU_INT,MPI_MAX,0,PetscObjectComm((PetscObject)xin));
  if (format == PETSC_VIEWER_ASCII_GLVIS) { rank = 0, len = 0; } /* no parallel distributed write support from GLVis */
  if (!rank) {
    PetscMalloc1(len,&values);
    /*
       MATLAB format and ASCII format are very similar except
       MATLAB uses %18.16e format while ASCII uses %g
    */
    if (format == PETSC_VIEWER_ASCII_MATLAB) {
      PetscObjectGetName((PetscObject)xin,&name);
      PetscViewerASCIIPrintf(viewer,"%s = [\n",name);
      for (i=0; i<xin->map->n; i++) {
#if defined(PETSC_USE_COMPLEX)
        if (PetscImaginaryPart(xarray[i]) > 0.0) {
          PetscViewerASCIIPrintf(viewer,"%18.16e + %18.16ei\n",(double)PetscRealPart(xarray[i]),(double)PetscImaginaryPart(xarray[i]));
        } else if (PetscImaginaryPart(xarray[i]) < 0.0) {
          PetscViewerASCIIPrintf(viewer,"%18.16e - %18.16ei\n",(double)PetscRealPart(xarray[i]),-(double)PetscImaginaryPart(xarray[i]));
        } else {
          PetscViewerASCIIPrintf(viewer,"%18.16e\n",(double)PetscRealPart(xarray[i]));
        }
#else
        PetscViewerASCIIPrintf(viewer,"%18.16e\n",(double)xarray[i]);
#endif
      }
      /* receive and print messages */
      for (j=1; j<size; j++) {
        MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,PetscObjectComm((PetscObject)xin),&status);
        MPI_Get_count(&status,MPIU_SCALAR,&n);
        for (i=0; i<n; i++) {
#if defined(PETSC_USE_COMPLEX)
          if (PetscImaginaryPart(values[i]) > 0.0) {
            PetscViewerASCIIPrintf(viewer,"%18.16e + %18.16ei\n",(double)PetscRealPart(values[i]),(double)PetscImaginaryPart(values[i]));
          } else if (PetscImaginaryPart(values[i]) < 0.0) {
            PetscViewerASCIIPrintf(viewer,"%18.16e - %18.16ei\n",(double)PetscRealPart(values[i]),-(double)PetscImaginaryPart(values[i]));
          } else {
            PetscViewerASCIIPrintf(viewer,"%18.16e\n",(double)PetscRealPart(values[i]));
          }
#else
          PetscViewerASCIIPrintf(viewer,"%18.16e\n",(double)values[i]);
#endif
        }
      }
      PetscViewerASCIIPrintf(viewer,"];\n");

    } else if (format == PETSC_VIEWER_ASCII_SYMMODU) {
      for (i=0; i<xin->map->n; i++) {
#if defined(PETSC_USE_COMPLEX)
        PetscViewerASCIIPrintf(viewer,"%18.16e %18.16e\n",(double)PetscRealPart(xarray[i]),(double)PetscImaginaryPart(xarray[i]));
#else
        PetscViewerASCIIPrintf(viewer,"%18.16e\n",(double)xarray[i]);
#endif
      }
      /* receive and print messages */
      for (j=1; j<size; j++) {
        MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,PetscObjectComm((PetscObject)xin),&status);
        MPI_Get_count(&status,MPIU_SCALAR,&n);
        for (i=0; i<n; i++) {
#if defined(PETSC_USE_COMPLEX)
          PetscViewerASCIIPrintf(viewer,"%18.16e %18.16e\n",(double)PetscRealPart(values[i]),(double)PetscImaginaryPart(values[i]));
#else
          PetscViewerASCIIPrintf(viewer,"%18.16e\n",(double)values[i]);
#endif
        }
      }
    } else if (format == PETSC_VIEWER_ASCII_VTK_DEPRECATED || format == PETSC_VIEWER_ASCII_VTK_CELL_DEPRECATED) {
      /*
         state 0: No header has been output
         state 1: Only POINT_DATA has been output
         state 2: Only CELL_DATA has been output
         state 3: Output both, POINT_DATA last
         state 4: Output both, CELL_DATA last
      */
      static PetscInt stateId     = -1;
      int             outputState = 0;
      int             doOutput    = 0;
      PetscBool       hasState;
      PetscInt        bs, b;

      if (stateId < 0) {
        PetscObjectComposedDataRegister(&stateId);
      }
      PetscObjectComposedDataGetInt((PetscObject) viewer, stateId, outputState, hasState);
      if (!hasState) outputState = 0;

      PetscObjectGetName((PetscObject)xin,&name);
      VecGetLocalSize(xin, &nLen);
      PetscMPIIntCast(nLen,&n);
      VecGetBlockSize(xin, &bs);
      if (format == PETSC_VIEWER_ASCII_VTK_DEPRECATED) {
        if (outputState == 0) {
          outputState = 1;
          doOutput    = 1;
        } else if (outputState == 1) doOutput = 0;
        else if (outputState == 2) {
          outputState = 3;
          doOutput    = 1;
        } else if (outputState == 3) doOutput = 0;
        else if (outputState == 4) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE, "Tried to output POINT_DATA again after intervening CELL_DATA");

        if (doOutput) {
          PetscViewerASCIIPrintf(viewer, "POINT_DATA %d\n", xin->map->N/bs);
        }
      } else {
        if (outputState == 0) {
          outputState = 2;
          doOutput    = 1;
        } else if (outputState == 1) {
          outputState = 4;
          doOutput    = 1;
        } else if (outputState == 2) doOutput = 0;
        else if (outputState == 3) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE, "Tried to output CELL_DATA again after intervening POINT_DATA");
        else if (outputState == 4) doOutput = 0;

        if (doOutput) {
          PetscViewerASCIIPrintf(viewer, "CELL_DATA %d\n", xin->map->N/bs);
        }
      }
      PetscObjectComposedDataSetInt((PetscObject) viewer, stateId, outputState);
      if (name) {
        if (bs == 3) {
          PetscViewerASCIIPrintf(viewer, "VECTORS %s double\n", name);
        } else {
          PetscViewerASCIIPrintf(viewer, "SCALARS %s double %d\n", name, bs);
        }
      } else {
        PetscViewerASCIIPrintf(viewer, "SCALARS scalars double %d\n", bs);
      }
      if (bs != 3) {
        PetscViewerASCIIPrintf(viewer, "LOOKUP_TABLE default\n");
      }
      for (i=0; i<n/bs; i++) {
        for (b=0; b<bs; b++) {
          if (b > 0) {
            PetscViewerASCIIPrintf(viewer," ");
          }
          PetscViewerASCIIPrintf(viewer,"%g",(double)PetscRealPart(xarray[i*bs+b]));
        }
        PetscViewerASCIIPrintf(viewer,"\n");
      }
      for (j=1; j<size; j++) {
        MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,PetscObjectComm((PetscObject)xin),&status);
        MPI_Get_count(&status,MPIU_SCALAR,&n);
        for (i=0; i<n/bs; i++) {
          for (b=0; b<bs; b++) {
            if (b > 0) {
              PetscViewerASCIIPrintf(viewer," ");
            }
            PetscViewerASCIIPrintf(viewer,"%g",(double)PetscRealPart(values[i*bs+b]));
          }
          PetscViewerASCIIPrintf(viewer,"\n");
        }
      }
    } else if (format == PETSC_VIEWER_ASCII_VTK_COORDS_DEPRECATED) {
      PetscInt bs, b;

      VecGetLocalSize(xin, &nLen);
      PetscMPIIntCast(nLen,&n);
      VecGetBlockSize(xin, &bs);
      if ((bs < 1) || (bs > 3)) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE, "VTK can only handle 3D objects, but vector dimension is %d", bs);

      for (i=0; i<n/bs; i++) {
        for (b=0; b<bs; b++) {
          if (b > 0) {
            PetscViewerASCIIPrintf(viewer," ");
          }
          PetscViewerASCIIPrintf(viewer,"%g",(double)PetscRealPart(xarray[i*bs+b]));
        }
        for (b=bs; b<3; b++) {
          PetscViewerASCIIPrintf(viewer," 0.0");
        }
        PetscViewerASCIIPrintf(viewer,"\n");
      }
      for (j=1; j<size; j++) {
        MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,PetscObjectComm((PetscObject)xin),&status);
        MPI_Get_count(&status,MPIU_SCALAR,&n);
        for (i=0; i<n/bs; i++) {
          for (b=0; b<bs; b++) {
            if (b > 0) {
              PetscViewerASCIIPrintf(viewer," ");
            }
            PetscViewerASCIIPrintf(viewer,"%g",(double)PetscRealPart(values[i*bs+b]));
          }
          for (b=bs; b<3; b++) {
            PetscViewerASCIIPrintf(viewer," 0.0");
          }
          PetscViewerASCIIPrintf(viewer,"\n");
        }
      }
    } else if (format == PETSC_VIEWER_ASCII_PCICE) {
      PetscInt bs, b, vertexCount = 1;

      VecGetLocalSize(xin, &nLen);
      PetscMPIIntCast(nLen,&n);
      VecGetBlockSize(xin, &bs);
      if ((bs < 1) || (bs > 3)) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE, "PCICE can only handle up to 3D objects, but vector dimension is %d", bs);

      PetscViewerASCIIPrintf(viewer,"%D\n", xin->map->N/bs);
      for (i=0; i<n/bs; i++) {
        PetscViewerASCIIPrintf(viewer,"%7D ", vertexCount++);
        for (b=0; b<bs; b++) {
          if (b > 0) {
            PetscViewerASCIIPrintf(viewer," ");
          }
#if !defined(PETSC_USE_COMPLEX)
          PetscViewerASCIIPrintf(viewer,"% 12.5E",(double)xarray[i*bs+b]);
#endif
        }
        PetscViewerASCIIPrintf(viewer,"\n");
      }
      for (j=1; j<size; j++) {
        MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,PetscObjectComm((PetscObject)xin),&status);
        MPI_Get_count(&status,MPIU_SCALAR,&n);
        for (i=0; i<n/bs; i++) {
          PetscViewerASCIIPrintf(viewer,"%7D ", vertexCount++);
          for (b=0; b<bs; b++) {
            if (b > 0) {
              PetscViewerASCIIPrintf(viewer," ");
            }
#if !defined(PETSC_USE_COMPLEX)
            PetscViewerASCIIPrintf(viewer,"% 12.5E",(double)values[i*bs+b]);
#endif
          }
          PetscViewerASCIIPrintf(viewer,"\n");
        }
      }
    } else if (format == PETSC_VIEWER_ASCII_GLVIS) {
      /* GLVis ASCII visualization/dump: this function mimics mfem::GridFunction::Save() */
      const PetscScalar       *array;
      PetscInt                i,n,vdim, ordering = 1; /* mfem::FiniteElementSpace::Ordering::byVDIM */
      PetscContainer          glvis_container;
      PetscViewerGLVisVecInfo glvis_vec_info;
      PetscViewerGLVisInfo    glvis_info;
      PetscErrorCode          ierr;

      /* mfem::FiniteElementSpace::Save() */
      VecGetBlockSize(xin,&vdim);
      PetscViewerASCIIPrintf(viewer,"FiniteElementSpace\n");
      PetscObjectQuery((PetscObject)xin,"_glvis_info_container",(PetscObject*)&glvis_container);
      if (!glvis_container) SETERRQ(PetscObjectComm((PetscObject)xin),PETSC_ERR_PLIB,"Missing GLVis container");
      PetscContainerGetPointer(glvis_container,(void**)&glvis_vec_info);
      PetscViewerASCIIPrintf(viewer,"%s\n",glvis_vec_info->fec_type);
      PetscViewerASCIIPrintf(viewer,"VDim: %d\n",vdim);
      PetscViewerASCIIPrintf(viewer,"Ordering: %d\n",ordering);
      PetscViewerASCIIPrintf(viewer,"\n");
      /* mfem::Vector::Print() */
      PetscObjectQuery((PetscObject)viewer,"_glvis_info_container",(PetscObject*)&glvis_container);
      if (!glvis_container) SETERRQ(PetscObjectComm((PetscObject)viewer),PETSC_ERR_PLIB,"Missing GLVis container");
      PetscContainerGetPointer(glvis_container,(void**)&glvis_info);
      if (glvis_info->enabled) {
        VecGetLocalSize(xin,&n);
        VecGetArrayRead(xin,&array);
        for (i=0;i<n;i++) {
          PetscViewerASCIIPrintf(viewer,glvis_info->fmt,(double)PetscRealPart(array[i]));
          PetscViewerASCIIPrintf(viewer,"\n");
        }
        VecRestoreArrayRead(xin,&array);
      }
    } else if (format == PETSC_VIEWER_ASCII_INFO || format == PETSC_VIEWER_ASCII_INFO_DETAIL) {
      /* No info */
    } else {
      if (format != PETSC_VIEWER_ASCII_COMMON) {PetscViewerASCIIPrintf(viewer,"Process [%d]\n",rank);}
      cnt = 0;
      for (i=0; i<xin->map->n; i++) {
        if (format == PETSC_VIEWER_ASCII_INDEX) {
          PetscViewerASCIIPrintf(viewer,"%D: ",cnt++);
        }
#if defined(PETSC_USE_COMPLEX)
        if (PetscImaginaryPart(xarray[i]) > 0.0) {
          PetscViewerASCIIPrintf(viewer,"%g + %g i\n",(double)PetscRealPart(xarray[i]),(double)PetscImaginaryPart(xarray[i]));
        } else if (PetscImaginaryPart(xarray[i]) < 0.0) {
          PetscViewerASCIIPrintf(viewer,"%g - %g i\n",(double)PetscRealPart(xarray[i]),-(double)PetscImaginaryPart(xarray[i]));
        } else {
          PetscViewerASCIIPrintf(viewer,"%g\n",(double)PetscRealPart(xarray[i]));
        }
#else
        PetscViewerASCIIPrintf(viewer,"%g\n",(double)xarray[i]);
#endif
      }
      /* receive and print messages */
      for (j=1; j<size; j++) {
        MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,PetscObjectComm((PetscObject)xin),&status);
        MPI_Get_count(&status,MPIU_SCALAR,&n);
        if (format != PETSC_VIEWER_ASCII_COMMON) {
          PetscViewerASCIIPrintf(viewer,"Process [%d]\n",j);
        }
        for (i=0; i<n; i++) {
          if (format == PETSC_VIEWER_ASCII_INDEX) {
            PetscViewerASCIIPrintf(viewer,"%D: ",cnt++);
          }
#if defined(PETSC_USE_COMPLEX)
          if (PetscImaginaryPart(values[i]) > 0.0) {
            PetscViewerASCIIPrintf(viewer,"%g + %g i\n",(double)PetscRealPart(values[i]),(double)PetscImaginaryPart(values[i]));
          } else if (PetscImaginaryPart(values[i]) < 0.0) {
            PetscViewerASCIIPrintf(viewer,"%g - %g i\n",(double)PetscRealPart(values[i]),-(double)PetscImaginaryPart(values[i]));
          } else {
            PetscViewerASCIIPrintf(viewer,"%g\n",(double)PetscRealPart(values[i]));
          }
#else
          PetscViewerASCIIPrintf(viewer,"%g\n",(double)values[i]);
#endif
        }
      }
    }
    PetscFree(values);
  } else {
    if (format == PETSC_VIEWER_ASCII_INFO || format == PETSC_VIEWER_ASCII_INFO_DETAIL) {
      /* Rank 0 is not trying to receive anything, so don't send anything */
    } else {
      if (format == PETSC_VIEWER_ASCII_MATLAB || format == PETSC_VIEWER_ASCII_VTK_DEPRECATED || format == PETSC_VIEWER_ASCII_VTK_CELL_DEPRECATED) {
        /* this may be a collective operation so make sure everyone calls it */
        PetscObjectGetName((PetscObject)xin,&name);
      }
      MPI_Send((void*)xarray,xin->map->n,MPIU_SCALAR,0,tag,PetscObjectComm((PetscObject)xin));
    }
  }
  PetscViewerFlush(viewer);
  VecRestoreArrayRead(xin,&xarray);
  return(0);
}
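
/*
   Usage sketch (standard PETSc API assumed): vectors written through the
   binary branch below can be read back with VecLoad():

     PetscViewer viewer;
     PetscViewerBinaryOpen(PETSC_COMM_WORLD,"x.dat",FILE_MODE_WRITE,&viewer);
     VecView(x,viewer);
     PetscViewerDestroy(&viewer);
*/
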
PetscErrorCode VecView_MPI_Binary(Vec xin,PetscViewer viewer)
{
  return VecView_Binary(xin,viewer);
}

#include <petscdraw.h>
PetscErrorCode VecView_MPI_Draw_LG(Vec xin,PetscViewer viewer)
{
  PetscDraw         draw;
  PetscBool         isnull;
  PetscDrawLG       lg;
  PetscMPIInt       i,size,rank,n,N,*lens = NULL,*disp = NULL;
  PetscReal         *values, *xx = NULL,*yy = NULL;
  const PetscScalar *xarray;
  int               colors[] = {PETSC_DRAW_RED};
  PetscErrorCode    ierr;

  PetscViewerDrawGetDraw(viewer,0,&draw);
  PetscDrawIsNull(draw,&isnull);
  if (isnull) return(0);
  MPI_Comm_rank(PetscObjectComm((PetscObject)xin),&rank);
  MPI_Comm_size(PetscObjectComm((PetscObject)xin),&size);
  PetscMPIIntCast(xin->map->n,&n);
  PetscMPIIntCast(xin->map->N,&N);

  VecGetArrayRead(xin,&xarray);
#if defined(PETSC_USE_COMPLEX)
  PetscMalloc1(n+1,&values);
  for (i=0; i<n; i++) values[i] = PetscRealPart(xarray[i]);
#else
  values = (PetscReal*)xarray;
#endif
  if (!rank) {
    PetscMalloc2(N,&xx,N,&yy);
    for (i=0; i<N; i++) xx[i] = (PetscReal)i;
    PetscMalloc2(size,&lens,size,&disp);
    for (i=0; i<size; i++) lens[i] = (PetscMPIInt)xin->map->range[i+1] - (PetscMPIInt)xin->map->range[i];
    for (i=0; i<size; i++) disp[i] = (PetscMPIInt)xin->map->range[i];
  }
  MPI_Gatherv(values,n,MPIU_REAL,yy,lens,disp,MPIU_REAL,0,PetscObjectComm((PetscObject)xin));
  PetscFree2(lens,disp);
#if defined(PETSC_USE_COMPLEX)
  PetscFree(values);
#endif
  VecRestoreArrayRead(xin,&xarray);

  PetscViewerDrawGetDrawLG(viewer,0,&lg);
  PetscDrawLGReset(lg);
  PetscDrawLGSetDimension(lg,1);
  PetscDrawLGSetColors(lg,colors);
  if (!rank) {
    PetscDrawLGAddPoints(lg,N,&xx,&yy);
    PetscFree2(xx,yy);
  }
  PetscDrawLGDraw(lg);
  PetscDrawLGSave(lg);
  return(0);
}
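
/*
   Note: both draw routines are reached through the draw viewer; the
   PETSC_VIEWER_DRAW_LG format selects the line-graph routine above, while
   the default format draws the vector directly (VecView_MPI_Draw below).
   A sketch, assuming the standard PETSc API:

     PetscViewerPushFormat(PETSC_VIEWER_DRAW_WORLD,PETSC_VIEWER_DRAW_LG);
     VecView(x,PETSC_VIEWER_DRAW_WORLD);
     PetscViewerPopFormat(PETSC_VIEWER_DRAW_WORLD);
*/
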
PetscErrorCode VecView_MPI_Draw(Vec xin,PetscViewer viewer)
{
  PetscErrorCode    ierr;
  PetscMPIInt       rank,size,tag = ((PetscObject)viewer)->tag;
  PetscInt          i,start,end;
  MPI_Status        status;
  PetscReal         min,max,tmp = 0.0;
  PetscDraw         draw;
  PetscBool         isnull;
  PetscDrawAxis     axis;
  const PetscScalar *xarray;

  PetscViewerDrawGetDraw(viewer,0,&draw);
  PetscDrawIsNull(draw,&isnull);
  if (isnull) return(0);
  MPI_Comm_size(PetscObjectComm((PetscObject)xin),&size);
  MPI_Comm_rank(PetscObjectComm((PetscObject)xin),&rank);

  VecMin(xin,NULL,&min);
  VecMax(xin,NULL,&max);
  if (min == max) {
    min -= 1.e-5;
    max += 1.e-5;
  }

  PetscDrawCheckResizedWindow(draw);
  PetscDrawClear(draw);

  PetscDrawAxisCreate(draw,&axis);
  PetscDrawAxisSetLimits(axis,0.0,(PetscReal)xin->map->N,min,max);
  PetscDrawAxisDraw(axis);
  PetscDrawAxisDestroy(&axis);

  /* draw local part of vector */
  VecGetArrayRead(xin,&xarray);
  VecGetOwnershipRange(xin,&start,&end);
  if (rank < size-1) { /* send last local value to right neighbor */
    MPI_Send((void*)&xarray[xin->map->n-1],1,MPIU_REAL,rank+1,tag,PetscObjectComm((PetscObject)xin));
  }
  if (rank) { /* receive value from left neighbor */
    MPI_Recv(&tmp,1,MPIU_REAL,rank-1,tag,PetscObjectComm((PetscObject)xin),&status);
  }
  PetscDrawCollectiveBegin(draw);
  if (rank) {
    PetscDrawLine(draw,(PetscReal)start-1,tmp,(PetscReal)start,PetscRealPart(xarray[0]),PETSC_DRAW_RED);
  }
  for (i=1; i<xin->map->n; i++) {
    PetscDrawLine(draw,(PetscReal)(i-1+start),PetscRealPart(xarray[i-1]),(PetscReal)(i+start),PetscRealPart(xarray[i]),PETSC_DRAW_RED);
  }
  PetscDrawCollectiveEnd(draw);
  VecRestoreArrayRead(xin,&xarray);

  PetscDrawFlush(draw);
  PetscDrawPause(draw);
  PetscDrawSave(draw);
  return(0);
}

#if defined(PETSC_HAVE_MATLAB_ENGINE)
PetscErrorCode VecView_MPI_Matlab(Vec xin,PetscViewer viewer)
{
  PetscErrorCode    ierr;
  PetscMPIInt       rank,size,*lens;
  PetscInt          i,N = xin->map->N;
  const PetscScalar *xarray;
  PetscScalar       *xx;

  VecGetArrayRead(xin,&xarray);
  MPI_Comm_rank(PetscObjectComm((PetscObject)xin),&rank);
  MPI_Comm_size(PetscObjectComm((PetscObject)xin),&size);
  if (!rank) {
    PetscMalloc1(N,&xx);
    PetscMalloc1(size,&lens);
    for (i=0; i<size; i++) lens[i] = xin->map->range[i+1] - xin->map->range[i];

    MPI_Gatherv((void*)xarray,xin->map->n,MPIU_SCALAR,xx,lens,xin->map->range,MPIU_SCALAR,0,PetscObjectComm((PetscObject)xin));
    PetscFree(lens);

    PetscObjectName((PetscObject)xin);
    PetscViewerMatlabPutArray(viewer,N,1,xx,((PetscObject)xin)->name);

    PetscFree(xx);
  } else {
    MPI_Gatherv((void*)xarray,xin->map->n,MPIU_SCALAR,0,0,0,MPIU_SCALAR,0,PetscObjectComm((PetscObject)xin));
  }
  VecRestoreArrayRead(xin,&xarray);
  return(0);
}
#endif

#if defined(PETSC_HAVE_ADIOS)
#include <adios.h>
#include <adios_read.h>
#include <petsc/private/vieweradiosimpl.h>
#include <petsc/private/viewerimpl.h>

PetscErrorCode VecView_MPI_ADIOS(Vec xin, PetscViewer viewer)
{
  PetscViewer_ADIOS *adios = (PetscViewer_ADIOS*)viewer->data;
  PetscErrorCode    ierr;
  const char        *vecname;
  int64_t           id;
  PetscInt          n,N,rstart;
  const PetscScalar *array;
  char              nglobalname[16],nlocalname[16],coffset[16];

  PetscObjectGetName((PetscObject) xin, &vecname);

  VecGetLocalSize(xin,&n);
  VecGetSize(xin,&N);
  VecGetOwnershipRange(xin,&rstart,NULL);

  sprintf(nlocalname,"%d",(int)n);
  sprintf(nglobalname,"%d",(int)N);
  sprintf(coffset,"%d",(int)rstart);
  id = adios_define_var(Petsc_adios_group,vecname,"",adios_double,nlocalname,nglobalname,coffset);
  VecGetArrayRead(xin,&array);
  adios_write_byid(adios->adios_handle,id,array);
  VecRestoreArrayRead(xin,&array);

  return(0);
}
#endif

#if defined(PETSC_HAVE_ADIOS2)
#include <adios2_c.h>
#include <petsc/private/vieweradios2impl.h>
#include <petsc/private/viewerimpl.h>

PetscErrorCode VecView_MPI_ADIOS2(Vec xin, PetscViewer viewer)
{
  PetscErrorCode     ierr;
  PetscViewer_ADIOS2 *adios2 = (PetscViewer_ADIOS2*)viewer->data;
  PetscInt           n,N,rstart;
  const char         *vecname;
  const PetscScalar  *array;

  PetscObjectGetName((PetscObject) xin, &vecname);
  VecGetLocalSize(xin,&n);
  VecGetSize(xin,&N);
  VecGetOwnershipRange(xin,&rstart,NULL);

  VecGetArrayRead(xin,&array);
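  /* NOTE: unlike the ADIOS1 routine above, no write call is issued between
     getting and restoring the array; in this release the ADIOS2 viewer
     appears to be an unfinished stub. */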
  VecRestoreArrayRead(xin,&array);
  return(0);
}
#endif

#if defined(PETSC_HAVE_HDF5)
PetscErrorCode VecView_MPI_HDF5(Vec xin, PetscViewer viewer)
{
  PetscViewer_HDF5  *hdf5 = (PetscViewer_HDF5*) viewer->data;
  /* TODO: It looks like we can remove the H5Sclose(filespace) and H5Dget_space(dset_id). Why do we do this? */
  hid_t             filespace;  /* file dataspace identifier */
  hid_t             chunkspace; /* chunk dataset property identifier */
  hid_t             dset_id;    /* dataset identifier */
  hid_t             memspace;   /* memory dataspace identifier */
  hid_t             file_id;
  hid_t             group;
  hid_t             memscalartype;  /* scalar type for mem (H5T_NATIVE_FLOAT or H5T_NATIVE_DOUBLE) */
  hid_t             filescalartype; /* scalar type for file (H5T_NATIVE_FLOAT or H5T_NATIVE_DOUBLE) */
  PetscInt          bs = PetscAbs(xin->map->bs);
  hsize_t           dim;
  hsize_t           maxDims[4], dims[4], chunkDims[4], count[4], offset[4];
  PetscInt          timestep;
  PetscInt          low;
  hsize_t           chunksize;
  const PetscScalar *x;
  const char        *vecname;
  PetscErrorCode    ierr;
  PetscBool         dim2;
  PetscBool         spoutput;

  PetscViewerHDF5OpenGroup(viewer, &file_id, &group);
  PetscViewerHDF5GetTimestep(viewer, &timestep);
  PetscViewerHDF5GetBaseDimension2(viewer,&dim2);
  PetscViewerHDF5GetSPOutput(viewer,&spoutput);

  /* Create the dataspace for the dataset.
   *
   * dims - holds the current dimensions of the dataset
   *
   * maxDims - holds the maximum dimensions of the dataset (unlimited
   * for the number of time steps with the current dimensions for the
   * other dimensions; so only additional time steps can be added).
   *
   * chunkDims - holds the size of a single time step (required to
   * permit extending dataset).
   */
  dim       = 0;
  chunksize = 1;
  if (timestep >= 0) {
    dims[dim]      = timestep+1;
    maxDims[dim]   = H5S_UNLIMITED;
    chunkDims[dim] = 1;
    ++dim;
  }
  PetscHDF5IntCast(xin->map->N/bs,dims + dim);

  maxDims[dim]   = dims[dim];
  chunkDims[dim] = PetscMax(1, dims[dim]);
  chunksize     *= chunkDims[dim];
  ++dim;
  if (bs > 1 || dim2) {
    dims[dim]      = bs;
    maxDims[dim]   = dims[dim];
    chunkDims[dim] = PetscMax(1, dims[dim]);
    chunksize     *= chunkDims[dim];
    ++dim;
  }
#if defined(PETSC_USE_COMPLEX)
  dims[dim]      = 2;
  maxDims[dim]   = dims[dim];
  chunkDims[dim] = PetscMax(1, dims[dim]);
  chunksize     *= chunkDims[dim];
  /* hdf5 chunks must be less than 4GB */
  if (chunksize > PETSC_HDF5_MAX_CHUNKSIZE/64) {
    if (bs > 1 || dim2) {
      if (chunkDims[dim-2] > (PetscInt)PetscSqrtReal((PetscReal)(PETSC_HDF5_MAX_CHUNKSIZE/128))) {
        chunkDims[dim-2] = (PetscInt)PetscSqrtReal((PetscReal)(PETSC_HDF5_MAX_CHUNKSIZE/128));
      }
      if (chunkDims[dim-1] > (PetscInt)PetscSqrtReal((PetscReal)(PETSC_HDF5_MAX_CHUNKSIZE/128))) {
        chunkDims[dim-1] = (PetscInt)PetscSqrtReal((PetscReal)(PETSC_HDF5_MAX_CHUNKSIZE/128));
      }
    } else {
      chunkDims[dim-1] = PETSC_HDF5_MAX_CHUNKSIZE/128;
    }
  }
  ++dim;
#else
  /* hdf5 chunks must be less than 4GB */
  if (chunksize > PETSC_HDF5_MAX_CHUNKSIZE/64) {
    if (bs > 1 || dim2) {
      if (chunkDims[dim-2] > (PetscInt)PetscSqrtReal((PetscReal)(PETSC_HDF5_MAX_CHUNKSIZE/64))) {
        chunkDims[dim-2] = (PetscInt)PetscSqrtReal((PetscReal)(PETSC_HDF5_MAX_CHUNKSIZE/64));
      }
      if (chunkDims[dim-1] > (PetscInt)PetscSqrtReal((PetscReal)(PETSC_HDF5_MAX_CHUNKSIZE/64))) {
        chunkDims[dim-1] = (PetscInt)PetscSqrtReal((PetscReal)(PETSC_HDF5_MAX_CHUNKSIZE/64));
      }
    } else {
      chunkDims[dim-1] = PETSC_HDF5_MAX_CHUNKSIZE/64;
    }
  }
#endif

  PetscStackCallHDF5Return(filespace,H5Screate_simple,(dim, dims, maxDims));

#if defined(PETSC_USE_REAL_SINGLE)
  memscalartype  = H5T_NATIVE_FLOAT;
  filescalartype = H5T_NATIVE_FLOAT;
#elif defined(PETSC_USE_REAL___FLOAT128)
#error "HDF5 output with 128 bit floats not supported."
#elif defined(PETSC_USE_REAL___FP16)
#error "HDF5 output with 16 bit floats not supported."
#else
  memscalartype = H5T_NATIVE_DOUBLE;
  if (spoutput == PETSC_TRUE) filescalartype = H5T_NATIVE_FLOAT;
  else filescalartype = H5T_NATIVE_DOUBLE;
#endif

  /* Create the dataset with default properties and close filespace */
  PetscObjectGetName((PetscObject) xin, &vecname);
  if (H5Lexists(group, vecname, H5P_DEFAULT) < 1) {
    /* Create chunk */
    PetscStackCallHDF5Return(chunkspace,H5Pcreate,(H5P_DATASET_CREATE));
    PetscStackCallHDF5(H5Pset_chunk,(chunkspace, dim, chunkDims));

    PetscStackCallHDF5Return(dset_id,H5Dcreate2,(group, vecname, filescalartype, filespace, H5P_DEFAULT, chunkspace, H5P_DEFAULT));
    PetscStackCallHDF5(H5Pclose,(chunkspace));
  } else {
    PetscStackCallHDF5Return(dset_id,H5Dopen2,(group, vecname, H5P_DEFAULT));
    PetscStackCallHDF5(H5Dset_extent,(dset_id, dims));
  }
  PetscStackCallHDF5(H5Sclose,(filespace));

  /* Each process defines a dataset and writes it to the hyperslab in the file */
  dim = 0;
  if (timestep >= 0) {
    count[dim] = 1;
    ++dim;
  }
  PetscHDF5IntCast(xin->map->n/bs,count + dim);
  ++dim;
  if (bs > 1 || dim2) {
    count[dim] = bs;
    ++dim;
  }
#if defined(PETSC_USE_COMPLEX)
  count[dim] = 2;
  ++dim;
#endif
  if (xin->map->n > 0 || H5_VERSION_GE(1,10,0)) {
    PetscStackCallHDF5Return(memspace,H5Screate_simple,(dim, count, NULL));
  } else {
    /* Can't create dataspace with zero for any dimension, so create null dataspace. */
    PetscStackCallHDF5Return(memspace,H5Screate,(H5S_NULL));
  }

  /* Select hyperslab in the file */
  VecGetOwnershipRange(xin, &low, NULL);
  dim = 0;
  if (timestep >= 0) {
    offset[dim] = timestep;
    ++dim;
  }
  PetscHDF5IntCast(low/bs,offset + dim);
  ++dim;
  if (bs > 1 || dim2) {
    offset[dim] = 0;
    ++dim;
  }
#if defined(PETSC_USE_COMPLEX)
  offset[dim] = 0;
  ++dim;
#endif
  if (xin->map->n > 0 || H5_VERSION_GE(1,10,0)) {
    PetscStackCallHDF5Return(filespace,H5Dget_space,(dset_id));
    PetscStackCallHDF5(H5Sselect_hyperslab,(filespace, H5S_SELECT_SET, offset, NULL, count, NULL));
  } else {
    /* Create null filespace to match null memspace. */
    PetscStackCallHDF5Return(filespace,H5Screate,(H5S_NULL));
  }

  VecGetArrayRead(xin, &x);
  PetscStackCallHDF5(H5Dwrite,(dset_id, memscalartype, memspace, filespace, hdf5->dxpl_id, x));
  PetscStackCallHDF5(H5Fflush,(file_id, H5F_SCOPE_GLOBAL));
  VecRestoreArrayRead(xin, &x);

  /* Close/release resources */
  PetscStackCallHDF5(H5Gclose,(group));
  PetscStackCallHDF5(H5Sclose,(filespace));
  PetscStackCallHDF5(H5Sclose,(memspace));
  PetscStackCallHDF5(H5Dclose,(dset_id));

#if defined(PETSC_USE_COMPLEX)
  {
    PetscBool tru = PETSC_TRUE;
    PetscViewerHDF5WriteObjectAttribute(viewer,(PetscObject)xin,"complex",PETSC_BOOL,&tru);
  }
#endif
  PetscInfo1(xin,"Wrote Vec object with name %s\n",vecname);
  return(0);
}
#endif
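
/*
   VecView_MPI below dispatches on the viewer type chosen by the caller or by
   command-line options such as -vec_view, -vec_view binary, or -vec_view draw.
   A minimal sketch, assuming the standard PETSc API:

     VecView(x,PETSC_VIEWER_STDOUT_WORLD);   (takes the ASCII branch)
*/
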
PETSC_EXTERN PetscErrorCode VecView_MPI(Vec xin,PetscViewer viewer)
{
  PetscBool iascii,isbinary,isdraw;
#if defined(PETSC_HAVE_MATHEMATICA)
  PetscBool ismathematica;
#endif
#if defined(PETSC_HAVE_HDF5)
  PetscBool ishdf5;
#endif
#if defined(PETSC_HAVE_MATLAB_ENGINE)
  PetscBool ismatlab;
#endif
#if defined(PETSC_HAVE_ADIOS)
  PetscBool isadios;
#endif
#if defined(PETSC_HAVE_ADIOS2)
  PetscBool isadios2;
#endif
  PetscBool isglvis;

  PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
  PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);
  PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw);
#if defined(PETSC_HAVE_MATHEMATICA)
  PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERMATHEMATICA,&ismathematica);
#endif
#if defined(PETSC_HAVE_HDF5)
  PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERHDF5,&ishdf5);
#endif
#if defined(PETSC_HAVE_MATLAB_ENGINE)
  PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERMATLAB,&ismatlab);
#endif
  PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERGLVIS,&isglvis);
#if defined(PETSC_HAVE_ADIOS)
  PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERADIOS,&isadios);
#endif
#if defined(PETSC_HAVE_ADIOS2)
  PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERADIOS2,&isadios2);
#endif
  if (iascii) {
    VecView_MPI_ASCII(xin,viewer);
  } else if (isbinary) {
    VecView_MPI_Binary(xin,viewer);
  } else if (isdraw) {
    PetscViewerFormat format;
    PetscViewerGetFormat(viewer,&format);
    if (format == PETSC_VIEWER_DRAW_LG) {
      VecView_MPI_Draw_LG(xin,viewer);
    } else {
      VecView_MPI_Draw(xin,viewer);
    }
#if defined(PETSC_HAVE_MATHEMATICA)
  } else if (ismathematica) {
    PetscViewerMathematicaPutVector(viewer,xin);
#endif
#if defined(PETSC_HAVE_HDF5)
  } else if (ishdf5) {
    VecView_MPI_HDF5(xin,viewer);
#endif
#if defined(PETSC_HAVE_ADIOS)
  } else if (isadios) {
    VecView_MPI_ADIOS(xin,viewer);
#endif
#if defined(PETSC_HAVE_ADIOS2)
  } else if (isadios2) {
    VecView_MPI_ADIOS2(xin,viewer);
#endif
#if defined(PETSC_HAVE_MATLAB_ENGINE)
  } else if (ismatlab) {
    VecView_MPI_Matlab(xin,viewer);
#endif
  } else if (isglvis) {
    VecView_GLVis(xin,viewer);
  }
  return(0);
}

PetscErrorCode VecGetSize_MPI(Vec xin,PetscInt *N)
{
  *N = xin->map->N;
  return(0);
}

PetscErrorCode VecGetValues_MPI(Vec xin,PetscInt ni,const PetscInt ix[],PetscScalar y[])
{
  const PetscScalar *xx;
  PetscInt          i,tmp,start = xin->map->range[xin->stash.rank];
  PetscErrorCode    ierr;

  VecGetArrayRead(xin,&xx);
  for (i=0; i<ni; i++) {
    if (xin->stash.ignorenegidx && ix[i] < 0) continue;
    tmp = ix[i] - start;
    if (PetscUnlikelyDebug(tmp < 0 || tmp >= xin->map->n)) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Can only get local values, trying %D",ix[i]);
    y[i] = xx[tmp];
  }
  VecRestoreArrayRead(xin,&xx);
  return(0);
}
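
/*
   Usage sketch (standard PETSc API assumed): VecGetValues on an MPI vector
   can only access locally owned entries, so callers restrict indices to the
   ownership range:

     PetscInt    lo,hi;
     PetscScalar v;
     VecGetOwnershipRange(x,&lo,&hi);
     VecGetValues(x,1,&lo,&v);   (reads the first locally owned entry)
*/
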
PetscErrorCode VecSetValues_MPI(Vec xin,PetscInt ni,const PetscInt ix[],const PetscScalar y[],InsertMode addv)
{
  PetscMPIInt rank    = xin->stash.rank;
  PetscInt    *owners = xin->map->range,start = owners[rank];
  PetscInt    end     = owners[rank+1],i,row;
  PetscScalar *xx;

  if (PetscDefined(USE_DEBUG)) {
    if (xin->stash.insertmode == INSERT_VALUES && addv == ADD_VALUES) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"You have already inserted values; you cannot now add");
    else if (xin->stash.insertmode == ADD_VALUES && addv == INSERT_VALUES) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"You have already added values; you cannot now insert");
  }
  VecGetArray(xin,&xx);
  xin->stash.insertmode = addv;

  if (addv == INSERT_VALUES) {
    for (i=0; i<ni; i++) {
      if (xin->stash.ignorenegidx && ix[i] < 0) continue;
      if (PetscUnlikelyDebug(ix[i] < 0)) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Out of range index value %D cannot be negative",ix[i]);
      if ((row = ix[i]) >= start && row < end) {
        xx[row-start] = y[i];
      } else if (!xin->stash.donotstash) {
        if (PetscUnlikelyDebug(ix[i] >= xin->map->N)) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Out of range index value %D maximum %D",ix[i],xin->map->N);
        VecStashValue_Private(&xin->stash,row,y[i]);
      }
    }
  } else {
    for (i=0; i<ni; i++) {
      if (xin->stash.ignorenegidx && ix[i] < 0) continue;
      if (PetscUnlikelyDebug(ix[i] < 0)) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Out of range index value %D cannot be negative",ix[i]);
      if ((row = ix[i]) >= start && row < end) {
        xx[row-start] += y[i];
      } else if (!xin->stash.donotstash) {
        if (PetscUnlikelyDebug(ix[i] >= xin->map->N)) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Out of range index value %D maximum %D",ix[i],xin->map->N);
        VecStashValue_Private(&xin->stash,row,y[i]);
      }
    }
  }
  VecRestoreArray(xin,&xx);
  return(0);
}

PetscErrorCode VecSetValuesBlocked_MPI(Vec xin,PetscInt ni,const PetscInt ix[],const PetscScalar yin[],InsertMode addv)
{
  PetscMPIInt rank    = xin->stash.rank;
  PetscInt    *owners = xin->map->range,start = owners[rank];
  PetscInt    end     = owners[rank+1],i,row,bs = PetscAbs(xin->map->bs),j;
  PetscScalar *xx,*y  = (PetscScalar*)yin;

  VecGetArray(xin,&xx);
  if (PetscDefined(USE_DEBUG)) {
    if (xin->stash.insertmode == INSERT_VALUES && addv == ADD_VALUES) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"You have already inserted values; you cannot now add");
    else if (xin->stash.insertmode == ADD_VALUES && addv == INSERT_VALUES) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"You have already added values; you cannot now insert");
  }
  xin->stash.insertmode = addv;

  if (addv == INSERT_VALUES) {
    for (i=0; i<ni; i++) {
      if ((row = bs*ix[i]) >= start && row < end) {
        for (j=0; j<bs; j++) xx[row-start+j] = y[j];
      } else if (!xin->stash.donotstash) {
        if (ix[i] < 0) { y += bs; continue; }
        if (PetscUnlikelyDebug(ix[i] >= xin->map->N)) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Out of range index value %D max %D",ix[i],xin->map->N);
        VecStashValuesBlocked_Private(&xin->bstash,ix[i],y);
      }
      y += bs;
    }
  } else {
    for (i=0; i<ni; i++) {
      if ((row = bs*ix[i]) >= start && row < end) {
        for (j=0; j<bs; j++) xx[row-start+j] += y[j];
      } else if (!xin->stash.donotstash) {
        if (ix[i] < 0) { y += bs; continue; }
        if (PetscUnlikelyDebug(ix[i] >= xin->map->N)) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Out of range index value %D max %D",ix[i],xin->map->N);
        VecStashValuesBlocked_Private(&xin->bstash,ix[i],y);
      }
      y += bs;
    }
  }
  VecRestoreArray(xin,&xx);
  return(0);
}

/*
   Since nsends or nreceives may be zero we add 1 in certain mallocs
   to make sure we never malloc an empty one.
*/
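
/*
   Usage sketch (standard PETSc API assumed): the two assembly routines below
   complete the standard pattern for setting off-process entries:

     VecSetValues(x,n,idx,vals,ADD_VALUES);   (may target remote rows)
     VecAssemblyBegin(x);                     (scatter stashed entries)
     VecAssemblyEnd(x);                       (apply received entries)
*/
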
PetscErrorCode VecAssemblyBegin_MPI(Vec xin)
{
  PetscInt    *owners = xin->map->range,*bowners,i,bs,nstash,reallocs;
  PetscMPIInt size;
  InsertMode  addv;
  MPI_Comm    comm;

  PetscObjectGetComm((PetscObject)xin,&comm);
  if (xin->stash.donotstash) return(0);

  MPIU_Allreduce((PetscEnum*)&xin->stash.insertmode,(PetscEnum*)&addv,1,MPIU_ENUM,MPI_BOR,comm);
  if (addv == (ADD_VALUES|INSERT_VALUES)) SETERRQ(comm,PETSC_ERR_ARG_NOTSAMETYPE,"Some processors inserted values while others added");
  xin->stash.insertmode  = addv; /* in case this processor had no cache */
  xin->bstash.insertmode = addv; /* Block stash implicitly tracks InsertMode of scalar stash */

  VecGetBlockSize(xin,&bs);
  MPI_Comm_size(PetscObjectComm((PetscObject)xin),&size);
  if (!xin->bstash.bowners && xin->map->bs != -1) {
    PetscMalloc1(size+1,&bowners);
    for (i=0; i<size+1; i++) bowners[i] = owners[i]/bs;
    xin->bstash.bowners = bowners;
  } else bowners = xin->bstash.bowners;

  VecStashScatterBegin_Private(&xin->stash,owners);
  VecStashScatterBegin_Private(&xin->bstash,bowners);
  VecStashGetInfo_Private(&xin->stash,&nstash,&reallocs);
  PetscInfo2(xin,"Stash has %D entries, uses %D mallocs.\n",nstash,reallocs);
  VecStashGetInfo_Private(&xin->bstash,&nstash,&reallocs);
  PetscInfo2(xin,"Block-Stash has %D entries, uses %D mallocs.\n",nstash,reallocs);
  return(0);
}

PetscErrorCode VecAssemblyEnd_MPI(Vec vec)
{
  PetscInt    base,i,j,*row,flg,bs;
  PetscMPIInt n;
  PetscScalar *val,*vv,*array,*xarray;

  if (!vec->stash.donotstash) {
    VecGetArray(vec,&xarray);
    VecGetBlockSize(vec,&bs);
    base = vec->map->range[vec->stash.rank];

    /* Process the stash */
    while (1) {
      VecStashScatterGetMesg_Private(&vec->stash,&n,&row,&val,&flg);
      if (!flg) break;
      if (vec->stash.insertmode == ADD_VALUES) {
        for (i=0; i<n; i++) xarray[row[i] - base] += val[i];
      } else if (vec->stash.insertmode == INSERT_VALUES) {
        for (i=0; i<n; i++) xarray[row[i] - base] = val[i];
      } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_CORRUPT,"Insert mode is not set correctly; corrupted vector");
    }
    VecStashScatterEnd_Private(&vec->stash);

    /* now process the block-stash */
    while (1) {
      VecStashScatterGetMesg_Private(&vec->bstash,&n,&row,&val,&flg);
      if (!flg) break;
      for (i=0; i<n; i++) {
        array = xarray+row[i]*bs-base;
        vv    = val+i*bs;
        if (vec->stash.insertmode == ADD_VALUES) {
          for (j=0; j<bs; j++) array[j] += vv[j];
        } else if (vec->stash.insertmode == INSERT_VALUES) {
          for (j=0; j<bs; j++) array[j] = vv[j];
        } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_CORRUPT,"Insert mode is not set correctly; corrupted vector");
      }
    }
    VecStashScatterEnd_Private(&vec->bstash);
    VecRestoreArray(vec,&xarray);
  }
  vec->stash.insertmode = NOT_SET_VALUES;
  return(0);
}