Actual source code: pdvec.c
petsc-3.8.0 2017-09-26
2: /*
3: Code for some of the parallel vector primitives.
4: */
5: #include <../src/vec/vec/impls/mpi/pvecimpl.h>
6: #include <petsc/private/glvisviewerimpl.h>
7: #include <petsc/private/glvisvecimpl.h>
8: #include <petscviewerhdf5.h>
10: PetscErrorCode VecDestroy_MPI(Vec v)
11: {
12: Vec_MPI *x = (Vec_MPI*)v->data;
16: #if defined(PETSC_USE_LOG)
17: PetscLogObjectState((PetscObject)v,"Length=%D",v->map->N);
18: #endif
19: if (!x) return(0);
20: PetscFree(x->array_allocated);
22: /* Destroy local representation of vector if it exists */
23: if (x->localrep) {
24: VecDestroy(&x->localrep);
25: VecScatterDestroy(&x->localupdate);
26: }
27: VecAssemblyReset_MPI(v);
29: /* Destroy the stashes: note the order - so that the tags are freed properly */
30: VecStashDestroy_Private(&v->bstash);
31: VecStashDestroy_Private(&v->stash);
32: PetscFree(v->data);
33: return(0);
34: }
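
A minimal usage sketch (editor illustration; not part of pdvec.c): user code never calls VecDestroy_MPI() directly; it is reached through the function table that VecCreate_MPI() installs.

    #include <petscvec.h>

    int main(int argc,char **argv)
    {
      PetscErrorCode ierr;
      Vec            v;

      ierr = PetscInitialize(&argc,&argv,NULL,NULL);if (ierr) return ierr;
      ierr = VecCreate(PETSC_COMM_WORLD,&v);CHKERRQ(ierr);
      ierr = VecSetSizes(v,PETSC_DECIDE,100);CHKERRQ(ierr);
      ierr = VecSetType(v,VECMPI);CHKERRQ(ierr);
      /* ... fill and use the vector ... */
      ierr = VecDestroy(&v);CHKERRQ(ierr); /* dispatches to VecDestroy_MPI() via v->ops->destroy */
      ierr = PetscFinalize();
      return ierr;
    }
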
36: PetscErrorCode VecView_MPI_ASCII(Vec xin,PetscViewer viewer)
37: {
38: PetscErrorCode ierr;
39: PetscInt i,work = xin->map->n,cnt,len,nLen;
40: PetscMPIInt j,n = 0,size,rank,tag = ((PetscObject)viewer)->tag;
41: MPI_Status status;
42: PetscScalar *values;
43: const PetscScalar *xarray;
44: const char *name;
45: PetscViewerFormat format;
48: VecGetArrayRead(xin,&xarray);
49: /* determine maximum message to arrive */
50: MPI_Comm_rank(PetscObjectComm((PetscObject)xin),&rank);
51: MPI_Reduce(&work,&len,1,MPIU_INT,MPI_MAX,0,PetscObjectComm((PetscObject)xin));
52: MPI_Comm_size(PetscObjectComm((PetscObject)xin),&size);
53: PetscViewerGetFormat(viewer,&format);
54: if (format == PETSC_VIEWER_ASCII_GLVIS) { rank = 0; len = 0; } /* no parallel distributed write support from GLVis */
55: if (!rank) {
56: PetscMalloc1(len,&values);
57: /*
58: MATLAB format and ASCII format are very similar except
59: MATLAB uses %18.16e format while ASCII uses %g
60: */
61: if (format == PETSC_VIEWER_ASCII_MATLAB) {
62: PetscObjectGetName((PetscObject)xin,&name);
63: PetscViewerASCIIPrintf(viewer,"%s = [\n",name);
64: for (i=0; i<xin->map->n; i++) {
65: #if defined(PETSC_USE_COMPLEX)
66: if (PetscImaginaryPart(xarray[i]) > 0.0) {
67: PetscViewerASCIIPrintf(viewer,"%18.16e + %18.16ei\n",(double)PetscRealPart(xarray[i]),(double)PetscImaginaryPart(xarray[i]));
68: } else if (PetscImaginaryPart(xarray[i]) < 0.0) {
69: PetscViewerASCIIPrintf(viewer,"%18.16e - %18.16ei\n",(double)PetscRealPart(xarray[i]),-(double)PetscImaginaryPart(xarray[i]));
70: } else {
71: PetscViewerASCIIPrintf(viewer,"%18.16e\n",(double)PetscRealPart(xarray[i]));
72: }
73: #else
74: PetscViewerASCIIPrintf(viewer,"%18.16e\n",(double)xarray[i]);
75: #endif
76: }
77: /* receive and print messages */
78: for (j=1; j<size; j++) {
79: MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,PetscObjectComm((PetscObject)xin),&status);
80: MPI_Get_count(&status,MPIU_SCALAR,&n);
81: for (i=0; i<n; i++) {
82: #if defined(PETSC_USE_COMPLEX)
83: if (PetscImaginaryPart(values[i]) > 0.0) {
84: PetscViewerASCIIPrintf(viewer,"%18.16e + %18.16ei\n",(double)PetscRealPart(values[i]),(double)PetscImaginaryPart(values[i]));
85: } else if (PetscImaginaryPart(values[i]) < 0.0) {
86: PetscViewerASCIIPrintf(viewer,"%18.16e - %18.16ei\n",(double)PetscRealPart(values[i]),-(double)PetscImaginaryPart(values[i]));
87: } else {
88: PetscViewerASCIIPrintf(viewer,"%18.16e\n",(double)PetscRealPart(values[i]));
89: }
90: #else
91: PetscViewerASCIIPrintf(viewer,"%18.16e\n",(double)values[i]);
92: #endif
93: }
94: }
95: PetscViewerASCIIPrintf(viewer,"];\n");
97: } else if (format == PETSC_VIEWER_ASCII_SYMMODU) {
98: for (i=0; i<xin->map->n; i++) {
99: #if defined(PETSC_USE_COMPLEX)
100: PetscViewerASCIIPrintf(viewer,"%18.16e %18.16e\n",(double)PetscRealPart(xarray[i]),(double)PetscImaginaryPart(xarray[i]));
101: #else
102: PetscViewerASCIIPrintf(viewer,"%18.16e\n",(double)xarray[i]);
103: #endif
104: }
105: /* receive and print messages */
106: for (j=1; j<size; j++) {
107: MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,PetscObjectComm((PetscObject)xin),&status);
108: MPI_Get_count(&status,MPIU_SCALAR,&n);
109: for (i=0; i<n; i++) {
110: #if defined(PETSC_USE_COMPLEX)
111: PetscViewerASCIIPrintf(viewer,"%18.16e %18.16e\n",(double)PetscRealPart(values[i]),(double)PetscImaginaryPart(values[i]));
112: #else
113: PetscViewerASCIIPrintf(viewer,"%18.16e\n",(double)values[i]);
114: #endif
115: }
116: }
117: } else if (format == PETSC_VIEWER_ASCII_VTK || format == PETSC_VIEWER_ASCII_VTK_CELL) {
118: /*
119: state 0: No header has been output
120: state 1: Only POINT_DATA has been output
121: state 2: Only CELL_DATA has been output
122: state 3: Output both, POINT_DATA last
123: state 4: Output both, CELL_DATA last
124: */
125: static PetscInt stateId = -1;
126: int outputState = 0;
127: int doOutput = 0;
128: PetscBool hasState;
129: PetscInt bs, b;
131: if (stateId < 0) {
132: PetscObjectComposedDataRegister(&stateId);
133: }
134: PetscObjectComposedDataGetInt((PetscObject) viewer, stateId, outputState, hasState);
135: if (!hasState) outputState = 0;
137: PetscObjectGetName((PetscObject)xin,&name);
138: VecGetLocalSize(xin, &nLen);
139: PetscMPIIntCast(nLen,&n);
140: VecGetBlockSize(xin, &bs);
141: if (format == PETSC_VIEWER_ASCII_VTK) {
142: if (outputState == 0) {
143: outputState = 1;
144: doOutput = 1;
145: } else if (outputState == 1) doOutput = 0;
146: else if (outputState == 2) {
147: outputState = 3;
148: doOutput = 1;
149: } else if (outputState == 3) doOutput = 0;
150: else if (outputState == 4) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE, "Tried to output POINT_DATA again after intervening CELL_DATA");
152: if (doOutput) {
153: PetscViewerASCIIPrintf(viewer, "POINT_DATA %d\n", xin->map->N/bs);
154: }
155: } else {
156: if (outputState == 0) {
157: outputState = 2;
158: doOutput = 1;
159: } else if (outputState == 1) {
160: outputState = 4;
161: doOutput = 1;
162: } else if (outputState == 2) doOutput = 0;
163: else if (outputState == 3) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE, "Tried to output CELL_DATA again after intervening POINT_DATA");
164: else if (outputState == 4) doOutput = 0;
166: if (doOutput) {
167: PetscViewerASCIIPrintf(viewer, "CELL_DATA %d\n", xin->map->N/bs);
168: }
169: }
170: PetscObjectComposedDataSetInt((PetscObject) viewer, stateId, outputState);
171: if (name) {
172: if (bs == 3) {
173: PetscViewerASCIIPrintf(viewer, "VECTORS %s double\n", name);
174: } else {
175: PetscViewerASCIIPrintf(viewer, "SCALARS %s double %d\n", name, bs);
176: }
177: } else {
178: PetscViewerASCIIPrintf(viewer, "SCALARS scalars double %d\n", bs);
179: }
180: if (bs != 3) {
181: PetscViewerASCIIPrintf(viewer, "LOOKUP_TABLE default\n");
182: }
183: for (i=0; i<n/bs; i++) {
184: for (b=0; b<bs; b++) {
185: if (b > 0) {
186: PetscViewerASCIIPrintf(viewer," ");
187: }
188: PetscViewerASCIIPrintf(viewer,"%g",(double)PetscRealPart(xarray[i*bs+b]));
189: }
190: PetscViewerASCIIPrintf(viewer,"\n");
191: }
192: for (j=1; j<size; j++) {
193: MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,PetscObjectComm((PetscObject)xin),&status);
194: MPI_Get_count(&status,MPIU_SCALAR,&n);
195: for (i=0; i<n/bs; i++) {
196: for (b=0; b<bs; b++) {
197: if (b > 0) {
198: PetscViewerASCIIPrintf(viewer," ");
199: }
200: PetscViewerASCIIPrintf(viewer,"%g",(double)PetscRealPart(values[i*bs+b]));
201: }
202: PetscViewerASCIIPrintf(viewer,"\n");
203: }
204: }
205: } else if (format == PETSC_VIEWER_ASCII_VTK_COORDS) {
206: PetscInt bs, b;
208: VecGetLocalSize(xin, &nLen);
209: PetscMPIIntCast(nLen,&n);
210: VecGetBlockSize(xin, &bs);
211: if ((bs < 1) || (bs > 3)) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE, "VTK can only handle 3D objects, but vector dimension is %d", bs);
213: for (i=0; i<n/bs; i++) {
214: for (b=0; b<bs; b++) {
215: if (b > 0) {
216: PetscViewerASCIIPrintf(viewer," ");
217: }
218: PetscViewerASCIIPrintf(viewer,"%g",(double)PetscRealPart(xarray[i*bs+b]));
219: }
220: for (b=bs; b<3; b++) {
221: PetscViewerASCIIPrintf(viewer," 0.0");
222: }
223: PetscViewerASCIIPrintf(viewer,"\n");
224: }
225: for (j=1; j<size; j++) {
226: MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,PetscObjectComm((PetscObject)xin),&status);
227: MPI_Get_count(&status,MPIU_SCALAR,&n);
228: for (i=0; i<n/bs; i++) {
229: for (b=0; b<bs; b++) {
230: if (b > 0) {
231: PetscViewerASCIIPrintf(viewer," ");
232: }
233: PetscViewerASCIIPrintf(viewer,"%g",(double)PetscRealPart(values[i*bs+b]));
234: }
235: for (b=bs; b<3; b++) {
236: PetscViewerASCIIPrintf(viewer," 0.0");
237: }
238: PetscViewerASCIIPrintf(viewer,"\n");
239: }
240: }
241: } else if (format == PETSC_VIEWER_ASCII_PCICE) {
242: PetscInt bs, b, vertexCount = 1;
244: VecGetLocalSize(xin, &nLen);
245: PetscMPIIntCast(nLen,&n);
246: VecGetBlockSize(xin, &bs);
247: if ((bs < 1) || (bs > 3)) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE, "PCICE can only handle up to 3D objects, but vector dimension is %d", bs);
249: PetscViewerASCIIPrintf(viewer,"%D\n", xin->map->N/bs);
250: for (i=0; i<n/bs; i++) {
251: PetscViewerASCIIPrintf(viewer,"%7D ", vertexCount++);
252: for (b=0; b<bs; b++) {
253: if (b > 0) {
254: PetscViewerASCIIPrintf(viewer," ");
255: }
256: #if !defined(PETSC_USE_COMPLEX)
257: PetscViewerASCIIPrintf(viewer,"% 12.5E",(double)xarray[i*bs+b]);
258: #endif
259: }
260: PetscViewerASCIIPrintf(viewer,"\n");
261: }
262: for (j=1; j<size; j++) {
263: MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,PetscObjectComm((PetscObject)xin),&status);
264: MPI_Get_count(&status,MPIU_SCALAR,&n);
265: for (i=0; i<n/bs; i++) {
266: PetscViewerASCIIPrintf(viewer,"%7D ", vertexCount++);
267: for (b=0; b<bs; b++) {
268: if (b > 0) {
269: PetscViewerASCIIPrintf(viewer," ");
270: }
271: #if !defined(PETSC_USE_COMPLEX)
272: PetscViewerASCIIPrintf(viewer,"% 12.5E",(double)values[i*bs+b]);
273: #endif
274: }
275: PetscViewerASCIIPrintf(viewer,"\n");
276: }
277: }
278: } else if (format == PETSC_VIEWER_ASCII_GLVIS) {
279: /* GLVis ASCII visualization/dump: this function mimics mfem::GridFunction::Save() */
280: const PetscScalar *array;
281: PetscInt i,n,vdim, ordering = 1; /* mfem::FiniteElementSpace::Ordering::byVDIM */
282: PetscContainer glvis_container;
283: PetscViewerGLVisVecInfo glvis_vec_info;
284: PetscViewerGLVisInfo glvis_info;
285: PetscErrorCode ierr;
287: /* mfem::FiniteElementSpace::Save() */
288: VecGetBlockSize(xin,&vdim);
289: PetscViewerASCIIPrintf(viewer,"FiniteElementSpace\n");
290: PetscObjectQuery((PetscObject)xin,"_glvis_info_container",(PetscObject*)&glvis_container);
291: if (!glvis_container) SETERRQ(PetscObjectComm((PetscObject)xin),PETSC_ERR_PLIB,"Missing GLVis container");
292: PetscContainerGetPointer(glvis_container,(void**)&glvis_vec_info);
293: PetscViewerASCIIPrintf(viewer,"%s\n",glvis_vec_info->fec_type);
294: PetscViewerASCIIPrintf(viewer,"VDim: %d\n",vdim);
295: PetscViewerASCIIPrintf(viewer,"Ordering: %d\n",ordering);
296: PetscViewerASCIIPrintf(viewer,"\n");
297: /* mfem::Vector::Print() */
298: PetscObjectQuery((PetscObject)viewer,"_glvis_info_container",(PetscObject*)&glvis_container);
299: if (!glvis_container) SETERRQ(PetscObjectComm((PetscObject)viewer),PETSC_ERR_PLIB,"Missing GLVis container");
300: PetscContainerGetPointer(glvis_container,(void**)&glvis_info);
301: if (glvis_info->enabled) {
302: VecGetLocalSize(xin,&n);
303: VecGetArrayRead(xin,&array);
304: for (i=0;i<n;i++) {
305: PetscViewerASCIIPrintf(viewer,"%g\n",(double)PetscRealPart(array[i]));
306: }
307: VecRestoreArrayRead(xin,&array);
308: }
309: } else if (format == PETSC_VIEWER_ASCII_INFO || format == PETSC_VIEWER_ASCII_INFO_DETAIL) {
310: /* No info */
311: } else {
312: if (format != PETSC_VIEWER_ASCII_COMMON) {PetscViewerASCIIPrintf(viewer,"Process [%d]\n",rank);}
313: cnt = 0;
314: for (i=0; i<xin->map->n; i++) {
315: if (format == PETSC_VIEWER_ASCII_INDEX) {
316: PetscViewerASCIIPrintf(viewer,"%D: ",cnt++);
317: }
318: #if defined(PETSC_USE_COMPLEX)
319: if (PetscImaginaryPart(xarray[i]) > 0.0) {
320: PetscViewerASCIIPrintf(viewer,"%g + %g i\n",(double)PetscRealPart(xarray[i]),(double)PetscImaginaryPart(xarray[i]));
321: } else if (PetscImaginaryPart(xarray[i]) < 0.0) {
322: PetscViewerASCIIPrintf(viewer,"%g - %g i\n",(double)PetscRealPart(xarray[i]),-(double)PetscImaginaryPart(xarray[i]));
323: } else {
324: PetscViewerASCIIPrintf(viewer,"%g\n",(double)PetscRealPart(xarray[i]));
325: }
326: #else
327: PetscViewerASCIIPrintf(viewer,"%g\n",(double)xarray[i]);
328: #endif
329: }
330: /* receive and print messages */
331: for (j=1; j<size; j++) {
332: MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,PetscObjectComm((PetscObject)xin),&status);
333: MPI_Get_count(&status,MPIU_SCALAR,&n);
334: if (format != PETSC_VIEWER_ASCII_COMMON) {
335: PetscViewerASCIIPrintf(viewer,"Process [%d]\n",j);
336: }
337: for (i=0; i<n; i++) {
338: if (format == PETSC_VIEWER_ASCII_INDEX) {
339: PetscViewerASCIIPrintf(viewer,"%D: ",cnt++);
340: }
341: #if defined(PETSC_USE_COMPLEX)
342: if (PetscImaginaryPart(values[i]) > 0.0) {
343: PetscViewerASCIIPrintf(viewer,"%g + %g i\n",(double)PetscRealPart(values[i]),(double)PetscImaginaryPart(values[i]));
344: } else if (PetscImaginaryPart(values[i]) < 0.0) {
345: PetscViewerASCIIPrintf(viewer,"%g - %g i\n",(double)PetscRealPart(values[i]),-(double)PetscImaginaryPart(values[i]));
346: } else {
347: PetscViewerASCIIPrintf(viewer,"%g\n",(double)PetscRealPart(values[i]));
348: }
349: #else
350: PetscViewerASCIIPrintf(viewer,"%g\n",(double)values[i]);
351: #endif
352: }
353: }
354: }
355: PetscFree(values);
356: } else {
357: if (format == PETSC_VIEWER_ASCII_INFO || format == PETSC_VIEWER_ASCII_INFO_DETAIL) {
358: /* Rank 0 is not trying to receive anything, so don't send anything */
359: } else {
360: if (format == PETSC_VIEWER_ASCII_MATLAB || format == PETSC_VIEWER_ASCII_VTK || format == PETSC_VIEWER_ASCII_VTK_CELL) {
361: /* this may be a collective operation so make sure everyone calls it */
362: PetscObjectGetName((PetscObject)xin,&name);
363: }
364: MPI_Send((void*)xarray,xin->map->n,MPIU_SCALAR,0,tag,PetscObjectComm((PetscObject)xin));
365: }
366: }
367: PetscViewerFlush(viewer);
368: VecRestoreArrayRead(xin,&xarray);
369: return(0);
370: }
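
Rank 0 receives each process's slice in turn and prints the whole vector, so a caller only has to pick a format. A minimal sketch (ViewAsMatlab is a hypothetical helper; not part of pdvec.c):

    #include <petscvec.h>

    /* print an assembled parallel vector as a MATLAB assignment on stdout */
    PetscErrorCode ViewAsMatlab(Vec x)
    {
      PetscErrorCode ierr;
      PetscViewer    viewer = PETSC_VIEWER_STDOUT_WORLD;

      ierr = PetscObjectSetName((PetscObject)x,"x");CHKERRQ(ierr); /* becomes the MATLAB variable name */
      ierr = PetscViewerPushFormat(viewer,PETSC_VIEWER_ASCII_MATLAB);CHKERRQ(ierr);
      ierr = VecView(x,viewer);CHKERRQ(ierr); /* lands in VecView_MPI_ASCII() */
      ierr = PetscViewerPopFormat(viewer);CHKERRQ(ierr);
      return 0;
    }
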
372: PetscErrorCode VecView_MPI_Binary(Vec xin,PetscViewer viewer)
373: {
374: PetscErrorCode ierr;
375: PetscMPIInt rank,size,mesgsize,tag = ((PetscObject)viewer)->tag, mesglen;
376: PetscInt len,n,j,tr[2];
377: int fdes;
378: MPI_Status status;
379: PetscScalar *values;
380: const PetscScalar *xarray;
381: FILE *file;
382: #if defined(PETSC_HAVE_MPIIO)
383: PetscBool isMPIIO;
384: #endif
385: PetscBool skipHeader;
386: PetscInt message_count,flowcontrolcount;
387: PetscViewerFormat format;
390: VecGetArrayRead(xin,&xarray);
391: PetscViewerBinaryGetDescriptor(viewer,&fdes);
392: PetscViewerBinaryGetSkipHeader(viewer,&skipHeader);
394: /* determine maximum message to arrive */
395: MPI_Comm_rank(PetscObjectComm((PetscObject)xin),&rank);
396: MPI_Comm_size(PetscObjectComm((PetscObject)xin),&size);
398: if (!skipHeader) {
399: tr[0] = VEC_FILE_CLASSID;
400: tr[1] = xin->map->N;
401: PetscViewerBinaryWrite(viewer,tr,2,PETSC_INT,PETSC_FALSE);
402: }
404: #if defined(PETSC_HAVE_MPIIO)
405: PetscViewerBinaryGetUseMPIIO(viewer,&isMPIIO);
406: if (!isMPIIO) {
407: #endif
408: PetscViewerFlowControlStart(viewer,&message_count,&flowcontrolcount);
409: if (!rank) {
410: PetscBinaryWrite(fdes,(void*)xarray,xin->map->n,PETSC_SCALAR,PETSC_FALSE);
412: len = 0;
413: for (j=1; j<size; j++) len = PetscMax(len,xin->map->range[j+1]-xin->map->range[j]);
414: PetscMalloc1(len,&values);
415: PetscMPIIntCast(len,&mesgsize);
416: /* receive and save messages */
417: for (j=1; j<size; j++) {
418: PetscViewerFlowControlStepMaster(viewer,j,&message_count,flowcontrolcount);
419: MPI_Recv(values,mesgsize,MPIU_SCALAR,j,tag,PetscObjectComm((PetscObject)xin),&status);
420: MPI_Get_count(&status,MPIU_SCALAR,&mesglen);
421: n = (PetscInt)mesglen;
422: PetscBinaryWrite(fdes,values,n,PETSC_SCALAR,PETSC_TRUE);
423: }
424: PetscViewerFlowControlEndMaster(viewer,&message_count);
425: PetscFree(values);
426: } else {
427: PetscViewerFlowControlStepWorker(viewer,rank,&message_count);
428: PetscMPIIntCast(xin->map->n,&mesgsize);
429: MPI_Send((void*)xarray,mesgsize,MPIU_SCALAR,0,tag,PetscObjectComm((PetscObject)xin));
430: PetscViewerFlowControlEndWorker(viewer,&message_count);
431: }
432: PetscViewerGetFormat(viewer,&format);
433: if (format == PETSC_VIEWER_BINARY_MATLAB) {
434: MPI_Comm comm;
435: FILE *info;
436: const char *name;
438: PetscObjectGetName((PetscObject)xin,&name);
439: PetscObjectGetComm((PetscObject)viewer,&comm);
440: PetscViewerBinaryGetInfoPointer(viewer,&info);
441: PetscFPrintf(comm,info,"#--- begin code written by PetscViewerBinary for MATLAB format ---#\n");
442: PetscFPrintf(comm,info,"#$$ Set.%s = PetscBinaryRead(fd);\n",name);
443: PetscFPrintf(comm,info,"#--- end code written by PetscViewerBinary for MATLAB format ---#\n\n");
444: }
445: #if defined(PETSC_HAVE_MPIIO)
446: } else {
447: MPI_Offset off;
448: MPI_File mfdes;
449: PetscMPIInt lsize;
451: PetscMPIIntCast(xin->map->n,&lsize);
452: PetscViewerBinaryGetMPIIODescriptor(viewer,&mfdes);
453: PetscViewerBinaryGetMPIIOOffset(viewer,&off);
454: off += xin->map->rstart*sizeof(PetscScalar); /* off is MPI_Offset, not PetscMPIInt */
455: MPI_File_set_view(mfdes,off,MPIU_SCALAR,MPIU_SCALAR,(char*)"native",MPI_INFO_NULL);
456: MPIU_File_write_all(mfdes,(void*)xarray,lsize,MPIU_SCALAR,MPI_STATUS_IGNORE);
457: PetscViewerBinaryAddMPIIOOffset(viewer,xin->map->N*sizeof(PetscScalar));
458: }
459: #endif
461: VecRestoreArrayRead(xin,&xarray);
462: if (!rank) {
463: PetscViewerBinaryGetInfoPointer(viewer,&file);
464: if (file) {
465: if (((PetscObject)xin)->prefix) {
466: PetscFPrintf(PETSC_COMM_SELF,file,"-%svecload_block_size %D\n",((PetscObject)xin)->prefix,PetscAbs(xin->map->bs));
467: } else {
468: PetscFPrintf(PETSC_COMM_SELF,file,"-vecload_block_size %D\n",PetscAbs(xin->map->bs));
469: }
470: }
471: }
472: return(0);
473: }
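
A round-trip sketch (SaveAndReload is a hypothetical helper; assumes y already has a layout compatible with x; not part of pdvec.c). Whether the MPI-IO branch above is taken depends on the viewer's MPI-IO setting (see PetscViewerBinaryGetUseMPIIO()).

    #include <petscvec.h>

    PetscErrorCode SaveAndReload(Vec x,Vec y)
    {
      PetscErrorCode ierr;
      PetscViewer    viewer;

      ierr = PetscViewerBinaryOpen(PETSC_COMM_WORLD,"vec.dat",FILE_MODE_WRITE,&viewer);CHKERRQ(ierr);
      ierr = VecView(x,viewer);CHKERRQ(ierr); /* VecView_MPI_Binary() */
      ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);

      ierr = PetscViewerBinaryOpen(PETSC_COMM_WORLD,"vec.dat",FILE_MODE_READ,&viewer);CHKERRQ(ierr);
      ierr = VecLoad(y,viewer);CHKERRQ(ierr);
      ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
      return 0;
    }
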
475: #include <petscdraw.h>
476: PetscErrorCode VecView_MPI_Draw_LG(Vec xin,PetscViewer viewer)
477: {
478: PetscDraw draw;
479: PetscBool isnull;
480: PetscDrawLG lg;
481: PetscMPIInt i,size,rank,n,N,*lens = NULL,*disp = NULL;
482: PetscReal *values, *xx = NULL,*yy = NULL;
483: const PetscScalar *xarray;
484: int colors[] = {PETSC_DRAW_RED};
485: PetscErrorCode ierr;
488: PetscViewerDrawGetDraw(viewer,0,&draw);
489: PetscDrawIsNull(draw,&isnull);
490: if (isnull) return(0);
491: MPI_Comm_rank(PetscObjectComm((PetscObject)xin),&rank);
492: MPI_Comm_size(PetscObjectComm((PetscObject)xin),&size);
493: PetscMPIIntCast(xin->map->n,&n);
494: PetscMPIIntCast(xin->map->N,&N);
496: VecGetArrayRead(xin,&xarray);
497: #if defined(PETSC_USE_COMPLEX)
498: PetscMalloc1(n+1,&values);
499: for (i=0; i<n; i++) values[i] = PetscRealPart(xarray[i]);
500: #else
501: values = (PetscReal*)xarray;
502: #endif
503: if (!rank) {
504: PetscMalloc2(N,&xx,N,&yy);
505: for (i=0; i<N; i++) xx[i] = (PetscReal)i;
506: PetscMalloc2(size,&lens,size,&disp);
507: for (i=0; i<size; i++) lens[i] = (PetscMPIInt)xin->map->range[i+1] - (PetscMPIInt)xin->map->range[i];
508: for (i=0; i<size; i++) disp[i] = (PetscMPIInt)xin->map->range[i];
509: }
510: MPI_Gatherv(values,n,MPIU_REAL,yy,lens,disp,MPIU_REAL,0,PetscObjectComm((PetscObject)xin));
511: PetscFree2(lens,disp);
512: #if defined(PETSC_USE_COMPLEX)
513: PetscFree(values);
514: #endif
515: VecRestoreArrayRead(xin,&xarray);
517: PetscViewerDrawGetDrawLG(viewer,0,&lg);
518: PetscDrawLGReset(lg);
519: PetscDrawLGSetDimension(lg,1);
520: PetscDrawLGSetColors(lg,colors);
521: if (!rank) {
522: PetscDrawLGAddPoints(lg,N,&xx,&yy);
523: PetscFree2(xx,yy);
524: }
525: PetscDrawLGDraw(lg);
526: PetscDrawLGSave(lg);
527: return(0);
528: }
530: PetscErrorCode VecView_MPI_Draw(Vec xin,PetscViewer viewer)
531: {
532: PetscErrorCode ierr;
533: PetscMPIInt rank,size,tag = ((PetscObject)viewer)->tag;
534: PetscInt i,start,end;
535: MPI_Status status;
536: PetscReal min,max,tmp = 0.0;
537: PetscDraw draw;
538: PetscBool isnull;
539: PetscDrawAxis axis;
540: const PetscScalar *xarray;
543: PetscViewerDrawGetDraw(viewer,0,&draw);
544: PetscDrawIsNull(draw,&isnull);
545: if (isnull) return(0);
546: MPI_Comm_size(PetscObjectComm((PetscObject)xin),&size);
547: MPI_Comm_rank(PetscObjectComm((PetscObject)xin),&rank);
549: VecMin(xin,NULL,&min);
550: VecMax(xin,NULL,&max);
551: if (min == max) {
552: min -= 1.e-5;
553: max += 1.e-5;
554: }
556: PetscDrawCheckResizedWindow(draw);
557: PetscDrawClear(draw);
559: PetscDrawAxisCreate(draw,&axis);
560: PetscDrawAxisSetLimits(axis,0.0,(PetscReal)xin->map->N,min,max);
561: PetscDrawAxisDraw(axis);
562: PetscDrawAxisDestroy(&axis);
564: /* draw local part of vector */
565: VecGetArrayRead(xin,&xarray);
566: VecGetOwnershipRange(xin,&start,&end);
567: if (rank < size-1) { /* send value to right */
568: MPI_Send((void*)&xarray[xin->map->n-1],1,MPIU_REAL,rank+1,tag,PetscObjectComm((PetscObject)xin));
569: }
570: if (rank) { /* receive value from left */
571: MPI_Recv(&tmp,1,MPIU_REAL,rank-1,tag,PetscObjectComm((PetscObject)xin),&status);
572: }
573: PetscDrawCollectiveBegin(draw);
574: if (rank) {
575: PetscDrawLine(draw,(PetscReal)start-1,tmp,(PetscReal)start,PetscRealPart(xarray[0]),PETSC_DRAW_RED);
576: }
577: for (i=1; i<xin->map->n; i++) {
578: PetscDrawLine(draw,(PetscReal)(i-1+start),PetscRealPart(xarray[i-1]),(PetscReal)(i+start),PetscRealPart(xarray[i]),PETSC_DRAW_RED);
579: }
580: PetscDrawCollectiveEnd(draw);
581: VecRestoreArrayRead(xin,&xarray);
583: PetscDrawFlush(draw);
584: PetscDrawPause(draw);
585: PetscDrawSave(draw);
586: return(0);
587: }
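
A sketch of driving either draw path (PlotVec is a hypothetical helper; not part of pdvec.c); pushing the PETSC_VIEWER_DRAW_LG format selects VecView_MPI_Draw_LG() instead of the point-to-point line plot above.

    #include <petscvec.h>

    PetscErrorCode PlotVec(Vec x)
    {
      PetscErrorCode ierr;
      PetscViewer    viewer;

      ierr = PetscViewerDrawOpen(PETSC_COMM_WORLD,NULL,"Vec entries",PETSC_DECIDE,PETSC_DECIDE,400,300,&viewer);CHKERRQ(ierr);
      /* ierr = PetscViewerPushFormat(viewer,PETSC_VIEWER_DRAW_LG);CHKERRQ(ierr);   <- line-graph variant */
      ierr = VecView(x,viewer);CHKERRQ(ierr); /* VecView_MPI_Draw() */
      ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
      return 0;
    }
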
589: #if defined(PETSC_HAVE_MATLAB_ENGINE)
590: PetscErrorCode VecView_MPI_Matlab(Vec xin,PetscViewer viewer)
591: {
592: PetscErrorCode ierr;
593: PetscMPIInt rank,size,*lens;
594: PetscInt i,N = xin->map->N;
595: const PetscScalar *xarray;
596: PetscScalar *xx;
599: VecGetArrayRead(xin,&xarray);
600: MPI_Comm_rank(PetscObjectComm((PetscObject)xin),&rank);
601: MPI_Comm_size(PetscObjectComm((PetscObject)xin),&size);
602: if (!rank) {
603: PetscMalloc1(N,&xx);
604: PetscMalloc1(size,&lens);
605: for (i=0; i<size; i++) lens[i] = xin->map->range[i+1] - xin->map->range[i];
607: MPI_Gatherv((void*)xarray,xin->map->n,MPIU_SCALAR,xx,lens,xin->map->range,MPIU_SCALAR,0,PetscObjectComm((PetscObject)xin));
608: PetscFree(lens);
610: PetscObjectName((PetscObject)xin);
611: PetscViewerMatlabPutArray(viewer,N,1,xx,((PetscObject)xin)->name);
613: PetscFree(xx);
614: } else {
615: MPI_Gatherv((void*)xarray,xin->map->n,MPIU_SCALAR,0,0,0,MPIU_SCALAR,0,PetscObjectComm((PetscObject)xin));
616: }
617: VecRestoreArrayRead(xin,&xarray);
618: return(0);
619: }
620: #endif
622: #if defined(PETSC_HAVE_HDF5)
623: PetscErrorCode VecView_MPI_HDF5(Vec xin, PetscViewer viewer)
624: {
625: /* TODO: It looks like we can remove the H5Sclose(filespace) and H5Dget_space(dset_id). Why do we do this? */
626: hid_t filespace; /* file dataspace identifier */
627: hid_t chunkspace; /* chunk dataset property identifier */
628: hid_t plist_id; /* property list identifier */
629: hid_t dset_id; /* dataset identifier */
630: hid_t memspace; /* memory dataspace identifier */
631: hid_t file_id;
632: hid_t group;
633: hid_t memscalartype; /* scalar type for mem (H5T_NATIVE_FLOAT or H5T_NATIVE_DOUBLE) */
634: hid_t filescalartype; /* scalar type for file (H5T_NATIVE_FLOAT or H5T_NATIVE_DOUBLE) */
635: PetscInt bs = PetscAbs(xin->map->bs);
636: hsize_t dim;
637: hsize_t maxDims[4], dims[4], chunkDims[4], count[4],offset[4];
638: PetscInt timestep;
639: PetscInt low;
640: PetscInt chunksize;
641: const PetscScalar *x;
642: const char *vecname;
643: PetscErrorCode ierr;
644: PetscBool dim2;
645: PetscBool spoutput;
648: PetscViewerHDF5OpenGroup(viewer, &file_id, &group);
649: PetscViewerHDF5GetTimestep(viewer, &timestep);
650: PetscViewerHDF5GetBaseDimension2(viewer,&dim2);
651: PetscViewerHDF5GetSPOutput(viewer,&spoutput);
653: /* Create the dataspace for the dataset.
654: *
655: * dims - holds the current dimensions of the dataset
656: *
657: * maxDims - holds the maximum dimensions of the dataset (unlimited
658: * for the number of time steps with the current dimensions for the
659: * other dimensions; so only additional time steps can be added).
660: *
661: * chunkDims - holds the size of a single time step (required to
662: * permit extending dataset).
663: */
664: dim = 0;
665: chunksize = 1;
666: if (timestep >= 0) {
667: dims[dim] = timestep+1;
668: maxDims[dim] = H5S_UNLIMITED;
669: chunkDims[dim] = 1;
670: ++dim;
671: }
672: PetscHDF5IntCast(xin->map->N/bs,dims + dim);
674: maxDims[dim] = dims[dim];
675: chunkDims[dim] = dims[dim];
676: chunksize *= chunkDims[dim];
677: ++dim;
678: if (bs > 1 || dim2) {
679: dims[dim] = bs;
680: maxDims[dim] = dims[dim];
681: chunkDims[dim] = dims[dim];
682: chunksize *= chunkDims[dim];
683: ++dim;
684: }
685: #if defined(PETSC_USE_COMPLEX)
686: dims[dim] = 2;
687: maxDims[dim] = dims[dim];
688: chunkDims[dim] = dims[dim];
689: chunksize *= chunkDims[dim];
690: /* hdf5 chunks must be less than the max of a 32-bit int */
691: if (chunksize > PETSC_HDF5_INT_MAX/64) {
692: if (bs > 1 || dim2) {
693: if (chunkDims[dim-2] > (PetscInt)PetscSqrtReal((PetscReal)(PETSC_HDF5_INT_MAX/128))) {
694: chunkDims[dim-2] = (PetscInt)PetscSqrtReal((PetscReal)(PETSC_HDF5_INT_MAX/128));
695: } if (chunkDims[dim-1] > (PetscInt)PetscSqrtReal((PetscReal)(PETSC_HDF5_INT_MAX/128))) {
696: chunkDims[dim-1] = (PetscInt)PetscSqrtReal((PetscReal)(PETSC_HDF5_INT_MAX/128));
697: }
698: } else {
699: chunkDims[dim-1] = PETSC_HDF5_INT_MAX/128;
700: }
701: }
702: ++dim;
703: #else
704: /* hdf5 chunks must be less than the max of a 32-bit int */
705: if (chunksize > PETSC_HDF5_INT_MAX/64) {
706: if (bs > 1 || dim2) {
707: if (chunkDims[dim-2] > (PetscInt)PetscSqrtReal((PetscReal)(PETSC_HDF5_INT_MAX/64))) {
708: chunkDims[dim-2] = (PetscInt)PetscSqrtReal((PetscReal)(PETSC_HDF5_INT_MAX/64));
709: } if (chunkDims[dim-1] > (PetscInt)PetscSqrtReal((PetscReal)(PETSC_HDF5_INT_MAX/64))) {
710: chunkDims[dim-1] = (PetscInt)PetscSqrtReal((PetscReal)(PETSC_HDF5_INT_MAX/64));
711: }
712: } else {
713: chunkDims[dim-1] = PETSC_HDF5_INT_MAX/64;
714: }
715: }
716: #endif
719: PetscStackCallHDF5Return(filespace,H5Screate_simple,(dim, dims, maxDims));
721: #if defined(PETSC_USE_REAL_SINGLE)
722: memscalartype = H5T_NATIVE_FLOAT;
723: filescalartype = H5T_NATIVE_FLOAT;
724: #elif defined(PETSC_USE_REAL___FLOAT128)
725: #error "HDF5 output with 128 bit floats not supported."
726: #elif defined(PETSC_USE_REAL___FP16)
727: #error "HDF5 output with 16 bit floats not supported."
728: #else
729: memscalartype = H5T_NATIVE_DOUBLE;
730: if (spoutput == PETSC_TRUE) filescalartype = H5T_NATIVE_FLOAT;
731: else filescalartype = H5T_NATIVE_DOUBLE;
732: #endif
734: /* Create the dataset with default properties and close filespace */
735: PetscObjectGetName((PetscObject) xin, &vecname);
736: if (!H5Lexists(group, vecname, H5P_DEFAULT)) {
737: /* Create chunk */
738: PetscStackCallHDF5Return(chunkspace,H5Pcreate,(H5P_DATASET_CREATE));
739: PetscStackCallHDF5(H5Pset_chunk,(chunkspace, dim, chunkDims));
741: #if (H5_VERS_MAJOR * 10000 + H5_VERS_MINOR * 100 + H5_VERS_RELEASE >= 10800)
742: PetscStackCallHDF5Return(dset_id,H5Dcreate2,(group, vecname, filescalartype, filespace, H5P_DEFAULT, chunkspace, H5P_DEFAULT));
743: #else
744: PetscStackCallHDF5Return(dset_id,H5Dcreate,(group, vecname, filescalartype, filespace, H5P_DEFAULT));
745: #endif
746: PetscStackCallHDF5(H5Pclose,(chunkspace));
747: } else {
748: PetscStackCallHDF5Return(dset_id,H5Dopen2,(group, vecname, H5P_DEFAULT));
749: PetscStackCallHDF5(H5Dset_extent,(dset_id, dims));
750: }
751: PetscStackCallHDF5(H5Sclose,(filespace));
753: /* Each process defines a dataset and writes it to the hyperslab in the file */
754: dim = 0;
755: if (timestep >= 0) {
756: count[dim] = 1;
757: ++dim;
758: }
759: PetscHDF5IntCast(xin->map->n/bs,count + dim);
760: ++dim;
761: if (bs > 1 || dim2) {
762: count[dim] = bs;
763: ++dim;
764: }
765: #if defined(PETSC_USE_COMPLEX)
766: count[dim] = 2;
767: ++dim;
768: #endif
769: if (xin->map->n > 0) {
770: PetscStackCallHDF5Return(memspace,H5Screate_simple,(dim, count, NULL));
771: } else {
772: /* Can't create dataspace with zero for any dimension, so create null dataspace. */
773: PetscStackCallHDF5Return(memspace,H5Screate,(H5S_NULL));
774: }
776: /* Select hyperslab in the file */
777: VecGetOwnershipRange(xin, &low, NULL);
778: dim = 0;
779: if (timestep >= 0) {
780: offset[dim] = timestep;
781: ++dim;
782: }
783: PetscHDF5IntCast(low/bs,offset + dim);
784: ++dim;
785: if (bs > 1 || dim2) {
786: offset[dim] = 0;
787: ++dim;
788: }
789: #if defined(PETSC_USE_COMPLEX)
790: offset[dim] = 0;
791: ++dim;
792: #endif
793: if (xin->map->n > 0) {
794: PetscStackCallHDF5Return(filespace,H5Dget_space,(dset_id));
795: PetscStackCallHDF5(H5Sselect_hyperslab,(filespace, H5S_SELECT_SET, offset, NULL, count, NULL));
796: } else {
797: /* Create null filespace to match null memspace. */
798: PetscStackCallHDF5Return(filespace,H5Screate,(H5S_NULL));
799: }
801: /* Create property list for collective dataset write */
802: PetscStackCallHDF5Return(plist_id,H5Pcreate,(H5P_DATASET_XFER));
803: #if defined(PETSC_HAVE_H5PSET_FAPL_MPIO)
804: PetscStackCallHDF5(H5Pset_dxpl_mpio,(plist_id, H5FD_MPIO_COLLECTIVE));
805: #endif
806: /* To write dataset independently use H5Pset_dxpl_mpio(plist_id, H5FD_MPIO_INDEPENDENT) */
808: VecGetArrayRead(xin, &x);
809: PetscStackCallHDF5(H5Dwrite,(dset_id, memscalartype, memspace, filespace, plist_id, x));
810: PetscStackCallHDF5(H5Fflush,(file_id, H5F_SCOPE_GLOBAL));
811: VecRestoreArrayRead(xin, &x);
813: #if defined(PETSC_USE_COMPLEX)
814: {
815: PetscInt one = 1;
816: const char *groupname;
817: char vecgroup[PETSC_MAX_PATH_LEN];
819: PetscViewerHDF5GetGroup(viewer,&groupname);
820: PetscSNPrintf(vecgroup,PETSC_MAX_PATH_LEN,"%s/%s",groupname,vecname);
821: PetscViewerHDF5WriteAttribute(viewer,vecgroup,"complex",PETSC_INT,&one);
822: }
823: #endif
824: /* Close/release resources */
825: if (group != file_id) {
826: PetscStackCallHDF5(H5Gclose,(group));
827: }
828: PetscStackCallHDF5(H5Pclose,(plist_id));
829: PetscStackCallHDF5(H5Sclose,(filespace));
830: PetscStackCallHDF5(H5Sclose,(memspace));
831: PetscStackCallHDF5(H5Dclose,(dset_id));
832: PetscInfo1(xin,"Wrote Vec object with name %s\n",vecname);
833: return(0);
834: }
835: #endif
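
A time-series sketch (DumpStep is a hypothetical helper; requires a PETSc build with HDF5; not part of pdvec.c). Setting a timestep activates the unlimited leading dimension created above, so successive calls extend the same dataset.

    #include <petscvec.h>
    #include <petscviewerhdf5.h>

    PetscErrorCode DumpStep(Vec x,PetscInt step)
    {
      PetscErrorCode ierr;
      PetscViewer    viewer;

      ierr = PetscViewerHDF5Open(PETSC_COMM_WORLD,"vec.h5",step ? FILE_MODE_APPEND : FILE_MODE_WRITE,&viewer);CHKERRQ(ierr);
      ierr = PetscViewerHDF5SetTimestep(viewer,step);CHKERRQ(ierr); /* dims[0] = step+1 in VecView_MPI_HDF5() */
      ierr = VecView(x,viewer);CHKERRQ(ierr);
      ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
      return 0;
    }
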
837: PETSC_EXTERN PetscErrorCode VecView_MPI(Vec xin,PetscViewer viewer)
838: {
840: PetscBool iascii,isbinary,isdraw;
841: #if defined(PETSC_HAVE_MATHEMATICA)
842: PetscBool ismathematica;
843: #endif
844: #if defined(PETSC_HAVE_HDF5)
845: PetscBool ishdf5;
846: #endif
847: #if defined(PETSC_HAVE_MATLAB_ENGINE)
848: PetscBool ismatlab;
849: #endif
850: PetscBool isglvis;
853: PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
854: PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);
855: PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw);
856: #if defined(PETSC_HAVE_MATHEMATICA)
857: PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERMATHEMATICA,&ismathematica);
858: #endif
859: #if defined(PETSC_HAVE_HDF5)
860: PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERHDF5,&ishdf5);
861: #endif
862: #if defined(PETSC_HAVE_MATLAB_ENGINE)
863: PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERMATLAB,&ismatlab);
864: #endif
865: PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERGLVIS,&isglvis);
866: if (iascii) {
867: VecView_MPI_ASCII(xin,viewer);
868: } else if (isbinary) {
869: VecView_MPI_Binary(xin,viewer);
870: } else if (isdraw) {
871: PetscViewerFormat format;
872: PetscViewerGetFormat(viewer,&format);
873: if (format == PETSC_VIEWER_DRAW_LG) {
874: VecView_MPI_Draw_LG(xin,viewer);
875: } else {
876: VecView_MPI_Draw(xin,viewer);
877: }
878: #if defined(PETSC_HAVE_MATHEMATICA)
879: } else if (ismathematica) {
880: PetscViewerMathematicaPutVector(viewer,xin);
881: #endif
882: #if defined(PETSC_HAVE_HDF5)
883: } else if (ishdf5) {
884: VecView_MPI_HDF5(xin,viewer);
885: #endif
886: #if defined(PETSC_HAVE_MATLAB_ENGINE)
887: } else if (ismatlab) {
888: VecView_MPI_Matlab(xin,viewer);
889: #endif
890: } else if (isglvis) {
891: VecView_GLVis(xin,viewer);
892: }
893: return(0);
894: }
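
Because the dispatch above covers ASCII, binary, draw, Mathematica, HDF5, MATLAB and GLVis viewers, the most common entry point is a runtime option rather than an explicit viewer. A minimal sketch (the option name -my_vec_view is arbitrary):

    /* lets the command line choose the branch taken in VecView_MPI(), e.g.
         -my_vec_view                  stdout ASCII
         -my_vec_view binary:vec.dat   VecView_MPI_Binary()
         -my_vec_view draw             VecView_MPI_Draw()            */
    ierr = VecViewFromOptions(x,NULL,"-my_vec_view");CHKERRQ(ierr);
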
896: PetscErrorCode VecGetSize_MPI(Vec xin,PetscInt *N)
897: {
899: *N = xin->map->N;
900: return(0);
901: }
903: PetscErrorCode VecGetValues_MPI(Vec xin,PetscInt ni,const PetscInt ix[],PetscScalar y[])
904: {
905: const PetscScalar *xx;
906: PetscInt i,tmp,start = xin->map->range[xin->stash.rank];
907: PetscErrorCode ierr;
910: VecGetArrayRead(xin,&xx);
911: for (i=0; i<ni; i++) {
912: if (xin->stash.ignorenegidx && ix[i] < 0) continue;
913: tmp = ix[i] - start;
914: #if defined(PETSC_USE_DEBUG)
915: if (tmp < 0 || tmp >= xin->map->n) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Can only get local values, trying %D",ix[i]);
916: #endif
917: y[i] = xx[tmp];
918: }
919: VecRestoreArrayRead(xin,&xx);
920: return(0);
921: }
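
Note that this routine only reads locally owned entries; off-process access requires a scatter (e.g. VecScatterCreateToAll()). A sketch of the supported pattern (ReadOwned is a hypothetical helper; not part of pdvec.c):

    #include <petscvec.h>

    PetscErrorCode ReadOwned(Vec x)
    {
      PetscErrorCode ierr;
      PetscInt       lo,hi,ix;
      PetscScalar    y;

      ierr = VecGetOwnershipRange(x,&lo,&hi);CHKERRQ(ierr);
      for (ix=lo; ix<hi; ix++) {          /* global indices owned by this rank */
        ierr = VecGetValues(x,1,&ix,&y);CHKERRQ(ierr);
        /* ... use y ... */
      }
      return 0;
    }
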
923: PetscErrorCode VecSetValues_MPI(Vec xin,PetscInt ni,const PetscInt ix[],const PetscScalar y[],InsertMode addv)
924: {
926: PetscMPIInt rank = xin->stash.rank;
927: PetscInt *owners = xin->map->range,start = owners[rank];
928: PetscInt end = owners[rank+1],i,row;
929: PetscScalar *xx;
932: #if defined(PETSC_USE_DEBUG)
933: if (xin->stash.insertmode == INSERT_VALUES && addv == ADD_VALUES) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"You have already inserted values; you cannot now add");
934: else if (xin->stash.insertmode == ADD_VALUES && addv == INSERT_VALUES) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"You have already added values; you cannot now insert");
935: #endif
936: VecGetArray(xin,&xx);
937: xin->stash.insertmode = addv;
939: if (addv == INSERT_VALUES) {
940: for (i=0; i<ni; i++) {
941: if (xin->stash.ignorenegidx && ix[i] < 0) continue;
942: #if defined(PETSC_USE_DEBUG)
943: if (ix[i] < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Out of range index value %D cannot be negative",ix[i]);
944: #endif
945: if ((row = ix[i]) >= start && row < end) {
946: xx[row-start] = y[i];
947: } else if (!xin->stash.donotstash) {
948: #if defined(PETSC_USE_DEBUG)
949: if (ix[i] >= xin->map->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Out of range index value %D maximum %D",ix[i],xin->map->N);
950: #endif
951: VecStashValue_Private(&xin->stash,row,y[i]);
952: }
953: }
954: } else {
955: for (i=0; i<ni; i++) {
956: if (xin->stash.ignorenegidx && ix[i] < 0) continue;
957: #if defined(PETSC_USE_DEBUG)
958: if (ix[i] < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Out of range index value %D cannot be negative",ix[i]);
959: #endif
960: if ((row = ix[i]) >= start && row < end) {
961: xx[row-start] += y[i];
962: } else if (!xin->stash.donotstash) {
963: #if defined(PETSC_USE_DEBUG)
964: if (ix[i] >= xin->map->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Out of range index value %D maximum %D",ix[i],xin->map->N);
965: #endif
966: VecStashValue_Private(&xin->stash,row,y[i]);
967: }
968: }
969: }
970: VecRestoreArray(xin,&xx);
971: return(0);
972: }
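
A sketch of the stash path (SetAnywhere is a hypothetical helper; not part of pdvec.c): any rank may set an entry it does not own; the value is kept in xin->stash and delivered to the owner during assembly.

    #include <petscvec.h>

    PetscErrorCode SetAnywhere(Vec x,PetscInt N)
    {
      PetscErrorCode ierr;
      PetscInt       i   = N-1;  /* typically owned by the last rank */
      PetscScalar    one = 1.0;

      ierr = VecSetValues(x,1,&i,&one,ADD_VALUES);CHKERRQ(ierr); /* stashed if i is off-process */
      ierr = VecAssemblyBegin(x);CHKERRQ(ierr); /* VecAssemblyBegin_MPI(): ship the stashes */
      ierr = VecAssemblyEnd(x);CHKERRQ(ierr);   /* VecAssemblyEnd_MPI(): apply received values */
      return 0;
    }
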
974: PetscErrorCode VecSetValuesBlocked_MPI(Vec xin,PetscInt ni,const PetscInt ix[],const PetscScalar yin[],InsertMode addv)
975: {
976: PetscMPIInt rank = xin->stash.rank;
977: PetscInt *owners = xin->map->range,start = owners[rank];
979: PetscInt end = owners[rank+1],i,row,bs = PetscAbs(xin->map->bs),j;
980: PetscScalar *xx,*y = (PetscScalar*)yin;
983: VecGetArray(xin,&xx);
984: #if defined(PETSC_USE_DEBUG)
985: if (xin->stash.insertmode == INSERT_VALUES && addv == ADD_VALUES) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"You have already inserted values; you cannot now add");
986: else if (xin->stash.insertmode == ADD_VALUES && addv == INSERT_VALUES) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"You have already added values; you cannot now insert");
987: #endif
988: xin->stash.insertmode = addv;
990: if (addv == INSERT_VALUES) {
991: for (i=0; i<ni; i++) {
992: if ((row = bs*ix[i]) >= start && row < end) {
993: for (j=0; j<bs; j++) xx[row-start+j] = y[j];
994: } else if (!xin->stash.donotstash) {
995: if (ix[i] < 0) continue;
996: #if defined(PETSC_USE_DEBUG)
997: if (ix[i] >= xin->map->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Out of range index value %D max %D",ix[i],xin->map->N);
998: #endif
999: VecStashValuesBlocked_Private(&xin->bstash,ix[i],y);
1000: }
1001: y += bs;
1002: }
1003: } else {
1004: for (i=0; i<ni; i++) {
1005: if ((row = bs*ix[i]) >= start && row < end) {
1006: for (j=0; j<bs; j++) xx[row-start+j] += y[j];
1007: } else if (!xin->stash.donotstash) {
1008: if (ix[i] < 0) continue;
1009: #if defined(PETSC_USE_DEBUG)
1010: if (ix[i] >= xin->map->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Out of range index value %D max %D",ix[i],xin->map->N);
1011: #endif
1012: VecStashValuesBlocked_Private(&xin->bstash,ix[i],y);
1013: }
1014: y += bs;
1015: }
1016: }
1017: VecRestoreArray(xin,&xx);
1018: return(0);
1019: }
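
A blocked sketch (SetBlock is a hypothetical helper; assumes x's local and global sizes are divisible by 3; not part of pdvec.c): indices address blocks of bs consecutive entries, and off-process blocks go to the separate bstash.

    #include <petscvec.h>

    PetscErrorCode SetBlock(Vec x)
    {
      PetscErrorCode ierr;
      PetscInt       bi      = 0;              /* block index: entries 0,1,2 */
      PetscScalar    vals[3] = {1.0,2.0,3.0};

      ierr = VecSetBlockSize(x,3);CHKERRQ(ierr);
      ierr = VecSetValuesBlocked(x,1,&bi,vals,INSERT_VALUES);CHKERRQ(ierr);
      ierr = VecAssemblyBegin(x);CHKERRQ(ierr);
      ierr = VecAssemblyEnd(x);CHKERRQ(ierr);
      return 0;
    }
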
1021: /*
1022: Since nsends or nreceives may be zero we add 1 in certain mallocs
1023: to make sure we never malloc an empty one.
1024: */
1025: PetscErrorCode VecAssemblyBegin_MPI(Vec xin)
1026: {
1028: PetscInt *owners = xin->map->range,*bowners,i,bs,nstash,reallocs;
1029: PetscMPIInt size;
1030: InsertMode addv;
1031: MPI_Comm comm;
1034: PetscObjectGetComm((PetscObject)xin,&comm);
1035: if (xin->stash.donotstash) return(0);
1037: MPIU_Allreduce((PetscEnum*)&xin->stash.insertmode,(PetscEnum*)&addv,1,MPIU_ENUM,MPI_BOR,comm);
1038: if (addv == (ADD_VALUES|INSERT_VALUES)) SETERRQ(comm,PETSC_ERR_ARG_NOTSAMETYPE,"Some processors inserted values while others added");
1039: xin->stash.insertmode = addv; /* in case this processor had no cache */
1040: xin->bstash.insertmode = addv; /* Block stash implicitly tracks InsertMode of scalar stash */
1042: VecGetBlockSize(xin,&bs);
1043: MPI_Comm_size(PetscObjectComm((PetscObject)xin),&size);
1044: if (!xin->bstash.bowners && xin->map->bs != -1) {
1045: PetscMalloc1(size+1,&bowners);
1046: for (i=0; i<size+1; i++) bowners[i] = owners[i]/bs;
1047: xin->bstash.bowners = bowners;
1048: } else bowners = xin->bstash.bowners;
1050: VecStashScatterBegin_Private(&xin->stash,owners);
1051: VecStashScatterBegin_Private(&xin->bstash,bowners);
1052: VecStashGetInfo_Private(&xin->stash,&nstash,&reallocs);
1053: PetscInfo2(xin,"Stash has %D entries, uses %D mallocs.\n",nstash,reallocs);
1054: VecStashGetInfo_Private(&xin->bstash,&nstash,&reallocs);
1055: PetscInfo2(xin,"Block-Stash has %D entries, uses %D mallocs.\n",nstash,reallocs);
1056: return(0);
1057: }
1059: PetscErrorCode VecAssemblyEnd_MPI(Vec vec)
1060: {
1062: PetscInt base,i,j,*row,flg,bs;
1063: PetscMPIInt n;
1064: PetscScalar *val,*vv,*array,*xarray;
1067: if (!vec->stash.donotstash) {
1068: VecGetArray(vec,&xarray);
1069: VecGetBlockSize(vec,&bs);
1070: base = vec->map->range[vec->stash.rank];
1072: /* Process the stash */
1073: while (1) {
1074: VecStashScatterGetMesg_Private(&vec->stash,&n,&row,&val,&flg);
1075: if (!flg) break;
1076: if (vec->stash.insertmode == ADD_VALUES) {
1077: for (i=0; i<n; i++) xarray[row[i] - base] += val[i];
1078: } else if (vec->stash.insertmode == INSERT_VALUES) {
1079: for (i=0; i<n; i++) xarray[row[i] - base] = val[i];
1080: } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_CORRUPT,"Insert mode is not set correctly; corrupted vector");
1081: }
1082: VecStashScatterEnd_Private(&vec->stash);
1084: /* now process the block-stash */
1085: while (1) {
1086: VecStashScatterGetMesg_Private(&vec->bstash,&n,&row,&val,&flg);
1087: if (!flg) break;
1088: for (i=0; i<n; i++) {
1089: array = xarray+row[i]*bs-base;
1090: vv = val+i*bs;
1091: if (vec->stash.insertmode == ADD_VALUES) {
1092: for (j=0; j<bs; j++) array[j] += vv[j];
1093: } else if (vec->stash.insertmode == INSERT_VALUES) {
1094: for (j=0; j<bs; j++) array[j] = vv[j];
1095: } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_CORRUPT,"Insert mode is not set correctly; corrupted vector");
1096: }
1097: }
1098: VecStashScatterEnd_Private(&vec->bstash);
1099: VecRestoreArray(vec,&xarray);
1100: }
1101: vec->stash.insertmode = NOT_SET_VALUES;
1102: return(0);
1103: }
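
A sketch of the mode discipline enforced above (InsertThenAdd is a hypothetical helper; not part of pdvec.c): INSERT_VALUES and ADD_VALUES may not be mixed within one assembly cycle, and VecAssemblyEnd_MPI() resets the mode to NOT_SET_VALUES.

    #include <petscvec.h>

    PetscErrorCode InsertThenAdd(Vec x)
    {
      PetscErrorCode ierr;
      PetscInt       i = 0;
      PetscScalar    a = 1.0;

      ierr = VecSetValues(x,1,&i,&a,INSERT_VALUES);CHKERRQ(ierr);
      /* an ADD_VALUES call here would error in debug builds:
         "You have already inserted values; you cannot now add" */
      ierr = VecAssemblyBegin(x);CHKERRQ(ierr);
      ierr = VecAssemblyEnd(x);CHKERRQ(ierr);   /* insertmode is now NOT_SET_VALUES */
      ierr = VecSetValues(x,1,&i,&a,ADD_VALUES);CHKERRQ(ierr); /* legal again */
      ierr = VecAssemblyBegin(x);CHKERRQ(ierr);
      ierr = VecAssemblyEnd(x);CHKERRQ(ierr);
      return 0;
    }
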