Actual source code: grvtk.c
petsc-main 2021-03-01
1: #include <petsc/private/dmdaimpl.h>
2: /*
3: Note that the API for using PETSCVIEWERVTK is totally wrong since its use requires
4: including the private vtkvimpl.h file. The code should be refactored.
5: */
6: #include <../src/sys/classes/viewer/impls/vtk/vtkvimpl.h>
8: /* Helper function which determines if any DMDA fields are named. This is used
9: as a proxy for the user's intention to use DMDA fields as distinct
10: scalar-valued fields as opposed to a single vector-valued field */
11: static PetscErrorCode DMDAGetFieldsNamed(DM da,PetscBool *fieldsnamed)
12: {
14: PetscInt f,bs;
17: *fieldsnamed = PETSC_FALSE;
18: DMDAGetDof(da,&bs);
19: for (f=0; f<bs; ++f) {
20: const char * fieldname;
21: DMDAGetFieldName(da,f,&fieldname);
22: if (fieldname) {
23: *fieldsnamed = PETSC_TRUE;
24: break;
25: }
26: }
27: return(0);
28: }
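
/* Editorial note, a minimal sketch of the behavior this helper controls
   (the DMDA "da" and the component names here are hypothetical):

     DMDASetFieldName(da,0,"pressure");
     DMDASetFieldName(da,1,"saturation");

   Naming any component makes the writers below emit one scalar DataArray per
   component, named "<vecname>.<fieldname>" (see the "%s.%s" formats below);
   with no names set, a single DataArray whose NumberOfComponents equals the
   DMDA block size is written instead. */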
30: static PetscErrorCode DMDAVTKWriteAll_VTS(DM da,PetscViewer viewer)
31: {
32: const char *byte_order = PetscBinaryBigEndian() ? "BigEndian" : "LittleEndian";
33: #if defined(PETSC_USE_REAL_SINGLE)
34: const char precision[] = "Float32";
35: #elif defined(PETSC_USE_REAL_DOUBLE)
36: const char precision[] = "Float64";
37: #else
38: const char precision[] = "UnknownPrecision";
39: #endif
40: MPI_Comm comm;
41: Vec Coords;
42: PetscViewer_VTK *vtk = (PetscViewer_VTK*)viewer->data;
43: PetscViewerVTKObjectLink link;
44: FILE *fp;
45: PetscMPIInt rank,size,tag;
46: DMDALocalInfo info;
47: PetscInt dim,mx,my,mz,cdim,bs,boffset,maxnnodes,maxbs,i,j,k,r;
48: PetscInt rloc[6],(*grloc)[6] = NULL;
49: PetscScalar *array,*array2;
50: PetscErrorCode ierr;
53: PetscObjectGetComm((PetscObject)da,&comm);
54: if (PetscDefined(USE_COMPLEX)) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Complex values not supported");
55: MPI_Comm_size(comm,&size);
56: MPI_Comm_rank(comm,&rank);
57: DMDAGetInfo(da,&dim,&mx,&my,&mz,NULL,NULL,NULL,&bs,NULL,NULL,NULL,NULL,NULL);
58: DMDAGetLocalInfo(da,&info);
59: DMGetCoordinates(da,&Coords);
60: if (Coords) {
61: PetscInt csize;
62: VecGetSize(Coords,&csize);
63: if (csize % (mx*my*mz)) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Coordinate vector size mismatch");
64: cdim = csize/(mx*my*mz);
65: if (cdim < dim || cdim > 3) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Coordinate vector size mismatch");
66: } else {
67: cdim = dim;
68: }
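
/* Note (editorial): cdim is the number of coordinate components stored per
   node (between dim and 3). The VTS Points array always has 3 components, so
   any missing components are padded with zeros when the coordinates are
   written further below. */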
70: PetscFOpen(comm,vtk->filename,"wb",&fp);
71: PetscFPrintf(comm,fp,"<?xml version=\"1.0\"?>\n");
72: PetscFPrintf(comm,fp,"<VTKFile type=\"StructuredGrid\" version=\"0.1\" byte_order=\"%s\">\n",byte_order);
73: PetscFPrintf(comm,fp," <StructuredGrid WholeExtent=\"%D %D %D %D %D %D\">\n",0,mx-1,0,my-1,0,mz-1);
75: if (!rank) {PetscMalloc1(size*6,&grloc);}
76: rloc[0] = info.xs;
77: rloc[1] = info.xm;
78: rloc[2] = info.ys;
79: rloc[3] = info.ym;
80: rloc[4] = info.zs;
81: rloc[5] = info.zm;
82: MPI_Gather(rloc,6,MPIU_INT,&grloc[0][0],6,MPIU_INT,0,comm);
84: /* Write XML header */
85: maxnnodes = 0; /* Used for the temporary array size on rank 0 */
86: maxbs = 0; /* Used for the temporary array size on rank 0 */
87: boffset = 0; /* Offset into binary file */
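
/* Note (editorial): with format="appended", each DataArray header only records
   a byte offset into the raw appended block written at the end of the file.
   Every payload there is preceded by a byte count (an int, hence the
   sizeof(int) added to each boffset update below on top of the data size). */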
88: for (r=0; r<size; r++) {
89: PetscInt xs=-1,xm=-1,ys=-1,ym=-1,zs=-1,zm=-1,nnodes = 0;
90: if (!rank) {
91: xs = grloc[r][0];
92: xm = grloc[r][1];
93: ys = grloc[r][2];
94: ym = grloc[r][3];
95: zs = grloc[r][4];
96: zm = grloc[r][5];
97: nnodes = xm*ym*zm;
98: }
99: maxnnodes = PetscMax(maxnnodes,nnodes);
100: PetscFPrintf(comm,fp," <Piece Extent=\"%D %D %D %D %D %D\">\n",xs,xs+xm-1,ys,ys+ym-1,zs,zs+zm-1);
101: PetscFPrintf(comm,fp," <Points>\n");
102: PetscFPrintf(comm,fp," <DataArray type=\"%s\" Name=\"Position\" NumberOfComponents=\"3\" format=\"appended\" offset=\"%D\" />\n",precision,boffset);
103: boffset += 3*nnodes*sizeof(PetscScalar) + sizeof(int);
104: PetscFPrintf(comm,fp," </Points>\n");
106: PetscFPrintf(comm,fp," <PointData Scalars=\"ScalarPointData\">\n");
107: for (link=vtk->link; link; link=link->next) {
108: Vec X = (Vec)link->vec;
109: PetscInt bs,f;
110: DM daCurr;
111: PetscBool fieldsnamed;
112: const char *vecname = "Unnamed";
114: VecGetDM(X,&daCurr);
115: DMDAGetInfo(daCurr,NULL,NULL,NULL,NULL,NULL,NULL,NULL,&bs,NULL,NULL,NULL,NULL,NULL);
116: maxbs = PetscMax(maxbs,bs);
118: if (((PetscObject)X)->name || link != vtk->link) {
119: PetscObjectGetName((PetscObject)X,&vecname);
120: }
122: /* If any fields are named, add scalar fields. Otherwise, add a vector field */
123: DMDAGetFieldsNamed(daCurr,&fieldsnamed);
124: if (fieldsnamed) {
125: for (f=0; f<bs; f++) {
126: char buf[256];
127: const char *fieldname;
128: DMDAGetFieldName(daCurr,f,&fieldname);
129: if (!fieldname) {
130: PetscSNPrintf(buf,sizeof(buf),"%D",f);
131: fieldname = buf;
132: }
133: PetscFPrintf(comm,fp," <DataArray type=\"%s\" Name=\"%s.%s\" NumberOfComponents=\"1\" format=\"appended\" offset=\"%D\" />\n",precision,vecname,fieldname,boffset);
134: boffset += nnodes*sizeof(PetscScalar) + sizeof(int);
135: }
136: } else {
137: PetscFPrintf(comm,fp," <DataArray type=\"%s\" Name=\"%s\" NumberOfComponents=\"%D\" format=\"appended\" offset=\"%D\" />\n",precision,vecname,bs,boffset);
138: boffset += bs*nnodes*sizeof(PetscScalar) + sizeof(int);
139: }
140: }
141: PetscFPrintf(comm,fp," </PointData>\n");
142: PetscFPrintf(comm,fp," </Piece>\n");
143: }
144: PetscFPrintf(comm,fp," </StructuredGrid>\n");
145: PetscFPrintf(comm,fp," <AppendedData encoding=\"raw\">\n");
146: PetscFPrintf(comm,fp,"_");
148: /* Now write the arrays. */
149: tag = ((PetscObject)viewer)->tag;
150: PetscMalloc2(maxnnodes*PetscMax(3,maxbs),&array,maxnnodes*PetscMax(3,maxbs),&array2);
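
/* Note (editorial): the writing pass below serializes all output through rank 0.
   For each rank r in turn, rank 0 either copies its own local data or receives
   rank r's data (coordinates, then each queued Vec) and appends it to the file,
   using array/array2 as receive and transpose scratch buffers. */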
151: for (r=0; r<size; r++) {
152: MPI_Status status;
153: PetscInt xs=-1,xm=-1,ys=-1,ym=-1,zs=-1,zm=-1,nnodes = 0;
154: if (!rank) {
155: xs = grloc[r][0];
156: xm = grloc[r][1];
157: ys = grloc[r][2];
158: ym = grloc[r][3];
159: zs = grloc[r][4];
160: zm = grloc[r][5];
161: nnodes = xm*ym*zm;
162: } else if (r == rank) {
163: nnodes = info.xm*info.ym*info.zm;
164: }
166: /* Write the coordinates */
167: if (Coords) {
168: const PetscScalar *coords;
169: VecGetArrayRead(Coords,&coords);
170: if (!rank) {
171: if (r) {
172: PetscMPIInt nn;
173: MPI_Recv(array,nnodes*cdim,MPIU_SCALAR,r,tag,comm,&status);
174: MPI_Get_count(&status,MPIU_SCALAR,&nn);
175: if (nn != nnodes*cdim) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Array size mismatch");
176: } else {
177: PetscArraycpy(array,coords,nnodes*cdim);
178: }
179: /* Transpose coordinates to VTK (C-style) ordering */
180: for (k=0; k<zm; k++) {
181: for (j=0; j<ym; j++) {
182: for (i=0; i<xm; i++) {
183: PetscInt Iloc = i+xm*(j+ym*k);
184: array2[Iloc*3+0] = array[Iloc*cdim + 0];
185: array2[Iloc*3+1] = cdim > 1 ? array[Iloc*cdim + 1] : 0.0;
186: array2[Iloc*3+2] = cdim > 2 ? array[Iloc*cdim + 2] : 0.0;
187: }
188: }
189: }
190: } else if (r == rank) {
191: MPI_Send((void*)coords,nnodes*cdim,MPIU_SCALAR,0,tag,comm);
192: }
193: VecRestoreArrayRead(Coords,&coords);
194: } else { /* Fabricate some coordinates using grid index */
195: for (k=0; k<zm; k++) {
196: for (j=0; j<ym; j++) {
197: for (i=0; i<xm; i++) {
198: PetscInt Iloc = i+xm*(j+ym*k);
199: array2[Iloc*3+0] = xs+i;
200: array2[Iloc*3+1] = ys+j;
201: array2[Iloc*3+2] = zs+k;
202: }
203: }
204: }
205: }
206: PetscViewerVTKFWrite(viewer,fp,array2,nnodes*3,MPIU_SCALAR);
208: /* Write each of the objects queued up for this file */
209: for (link=vtk->link; link; link=link->next) {
210: Vec X = (Vec)link->vec;
211: const PetscScalar *x;
212: PetscInt bs,f;
213: DM daCurr;
214: PetscBool fieldsnamed;
215: VecGetDM(X,&daCurr);
216: DMDAGetInfo(daCurr,NULL,NULL,NULL,NULL,NULL,NULL,NULL,&bs,NULL,NULL,NULL,NULL,NULL);
217: VecGetArrayRead(X,&x);
218: if (!rank) {
219: if (r) {
220: PetscMPIInt nn;
221: MPI_Recv(array,nnodes*bs,MPIU_SCALAR,r,tag,comm,&status);
222: MPI_Get_count(&status,MPIU_SCALAR,&nn);
223: if (nn != nnodes*bs) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Array size mismatch receiving from rank %D",r);
224: } else {
225: PetscArraycpy(array,x,nnodes*bs);
226: }
228: /* If any fields are named, add scalar fields. Otherwise, add a vector field */
229: DMDAGetFieldsNamed(daCurr,&fieldsnamed);
230: if (fieldsnamed) {
231: for (f=0; f<bs; f++) {
232: /* Extract and transpose the f'th field */
233: for (k=0; k<zm; k++) {
234: for (j=0; j<ym; j++) {
235: for (i=0; i<xm; i++) {
236: PetscInt Iloc = i+xm*(j+ym*k);
237: array2[Iloc] = array[Iloc*bs + f];
238: }
239: }
240: }
241: PetscViewerVTKFWrite(viewer,fp,array2,nnodes,MPIU_SCALAR);
242: }
243: } else {
244: PetscViewerVTKFWrite(viewer,fp,array,bs*nnodes,MPIU_SCALAR);
245: }
246: } else if (r == rank) {
247: MPI_Send((void*)x,nnodes*bs,MPIU_SCALAR,0,tag,comm);
248: }
249: VecRestoreArrayRead(X,&x);
250: }
251: }
252: PetscFree2(array,array2);
253: PetscFree(grloc);
255: PetscFPrintf(comm,fp,"\n </AppendedData>\n");
256: PetscFPrintf(comm,fp,"</VTKFile>\n");
257: PetscFClose(comm,fp);
258: return(0);
259: }
262: static PetscErrorCode DMDAVTKWriteAll_VTR(DM da,PetscViewer viewer)
263: {
264: const char *byte_order = PetscBinaryBigEndian() ? "BigEndian" : "LittleEndian";
265: #if defined(PETSC_USE_REAL_SINGLE)
266: const char precision[] = "Float32";
267: #elif defined(PETSC_USE_REAL_DOUBLE)
268: const char precision[] = "Float64";
269: #else
270: const char precision[] = "UnknownPrecision";
271: #endif
272: MPI_Comm comm;
273: PetscViewer_VTK *vtk = (PetscViewer_VTK*)viewer->data;
274: PetscViewerVTKObjectLink link;
275: FILE *fp;
276: PetscMPIInt rank,size,tag;
277: DMDALocalInfo info;
278: PetscInt dim,mx,my,mz,boffset,maxnnodes,maxbs,i,j,k,r;
279: PetscInt rloc[6],(*grloc)[6] = NULL;
280: PetscScalar *array,*array2;
281: PetscErrorCode ierr;
284: PetscObjectGetComm((PetscObject)da,&comm);
285: if (PetscDefined(USE_COMPLEX)) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Complex values not supported");
286: MPI_Comm_size(comm,&size);
287: MPI_Comm_rank(comm,&rank);
288: DMDAGetInfo(da,&dim,&mx,&my,&mz,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL);
289: DMDAGetLocalInfo(da,&info);
290: PetscFOpen(comm,vtk->filename,"wb",&fp);
291: PetscFPrintf(comm,fp,"<?xml version=\"1.0\"?>\n");
292: PetscFPrintf(comm,fp,"<VTKFile type=\"RectilinearGrid\" version=\"0.1\" byte_order=\"%s\">\n",byte_order);
293: PetscFPrintf(comm,fp," <RectilinearGrid WholeExtent=\"%D %D %D %D %D %D\">\n",0,mx-1,0,my-1,0,mz-1);
295: if (!rank) {PetscMalloc1(size*6,&grloc);}
296: rloc[0] = info.xs;
297: rloc[1] = info.xm;
298: rloc[2] = info.ys;
299: rloc[3] = info.ym;
300: rloc[4] = info.zs;
301: rloc[5] = info.zm;
302: MPI_Gather(rloc,6,MPIU_INT,&grloc[0][0],6,MPIU_INT,0,comm);
304: /* Write XML header */
305: maxnnodes = 0; /* Used for the temporary array size on rank 0 */
306: maxbs = 0; /* Used for the temporary array size on rank 0 */
307: boffset = 0; /* Offset into binary file */
308: for (r=0; r<size; r++) {
309: PetscInt xs=-1,xm=-1,ys=-1,ym=-1,zs=-1,zm=-1,nnodes = 0;
310: if (!rank) {
311: xs = grloc[r][0];
312: xm = grloc[r][1];
313: ys = grloc[r][2];
314: ym = grloc[r][3];
315: zs = grloc[r][4];
316: zm = grloc[r][5];
317: nnodes = xm*ym*zm;
318: }
319: maxnnodes = PetscMax(maxnnodes,nnodes);
320: PetscFPrintf(comm,fp," <Piece Extent=\"%D %D %D %D %D %D\">\n",xs,xs+xm-1,ys,ys+ym-1,zs,zs+zm-1);
321: PetscFPrintf(comm,fp," <Coordinates>\n");
322: PetscFPrintf(comm,fp," <DataArray type=\"%s\" Name=\"Xcoord\" format=\"appended\" offset=\"%D\" />\n",precision,boffset);
323: boffset += xm*sizeof(PetscScalar) + sizeof(int);
324: PetscFPrintf(comm,fp," <DataArray type=\"%s\" Name=\"Ycoord\" format=\"appended\" offset=\"%D\" />\n",precision,boffset);
325: boffset += ym*sizeof(PetscScalar) + sizeof(int);
326: PetscFPrintf(comm,fp," <DataArray type=\"%s\" Name=\"Zcoord\" format=\"appended\" offset=\"%D\" />\n",precision,boffset);
327: boffset += zm*sizeof(PetscScalar) + sizeof(int);
328: PetscFPrintf(comm,fp," </Coordinates>\n");
329: PetscFPrintf(comm,fp," <PointData Scalars=\"ScalarPointData\">\n");
330: for (link=vtk->link; link; link=link->next) {
331: Vec X = (Vec)link->vec;
332: PetscInt bs,f;
333: DM daCurr;
334: PetscBool fieldsnamed;
335: const char *vecname = "Unnamed";
337: VecGetDM(X,&daCurr);
338: DMDAGetInfo(daCurr,NULL,NULL,NULL,NULL,NULL,NULL,NULL,&bs,NULL,NULL,NULL,NULL,NULL);
339: maxbs = PetscMax(maxbs,bs);
340: if (((PetscObject)X)->name || link != vtk->link) {
341: PetscObjectGetName((PetscObject)X,&vecname);
342: }
344: /* If any fields are named, add scalar fields. Otherwise, add a vector field */
345: DMDAGetFieldsNamed(daCurr,&fieldsnamed);
346: if (fieldsnamed) {
347: for (f=0; f<bs; f++) {
348: char buf[256];
349: const char *fieldname;
350: DMDAGetFieldName(daCurr,f,&fieldname);
351: if (!fieldname) {
352: PetscSNPrintf(buf,sizeof(buf),"%D",f);
353: fieldname = buf;
354: }
355: PetscFPrintf(comm,fp," <DataArray type=\"%s\" Name=\"%s.%s\" NumberOfComponents=\"1\" format=\"appended\" offset=\"%D\" />\n",precision,vecname,fieldname,boffset);
356: boffset += nnodes*sizeof(PetscScalar) + sizeof(int);
357: }
358: } else {
359: PetscFPrintf(comm,fp," <DataArray type=\"%s\" Name=\"%s\" NumberOfComponents=\"%D\" format=\"appended\" offset=\"%D\" />\n",precision,vecname,bs,boffset);
360: boffset += bs*nnodes*sizeof(PetscScalar) + sizeof(int);
361: }
362: }
363: PetscFPrintf(comm,fp," </PointData>\n");
364: PetscFPrintf(comm,fp," </Piece>\n");
365: }
366: PetscFPrintf(comm,fp," </RectilinearGrid>\n");
367: PetscFPrintf(comm,fp," <AppendedData encoding=\"raw\">\n");
368: PetscFPrintf(comm,fp,"_");
370: /* Now write the arrays. */
371: tag = ((PetscObject)viewer)->tag;
372: PetscMalloc2(PetscMax(maxnnodes*maxbs,info.xm+info.ym+info.zm),&array,PetscMax(maxnnodes*maxbs,info.xm+info.ym+info.zm),&array2);
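
/* Note (editorial): the scratch buffers must hold either the largest per-rank
   field payload (maxnnodes*maxbs entries) or the three per-axis coordinate
   lines of the local grid (info.xm+info.ym+info.zm entries), whichever is
   larger. */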
373: for (r=0; r<size; r++) {
374: MPI_Status status;
375: PetscInt xs=-1,xm=-1,ys=-1,ym=-1,zs=-1,zm=-1,nnodes = 0;
376: if (!rank) {
377: xs = grloc[r][0];
378: xm = grloc[r][1];
379: ys = grloc[r][2];
380: ym = grloc[r][3];
381: zs = grloc[r][4];
382: zm = grloc[r][5];
383: nnodes = xm*ym*zm;
384: } else if (r == rank) {
385: nnodes = info.xm*info.ym*info.zm;
386: }
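
/* Note (editorial): a rectilinear (.vtr) grid stores one 1-D coordinate array
   per axis. The code below therefore samples the DMDA coordinate vector along
   a single grid line in each direction (j=k=0 for X, i=k=0 for Y, i=j=0 for Z),
   which assumes the coordinates form a tensor product; curvilinear coordinates
   require the .vts writer above. */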
387: { /* Write the coordinates */
388: Vec Coords;
390: DMGetCoordinates(da,&Coords);
391: if (Coords) {
392: const PetscScalar *coords;
393: VecGetArrayRead(Coords,&coords);
394: if (!rank) {
395: if (r) {
396: PetscMPIInt nn;
397: MPI_Recv(array,xm+ym+zm,MPIU_SCALAR,r,tag,comm,&status);
398: MPI_Get_count(&status,MPIU_SCALAR,&nn);
399: if (nn != xm+ym+zm) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Array size mismatch");
400: } else {
401: /* x coordinates */
402: for (j=0, k=0, i=0; i<xm; i++) {
403: PetscInt Iloc = i+xm*(j+ym*k);
404: array[i] = coords[Iloc*dim + 0];
405: }
406: /* y coordinates */
407: for (i=0, k=0, j=0; j<ym; j++) {
408: PetscInt Iloc = i+xm*(j+ym*k);
409: array[j+xm] = dim > 1 ? coords[Iloc*dim + 1] : 0;
410: }
411: /* z coordinates */
412: for (i=0, j=0, k=0; k<zm; k++) {
413: PetscInt Iloc = i+xm*(j+ym*k);
414: array[k+xm+ym] = dim > 2 ? coords[Iloc*dim + 2] : 0;
415: }
416: }
417: } else if (r == rank) {
418: xm = info.xm;
419: ym = info.ym;
420: zm = info.zm;
421: for (j=0, k=0, i=0; i<xm; i++) {
422: PetscInt Iloc = i+xm*(j+ym*k);
423: array2[i] = coords[Iloc*dim + 0];
424: }
425: for (i=0, k=0, j=0; j<ym; j++) {
426: PetscInt Iloc = i+xm*(j+ym*k);
427: array2[j+xm] = dim > 1 ? coords[Iloc*dim + 1] : 0;
428: }
429: for (i=0, j=0, k=0; k<zm; k++) {
430: PetscInt Iloc = i+xm*(j+ym*k);
431: array2[k+xm+ym] = dim > 2 ? coords[Iloc*dim + 2] : 0;
432: }
433: MPI_Send((void*)array2,xm+ym+zm,MPIU_SCALAR,0,tag,comm);
434: }
435: VecRestoreArrayRead(Coords,&coords);
436: } else { /* Fabricate some coordinates using grid index */
437: for (i=0; i<xm; i++) {
438: array[i] = xs+i;
439: }
440: for (j=0; j<ym; j++) {
441: array[j+xm] = ys+j;
442: }
443: for (k=0; k<zm; k++) {
444: array[k+xm+ym] = zs+k;
445: }
446: }
447: if (!rank) {
448: PetscViewerVTKFWrite(viewer,fp,&(array[0]),xm,MPIU_SCALAR);
449: PetscViewerVTKFWrite(viewer,fp,&(array[xm]),ym,MPIU_SCALAR);
450: PetscViewerVTKFWrite(viewer,fp,&(array[xm+ym]),zm,MPIU_SCALAR);
451: }
452: }
454: /* Write each of the objects queued up for this file */
455: for (link=vtk->link; link; link=link->next) {
456: Vec X = (Vec)link->vec;
457: const PetscScalar *x;
458: PetscInt bs,f;
459: DM daCurr;
460: PetscBool fieldsnamed;
461: VecGetDM(X,&daCurr);
462: DMDAGetInfo(daCurr,NULL,NULL,NULL,NULL,NULL,NULL,NULL,&bs,NULL,NULL,NULL,NULL,NULL);
464: VecGetArrayRead(X,&x);
465: if (!rank) {
466: if (r) {
467: PetscMPIInt nn;
468: MPI_Recv(array,nnodes*bs,MPIU_SCALAR,r,tag,comm,&status);
469: MPI_Get_count(&status,MPIU_SCALAR,&nn);
470: if (nn != nnodes*bs) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Array size mismatch receiving from rank %D",r);
471: } else {
472: PetscArraycpy(array,x,nnodes*bs);
473: }
474: /* If any fields are named, add scalar fields. Otherwise, add a vector field */
475: DMDAGetFieldsNamed(daCurr,&fieldsnamed);
476: if (fieldsnamed) {
477: for (f=0; f<bs; f++) {
478: /* Extract and transpose the f'th field */
479: for (k=0; k<zm; k++) {
480: for (j=0; j<ym; j++) {
481: for (i=0; i<xm; i++) {
482: PetscInt Iloc = i+xm*(j+ym*k);
483: array2[Iloc] = array[Iloc*bs + f];
484: }
485: }
486: }
487: PetscViewerVTKFWrite(viewer,fp,array2,nnodes,MPIU_SCALAR);
488: }
489: } else {
490: PetscViewerVTKFWrite(viewer,fp,array,nnodes*bs,MPIU_SCALAR);
491: }
492: } else if (r == rank) {
493: MPI_Send((void*)x,nnodes*bs,MPIU_SCALAR,0,tag,comm);
494: }
495: VecRestoreArrayRead(X,&x);
496: }
497: }
498: PetscFree2(array,array2);
499: PetscFree(grloc);
501: PetscFPrintf(comm,fp,"\n </AppendedData>\n");
502: PetscFPrintf(comm,fp,"</VTKFile>\n");
503: PetscFClose(comm,fp);
504: return(0);
505: }
507: /*@C
508: DMDAVTKWriteAll - Write a file containing all the fields that have been provided to the viewer
510: Collective
512: Input Arguments:
513: +  odm - DM specifying the grid layout, passed as a PetscObject
514: -  viewer - viewer of type VTK
516: Level: developer
518: Notes:
519: This function is a callback used by the VTK viewer to actually write the file.
520: The reason for this odd model is that the VTK file format does not provide any way to write one field at a time.
521: Instead, metadata for the entire file needs to be available up-front before you can start writing the file.
523: If any fields have been named (see e.g. DMDASetFieldName()), then individual scalar
524: fields are written. Otherwise, a single multi-dof (vector) field is written.
526: .seealso: PETSCVIEWERVTK
527: @*/
528: PetscErrorCode DMDAVTKWriteAll(PetscObject odm,PetscViewer viewer)
529: {
530: DM dm = (DM)odm;
531: PetscBool isvtk;
537: PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERVTK,&isvtk);
538: if (!isvtk) SETERRQ1(PetscObjectComm((PetscObject)viewer),PETSC_ERR_ARG_INCOMP,"Cannot use viewer type %s",((PetscObject)viewer)->type_name);
539: switch (viewer->format) {
540: case PETSC_VIEWER_VTK_VTS:
541: DMDAVTKWriteAll_VTS(dm,viewer);
542: break;
543: case PETSC_VIEWER_VTK_VTR:
544: DMDAVTKWriteAll_VTR(dm,viewer);
545: break;
546: default: SETERRQ1(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No support for format '%s'",PetscViewerFormats[viewer->format]);
547: }
548: return(0);
549: }
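
/* Editorial usage sketch (not part of the source): DMDAVTKWriteAll() is a
   callback invoked by the VTK viewer, typically when the viewer is flushed or
   destroyed, so a normal caller never calls it directly. Under that assumption,
   output is driven roughly as follows (error checking omitted; the file name
   and the Vec X are hypothetical):

     PetscViewer viewer;
     PetscViewerVTKOpen(PETSC_COMM_WORLD,"solution.vts",FILE_MODE_WRITE,&viewer);
     VecView(X,viewer);                 // X must be associated with a DMDA (VecGetDM)
     PetscViewerDestroy(&viewer);       // flush triggers DMDAVTKWriteAll

   The ".vts" extension selects PETSC_VIEWER_VTK_VTS (structured grid) and
   ".vtr" selects PETSC_VIEWER_VTK_VTR (rectilinear grid), handled above. */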