Actual source code: meshexodus.c

petsc-3.3-p7 2013-05-11
  1: #include<petscdmmesh_formats.hh>   /*I "petscdmmesh.h" I*/

  3: #ifdef PETSC_HAVE_EXODUSII
  4: #include<netcdf.h>
  5: #include<exodusII.h>

  9: PetscErrorCode PetscReadExodusII(MPI_Comm comm, const char filename[], ALE::Obj<PETSC_MESH_TYPE>& mesh)
 10: {
 11:   int   exoid;
 12:   int   CPU_word_size = 0, IO_word_size = 0;
 13:   const PetscMPIInt rank = mesh->commRank();
 14:   float version;
 15:   char  title[PETSC_MAX_PATH_LEN+1], elem_type[PETSC_MAX_PATH_LEN+1];
 16:   int   num_dim, num_nodes, num_elem, num_elem_blk, num_node_sets, num_side_sets;
 17:   int   ierr;

 20:   // Open EXODUS II file
 21:   exoid = ex_open(filename, EX_READ, &CPU_word_size, &IO_word_size, &version);if (exoid < 0) SETERRQ1(comm,PETSC_ERR_FILE_OPEN,"Unable to open ExodusII file %s",filename);
 22:   // Read database parameters
 23:   ex_get_init(exoid, title, &num_dim, &num_nodes, &num_elem, &num_elem_blk, &num_node_sets, &num_side_sets);

 25:   // Read vertex coordinates
 26:   float *x, *y, *z;
 27:   PetscMalloc3(num_nodes,float,&x,num_nodes,float,&y,num_nodes,float,&z);
 28:   ex_get_coord(exoid, x, y, z);

 30:   // Read element connectivity
 31:   int   *eb_ids, *num_elem_in_block, *num_nodes_per_elem, *num_attr;
 32:   int  **connect;
 33:   char **block_names;
 34:   if (num_elem_blk > 0) {
 35:     PetscMalloc5(num_elem_blk,int,&eb_ids,num_elem_blk,int,&num_elem_in_block,num_elem_blk,int,&num_nodes_per_elem,num_elem_blk,int,&num_attr,num_elem_blk,char*,&block_names);
 36:     ex_get_elem_blk_ids(exoid, eb_ids);
 37:     for(int eb = 0; eb < num_elem_blk; ++eb) {
 38:       PetscMalloc((PETSC_MAX_PATH_LEN+1) * sizeof(char), &block_names[eb]);
 39:     }
 40:     ex_get_names(exoid, EX_ELEM_BLOCK, block_names);
 41:     for(int eb = 0; eb < num_elem_blk; ++eb) {
 42:       ex_get_elem_block(exoid, eb_ids[eb], elem_type, &num_elem_in_block[eb], &num_nodes_per_elem[eb], &num_attr[eb]);
 43:       PetscFree(block_names[eb]);
 44:     }
 45:     PetscMalloc(num_elem_blk * sizeof(int*),&connect);
 46:     for(int eb = 0; eb < num_elem_blk; ++eb) {
 47:       if (num_elem_in_block[eb] > 0) {
 48:         PetscMalloc(num_nodes_per_elem[eb]*num_elem_in_block[eb] * sizeof(int),&connect[eb]);
 49:         ex_get_elem_conn(exoid, eb_ids[eb], connect[eb]);
 50:       }
 51:     }
 52:   }

 54:   // Read node sets
 55:   int  *ns_ids, *num_nodes_in_set;
 56:   int **node_list;
 57:   if (num_node_sets > 0) {
 58:     PetscMalloc3(num_node_sets,int,&ns_ids,num_node_sets,int,&num_nodes_in_set,num_node_sets,int*,&node_list);
 59:     ex_get_node_set_ids(exoid, ns_ids);
 60:     for(int ns = 0; ns < num_node_sets; ++ns) {
 61:       int num_df_in_set;
 62:       ex_get_node_set_param (exoid, ns_ids[ns], &num_nodes_in_set[ns], &num_df_in_set);
 63:       PetscMalloc(num_nodes_in_set[ns] * sizeof(int), &node_list[ns]);
 64:       ex_get_node_set(exoid, ns_ids[ns], node_list[ns]);
 65:     }
 66:   }
 67:   ex_close(exoid);

 69:   // Build mesh topology
 70:   int  numCorners = num_elem_blk > 0 ? num_nodes_per_elem[0] : 0;
 71:   int *cells;
 72:   mesh->setDimension(num_dim);
 73:   PetscMalloc(numCorners*num_elem * sizeof(int), &cells);
 74:   for(int eb = 0, k = 0; eb < num_elem_blk; ++eb) {
 75:     for(int e = 0; e < num_elem_in_block[eb]; ++e, ++k) {
 76:       for(int c = 0; c < numCorners; ++c) {
 77:         cells[k*numCorners+c] = connect[eb][e*numCorners+c];
 78:       }
 79:     }
 80:     PetscFree(connect[eb]);
 81:   }
 82:   ALE::Obj<PETSC_MESH_TYPE::sieve_type> sieve = new PETSC_MESH_TYPE::sieve_type(mesh->comm(), mesh->debug());
 83:   bool interpolate = false;

 85:   try {
 86:   mesh->setSieve(sieve);
 87:   if (0 == rank) {
 88:     if (!interpolate) {
 89:       // Create the ISieve
 90:       sieve->setChart(PETSC_MESH_TYPE::sieve_type::chart_type(0, num_elem+num_nodes));
 91:       // Set cone and support sizes
 92:       for (int c = 0; c < num_elem; ++c) {
 93:         sieve->setConeSize(c, numCorners);
 94:       }
 95:       sieve->symmetrizeSizes(num_elem, numCorners, cells, num_elem - 1); /* Notice the -1 for 1-based indexing in cells[] */
 96:       // Allocate point storage
 97:       sieve->allocate();
 98:       // Fill up cones
 99:       int *cone  = new int[numCorners];
100:       int *coneO = new int[numCorners];
101:       for (int v = 0; v < numCorners; ++v) {
102:         coneO[v] = 1;
103:       }
104:       for (int c = 0; c < num_elem; ++c) {
105:         for (int v = 0; v < numCorners; ++v) {
106:           cone[v] = cells[c*numCorners+v]+num_elem - 1;
107:         }
108:         sieve->setCone(cone, c);
109:         sieve->setConeOrientation(coneO, c);
110:       } // for
111:       delete[] cone; cone = 0;
112:       delete[] coneO; coneO = 0;
113:       // Symmetrize to fill up supports
114:       sieve->symmetrize();
115:     } else {
116:       // Same old thing
117:       typedef ALE::Mesh<PetscInt,PetscScalar> FlexMesh;
118:       ALE::Obj<FlexMesh::sieve_type> s = new FlexMesh::sieve_type(sieve->comm(), sieve->debug());

120:       ALE::SieveBuilder<FlexMesh>::buildTopology(s, num_dim, num_elem, cells, num_nodes, interpolate, numCorners);
121:       std::map<FlexMesh::point_type,FlexMesh::point_type> renumbering;
122:       ALE::ISieveConverter::convertSieve(*s, *sieve, renumbering);
123:     }
124:     if (!interpolate) {
125:       // Optimized stratification
126:       const ALE::Obj<PETSC_MESH_TYPE::label_type>& height = mesh->createLabel("height");
127:       const ALE::Obj<PETSC_MESH_TYPE::label_type>& depth  = mesh->createLabel("depth");

129:       for(int c = 0; c < num_elem; ++c) {
130:         height->setCone(0, c);
131:         depth->setCone(1, c);
132:       }
133:       for(int v = num_elem; v < num_elem+num_nodes; ++v) {
134:         height->setCone(1, v);
135:         depth->setCone(0, v);
136:       }
137:       mesh->setHeight(1);
138:       mesh->setDepth(1);
139:     } else {
140:       mesh->stratify();
141:     }
142:   } else {
143:     mesh->getSieve()->setChart(PETSC_MESH_TYPE::sieve_type::chart_type());
144:     mesh->getSieve()->allocate();
145:     mesh->stratify();
146:   }
147:   } catch (ALE::Exception e) {
148:     SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB, e.msg().c_str());
149:   }
150:   PetscFree(cells);

152:   // Build cell blocks
153:   const ALE::Obj<PETSC_MESH_TYPE::label_type>& cellBlocks = mesh->createLabel("CellBlocks");
154:   if (rank == 0) {
155:     for(int eb = 0, k = 0; eb < num_elem_blk; ++eb) {
156:       for(int e = 0; e < num_elem_in_block[eb]; ++e, ++k) {
157:         mesh->setValue(cellBlocks, k, eb_ids[eb]);
158:       }
159:     }
160:   }
161:   if (num_elem_blk > 0) {
162:     PetscFree(connect);
163:     PetscFree5(eb_ids, num_elem_in_block, num_nodes_per_elem, num_attr, block_names);
164:   }

166:   // Build coordinates
167:   double *coords;
168:   PetscMalloc(num_dim*num_nodes * sizeof(double), &coords);
169:   if (num_dim > 0) {for(int v = 0; v < num_nodes; ++v) {coords[v*num_dim+0] = x[v];}}
170:   if (num_dim > 1) {for(int v = 0; v < num_nodes; ++v) {coords[v*num_dim+1] = y[v];}}
171:   if (num_dim > 2) {for(int v = 0; v < num_nodes; ++v) {coords[v*num_dim+2] = z[v];}}
172:   ALE::SieveBuilder<PETSC_MESH_TYPE>::buildCoordinates(mesh, num_dim, coords);
173:   PetscFree(coords);
174:   PetscFree3(x, y, z);

176:   // Build vertex sets
177:   const ALE::Obj<PETSC_MESH_TYPE::label_type>& vertexSets = mesh->createLabel("VertexSets");
178:   if (rank == 0) {
179:     for(int ns = 0; ns < num_node_sets; ++ns) {
180:       for(int n = 0; n < num_nodes_in_set[ns]; ++n) {
181:         mesh->addValue(vertexSets, node_list[ns][n]+num_elem-1, ns_ids[ns]);
182:       }
183:       PetscFree(node_list[ns]);
184:     }
185:   }
186:   if (num_node_sets > 0) {
187:     PetscFree3(ns_ids,num_nodes_in_set,node_list);
188:   }
189:   return(0);
190: }

192: #endif // PETSC_HAVE_EXODUSII

196: /*@C
197:   DMMeshCreateExodus - Create a DMMesh from an ExodusII file.

199:   Not Collective

201:   Input Parameters:
202: + comm - The MPI communicator
203: - filename - The ExodusII filename

205:   Output Parameter:
206: . dm - The DMMesh object

208:   Level: beginner

210: .keywords: mesh, ExodusII
211: .seealso: DMMeshCreate()
212: @*/
213: PetscErrorCode DMMeshCreateExodus(MPI_Comm comm, const char filename[], DM *dm)
214: {
215:   PetscInt       debug = 0;
216:   PetscBool      flag;

220:   DMMeshCreate(comm, dm);
221:   PetscOptionsGetInt(PETSC_NULL, "-debug", &debug, &flag);
222:   ALE::Obj<PETSC_MESH_TYPE> m = new PETSC_MESH_TYPE(comm, -1, debug);
223: #ifdef PETSC_HAVE_EXODUSII
224:   PetscReadExodusII(comm, filename, m);
225: #else
226:   SETERRQ(comm, PETSC_ERR_SUP, "This method requires ExodusII support. Reconfigure using --download-exodusii");
227: #endif
228:   if (debug) {m->view("Mesh");}
229:   DMMeshSetMesh(*dm, m);
230:   return(0);
231: }
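/*
  Example: a minimal sketch (not part of the original file) of creating a mesh
  with DMMeshCreateExodus; the filename "mesh.exo" is hypothetical.

    DM             dm;
    PetscErrorCode ierr;

    ierr = DMMeshCreateExodus(PETSC_COMM_WORLD, "mesh.exo", &dm);CHKERRQ(ierr);
    // ... use the DMMesh ...
    ierr = DMDestroy(&dm);CHKERRQ(ierr);
*/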

235: /*@
236:   DMMeshCreateExodusNG - Create a Mesh from an ExodusII file.

238:   Collective on comm

240:   Input Parameters:
241: + comm  - The MPI communicator
242: - exoid - The ExodusII id associated with an ExodusII file, obtained with ex_open

244:   Output Parameter:
245: . dm  - The DM object representing the mesh

247:   ExodusII side sets are ignored


250:   Interpolated meshes are not supported.

252:   Level: beginner

254: .keywords: mesh,ExodusII
255: .seealso: MeshCreate() MeshCreateExodus()
256: @*/
257: PetscErrorCode DMMeshCreateExodusNG(MPI_Comm comm, PetscInt exoid,DM *dm)
258: {
259: #if defined(PETSC_HAVE_EXODUSII)
260:   PetscBool               debug = PETSC_FALSE;
261:   PetscMPIInt             num_proc,rank;
262:   PetscErrorCode          ierr;

264:   int                     num_dim,num_vertices = 0,num_cell = 0;
265:   int                     num_cs = 0,num_vs = 0,num_fs = 0;
266:   char                    title[PETSC_MAX_PATH_LEN+1];
267:   char                    buffer[PETSC_MAX_PATH_LEN+1];

269:   int                    *cs_id;
270:   int                     num_cell_in_set,num_vertex_per_cell,num_attr;
271:   int                    *cs_connect;

273:   int                    *vs_id;
274:   int                     num_vertex_in_set;
275:   int                    *vs_vertex_list;
276:   PetscReal              *x,*y,*z;
277:   PetscReal              *coords;

279:   PetscInt                v,c,v_loc,c_loc,vs,cs;

281:   typedef ALE::Mesh<PetscInt,PetscScalar> FlexMesh;
282:   typedef PETSC_MESH_TYPE::sieve_type     sieve_type;
283:   ALE::Obj<PETSC_MESH_TYPE>               mesh;
284:   ALE::Obj<FlexMesh::sieve_type>          flexmesh_sieve;
285:   ALE::Obj<FlexMesh>                      flexmesh;
286:   ALE::Obj<PETSC_MESH_TYPE::sieve_type>   sieve;
287:   std::map<PETSC_MESH_TYPE::point_type,PETSC_MESH_TYPE::point_type> renumbering;
288: #endif

291: #if defined(PETSC_HAVE_EXODUSII)
292:   MPI_Comm_rank(comm,&rank);
293:   MPI_Comm_size(comm,&num_proc);
294:   PetscOptionsGetBool(PETSC_NULL,"-debug",&debug,PETSC_NULL);

296:   DMMeshCreate(comm,dm);
297:   /*
298:     I really don't understand why this is needed
299:   */
300:   PetscObjectGetComm((PetscObject) *dm,&comm);


303:   mesh = new PETSC_MESH_TYPE(comm,-1,debug);
304:   DMMeshSetMesh(*dm,mesh);

306:   sieve = new PETSC_MESH_TYPE::sieve_type(mesh->comm(),mesh->debug());
307:   flexmesh_sieve = new FlexMesh::sieve_type(mesh->comm(),mesh->debug());

309:   /*
310:     Open EXODUS II file and read basic information on rank 0,
311:     then broadcast to all processors
312:   */
313:   if (rank == 0) {
314:     ex_get_init(exoid,title,&num_dim,&num_vertices,&num_cell,&num_cs,&num_vs,&num_fs);
315:     if (num_cs == 0) {
316:       SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Exodus file does not contain any cell set");
317:     }
318:   }

320:   MPI_Bcast(&num_dim,1,MPIU_INT,0,comm);
321:   MPI_Bcast(&num_cs,1,MPIU_INT,0,comm);
322:   MPI_Bcast(&num_vs,1,MPIU_INT,0,comm);
323:   MPI_Bcast(&num_fs,1,MPIU_INT,0,comm);
324:   MPI_Bcast(title,PETSC_MAX_PATH_LEN+1,MPI_CHAR,0,comm);

326:   PetscObjectSetName((PetscObject)*dm,title);
327:   mesh->setDimension(num_dim);

329:   /*
330:     Read cell sets information then broadcast them
331:   */
332:   const ALE::Obj<PETSC_MESH_TYPE::label_type>& cellSets = mesh->createLabel("Cell Sets");
333:   if (rank == 0) {
334:     /*
335:       Get cell sets IDs
336:     */
337:     PetscMalloc(num_cs*sizeof(int),&cs_id);
338:     ex_get_elem_blk_ids(exoid,cs_id);

340:     /*
341:       Read the cell set connectivity table and build mesh topology
342:       EXO standard requires that cells in cell sets be numbered sequentially
343:       and be pairwise disjoint.
344:     */
345:     for (c=0,cs = 0; cs < num_cs; cs++) {
346:       if (debug) {
347:         PetscPrintf(comm,"Building cell set %i\n",cs);
348:       }
349:       ex_get_elem_block(exoid,cs_id[cs],buffer,&num_cell_in_set,&num_vertex_per_cell,&num_attr);
350:       PetscMalloc(num_vertex_per_cell*num_cell_in_set*sizeof(int),&cs_connect);
351:       ex_get_elem_conn(exoid,cs_id[cs],cs_connect);
352:       /*
353:         EXO uses Fortran-style (1-based) indexing; sieve uses C-style (0-based) indexing and numbers cells first, then vertices, so EXO vertex i becomes sieve point num_cell+i-1.
354:       */
355:       for (v = 0,c_loc = 0; c_loc < num_cell_in_set; c_loc++,c++) {
356:         for (v_loc = 0; v_loc < num_vertex_per_cell; v_loc++,v++) {
357:           if (debug) {
358:             PetscPrintf(PETSC_COMM_SELF,"[%i]:\tinserting vertex \t%i in cell \t%i local vertex number \t%i\n",
359:                                rank,cs_connect[v]+num_cell-1,c,v_loc);
360:           }
361:           flexmesh_sieve->addArrow(sieve_type::point_type(cs_connect[v]+num_cell-1),
362:                           sieve_type::point_type(c),
363:                           v_loc);
364:         }
365:         mesh->setValue(cellSets,c,cs_id[cs]);
366:       }
367:       PetscFree(cs_connect);
368:     }
369:     PetscFree(cs_id);
370:   }
371:   /*
372:     We do not interpolate, and we know that the numbering is compact (the EXO standard requires it),
373:     so there is no need to renumber (renumber = false)
374:   */
375:   ALE::ISieveConverter::convertSieve(*flexmesh_sieve,*sieve,renumbering,false);
376:   mesh->setSieve(sieve);
377:   mesh->stratify();
378:   flexmesh = new FlexMesh(mesh->comm(),mesh->debug());
379:   ALE::ISieveConverter::convertOrientation(*flexmesh_sieve,*sieve,renumbering,flexmesh->getArrowSection("orientation").ptr());

381:   /*
382:     Create Vertex set label
383:   */
384:   const ALE::Obj<PETSC_MESH_TYPE::label_type>& vertexSets = mesh->createLabel("Vertex Sets");
385:   if (num_vs >0) {
386:     if (rank == 0) {
387:       /*
388:         Get vertex set ids
389:       */
390:       PetscMalloc(num_vs*sizeof(int),&vs_id);
391:       ex_get_node_set_ids(exoid,vs_id);
392:       for (vs = 0; vs < num_vs; vs++) {
393:         ex_get_node_set_param(exoid,vs_id[vs],&num_vertex_in_set,&num_attr);
394:         PetscMalloc(num_vertex_in_set * sizeof(int),&vs_vertex_list);
395:         ex_get_node_set(exoid,vs_id[vs],vs_vertex_list);
396:         for (v = 0; v < num_vertex_in_set; v++) {
397:           mesh->addValue(vertexSets,vs_vertex_list[v]+num_cell-1,vs_id[vs]);
398:         }
399:         PetscFree(vs_vertex_list);
400:       }
401:       PetscFree(vs_id);
402:     }
403:   }
404:   /*
405:     Read coordinates
406:   */
407:   PetscMalloc4(num_vertices,PetscReal,&x,
408:                num_vertices,PetscReal,&y,
409:                num_vertices,PetscReal,&z,
410:                num_dim*num_vertices,PetscReal,&coords);
411:   if (rank == 0) {
412:     ex_get_coord(exoid,x,y,z);
414:     if (num_dim > 0) {for (v = 0; v < num_vertices; ++v) {coords[v*num_dim+0] = x[v];}}
415:     if (num_dim > 1) {for (v = 0; v < num_vertices; ++v) {coords[v*num_dim+1] = y[v];}}
416:     if (num_dim > 2) {for (v = 0; v < num_vertices; ++v) {coords[v*num_dim+2] = z[v];}}
417:   }
418:   ALE::SieveBuilder<PETSC_MESH_TYPE>::buildCoordinates(mesh,num_dim,coords);
420:   PetscFree4(x,y,z,coords);

423:   /*
424:   if (rank == 0) {
425:     ex_close(exoid);
426:   }
427:   */
428: #else
429:   SETERRQ(comm, PETSC_ERR_SUP, "This method requires ExodusII support. Reconfigure using --download-exodusii");
430: #endif
431:   return(0);
432: }
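/*
  Example: a sketch of the intended calling sequence for DMMeshCreateExodusNG.
  Since only rank 0 reads from the file, opening it on rank 0 suffices; the
  filename "mesh.exo" is hypothetical.

    int            CPU_word_size = 0, IO_word_size = 0, exoid = -1;
    float          version;
    PetscMPIInt    rank;
    PetscErrorCode ierr;
    DM             dm;

    ierr = MPI_Comm_rank(PETSC_COMM_WORLD, &rank);CHKERRQ(ierr);
    if (rank == 0) exoid = ex_open("mesh.exo", EX_READ, &CPU_word_size, &IO_word_size, &version);
    ierr = DMMeshCreateExodusNG(PETSC_COMM_WORLD, exoid, &dm);CHKERRQ(ierr);
    if (rank == 0) ex_close(exoid);
*/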

436: /*@
437:   DMMeshViewExodusSplit - Writes a DMMesh geometry and topology into several ExodusII files.

439:   Collective on comm

441:   Input Parameters:
442: + dm    - The DM object representing the body
443: - exoid - The id of the ExodusII file to write to, obtained with ex_open or ex_create.
444:           The file must be different on each processor
445:           (suggest using prefix-<rank>.gen or prefix-<rank>.exo)

447:   Face Sets (Side Sets in Exodus terminology) are ignored
448:   
449:   Interpolated meshes are not supported.
450:   
451:   Level: beginner

453: .keywords: mesh,ExodusII
454: .seealso: MeshCreate()
455: @*/
456: PetscErrorCode DMMeshViewExodusSplit(DM dm,PetscInt exoid)
457: {
458: #if defined(PETSC_HAVE_EXODUSII)
459:   PetscBool               debug = PETSC_FALSE;
460:   MPI_Comm                comm;
461:   PetscErrorCode          ierr;

463:   int                     num_dim,num_vertices = 0,num_cells = 0;
464:   int                     num_cs = 0,num_vs = 0;
465:   int                     num_cs_global = 0,num_vs_global = 0;
466:   const char             *title;
467:   IS                      csIS,vsIS;
468:   IS                      csIS_global,vsIS_global;
469:   const PetscInt         *csID,*vsID,*labels;

471:   PetscReal              *coord;

473:   PetscInt                c,v,c_offset;

475:   PetscInt                set,num_cell_in_set,num_vertex_per_cell = 0;
476:   const PetscInt         *cells;
477:   IS                      cellsIS;
478:   const char             *cell_type = "UNKNOWN";
479:   int                    *elem_map,*cs_connect,num_cs_attr=0;
480:   const PetscInt         *elem_connect;

482:   PetscInt                num_vertex_in_set,num_vs_attr=0;
483:   PetscInt               *vertices;
484:   IS                      verticesIS;
485: #endif

488: #if defined(PETSC_HAVE_EXODUSII)
489:   /*
490:     Extract mesh global properties from the DM
491:   */
492:   PetscObjectGetName((PetscObject)dm,&title);
493:   DMMeshGetDimension(dm,&num_dim);
494:   DMMeshGetStratumSize(dm,"height",0,&num_cells);
495:   DMMeshGetStratumSize(dm,"depth",0,&num_vertices);

497:   /*
498:     Get the local and  global number of sets
499:   */
500:   PetscObjectGetComm((PetscObject) dm,&comm);
501:   DMMeshGetLabelSize(dm,"Cell Sets",&num_cs);
502:   DMMeshGetLabelIdIS(dm,"Cell Sets",&csIS);
503:   ISGetTotalIndices(csIS,&labels);
504:   ISGetSize(csIS,&num_cs_global);
505:   PetscSortRemoveDupsInt(&num_cs_global,(PetscInt*)labels);
506:   ISCreateGeneral(comm,num_cs_global,labels,PETSC_COPY_VALUES,&csIS_global);
507:   ISRestoreTotalIndices(csIS,&labels);

509:   DMMeshGetLabelSize(dm,"Vertex Sets",&num_vs);
510:   DMMeshGetLabelIdIS(dm,"Vertex Sets",&vsIS);
511:   ISGetSize(vsIS,&num_vs_global);
512:   if (num_vs_global > 0) {
513:     ISGetTotalIndices(vsIS,&labels);
514:     PetscSortRemoveDupsInt(&num_vs_global,(PetscInt*)labels);
515:     ISCreateGeneral(comm,num_vs_global,labels,PETSC_COPY_VALUES,&vsIS_global);
516:     ISRestoreTotalIndices(vsIS,&labels);
517:   }
518:   ex_put_init(exoid,title,num_dim,num_vertices,num_cells,num_cs_global,num_vs_global,0);
519: 
520:   /*
521:     Write coordinates
522:   */
523:   DMMeshGetCoordinates(dm,PETSC_TRUE,&num_vertices,&num_dim,&coord);
524:   if (debug) {
525:     for (c = 0; c < num_dim; c++) {
526:       PetscPrintf(comm,"Coordinate %i:\n",c);
527:       PetscRealView(num_vertices,&coord[c*num_vertices],PETSC_VIEWER_STDOUT_SELF);
528:     }
529:   }
530:   ex_put_coord(exoid,coord,&coord[num_vertices],&coord[2*num_vertices]);
531: 
532:   /*
533:     Write cell set connectivity table and parameters
534:     Compute the element number map 
535:     The element number map is not needed as long as the cell sets are of the type
536:     0    .. n1
537:     n1+1 .. n2
538:     n2+1 .. n3
539:     which is the case when a mesh is read from an exo file then distributed, but one never knows
540:   */
541: 
542:   /*
543:     The following loop should be based on csIS and not csIS_global, but EXO has no
544:     way to write the block IDs other than ex_put_elem_block,
545:     and Ensight is bothered if all cell set IDs are not present in all files...
546:     Not a huge deal
547:   */
548: 
549:   PetscMalloc(num_cells*sizeof(int),&elem_map);
550:   ISGetIndices(csIS_global,&csID);
551:   for (c_offset = 0,set = 0; set < num_cs_global; set++) {
552:     DMMeshGetStratumSize(dm,"Cell Sets",csID[set],&num_cell_in_set);
553:     DMMeshGetStratumIS(dm,"Cell Sets",csID[set],&cellsIS);
554:     ISGetIndices(cellsIS,&cells);
555:     if (debug) {
556:       PetscPrintf(PETSC_COMM_SELF,"Cell set %i: %i cells\n",csID[set],num_cell_in_set);
557:       PetscIntView(num_cell_in_set,cells,PETSC_VIEWER_STDOUT_SELF);
558:     }
559:     /* 
560:       Add current block indices to elem_map. EXO uses Fortran numbering.
561:     */
562:     for (c = 0; c < num_cell_in_set; c++,c_offset++) {
563:       elem_map[c_offset] = cells[c]+1;
564:     }
565:     /*
566:       We make an educated guess as to the type of cell. This mislabels quads in
567:       a three-dimensional mesh (they are written as TET).
568:       This most likely needs to be fixed by calling ex_put_elem_block again with
569:       the proper parameters.
570:     */
571:     if (num_cell_in_set > 0) {
572:       DMMeshGetConeSize(dm,cells[0],&num_vertex_per_cell);
573:       if (num_vertex_per_cell == 2) {
574:         cell_type = "BAR";
575:       } else if (num_vertex_per_cell == 3) {
576:         cell_type = "TRI";
577:       } else if (num_vertex_per_cell == 8) {
578:         cell_type = "HEX";
579:       } else if (num_vertex_per_cell == 4 && num_dim == 2) {
580:         cell_type = "QUAD";
581:       } else if (num_vertex_per_cell == 4 && num_dim == 3) {
582:         cell_type = "TET";
583:       } else {
584:         cell_type = "UNKNOWN";
585:       }
586:     }
587:     ex_put_elem_block(exoid,csID[set],cell_type,num_cell_in_set,num_vertex_per_cell,num_cs_attr);
588:     /* 
589:       Build connectivity table of the current block
590:     */
591:     PetscMalloc(num_cell_in_set*num_vertex_per_cell*sizeof(int),&cs_connect);
592:     for (c = 0; c < num_cell_in_set; c++) {
593:       DMMeshGetCone(dm,cells[c],&elem_connect);
594:       for (v = 0; v < num_vertex_per_cell; v++) {
595:         cs_connect[c*num_vertex_per_cell+v] = elem_connect[v]-num_cells+1;
596:       }
597:     }
598:     if (num_cell_in_set > 0) {
599:       ex_put_elem_conn(exoid,csID[set],cs_connect);
600:     }
601:     PetscFree(cs_connect);
602:     ISRestoreIndices(cellsIS,&cells);
603:     ISDestroy(&cellsIS);
604:   }
605:   ISRestoreIndices(csIS_global,&csID);
606:   ex_put_elem_num_map(exoid,elem_map);
607:   PetscFree(elem_map);
608: 
609:   /*
610:     Write vertex sets
611:   */
612:   if (num_vs_global > 0) {
613:     ISGetIndices(vsIS_global,&vsID);
614:     for (set = 0; set < num_vs_global; set++) {
615:       DMMeshGetStratumSize(dm,"Vertex Sets",vsID[set],&num_vertex_in_set);
616:       ex_put_node_set_param(exoid,vsID[set],num_vertex_in_set,num_vs_attr);
617: 
618:       DMMeshGetStratumIS(dm,"Vertex Sets",vsID[set],&verticesIS);
619:       ISGetIndices(verticesIS,(const PetscInt**)&vertices);
620: 
621:       for (v = 0; v < num_vertex_in_set; v++) {
622:         vertices[v] -= num_cells-1;
623:       }
624: 
625:       if (num_vertex_in_set > 0) {
626:         ex_put_node_set(exoid,vsID[set],vertices);
627:       }
628:       ISRestoreIndices(verticesIS,(const PetscInt**)&vertices);
629:       ISDestroy(&verticesIS);
630:     }
631:     ISRestoreIndices(vsIS_global,&vsID);
632:   }
633:   ISDestroy(&csIS);
634:   ISDestroy(&vsIS);
635:   ISDestroy(&csIS_global);
636: #else
637:   SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_SUP, "This method requires ExodusII support. Reconfigure using --download-exodusii");
638: #endif
639:   return(0);
640: }
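/*
  Example: a sketch of writing one file per rank with DMMeshViewExodusSplit,
  following the per-rank naming suggested in the manpage above; the prefix is
  hypothetical.

    char           filename[PETSC_MAX_PATH_LEN];
    int            CPU_word_size = sizeof(PetscReal), IO_word_size = sizeof(PetscReal), exoid;
    PetscMPIInt    rank;
    PetscErrorCode ierr;

    ierr  = MPI_Comm_rank(((PetscObject) dm)->comm, &rank);CHKERRQ(ierr);
    ierr  = PetscSNPrintf(filename, sizeof(filename), "prefix-%d.exo", rank);CHKERRQ(ierr);
    exoid = ex_create(filename, EX_CLOBBER, &CPU_word_size, &IO_word_size);
    ierr  = DMMeshViewExodusSplit(dm, exoid);CHKERRQ(ierr);
    ex_close(exoid);
*/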

644: /*@
645:   DMMeshCreateScatterToZeroVertex - Creates the scatter required to scatter local (ghosted)
646:     Vecs associated with a field defined at the vertices of a mesh to a Vec on cpu 0.

648:   Input parameters:
649: . dm - the DMMesh representing the mesh

651:   Output parameters:
652: . scatter - the scatter

654:   Level: advanced

656: .keywords: mesh,ExodusII
657: .seealso: DMMeshCreateScatterToZeroCell DMMeshCreateScatterToZeroVertexSet DMMeshCreateScatterToZeroCellSet
658: @*/
659: PetscErrorCode DMMeshCreateScatterToZeroVertex(DM dm,VecScatter *scatter)
660: {
661:   PetscErrorCode          ierr;
662:   PetscInt                i;
663:   PetscInt               *vertices;
664:   PetscInt                num_vertices,num_vertices_global,my_num_vertices_global;
665:   PetscInt                num_cells,num_cells_global,my_num_cells_global;
666:   Vec                     v_local,v_zero;
667:   MPI_Comm                comm;
668:   int                     rank;
669:   IS                      is_local;
670:   ALE::Obj<PETSC_MESH_TYPE>                                         mesh;
671:   typedef ALE::Mesh<PetscInt,PetscScalar>                           FlexMesh;
672:   std::map<PETSC_MESH_TYPE::point_type,PETSC_MESH_TYPE::point_type> renumbering;


676:   DMMeshGetStratumSize(dm,"depth",0,&num_vertices);
677:   DMMeshGetStratumSize(dm,"height",0,&num_cells);
678:   PetscObjectGetComm((PetscObject) dm,&comm);
679:   MPI_Comm_rank(comm,&rank);

681:   DMMeshGetMesh(dm,mesh);
682:   renumbering = mesh->getRenumbering();

684:   my_num_cells_global = 0;
685:   my_num_vertices_global = 0;
686:   PetscMalloc(num_vertices*sizeof(PetscInt),&vertices);
687:   /*
688:     Get the total number of cells and vertices from the mapping, and build array of global indices of the local vertices
689:     (TO array for the scatter) from the iterator
690:   */

692:   for (FlexMesh::renumbering_type::const_iterator r_iter = renumbering.begin(); r_iter != renumbering.end(); ++r_iter) {
693:     /*
694:        r_iter->first  is a point number in the sequential (global) mesh
695:        r_iter->second is the matching local point number in the distributed (local) mesh
696:     */
697:     if (r_iter->second > num_cells - 1 && r_iter->first > my_num_vertices_global) {
698:       my_num_vertices_global = r_iter->first;
699:     }
700:     if (r_iter->second < num_cells && r_iter->first > my_num_cells_global) {
701:       my_num_cells_global = r_iter->first;
702:     }
703:     if (r_iter->second > num_cells - 1) {
704:       vertices[r_iter->second - num_cells] = r_iter->first;
705:     }
706:   }
707:   my_num_cells_global++;
708:   MPI_Allreduce(&my_num_cells_global,&num_cells_global,1,MPIU_INT,MPI_MAX,comm);
709:   my_num_vertices_global -= num_cells_global-1;
710:   MPI_Allreduce(&my_num_vertices_global,&num_vertices_global,1,MPIU_INT,MPI_MAX,comm);

712:   /*
713:     Convert vertices back from point numbers to vertex numbers
714:   */
715:   for (i = 0; i < num_vertices; i++) {
716:     vertices[i] -= num_cells_global;
717:   }

719:   /*
720:     Build the IS and Vec required to create the VecScatter
721:     A MUCH better way would be to use VecScatterCreateLocal and not create then destroy
722:     Vecs, but I would need to understand better how it works
723:   */
724:   VecCreate(comm,&v_local);
725:   VecSetSizes(v_local,num_vertices,PETSC_DETERMINE);
726:   VecSetFromOptions(v_local);
727:   VecCreate(comm,&v_zero);
728:   VecSetSizes(v_zero,num_vertices_global*(rank==0),PETSC_DECIDE);
729:   VecSetFromOptions(v_zero);

731:   ISCreateGeneral(comm,num_vertices,vertices,PETSC_OWN_POINTER,&is_local);
732:   VecScatterCreate(v_local,PETSC_NULL,v_zero,is_local,scatter);

734:   ISDestroy(&is_local);
735:   VecDestroy(&v_local);
736:   VecDestroy(&v_zero);
737:   return(0);
738: }
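/*
  Example: a sketch of gathering a vertex-based Vec onto rank 0 using the
  scatter created above; v_local and v_zero must be sized as in this routine
  (num_vertices entries locally, the global vertex count on rank 0).

    VecScatter scatter;

    ierr = DMMeshCreateScatterToZeroVertex(dm, &scatter);CHKERRQ(ierr);
    ierr = VecScatterBegin(scatter, v_local, v_zero, INSERT_VALUES, SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(scatter, v_local, v_zero, INSERT_VALUES, SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterDestroy(&scatter);CHKERRQ(ierr);
*/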

742: /*@
743:   DMMeshCreateScatterToZeroVertexSet - Creates the scatter required to scatter local (ghosted)
744:     Vecs associated with a field defined at a vertex set of a mesh to a Vec on cpu 0.

746:   Input parameters:
747: . dm - the DMMesh representing the mesh

749:   Output parameters:
750: . scatter - the scatter

752:   Level: advanced

754: .keywords: mesh,ExodusII
755: .seealso: DMMeshCreateScatterToZeroCell DMMeshCreateScatterToZeroVertex DMMeshCreateScatterToZeroCellSet
756: @*/
757: PetscErrorCode DMMeshCreateScatterToZeroVertexSet(DM dm,IS is_local,IS is_zero,VecScatter *scatter)
758: {
759:   PetscErrorCode          ierr;
760:   const PetscInt         *setvertices_local;
761:   PetscInt               *allvertices,*setvertices_zero,*setvertices_localtozero;
762:   PetscInt                num_vertices,num_vertices_global,my_num_vertices_global=0;
763:   PetscInt                num_cells,num_cells_global,my_num_cells_global=0;
764:   PetscInt                setsize_local,setsize_zero;
765:   Vec                     v_local,v_zero;
766:   MPI_Comm                comm;
767:   int                     rank,i,j,k,l,istart;
768:   IS                      is_localtozero;
769:   ALE::Obj<PETSC_MESH_TYPE>                                         mesh;
770:   typedef ALE::Mesh<PetscInt,PetscScalar>                           FlexMesh;
771:   std::map<PETSC_MESH_TYPE::point_type,PETSC_MESH_TYPE::point_type> renumbering;


775:   DMMeshGetStratumSize(dm,"height",0,&num_cells);
776:   DMMeshGetStratumSize(dm,"depth",0,&num_vertices);
777:   PetscObjectGetComm((PetscObject) dm,&comm);
778:   MPI_Comm_rank(comm,&rank);

780:   DMMeshGetMesh(dm,mesh);
781:   renumbering = mesh->getRenumbering();

783:   PetscMalloc(num_vertices*sizeof(PetscInt),&allvertices);
784:   /*
785:     Build array of global indices of the local vertices (TO array for the scatter)
786:     from the iterator
787:   */

789:   for (FlexMesh::renumbering_type::const_iterator r_iter = renumbering.begin(); r_iter != renumbering.end(); ++r_iter) {
790:     /*
791:        r_iter->first  is a point number in the sequential (global) mesh
792:        r_iter->second is the matching local point number in the distributed (local) mesh
793:     */
794:     if (r_iter->second > num_cells - 1 && r_iter->first > my_num_vertices_global) {
795:       my_num_vertices_global = r_iter->first;
796:     }
797:     if (r_iter->second < num_cells && r_iter->first > my_num_cells_global) {
798:       my_num_cells_global = r_iter->first;
799:     }
800:     if (r_iter->second > num_cells-1 ) {
801:       allvertices[r_iter->second - num_cells] = r_iter->first;
802:     }
803:   }
804:   my_num_cells_global++;
805:   MPI_Allreduce(&my_num_cells_global,&num_cells_global,1,MPIU_INT,MPI_MAX,comm);
806:   my_num_vertices_global -= num_cells_global-1;
807:   MPI_Allreduce(&my_num_vertices_global,&num_vertices_global,1,MPIU_INT,MPI_MAX,comm);

809:   ISGetSize(is_local,&setsize_local);
810:   ISGetSize(is_zero,&setsize_zero);
811:   PetscMalloc(setsize_local*sizeof(PetscInt),&setvertices_localtozero);

813:   if (rank == 0)  {
814:     ISGetIndices(is_zero,(const PetscInt**)&setvertices_zero);
815:   } else {
816:     PetscMalloc(setsize_zero*sizeof(PetscInt),&setvertices_zero);
817:   }
818:   MPI_Bcast(setvertices_zero,setsize_zero,MPIU_INT,0,comm);
819:   ISGetIndices(is_local,&setvertices_local);


822:   istart = 0;
823:   for (i = 0; i < setsize_local; i++) {
824:     j = allvertices[setvertices_local[i]-num_cells];
825:     /*
826:       j is the point number in the sequential mesh of the i-th vertex of the local vertex set.
827:       Search for the matching vertex in the zero vertex set. Because vertices in the zero set are usually
828:       numbered in ascending order, we start our search from the previous find.
829:     */
830: 
831:     for (l = 0, k = istart; l < setsize_zero; l++,k = (l+istart)%setsize_zero) {
832:       if (setvertices_zero[k] == j) {
833:         break;
834:       }
835:     }
836:     setvertices_localtozero[i] = k;
837:     istart = (k+1)%setsize_zero;
838:   }
839:   ISRestoreIndices(is_local,&setvertices_local);
840:   if (rank == 0) {
841:     ISRestoreIndices(is_zero,(const PetscInt**)&setvertices_zero);
842:   } else {
843:     PetscFree(setvertices_zero);
844:   }
845:   /*
846:     Build the IS and Vec required to create the VecScatter
847:     A MUCH better way would be to use VecScatterCreateLocal and not create then destroy
848:     Vecs, but I would need to understand better how it works
849:   */
850:   VecCreate(comm,&v_local);
851:   VecSetSizes(v_local,setsize_local,PETSC_DETERMINE);
852:   VecSetFromOptions(v_local);

854:   VecCreate(comm,&v_zero);
855:   VecSetSizes(v_zero,setsize_zero*(rank==0),PETSC_DECIDE);
856:   VecSetFromOptions(v_zero);

858:   ISCreateGeneral(comm,setsize_local,setvertices_localtozero,PETSC_OWN_POINTER,&is_localtozero);
859:   VecScatterCreate(v_local,PETSC_NULL,v_zero,is_localtozero,scatter);

861:   ISDestroy(&is_localtozero);
862:   PetscFree(allvertices);
863:   VecDestroy(&v_local);
864:   VecDestroy(&v_zero);
865:   return(0);
866: }
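/*
  Example: a sketch of the calling pattern, mirroring VecViewExodusVertexSet
  below: is_local comes from DMMeshGetStratumIS(dm, "Vertex Sets", vsID, ...)
  and is_zero lists the sequential-mesh point numbers of the same vertex set
  on rank 0 (and is empty elsewhere).

    VecScatter scatter;

    ierr = DMMeshCreateScatterToZeroVertexSet(dm, is_local, is_zero, &scatter);CHKERRQ(ierr);
    ierr = VecScatterBegin(scatter, v_local, v_zero, INSERT_VALUES, SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(scatter, v_local, v_zero, INSERT_VALUES, SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterDestroy(&scatter);CHKERRQ(ierr);
*/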

870: /*@
871:   DMMeshCreateScatterToZeroCell - Creates the scatter required to scatter local (ghosted)
872:     Vecs associated with a field defined at the cells of a mesh to a Vec on cpu 0.

874:   Input parameters:
875: . dm - the DMMesh representing the mesh

877:   Output parameters:
878: . scatter - the scatter

880:   Level: advanced

882: .keywords: mesh,ExodusII
883: .seealso: DMMeshCreateScatterToZeroVertex DMMeshCreateScatterToZeroVertexSet DMMeshCreateScatterToZeroCellSet
884: @*/
885: PetscErrorCode DMMeshCreateScatterToZeroCell(DM dm,VecScatter *scatter)
886: {
887:   PetscErrorCode          ierr;
888:   PetscInt               *cells;
889:   PetscInt                num_cells,num_cells_global,my_num_cells_global;
890:   Vec                     v_local,v_zero;
891:   MPI_Comm                comm;
892:   int                     rank;
893:   IS                      is_local;
894:   ALE::Obj<PETSC_MESH_TYPE>                                         mesh;
895:   typedef ALE::Mesh<PetscInt,PetscScalar>                           FlexMesh;
896:   std::map<PETSC_MESH_TYPE::point_type,PETSC_MESH_TYPE::point_type> renumbering;


900:   DMMeshGetStratumSize(dm,"height",0,&num_cells);
901:   PetscObjectGetComm((PetscObject) dm,&comm);
902:   MPI_Comm_rank(comm,&rank);

904:   DMMeshGetMesh(dm,mesh);
905:   renumbering = mesh->getRenumbering();

907:   my_num_cells_global = 0;
908:   PetscMalloc(num_cells*sizeof(PetscInt),&cells);
909:   /*
910:     Get the total number of cells from the mapping, and build array of global indices of the local cells (TO array for the scatter)
911:     from the iterator
912:   */

914:   for (FlexMesh::renumbering_type::const_iterator r_iter = renumbering.begin(); r_iter != renumbering.end(); ++r_iter) {
915:     /*
916:        r_iter->first  is a point number in the sequential (global) mesh
917:        r_iter->second is the matching local point number in the distributed (local) mesh
918:     */
919:     if (r_iter->second < num_cells && r_iter->first > my_num_cells_global) {
920:       my_num_cells_global = r_iter->first;
921:     }
922:     if (r_iter->second < num_cells ) {
923:       cells[r_iter->second] = r_iter->first;
924:     }
925:   }
926:   my_num_cells_global++;
927:   MPI_Allreduce(&my_num_cells_global,&num_cells_global,1,MPIU_INT,MPI_MAX,comm);

929:   /*
930:     Build the IS and Vec required to create the VecScatter
931:     A MUCH better way would be to use VecScatterCreateLocal and not create then destroy
932:     Vecs, but I would need to understand better how it works
933:   */
934:   VecCreate(comm,&v_local);
935:   VecSetSizes(v_local,num_cells,PETSC_DETERMINE);
936:   VecSetFromOptions(v_local);
937:   VecCreate(comm,&v_zero);
938:   VecSetSizes(v_zero,num_cells_global*(rank==0),PETSC_DECIDE);
939:   VecSetFromOptions(v_zero);

941:   ISCreateGeneral(comm,num_cells,cells,PETSC_OWN_POINTER,&is_local);
942:   VecScatterCreate(v_local,PETSC_NULL,v_zero,is_local,scatter);

944:   ISDestroy(&is_local);
945:   VecDestroy(&v_local);
946:   VecDestroy(&v_zero);
947:   return(0);
948: }
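/*
  Usage is analogous to DMMeshCreateScatterToZeroVertex above, for cell-based
  Vecs (num_cells entries locally, the global cell count on rank 0), e.g.

    ierr = DMMeshCreateScatterToZeroCell(dm, &scatter);CHKERRQ(ierr);

  followed by VecScatterBegin/End with SCATTER_FORWARD to gather on rank 0,
  or SCATTER_REVERSE to distribute from rank 0.
*/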

952: /*@
953:   DMMeshCreateScatterToZeroCellSet - Creates the scatter required to scatter local (ghosted)
954:     Vecs associated with a field defined at a cell set of a mesh to a Vec on cpu 0.

956:   Input parameters:
957: . dm - the DMMesh representing the mesh

959:   Output parameters:
960: . scatter - the scatter

962:   Level: advanced

964: .keywords: mesh,ExodusII
965: .seealso: DMMeshCreateScatterToZeroCell DMMeshCreateScatterToZeroVertexSet DMMeshCreateScatterToZeroVertex
966: @*/
967: PetscErrorCode DMMeshCreateScatterToZeroCellSet(DM dm,IS is_local,IS is_zero,VecScatter *scatter)
968: {
969:   PetscErrorCode          ierr;
970:   const PetscInt         *setcells_local;
971:   PetscInt               *allcells,*setcells_zero,*setcells_localtozero;
972:   PetscInt                num_cells;
973:   PetscInt                setsize_local,setsize_zero;
974:   Vec                     v_local,v_zero;
975:   MPI_Comm                comm;
976:   int                     rank,i,j,k,l,istart;
977:   IS                      is_localtozero;
978:   ALE::Obj<PETSC_MESH_TYPE>                                         mesh;
979:   typedef ALE::Mesh<PetscInt,PetscScalar>                           FlexMesh;
980:   std::map<PETSC_MESH_TYPE::point_type,PETSC_MESH_TYPE::point_type> renumbering;


984:   DMMeshGetStratumSize(dm,"height",0,&num_cells);
985:   PetscObjectGetComm((PetscObject) dm,&comm);
986:   MPI_Comm_rank(comm,&rank);

988:   DMMeshGetMesh(dm,mesh);
989:   renumbering = mesh->getRenumbering();

991:   PetscMalloc(num_cells*sizeof(PetscInt),&allcells);
992:   /*
993:     Build array of global indices of the local cells (TO array for the scatter)
994:     from the iterator
995:   */

997:   for (FlexMesh::renumbering_type::const_iterator r_iter = renumbering.begin(); r_iter != renumbering.end(); ++r_iter) {
998:     /*
999:        r_iter->first  is a point number in the sequential (global) mesh
1000:        r_iter->second is the matching local point number in the distributed (local) mesh
1001:     */
1002:     if (r_iter->second < num_cells ) {
1003:       allcells[r_iter->second] = r_iter->first;
1004:     }
1005:   }

1007:   ISGetSize(is_local,&setsize_local);
1008:   ISGetSize(is_zero,&setsize_zero);
1009:   PetscMalloc(setsize_local*sizeof(PetscInt),&setcells_localtozero);

1011:   if (rank == 0)  {
1012:     ISGetIndices(is_zero,(const PetscInt**)&setcells_zero);
1013:   } else {
1014:     PetscMalloc(setsize_zero*sizeof(PetscInt),&setcells_zero);
1015:   }
1016:   MPI_Bcast(setcells_zero,setsize_zero,MPIU_INT,0,comm);
1017:   ISGetIndices(is_local,&setcells_local);

1019:   istart = 0;
1020:   for (i = 0; i < setsize_local; i++) {
1021:     j = allcells[setcells_local[i]];
1022:     /*
1023:       j is the cell number in the sequential mesh of the i-th cell of the local cell set.
1024:       Search for the matching cell in the zero cell set. Because cells in the zero set are usually
1025:       numbered sequentially, we start our search from the previous find.
1026:     */
1027: 
1028:     for (l = 0, k = istart; l < setsize_zero; l++,k = (l+istart)%setsize_zero) {
1029:       if (setcells_zero[k] == j) {
1030:         break;
1031:       }
1032:     }
1033:     setcells_localtozero[i] = k;
1034:     istart = (k+1)%setsize_zero;
1035:   }
1036:   ISRestoreIndices(is_local,&setcells_local);
1037:   if (rank == 0) {
1038:     ISRestoreIndices(is_zero,(const PetscInt**)&setcells_zero);
1039:   } else {
1040:     PetscFree(setcells_zero);
1041:   }
1042:   /*
1043:     Build the IS and Vec required to create the VecScatter
1044:     A MUCH better way would be to use VecScatterCreateLocal and not create then destroy
1045:     Vecs, but I would need to understand better how it works
1046:   */
1047:   VecCreate(comm,&v_local);
1048:   VecSetSizes(v_local,setsize_local,PETSC_DETERMINE);
1049:   VecSetFromOptions(v_local);

1051:   VecCreate(comm,&v_zero);
1052:   VecSetSizes(v_zero,setsize_zero*(rank==0),PETSC_DECIDE);
1053:   VecSetFromOptions(v_zero);

1055:   ISCreateGeneral(comm,setsize_local,setcells_localtozero,PETSC_OWN_POINTER,&is_localtozero);
1056:   VecScatterCreate(v_local,PETSC_NULL,v_zero,is_localtozero,scatter);

1058:   ISDestroy(&is_localtozero);
1059:   PetscFree(allcells);
1060:   VecDestroy(&v_local);
1061:   VecDestroy(&v_zero);
1062:   return(0);
1063: }
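/*
  Usage mirrors DMMeshCreateScatterToZeroVertexSet above, with cell sets:

    ierr = DMMeshCreateScatterToZeroCellSet(dm, is_local, is_zero, &scatter);CHKERRQ(ierr);

  where is_local comes from DMMeshGetStratumIS(dm, "Cell Sets", csID, ...) and
  is_zero lists the sequential-mesh cell numbers of the set on rank 0.
*/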

1067: /*@

1069:   VecViewExodusVertex - Write a Vec representing nodal values of some field in an exodusII file.

1071:   Collective on comm

1073:   The behavior differs depending on the size of comm:
1074:     if size(comm) == 1 each processor writes its local vector into a separate file
1075:     if size(comm) > 1, the values are sent to cpu 0, and written in a single file


1078:   Input Parameters:
1079: + dm   - the DMMesh representing the mesh
1080: . v    - the LOCAL vector of values to be saved (i.e. with ghost values) obtained with SectionRealCreateLocalVector.
1081:          if v represents a field with several components, the block size must be set accordingly using VecSetBlockSize()
1082: . exoid - the id of the exodusII file (obtained with ex_open or ex_create)
1083: . step  - the time step to write
1084: - exofield - the position in the exodus field of the first component

1086:   Interpolated meshes are not supported.

1088:   Level: beginner

1090: .keywords: mesh,ExodusII
1091: .seealso: MeshCreate() MeshCreateExodus() SectionRealCreateLocalVector() VecLoadExodusVertex()
1092:           VecViewExodusVertexSet() VecLoadExodusVertexSet() VecViewExodusCellSet() VecLoadExodusCellSet()
1093:           VecViewExodusCell() VecLoadExodusCell()
1094: @*/
1095: PetscErrorCode VecViewExodusVertex(DM dm,Vec v,MPI_Comm comm,PetscInt exoid,PetscInt step,PetscInt exofield)
1096: {
1097: #ifdef PETSC_HAVE_EXODUSII
1098:   PetscInt                rank,num_proc;
1099:   PetscErrorCode          ierr;
1100:   PetscInt                c,num_vertices,num_vertices_zero,num_dof;
1101:   Vec                     vdof,vdof_zero;
1102:   PetscReal               *vdof_array;
1103:   VecScatter              scatter;
1104: #endif

1107: #ifdef PETSC_HAVE_EXODUSII
1108:   MPI_Comm_size(comm,&num_proc);
1109:   MPI_Comm_rank(comm,&rank);
1110:   VecGetBlockSize(v,&num_dof);

1112:   DMMeshGetStratumSize(dm,"depth",0,&num_vertices);

1114:   VecCreate(comm,&vdof);
1115:   VecSetSizes(vdof,num_vertices,PETSC_DETERMINE);
1116:   VecSetFromOptions(vdof);

1118:   if (num_proc == 1) {
1120:     for (c = 0; c < num_dof; c++) {
1121:       VecStrideGather(v,c,vdof,INSERT_VALUES);
1122:       VecGetArray(vdof,&vdof_array);
1123:       ierr = ex_put_nodal_var(exoid,step,exofield+c,num_vertices,vdof_array);
1124:       VecRestoreArray(vdof,&vdof_array);
1125:       if (ierr != 0) {
1126:         SETERRQ2(comm,PETSC_ERR_FILE_WRITE,"Unable to write to file id %i. ex_put_nodal_var returned %i",exoid,ierr);
1127:       }
1128:     }
1130:   } else {
1131:     /*
1132:       Build the scatter sending one dof towards cpu 0
1133:     */
1134:     DMMeshCreateScatterToZeroVertex(dm,&scatter);
1135:     /*
1136:       Another ugly hack to get the total number of vertices in the mesh. This has got to stop...
1137:     */
1138:     num_vertices_zero = scatter->to_n;

1140:     VecCreate(comm,&vdof_zero);
1141:     VecSetSizes(vdof_zero,num_vertices_zero,PETSC_DECIDE);
1142:     VecSetFromOptions(vdof_zero);

1144:     for (c = 0; c < num_dof; c++) {
1145:       VecStrideGather(v,c,vdof,INSERT_VALUES);
1146:       VecScatterBegin(scatter,vdof,vdof_zero,INSERT_VALUES,SCATTER_FORWARD);
1147:       VecScatterEnd(scatter,vdof,vdof_zero,INSERT_VALUES,SCATTER_FORWARD);
1148:       if (rank == 0) {
1149:         VecGetArray(vdof_zero,&vdof_array);
1150:         ex_put_nodal_var(exoid,step,exofield+c,num_vertices_zero,vdof_array);
1151:         VecRestoreArray(vdof_zero,&vdof_array);
1152:       }
1153:     }
1154:     /*
1155:       Clean up
1156:     */
1157:     VecScatterDestroy(&scatter);
1158:     VecDestroy(&vdof_zero);
1159:   }
1160:   VecDestroy(&vdof);
1161: #else
1162:   SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_SUP, "This method requires ExodusII support. Reconfigure using --download-exodusii");
1163: #endif
1164:   return(0);
1165: }
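/*
  Example: a sketch of writing a 3-component nodal field at time step 1; the
  block size, step, and exofield values are assumptions, and the file is
  assumed to have declared its nodal variables beforehand (e.g. with
  ex_put_var_param(exoid, "n", 3)).

    ierr = VecSetBlockSize(v, 3);CHKERRQ(ierr);
    ierr = VecViewExodusVertex(dm, v, comm, exoid, 1, 1);CHKERRQ(ierr);
*/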

1169: /*@

1171:   VecLoadExodusVertex - Loads a Vec representing nodal values of some field from an exodusII file.

1173:   Collective on comm

1175:   The behavior differs depending on the size of comm:
1176:     if size(comm) == 1 each processor reads its local vector from a separate file
1177:     if size(comm) > 1, the values are read by cpu 0 from a single file then scattered to each processor


1180:   Input Parameters:
1181: + dm   - the DMMesh representing the mesh
1182: . v    - the LOCAL vector of values to be read (i.e. with ghost values) obtained with SectionRealCreateLocalVector.
1183:          if v represents a field with several components, the block size must be set accordingly using VecSetBlockSize()
1184: . exoid - the id of the exodusII file (obtained with ex_open or ex_create)
1185: . step  - the time step to read
1186: - exofield - the position in the exodus field of the first component

1188:   Interpolated meshes are not supported.

1190:   Level: beginner

1192: .keywords: mesh,ExodusII
1193: .seealso: MeshCreate() MeshCreateExodus() SectionRealCreateLocalVector() VecViewExodusVertex()
1194:           VecViewExodusVertexSet() VecLoadExodusVertexSet() VecViewExodusCellSet() VecLoadExodusCellSet()
1195:           VecViewExodusCell() VecLoadExodusCell()

1197: @*/
1198: PetscErrorCode VecLoadExodusVertex(DM dm,Vec v,MPI_Comm comm,PetscInt exoid,PetscInt step,PetscInt exofield)
1199: {
1200: #ifdef PETSC_HAVE_EXODUSII
1201:   PetscInt                rank,num_proc;
1202:   PetscErrorCode          ierr;
1203:   PetscInt                c,num_vertices,num_vertices_global,num_dof;
1204:   Vec                     vdof,vzero;
1205:   PetscReal               *vdof_array;
1206:   VecScatter              scatter;
1207: #endif

1209: #ifdef PETSC_HAVE_EXODUSII
1211:   MPI_Comm_size(comm,&num_proc);
1212:   MPI_Comm_rank(comm,&rank);
1213:   VecGetBlockSize(v,&num_dof);

1215:   DMMeshGetStratumSize(dm,"depth",0,&num_vertices);

1217:   VecCreate(comm,&vdof);
1218:   VecSetSizes(vdof,num_vertices,PETSC_DETERMINE);
1219:   VecSetFromOptions(vdof);

1221:   if (num_proc == 1) {
1222:     for (c = 0; c < num_dof; c++) {
1223:       VecGetArray(vdof,&vdof_array);
1224:       ierr = ex_get_nodal_var(exoid,step,exofield+c,num_vertices,vdof_array);
1225:       VecRestoreArray(vdof,&vdof_array);
1226:       if (ierr != 0) {
1227:         SETERRQ2(comm,PETSC_ERR_FILE_READ,"Unable to read file id %i. ex_get_nodal_var returned %i",exoid,ierr);
1228:       }
1229:       VecStrideScatter(vdof,c,v,INSERT_VALUES);
1230:     }
1231:   } else {
1232:     /*
1233:       Build the scatter sending one dof towards cpu 0
1234:     */
1235:     DMMeshCreateScatterToZeroVertex(dm,&scatter);
1236:     /*
1237:       Another ugly hack to get the total number of vertices in the mesh. This has got to stop...
1238:     */
1239:     num_vertices_global = scatter->to_n;
1240:     MPI_Bcast(&num_vertices_global,1,MPIU_INT,0,comm);

1242:     VecCreate(comm,&vzero);
1243:     VecSetSizes(vzero,num_vertices_global*(rank==0),PETSC_DECIDE);
1244:     VecSetFromOptions(vzero);

1246:     for (c = 0; c < num_dof; c++) {
1247:       if (rank == 0) {
1248:         VecGetArray(vzero,&vdof_array);
1249:         ex_get_nodal_var(exoid,step,exofield+c,num_vertices_global,vdof_array);
1250:         VecRestoreArray(vzero,&vdof_array);
1251:       }
1252:       VecScatterBegin(scatter,vzero,vdof,INSERT_VALUES,SCATTER_REVERSE);
1253:       VecScatterEnd(scatter,vzero,vdof,INSERT_VALUES,SCATTER_REVERSE);
1254:       VecStrideScatter(vdof,c,v,INSERT_VALUES);
1255:     }
1256:     /*
1257:       Clean up
1258:     */
1259:     VecScatterDestroy(&scatter);
1260:     VecDestroy(&vzero);
1261:   }
1262:   VecDestroy(&vdof);
1263: #else
1264:   SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_SUP, "This method requires ExodusII support. Reconfigure using --download-exodusii");
1265: #endif
1266:   return(0);
1267: }
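/*
  Example: the read counterpart of the sketch after VecViewExodusVertex above,
  with the same assumed layout:

    ierr = VecSetBlockSize(v, 3);CHKERRQ(ierr);
    ierr = VecLoadExodusVertex(dm, v, comm, exoid, 1, 1);CHKERRQ(ierr);
*/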

1271: /*@

1273:   VecViewExodusVertexSet - Write a Vec representing nodal values of some field at a vertex set in an exodusII file.

1275:   Collective on comm

1277:   The behavior differs depending on the size of comm:
1278:     if size(comm) == 1 each processor writes its local vector into a separate file
1279:     if size(comm) > 1, the values are sent to cpu 0, and written in a single file


1282:   Input Parameters:
1283: + dm   - the DMMesh representing the mesh
1284: . v    - the LOCAL vector of values to be saved (i.e. with ghost values) obtained with SectionRealCreateLocalVector.
1285:          if v represents a field with several components, the block size must be set accordingly using VecSetBlockSize()
1286: . vsID  - the vertex set ID
1287: . exoid - the id of the exodusII file (obtained with ex_open or ex_create)
1288: . step  - the time step to write
1289: - exofield - the position in the exodus field of the first component

1291:   Interpolated meshes are not supported.

1293:   Level: beginner

1295: .keywords: mesh,ExodusII
1296: .seealso: MeshCreate() MeshCreateExodus() SectionRealCreateLocalVector() VecLoadExodusVertexSet()
1297:           VecViewExodusVertex() VecLoadExodusVertex() VecViewExodusCellSet() VecLoadExodusCellSet()
1298:           VecViewExodusCell() VecLoadExodusCell()

1300: @*/
1301: PetscErrorCode VecViewExodusVertexSet(DM dm,Vec v,PetscInt vsID,MPI_Comm comm,PetscInt exoid,PetscInt step,PetscInt exofield)
1302: {
1303: #ifdef PETSC_HAVE_EXODUSII
1304:   PetscInt                rank,num_proc;
1305:   PetscErrorCode          ierr;
1306:   PetscInt                c,num_vertices_in_set=0,num_vertices_zero=0,num_cells_zero,num_dof;
1307:   Vec                     vdof,vdof_zero;
1308:   PetscReal               *vdof_array;
1309:   VecScatter              scatter;
1310:   PetscInt                i,junk1,junk2,junk3,junk4,junk5;
1311:   char                    junk[MAX_LINE_LENGTH+1];
1312:   IS                      vsIS,vsIS_zero;
1313:   PetscInt               *vs_vertices_zero;
1314:   MPI_Comm                dmcomm;
1315: #endif

1317: #ifdef PETSC_HAVE_EXODUSII
1319:   MPI_Comm_size(comm,&num_proc);
1320:   MPI_Comm_rank(comm,&rank);
1321:   VecGetBlockSize(v,&num_dof);
1322:   PetscObjectGetComm((PetscObject) dm,&dmcomm);
1323: 
1324:   DMMeshGetStratumSize(dm,"Vertex Sets",vsID,&num_vertices_in_set);
1325:   VecCreateSeq(PETSC_COMM_SELF,num_vertices_in_set,&vdof);

1327:   if (num_proc == 1) {
1328:     if (num_vertices_in_set > 0) {
1329:       for (c = 0; c < num_dof; c++) {
1330:         VecStrideGather(v,c,vdof,INSERT_VALUES);
1331:         VecGetArray(vdof,&vdof_array);
1332:         ierr = ex_put_nset_var(exoid,step,exofield+c,vsID,num_vertices_in_set,vdof_array);
1333:         if (ierr != 0) {
1334:           SETERRQ2(comm,PETSC_ERR_FILE_WRITE,"Unable to write to file id %i. ex_put_nset_var returned %i",exoid,ierr);
1335:         }
1336:         VecRestoreArray(vdof,&vdof_array);
1337:       }
1338:     }
1339:   } else {
1340:     /*
1341:       Build the scatter sending one dof towards cpu 0
1342:     */
1343:     DMMeshGetStratumIS(dm,"Vertex Sets",vsID,&vsIS);
1344:     if (rank == 0) {
1345:       ex_get_node_set_param(exoid,vsID,&num_vertices_zero,&junk1);
1346:     }
1347:     PetscMalloc(num_vertices_zero*sizeof(PetscInt),&vs_vertices_zero);
1348:     if (rank == 0) {
1349:       ex_get_node_set(exoid,vsID,vs_vertices_zero);
1350:       ex_get_init(exoid,junk,&junk1,&junk2,&num_cells_zero,&junk3,&junk4,&junk5);
1351:       for (i = 0; i < num_vertices_zero; i++) {
1352:         vs_vertices_zero[i] += num_cells_zero-1;
1353:       }
1354:     }
1355:     ISCreateGeneral(dmcomm,num_vertices_zero,vs_vertices_zero,PETSC_OWN_POINTER,&vsIS_zero);
1356:     DMMeshCreateScatterToZeroVertexSet(dm,vsIS,vsIS_zero,&scatter);
1357:     ISDestroy(&vsIS);
1358:     ISDestroy(&vsIS_zero);

1360:     VecCreateSeq(PETSC_COMM_SELF,num_vertices_zero*(rank == 0),&vdof_zero);

1362:     for (c = 0; c < num_dof; c++) {
1363:       VecStrideGather(v,c,vdof,INSERT_VALUES);
1364:       VecScatterBegin(scatter,vdof,vdof_zero,INSERT_VALUES,SCATTER_FORWARD);
1365:       VecScatterEnd(scatter,vdof,vdof_zero,INSERT_VALUES,SCATTER_FORWARD);

1367:       if (rank == 0) {
1368:         VecGetArray(vdof_zero,&vdof_array);
1369:         ierr = ex_put_nset_var(exoid,step,exofield+c,vsID,num_vertices_zero,vdof_array);
1370:         if (ierr != 0) {
1371:           SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_FILE_WRITE,"Unable to write to file id %i. ex_put_nset_var returned %i",exoid,ierr);
1372:         }
1373:         VecRestoreArray(vdof_zero,&vdof_array);
1374:       }
1375:     }
1376:     /*
1377:       Clean up
1378:     */
1379:     VecScatterDestroy(&scatter);
1380:     VecDestroy(&vdof_zero);
1381:   }
1382:   VecDestroy(&vdof);
1383: #else
1384:   SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_SUP, "This method requires ExodusII support. Reconfigure using --download-exodusii");
1385: #endif
1386:   return(0);
1387: }
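/*
  Example: a sketch of writing a single-component field on vertex set 3; the
  set ID, step, and exofield values are assumptions.

    ierr = VecSetBlockSize(v, 1);CHKERRQ(ierr);
    ierr = VecViewExodusVertexSet(dm, v, 3, comm, exoid, 1, 1);CHKERRQ(ierr);
*/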

1391: /*@

1393:   VecLoadExodusVertexSet - Loads a Vec representing nodal values of some field at a vertex set from an exodusII file.

1395:   Collective on comm

1397:   The behavior differs depending on the size of comm:
1398:     if size(comm) == 1 each processor reads its local vector from a separate file
1399:     if size(comm) > 1, the values are read by cpu 0 from a single file then scattered to each processor


1402:   Input Parameters:
1403: + dm   - the DMMesh representing the mesh
1404: . v    - the LOCAL vector of values to be read (i.e. with ghost values) obtained with SectionRealCreateLocalVector.
1405:          if v represents a field with several components, the block size must be set accordingly using VecSetBlockSize()
1406: . vsID  - the vertex set ID
1407: . exoid - the id of the exodusII file (obtained with ex_open or ex_create)
1408: . step  - the time step to read
1409: - exofield - the position in the exodus field of the first component

1411:   Interpolated meshes are not supported.

1413:   Level: beginner

1415: .keywords: mesh,ExodusII
1416: .seealso: MeshCreate() MeshCreateExodus() SectionRealCreateLocalVector() VecLoadExodusVertex()

1418: @*/
1419: PetscErrorCode VecLoadExodusVertexSet(DM dm,Vec v,PetscInt vsID,MPI_Comm comm,PetscInt exoid,PetscInt step,PetscInt exofield)
1420: {
1421: #ifdef PETSC_HAVE_EXODUSII
1422:   PetscInt                rank,num_proc;
1423:   PetscErrorCode          ierr;
1424:   PetscInt                c,num_vertices_in_set=0,num_vertices_zero=0,num_cells_zero,num_dof;
1425:   Vec                     vdof,vdof_zero;
1426:   PetscReal               *vdof_array;
1427:   VecScatter              scatter;
1428:   PetscInt                i,junk1,junk2,junk3,junk4,junk5;
1429:   char                    junk[MAX_LINE_LENGTH+1];
1430:   IS                      vsIS,vsIS_zero;
1431:   PetscInt               *vs_vertices_zero;
1432:   MPI_Comm                dmcomm;
1433: #endif

1435: #ifdef PETSC_HAVE_EXODUSII
1437:   MPI_Comm_size(comm,&num_proc);
1438:   MPI_Comm_rank(comm,&rank);
1439:   VecGetBlockSize(v,&num_dof);
1440:   PetscObjectGetComm((PetscObject) dm,&dmcomm);

1442:   DMMeshGetStratumSize(dm,"Vertex Sets",vsID,&num_vertices_in_set);
1443:   VecCreateSeq(PETSC_COMM_SELF,num_vertices_in_set,&vdof);

1445:   if (num_proc == 1) {
1446:     if (num_vertices_in_set > 0) {
1447:       for (c = 0; c < num_dof; c++) {
1448:         VecGetArray(vdof,&vdof_array);
1449:         ierr = ex_get_nset_var(exoid,step,exofield+c,vsID,num_vertices_in_set,vdof_array);
1450:         if (ierr != 0) {
1451:           SETERRQ2(comm,PETSC_ERR_FILE_READ,"Unable to read from file id %i. ex_get_nset_var returned %i",exoid,ierr);
1452:         }
1453:         VecRestoreArray(vdof,&vdof_array);
1454:         VecStrideScatter(vdof,c,v,INSERT_VALUES);
1455:       }
1456:     }
1457:   } else {
1458:     /*
1459:       Build the scatter sending one dof towards cpu 0
1460:     */
1461:     DMMeshGetStratumIS(dm,"Vertex Sets",vsID,&vsIS);
1462:     if (rank == 0) {
1463:       ex_get_node_set_param(exoid,vsID,&num_vertices_zero,&junk1);
1464:     }
1465:     PetscMalloc(num_vertices_zero*sizeof(PetscInt),&vs_vertices_zero);
1466:     if (rank == 0) {
1467:       ex_get_node_set(exoid,vsID,vs_vertices_zero);
1468:       ex_get_init(exoid,junk,&junk1,&junk2,&num_cells_zero,&junk3,&junk4,&junk5);
1469:       for (i = 0; i < num_vertices_zero; i++) {
1470:         vs_vertices_zero[i] += num_cells_zero-1;
1471:       }
1472:     }
1473:     ISCreateGeneral(dmcomm,num_vertices_zero,vs_vertices_zero,PETSC_OWN_POINTER,&vsIS_zero);
1474:     DMMeshCreateScatterToZeroVertexSet(dm,vsIS,vsIS_zero,&scatter);
1475:     ISDestroy(&vsIS);
1476:     ISDestroy(&vsIS_zero);

1478:     VecCreateSeq(PETSC_COMM_SELF,num_vertices_zero*(rank == 0),&vdof_zero);

1480:     for (c = 0; c < num_dof; c++) {
1481:       if (rank == 0) {
1482:         VecGetArray(vdof_zero,&vdof_array);
1483:         ierr = ex_get_nset_var(exoid,step,exofield+c,vsID,num_vertices_zero,vdof_array);
1484:         if (ierr != 0) {
1485:           SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_FILE_READ,"Unable to read from file id %i. ex_get_nset_var returned %i",exoid,ierr);
1486:         }
1487:         VecRestoreArray(vdof_zero,&vdof_array);
1488:       }
1489:       VecScatterBegin(scatter,vdof_zero,vdof,INSERT_VALUES,SCATTER_REVERSE);
1490:       VecScatterEnd(scatter,vdof_zero,vdof,INSERT_VALUES,SCATTER_REVERSE);

1492:       VecStrideScatter(vdof,c,v,INSERT_VALUES);
1493:     }
1494:     /*
1495:       Clean up
1496:     */
1497:     VecScatterDestroy(&scatter);
1498:     VecDestroy(&vdof_zero);
1499:   }
1500:   VecDestroy(&vdof);
1501: #else
1502:   SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_SUP, "This method requires ExodusII support. Reconfigure using --download-exodusii");
1503: #endif
1504:   return(0);
1505: }
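
/*
  A minimal usage sketch for VecLoadExodusVertexSet. The vertex set ID 3, the
  block size 2, the time step 1 and the exodus field position 1 are
  hypothetical, and `dm`, `comm` and an open `exoid` are assumed to exist.
  The Vec holds component c of vertex i of the set at index 2*i+c, matching
  the VecStrideScatter() layout used above.

    PetscInt       num_vertices;
    Vec            v;
    PetscErrorCode ierr;

    ierr = DMMeshGetStratumSize(dm,"Vertex Sets",3,&num_vertices);CHKERRQ(ierr);
    ierr = VecCreateSeq(PETSC_COMM_SELF,2*num_vertices,&v);CHKERRQ(ierr);
    ierr = VecSetBlockSize(v,2);CHKERRQ(ierr);
    ierr = VecLoadExodusVertexSet(dm,v,3,comm,exoid,1,1);CHKERRQ(ierr);
    ierr = VecDestroy(&v);CHKERRQ(ierr);
*/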

1509: /*@

1511:   VecViewExodusCell - Write a Vec representing the values of a field at all cells to an exodusII file.

1513:   Input Parameters:
1514: + dm   - the DMMesh representing the mesh
1515: . v    - the LOCAL vector of values to be saved (i.e. with ghost values) obtained with SectionRealCreateLocalVector.
1516:          if v represents a field with several components, the block size must be set accordingly using VecSetBlockSize().
1517: . comm - the communicator associated with the exo file
1518:   + if size(comm) == 1 each processor writes its local vector into a separate file
1519:   . if size(comm) > 1, the values are sent to cpu 0, and written in a single file
1520: . exoid - the id of the exodusII file (obtained with ex_open or ex_create)
1521: . step  - the time step to write
1522: - exofield - the position in the exodus field of the first component

1524:   Interpolated meshes are not supported.

1526:   Level: beginner

1528: .keywords: mesh,ExodusII
1529: .seealso: MeshCreate() MeshCreateExodus() SectionRealCreateLocalVector() VecLoadExodusCell()
1530:           VecViewExodusVertexSet() VecLoadExodusVertexSet() VecViewExodusCellSet() VecLoadExodusCellSet()
1531:           VecViewExodusVertex() VecLoadExodusVertex()

1533: @*/
1534: PetscErrorCode VecViewExodusCell(DM dm,Vec v,MPI_Comm comm,PetscInt exoid,PetscInt step,PetscInt exofield)
1535: {
1536: #ifdef PETSC_HAVE_EXODUSII
1537:   PetscErrorCode          ierr;
1538:   PetscInt                rank,num_proc,num_dof,num_cells,num_cells_in_set,num_cells_zero=0;
1539:   PetscInt                num_cs_zero,num_cs,set,c,istart;
1540:   PetscInt               *setsID_zero;
1541:   const PetscInt         *setsID;
1542:   IS                      setIS,csIS,setIS_zero;
1543:   VecScatter              setscatter,setscattertozero;
1544:   Vec                     vdof,vdof_set,vdof_set_zero;
1545:   PetscReal              *vdof_set_array;
1546:   PetscInt                junk1,junk2,junk3,junk4,junk5;
1547:   char                    junk[MAX_LINE_LENGTH+1];
1548:   MPI_Comm                dmcomm;
1549: #endif

1551: #ifdef PETSC_HAVE_EXODUSII
1553:   MPI_Comm_size(comm,&num_proc);
1554:   MPI_Comm_rank(comm,&rank);
1555:   VecGetBlockSize(v,&num_dof);
1556:   PetscObjectGetComm((PetscObject) dm,&dmcomm);
1557:   DMMeshGetLabelSize(dm,"Cell Sets",&num_cs);
1558:   DMMeshGetStratumSize(dm,"height",0,&num_cells);
1559: 
1560:   VecCreateSeq(PETSC_COMM_SELF,num_cells,&vdof);
1561: 
1562:   if (num_proc == 1) {
1564:     DMMeshGetLabelIdIS(dm,"Cell Sets",&csIS);
1565:     ISGetIndices(csIS,&setsID);

1567:     for (set = 0; set < num_cs; set++) {
1568:       /*
1569:         Get the IS for the current set, the Vec containing a single dof and create
1570:         the scatter for the restriction to the current cell set
1571:       */
1572:       DMMeshGetStratumIS(dm,"Cell Sets",setsID[set],&setIS);
1573:       DMMeshGetStratumSize(dm,"Cell Sets",setsID[set],&num_cells_in_set);
1574:       VecCreateSeq(PETSC_COMM_SELF,num_cells_in_set,&vdof_set);
1575:       VecScatterCreate(vdof,setIS,vdof_set,PETSC_NULL,&setscatter);
1576:       for (c = 0; c < num_dof; c++) {
1577:         VecStrideGather(v,c,vdof,INSERT_VALUES);

1579:         /*
1580:           Get the restriction of vdof to the current set
1581:         */
1582:         VecScatterBegin(setscatter,vdof,vdof_set,INSERT_VALUES,SCATTER_FORWARD);
1583:         VecScatterEnd(setscatter,vdof,vdof_set,INSERT_VALUES,SCATTER_FORWARD);
1584: 
1585:         /*
1586:           Write array to disk
1587:         */
1588:         VecGetArray(vdof_set,&vdof_set_array);
1589:         ierr = ex_put_elem_var(exoid,step,exofield+c,setsID[set],num_cells_in_set,vdof_set_array);
1590:         if (ierr != 0) {
1591:           SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_FILE_WRITE,"Unable to write to file id %i. ex_put_elem_var returned %i",exoid,ierr);
1592:         }
1593:         VecRestoreArray(vdof_set,&vdof_set_array);
1594:       }
1595:       VecDestroy(&vdof_set);
1596:       VecScatterDestroy(&setscatter);
1597:       ISDestroy(&setIS);
1598:     }
1599:     ISRestoreIndices(csIS,&setsID);
1600:     ISDestroy(&csIS);
1601:   } else {
1602:     /*
1603:       Get the number of blocks and the list of IDs directly from the file. This is easier
1604:       than trying to reconstruct the global lists from all individual IS
1605:     */
1606:     if (rank == 0) {
1607:       ex_get_init(exoid,junk,&junk1,&junk2,&junk3,&num_cs_zero,&junk4,&junk5);
1608:     }
1609:     MPI_Bcast(&num_cs_zero,1,MPIU_INT,0,dmcomm);
1610:     PetscMalloc(num_cs_zero * sizeof(PetscInt),&setsID_zero);
1611:     if (rank == 0) {
1612:       ex_get_elem_blk_ids(exoid,setsID_zero);
1613:     }
1614:     MPI_Bcast(setsID_zero,num_cs_zero,MPIU_INT,0,dmcomm);
1615: 
1616:     istart = 0;
1617:     for (set = 0; set < num_cs_zero; set++) {
1618:       /*
1619:         Get the size of the set on cpu 0 and create a Vec to receive values
1620:       */
1621:       if (rank == 0) {
1622:         ex_get_elem_block(exoid,setsID_zero[set],junk,&num_cells_zero,&junk1,&junk2);
1623:       }
1624:       VecCreateSeq(PETSC_COMM_SELF,num_cells_zero,&vdof_set_zero);

1626:       /*
1627:         Get the IS for the current set, the Vec containing a single dof and create
1628:         the scatter for the restriction to the current cell set
1629:       */
1630:       DMMeshGetStratumIS(dm,"Cell Sets",setsID_zero[set],&setIS);
1631:       DMMeshGetStratumSize(dm,"Cell Sets",setsID_zero[set],&num_cells_in_set);
1632:       VecCreateSeq(PETSC_COMM_SELF,num_cells_in_set,&vdof_set);
1633:       VecScatterCreate(vdof,setIS,vdof_set,PETSC_NULL,&setscatter);
1634:       /*
1635:         Get the scatter to send the values of a single dof at a single block to cpu 0
1636:       */
1637:       ISCreateStride(dmcomm,num_cells_zero,istart,1,&setIS_zero);
1638:       DMMeshCreateScatterToZeroCellSet(dm,setIS,setIS_zero,&setscattertozero);
1639:       ISDestroy(&setIS_zero);

1641:       for (c = 0; c < num_dof; c++) {
1642:         VecStrideGather(v,c,vdof,INSERT_VALUES);

1644:         /*
1645:           Get the restriction of vdof to the current set
1646:         */
1647:         VecScatterBegin(setscatter,vdof,vdof_set,INSERT_VALUES,SCATTER_FORWARD);
1648:         VecScatterEnd(setscatter,vdof,vdof_set,INSERT_VALUES,SCATTER_FORWARD);
1649: 
1650:         /*
1651:           Scatter vdof_set to cpu 0
1652:         */
1653:         VecScatterBegin(setscattertozero,vdof_set,vdof_set_zero,INSERT_VALUES,SCATTER_FORWARD);
1654:         VecScatterEnd(setscattertozero,vdof_set,vdof_set_zero,INSERT_VALUES,SCATTER_FORWARD);
1655: 
1656:         /*
1657:           Write array to disk
1658:         */
1659:         if (rank == 0) {
1660:           VecGetArray(vdof_set_zero,&vdof_set_array);
1661:           ex_put_elem_var(exoid,step,exofield+c,setsID_zero[set],num_cells_zero,vdof_set_array);
1662:           VecRestoreArray(vdof_set_zero,&vdof_set_array);
1663:         }
1664:       }
1665:       istart += num_cells_zero;
1666:       VecDestroy(&vdof_set_zero);
1667:       VecDestroy(&vdof_set);
1668:       /*
1669:         Does this really need to be protected with this test?
1670:       */
1671:       if (num_cells_in_set > 0) {
1675:         VecScatterDestroy(&setscatter);
1676:       }
1677:       VecScatterDestroy(&setscattertozero);
1678:       ISDestroy(&setIS);
1679:     }
1680:     PetscFree(setsID_zero);
1681:   }
1682: 
1683:   VecDestroy(&vdof);
1684: #else
1685:   SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_SUP, "This method requires ExodusII support. Reconfigure using --download-exodusii");
1686: #endif
1687:   return(0);
1688: }
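
/*
  A minimal usage sketch for VecViewExodusCell: writing a scalar cell field at
  time step 1. `dm`, `comm` and an open `exoid` are assumed to exist, as is a
  prior declaration of at least one elemental variable in the file (e.g. with
  ex_put_var_param(exoid,"e",1)); the step, field position and fill code are
  hypothetical.

    PetscInt       num_cells;
    Vec            v;
    PetscErrorCode ierr;

    ierr = DMMeshGetStratumSize(dm,"height",0,&num_cells);CHKERRQ(ierr);
    ierr = VecCreateSeq(PETSC_COMM_SELF,num_cells,&v);CHKERRQ(ierr);
    ierr = VecSetBlockSize(v,1);CHKERRQ(ierr);
    /* ... fill v with one value per cell ... */
    ierr = VecViewExodusCell(dm,v,comm,exoid,1,1);CHKERRQ(ierr);
    ierr = VecDestroy(&v);CHKERRQ(ierr);
*/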

1692: /*@

1694:   VecLoadExodusCell - Read a Vec representing the values of a field at all cells from an exodusII file.

1696:   Input Parameters:
1697: + dm   - the DMMesh representing the mesh
1698: . v    - the LOCAL vector of values to be read (i.e. with ghost values) obtained with SectionRealCreateLocalVector.
1699:          if v represents a field with several components, the block size must be set accordingly using VecSetBlockSize().
1700: . comm - the communicator associated with the exo file
1701:   + if size(comm) == 1 each processor reads its local vector from a separate file
1702:   . if size(comm) > 1, the values are read on cpu 0 from a single file and scattered to their owning processes
1703: . exoid - the id of the exodusII file (obtained with ex_open or ex_create)
1704: . step  - the time step to read
1705: - exofield - the position in the exodus field of the first component

1707:   Interpolated meshes are not supported.

1709:   Level: beginner

1711: .keywords: mesh,ExodusII
1712: .seealso: MeshCreate() MeshCreateExodus() SectionRealCreateLocalVector() VecViewExodusCell()
1713:           VecViewExodusVertexSet() VecLoadExodusVertexSet() VecViewExodusCellSet() VecLoadExodusCellSet()
1714:           VecViewExodusVertex() VecLoadExodusVertex()

1716: @*/
1717: PetscErrorCode VecLoadExodusCell(DM dm,Vec v,MPI_Comm comm,PetscInt exoid,PetscInt step,PetscInt exofield)
1718: {
1719: #ifdef PETSC_HAVE_EXODUSII
1720:   PetscErrorCode          ierr;
1721:   PetscInt                rank,num_proc,num_dof,num_cells,num_cells_in_set,num_cells_zero=0;
1722:   PetscInt                num_cs_zero,num_cs,set,c,istart;
1723:   PetscInt               *setsID_zero;
1724:   const PetscInt         *setsID;
1725:   IS                      setIS,csIS,setIS_zero;
1726:   VecScatter              setscatter,setscattertozero;
1727:   Vec                     vdof,vdof_set,vdof_set_zero;
1728:   PetscReal              *vdof_set_array;
1729:   PetscInt                junk1,junk2,junk3,junk4,junk5;
1730:   char                    junk[MAX_LINE_LENGTH+1];
1731:   MPI_Comm                dmcomm;
1732: #endif

1734: #ifdef PETSC_HAVE_EXODUSII
1736:   MPI_Comm_size(comm,&num_proc);
1737:   MPI_Comm_rank(comm,&rank);
1738:   VecGetBlockSize(v,&num_dof);
1739:   PetscObjectGetComm((PetscObject) dm,&dmcomm);
1740:   DMMeshGetLabelSize(dm,"Cell Sets",&num_cs);
1741:   DMMeshGetStratumSize(dm,"height",0,&num_cells);
1742: 
1743:   VecCreateSeq(PETSC_COMM_SELF,num_cells,&vdof);
1744: 
1745:   if (num_proc == 1) {
1747:     DMMeshGetLabelIdIS(dm,"Cell Sets",&csIS);
1748:     ISGetIndices(csIS,&setsID);

1750:     for (c = 0; c < num_dof; c++) {
1751:       for (set = 0; set < num_cs; set++) {
1752:         /*
1753:           Get the IS for the current set, the Vec containing a single dof and create
1754:           the scatter for the restriction to the current cell set
1755:         */
1756:         DMMeshGetStratumIS(dm,"Cell Sets",setsID[set],&setIS);
1757:         DMMeshGetStratumSize(dm,"Cell Sets",setsID[set],&num_cells_in_set);
1758:         VecCreateSeq(PETSC_COMM_SELF,num_cells_in_set,&vdof_set);
1759:         VecScatterCreate(vdof,setIS,vdof_set,PETSC_NULL,&setscatter);
1760:         /*
1761:           Read array from disk
1762:         */
1763:         VecGetArray(vdof_set,&vdof_set_array);
1764:         ex_get_elem_var(exoid,step,exofield+c,setsID[set],num_cells_in_set,vdof_set_array);
1765:         VecRestoreArray(vdof_set,&vdof_set_array);
1766:         /*
1767:           Copy values to full dof vec
1768:         */
1769:         VecScatterBegin(setscatter,vdof_set,vdof,INSERT_VALUES,SCATTER_REVERSE);
1770:         VecScatterEnd(setscatter,vdof_set,vdof,INSERT_VALUES,SCATTER_REVERSE);
1771:         /* Destroy the per-set objects inside the loop so that they are not leaked */
1772:         VecDestroy(&vdof_set);
1773:         VecScatterDestroy(&setscatter);
1774:         ISDestroy(&setIS);
1775:       }
1776:       VecStrideScatter(vdof,c,v,INSERT_VALUES);
1777:     }
1778:     ISRestoreIndices(csIS,&setsID);
1779:     ISDestroy(&csIS);
1780:   } else {
1781:     /*
1782:       Get the number of blocks and the list of IDs directly from the file. This is easier
1783:       than trying to reconstruct the global lists from all individual IS
1784:     */
1785:     if (rank == 0) {
1786:       ex_get_init(exoid,junk,&junk1,&junk2,&junk3,&num_cs_zero,&junk4,&junk5);
1787:     }
1788:     MPI_Bcast(&num_cs_zero,1,MPIU_INT,0,dmcomm);
1789:     PetscMalloc(num_cs_zero * sizeof(PetscInt),&setsID_zero);
1790:     if (rank == 0) {
1791:       ex_get_elem_blk_ids(exoid,setsID_zero);
1792:     }
1793:     MPI_Bcast(setsID_zero,num_cs_zero,MPIU_INT,0,dmcomm);
1794: 
1795:     for (c = 0; c < num_dof; c++) {
1796:       istart = 0;
1797:       for (set = 0; set < num_cs_zero; set++) {
1798:         /*
1799:           Get the size of the set on cpu 0 and create a Vec to receive values
1800:         */
1801:         if (rank == 0) {ex_get_elem_block(exoid,setsID_zero[set],junk,&num_cells_zero,&junk1,&junk2);}
1802:         VecCreateSeq(PETSC_COMM_SELF,num_cells_zero,&vdof_set_zero);
1803: 
1804:         /*
1805:           Get the IS for the current set, the Vec containing a single dof and create
1806:           the scatter for the restriction to the current cell set
1807:         */
1808:         DMMeshGetStratumIS(dm,"Cell Sets",setsID_zero[set],&setIS);
1809:         DMMeshGetStratumSize(dm,"Cell Sets",setsID_zero[set],&num_cells_in_set);
1810:         VecCreateSeq(PETSC_COMM_SELF,num_cells_in_set,&vdof_set);
1811:         VecScatterCreate(vdof,setIS,vdof_set,PETSC_NULL,&setscatter);
1812:         /*
1813:           Get the scatter to send the values of a single dof at a single block to cpu 0
1814:         */
1815:         ISCreateStride(dmcomm,num_cells_zero,istart,1,&setIS_zero);
1816:         DMMeshCreateScatterToZeroCellSet(dm,setIS,setIS_zero,&setscattertozero);
1817:         ISDestroy(&setIS_zero);

1819:         /*
1820:           Read array from disk
1821:         */
1822:         if (rank == 0) {
1823:           VecGetArray(vdof_set_zero,&vdof_set_array);
1824:           ex_get_elem_var(exoid,step,exofield+c,setsID_zero[set],num_cells_zero,vdof_set_array);
1825:           VecRestoreArray(vdof_set_zero,&vdof_set_array);
1826:         }
1827:         /*
1828:           Send values back to their owning cpu
1829:         */
1830:         VecScatterBegin(setscattertozero,vdof_set_zero,vdof_set,INSERT_VALUES,SCATTER_REVERSE);
1831:         VecScatterEnd(setscattertozero,vdof_set_zero,vdof_set,INSERT_VALUES,SCATTER_REVERSE);
1832:         /*
1833:           Copy the values associated to the current set back into the component vec
1834:         */
1835:         VecScatterBegin(setscatter,vdof_set,vdof,INSERT_VALUES,SCATTER_REVERSE);
1836:         VecScatterEnd(setscatter,vdof_set,vdof,INSERT_VALUES,SCATTER_REVERSE);
1837:         istart += num_cells_zero;
1838:         MPI_Bcast(&istart,1,MPIU_INT,0,dmcomm);
1839:         VecDestroy(&vdof_set_zero);
1840:         VecDestroy(&vdof_set);
1841:         VecScatterDestroy(&setscatter);
1842:         VecScatterDestroy(&setscattertozero);
1843:         ISDestroy(&setIS);
1844:       }
1845:       /*
1846:         Copy the component back into v
1847:       */
1848:       VecStrideScatter(vdof,c,v,INSERT_VALUES);
1849:     }
1850:     PetscFree(setsID_zero);
1852:   }

1854:   VecDestroy(&vdof);
1855: #else
1856:   SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_SUP, "This method requires ExodusII support. Reconfigure using --download-exodusii");
1857: #endif
1858:   return(0);
1859: }
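
/*
  A minimal usage sketch for VecLoadExodusCell: reading the scalar cell field
  from the sketch above back into a local Vec. Sizing mirrors the writer;
  `dm`, `comm` and an open `exoid` are assumed to exist.

    PetscInt       num_cells;
    Vec            v;
    PetscErrorCode ierr;

    ierr = DMMeshGetStratumSize(dm,"height",0,&num_cells);CHKERRQ(ierr);
    ierr = VecCreateSeq(PETSC_COMM_SELF,num_cells,&v);CHKERRQ(ierr);
    ierr = VecSetBlockSize(v,1);CHKERRQ(ierr);
    ierr = VecLoadExodusCell(dm,v,comm,exoid,1,1);CHKERRQ(ierr);
    ierr = VecDestroy(&v);CHKERRQ(ierr);
*/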

1863: /*@

1865:   VecViewExodusCellSet - Write a Vec representing the values of a field at a cell set to an exodusII file.

1867:   Input Parameters:
1868: + dm   - the DMMesh representing the mesh
1869: . v    - the LOCAL vector of values to be saved (i.e. with ghost values) obtained with SectionRealCreateLocalVector.
1870:          if v represents a field with several components, the block size must be set accordingly using VecSetBlockSize().
1871: . csID - the ID of the cell set
1872: . comm - the communicator associated with the exo file
1873:   + if size(comm) == 1 each processor writes its local vector into a separate file
1874:   . if size(comm) > 1, the values are sent to cpu 0, and written in a single file
1875: . exoid - the id of the exodusII file (obtained with ex_open or ex_create)
1876: . step  - the time step to write
1877: - exofield - the position in the exodus field of the first component

1879:   Interpolated meshes are not supported.

1881:   Level: beginner

1883: .keywords: mesh,ExodusII
1884: .seealso: MeshCreate() MeshCreateExodus() SectionRealCreateLocalVector() VecLoadExodusCellSet()
1885:           VecViewExodusVertexSet() VecLoadExodusVertexSet() VecViewExodusCell() VecLoadExodusCell()
1886:           VecViewExodusVertex() VecLoadExodusVertex()

1888: @*/
1889: PetscErrorCode VecViewExodusCellSet(DM dm,Vec v,PetscInt csID,MPI_Comm comm,PetscInt exoid,PetscInt step,PetscInt exofield)
1890: {
1891: #ifdef PETSC_HAVE_EXODUSII
1892:   PetscErrorCode          ierr;
1893:   PetscInt                rank,num_proc,num_dof,num_cells,num_cells_zero=0;
1894:   PetscInt                i,c;
1895:   PetscReal              *vdof_array;
1896:   IS                      csIS,csIS_zero;
1897:   Vec                     vdof,vdof_zero;
1898:   VecScatter              scatter;
1899:   PetscInt                istart = 0;

1901:   PetscInt                junk1,junk2,junk3,junk4,junk5;
1902:   char                    junk[MAX_LINE_LENGTH+1];
1903:   PetscInt                num_cs_global;
1904:   PetscInt               *csIDs;
1905:   MPI_Comm                dmcomm;
1906: #endif

1908: #ifdef PETSC_HAVE_EXODUSII
1910:   MPI_Comm_size(comm,&num_proc);
1911:   MPI_Comm_rank(comm,&rank);
1912:   VecGetBlockSize(v,&num_dof);
1913:   PetscObjectGetComm((PetscObject) dm,&dmcomm);
1914:   DMMeshGetStratumIS(dm,"Cell Sets",csID,&csIS);
1915:   ISGetSize(csIS,&num_cells);

1917:   VecCreateSeq(PETSC_COMM_SELF,num_cells,&vdof);
1918: 
1919:   if (num_proc == 1) {
1920:     for (c = 0; c < num_dof; c++) {
1921:       VecStrideGather(v,c,vdof,INSERT_VALUES);
1922:       VecGetArray(vdof,&vdof_array);
1923:       ierr = ex_put_elem_var(exoid,step,exofield+c,csID,num_cells,vdof_array);
1924:       VecRestoreArray(vdof,&vdof_array);
1925:       if (ierr != 0) {
1926:         SETERRQ2(comm,PETSC_ERR_FILE_WRITE,"Unable to write to file id %i. ex_put_elem_var returned %i",exoid,ierr);
1927:       }
1928:     }
1929:   } else {
1930:     /*
1931:       Get the global size of the cell set from the file because we can.
1932:       There is no direct way to get the index of the first cell of a cell set from exo
1933:       (it depends on the ordering in the file), so we scan the cell sets in file order
1934:       and accumulate their sizes as offsets until we reach csID.
1935:     */
1936:     if (rank == 0) {
1937:       istart = 0;
1938:       ex_get_init(exoid,junk,&junk1,&junk2,&junk3,&num_cs_global,&junk4,&junk5);
1939:       PetscMalloc(num_cs_global * sizeof(PetscInt),&csIDs);
1940:       ex_get_elem_blk_ids(exoid,csIDs);
1941:       for (i = 0; i < num_cs_global; i++) {
1942:         ex_get_elem_block(exoid,csIDs[i],junk,&num_cells_zero,&junk1,&junk2);
1943:         if (csIDs[i] == csID) {
1944:           break;
1945:         } else {
1946:           istart += num_cells_zero;
1947:         }
1948:       }
1949:       PetscFree(csIDs);
1950:       ex_get_elem_block(exoid,csID,junk,&num_cells_zero,&junk1,&junk2);
1951:     }
1952:     ISCreateStride(dmcomm,num_cells_zero,istart,1,&csIS_zero);
1953:     DMMeshCreateScatterToZeroCellSet(dm,csIS,csIS_zero,&scatter);
1954:     ISDestroy(&csIS_zero);

1956:     VecCreateSeq(PETSC_COMM_SELF,num_cells_zero,&vdof_zero);

1958:     for (c = 0; c < num_dof; c++) {
1959:       VecStrideGather(v,c,vdof,INSERT_VALUES);
1960:       VecScatterBegin(scatter,vdof,vdof_zero,INSERT_VALUES,SCATTER_FORWARD);
1961:       VecScatterEnd(scatter,vdof,vdof_zero,INSERT_VALUES,SCATTER_FORWARD);
1962:       if (rank == 0) {
1963:         VecGetArray(vdof_zero,&vdof_array);
1964:         ex_put_elem_var(exoid,step,exofield+c,csID,num_cells_zero,vdof_array);
1965:         VecRestoreArray(vdof_zero,&vdof_array);
1966:       }
1967:     }
1968:     VecDestroy(&vdof_zero);
1969:     VecScatterDestroy(&scatter);
1970:   }
1971:   ISDestroy(&csIS);
1972:   VecDestroy(&vdof);
1973: #else
1974:   SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_SUP, "This method requires ExodusII support. Reconfigure using --download-exodusii");
1975: #endif
1976:   return(0);
1977: }
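
/*
  A minimal usage sketch for VecViewExodusCellSet: writing a 3-component field
  on the cells of cell set 5. The set ID, block size, step and field position
  are hypothetical; `dm`, `comm` and an open `exoid` are assumed to exist. The
  Vec holds component c of cell i of the set at index 3*i+c, matching the
  VecStrideGather() layout used above.

    PetscInt       num_cells;
    Vec            v;
    PetscErrorCode ierr;

    ierr = DMMeshGetStratumSize(dm,"Cell Sets",5,&num_cells);CHKERRQ(ierr);
    ierr = VecCreateSeq(PETSC_COMM_SELF,3*num_cells,&v);CHKERRQ(ierr);
    ierr = VecSetBlockSize(v,3);CHKERRQ(ierr);
    /* ... fill v ... */
    ierr = VecViewExodusCellSet(dm,v,5,comm,exoid,1,1);CHKERRQ(ierr);
    ierr = VecDestroy(&v);CHKERRQ(ierr);
*/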

1981: /*@

1983:   VecLoadExodusCellSet - Read a Vec representing the values of a field at a cell set from an exodusII file.

1985:   Input Parameters:
1986: + dm   - the DMMesh representing the mesh
1987: . v    - the LOCAL vector of values to be read (i.e. with ghost values) obtained with SectionRealCreateLocalVector.
1988:          if v represents a field with several components, the block size must be set accordingly using VecSetBlockSize().
1989: . csID - the ID of the cell set
1990: . comm - the communicator associated with the exo file
1991:   + if size(comm) == 1 each processor reads its local vector from a separate file
1992:   . if size(comm) > 1, the values are read on cpu 0 from a single file and scattered to their owning processes
1993: . exoid - the id of the exodusII file (obtained with ex_open or ex_create)
1994: . step  - the time step to read
1995: - exofield - the position in the exodus field of the first component

1997:   Interpolated meshes are not supported.

1999:   Level: beginner

2001: .keywords: mesh,ExodusII
2002: .seealso: MeshCreate() MeshCreateExodus() SectionRealCreateLocalVector() VecViewExodusCellSet()
2003:           VecViewExodusVertexSet() VecLoadExodusVertexSet() VecViewExodusCell() VecLoadExodusCell()
2004:           VecViewExodusVertex() VecLoadExodusVertex()

2006: @*/
2007: PetscErrorCode VecLoadExodusCellSet(DM dm,Vec v,PetscInt csID,MPI_Comm comm,PetscInt exoid,PetscInt step,PetscInt exofield)
2008: {
2009: #ifdef PETSC_HAVE_EXODUSII
2010:   PetscErrorCode          ierr;
2011:   PetscInt                rank,num_proc,num_dof,num_cells,num_cells_zero=0;
2012:   PetscInt                i,c;
2013:   PetscReal              *vdof_array;
2014:   IS                      csIS,csIS_zero;
2015:   Vec                     vdof,vdof_zero;
2016:   VecScatter              scatter;
2017:   PetscInt                istart = 0;

2019:   PetscInt                junk1,junk2,junk3,junk4,junk5;
2020:   PetscInt                num_cs_global;
2021:   PetscInt               *csIDs;
2022:   char                    junk[MAX_LINE_LENGTH+1];
2023:   MPI_Comm                dmcomm;
2024: #endif

2026: #ifdef PETSC_HAVE_EXODUSII
2028:   MPI_Comm_size(comm,&num_proc);
2029:   MPI_Comm_rank(comm,&rank);
2030:   VecGetBlockSize(v,&num_dof);
2031:   PetscObjectGetComm((PetscObject) dm,&dmcomm);
2032: 
2033:   DMMeshGetStratumIS(dm,"Cell Sets",csID,&csIS);
2034:   ISGetSize(csIS,&num_cells);

2036:   VecCreateSeq(PETSC_COMM_SELF,num_cells,&vdof);

2038:   if (num_proc == 1) {
2039:     for (c = 0; c < num_dof; c++) {
2040:       VecGetArray(vdof,&vdof_array);
2041:       ierr = ex_get_elem_var(exoid,step,exofield+c,csID,num_cells,vdof_array);
2042:       VecRestoreArray(vdof,&vdof_array);
2043:       if (ierr != 0) {
2044:         SETERRQ2(comm,PETSC_ERR_FILE_READ,"Unable to read from file id %i. ex_get_elem_var returned %i",exoid,ierr);
2045:       }
2046:       VecStrideScatter(vdof,c,v,INSERT_VALUES);
2047:     }
2048:   } else {
2049:     /*
2050:       Get the global size of the cell set from the file because we can.
2051:       There is no direct way to get the index of the first cell of a cell set from exo
2052:       (it depends on the ordering in the file), so we scan the cell sets in file order
2053:       and accumulate their sizes as offsets until we reach csID.
2054:     */
2055:     if (rank == 0) {
2056:       istart = 0;
2057:       ex_get_init(exoid,junk,&junk1,&junk2,&junk3,&num_cs_global,&junk4,&junk5);
2058:       PetscMalloc(num_cs_global * sizeof(PetscInt),&csIDs);
2059:       ex_get_elem_blk_ids(exoid,csIDs);
2060:       for (i = 0; i < num_cs_global; i++) {
2061:         ex_get_elem_block(exoid,csIDs[i],junk,&num_cells_zero,&junk1,&junk2);
2062:         if (csIDs[i] == csID) {
2063:           break;
2064:         } else {
2065:           istart += num_cells_zero;
2066:         }
2067:       }
2068:       PetscFree(csIDs);
2069:       ex_get_elem_block(exoid,csID,junk,&num_cells_zero,&junk1,&junk2);
2070:     }
2071:     ISCreateStride(dmcomm,num_cells_zero,istart,1,&csIS_zero);
2072:     DMMeshCreateScatterToZeroCellSet(dm,csIS,csIS_zero,&scatter);
2073:     ISDestroy(&csIS_zero);

2075:     VecCreateSeq(PETSC_COMM_SELF,num_cells_zero,&vdof_zero);
2076: 
2077:     for (c = 0; c < num_dof; c++) {
2078:       if (rank == 0) {
2079:         VecGetArray(vdof_zero,&vdof_array);
2080:         ex_get_elem_var(exoid,step,exofield+c,csID,num_cells_zero,vdof_array);
2081:         VecRestoreArray(vdof_zero,&vdof_array);
2082:       }
2083:       VecScatterBegin(scatter,vdof_zero,vdof,INSERT_VALUES,SCATTER_REVERSE);
2084:       VecScatterEnd(scatter,vdof_zero,vdof,INSERT_VALUES,SCATTER_REVERSE);
2085:       VecStrideScatter(vdof,c,v,INSERT_VALUES);
2086:     }
2087:     VecDestroy(&vdof_zero);
2088:     VecScatterDestroy(&scatter);
2089:   }
2090:   ISDestroy(&csIS);
2091:   VecDestroy(&vdof);
2092: #else
2093:   SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_SUP, "This method requires ExodusII support. Reconfigure using --download-exodusii");
2094: #endif
2095:   return(0);
2096: }
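
/*
  A minimal usage sketch for VecLoadExodusCellSet: reading the 3-component
  cell set field from the sketch above back into a local Vec. `dm`, `comm`
  and an open `exoid` are assumed to exist.

    PetscInt       num_cells;
    Vec            v;
    PetscErrorCode ierr;

    ierr = DMMeshGetStratumSize(dm,"Cell Sets",5,&num_cells);CHKERRQ(ierr);
    ierr = VecCreateSeq(PETSC_COMM_SELF,3*num_cells,&v);CHKERRQ(ierr);
    ierr = VecSetBlockSize(v,3);CHKERRQ(ierr);
    ierr = VecLoadExodusCellSet(dm,v,5,comm,exoid,1,1);CHKERRQ(ierr);
    ierr = VecDestroy(&v);CHKERRQ(ierr);
*/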