Actual source code: meshexodus.c

petsc-3.4.5 2014-06-29
  1: #include <petscdmmesh_formats.hh>   /*I "petscdmmesh.h" I*/
  2: #include <petsc-private/vecimpl.h>

  4: #if defined(PETSC_HAVE_EXODUSII)
  5: #include <netcdf.h>
  6: #include <exodusII.h>

 10: PetscErrorCode PetscReadExodusII(MPI_Comm comm, const char filename[], ALE::Obj<PETSC_MESH_TYPE>& mesh)
 11: {
 12:   int               exoid;
 13:   int               CPU_word_size = 0, IO_word_size = 0;
 14:   const PetscMPIInt rank          = mesh->commRank();
 15:   float             version;
 16:   char              title[PETSC_MAX_PATH_LEN+1], elem_type[PETSC_MAX_PATH_LEN+1];
 17:   int               num_dim, num_nodes, num_elem, num_elem_blk, num_node_sets, num_side_sets;
 18:   int               ierr;

 21:   // Open EXODUS II file
 22:   exoid = ex_open(filename, EX_READ, &CPU_word_size, &IO_word_size, &version); if (exoid <= 0) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_FILE_OPEN, "Unable to open ExodusII file %s", filename);
 23:   // Read database parameters
 24:   ex_get_init(exoid, title, &num_dim, &num_nodes, &num_elem, &num_elem_blk, &num_node_sets, &num_side_sets);

 26:   // Read vertex coordinates
 27:   float *x, *y, *z;
 28:   PetscMalloc3(num_nodes,float,&x,num_nodes,float,&y,num_nodes,float,&z);
 29:   ex_get_coord(exoid, x, y, z);

 31:   // Read element connectivity
 32:   int  *eb_ids, *num_elem_in_block, *num_nodes_per_elem, *num_attr;
 33:   int  **connect;
 34:   char **block_names;
 35:   if (num_elem_blk > 0) {
 36:     PetscMalloc5(num_elem_blk,int,&eb_ids,num_elem_blk,int,&num_elem_in_block,num_elem_blk,int,&num_nodes_per_elem,num_elem_blk,int,&num_attr,num_elem_blk,char*,&block_names);
 37:     ex_get_elem_blk_ids(exoid, eb_ids);
 38:     for (int eb = 0; eb < num_elem_blk; ++eb) {
 39:       PetscMalloc((PETSC_MAX_PATH_LEN+1) * sizeof(char), &block_names[eb]);
 40:     }
 41:     ex_get_names(exoid, EX_ELEM_BLOCK, block_names);
 42:     for (int eb = 0; eb < num_elem_blk; ++eb) {
 43:       ex_get_elem_block(exoid, eb_ids[eb], elem_type, &num_elem_in_block[eb], &num_nodes_per_elem[eb], &num_attr[eb]);
 44:       PetscFree(block_names[eb]);
 45:     }
 46:     PetscMalloc(num_elem_blk * sizeof(int*),&connect);
 47:     for (int eb = 0; eb < num_elem_blk; ++eb) {
 48:       if (num_elem_in_block[eb] > 0) {
 49:         PetscMalloc(num_nodes_per_elem[eb]*num_elem_in_block[eb] * sizeof(int),&connect[eb]);
 50:         ex_get_elem_conn(exoid, eb_ids[eb], connect[eb]);
 51:       }
 52:     }
 53:   }

 55:   // Read node sets
 56:   int *ns_ids, *num_nodes_in_set;
 57:   int **node_list;
 58:   if (num_node_sets > 0) {
 59:     PetscMalloc3(num_node_sets,int,&ns_ids,num_node_sets,int,&num_nodes_in_set,num_node_sets,int*,&node_list);
 60:     ex_get_node_set_ids(exoid, ns_ids);
 61:     for (int ns = 0; ns < num_node_sets; ++ns) {
 62:       int num_df_in_set;
 63:       ex_get_node_set_param (exoid, ns_ids[ns], &num_nodes_in_set[ns], &num_df_in_set);
 64:       PetscMalloc(num_nodes_in_set[ns] * sizeof(int), &node_list[ns]);
 65:       ex_get_node_set(exoid, ns_ids[ns], node_list[ns]);
 66:     }
 67:   }
 68:   ex_close(exoid);

 70:   // Build mesh topology
 71:   int numCorners = num_nodes_per_elem[0];
 72:   int *cells;
 73:   mesh->setDimension(num_dim);
 74:   PetscMalloc(numCorners*num_elem * sizeof(int), &cells);
 75:   for (int eb = 0, k = 0; eb < num_elem_blk; ++eb) {
 76:     for (int e = 0; e < num_elem_in_block[eb]; ++e, ++k) {
 77:       for (int c = 0; c < numCorners; ++c) {
 78:         cells[k*numCorners+c] = connect[eb][e*numCorners+c];
 79:       }
 80:     }
 81:     PetscFree(connect[eb]);
 82:   }
 83:   ALE::Obj<PETSC_MESH_TYPE::sieve_type> sieve       = new PETSC_MESH_TYPE::sieve_type(mesh->comm(), mesh->debug());
 84:   bool                                  interpolate = false;

 86:   try {
 87:     mesh->setSieve(sieve);
 88:     if (0 == rank) {
 89:       if (!interpolate) {
 90:         // Create the ISieve
 91:         sieve->setChart(PETSC_MESH_TYPE::sieve_type::chart_type(0, num_elem+num_nodes));
 92:         // Set cone and support sizes
 93:         for (int c = 0; c < num_elem; ++c) {
 94:           sieve->setConeSize(c, numCorners);
 95:         }
 96:         sieve->symmetrizeSizes(num_elem, numCorners, cells, num_elem - 1); /* Notice the -1 for 1-based indexing in cells[] */
 97:         // Allocate point storage
 98:         sieve->allocate();
 99:         // Fill up cones
100:         int *cone  = new int[numCorners];
101:         int *coneO = new int[numCorners];
102:         for (int v = 0; v < numCorners; ++v) coneO[v] = 1;
103:         for (int c = 0; c < num_elem; ++c) {
104:           for (int v = 0; v < numCorners; ++v) {
105:             cone[v] = cells[c*numCorners+v]+num_elem - 1;
106:           }
107:           sieve->setCone(cone, c);
108:           sieve->setConeOrientation(coneO, c);
109:         } // for
110:         delete[] cone; cone   = 0;
111:         delete[] coneO; coneO = 0;
112:         // Symmetrize to fill up supports
113:         sieve->symmetrize();
114:       } else {
115:         // Same old thing: build a flexible Sieve, then convert it to the optimized ISieve
116:         typedef ALE::Mesh<PetscInt,PetscScalar> FlexMesh;
117:         ALE::Obj<FlexMesh::sieve_type> s = new FlexMesh::sieve_type(sieve->comm(), sieve->debug());

119:         ALE::SieveBuilder<FlexMesh>::buildTopology(s, num_dim, num_elem, cells, num_nodes, interpolate, numCorners);
120:         std::map<FlexMesh::point_type,FlexMesh::point_type> renumbering;
121:         ALE::ISieveConverter::convertSieve(*s, *sieve, renumbering);
122:       }
123:       if (!interpolate) {
124:         // Optimized stratification
125:         const ALE::Obj<PETSC_MESH_TYPE::label_type>& height = mesh->createLabel("height");
126:         const ALE::Obj<PETSC_MESH_TYPE::label_type>& depth  = mesh->createLabel("depth");

128:         for (int c = 0; c < num_elem; ++c) {
129:           height->setCone(0, c);
130:           depth->setCone(1, c);
131:         }
132:         for (int v = num_elem; v < num_elem+num_nodes; ++v) {
133:           height->setCone(1, v);
134:           depth->setCone(0, v);
135:         }
136:         mesh->setHeight(1);
137:         mesh->setDepth(1);
138:       } else {
139:         mesh->stratify();
140:       }
141:     } else {
142:       mesh->getSieve()->setChart(PETSC_MESH_TYPE::sieve_type::chart_type());
143:       mesh->getSieve()->allocate();
144:       mesh->stratify();
145:     }
146:   } catch (ALE::Exception e) {
147:     SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB, e.msg().c_str());
148:   }
149:   PetscFree(cells);

151:   // Build cell blocks
152:   const ALE::Obj<PETSC_MESH_TYPE::label_type>& cellBlocks = mesh->createLabel("CellBlocks");
153:   if (!rank) {
154:     for (int eb = 0, k = 0; eb < num_elem_blk; ++eb) {
155:       for (int e = 0; e < num_elem_in_block[eb]; ++e, ++k) {
156:         mesh->setValue(cellBlocks, k, eb_ids[eb]);
157:       }
158:     }
159:   }
160:   if (num_elem_blk > 0) {
161:     PetscFree(connect);
162:     PetscFree5(eb_ids, num_elem_in_block, num_nodes_per_elem, num_attr, block_names);
163:   }

165:   // Build coordinates
166:   double *coords;
167:   PetscMalloc(num_dim*num_nodes * sizeof(double), &coords);
168:   if (num_dim > 0) {
169:     for (int v = 0; v < num_nodes; ++v) coords[v*num_dim+0] = x[v];
170:   }
171:   if (num_dim > 1) {
172:     for (int v = 0; v < num_nodes; ++v) coords[v*num_dim+1] = y[v];
173:   }
174:   if (num_dim > 2) {
175:     for (int v = 0; v < num_nodes; ++v) coords[v*num_dim+2] = z[v];
176:   }
177:   ALE::SieveBuilder<PETSC_MESH_TYPE>::buildCoordinates(mesh, num_dim, coords);
178:   PetscFree(coords);
179:   PetscFree3(x, y, z);

181:   // Build vertex sets
182:   const ALE::Obj<PETSC_MESH_TYPE::label_type>& vertexSets = mesh->createLabel("VertexSets");
183:   if (!rank) {
184:     for (int ns = 0; ns < num_node_sets; ++ns) {
185:       for (int n = 0; n < num_nodes_in_set[ns]; ++n) {
186:         mesh->addValue(vertexSets, node_list[ns][n]+num_elem-1, ns_ids[ns]);
187:       }
188:       PetscFree(node_list[ns]);
189:     }
190:   }
191:   if (num_node_sets > 0) {
192:     PetscFree3(ns_ids,num_nodes_in_set,node_list);
193:   }
194:   return(0);
195: }

197: #endif // PETSC_HAVE_EXODUSII

201: /*@C
202:   DMMeshCreateExodus - Create a DMMesh from an ExodusII file.

204:   Not Collective

206:   Input Parameters:
207: + comm - The MPI communicator
208: - filename - The ExodusII filename

210:   Output Parameter:
211: . dm - The DMMesh object

213:   Level: beginner

215: .keywords: mesh, ExodusII
216: .seealso: DMMeshCreate()
217: @*/
218: PetscErrorCode DMMeshCreateExodus(MPI_Comm comm, const char filename[], DM *dm)
219: {
220:   PetscInt       debug = 0;
221:   PetscBool      flag;

225:   DMMeshCreate(comm, dm);
226:   PetscOptionsGetInt(NULL, "-debug", &debug, &flag);
227:   ALE::Obj<PETSC_MESH_TYPE> m = new PETSC_MESH_TYPE(comm, -1, debug);
228: #if defined(PETSC_HAVE_EXODUSII)
229:   PetscReadExodusII(comm, filename, m);
230: #else
231:   SETERRQ(comm, PETSC_ERR_SUP, "This method requires ExodusII support. Reconfigure using --download-exodusii");
232: #endif
233:   if (debug) m->view("Mesh");
234:   DMMeshSetMesh(*dm, m);
235:   return(0);
236: }
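A minimal usage sketch for DMMeshCreateExodus (not from the original source): the filename "sphere.exo" is hypothetical, and PETSc must be configured with --download-exodusii for this to run.

  #include <petscdmmesh.h>

  int main(int argc, char **argv)
  {
    DM             dm;
    PetscErrorCode ierr;

    ierr = PetscInitialize(&argc, &argv, NULL, NULL);CHKERRQ(ierr);
    /* Read the serial mesh from the ExodusII file on rank 0 */
    ierr = DMMeshCreateExodus(PETSC_COMM_WORLD, "sphere.exo", &dm);CHKERRQ(ierr);
    ierr = DMDestroy(&dm);CHKERRQ(ierr);
    ierr = PetscFinalize();
    return 0;
  }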

240: /*@
241:   DMMeshCreateExodusNG - Create a Mesh from an ExodusII file.

243:   Collective on comm

245:   Input Parameters:
246: + comm  - The MPI communicator
247: - exoid - The ExodusII id associated with an ExodusII file, obtained using ex_open

249:   Output Parameter:
250: . dm  - The DM object representing the mesh

252:   ExodusII side sets are ignored


255:   Interpolated meshes are not supported.

257:   Level: beginner

259: .keywords: mesh,ExodusII
260: .seealso: MeshCreate() MeshCreateExodus()
261: @*/
262: PetscErrorCode DMMeshCreateExodusNG(MPI_Comm comm, PetscInt exoid,DM *dm)
263: {
264: #if defined(PETSC_HAVE_EXODUSII)
265:   PetscBool      debug = PETSC_FALSE;
266:   PetscMPIInt    num_proc,rank;

269:   int  num_dim,num_vertices = 0,num_cell = 0;
270:   int  num_cs = 0,num_vs = 0,num_fs = 0;
271:   char title[PETSC_MAX_PATH_LEN+1];
272:   char buffer[PETSC_MAX_PATH_LEN+1];

274:   int *cs_id;
275:   int num_cell_in_set,num_vertex_per_cell,num_attr;
276:   int *cs_connect;

278:   int       *vs_id;
279:   int       num_vertex_in_set;
280:   int       *vs_vertex_list;
281:   PetscReal *x,*y,*z;
282:   PetscReal *coords;

284:   PetscInt v,c,v_loc,c_loc,vs,cs;

286:   typedef ALE::Mesh<PetscInt,PetscScalar> FlexMesh;
287:   typedef PETSC_MESH_TYPE::sieve_type     sieve_type;
288:   ALE::Obj<PETSC_MESH_TYPE>               mesh;
289:   ALE::Obj<FlexMesh::sieve_type>          flexmesh_sieve;
290:   ALE::Obj<FlexMesh>                      flexmesh;
291:   ALE::Obj<PETSC_MESH_TYPE::sieve_type>   sieve;

293:   std::map<PETSC_MESH_TYPE::point_type,PETSC_MESH_TYPE::point_type> renumbering;
294: #endif

297: #if defined(PETSC_HAVE_EXODUSII)
298:   MPI_Comm_rank(comm,&rank);
299:   MPI_Comm_size(comm,&num_proc);
300:   PetscOptionsGetBool(NULL,"-debug",&debug,NULL);

302:   DMMeshCreate(comm,dm);
303:   /*
304:     I really don't understand why this is needed
305:   */
306:   PetscObjectGetComm((PetscObject) *dm,&comm);


309:   mesh = new PETSC_MESH_TYPE(comm,-1,debug);
310:   DMMeshSetMesh(*dm,mesh);

312:   sieve          = new PETSC_MESH_TYPE::sieve_type(mesh->comm(),mesh->debug());
313:   flexmesh_sieve = new FlexMesh::sieve_type(mesh->comm(),mesh->debug());

315:   /*
316:     Open EXODUS II file and read basic information on rank 0,
317:     then broadcast to all processors
318:   */
319:   if (!rank) {
320:     ex_get_init(exoid,title,&num_dim,&num_vertices,&num_cell,&num_cs,&num_vs,&num_fs);
321:     if (num_cs == 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Exodus file does not contain any cell set\n");
322:   }

324:   MPI_Bcast(&num_dim,1,MPIU_INT,0,comm);
325:   MPI_Bcast(&num_cs,1,MPIU_INT,0,comm);
326:   MPI_Bcast(&num_vs,1,MPIU_INT,0,comm);
327:   MPI_Bcast(&num_fs,1,MPIU_INT,0,comm);
328:   MPI_Bcast(title,PETSC_MAX_PATH_LEN+1,MPI_CHAR,0,comm);

330:   PetscObjectSetName((PetscObject)*dm,title);
331:   mesh->setDimension(num_dim);

333:   /*
334:     Read cell sets information then broadcast them
335:   */
336:   const ALE::Obj<PETSC_MESH_TYPE::label_type>& cellSets = mesh->createLabel("Cell Sets");
337:   if (!rank) {
338:     /*
339:       Get cell sets IDs
340:     */
341:     PetscMalloc(num_cs*sizeof(int),&cs_id);
342:     ex_get_elem_blk_ids(exoid,cs_id);

344:     /*
345:       Read the cell set connectivity table and build mesh topology
346:       EXO standard requires that cells in cell sets be numbered sequentially
347:       and be pairwise disjoint.
348:     */
349:     for (c=0,cs = 0; cs < num_cs; cs++) {
350:       if (debug) {
351:         PetscPrintf(comm,"Building cell set %i\n",cs);
352:       }
353:       ex_get_elem_block(exoid,cs_id[cs],buffer,&num_cell_in_set,&num_vertex_per_cell,&num_attr);
354:       PetscMalloc(num_vertex_per_cell*num_cell_in_set*sizeof(int),&cs_connect);
355:       ex_get_elem_conn(exoid,cs_id[cs],cs_connect);
356:       /*
357:         EXO uses Fortran-based (1-based) indexing; sieve uses C-style (0-based) indexing and numbers cells first, then vertices.
358:       */
359:       for (v = 0,c_loc = 0; c_loc < num_cell_in_set; c_loc++,c++) {
360:         for (v_loc = 0; v_loc < num_vertex_per_cell; v_loc++,v++) {
361:           if (debug) {
362:             PetscPrintf(PETSC_COMM_SELF,"[%i]:\tinserting vertex \t%i in cell \t%i local vertex number \t%i\n",
363:                                rank,cs_connect[v]+num_cell-1,c,v_loc);
364:           }
365:           flexmesh_sieve->addArrow(sieve_type::point_type(cs_connect[v]+num_cell-1),
366:                                    sieve_type::point_type(c),
367:                                    v_loc);
368:         }
369:         mesh->setValue(cellSets,c,cs_id[cs]);
370:       }
371:       PetscFree(cs_connect);
372:     }
373:     PetscFree(cs_id);
374:   }
375:   /*
376:     We do not interpolate, and we know that the numbering is compact (this is required by EXO),
377:     so there is no need to renumber (renumber = false)
378:   */
379:   ALE::ISieveConverter::convertSieve(*flexmesh_sieve,*sieve,renumbering,false);
380:   mesh->setSieve(sieve);
381:   mesh->stratify();
382:   flexmesh = new FlexMesh(mesh->comm(),mesh->debug());
383:   ALE::ISieveConverter::convertOrientation(*flexmesh_sieve,*sieve,renumbering,flexmesh->getArrowSection("orientation").ptr());

385:   /*
386:     Create Vertex set label
387:   */
388:   const ALE::Obj<PETSC_MESH_TYPE::label_type>& vertexSets = mesh->createLabel("Vertex Sets");
389:   if (num_vs >0) {
390:     if (!rank) {
391:       /*
392:         Get vertex set ids
393:       */
394:       PetscMalloc(num_vs*sizeof(int),&vs_id);
395:       ex_get_node_set_ids(exoid,vs_id);
396:       for (vs = 0; vs < num_vs; vs++) {
397:         ex_get_node_set_param(exoid,vs_id[vs],&num_vertex_in_set,&num_attr);
398:         PetscMalloc(num_vertex_in_set * sizeof(int),&vs_vertex_list);
399:         ex_get_node_set(exoid,vs_id[vs],vs_vertex_list);
400:         for (v = 0; v < num_vertex_in_set; v++) {
401:           mesh->addValue(vertexSets,vs_vertex_list[v]+num_cell-1,vs_id[vs]);
402:         }
403:         PetscFree(vs_vertex_list);
404:       }
405:       PetscFree(vs_id);
406:     }
407:   }
408:   /*
409:     Read coordinates
410:   */
411:   PetscMalloc4(num_vertices,PetscReal,&x,
412:                num_vertices,PetscReal,&y,
413:                num_vertices,PetscReal,&z,
414:                num_dim*num_vertices,PetscReal,&coords);
415:   if (!rank) {
416:     ex_get_coord(exoid,x,y,z);
418:     if (num_dim > 0) {
419:       for (v = 0; v < num_vertices; ++v) coords[v*num_dim+0] = x[v];
420:     }
421:     if (num_dim > 1) {
422:       for (v = 0; v < num_vertices; ++v) coords[v*num_dim+1] = y[v];
423:     }
424:     if (num_dim > 2) {
425:       for (v = 0; v < num_vertices; ++v) coords[v*num_dim+2] = z[v];
426:     }
427:   }
428:   ALE::SieveBuilder<PETSC_MESH_TYPE>::buildCoordinates(mesh,num_dim,coords);
429:   PetscFree4(x,y,z,coords);

433:   /*
434:   if (!rank) {
435:     ex_close(exoid);
436:   }
437:   */
438: #else
439:   SETERRQ(comm, PETSC_ERR_SUP, "This method requires ExodusII support. Reconfigure using --download-exodusii");
440: #endif
441:   return(0);
442: }
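A hedged usage sketch for DMMeshCreateExodusNG: since only rank 0 reads from the file, ex_open is called there alone and the exoid (meaningless on the other ranks) is passed in collectively. The filename "sphere.exo" is hypothetical.

  int            CPU_word_size = 0, IO_word_size = 0, exoid = -1;
  float          version;
  PetscMPIInt    rank;
  DM             dm;
  PetscErrorCode ierr;

  ierr = MPI_Comm_rank(PETSC_COMM_WORLD, &rank);CHKERRQ(ierr);
  if (!rank) {
    exoid = ex_open("sphere.exo", EX_READ, &CPU_word_size, &IO_word_size, &version);
    if (exoid <= 0) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_FILE_OPEN, "ex_open failed");
  }
  ierr = DMMeshCreateExodusNG(PETSC_COMM_WORLD, exoid, &dm);CHKERRQ(ierr);
  if (!rank) ex_close(exoid);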

446: /*@
447:   DMMeshViewExodusSplit - Write a DMMesh geometry and topology into several ExodusII files.

449:   Collective on comm

451:   Input Parameters:
452: + dm    - The DM object representing the body
453: - exoid - The id of the exodusII file (obtained with ex_open or ex_create). Must be different on each
454:           processor (suggest using prefix-<rank>.gen or prefix-<rank>.exo)

457:   Face Sets (Side Sets in Exodus terminology) are ignored

459:   Interpolated meshes are not supported.

461:   Level: beginner

463: .keywords: mesh,ExodusII
464: .seealso: MeshCreate()
465: @*/
466: PetscErrorCode DMMeshViewExodusSplit(DM dm,PetscInt exoid)
467: {
468: #if defined(PETSC_HAVE_EXODUSII)
469:   PetscBool      debug = PETSC_FALSE;
470:   MPI_Comm       comm;

473:   int            num_dim,num_vertices = 0,num_cells = 0;
474:   int            num_cs        = 0,num_vs = 0;
475:   int            num_cs_global = 0,num_vs_global = 0;
476:   const char     *title;
477:   IS             csIS,vsIS;
478:   IS             csIS_global,vsIS_global;
479:   const PetscInt *csID,*vsID,*labels;

481:   PetscReal *coord;

483:   PetscInt c,v,c_offset;

485:   PetscInt       set,num_cell_in_set,num_vertex_per_cell = 0;
486:   const PetscInt *cells;
487:   IS             cellsIS;
488:   const char     *cell_type = "UNKNOWN";
489:   int            *elem_map,*cs_connect,num_cs_attr=0;
490:   const PetscInt *elem_connect;

492:   PetscInt num_vertex_in_set,num_vs_attr=0;
493:   PetscInt *vertices;
494:   IS       verticesIS;
495: #endif

498: #if defined(PETSC_HAVE_EXODUSII)
499:   /*
500:     Extract mesh global properties from the DM
501:   */
502:   PetscObjectGetName((PetscObject)dm,&title);
503:   DMMeshGetDimension(dm,&num_dim);
504:   DMMeshGetStratumSize(dm,"height",0,&num_cells);
505:   DMMeshGetStratumSize(dm,"depth",0,&num_vertices);

507:   /*
508:     Get the local and global number of sets
509:   */
510:   PetscObjectGetComm((PetscObject) dm,&comm);
511:   DMMeshGetLabelSize(dm,"Cell Sets",&num_cs);
512:   DMMeshGetLabelIdIS(dm,"Cell Sets",&csIS);
513:   ISGetTotalIndices(csIS,&labels);
514:   ISGetSize(csIS,&num_cs_global);
515:   PetscSortRemoveDupsInt(&num_cs_global,(PetscInt*)labels);
516:   ISCreateGeneral(comm,num_cs_global,labels,PETSC_COPY_VALUES,&csIS_global);
517:   ISRestoreTotalIndices(csIS,&labels);

519:   DMMeshGetLabelSize(dm,"Vertex Sets",&num_vs);
520:   DMMeshGetLabelIdIS(dm,"Vertex Sets",&vsIS);
521:   ISGetSize(vsIS,&num_vs_global);
522:   if (num_vs_global > 0) {
523:     ISGetTotalIndices(vsIS,&labels);
524:     PetscSortRemoveDupsInt(&num_vs_global,(PetscInt*)labels);
525:     ISCreateGeneral(comm,num_vs_global,labels,PETSC_COPY_VALUES,&vsIS_global);
526:     ISRestoreTotalIndices(vsIS,&labels);
527:   }
528:   ex_put_init(exoid,title,num_dim,num_vertices,num_cells,num_cs_global,num_vs_global,0);

530:   /*
531:     Write coordinates
532:   */
533:   DMMeshGetCoordinates(dm,PETSC_TRUE,&num_vertices,&num_dim,&coord);
534:   if (debug) {
535:     for (c = 0; c < num_dim; c++) {
536:       PetscPrintf(comm,"Coordinate %i:\n",c);
537:       PetscRealView(num_vertices,&coord[c*num_vertices],PETSC_VIEWER_STDOUT_SELF);
538:     }
539:   }
540:   ex_put_coord(exoid,coord,&coord[num_vertices],&coord[2*num_vertices]);

542:   /*
543:     Write cell set connectivity table and parameters
544:     Compute the element number map
545:     The element number map is not needed as long as the cell sets are of the type
546:     0    .. n1
547:     n1+1 .. n2
548:     n2+1 .. n3
549:     which is the case when a mesh is read from an exo file and then distributed, but one never knows
550:   */

552:   /*
553:     The following loop should be based on csIS and not csIS_global, but EXO has no
554:     way to write the block IDs other than ex_put_elem_block,
555:     and EnSight is bothered if all cell set IDs are not present on all files...
556:     Not a huge deal
557:   */

559:   PetscMalloc(num_cells*sizeof(int),&elem_map);
560:   ISGetIndices(csIS_global,&csID);
561:   for (c_offset = 0,set = 0; set < num_cs_global; set++) {
562:     DMMeshGetStratumSize(dm,"Cell Sets",csID[set],&num_cell_in_set);
563:     DMMeshGetStratumIS(dm,"Cell Sets",csID[set],&cellsIS);
564:     ISGetIndices(cellsIS,&cells);
565:     if (debug) {
566:       PetscPrintf(PETSC_COMM_SELF,"Cell set %i: %i cells\n",csID[set],num_cell_in_set);
567:       PetscIntView(num_cell_in_set,cells,PETSC_VIEWER_STDOUT_SELF);
568:     }
569:     /*
570:       Add current block indices to elem_map. EXO uses Fortran (1-based) numbering
571:     */
572:     for (c = 0; c < num_cell_in_set; c++,c_offset++) {
573:       elem_map[c_offset] = cells[c]+1;
574:     }
575:     /*
576:       We make an educated guess as to the type of cell. This misses quads in
577:       a three-dimensional mesh.
578:       This most likely needs to be fixed by calling ex_put_elem_block again with
579:       the proper parameters
580:     */
581:     if (num_cell_in_set > 0) {
582:       DMMeshGetConeSize(dm,cells[0],&num_vertex_per_cell);
583:       if (num_vertex_per_cell == 2) {
584:         cell_type = "BAR";
585:       } else if (num_vertex_per_cell == 3) {
586:         cell_type = "TRI";
587:       } else if (num_vertex_per_cell == 8) {
588:         cell_type = "HEX";
589:       } else if (num_vertex_per_cell == 4 && num_dim == 2) {
590:         cell_type = "QUAD";
591:       } else if (num_vertex_per_cell == 4 && num_dim == 3) {
592:         cell_type = "TET";
593:       } else {
594:         cell_type = "UNKNOWN";
595:       }
596:     }
597:     ex_put_elem_block (exoid,csID[set],cell_type,num_cell_in_set,num_vertex_per_cell,num_cs_attr);
598:     /*
599:       Build connectivity table of the current block
600:     */
601:     PetscMalloc(num_cell_in_set*num_vertex_per_cell*sizeof(int),&cs_connect);
602:     for (c = 0; c < num_cell_in_set; c++) {
603:       DMMeshGetCone(dm,cells[c],&elem_connect);
604:       for (v = 0; v < num_vertex_per_cell; v++) {
605:         cs_connect[c*num_vertex_per_cell+v] = elem_connect[v]-num_cells+1;
606:       }
607:     }
608:     if (num_cell_in_set > 0) {
609:       ex_put_elem_conn(exoid,csID[set],cs_connect);
610:     }
611:     PetscFree(cs_connect);
612:     ISRestoreIndices(cellsIS,&cells);
613:     ISDestroy(&cellsIS);
614:   }
615:   ISRestoreIndices(csIS_global,&csID);
616:   ex_put_elem_num_map(exoid,elem_map);
617:   PetscFree(elem_map);

619:   /*
620:     Writing vertex sets
621:   */
622:   if (num_vs_global > 0) {
623:     ISGetIndices(vsIS_global,&vsID);
624:     for (set = 0; set < num_vs_global; set++) {
625:       DMMeshGetStratumSize(dm,"Vertex Sets",vsID[set],&num_vertex_in_set);
626:       ex_put_node_set_param(exoid,vsID[set],num_vertex_in_set,num_vs_attr);

628:       DMMeshGetStratumIS(dm,"Vertex Sets",vsID[set],&verticesIS);
629:       ISGetIndices(verticesIS,(const PetscInt**)&vertices);

631:       for (v = 0; v < num_vertex_in_set; v++) {
632:         vertices[v] -= num_cells-1;
633:       }

635:       if (num_vertex_in_set > 0) {
636:         ex_put_node_set(exoid,vsID[set],vertices);
637:       }
638:       ISRestoreIndices(verticesIS,(const PetscInt**)&vertices);
639:       ISDestroy(&verticesIS);
640:     }
641:     ISRestoreIndices(vsIS_global,&vsID);
642:   }
643:   ISDestroy(&csIS);
644:   ISDestroy(&vsIS);
645:   ISDestroy(&csIS_global); if (num_vs_global > 0) {ISDestroy(&vsIS_global);}
646: #else
647:   SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_SUP, "This method requires ExodusII support. Reconfigure using --download-exodusii");
648: #endif
649:   return(0);
650: }
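A sketch of the intended calling sequence, assuming dm already exists: each rank creates its own file with ex_create, as the documentation above requires; the "prefix-%d.exo" naming is only the suggested convention.

  char           filename[PETSC_MAX_PATH_LEN];
  int            CPU_word_size = (int)sizeof(PetscReal), IO_word_size = (int)sizeof(PetscReal), exoid;
  PetscMPIInt    rank;
  PetscErrorCode ierr;

  ierr = MPI_Comm_rank(PETSC_COMM_WORLD, &rank);CHKERRQ(ierr);
  ierr = PetscSNPrintf(filename, sizeof(filename), "prefix-%d.exo", rank);CHKERRQ(ierr);
  exoid = ex_create(filename, EX_CLOBBER, &CPU_word_size, &IO_word_size);
  if (exoid <= 0) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_FILE_OPEN, "ex_create failed for %s", filename);
  ierr = DMMeshViewExodusSplit(dm, exoid);CHKERRQ(ierr);
  ex_close(exoid);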

654: /*@
655:   DMMeshCreateScatterToZeroVertex - Creates the scatter required to scatter local (ghosted)
656:     Vecs associated with a field defined at the vertices of a mesh to a Vec on cpu 0.

658:   Input parameters:
659: . dm - the DMMesh representing the mesh

661:   Output parameters:
662: . scatter - the scatter

664:   Level: advanced

666: .keywords: mesh,ExodusII
667: .seealso: DMMeshCreateScatterToZeroCell DMMeshCreateScatterToZeroVertexSet DMMeshCreateScatterToZeroCellSet
668: @*/
669: PetscErrorCode DMMeshCreateScatterToZeroVertex(DM dm,VecScatter *scatter)
670: {
671:   PetscErrorCode            ierr;
672:   PetscInt                  i;
673:   PetscInt                  *vertices;
674:   PetscInt                  num_vertices,num_vertices_global,my_num_vertices_global;
675:   PetscInt                  num_cells,num_cells_global,my_num_cells_global;
676:   Vec                       v_local,v_zero;
677:   MPI_Comm                  comm;
678:   int                       rank;
679:   IS                        is_local;
680:   ALE::Obj<PETSC_MESH_TYPE> mesh;
681:   typedef ALE::Mesh<PetscInt,PetscScalar>                           FlexMesh;
682:   std::map<PETSC_MESH_TYPE::point_type,PETSC_MESH_TYPE::point_type> renumbering;


686:   DMMeshGetStratumSize(dm,"depth",0,&num_vertices);
687:   DMMeshGetStratumSize(dm,"height",0,&num_cells);
688:   PetscObjectGetComm((PetscObject) dm,&comm);
689:   MPI_Comm_rank(comm,&rank);

691:   DMMeshGetMesh(dm,mesh);
692:   renumbering = mesh->getRenumbering();

694:   my_num_cells_global    = 0;
695:   my_num_vertices_global = 0;
696:   PetscMalloc(num_vertices*sizeof(PetscInt),&vertices);
697:   /*
698:     Get the total number of cells and vertices from the mapping, and build array of global indices of the local vertices
699:     (TO array for the scatter) from the iterator
700:   */

702:   for (FlexMesh::renumbering_type::const_iterator r_iter = renumbering.begin(); r_iter != renumbering.end(); ++r_iter) {
703:     /*
704:        r_iter->first  is a point number in the sequential (global) mesh
705:        r_iter->second is the matching local point number in the distributed (local) mesh
706:     */
707:     if (r_iter->second > num_cells - 1 && r_iter->first > my_num_vertices_global) {
708:       my_num_vertices_global = r_iter->first;
709:     }
710:     if (r_iter->second < num_cells && r_iter->first > my_num_cells_global) {
711:       my_num_cells_global = r_iter->first;
712:     }
713:     if (r_iter->second > num_cells - 1) {
714:       vertices[r_iter->second - num_cells] = r_iter->first;
715:     }
716:   }
717:   my_num_cells_global++;
718:   MPI_Allreduce(&my_num_cells_global,&num_cells_global,1,MPIU_INT,MPI_MAX,comm);
719:   my_num_vertices_global -= num_cells_global-1;
720:   MPI_Allreduce(&my_num_vertices_global,&num_vertices_global,1,MPIU_INT,MPI_MAX,comm);

722:   /*
723:     Convert vertices back from point numbers to vertex numbers
724:   */
725:   for (i = 0; i < num_vertices; i++) {
726:     vertices[i] -= num_cells_global;
727:   }

729:   /*
730:     Build the IS and Vec required to create the VecScatter
731:     A MUCH better way would be to use VecScatterCreateLocal and not create then destroy
732:     Vecs, but I would need to understand better how it works
733:   */
734:   VecCreate(comm,&v_local);
735:   VecSetSizes(v_local,num_vertices,PETSC_DETERMINE);
736:   VecSetFromOptions(v_local);
737:   VecCreate(comm,&v_zero);
738:   VecSetSizes(v_zero,num_vertices_global*(rank==0),PETSC_DECIDE);
739:   VecSetFromOptions(v_zero);

741:   ISCreateGeneral(comm,num_vertices,vertices,PETSC_OWN_POINTER,&is_local);
742:   VecScatterCreate(v_local,NULL,v_zero,is_local,scatter);

744:   ISDestroy(&is_local);
745:   VecDestroy(&v_local);
746:   VecDestroy(&v_zero);
747:   return(0);
748: }
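A usage sketch, assuming dm and comm are given. The rank-0 vector size is recovered from the scatter context, mirroring the private-header hack used in VecViewExodusVertex below (scatter->to_n is the global vertex count on rank 0 and 0 elsewhere).

  PetscInt       num_vertices, num_vertices_zero;
  Vec            v_local, v_zero;
  VecScatter     scatter;
  PetscErrorCode ierr;

  ierr = DMMeshGetStratumSize(dm, "depth", 0, &num_vertices);CHKERRQ(ierr);
  ierr = DMMeshCreateScatterToZeroVertex(dm, &scatter);CHKERRQ(ierr);
  num_vertices_zero = scatter->to_n;
  ierr = VecCreateMPI(comm, num_vertices, PETSC_DETERMINE, &v_local);CHKERRQ(ierr);
  ierr = VecCreateMPI(comm, num_vertices_zero, PETSC_DECIDE, &v_zero);CHKERRQ(ierr);
  /* ... fill v_local with one value per local vertex ... */
  ierr = VecScatterBegin(scatter, v_local, v_zero, INSERT_VALUES, SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(scatter, v_local, v_zero, INSERT_VALUES, SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&scatter);CHKERRQ(ierr);
  ierr = VecDestroy(&v_local);CHKERRQ(ierr);
  ierr = VecDestroy(&v_zero);CHKERRQ(ierr);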

752: /*@
753:   DMMeshCreateScatterToZeroVertexSet - Creates the scatter required to scatter local (ghosted)
754:     Vecs associated with a field defined at a vertex set of a mesh to a Vec on cpu 0.

756:   Input parameters:
757: . dm - the DMMesh representing the mesh

759:   Output parameters:
760: . scatter - the scatter

762:   Level: advanced

764: .keywords: mesh,ExodusII
765: .seealso: DMMeshCreateScatterToZeroCell DMMeshCreateScatterToZeroVertex DMMeshCreateScatterToZeroCellSet
766: @*/
767: PetscErrorCode DMMeshCreateScatterToZeroVertexSet(DM dm,IS is_local,IS is_zero,VecScatter *scatter)
768: {
769:   PetscErrorCode            ierr;
770:   const PetscInt            *setvertices_local;
771:   PetscInt                  *allvertices,*setvertices_zero,*setvertices_localtozero;
772:   PetscInt                  num_vertices,num_vertices_global,my_num_vertices_global=0;
773:   PetscInt                  num_cells,num_cells_global,my_num_cells_global=0;
774:   PetscInt                  setsize_local,setsize_zero;
775:   Vec                       v_local,v_zero;
776:   MPI_Comm                  comm;
777:   int                       rank,i,j,k,l,istart;
778:   IS                        is_localtozero;
779:   ALE::Obj<PETSC_MESH_TYPE> mesh;
780:   typedef ALE::Mesh<PetscInt,PetscScalar>                           FlexMesh;
781:   std::map<PETSC_MESH_TYPE::point_type,PETSC_MESH_TYPE::point_type> renumbering;


785:   DMMeshGetStratumSize(dm,"height",0,&num_cells);
786:   DMMeshGetStratumSize(dm,"depth",0,&num_vertices);
787:   PetscObjectGetComm((PetscObject) dm,&comm);
788:   MPI_Comm_rank(comm,&rank);

790:   DMMeshGetMesh(dm,mesh);
791:   renumbering = mesh->getRenumbering();

793:   PetscMalloc(num_vertices*sizeof(PetscInt),&allvertices);
794:   /*
795:     Build array of global indices of the local vertices (TO array for the scatter)
796:     from the iterator
797:   */

799:   for (FlexMesh::renumbering_type::const_iterator r_iter = renumbering.begin(); r_iter != renumbering.end(); ++r_iter) {
800:     /*
801:        r_iter->first  is a point number in the sequential (global) mesh
802:        r_iter->second is the matching local point number in the distributed (local) mesh
803:     */
804:     if (r_iter->second > num_cells - 1 && r_iter->first > my_num_vertices_global) {
805:       my_num_vertices_global = r_iter->first;
806:     }
807:     if (r_iter->second < num_cells && r_iter->first > my_num_cells_global) {
808:       my_num_cells_global = r_iter->first;
809:     }
810:     if (r_iter->second > num_cells-1) {
811:       allvertices[r_iter->second - num_cells] = r_iter->first;
812:     }
813:   }
814:   my_num_cells_global++;
815:   MPI_Allreduce(&my_num_cells_global,&num_cells_global,1,MPIU_INT,MPI_MAX,comm);
816:   my_num_vertices_global -= num_cells_global-1;
817:   MPI_Allreduce(&my_num_vertices_global,&num_vertices_global,1,MPIU_INT,MPI_MAX,comm);

819:   ISGetSize(is_local,&setsize_local);
820:   ISGetSize(is_zero,&setsize_zero);
821:   PetscMalloc(setsize_local*sizeof(PetscInt),&setvertices_localtozero);

823:   if (!rank)  {
824:     ISGetIndices(is_zero,(const PetscInt**)&setvertices_zero);
825:   } else {
826:     PetscMalloc(setsize_zero*sizeof(PetscInt),&setvertices_zero);
827:   }
828:   MPI_Bcast(setvertices_zero,setsize_zero,MPIU_INT,0,comm);
829:   ISGetIndices(is_local,&setvertices_local);


832:   istart = 0;
833:   for (i = 0; i < setsize_local; i++) {
834:     j = allvertices[setvertices_local[i]-num_cells];
835:     /*
836:       j is the point number in the sequential mesh of the i-th vertex of the local vertex set.
837:       Search for the matching vertex in the zero vertex set. Because vertices in the zero set are usually
838:       numbered in ascending order, we start our search from the previous find.
839:     */

841:     for (l = 0, k = istart; l < setsize_zero; l++,k = (l+istart)%setsize_zero) {
842:       if (setvertices_zero[k] == j) {
843:         break;
844:       }
845:     }
846:     setvertices_localtozero[i] = k;
847:     istart                     = (k+1)%setsize_zero;
848:   }
849:   ISRestoreIndices(is_local,&setvertices_local);
850:   if (!rank) {
851:     ISRestoreIndices(is_zero,(const PetscInt**)&setvertices_zero);
852:   } else {
853:     PetscFree(setvertices_zero);
854:   }
855:   /*
856:     Build the IS and Vec required to create the VecScatter
857:     A MUCH better way would be to use VecScatterCreateLocal and not create then destroy
858:     Vecs, but I would need to understand better how it works
859:   */
860:   VecCreate(comm,&v_local);
861:   VecSetSizes(v_local,setsize_local,PETSC_DETERMINE);
862:   VecSetFromOptions(v_local);

864:   VecCreate(comm,&v_zero);
865:   VecSetSizes(v_zero,setsize_zero*(rank==0),PETSC_DECIDE);
866:   VecSetFromOptions(v_zero);

868:   ISCreateGeneral(comm,setsize_local,setvertices_localtozero,PETSC_OWN_POINTER,&is_localtozero);
869:   VecScatterCreate(v_local,NULL,v_zero,is_localtozero,scatter);

871:   ISDestroy(&is_localtozero);
872:   PetscFree(allvertices);
873:   VecDestroy(&v_local);
874:   VecDestroy(&v_zero);
875:   return(0);
876: }

880: /*@
881:   DMMeshCreateScatterToZeroCell - Creates the scatter required to scatter local (ghosted)
882:     Vecs associated with a field defined at the cells of a mesh to a Vec on cpu 0.

884:   Input parameters:
885: . dm - the DMMesh representing the mesh

887:   Output parameters:
888: . scatter - the scatter

890:   Level: advanced

892: .keywords: mesh,ExodusII
893: .seealso: DMMeshCreateScatterToZeroVertex DMMeshCreateScatterToZeroVertexSet DMMeshCreateScatterToZeroCellSet
894: @*/
895: PetscErrorCode DMMeshCreateScatterToZeroCell(DM dm,VecScatter *scatter)
896: {
897:   PetscErrorCode            ierr;
898:   PetscInt                  *cells;
899:   PetscInt                  num_cells,num_cells_global,my_num_cells_global;
900:   Vec                       v_local,v_zero;
901:   MPI_Comm                  comm;
902:   int                       rank;
903:   IS                        is_local;
904:   ALE::Obj<PETSC_MESH_TYPE> mesh;
905:   typedef ALE::Mesh<PetscInt,PetscScalar>                           FlexMesh;
906:   std::map<PETSC_MESH_TYPE::point_type,PETSC_MESH_TYPE::point_type> renumbering;


910:   DMMeshGetStratumSize(dm,"height",0,&num_cells);
911:   PetscObjectGetComm((PetscObject) dm,&comm);
912:   MPI_Comm_rank(comm,&rank);

914:   DMMeshGetMesh(dm,mesh);
915:   renumbering = mesh->getRenumbering();

917:   my_num_cells_global = 0;
918:   PetscMalloc(num_cells*sizeof(PetscInt),&cells);
919:   /*
920:     Get the total number of cells from the mapping, and build array of global indices of the local cells (TO array for the scatter)
921:     from the iterator
922:   */

924:   for (FlexMesh::renumbering_type::const_iterator r_iter = renumbering.begin(); r_iter != renumbering.end(); ++r_iter) {
925:     /*
926:        r_iter->first  is a point number in the sequential (global) mesh
927:        r_iter->second is the matching local point number in the distributed (local) mesh
928:     */
929:     if (r_iter->second < num_cells && r_iter->first > my_num_cells_global) {
930:       my_num_cells_global = r_iter->first;
931:     }
932:     if (r_iter->second < num_cells) {
933:       cells[r_iter->second] = r_iter->first;
934:     }
935:   }
936:   my_num_cells_global++;
937:   MPI_Allreduce(&my_num_cells_global,&num_cells_global,1,MPIU_INT,MPI_MAX,comm);

939:   /*
940:     Build the IS and Vec required to create the VecScatter
941:     A MUCH better way would be to use VecScatterCreateLocal and not create then destroy
942:     Vecs, but I would need to understand better how it works
943:   */
944:   VecCreate(comm,&v_local);
945:   VecSetSizes(v_local,num_cells,PETSC_DETERMINE);
946:   VecSetFromOptions(v_local);
947:   VecCreate(comm,&v_zero);
948:   VecSetSizes(v_zero,num_cells_global*(rank==0),PETSC_DECIDE);
949:   VecSetFromOptions(v_zero);

951:   ISCreateGeneral(comm,num_cells,cells,PETSC_OWN_POINTER,&is_local);
952:   VecScatterCreate(v_local,NULL,v_zero,is_local,scatter);

954:   ISDestroy(&is_local);
955:   VecDestroy(&v_local);
956:   VecDestroy(&v_zero);
957:   return(0);
958: }
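The same scatter also works in reverse, redistributing a rank-0 cell vector back to the local layout; this is how the VecLoadExodus* functions below use their scatters. A two-line sketch, with v_zero and v_local sized as in the forward case:

  ierr = VecScatterBegin(scatter, v_zero, v_local, INSERT_VALUES, SCATTER_REVERSE);CHKERRQ(ierr);
  ierr = VecScatterEnd(scatter, v_zero, v_local, INSERT_VALUES, SCATTER_REVERSE);CHKERRQ(ierr);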

962: /*@
963:   DMMeshCreateScatterToZeroCellSet - Creates the scatter required to scatter local (ghosted)
964:     Vecs associated with a field defined at a cell set of a mesh to a Vec on cpu 0.

966:   Input parameters:
967: . dm - the DMMesh representing the mesh

969:   Output parameters:
970: . scatter - the scatter

972:   Level: advanced

974: .keywords: mesh,ExodusII
975: .seealso: DMMeshCreateScatterToZeroCell DMMeshCreateScatterToZeroVertexSet DMMeshCreateScatterToZeroVertex
976: @*/
977: PetscErrorCode DMMeshCreateScatterToZeroCellSet(DM dm,IS is_local,IS is_zero,VecScatter *scatter)
978: {
979:   PetscErrorCode            ierr;
980:   const PetscInt            *setcells_local;
981:   PetscInt                  *allcells,*setcells_zero,*setcells_localtozero;
982:   PetscInt                  num_cells;
983:   PetscInt                  setsize_local,setsize_zero;
984:   Vec                       v_local,v_zero;
985:   MPI_Comm                  comm;
986:   int                       rank,i,j,k,l,istart;
987:   IS                        is_localtozero;
988:   ALE::Obj<PETSC_MESH_TYPE> mesh;
989:   typedef ALE::Mesh<PetscInt,PetscScalar>                           FlexMesh;
990:   std::map<PETSC_MESH_TYPE::point_type,PETSC_MESH_TYPE::point_type> renumbering;


994:   DMMeshGetStratumSize(dm,"height",0,&num_cells);
995:   PetscObjectGetComm((PetscObject) dm,&comm);
996:   MPI_Comm_rank(comm,&rank);

998:   DMMeshGetMesh(dm,mesh);
999:   renumbering = mesh->getRenumbering();

1001:   PetscMalloc(num_cells*sizeof(PetscInt),&allcells);
1002:   /*
1003:     Build array of global indices of the local cells (TO array for the scatter)
1004:     from the iterator
1005:   */

1007:   for (FlexMesh::renumbering_type::const_iterator r_iter = renumbering.begin(); r_iter != renumbering.end(); ++r_iter) {
1008:     /*
1009:        r_iter->first  is a point number in the sequential (global) mesh
1010:        r_iter->second is the matching local point number in the distributed (local) mesh
1011:     */
1012:     if (r_iter->second < num_cells) {
1013:       allcells[r_iter->second] = r_iter->first;
1014:     }
1015:   }

1017:   ISGetSize(is_local,&setsize_local);
1018:   ISGetSize(is_zero,&setsize_zero);
1019:   PetscMalloc(setsize_local*sizeof(PetscInt),&setcells_localtozero);

1021:   if (!rank)  {
1022:     ISGetIndices(is_zero,(const PetscInt**)&setcells_zero);
1023:   } else {
1024:     PetscMalloc(setsize_zero*sizeof(PetscInt),&setcells_zero);
1025:   }
1026:   MPI_Bcast(setcells_zero,setsize_zero,MPIU_INT,0,comm);
1027:   ISGetIndices(is_local,&setcells_local);

1029:   istart = 0;
1030:   for (i = 0; i < setsize_local; i++) {
1031:     j = allcells[setcells_local[i]];
1032:     /*
1033:       j is the cell number in the sequential mesh of the i-th cell of the local cell set.
1034:       Search for the matching cell in the zero cell set. Because cells in the zero set are usually
1035:       numbered sequentially, we start our search from the previous find.
1036:     */

1038:     for (l = 0, k = istart; l < setsize_zero; l++,k = (l+istart)%setsize_zero) {
1039:       if (setcells_zero[k] == j) {
1040:         break;
1041:       }
1042:     }
1043:     setcells_localtozero[i] = k;
1044:     istart                  = (k+1)%setsize_zero;
1045:   }
1046:   ISRestoreIndices(is_local,&setcells_local);
1047:   if (!rank) {
1048:     ISRestoreIndices(is_zero,(const PetscInt**)&setcells_zero);
1049:   } else {
1050:     PetscFree(setcells_zero);
1051:   }
1052:   /*
1053:     Build the IS and Vec required to create the VecScatter
1054:     A MUCH better way would be to use VecScatterCreateLocal and not create then destroy
1055:     Vecs, but I would need to understand better how it works
1056:   */
1057:   VecCreate(comm,&v_local);
1058:   VecSetSizes(v_local,setsize_local,PETSC_DETERMINE);
1059:   VecSetFromOptions(v_local);

1061:   VecCreate(comm,&v_zero);
1062:   VecSetSizes(v_zero,setsize_zero*(rank==0),PETSC_DECIDE);
1063:   VecSetFromOptions(v_zero);

1065:   ISCreateGeneral(comm,setsize_local,setcells_localtozero,PETSC_OWN_POINTER,&is_localtozero);
1066:   VecScatterCreate(v_local,NULL,v_zero,is_localtozero,scatter);

1068:   ISDestroy(&is_localtozero); /* the IS owns setcells_localtozero (PETSC_OWN_POINTER), so destroy the IS rather than freeing the array */
1069:   PetscFree(allcells);
1070:   VecDestroy(&v_local);
1071:   VecDestroy(&v_zero);
1072:   return(0);
1073: }

1077: /*@

1079:   VecViewExodusVertex - Write a Vec representing nodal values of some field in an exodusII file.

1081:   Collective on comm

1083:   The behavior differs depending on the size of comm:
1084:     if size(comm) == 1 each processor writes its local vector into a separate file
1085:     if size(comm) > 1, the values are sent to cpu0, and written in a single file


1088:   Input Parameters:
1089: + dm   - the DMMesh representing the mesh
1090: . v    - the LOCAL vector of values to be saved (i.e. with ghost values) obtained with SectionRealCreateLocalVector.
1091:          if v represents a field with several components, the block size must be set accordingly using VecSetBlockSize()
1092: . exoid - the id of the exodusII file (obtained with ex_open or ex_create)
1093: . step  - the time step to write
1094: - exofield - the position in the exodus field of the first component

1096:   Interpolated meshes are not supported.

1098:   Level: beginner

1100: .keywords: mesh,ExodusII
1101: .seealso: MeshCreate() MeshCreateExodus() SectionRealCreateLocalVector() VecLoadExodusVertex()
1102:           VecViewExodusVertexSet() VecLoadExodusVertexSet() VecViewExodusCellSet() VecLoadExodusCellSet()
1103:           VecViewExodusCell() VecLoadExodusCell()
1104: @*/
1105: PetscErrorCode VecViewExodusVertex(DM dm,Vec v,MPI_Comm comm,PetscInt exoid,PetscInt step,PetscInt exofield)
1106: {
1107: #if defined(PETSC_HAVE_EXODUSII)
1108:   PetscInt       rank,num_proc;
1109:   PetscErrorCode ierr;
1110:   PetscInt       c,num_vertices,num_vertices_zero,num_dof;
1111:   Vec            vdof,vdof_zero;
1112:   PetscReal      *vdof_array;
1113:   VecScatter     scatter;
1114: #endif

1117: #if defined(PETSC_HAVE_EXODUSII)
1118:   MPI_Comm_size(comm,&num_proc);
1119:   MPI_Comm_rank(comm,&rank);
1120:   VecGetBlockSize(v,&num_dof);

1122:   DMMeshGetStratumSize(dm,"depth",0,&num_vertices);

1124:   VecCreate(comm,&vdof);
1125:   VecSetSizes(vdof,num_vertices,PETSC_DETERMINE);
1126:   VecSetFromOptions(vdof);

1128:   if (num_proc == 1) {
1130:     for (c = 0; c < num_dof; c++) {
1131:       VecStrideGather(v,c,vdof,INSERT_VALUES);
1132:       VecGetArray(vdof,&vdof_array);
1133:       ierr = ex_put_nodal_var(exoid,step,exofield+c,num_vertices,vdof_array);
1134:       VecRestoreArray(vdof,&vdof_array);
1135:       if (ierr != 0) SETERRQ2(comm,PETSC_ERR_FILE_WRITE,"Unable to write to file id %i. ex_put_nodal_var returned %i",exoid,ierr);
1136:     }
1138:   } else {
1139:     /*
1140:       Build the scatter sending one dof towards cpu 0
1141:     */
1142:     DMMeshCreateScatterToZeroVertex(dm,&scatter);
1143:     /*
1144:       Another ugly hack to get the total number of vertices in the mesh. This has got to stop...
1145:     */
1146:     num_vertices_zero = scatter->to_n;

1148:     VecCreate(comm,&vdof_zero);
1149:     VecSetSizes(vdof_zero,num_vertices_zero,PETSC_DECIDE);
1150:     VecSetFromOptions(vdof_zero);

1152:     for (c = 0; c < num_dof; c++) {
1153:       VecStrideGather(v,c,vdof,INSERT_VALUES);
1154:       VecScatterBegin(scatter,vdof,vdof_zero,INSERT_VALUES,SCATTER_FORWARD);
1155:       VecScatterEnd(scatter,vdof,vdof_zero,INSERT_VALUES,SCATTER_FORWARD);
1156:       if (!rank) {
1157:         VecGetArray(vdof_zero,&vdof_array);
1158:         ex_put_nodal_var(exoid,step,exofield+c,num_vertices_zero,vdof_array);
1159:         VecRestoreArray(vdof_zero,&vdof_array);
1160:       }
1161:     }
1162:     /*
1163:       Clean up
1164:     */
1165:     VecScatterDestroy(&scatter);
1166:     VecDestroy(&vdof_zero);
1167:   }
1168:   VecDestroy(&vdof);
1169: #else
1170:   SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_SUP, "This method requires ExodusII support. Reconfigure using --download-exodusii");
1171: #endif
1172:   return(0);
1173: }
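A usage sketch, assuming dm, exoid, and a local nodal vector v (from SectionRealCreateLocalVector) are given: this writes a hypothetical 3-component field at time step 1, starting at exodus variable 1 (exodus variable indices are 1-based).

  ierr = VecSetBlockSize(v, 3);CHKERRQ(ierr);   /* e.g. a displacement field */
  ierr = VecViewExodusVertex(dm, v, PETSC_COMM_WORLD, exoid, 1, 1);CHKERRQ(ierr);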

1177: /*@

1179:   VecLoadExodusVertex - Loads a Vec representing nodal values of some field from an exodusII file.

1181:   Collective on comm

1183:   The behavior differs depending on the size of comm:
1184:     if size(comm) == 1 each processor reads its local vector from a separate file
1185:     if size(comm) > 1, the values are read by cpu 0 from a single file then scattered to each processor


1188:   Input Parameters:
1189: + dm   - the DMMesh representing the mesh
1190: . v    - the LOCAL vector of values to be read (i.e. with ghost values) obtained with SectionRealCreateLocalVector.
1191:          if v represents a field with several components, the block size must be set accordingly using VecSetBlockSize()
1192: . exoid - the id of the exodusII file (obtained with ex_open or ex_create)
1193: . step  - the time step to read
1194: - exofield - the position in the exodus field of the first component

1196:   Interpolated meshes are not supported.

1198:   Level: beginner

1200: .keywords: mesh,ExodusII
1201: .seealso: MeshCreate() MeshCreateExodus() SectionRealCreateLocalVector() VecViewExodusVertex()
1202:           VecViewExodusVertexSet() VecLoadExodusVertexSet() VecViewExodusCellSet() VecLoadExodusCellSet()
1203:           VecViewExodusCell() VecLoadExodusCell()

1205: @*/
1206: PetscErrorCode VecLoadExodusVertex(DM dm,Vec v,MPI_Comm comm,PetscInt exoid,PetscInt step,PetscInt exofield)
1207: {
1208: #if defined(PETSC_HAVE_EXODUSII)
1209:   PetscInt       rank,num_proc;
1210:   PetscErrorCode ierr;
1211:   PetscInt       c,num_vertices,num_vertices_global,num_dof;
1212:   Vec            vdof,vzero;
1213:   PetscReal      *vdof_array;
1214:   VecScatter     scatter;
1215: #endif

1217: #if defined(PETSC_HAVE_EXODUSII)
1219:   MPI_Comm_size(comm,&num_proc);
1220:   MPI_Comm_rank(comm,&rank);
1221:   VecGetBlockSize(v,&num_dof);

1223:   DMMeshGetStratumSize(dm,"depth",0,&num_vertices);

1225:   VecCreate(comm,&vdof);
1226:   VecSetSizes(vdof,num_vertices,PETSC_DETERMINE);
1227:   VecSetFromOptions(vdof);

1229:   if (num_proc == 1) {
1230:     for (c = 0; c < num_dof; c++) {
1231:       VecGetArray(vdof,&vdof_array);
1232:       ierr = ex_get_nodal_var(exoid,step,exofield+c,num_vertices,vdof_array);
1233:       VecRestoreArray(vdof,&vdof_array);
1234:       if (ierr != 0) SETERRQ2(comm,PETSC_ERR_FILE_READ,"Unable to read file id %i. ex_get_nodal_var returned %i",exoid,ierr);
1235:       VecStrideScatter(vdof,c,v,INSERT_VALUES);
1236:     }
1237:   } else {
1238:     /*
1239:       Build the scatter sending one dof towards cpu 0
1240:     */
1241:     DMMeshCreateScatterToZeroVertex(dm,&scatter);
1242:     /*
1243:       Another ugly hack to get the total number of vertices in the mesh. This has got to stop...
1244:     */
1245:     num_vertices_global = scatter->to_n;
1246:     MPI_Bcast(&num_vertices_global,1,MPIU_INT,0,comm);

1248:     VecCreate(comm,&vzero);
1249:     VecSetSizes(vzero,num_vertices_global*(rank==0),PETSC_DECIDE);
1250:     VecSetFromOptions(vzero);

1252:     for (c = 0; c < num_dof; c++) {
1253:       if (!rank) {
1254:         VecGetArray(vzero,&vdof_array);
1255:         ex_get_nodal_var(exoid,step,exofield+c,num_vertices_global,vdof_array);
1256:         VecRestoreArray(vzero,&vdof_array);
1257:       }
1258:       VecScatterBegin(scatter,vzero,vdof,INSERT_VALUES,SCATTER_REVERSE);
1259:       VecScatterEnd(scatter,vzero,vdof,INSERT_VALUES,SCATTER_REVERSE);
1260:       VecStrideScatter(vdof,c,v,INSERT_VALUES);
1261:     }
1262:     /*
1263:       Clean up
1264:     */
1265:     VecScatterDestroy(&scatter);
1266:     VecDestroy(&vzero);
1267:   }
1268:   VecDestroy(&vdof);
1269: #else
1270:   SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_SUP, "This method requires ExodusII support. Reconfigure using --download-exodusii");
1271: #endif
1272:   return(0);
1273: }
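The matching read, a sketch under the same assumptions: v must already have the layout and block size of the stored field, since VecLoadExodusVertex only fills in values.

  ierr = VecSetBlockSize(v, 3);CHKERRQ(ierr);
  ierr = VecLoadExodusVertex(dm, v, PETSC_COMM_WORLD, exoid, 1, 1);CHKERRQ(ierr);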

1277: /*@

1279:   VecViewExodusVertexSet - Write a Vec representing nodal values of some field at a vertex set in an exodusII file.

1281:   Collective on comm

1283:   The behavior differs depending on the size of comm:
1284:     if size(comm) == 1 each processor writes its local vector into a separate file
1285:     if size(comm) > 1, the values are sent to cpu0, and written in a single file


1288:   Input Parameters:
1289: + dm   - the DMMesh representing the mesh
1290: . v    - the LOCAL vector of values to be saved (i.e. with ghost values) obtained with SectionRealCreateLocalVector.
1291:          if v represents a field with several components, the block size must be set accordingly using VecSetBlockSize()
1292: . vsID  - the vertex set ID
1293: . exoid - the id of the exodusII file (obtained with ex_open or ex_create)
1294: . step  - the time step to write
1295: - exofield - the position in the exodus field of the first component

1297:   Interpolated meshes are not supported.

1299:   Level: beginner

1301: .keywords: mesh,ExodusII
1302: .seealso: MeshCreate() MeshCreateExodus() SectionRealCreateLocalVector() VecLoadExodusVertexSet()
1303:           VecViewExodusVertex() VecLoadExodusVertex() VecViewExodusCellSet() VecLoadExodusCellSet()
1304:           VecViewExodusCell() VecLoadExodusCell()

1306: @*/
1307: PetscErrorCode VecViewExodusVertexSet(DM dm,Vec v,PetscInt vsID,MPI_Comm comm,PetscInt exoid,PetscInt step,PetscInt exofield)
1308: {
1309: #if defined(PETSC_HAVE_EXODUSII)
1310:   PetscInt       rank,num_proc;
1311:   PetscErrorCode ierr;
1312:   PetscInt       c,num_vertices_in_set=0,num_vertices_zero=0,num_cells_zero,num_dof;
1313:   Vec            vdof,vdof_zero;
1314:   PetscReal      *vdof_array;
1315:   VecScatter     scatter;
1316:   PetscInt       i,junk1,junk2,junk3,junk4,junk5;
1317:   char           junk[MAX_LINE_LENGTH+1];
1318:   IS             vsIS,vsIS_zero;
1319:   PetscInt       *vs_vertices_zero;
1320:   MPI_Comm       dmcomm;
1321: #endif

1323: #if defined(PETSC_HAVE_EXODUSII)
1325:   MPI_Comm_size(comm,&num_proc);
1326:   MPI_Comm_rank(comm,&rank);
1327:   VecGetBlockSize(v,&num_dof);
1328:   PetscObjectGetComm((PetscObject) dm,&dmcomm);

1330:   DMMeshGetStratumSize(dm,"Vertex Sets",vsID,&num_vertices_in_set);
1331:   VecCreateSeq(PETSC_COMM_SELF,num_vertices_in_set,&vdof);

1333:   if (num_proc == 1) {
1334:     if (num_vertices_in_set > 0) {
1335:       for (c = 0; c < num_dof; c++) {
1336:         VecStrideGather(v,c,vdof,INSERT_VALUES);
1337:         VecGetArray(vdof,&vdof_array);
1338:         ierr = ex_put_nset_var(exoid,step,exofield+c,vsID,num_vertices_in_set,vdof_array);
1339:         if (ierr != 0) SETERRQ2(comm,PETSC_ERR_FILE_WRITE,"Unable to write to file id %i. ex_put_nset_var returned %i",exoid,ierr);
1340:         VecRestoreArray(vdof,&vdof_array);
1341:       }
1342:     }
1343:   } else {
1344:     /*
1345:       Build the scatter sending one dof towards cpu 0
1346:     */
1347:     DMMeshGetStratumIS(dm,"Vertex Sets",vsID,&vsIS);
1348:     if (!rank) {
1349:       ex_get_node_set_param(exoid,vsID,&num_vertices_zero,&junk1);
1350:     }
1351:     PetscMalloc(num_vertices_zero*sizeof(PetscInt),&vs_vertices_zero);
1352:     if (!rank) {
1353:       ex_get_node_set(exoid,vsID,vs_vertices_zero);
1354:       ex_get_init(exoid,junk,&junk1,&junk2,&num_cells_zero,&junk3,&junk4,&junk5);
1355:       for (i = 0; i < num_vertices_zero; i++) {
1356:         vs_vertices_zero[i] += num_cells_zero-1;
1357:       }
1358:     }
1359:     ISCreateGeneral(dmcomm,num_vertices_zero,vs_vertices_zero,PETSC_OWN_POINTER,&vsIS_zero);
1360:     DMMeshCreateScatterToZeroVertexSet(dm,vsIS,vsIS_zero,&scatter);
1361:     ISDestroy(&vsIS);
1362:     ISDestroy(&vsIS_zero);

1364:     VecCreateSeq(PETSC_COMM_SELF,num_vertices_zero*(rank == 0),&vdof_zero);

1366:     for (c = 0; c < num_dof; c++) {
1367:       VecStrideGather(v,c,vdof,INSERT_VALUES);
1368:       VecScatterBegin(scatter,vdof,vdof_zero,INSERT_VALUES,SCATTER_FORWARD);
1369:       VecScatterEnd(scatter,vdof,vdof_zero,INSERT_VALUES,SCATTER_FORWARD);

1371:       if (!rank) {
1372:         VecGetArray(vdof_zero,&vdof_array);
1373:         ierr = ex_put_nset_var(exoid,step,exofield+c,vsID,num_vertices_zero,vdof_array);
1374:         if (ierr != 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_FILE_WRITE,"Unable to write to file id %i. ex_put_nset_var returned %i",exoid,ierr);
1375:         VecRestoreArray(vdof_zero,&vdof_array);
1376:       }
1377:     }
1378:     /*
1379:       Clean up
1380:     */
1381:     VecScatterDestroy(&scatter);
1382:     VecDestroy(&vdof_zero);
1383:   }
1384:   VecDestroy(&vdof);
1385: #else
1386:   SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_SUP, "This method requires ExodusII support. Reconfigure using --download-exodusii");
1387: #endif
1388:   return(0);
1389: }
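A sketch restricted to a vertex set: vsID 100 is a hypothetical node set id that must exist both in the DMMesh "Vertex Sets" label and in the exodus file, and v_set is assumed to hold one dof per vertex of that set.

  ierr = VecViewExodusVertexSet(dm, v_set, 100, PETSC_COMM_WORLD, exoid, 1, 1);CHKERRQ(ierr);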

1393: /*@

1395:   VecLoadExodusVertexSet - Loads a Vec representing nodal values of some field at a vertex set from an exodusII file.

1397:   Collective on comm

1399:   The behavior differs depending on the size of comm:
1400:     if size(comm) == 1 each processor reads its local vector from a separate file
1401:     if size(comm) > 1, the values are read by cpu 0 from a single file then scattered to each processor


1404:   Input Parameters:
1405: + dm   - the DMMesh representing the mesh
1406: . v    - the LOCAL vector of values to be read (i.e. with ghost values) obtained with SectionRealCreateLocalVector.
1407:          if v represents a field with several components, the block size must be set accordingly using VecSetBlockSize()
1408: . vsID  - the vertex set ID
1409: . exoid - the id of the exodusII file (obtained with ex_open or ex_create)
1410: . step  - the time step to read
1411: - exofield - the position in the exodus field of the first component

1413:   Interpolated meshes are not supported.

1415:   Level: beginner

1417: .keywords: mesh,ExodusII
1418: .seealso: MeshCreate() MeshCreateExodus() SectionRealCreateLocalVector() VecLoadExodusVertex()

1420: @*/
1421: PetscErrorCode VecLoadExodusVertexSet(DM dm,Vec v,PetscInt vsID,MPI_Comm comm,PetscInt exoid,PetscInt step,PetscInt exofield)
1422: {
1423: #if defined(PETSC_HAVE_EXODUSII)
1424:   PetscInt       rank,num_proc;
1425:   PetscErrorCode ierr;
1426:   PetscInt       c,num_vertices_in_set=0,num_vertices_zero=0,num_cells_zero,num_dof;
1427:   Vec            vdof,vdof_zero;
1428:   PetscReal      *vdof_array;
1429:   VecScatter     scatter;
1430:   PetscInt       i,junk1,junk2,junk3,junk4,junk5;
1431:   char           junk[MAX_LINE_LENGTH+1];
1432:   IS             vsIS,vsIS_zero;
1433:   PetscInt       *vs_vertices_zero;
1434:   MPI_Comm       dmcomm;
1435: #endif

1437: #if defined(PETSC_HAVE_EXODUSII)
1439:   MPI_Comm_size(comm,&num_proc);
1440:   MPI_Comm_rank(comm,&rank);
1441:   VecGetBlockSize(v,&num_dof);
1442:   PetscObjectGetComm((PetscObject) dm,&dmcomm);

1444:   DMMeshGetStratumSize(dm,"Vertex Sets",vsID,&num_vertices_in_set);
1445:   VecCreateSeq(PETSC_COMM_SELF,num_vertices_in_set,&vdof);

1447:   if (num_proc == 1) {
1448:     if (num_vertices_in_set > 0) {
1449:       for (c = 0; c < num_dof; c++) {
1450:         VecGetArray(vdof,&vdof_array);
1451:         ierr = ex_get_nset_var(exoid,step,exofield+c,vsID,num_vertices_in_set,vdof_array);
1452:         if (ierr != 0) SETERRQ2(comm,PETSC_ERR_FILE_READ,"Unable to read from file id %i. ex_get_nset_var returned %i",exoid,ierr);
1453:         VecRestoreArray(vdof,&vdof_array);
1454:         VecStrideScatter(vdof,c,v,INSERT_VALUES);
1455:       }
1456:     }
1457:   } else {
1458:     /*
1459:       Build the scatter sending one dof towards cpu 0
1460:     */
1461:     DMMeshGetStratumIS(dm,"Vertex Sets",vsID,&vsIS);
1462:     if (!rank) {
1463:       ex_get_node_set_param(exoid,vsID,&num_vertices_zero,&junk1);
1464:     }
1465:     PetscMalloc(num_vertices_zero*sizeof(PetscInt),&vs_vertices_zero);
1466:     if (!rank) {
1467:       ex_get_node_set(exoid,vsID,vs_vertices_zero);
1468:       ex_get_init(exoid,junk,&junk1,&junk2,&num_cells_zero,&junk3,&junk4,&junk5);
1469:       for (i = 0; i < num_vertices_zero; i++) {
1470:         vs_vertices_zero[i] += num_cells_zero-1;
1471:       }
1472:     }
1473:     ISCreateGeneral(dmcomm,num_vertices_zero,vs_vertices_zero,PETSC_OWN_POINTER,&vsIS_zero);
1474:     DMMeshCreateScatterToZeroVertexSet(dm,vsIS,vsIS_zero,&scatter);
1475:     ISDestroy(&vsIS);
1476:     ISDestroy(&vsIS_zero);

1478:     VecCreateSeq(PETSC_COMM_SELF,num_vertices_zero*(rank == 0),&vdof_zero);

1480:     for (c = 0; c < num_dof; c++) {
1481:       if (!rank) {
1482:         VecGetArray(vdof_zero,&vdof_array);
1483:         ierr = ex_get_nset_var(exoid,step,exofield+c,vsID,num_vertices_zero,vdof_array);
1484:         if (ierr != 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_FILE_READ,"Unable to read from file id %i. ex_get_nset_var returned %i",exoid,ierr);
1485:         VecRestoreArray(vdof_zero,&vdof_array);
1486:       }
1487:       VecScatterBegin(scatter,vdof_zero,vdof,INSERT_VALUES,SCATTER_REVERSE);
1488:       VecScatterEnd(scatter,vdof_zero,vdof,INSERT_VALUES,SCATTER_REVERSE);

1490:       VecStrideScatter(vdof,c,v,INSERT_VALUES);
1491:     }
1492:     /*
1493:       Clean up
1494:     */
1495:     VecScatterDestroy(&scatter);
1496:     VecDestroy(&vdof_zero);
1497:   }
1498:   VecDestroy(&vdof);
1499: #else
1500:   SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_SUP, "This method requires ExodusII support. Reconfigure using --download-exodusii");
1501: #endif
1502:   return(0);
1503: }

1507: /*@

1509:   VecViewExodusCell - Write a Vec representing the values of a field at all cells to an exodusII file.

1511:   Input Parameters:
1512: + dm   - the DMMesh representing the mesh
1513: . v    - the LOCAL vector of values to be saved (i.e. with ghost values) obtained with SectionRealCreateLocalVector.
1514:          if v represents a field with several components, the block size must be set accordingly using VecSetBlockSize().
1515: . comm - the communicator associated to the exo file
1516:          if size(comm) == 1, each processor writes its local vector into a separate file;
1517:          if size(comm) > 1, the values are sent to cpu 0 and written in a single file
1518: . exoid - the id of the exodusII file (obtained with ex_open or ex_create)
1519: . step  - the time step to write
1520: - exofield - the position in the exodus field of the first component

1522:   Interpolated meshes are not supported.
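
   Example: a minimal sketch writing a 2-component cell field at time step 1;
   exoid is assumed to come from ex_open() or ex_create(), v from
   SectionRealCreateLocalVector(), and the field position is illustrative:

.vb
      VecSetBlockSize(v,2);
      VecViewExodusCell(dm,v,comm,exoid,1,1);
.ve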

1524:   Level: beginner

1526: .keywords: mesh,ExodusII
1527: .seealso: MeshCreate() MeshCreateExodus() SectionRealCreateLocalVector() VecLoadExodusCell()
1528:           VecViewExodusVertexSet() VecLoadExodusVertexSet() VecViewExodusCellSet() VecLoadExodusCellSet()
1529:           VecViewExodusVertex() VecLoadExodusVertex()

1531: @*/
1532: PetscErrorCode VecViewExodusCell(DM dm,Vec v,MPI_Comm comm,PetscInt exoid,PetscInt step,PetscInt exofield)
1533: {
1534: #if defined(PETSC_HAVE_EXODUSII)
1535:   PetscErrorCode ierr;
1536:   PetscInt       rank,num_proc,num_dof,num_cells,num_cells_in_set,num_cells_zero=0;
1537:   PetscInt       num_cs_zero,num_cs,set,c,istart;
1538:   PetscInt       *setsID_zero;
1539:   const PetscInt *setsID;
1540:   IS             setIS,csIS,setIS_zero;
1541:   VecScatter     setscatter,setscattertozero;
1542:   Vec            vdof,vdof_set,vdof_set_zero;
1543:   PetscReal      *vdof_set_array;
1544:   PetscInt       junk1,junk2,junk3,junk4,junk5;
1545:   char           junk[MAX_LINE_LENGTH+1];
1546:   MPI_Comm       dmcomm;
1547: #endif

1549: #if defined(PETSC_HAVE_EXODUSII)
1551:   MPI_Comm_size(comm,&num_proc);
1552:   MPI_Comm_rank(comm,&rank);
1553:   VecGetBlockSize(v,&num_dof);
1554:   PetscObjectGetComm((PetscObject) dm,&dmcomm);
1555:   DMMeshGetLabelSize(dm,"Cell Sets",&num_cs);
1556:   DMMeshGetStratumSize(dm,"height",0,&num_cells);

1558:   VecCreateSeq(PETSC_COMM_SELF,num_cells,&vdof);

1560:   if (num_proc == 1) {
1562:     DMMeshGetLabelIdIS(dm,"Cell Sets",&csIS);
1563:     ISGetIndices(csIS,&setsID);

1565:     for (set = 0; set < num_cs; set++) {
1566:       /*
1567:         Get the IS for the current set, the Vec containing a single dof and create
1568:         the scatter for the restriction to the current cell set
1569:       */
1570:       DMMeshGetStratumIS(dm,"Cell Sets",setsID[set],&setIS);
1571:       DMMeshGetStratumSize(dm,"Cell Sets",setsID[set],&num_cells_in_set);
1572:       VecCreateSeq(PETSC_COMM_SELF,num_cells_in_set,&vdof_set);
1573:       VecScatterCreate(vdof,setIS,vdof_set,NULL,&setscatter);
1574:       for (c = 0; c < num_dof; c++) {
1575:         VecStrideGather(v,c,vdof,INSERT_VALUES);

1577:         /*
1578:           Get the restriction of vdof to the current set
1579:         */
1580:         VecScatterBegin(setscatter,vdof,vdof_set,INSERT_VALUES,SCATTER_FORWARD);
1581:         VecScatterEnd(setscatter,vdof,vdof_set,INSERT_VALUES,SCATTER_FORWARD);

1583:         /*
1584:           Write array to disk
1585:         */
1586:         VecGetArray(vdof_set,&vdof_set_array);
1587:         ierr = ex_put_elem_var(exoid,step,exofield+c,setsID[set],num_cells_in_set,vdof_set_array);
1588:         if (ierr != 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_FILE_WRITE,"Unable to write to file id %i. ex_put_elem_var returned %i",exoid,ierr);
1589:         VecRestoreArray(vdof_set,&vdof_set_array);
1590:       }
1591:       VecDestroy(&vdof_set);
1592:       VecScatterDestroy(&setscatter);
1593:       ISDestroy(&setIS);
1594:     }
1595:     ISRestoreIndices(csIS,&setsID);
1596:     ISDestroy(&csIS);
1597:   } else {
1598:     /*
1599:       Get the number of blocks and the list of IDs directly from the file. This is easier
1600:       than trying to reconstruct the global list from the individual ISs
1601:     */
1602:     if (!rank) {
1603:       ex_get_init(exoid,junk,&junk1,&junk2,&junk3,&num_cs_zero,&junk4,&junk5);
1604:     }
1605:     MPI_Bcast(&num_cs_zero,1,MPIU_INT,0,dmcomm);
1606:     PetscMalloc(num_cs_zero * sizeof(PetscInt),&setsID_zero);
1607:     if (!rank) {
1608:       ex_get_elem_blk_ids(exoid,setsID_zero);
1609:     }
1610:     MPI_Bcast(setsID_zero,num_cs_zero,MPIU_INT,0,dmcomm);

1612:     istart = 0;
1613:     for (set = 0; set < num_cs_zero; set++) {
1614:       /*
1615:         Get the size of the set on cpu 0 and create a Vec to receive values
1616:       */
1617:       if (!rank) {
1618:         ex_get_elem_block(exoid,setsID_zero[set],junk,&num_cells_zero,&junk1,&junk2);
1619:       }
1620:       VecCreateSeq(PETSC_COMM_SELF,num_cells_zero,&vdof_set_zero);

1622:       /*
1623:         Get the IS for the current set, the Vec containing a single dof and create
1624:         the scatter for the restriction to the current cell set
1625:       */
1626:       DMMeshGetStratumIS(dm,"Cell Sets",setsID_zero[set],&setIS);
1627:       DMMeshGetStratumSize(dm,"Cell Sets",setsID_zero[set],&num_cells_in_set);
1628:       VecCreateSeq(PETSC_COMM_SELF,num_cells_in_set,&vdof_set);
1629:       VecScatterCreate(vdof,setIS,vdof_set,NULL,&setscatter);
1630:       /*
1631:         Get the scatter to send the values of a single dof at a single block to cpu 0
1632:       */
1633:       ISCreateStride(dmcomm,num_cells_zero,istart,1,&setIS_zero);
1634:       DMMeshCreateScatterToZeroCellSet(dm,setIS,setIS_zero,&setscattertozero);
1635:       ISDestroy(&setIS_zero);

1637:       for (c = 0; c < num_dof; c++) {
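        /* restrict the current component to this set, gather it on cpu 0, then write it to the file */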
1638:         VecStrideGather(v,c,vdof,INSERT_VALUES);

1640:         /*
1641:           Get the restriction of vdof to the current set
1642:         */
1643:         VecScatterBegin(setscatter,vdof,vdof_set,INSERT_VALUES,SCATTER_FORWARD);
1644:         VecScatterEnd(setscatter,vdof,vdof_set,INSERT_VALUES,SCATTER_FORWARD);

1646:         /*
1647:           Scatter vdof_set to cpu 0
1648:         */
1649:         VecScatterBegin(setscattertozero,vdof_set,vdof_set_zero,INSERT_VALUES,SCATTER_FORWARD);
1650:         VecScatterEnd(setscattertozero,vdof_set,vdof_set_zero,INSERT_VALUES,SCATTER_FORWARD);

1652:         /*
1653:           Write array to disk
1654:         */
1655:         if (!rank) {
1656:           VecGetArray(vdof_set_zero,&vdof_set_array);
1657:           ex_put_elem_var(exoid,step,exofield+c,setsID_zero[set],num_cells_zero,vdof_set_array);
1658:           VecRestoreArray(vdof_set_zero,&vdof_set_array);
1659:         }
1660:       }
1661:       istart += num_cells_zero;
1662:       VecDestroy(&vdof_set_zero);
1663:       VecDestroy(&vdof_set);
1664:       /*
1665:         Does this really need to be protected with this test?
1666:       */
1667:       if (num_cells_in_set > 0) {
1668:         VecScatterDestroy(&setscatter);
1669:       }
1673:       VecScatterDestroy(&setscattertozero);
1674:       ISDestroy(&setIS);
1675:     }
1676:     PetscFree(setsID_zero);
1677:   }

1679:   VecDestroy(&vdof);
1680: #else
1681:   SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_SUP, "This method requires ExodusII support. Reconfigure using --download-exodusii");
1682: #endif
1683:   return(0);
1684: }

1688: /*@

1690:   VecLoadExodusCell - Read a Vec representing the values of a field at all cells from an exodusII file.

1692:   Input Parameters:
1693: + dm   - the DMMesh representing the mesh
1694: . v    - the LOCAL vector of values to be read (i.e. with ghost values) obtained with SectionRealCreateLocalVector.
1695:          if v represents a field with several components, the block size must be set accordingly using VecSetBlockSize().
1696: . comm - the communicator associated to the exo file
1697:          if size(comm) == 1, each processor reads its local vector from a separate file;
1698:          if size(comm) > 1, the values are read on cpu 0 and distributed back to their owning processors
1699: . exoid - the id of the exodusII file (obtained with ex_open or ex_create)
1700: . step  - the time step to read
1701: - exofield - the position in the exodus field of the first component

1703:   Interpolated meshes are not supported.
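
   Example: a minimal sketch reading back the field written in the
   VecViewExodusCell() example; the step and field position are illustrative:

.vb
      VecSetBlockSize(v,2);
      VecLoadExodusCell(dm,v,comm,exoid,1,1);
.ve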

1705:   Level: beginner

1707: .keywords: mesh,ExodusII
1708: .seealso: MeshCreate() MeshCreateExodus() SectionRealCreateLocalVector() VecViewExodusCell()
1709:           VecViewExodusVertexSet() VecLoadExodusVertexSet() VecViewExodusCellSet() VecLoadExodusCellSet()
1710:           VecViewExodusVertex() VecLoadExodusVertex()

1712: @*/
1713: PetscErrorCode VecLoadExodusCell(DM dm,Vec v,MPI_Comm comm,PetscInt exoid,PetscInt step,PetscInt exofield)
1714: {
1715: #if defined(PETSC_HAVE_EXODUSII)
1717:   PetscInt       rank,num_proc,num_dof,num_cells,num_cells_in_set,num_cells_zero=0;
1718:   PetscInt       num_cs_zero,num_cs,set,c,istart;
1719:   PetscInt       *setsID_zero;
1720:   const PetscInt *setsID;
1721:   IS             setIS,csIS,setIS_zero;
1722:   VecScatter     setscatter,setscattertozero;
1723:   Vec            vdof,vdof_set,vdof_set_zero;
1724:   PetscReal      *vdof_set_array;
1725:   PetscInt       junk1,junk2,junk3,junk4,junk5;
1726:   char           junk[MAX_LINE_LENGTH+1];
1727:   MPI_Comm       dmcomm;
1728: #endif

1730: #if defined(PETSC_HAVE_EXODUSII)
1732:   MPI_Comm_size(comm,&num_proc);
1733:   MPI_Comm_rank(comm,&rank);
1734:   VecGetBlockSize(v,&num_dof);
1735:   PetscObjectGetComm((PetscObject) dm,&dmcomm);
1736:   DMMeshGetLabelSize(dm,"Cell Sets",&num_cs);
1737:   DMMeshGetStratumSize(dm,"height",0,&num_cells);

1739:   VecCreateSeq(PETSC_COMM_SELF,num_cells,&vdof);

1741:   if (num_proc == 1) {
1743:     DMMeshGetLabelIdIS(dm,"Cell Sets",&csIS);
1744:     ISGetIndices(csIS,&setsID);

1746:     for (c = 0; c < num_dof; c++) {
1747:       for (set = 0; set < num_cs; set++) {
1748:         /*
1749:           Get the IS for the current set, the Vec containing a single dof and create
1750:           the scatter for the restriction to the current cell set
1751:         */
1752:         DMMeshGetStratumIS(dm,"Cell Sets",setsID[set],&setIS);
1753:         DMMeshGetStratumSize(dm,"Cell Sets",setsID[set],&num_cells_in_set);
1754:         VecCreateSeq(PETSC_COMM_SELF,num_cells_in_set,&vdof_set);
1755:         VecScatterCreate(vdof,setIS,vdof_set,NULL,&setscatter);
1756:         /*
1757:           Read array from disk
1758:         */
1759:         VecGetArray(vdof_set,&vdof_set_array);
1760:         ex_get_elem_var(exoid,step,exofield+c,setsID[set],num_cells_in_set,vdof_set_array);
1761:         VecRestoreArray(vdof_set,&vdof_set_array);
1762:         /*
1763:           Copy values to full dof vec
1764:         */
1765:         VecScatterBegin(setscatter,vdof_set,vdof,INSERT_VALUES,SCATTER_REVERSE);
1766:         VecScatterEnd(setscatter,vdof_set,vdof,INSERT_VALUES,SCATTER_REVERSE);
1767:         /* destroy the per-set objects here: they are re-created for every set */
1768:         VecDestroy(&vdof_set);
1769:         VecScatterDestroy(&setscatter);
1770:         ISDestroy(&setIS);
1771:       }
1772:       VecStrideScatter(vdof,c,v,INSERT_VALUES);
1773:     }
1774:     ISRestoreIndices(csIS,&setsID);
1775:     ISDestroy(&csIS);
1776:   } else {
1777:     /*
1778:       Get the number of blocks and the list of IDs directly from the file. This is easier
1779:       than trying to reconstruct the global list from the individual ISs
1780:     */
1781:     if (!rank) {
1782:       ex_get_init(exoid,junk,&junk1,&junk2,&junk3,&num_cs_zero,&junk4,&junk5);
1783:     }
1784:     MPI_Bcast(&num_cs_zero,1,MPIU_INT,0,dmcomm);
1785:     PetscMalloc(num_cs_zero * sizeof(PetscInt),&setsID_zero);
1786:     if (!rank) {
1787:       ex_get_elem_blk_ids(exoid,setsID_zero);
1788:     }
1789:     MPI_Bcast(setsID_zero,num_cs_zero,MPIU_INT,0,dmcomm);

1791:     for (c = 0; c < num_dof; c++) {
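      /* cpu 0 reads each cell set from the file; the values are then scattered back to their owning processors */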
1792:       istart = 0;
1793:       for (set = 0; set < num_cs_zero; set++) {
1794:         /* Get the size of the set on cpu 0 and create a Vec to receive values */
1795:         if (!rank) {
1796:           ex_get_elem_block(exoid,setsID_zero[set],junk,&num_cells_zero,&junk1,&junk2);
1797:         }
1798:         VecCreateSeq(PETSC_COMM_SELF,num_cells_zero,&vdof_set_zero);

1800:         /*
1801:           Get the IS for the current set, the Vec containing a single dof and create
1802:           the scatter for the restriction to the current cell set
1803:         */
1804:         DMMeshGetStratumIS(dm,"Cell Sets",setsID_zero[set],&setIS);
1805:         DMMeshGetStratumSize(dm,"Cell Sets",setsID_zero[set],&num_cells_in_set);
1806:         VecCreateSeq(PETSC_COMM_SELF,num_cells_in_set,&vdof_set);
1807:         VecScatterCreate(vdof,setIS,vdof_set,NULL,&setscatter);
1808:         /*
1809:           Get the scatter to send the values of a single dof at a single block to cpu 0
1810:         */
1811:         ISCreateStride(dmcomm,num_cells_zero,istart,1,&setIS_zero);
1812:         DMMeshCreateScatterToZeroCellSet(dm,setIS,setIS_zero,&setscattertozero);
1813:         ISDestroy(&setIS_zero);

1815:         /*
1816:           Read array from disk
1817:         */
1818:         if (!rank) {
1819:           VecGetArray(vdof_set_zero,&vdof_set_array);
1820:           ex_get_elem_var(exoid,step,exofield+c,setsID_zero[set],num_cells_zero,vdof_set_array);
1821:           VecRestoreArray(vdof_set_zero,&vdof_set_array);
1822:         }
1823:         /*
1824:           Send values back to their owning cpu
1825:         */
1826:         VecScatterBegin(setscattertozero,vdof_set_zero,vdof_set,INSERT_VALUES,SCATTER_REVERSE);
1827:         VecScatterEnd(setscattertozero,vdof_set_zero,vdof_set,INSERT_VALUES,SCATTER_REVERSE);
1828:         /*
1829:           Copy the values associated to the current set back into the component vec
1830:         */
1831:         VecScatterBegin(setscatter,vdof_set,vdof,INSERT_VALUES,SCATTER_REVERSE);
1832:         VecScatterEnd(setscatter,vdof_set,vdof,INSERT_VALUES,SCATTER_REVERSE);
1833:         istart += num_cells_zero;
1834:         MPI_Bcast(&istart,1,MPIU_INT,0,dmcomm);
1835:         /* destroy the per-set objects here: they are re-created for every set */
1836:         VecDestroy(&vdof_set_zero);
1837:         VecDestroy(&vdof_set);
1838:         VecScatterDestroy(&setscatter);
1839:         VecScatterDestroy(&setscattertozero);
1840:         ISDestroy(&setIS);
1841:       }
1842:       /* Copy the component back into v */
1843:       VecStrideScatter(vdof,c,v,INSERT_VALUES);
1844:     }
1846:     PetscFree(setsID_zero);
1848:   }

1850:   VecDestroy(&vdof);
1851: #else
1852:   SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_SUP, "This method requires ExodusII support. Reconfigure using --download-exodusii");
1853: #endif
1854:   return(0);
1855: }

1859: /*@

1861:   VecViewExodusCellSet - Write a Vec representing the values of a field at a cell set to an exodusII file.

1863:   Input Parameters:
1864: + dm   - the DMMesh representing the mesh
1865: . v    - the LOCAL vector of values to be saved (i.e. with ghost values) obtained with SectionRealCreateLocalVector.
1866:          if v represents a field with several components, the block size must be set accordingly using VecSetBlockSize().
1867: . csID - the ID of the cell set
1868: . comm - the communicator associated to the exo file
1869:          if size(comm) == 1, each processor writes its local vector into a separate file;
1870:          if size(comm) > 1, the values are sent to cpu 0 and written in a single file
1871: . exoid - the id of the exodusII file (obtained with ex_open or ex_create)
1872: . step  - the time step to write
1873: - exofield - the position in the exodus field of the first component

1875:   Interpolated meshes are not supported.
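
   Example: a minimal sketch writing one field for every cell set in the mesh;
   the step and field position are illustrative:

.vb
      IS             csIS;
      const PetscInt *csID;
      PetscInt       num_cs,set;

      DMMeshGetLabelSize(dm,"Cell Sets",&num_cs);
      DMMeshGetLabelIdIS(dm,"Cell Sets",&csIS);
      ISGetIndices(csIS,&csID);
      for (set = 0; set < num_cs; set++) {
        VecViewExodusCellSet(dm,v,csID[set],comm,exoid,1,1);
      }
      ISRestoreIndices(csIS,&csID);
      ISDestroy(&csIS);
.ve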

1877:   Level: beginner

1879: .keywords: mesh,ExodusII
1880: .seealso: MeshCreate() MeshCreateExodus() SectionRealCreateLocalVector() VecLoadExodusCellSet()
1881:           VecViewExodusVertexSet() VecLoadExodusVertexSet() VecViewExodusCell() VecLoadExodusCell()
1882:           VecViewExodusVertex() VecLoadExodusVertex()

1884: @*/
1885: PetscErrorCode VecViewExodusCellSet(DM dm,Vec v,PetscInt csID,MPI_Comm comm,PetscInt exoid,PetscInt step,PetscInt exofield)
1886: {
1887: #if defined(PETSC_HAVE_EXODUSII)
1888:   PetscErrorCode ierr;
1889:   PetscInt       rank,num_proc,num_dof,num_cells,num_cells_zero=0;
1890:   PetscInt       i,c;
1891:   PetscReal      *vdof_array;
1892:   IS             csIS,csIS_zero;
1893:   Vec            vdof,vdof_zero;
1894:   VecScatter     scatter;
1895:   PetscInt       istart = 0;

1897:   PetscInt junk1,junk2,junk3,junk4,junk5;
1898:   char     junk[MAX_LINE_LENGTH+1];
1899:   PetscInt num_cs_global;
1900:   PetscInt *csIDs;
1901:   MPI_Comm dmcomm;
1902: #endif

1904: #if defined(PETSC_HAVE_EXODUSII)
1906:   MPI_Comm_size(comm,&num_proc);
1907:   MPI_Comm_rank(comm,&rank);
1908:   VecGetBlockSize(v,&num_dof);
1909:   PetscObjectGetComm((PetscObject) dm,&dmcomm);
1910:   DMMeshGetStratumIS(dm,"Cell Sets",csID,&csIS);
1911:   ISGetSize(csIS,&num_cells);

1913:   VecCreateSeq(PETSC_COMM_SELF,num_cells,&vdof);

1915:   if (num_proc == 1) {
1916:     for (c = 0; c < num_dof; c++) {
1917:       VecStrideGather(v,c,vdof,INSERT_VALUES);
1918:       VecGetArray(vdof,&vdof_array);
1919:       ierr = ex_put_elem_var(exoid,step,exofield+c,csID,num_cells,vdof_array);
1920:       if (ierr != 0) SETERRQ2(comm,PETSC_ERR_FILE_WRITE,"Unable to write to file id %i. ex_put_elem_var returned %i",exoid,ierr);
1921:       VecRestoreArray(vdof,&vdof_array);
1922:     }
1923:   } else {
1924:     /*
1925:       Get the global size of the cell set from the file, since we can.
1926:       There is no direct way to get the index of the first cell in a cell set from exo
1927:       (it depends on the ordering in the file), so we seek the cell set in the file and
1928:       accumulate offsets until we find it.
1929:     */
1930:     if (!rank) {
1931:       istart = 0;
1932:       ex_get_init(exoid,junk,&junk1,&junk2,&junk3,&num_cs_global,&junk4,&junk5);
1933:       PetscMalloc(num_cs_global * sizeof(PetscInt),&csIDs);
1934:       ex_get_elem_blk_ids(exoid,csIDs);
1935:       for (i = 0; i < num_cs_global; i++) {
1936:         ex_get_elem_block(exoid,csIDs[i],junk,&num_cells_zero,&junk1,&junk2);
1937:         if (csIDs[i] == csID) break;
1938:         else {
1939:           istart += num_cells_zero;
1940:         }
1941:       }
1942:       PetscFree(csIDs);
1943:       ex_get_elem_block(exoid,csID,junk,&num_cells_zero,&junk1,&junk2);
1944:     }
1945:     ISCreateStride(dmcomm,num_cells_zero,istart,1,&csIS_zero);
1946:     DMMeshCreateScatterToZeroCellSet(dm,csIS,csIS_zero,&scatter);
1947:     ISDestroy(&csIS_zero);

1949:     VecCreateSeq(PETSC_COMM_SELF,num_cells_zero,&vdof_zero);
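    /* vdof_zero collects one component of the whole cell set on cpu 0 before it is written */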

1951:     for (c = 0; c < num_dof; c++) {
1952:       VecStrideGather(v,c,vdof,INSERT_VALUES);
1953:       VecScatterBegin(scatter,vdof,vdof_zero,INSERT_VALUES,SCATTER_FORWARD);
1954:       VecScatterEnd(scatter,vdof,vdof_zero,INSERT_VALUES,SCATTER_FORWARD);
1955:       if (!rank) {
1956:         VecGetArray(vdof_zero,&vdof_array);
1957:         ex_put_elem_var(exoid,step,exofield+c,csID,num_cells_zero,vdof_array);
1958:         VecRestoreArray(vdof_zero,&vdof_array);
1959:       }
1960:     }
1961:     VecDestroy(&vdof_zero);
1962:     VecScatterDestroy(&scatter);
1963:   }
1964:   ISDestroy(&csIS);
1965:   VecDestroy(&vdof);
1966: #else
1967:   SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_SUP, "This method requires ExodusII support. Reconfigure using --download-exodusii");
1968: #endif
1969:   return(0);
1970: }

1974: /*@

1976:   VecLoadExodusCellSet - Read a Vec representing the values of a field at a cell set from an exodusII file.

1978:   Input Parameters:
1979: + dm   - the DMMesh representing the mesh
1980: . v    - the LOCAL vector of values to be read (i.e. with ghost values) obtained with SectionRealCreateLocalVector.
1981:          if v represents a field with several components, the block size must be set accordingly using VecSetBlockSize().
1982: . csID - the ID of the cell set
1983: . comm - the communicator associated to the exo file
1984:          if size(comm) == 1, each processor reads its local vector from a separate file;
1985:          if size(comm) > 1, the values are read on cpu 0 and distributed back to their owning processors
1986: . exoid - the id of the exodusII file (obtained with ex_open or ex_create)
1987: . step  - the time step to read
1988: - exofield - the position in the exodus field of the first component

1990:   Interpolated meshes are not supported.
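
   Example: a minimal sketch reading back cell set 10 written with
   VecViewExodusCellSet(); the set ID, step, and field position are
   illustrative:

.vb
      VecSetBlockSize(v,2);
      VecLoadExodusCellSet(dm,v,10,comm,exoid,1,1);
.ve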

1992:   Level: beginner

1994: .keywords: mesh,ExodusII
1995: .seealso: MeshCreate() MeshCreateExodus() SectionRealCreateLocalVector() VecViewExodusCellSet()
1996:           VecViewExodusVertexSet() VecLoadExodusVertexSet() VecViewExodusCell() VecLoadExodusCell()
1997:           VecViewExodusVertex() VecLoadExodusVertex()

1999: @*/
2000: PetscErrorCode VecLoadExodusCellSet(DM dm,Vec v,PetscInt csID,MPI_Comm comm,PetscInt exoid,PetscInt step,PetscInt exofield)
2001: {
2002: #if defined(PETSC_HAVE_EXODUSII)
2003:   PetscErrorCode ierr;
2004:   PetscInt       rank,num_proc,num_dof,num_cells,num_cells_zero=0;
2005:   PetscInt       i,c;
2006:   PetscReal      *vdof_array;
2007:   IS             csIS,csIS_zero;
2008:   Vec            vdof,vdof_zero;
2009:   VecScatter     scatter;
2010:   PetscInt       istart = 0;

2012:   PetscInt junk1,junk2,junk3,junk4,junk5;
2013:   PetscInt num_cs_global;
2014:   PetscInt *csIDs;
2015:   char     junk[MAX_LINE_LENGTH+1];
2016:   MPI_Comm dmcomm;
2017: #endif

2019: #if defined(PETSC_HAVE_EXODUSII)
2021:   MPI_Comm_size(comm,&num_proc);
2022:   MPI_Comm_rank(comm,&rank);
2023:   VecGetBlockSize(v,&num_dof);
2024:   PetscObjectGetComm((PetscObject) dm,&dmcomm);

2026:   DMMeshGetStratumIS(dm,"Cell Sets",csID,&csIS);
2027:   ISGetSize(csIS,&num_cells);

2029:   VecCreateSeq(PETSC_COMM_SELF,num_cells,&vdof);

2031:   if (num_proc == 1) {
2032:     for (c = 0; c < num_dof; c++) {
2033:       VecGetArray(vdof,&vdof_array);
2034:       ierr = ex_get_elem_var(exoid,step,exofield+c,csID,num_cells,vdof_array);
2035:       if (ierr) SETERRQ2(comm,PETSC_ERR_FILE_READ,"Unable to read from file id %i. ex_get_elem_var returned %i",exoid,ierr);
2036:       VecRestoreArray(vdof,&vdof_array);
2037:       VecStrideScatter(vdof,c,v,INSERT_VALUES);
2038:     }
2039:   } else {
2040:     /*
2041:       Get the global size of the cell set from the file, since we can.
2042:       There is no direct way to get the index of the first cell in a cell set from exo
2043:       (it depends on the ordering in the file), so we seek the cell set in the file and
2044:       accumulate offsets until we find it.
2045:     */
2046:     if (!rank) {
2047:       istart = 0;
2048:       ex_get_init(exoid,junk,&junk1,&junk2,&junk3,&num_cs_global,&junk4,&junk5);
2049:       PetscMalloc(num_cs_global * sizeof(PetscInt),&csIDs);
2050:       ex_get_elem_blk_ids(exoid,csIDs);
2051:       for (i = 0; i < num_cs_global; i++) {
2052:         ex_get_elem_block(exoid,csIDs[i],junk,&num_cells_zero,&junk1,&junk2);
2053:         if (csIDs[i] == csID) break;
2054:         else istart += num_cells_zero;
2055:       }
2056:       PetscFree(csIDs);
2057:       ex_get_elem_block(exoid,csID,junk,&num_cells_zero,&junk1,&junk2);
2058:     }
2059:     ISCreateStride(dmcomm,num_cells_zero,istart,1,&csIS_zero);
2060:     DMMeshCreateScatterToZeroCellSet(dm,csIS,csIS_zero,&scatter);
2061:     ISDestroy(&csIS_zero);

2063:     VecCreateSeq(PETSC_COMM_SELF,num_cells_zero,&vdof_zero);

2065:     for (c = 0; c < num_dof; c++) {
2066:       if (!rank) {
2067:         VecGetArray(vdof_zero,&vdof_array);
2068:         ex_get_elem_var(exoid,step,exofield+c,csID,num_cells_zero,vdof_array);
2069:         VecRestoreArray(vdof_zero,&vdof_array);
2070:       }
2071:       VecScatterBegin(scatter,vdof_zero,vdof,INSERT_VALUES,SCATTER_REVERSE);
2072:       VecScatterEnd(scatter,vdof_zero,vdof,INSERT_VALUES,SCATTER_REVERSE);
2073:       VecStrideScatter(vdof,c,v,INSERT_VALUES);
2074:     }
2075:     VecDestroy(&vdof_zero);
2076:     VecScatterDestroy(&scatter);
2077:   }
2078:   ISDestroy(&csIS);
2079:   VecDestroy(&vdof);
2080: #else
2081:   SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_SUP, "This method requires ExodusII support. Reconfigure using --download-exodusii");
2082: #endif
2083:   return(0);
2084: }