Actual source code: vecio.c
1: #define PETSCVEC_DLL
2: /*
3: This file contains simple binary input routines for vectors. The
4: analogous output routines are within each vector implementation's
5:    VecView (with viewer type PETSC_VIEWER_BINARY)
6: */
8: #include "petsc.h"
9: #include "petscsys.h"
10: #include "petscvec.h"
11: #include "private/vecimpl.h"
12: #if defined(PETSC_HAVE_PNETCDF)
14: #include "pnetcdf.h"
16: #endif
17: EXTERN PetscErrorCode VecLoad_Binary(PetscViewer, const VecType, Vec*);
18: EXTERN PetscErrorCode VecLoad_Netcdf(PetscViewer, Vec*);
19: EXTERN PetscErrorCode VecLoad_HDF5(PetscViewer, Vec*);
20: EXTERN PetscErrorCode VecLoadIntoVector_Binary(PetscViewer, Vec);
21: EXTERN PetscErrorCode VecLoadIntoVector_Netcdf(PetscViewer, Vec);
25: /*@C
26: VecLoad - Loads a vector that has been stored in binary format
27: with VecView().
29: Collective on PetscViewer
31: Input Parameters:
32: + viewer - binary file viewer, obtained from PetscViewerBinaryOpen() or
33: NetCDF file viewer, obtained from PetscViewerNetcdfOpen()
34: - outtype - the type of vector: VECSEQ, VECMPI, or PETSC_NULL (which indicates
35:             using VECSEQ if the communicator in the viewer has size 1, and
36:             VECMPI otherwise).
38: Output Parameter:
39: . newvec - the newly loaded vector
41: Level: intermediate
43: Notes:
44: The input file must contain the full global vector, as
45: written by the routine VecView().
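   For illustration, a typical load sequence might look like the following
   (a sketch only; the file name is illustrative and error checking is omitted):
.vb
   PetscViewer viewer;
   Vec         x;
   PetscViewerBinaryOpen(PETSC_COMM_WORLD,"myvec.dat",FILE_MODE_READ,&viewer);
   VecLoad(viewer,PETSC_NULL,&x);
   PetscViewerDestroy(viewer);
.ve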
47: Notes for advanced users:
48: Most users should not need to know the details of the binary storage
49: format, since VecLoad() and VecView() completely hide these details.
50: But for anyone who is interested, the standard binary vector storage
51: format is
52: .vb
53: int VEC_FILE_COOKIE
54: int number of rows
55: PetscScalar *values of all entries
56: .ve
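   As an illustration only, the layout above could be read outside of VecLoad()
   roughly as follows, where the first PetscInt read back should equal
   VEC_FILE_COOKIE and the second gives the global number of entries (a sketch;
   the file name is illustrative and error checking is omitted):
.vb
   int         fd;
   PetscInt    cookie,rows;
   PetscScalar *values;
   PetscBinaryOpen("myvec.dat",FILE_MODE_READ,&fd);
   PetscBinaryRead(fd,&cookie,1,PETSC_INT);
   PetscBinaryRead(fd,&rows,1,PETSC_INT);
   PetscMalloc(rows*sizeof(PetscScalar),&values);
   PetscBinaryRead(fd,values,rows,PETSC_SCALAR);
   PetscBinaryClose(fd);
.ve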
58: Note for users of old-style Cray vector machines: the ints stored in the binary file are 32-bit
59: integers, not 64-bit as they are represented in memory, so if you
60: write your own routines to read/write these binary files from such a Cray
61: you need to adjust the integer sizes that you read in; see
62: PetscBinaryRead() and PetscBinaryWrite() for how this may be
63: done. This note does not apply to the Cray XT3 and similar machines.
65: In addition, PETSc automatically does the byte swapping for
66: machines that store the bytes reversed, e.g. DEC Alpha, FreeBSD,
67: Linux, Windows, and the Paragon; thus if you write your own binary
68: read/write routines you must swap the bytes yourself; see PetscBinaryRead()
69: and PetscBinaryWrite() for how this may be done.
71: Concepts: vector^loading from file
73: .seealso: PetscViewerBinaryOpen(), VecView(), MatLoad(), VecLoadIntoVector()
74: @*/
75: PetscErrorCode VecLoad(PetscViewer viewer, const VecType outtype,Vec *newvec)
76: {
78: PetscTruth isbinary,flg;
79: char vtype[256];
80: const char *prefix;
81: #if defined(PETSC_HAVE_PNETCDF)
82: PetscTruth isnetcdf;
83: #endif
84: #if defined(PETSC_HAVE_HDF5)
85: PetscTruth ishdf5;
86: #endif
91: PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_BINARY,&isbinary);
92: #if defined(PETSC_HAVE_HDF5)
93: PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_HDF5,&ishdf5);
94: #endif
95: #if defined(PETSC_HAVE_PNETCDF)
96: PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_NETCDF,&isnetcdf);
97: #endif
99: #ifndef PETSC_USE_DYNAMIC_LIBRARIES
100: VecInitializePackage(PETSC_NULL);
101: #endif
103: PetscLogEventBegin(VEC_Load,viewer,0,0,0);
104: #if defined(PETSC_HAVE_PNETCDF)
105: if (isnetcdf) {
106: VecLoad_Netcdf(viewer,newvec);
107: } else
108: #endif
109: #if defined(PETSC_HAVE_HDF5)
110: if (ishdf5) {
111: VecLoad_HDF5(viewer,newvec);
112: } else
113: #endif
114: {
115: Vec factory;
116: MPI_Comm comm;
117: PetscErrorCode (*r)(PetscViewer, const VecType,Vec*);
118: PetscMPIInt size;
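    /* Allow the vector type chosen for the load to be overridden from the
       options database (-vec_type or -vecload_type), using the viewer's
       options prefix if it has one */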
120: PetscObjectGetOptionsPrefix((PetscObject)viewer,(const char**)&prefix);
121: PetscOptionsGetString(prefix,"-vec_type",vtype,256,&flg);
122: if (flg) {
123: outtype = vtype;
124: }
125: PetscOptionsGetString(prefix,"-vecload_type",vtype,256,&flg);
126: if (flg) {
127: outtype = vtype;
128: }
129: PetscObjectGetComm((PetscObject)viewer,&comm);
130: if (!outtype) {
131: MPI_Comm_size(comm,&size);
132: outtype = (size > 1) ? VECMPI : VECSEQ;
133: }
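    /* Create a throwaway "factory" vector of the requested type solely to look
       up the type-specific load routine (ops->load); the factory vector is
       destroyed as soon as the function pointer has been extracted */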
135: VecCreate(comm,&factory);
136: VecSetSizes(factory,1,PETSC_DETERMINE);
137: VecSetType(factory,outtype);
138: r = factory->ops->load;
139: VecDestroy(factory);
140: if (!r) SETERRQ1(PETSC_ERR_SUP,"VecLoad is not supported for type: %s",outtype);
141: (*r)(viewer,outtype,newvec);
142: }
143: PetscLogEventEnd(VEC_Load,viewer,0,0,0);
144: return(0);
145: }
147: #if defined(PETSC_HAVE_HDF5)
150: PetscErrorCode VecLoad_HDF5(PetscViewer viewer,Vec *newvec)
151: {
153: int rank = 1; /* Could have rank 2 for blocked vectors */
154: PetscInt n, N, bs, start;
155: PetscScalar *x;
156: PetscTruth flag;
157: hid_t file_id, dset_id, filespace, memspace, plist_id;
158: hsize_t dims[1];
159: hsize_t count[1];
160: hsize_t offset[1];
161: herr_t status;
162: MPI_Comm comm;
165:   SETERRQ(PETSC_ERR_SUP,"Since the HDF5 format gives an ASCII name to each object in the file, one must use VecLoadIntoVector() after setting the name of the Vec with PetscObjectSetName()");
166: PetscLogEventBegin(VEC_Load,viewer,0,0,0);
167: PetscObjectGetComm((PetscObject)viewer,&comm);
168: PetscViewerHDF5GetFileId(viewer, &file_id);
170: /* Open the existing dataset with default properties */
171: #if (H5_VERS_MAJOR * 10000 + H5_VERS_MINOR * 100 + H5_VERS_RELEASE >= 10800)
172: dset_id = H5Dopen2(file_id, "Vec", H5P_DEFAULT);
173: #else
174: dset_id = H5Dopen(file_id, "Vec");
175: #endif
177: /* Retrieve the dataspace for the dataset */
178: filespace = H5Dget_space(dset_id);
179: H5Sget_simple_extent_dims(filespace, dims, PETSC_NULL);
180: N = dims[0];
181: VecCreate(comm,newvec);
182: VecSetSizes(*newvec,PETSC_DECIDE,N);
183: PetscOptionsGetInt(PETSC_NULL,"-vecload_block_size",&bs,&flag);
184: if (flag) {
185: VecSetBlockSize(*newvec,bs);
186: }
187: VecSetFromOptions(*newvec);
188: VecGetLocalSize(*newvec,&n);
189: VecGetOwnershipRange(*newvec,&start,PETSC_NULL);
191: /* Each process defines a memory dataspace and reads its hyperslab from the file */
192: count[0] = n;
193: memspace = H5Screate_simple(rank, count, NULL);
195: /* Select hyperslab in the file */
196: offset[0] = start;
197: status = H5Sselect_hyperslab(filespace, H5S_SELECT_SET, offset, NULL, count, NULL);CHKERRQ(status);
199: /* Create property list for collective dataset read */
200: plist_id = H5Pcreate(H5P_DATASET_XFER);
201: #if defined(PETSC_HAVE_H5PSET_FAPL_MPIO)
202: status = H5Pset_dxpl_mpio(plist_id, H5FD_MPIO_COLLECTIVE);CHKERRQ(status);
203: /* To read the dataset independently use H5Pset_dxpl_mpio(plist_id, H5FD_MPIO_INDEPENDENT) */
204: #endif
206: VecGetArray(*newvec, &x);
207: status = H5Dread(dset_id, H5T_NATIVE_DOUBLE, memspace, filespace, plist_id, x);CHKERRQ(status);
208: VecRestoreArray(*newvec, &x);
210: /* Close/release resources */
211: status = H5Pclose(plist_id);CHKERRQ(status);
212: status = H5Sclose(filespace);CHKERRQ(status);
213: status = H5Sclose(memspace);CHKERRQ(status);
214: status = H5Dclose(dset_id);CHKERRQ(status);
216: VecAssemblyBegin(*newvec);
217: VecAssemblyEnd(*newvec);
218: PetscLogEventEnd(VEC_Load,viewer,0,0,0);
219: return(0);
220: }
221: #endif
223: #if defined(PETSC_HAVE_PNETCDF)
226: PetscErrorCode VecLoad_Netcdf(PetscViewer viewer,Vec *newvec)
227: {
229: PetscMPIInt rank;
230: PetscInt N,n,bs;
231: PetscInt ncid,start;
232: Vec vec;
233: PetscScalar *avec;
234: MPI_Comm comm;
235: PetscTruth flag;
236: char name[NC_MAX_NAME];
239: PetscLogEventBegin(VEC_Load,viewer,0,0,0);
240: PetscObjectGetComm((PetscObject)viewer,&comm);
241: MPI_Comm_rank(comm,&rank);
242: PetscViewerNetcdfGetID(viewer,&ncid);
243: ncmpi_inq_dim(ncid,0,name,(MPI_Offset*)&N); /* N gets the global vector size */
244: VecCreate(comm,&vec);
245: VecSetSizes(vec,PETSC_DECIDE,N);
246: if (!rank) {
247: PetscOptionsGetInt(PETSC_NULL,"-vecload_block_size",&bs,&flag);
248: if (flag) {
249: VecSetBlockSize(vec,bs);
250: }
251: }
252: VecSetFromOptions(vec);
253: VecGetLocalSize(vec,&n);
254: VecGetOwnershipRange(vec,&start,PETSC_NULL);
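  /* Each process reads its owned contiguous range [start, start+n) of variable 0
     in the NetCDF file collectively, directly into the vector's local array */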
255: VecGetArray(vec,&avec);
256: ncmpi_get_vara_double_all(ncid,0,(const MPI_Offset*)&start,(const MPI_Offset*)&n,(double *)avec);
257: VecRestoreArray(vec,&avec);
258: *newvec = vec;
259: VecAssemblyBegin(vec);
260: VecAssemblyEnd(vec);
261: PetscLogEventEnd(VEC_Load,viewer,0,0,0);
262: return(0);
263: }
264: #endif
268: PetscErrorCode VecLoad_Binary(PetscViewer viewer, const VecType itype,Vec *newvec)
269: {
270: PetscMPIInt size,rank,tag;
271: int fd;
272: PetscInt i,rows,type,n,*range,bs,tr[2];
274: Vec vec;
275: PetscScalar *avec,*avecwork;
276: MPI_Comm comm;
277: MPI_Request request;
278: MPI_Status status;
279: PetscTruth flag;
280: #if defined(PETSC_HAVE_MPIIO)
281: PetscTruth useMPIIO;
282: #endif
285: PetscLogEventBegin(VEC_Load,viewer,0,0,0);
286: PetscViewerBinaryGetDescriptor(viewer,&fd);
287: PetscObjectGetComm((PetscObject)viewer,&comm);
288: MPI_Comm_rank(comm,&rank);
289: MPI_Comm_size(comm,&size);
291: /* Read vector header. */
292: PetscViewerBinaryRead(viewer,tr,2,PETSC_INT);
293: type = tr[0];
294: rows = tr[1];
295: if (type != VEC_FILE_COOKIE) {
296: PetscLogEventEnd(VEC_Load,viewer,0,0,0);
297:     SETERRQ(PETSC_ERR_ARG_WRONG,"Not a vector next in file");
298: }
299: VecCreate(comm,&vec);
300: VecSetSizes(vec,PETSC_DECIDE,rows);
301: PetscOptionsGetInt(PETSC_NULL,"-vecload_block_size",&bs,&flag);
302: if (flag) {
303: VecSetBlockSize(vec,bs);
304: }
305: VecSetFromOptions(vec);
306: VecGetLocalSize(vec,&n);
307: PetscObjectGetNewTag((PetscObject)viewer,&tag);
308: VecGetArray(vec,&avec);
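  /* Two read strategies follow: without MPI-IO, process 0 reads the entire file
     and ships each other process its contiguous chunk; with MPI-IO, every process
     reads its own piece of the file collectively through a subarray view */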
309: #if defined(PETSC_HAVE_MPIIO)
310: PetscViewerBinaryGetMPIIO(viewer,&useMPIIO);
311: if (!useMPIIO) {
312: #endif
313: if (!rank) {
314: PetscBinaryRead(fd,avec,n,PETSC_SCALAR);
316: if (size > 1) {
317:       /* read in the other chunks and send them to the other processes */
318:       /* determine the maximum chunk size owned by any other process */
319: range = vec->map->range;
320: n = 1;
321: for (i=1; i<size; i++) {
322: n = PetscMax(n,range[i+1] - range[i]);
323: }
324: PetscMalloc(n*sizeof(PetscScalar),&avecwork);
325: for (i=1; i<size; i++) {
326: n = range[i+1] - range[i];
327: PetscBinaryRead(fd,avecwork,n,PETSC_SCALAR);
328: MPI_Isend(avecwork,n,MPIU_SCALAR,i,tag,comm,&request);
329: MPI_Wait(&request,&status);
330: }
331: PetscFree(avecwork);
332: }
333: } else {
334: MPI_Recv(avec,n,MPIU_SCALAR,0,tag,comm,&status);
335: }
336: #if defined(PETSC_HAVE_MPIIO)
337: } else {
338: PetscMPIInt gsizes[1],lsizes[1],lstarts[1];
339: MPI_Datatype view;
340: MPI_File mfdes;
341: MPI_Aint ub,ul;
342: MPI_Offset off;
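    /* Describe this process's contiguous piece of the global vector as an MPI
       subarray type, set the file view at the viewer's current offset, and read
       collectively */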
344: gsizes[0] = PetscMPIIntCast(rows);
345: lsizes[0] = PetscMPIIntCast(n);
346: lstarts[0] = PetscMPIIntCast(vec->map->rstart);
347: MPI_Type_create_subarray(1,gsizes,lsizes,lstarts,MPI_ORDER_FORTRAN,MPIU_SCALAR,&view);
348: MPI_Type_commit(&view);
350: PetscViewerBinaryGetMPIIODescriptor(viewer,&mfdes);
351: PetscViewerBinaryGetMPIIOOffset(viewer,&off);
352: MPI_File_set_view(mfdes,off,MPIU_SCALAR,view,(char *)"native",MPI_INFO_NULL);
353: MPIU_File_read_all(mfdes,avec,lsizes[0],MPIU_SCALAR,MPI_STATUS_IGNORE);
354: MPI_Type_get_extent(view,&ul,&ub);
355: PetscViewerBinaryAddMPIIOOffset(viewer,ub);
356: MPI_Type_free(&view);
357: }
358: #endif
359: VecRestoreArray(vec,&avec);
360: *newvec = vec;
361: VecAssemblyBegin(vec);
362: VecAssemblyEnd(vec);
363: PetscLogEventEnd(VEC_Load,viewer,0,0,0);
364: return(0);
365: }
367: #if defined(PETSC_HAVE_PNETCDF)
370: PetscErrorCode VecLoadIntoVector_Netcdf(PetscViewer viewer,Vec vec)
371: {
373: PetscMPIInt rank;
374: PetscInt N,rows,n,bs;
375: PetscInt ncid,start;
376: PetscScalar *avec;
377: MPI_Comm comm;
378: PetscTruth flag;
379: char name[NC_MAX_NAME];
382: PetscLogEventBegin(VEC_Load,viewer,vec,0,0);
383: PetscObjectGetComm((PetscObject)viewer,&comm);
384: MPI_Comm_rank(comm,&rank);
385: PetscViewerNetcdfGetID(viewer,&ncid);
386: ncmpi_inq_dim(ncid,0,name,(MPI_Offset*)&N); /* N gets the global vector size */
387: if (!rank) {
388: VecGetSize(vec,&rows);
389:     if (N != rows) SETERRQ(PETSC_ERR_FILE_UNEXPECTED,"Vector in file has different length than the input vector");
390: PetscOptionsGetInt(PETSC_NULL,"-vecload_block_size",&bs,&flag);
391: if (flag) {
392: VecSetBlockSize(vec,bs);
393: }
394: }
395: VecSetFromOptions(vec);
396: VecGetLocalSize(vec,&n);
397: VecGetOwnershipRange(vec,&start,PETSC_NULL);
398: VecGetArray(vec,&avec);
399: ncmpi_get_vara_double_all(ncid,0,(const MPI_Offset*)&start,(const MPI_Offset*)&n,(double *)avec);
400: VecRestoreArray(vec,&avec);
401: VecAssemblyBegin(vec);
402: VecAssemblyEnd(vec);
403: PetscLogEventEnd(VEC_Load,viewer,vec,0,0);
404: return(0);
405: }
406: #endif
408: #if defined(PETSC_HAVE_HDF5)
411: PetscErrorCode VecLoadIntoVector_HDF5(PetscViewer viewer, Vec xin)
412: {
413: int rdim,rank = 1; /* Could have rank 2 for blocked vectors */
414: PetscInt n, N, bs, low;
415: PetscScalar *x;
416: PetscTruth flag;
417: hid_t file_id, dset_id, filespace, memspace, plist_id;
418: hsize_t dims[1];
419: hsize_t count[1];
420: hsize_t offset[1];
421: herr_t status;
423: const char *vecname;
426: PetscLogEventBegin(VEC_Load,viewer,xin,0,0);
427: PetscOptionsGetInt(PETSC_NULL, "-vecload_block_size", &bs, &flag);
428: if (flag) {
429: VecSetBlockSize(xin, bs);
430: }
431: VecSetFromOptions(xin);
433: PetscViewerHDF5GetFileId(viewer, &file_id);
435: /* Open the existing dataset with default properties */
436: PetscObjectGetName((PetscObject)xin,&vecname);
437: #if (H5_VERS_MAJOR * 10000 + H5_VERS_MINOR * 100 + H5_VERS_RELEASE >= 10800)
438: dset_id = H5Dopen2(file_id, vecname, H5P_DEFAULT);
439: #else
440: dset_id = H5Dopen(file_id, vecname);
441: #endif
442: if (dset_id == -1) SETERRQ1(PETSC_ERR_LIB,"Could not H5Dopen() with Vec named %s",vecname);
444: /* Retrieve the dataspace for the dataset */
445: VecGetSize(xin, &N);
446: filespace = H5Dget_space(dset_id);
447: if (filespace == -1) SETERRQ(PETSC_ERR_LIB,"Could not H5Dget_space()");
448: rdim = H5Sget_simple_extent_dims(filespace, dims, PETSC_NULL);
449: if (rdim != 1) SETERRQ1(PETSC_ERR_FILE_UNEXPECTED, "Dimension of array in file %d not 1 as expected",rdim);
450:   if (N != (int) dims[0]) SETERRQ(PETSC_ERR_FILE_UNEXPECTED, "Vector in file has different length than the input vector");
452: /* Each process defines a memory dataspace and reads its hyperslab from the file */
453: VecGetLocalSize(xin, &n);
454: count[0] = n;
455: memspace = H5Screate_simple(rank, count, NULL);
456: if (memspace == -1) SETERRQ(PETSC_ERR_LIB,"Could not H5Screate_simple()");
458: /* Select hyperslab in the file */
459: VecGetOwnershipRange(xin, &low, PETSC_NULL);
460: offset[0] = low;
461: status = H5Sselect_hyperslab(filespace, H5S_SELECT_SET, offset, NULL, count, NULL);CHKERRQ(status);
463: /* Create property list for collective dataset read */
464: plist_id = H5Pcreate(H5P_DATASET_XFER);
465: if (plist_id == -1) SETERRQ(PETSC_ERR_LIB,"Could not H5Pcreate()");
466: #if defined(PETSC_HAVE_H5PSET_FAPL_MPIO)
467: status = H5Pset_dxpl_mpio(plist_id, H5FD_MPIO_COLLECTIVE);CHKERRQ(status);
468: /* To read the dataset independently use H5Pset_dxpl_mpio(plist_id, H5FD_MPIO_INDEPENDENT) */
469: #endif
471: VecGetArray(xin, &x);
472: status = H5Dread(dset_id, H5T_NATIVE_DOUBLE, memspace, filespace, plist_id, x);CHKERRQ(status);
473: VecRestoreArray(xin, &x);
475: /* Close/release resources */
476: status = H5Pclose(plist_id);CHKERRQ(status);
477: status = H5Sclose(filespace);CHKERRQ(status);
478: status = H5Sclose(memspace);CHKERRQ(status);
479: status = H5Dclose(dset_id);CHKERRQ(status);
481: VecAssemblyBegin(xin);
482: VecAssemblyEnd(xin);
483: PetscLogEventEnd(VEC_Load,viewer,xin,0,0);
484: return(0);
485: }
486: #endif
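/*
   A usage sketch for the HDF5 load path above (illustrative, not part of this file):
   because VecLoadIntoVector_HDF5() locates the dataset by the vector's name, the Vec
   must be created with its global size, named with PetscObjectSetName(), and only then
   loaded.  The file name, object name, and size below are assumptions.

     PetscViewer viewer;
     Vec         x;
     VecCreate(PETSC_COMM_WORLD,&x);
     VecSetSizes(x,PETSC_DECIDE,100);
     VecSetFromOptions(x);
     PetscObjectSetName((PetscObject)x,"Vec");
     PetscViewerHDF5Open(PETSC_COMM_WORLD,"myvec.h5",FILE_MODE_READ,&viewer);
     VecLoadIntoVector(viewer,x);
     PetscViewerDestroy(viewer);
*/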
490: PetscErrorCode VecLoadIntoVector_Binary(PetscViewer viewer,Vec vec)
491: {
493: PetscMPIInt size,rank,tag;
494: PetscInt i,rows,type,n,*range;
495: int fd;
496: PetscScalar *avec;
497: MPI_Comm comm;
498: MPI_Request request;
499: MPI_Status status;
502: PetscLogEventBegin(VEC_Load,viewer,vec,0,0);
504: PetscViewerBinaryGetDescriptor(viewer,&fd);
505: PetscObjectGetComm((PetscObject)viewer,&comm);
506: MPI_Comm_rank(comm,&rank);
507: MPI_Comm_size(comm,&size);
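  /* Process 0 reads the header and its own chunk, then reads and sends each remaining
     process's chunk in rank order; every other process simply receives its chunk */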
509: if (!rank) {
510: /* Read vector header. */
511: PetscBinaryRead(fd,&type,1,PETSC_INT);
512: if (type != VEC_FILE_COOKIE) SETERRQ(PETSC_ERR_ARG_WRONG,"Non-vector object");
513: PetscBinaryRead(fd,&rows,1,PETSC_INT);
514: VecGetSize(vec,&n);
515:     if (n != rows) SETERRQ(PETSC_ERR_FILE_UNEXPECTED,"Vector in file has different length than the input vector");
516: MPI_Bcast(&rows,1,MPIU_INT,0,comm);
518: VecSetFromOptions(vec);
519: VecGetLocalSize(vec,&n);
520: VecGetArray(vec,&avec);
521: PetscBinaryRead(fd,avec,n,PETSC_SCALAR);
522: VecRestoreArray(vec,&avec);
524: if (size > 1) {
525:       /* read in the other chunks and send them to the other processes */
526:       /* determine the maximum chunk size owned by any other process */
527: range = vec->map->range;
528: n = 1;
529: for (i=1; i<size; i++) {
530: n = PetscMax(n,range[i+1] - range[i]);
531: }
532: PetscMalloc(n*sizeof(PetscScalar),&avec);
533: PetscObjectGetNewTag((PetscObject)viewer,&tag);
534: for (i=1; i<size; i++) {
535: n = range[i+1] - range[i];
536: PetscBinaryRead(fd,avec,n,PETSC_SCALAR);
537: MPI_Isend(avec,n,MPIU_SCALAR,i,tag,comm,&request);
538: MPI_Wait(&request,&status);
539: }
540: PetscFree(avec);
541: }
542: } else {
543: MPI_Bcast(&rows,1,MPIU_INT,0,comm);
544: VecSetFromOptions(vec);
545: VecGetLocalSize(vec,&n);
546: PetscObjectGetNewTag((PetscObject)viewer,&tag);
547: VecGetArray(vec,&avec);
548: MPI_Recv(avec,n,MPIU_SCALAR,0,tag,comm,&status);
549: VecRestoreArray(vec,&avec);
550: }
551: VecAssemblyBegin(vec);
552: VecAssemblyEnd(vec);
553: PetscLogEventEnd(VEC_Load,viewer,vec,0,0);
554: return(0);
555: }
559: PetscErrorCode VecLoadIntoVector_Default(PetscViewer viewer,Vec vec)
560: {
561: PetscTruth isbinary;
562: #if defined(PETSC_HAVE_PNETCDF)
563: PetscTruth isnetcdf;
564: #endif
565: #if defined(PETSC_HAVE_HDF5)
566: PetscTruth ishdf5;
567: #endif
571: PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_BINARY,&isbinary);
572: #if defined(PETSC_HAVE_PNETCDF)
573: PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_NETCDF,&isnetcdf);
574: #endif
575: #if defined(PETSC_HAVE_HDF5)
576: PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_HDF5,&ishdf5);
577: #endif
579: if (isbinary) {
580: VecLoadIntoVector_Binary(viewer,vec);
581: #if defined(PETSC_HAVE_PNETCDF)
582: } else if (isnetcdf) {
583: VecLoadIntoVector_Netcdf(viewer,vec);
584: #endif
585: #if defined(PETSC_HAVE_HDF5)
586: } else if (ishdf5) {
587: VecLoadIntoVector_HDF5(viewer,vec);
588: #endif
589: } else {
590: SETERRQ1(PETSC_ERR_SUP,"Viewer type %s not supported for vector loading", ((PetscObject)viewer)->type_name);
591: }
592: return(0);
593: }
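/*
   A usage sketch for the binary VecLoadIntoVector() path handled by
   VecLoadIntoVector_Default() (illustrative, not part of this file): the target
   vector must already exist with a global size matching the vector in the file.
   The file name and size below are assumptions.

     PetscViewer viewer;
     Vec         x;
     VecCreate(PETSC_COMM_WORLD,&x);
     VecSetSizes(x,PETSC_DECIDE,100);
     VecSetFromOptions(x);
     PetscViewerBinaryOpen(PETSC_COMM_WORLD,"myvec.dat",FILE_MODE_READ,&viewer);
     VecLoadIntoVector(viewer,x);
     PetscViewerDestroy(viewer);
*/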