Actual source code: vecio.c
petsc-3.3-p5 2012-12-01
2: /*
3: This file contains simple binary input routines for vectors. The
4: analogous output routines are within each vector implementation's
5: VecView (with viewer type PETSCVIEWERBINARY)
6: */
8: #include <petscsys.h>
9: #include <petscvec.h> /*I "petscvec.h" I*/
10: #include <petsc-private/vecimpl.h>
11: #include <petscmat.h> /* so that MAT_FILE_CLASSID is defined */
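/*
   A minimal usage sketch for the binary load path implemented in this file, assuming the
   file "vector.dat" (illustrative name only) was previously written with VecView() on a
   PETSCVIEWERBINARY viewer:

     PetscViewer    viewer;
     Vec            u;
     PetscErrorCode ierr;

     ierr = PetscViewerBinaryOpen(PETSC_COMM_WORLD,"vector.dat",FILE_MODE_READ,&viewer);CHKERRQ(ierr);
     ierr = VecCreate(PETSC_COMM_WORLD,&u);CHKERRQ(ierr);
     ierr = VecLoad(u,viewer);CHKERRQ(ierr);
     ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
*/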
15: static PetscErrorCode PetscViewerBinaryReadVecHeader_Private(PetscViewer viewer,PetscInt *rows)
16: {
18: MPI_Comm comm;
19: PetscInt tr[2],type;
22: PetscObjectGetComm((PetscObject)viewer,&comm);
23: /* Read vector header */
24: PetscViewerBinaryRead(viewer,tr,2,PETSC_INT);
25: type = tr[0];
26: if (type != VEC_FILE_CLASSID) {
27: PetscLogEventEnd(VEC_Load,viewer,0,0,0);
28: if (type == MAT_FILE_CLASSID) {
29: SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Matrix is next in file, not a vector as you requested");
30: } else {
31: SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Not a vector next in file");
32: }
33: }
34: *rows = tr[1];
35: return(0);
36: }
38: #if defined(PETSC_HAVE_MPIIO)
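/*
   Collective MPI-IO variant of the binary load: each process describes its contiguous
   slice of the global vector as an MPI subarray datatype, sets the file view at the
   viewer's current offset, and reads its part with a collective MPIU_File_read_all().
*/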
41: static PetscErrorCode VecLoad_Binary_MPIIO(Vec vec, PetscViewer viewer)
42: {
44: PetscMPIInt gsizes[1],lsizes[1],lstarts[1];
45: PetscScalar *avec;
46: MPI_Datatype view;
47: MPI_File mfdes;
48: MPI_Aint ub,ul;
49: MPI_Offset off;
52: VecGetArray(vec,&avec);
53: gsizes[0] = PetscMPIIntCast(vec->map->N);
54: lsizes[0] = PetscMPIIntCast(vec->map->n);
55: lstarts[0] = PetscMPIIntCast(vec->map->rstart);
56: MPI_Type_create_subarray(1,gsizes,lsizes,lstarts,MPI_ORDER_FORTRAN,MPIU_SCALAR,&view);
57: MPI_Type_commit(&view);
59: PetscViewerBinaryGetMPIIODescriptor(viewer,&mfdes);
60: PetscViewerBinaryGetMPIIOOffset(viewer,&off);
61: MPI_File_set_view(mfdes,off,MPIU_SCALAR,view,(char *)"native",MPI_INFO_NULL);
62: MPIU_File_read_all(mfdes,avec,lsizes[0],MPIU_SCALAR,MPI_STATUS_IGNORE);
63: MPI_Type_get_extent(view,&ul,&ub);
64: PetscViewerBinaryAddMPIIOOffset(viewer,ub);
65: MPI_Type_free(&view);
67: VecRestoreArray(vec,&avec);
68: VecAssemblyBegin(vec);
69: VecAssemblyEnd(vec);
70: return(0);
71: }
72: #endif
73:
76: PetscErrorCode VecLoad_Binary(Vec vec, PetscViewer viewer)
77: {
78: PetscMPIInt size,rank,tag;
79: int fd;
80: PetscInt i,rows = 0,n,*range,N,bs;
82: PetscBool flag;
83: PetscScalar *avec,*avecwork;
84: MPI_Comm comm;
85: MPI_Request request;
86: MPI_Status status;
87: #if defined(PETSC_HAVE_MPIIO)
88: PetscBool useMPIIO;
89: #endif
92: PetscObjectGetComm((PetscObject)viewer,&comm);
93: MPI_Comm_rank(comm,&rank);
94: MPI_Comm_size(comm,&size);
95:
96: PetscViewerBinaryGetDescriptor(viewer,&fd);
97: PetscViewerBinaryReadVecHeader_Private(viewer,&rows);
98: /* Set Vec sizes, block size, and type if not already set. Block size is set first so that the local sizes will be compatible. */
99: PetscOptionsGetInt(((PetscObject)vec)->prefix, "-vecload_block_size", &bs, &flag);
100: if (flag) {
101: VecSetBlockSize(vec, bs);
102: }
103: if (vec->map->n < 0 && vec->map->N < 0) {
104: VecSetSizes(vec,PETSC_DECIDE,rows);
105: }
107: /* If sizes and type are already set, check that the vector global size is correct */
108: VecGetSize(vec, &N);
109: if (N != rows) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED, "Vector in file has different length (%d) than input vector (%d)", rows, N);
111: #if defined(PETSC_HAVE_MPIIO)
112: PetscViewerBinaryGetMPIIO(viewer,&useMPIIO);
113: if (useMPIIO) {
114: VecLoad_Binary_MPIIO(vec, viewer);
115: return(0);
116: }
117: #endif
119: VecGetLocalSize(vec,&n);
120: PetscObjectGetNewTag((PetscObject)viewer,&tag);
121: VecGetArray(vec,&avec);
122: if (!rank) {
123: PetscBinaryRead(fd,avec,n,PETSC_SCALAR);
125: if (size > 1) {
126: /* read in the other chunks and send them to the other processes */
127: /* determine the maximum chunk owned by any other process */
128: range = vec->map->range;
129: n = 1;
130: for (i=1; i<size; i++) {
131: n = PetscMax(n,range[i+1] - range[i]);
132: }
133: PetscMalloc(n*sizeof(PetscScalar),&avecwork);
134: for (i=1; i<size; i++) {
135: n = range[i+1] - range[i];
136: PetscBinaryRead(fd,avecwork,n,PETSC_SCALAR);
137: MPI_Isend(avecwork,n,MPIU_SCALAR,i,tag,comm,&request);
138: MPI_Wait(&request,&status);
139: }
140: PetscFree(avecwork);
141: }
142: } else {
143: MPI_Recv(avec,n,MPIU_SCALAR,0,tag,comm,&status);
144: }
146: VecRestoreArray(vec,&avec);
147: VecAssemblyBegin(vec);
148: VecAssemblyEnd(vec);
149: return(0);
150: }
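/*
   Usage note (illustrative): for a Vec without an options prefix, the block size used by
   VecLoad_Binary() above can be supplied on the command line, e.g.

     ./app -vecload_block_size 3

   which is picked up through the PetscOptionsGetInt() call before the sizes are set.
*/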
152: #if defined(PETSC_HAVE_HDF5)
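/*
   Opens (creating it first if necessary) the HDF5 group currently set on the viewer and
   returns both the file and group identifiers; when no group name is set, the file
   identifier itself is returned as the group.
*/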
155: PetscErrorCode PetscViewerHDF5OpenGroup(PetscViewer viewer, hid_t *fileId, hid_t *groupId) {
156: hid_t file_id, group;
157: const char *groupName = PETSC_NULL;
161: PetscViewerHDF5GetFileId(viewer, &file_id);
162: PetscViewerHDF5GetGroup(viewer, &groupName);
163: /* Open group */
164: if (groupName) {
165: PetscBool root;
167: PetscStrcmp(groupName, "/", &root);
168: if (!root && !H5Lexists(file_id, groupName, H5P_DEFAULT)) {
169: #if (H5_VERS_MAJOR * 10000 + H5_VERS_MINOR * 100 + H5_VERS_RELEASE >= 10800)
170: group = H5Gcreate2(file_id, groupName, 0, H5P_DEFAULT, H5P_DEFAULT);
171: #else /* deprecated HDF5 1.6 API */
172: group = H5Gcreate(file_id, groupName, 0);
173: #endif
174: if (group < 0) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_LIB, "Could not create group %s", groupName);
175: H5Gclose(group);
176: }
177: #if (H5_VERS_MAJOR * 10000 + H5_VERS_MINOR * 100 + H5_VERS_RELEASE >= 10800)
178: group = H5Gopen2(file_id, groupName, H5P_DEFAULT);
179: #else
180: group = H5Gopen(file_id, groupName);
181: #endif
182: if (group < 0) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_LIB, "Could not open group %s", groupName);
183: } else {
184: group = file_id;
185: }
186: *fileId = file_id;
187: *groupId = group;
188: return(0);
189: }
193: /*
194: This should properly handle the cases where PetscInt is 32- or 64-bit and hsize_t is 32- or 64-bit. This means properly casting, with
195: checks, back and forth between the two types of variables.
196: */
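/*
   The dataset read below is expected to be laid out as [timestep][length][block size][real/imag],
   where the timestep, block-size, and complex-part dimensions are present only when applicable;
   this mirrors the layout produced by the corresponding HDF5 VecView() output path.
*/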
197: PetscErrorCode VecLoad_HDF5(Vec xin, PetscViewer viewer)
198: {
199: hid_t file_id, group, dset_id, filespace, memspace, plist_id;
200: hsize_t rdim, dim;
201: hsize_t dims[4], count[4], offset[4];
202: herr_t status;
203: PetscInt n, N, bs = 1, bsInd, lenInd, low, timestep;
204: PetscScalar *x;
205: const char *vecname;
209: PetscViewerHDF5OpenGroup(viewer, &file_id, &group);
210: PetscViewerHDF5GetTimestep(viewer, &timestep);
211: VecGetBlockSize(xin,&bs);
212: /* Create the dataset with default properties and close filespace */
213: PetscObjectGetName((PetscObject)xin,&vecname);
214: #if (H5_VERS_MAJOR * 10000 + H5_VERS_MINOR * 100 + H5_VERS_RELEASE >= 10800)
215: dset_id = H5Dopen2(group, vecname, H5P_DEFAULT);
216: #else
217: dset_id = H5Dopen(group, vecname);
218: #endif
219: if (dset_id == -1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Could not H5Dopen() with Vec named %s",vecname);
220: /* Retrieve the dataspace for the dataset */
221: filespace = H5Dget_space(dset_id);
222: if (filespace == -1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"Could not H5Dget_space()");
223: dim = 0;
224: if (timestep >= 0) {
225: ++dim;
226: }
227: ++dim;
228: if (bs >= 1) {
229: ++dim;
230: }
231: #if defined(PETSC_USE_COMPLEX)
232: ++dim;
233: #endif
234: rdim = H5Sget_simple_extent_dims(filespace, dims, PETSC_NULL);
235: #if defined(PETSC_USE_COMPLEX)
236: bsInd = rdim-2;
237: #else
238: bsInd = rdim-1;
239: #endif
240: lenInd = timestep >= 0 ? 1 : 0;
241: if (rdim != dim) {
242: if (rdim == dim+1 && bs == 1) {
243: bs = dims[bsInd];
244: } else SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED, "Dimension of array in file %d not %d as expected",rdim,dim);
245: } else if (bs >= 1 && bs != (PetscInt) dims[bsInd]) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_FILE_UNEXPECTED, "Block size %d specified for vector does not match blocksize in file %d",bs,dims[bsInd]);
247: /* Set Vec sizes, block size, and type if not already set */
248: if ((xin)->map->n < 0 && (xin)->map->N < 0) {
249: VecSetSizes(xin, PETSC_DECIDE, dims[lenInd]*bs);
250: }
251: /* If sizes and type are already set, check that the vector global size is correct */
252: VecGetSize(xin, &N);
253: if (N/bs != (PetscInt) dims[lenInd]) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED, "Vector in file has different length (%d) than input vector (%d)", (PetscInt) dims[lenInd], N/bs);
255: /* Each process defines a dataset and reads it from the hyperslab in the file */
256: VecGetLocalSize(xin, &n);
257: dim = 0;
258: if (timestep >= 0) {
259: count[dim] = 1;
260: ++dim;
261: }
262: count[dim] = PetscHDF5IntCast(n)/bs;
263: ++dim;
264: if (bs >= 1) {
265: count[dim] = bs;
266: ++dim;
267: }
268: #if defined(PETSC_USE_COMPLEX)
269: count[dim] = 2;
270: ++dim;
271: #endif
272: memspace = H5Screate_simple(dim, count, NULL);
273: if (memspace == -1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"Could not H5Screate_simple()");
275: /* Select hyperslab in the file */
276: VecGetOwnershipRange(xin, &low, PETSC_NULL);
277: dim = 0;
278: if (timestep >= 0) {
279: offset[dim] = timestep;
280: ++dim;
281: }
282: offset[dim] = PetscHDF5IntCast(low/bs);
283: ++dim;
284: if (bs >= 1) {
285: offset[dim] = 0;
286: ++dim;
287: }
288: #if defined(PETSC_USE_COMPLEX)
289: offset[dim] = 0;
290: ++dim;
291: #endif
292: status = H5Sselect_hyperslab(filespace, H5S_SELECT_SET, offset, NULL, count, NULL);CHKERRQ(status);
294: /* Create property list for collective dataset read */
295: plist_id = H5Pcreate(H5P_DATASET_XFER);
296: if (plist_id == -1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"Could not H5Pcreate()");
297: #if defined(PETSC_HAVE_H5PSET_FAPL_MPIO)
298: status = H5Pset_dxpl_mpio(plist_id, H5FD_MPIO_COLLECTIVE);CHKERRQ(status);
299: #endif
300: /* To write dataset independently use H5Pset_dxpl_mpio(plist_id, H5FD_MPIO_INDEPENDENT) */
302: VecGetArray(xin, &x);
303: status = H5Dread(dset_id, H5T_NATIVE_DOUBLE, memspace, filespace, plist_id, x);CHKERRQ(status);
304: VecRestoreArray(xin, &x);
306: /* Close/release resources */
307: if (group != file_id) {
308: status = H5Gclose(group);CHKERRQ(status);
309: }
310: status = H5Pclose(plist_id);CHKERRQ(status);
311: status = H5Sclose(filespace);CHKERRQ(status);
312: status = H5Sclose(memspace);CHKERRQ(status);
313: status = H5Dclose(dset_id);CHKERRQ(status);
315: VecAssemblyBegin(xin);
316: VecAssemblyEnd(xin);
317: return(0);
318: }
319: #endif
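/*
   A minimal usage sketch for the HDF5 load path above, assuming PETSc was configured with
   HDF5 support; the filename "output.h5" and object name "pressure" are illustrative only.
   The Vec must be named before VecLoad() so that the dataset can be located by name:

     PetscViewer    viewer;
     Vec            u;
     PetscErrorCode ierr;

     ierr = PetscViewerHDF5Open(PETSC_COMM_WORLD,"output.h5",FILE_MODE_READ,&viewer);CHKERRQ(ierr);
     ierr = VecCreate(PETSC_COMM_WORLD,&u);CHKERRQ(ierr);
     ierr = PetscObjectSetName((PetscObject)u,"pressure");CHKERRQ(ierr);
     ierr = VecLoad(u,viewer);CHKERRQ(ierr);
     ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
*/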
324: PetscErrorCode VecLoad_Default(Vec newvec, PetscViewer viewer)
325: {
327: PetscBool isbinary;
328: #if defined(PETSC_HAVE_HDF5)
329: PetscBool ishdf5;
330: #endif
333: PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);
334: #if defined(PETSC_HAVE_HDF5)
335: PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERHDF5,&ishdf5);
336: #endif
338: #if defined(PETSC_HAVE_HDF5)
339: if (ishdf5) {
340: if (!((PetscObject)newvec)->name) {
341: PetscLogEventEnd(VEC_Load,viewer,0,0,0);
342: SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Since HDF5 format gives ASCII name for each object in file; must use VecLoad() after setting name of Vec with PetscObjectSetName()");
343: }
344: VecLoad_HDF5(newvec, viewer);
345: } else
346: #endif
347: {
348: VecLoad_Binary(newvec, viewer);
349: }
350: return(0);
351: }