Actual source code: pbvec.c
petsc-3.3-p2 2012-07-13
/*
   This file contains routines for Parallel vector operations.
 */
#include <../src/vec/vec/impls/mpi/pvecimpl.h>   /*I  "petscvec.h"   I*/

PetscErrorCode VecDot_MPI(Vec xin,Vec yin,PetscScalar *z)
{
  PetscScalar sum,work;

  VecDot_Seq(xin,yin,&work);
  MPI_Allreduce(&work,&sum,1,MPIU_SCALAR,MPIU_SUM,((PetscObject)xin)->comm);
  *z = sum;
  return(0);
}

PetscErrorCode VecTDot_MPI(Vec xin,Vec yin,PetscScalar *z)
{
  PetscScalar sum,work;

  VecTDot_Seq(xin,yin,&work);
  MPI_Allreduce(&work,&sum,1,MPIU_SCALAR,MPIU_SUM,((PetscObject)xin)->comm);
  *z = sum;
  return(0);
}
EXTERN_C_BEGIN
extern PetscErrorCode VecView_MPI_Draw(Vec,PetscViewer);
EXTERN_C_END

PetscErrorCode VecPlaceArray_MPI(Vec vin,const PetscScalar *a)
{
  Vec_MPI *v = (Vec_MPI*)vin->data;

  if (v->unplacedarray) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"VecPlaceArray() was already called on this vector, without a call to VecResetArray()");
  v->unplacedarray = v->array;  /* save previous array so reset can bring it back */
  v->array         = (PetscScalar*)a;
  if (v->localrep) {
    VecPlaceArray(v->localrep,a);
  }
  return(0);
}
extern PetscErrorCode VecGetValues_MPI(Vec,PetscInt,const PetscInt[],PetscScalar[]);

static PetscErrorCode VecDuplicate_MPI(Vec win,Vec *v)
{
  Vec_MPI     *vw,*w = (Vec_MPI*)win->data;
  PetscScalar *array;

  VecCreate(((PetscObject)win)->comm,v);
  PetscLayoutReference(win->map,&(*v)->map);

  VecCreate_MPI_Private(*v,PETSC_TRUE,w->nghost,0);
  vw = (Vec_MPI*)(*v)->data;
  PetscMemcpy((*v)->ops,win->ops,sizeof(struct _VecOps));

  /* save local representation of the parallel vector (and scatter) if it exists */
  if (w->localrep) {
    VecGetArray(*v,&array);
    VecCreateSeqWithArray(PETSC_COMM_SELF,1,win->map->n+w->nghost,array,&vw->localrep);
    PetscMemcpy(vw->localrep->ops,w->localrep->ops,sizeof(struct _VecOps));
    VecRestoreArray(*v,&array);
    PetscLogObjectParent(*v,vw->localrep);
    vw->localupdate = w->localupdate;
    if (vw->localupdate) {
      PetscObjectReference((PetscObject)vw->localupdate);
    }
  }

  /* New vector should inherit stashing property of parent */
  (*v)->stash.donotstash   = win->stash.donotstash;
  (*v)->stash.ignorenegidx = win->stash.ignorenegidx;

  PetscOListDuplicate(((PetscObject)win)->olist,&((PetscObject)(*v))->olist);
  PetscFListDuplicate(((PetscObject)win)->qlist,&((PetscObject)(*v))->qlist);
  (*v)->map->bs   = win->map->bs;
  (*v)->bstash.bs = win->bstash.bs;
  return(0);
}
extern PetscErrorCode VecSetOption_MPI(Vec,VecOption,PetscBool);
extern PetscErrorCode VecResetArray_MPI(Vec);

static struct _VecOps DvOps = { VecDuplicate_MPI, /* 1 */
                                VecDuplicateVecs_Default,
                                VecDestroyVecs_Default,
                                VecDot_MPI,
                                VecMDot_MPI,
                                VecNorm_MPI,
                                VecTDot_MPI,
                                VecMTDot_MPI,
                                VecScale_Seq,
                                VecCopy_Seq, /* 10 */
                                VecSet_Seq,
                                VecSwap_Seq,
                                VecAXPY_Seq,
                                VecAXPBY_Seq,
                                VecMAXPY_Seq,
                                VecAYPX_Seq,
                                VecWAXPY_Seq,
                                VecAXPBYPCZ_Seq,
                                VecPointwiseMult_Seq,
                                VecPointwiseDivide_Seq,
                                VecSetValues_MPI, /* 20 */
                                VecAssemblyBegin_MPI,
                                VecAssemblyEnd_MPI,
                                0,
                                VecGetSize_MPI,
                                VecGetSize_Seq,
                                0,
                                VecMax_MPI,
                                VecMin_MPI,
                                VecSetRandom_Seq,
                                VecSetOption_MPI,
                                VecSetValuesBlocked_MPI,
                                VecDestroy_MPI,
                                VecView_MPI,
                                VecPlaceArray_MPI,
                                VecReplaceArray_Seq,
                                VecDot_Seq,
                                VecTDot_Seq,
                                VecNorm_Seq,
                                VecMDot_Seq,
                                VecMTDot_Seq,
                                VecLoad_Default,
                                VecReciprocal_Default,
                                VecConjugate_Seq,
                                0,
                                0,
                                VecResetArray_MPI,
                                0,
                                VecMaxPointwiseDivide_Seq,
                                VecPointwiseMax_Seq,
                                VecPointwiseMaxAbs_Seq,
                                VecPointwiseMin_Seq,
                                VecGetValues_MPI,
                                0,
                                0,
                                0,
                                0,
                                0,
                                0,
                                VecStrideGather_Default,
                                VecStrideScatter_Default
                              };
/*
    VecCreate_MPI_Private - Basic create routine called by VecCreate_MPI() (i.e. VecCreateMPI()),
    VecCreateMPIWithArray(), VecCreate_Shared() (i.e. VecCreateShared()), VecCreateGhost(),
    VecCreateGhostWithArray(), VecDuplicate_MPI(), and VecDuplicate_Shared()

    If alloc is true and array is PETSC_NULL then this routine allocates the space, otherwise
    no space is allocated.
*/
PetscErrorCode VecCreate_MPI_Private(Vec v,PetscBool alloc,PetscInt nghost,const PetscScalar array[])
{
  Vec_MPI *s;

  PetscNewLog(v,Vec_MPI,&s);
  v->data        = (void*)s;
  PetscMemcpy(v->ops,&DvOps,sizeof(DvOps));
  s->nghost      = nghost;
  v->petscnative = PETSC_TRUE;

  PetscLayoutSetUp(v->map);
  s->array           = (PetscScalar*)array;
  s->array_allocated = 0;
  if (alloc && !array) {
    PetscInt n = v->map->n+nghost;
    PetscMalloc(n*sizeof(PetscScalar),&s->array);
    PetscLogObjectMemory(v,n*sizeof(PetscScalar));
    PetscMemzero(s->array,v->map->n*sizeof(PetscScalar));
    s->array_allocated = s->array;
  }

  /* By default parallel vectors do not have local representation */
  s->localrep    = 0;
  s->localupdate = 0;

  v->stash.insertmode = NOT_SET_VALUES;
  /* create the stashes. The block-size for bstash is set later when
     VecSetValuesBlocked is called.
  */
  VecStashCreate_Private(((PetscObject)v)->comm,1,&v->stash);
  VecStashCreate_Private(((PetscObject)v)->comm,v->map->bs,&v->bstash);

#if defined(PETSC_HAVE_MATLAB_ENGINE)
  PetscObjectComposeFunctionDynamic((PetscObject)v,"PetscMatlabEnginePut_C","VecMatlabEnginePut_Default",VecMatlabEnginePut_Default);
  PetscObjectComposeFunctionDynamic((PetscObject)v,"PetscMatlabEngineGet_C","VecMatlabEngineGet_Default",VecMatlabEngineGet_Default);
#endif
  PetscObjectChangeTypeName((PetscObject)v,VECMPI);
  return(0);
}
/*MC
   VECMPI - VECMPI = "mpi" - The basic parallel vector

   Options Database Keys:
. -vec_type mpi - sets the vector type to VECMPI during a call to VecSetFromOptions()

  Level: beginner

.seealso: VecCreate(), VecSetType(), VecSetFromOptions(), VecCreateMPIWithArray(), VecType, VecCreateMPI()
M*/
EXTERN_C_BEGIN
PetscErrorCode VecCreate_MPI(Vec vv)
{
  VecCreate_MPI_Private(vv,PETSC_TRUE,0,0);
  return(0);
}
EXTERN_C_END
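
/*
   Editor's note (illustrative sketch, not part of the original pbvec.c): VECMPI is normally
   selected explicitly with VecSetType() after the sizes are set. A minimal creation sequence,
   with error checking (ierr/CHKERRQ) omitted to match the style of this listing; the local
   length 10 is a hypothetical value.

     Vec x;

     VecCreate(PETSC_COMM_WORLD,&x);
     VecSetSizes(x,10,PETSC_DECIDE);   // 10 local entries per process (hypothetical value)
     VecSetType(x,VECMPI);             // dispatches to VecCreate_MPI() above
     VecSet(x,1.0);
     VecDestroy(&x);
*/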
/*MC
   VECSTANDARD = "standard" - A VECSEQ on one process and VECMPI on more than one process

   Options Database Keys:
. -vec_type standard - sets the vector type to VECSTANDARD during a call to VecSetFromOptions()

  Level: beginner

.seealso: VecCreateSeq(), VecCreateMPI()
M*/
EXTERN_C_BEGIN
PetscErrorCode VecCreate_Standard(Vec v)
{
  PetscMPIInt size;

  MPI_Comm_size(((PetscObject)v)->comm,&size);
  if (size == 1) {
    VecSetType(v,VECSEQ);
  } else {
    VecSetType(v,VECMPI);
  }
  return(0);
}
EXTERN_C_END
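
/*
   Editor's note (illustrative sketch, not part of the original pbvec.c): VECSTANDARD is the
   type VecSetFromOptions() picks unless overridden, e.g. with -vec_type mpi on the command
   line; on one process it resolves to VECSEQ, otherwise to VECMPI, as VecCreate_Standard()
   above shows. Error checking omitted; the local length is a hypothetical value.

     Vec x;

     VecCreate(PETSC_COMM_WORLD,&x);
     VecSetSizes(x,10,PETSC_DECIDE);   // hypothetical local length
     VecSetFromOptions(x);             // selects VECSTANDARD by default, honoring -vec_type
     VecDestroy(&x);
*/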
/*@C
   VecCreateMPIWithArray - Creates a parallel, array-style vector,
   where the user provides the array space to store the vector values.

   Collective on MPI_Comm

   Input Parameters:
+  comm  - the MPI communicator to use
.  bs    - the block size, same meaning as for VecSetBlockSize()
.  n     - local vector length, cannot be PETSC_DECIDE
.  N     - global vector length (or PETSC_DECIDE to have it calculated)
-  array - the user provided array to store the vector values

   Output Parameter:
.  vv - the vector

   Notes:
   Use VecDuplicate() or VecDuplicateVecs() to form additional vectors of the
   same type as an existing vector.

   If the user-provided array is PETSC_NULL, then VecPlaceArray() can be used
   at a later stage to SET the array for storing the vector values.

   PETSc does NOT free the array when the vector is destroyed via VecDestroy().
   The user should not free the array until the vector is destroyed.

   Level: intermediate

   Concepts: vectors^creating with array

.seealso: VecCreateSeqWithArray(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateGhost(),
          VecCreateMPI(), VecCreateGhostWithArray(), VecPlaceArray()

@*/
PetscErrorCode VecCreateMPIWithArray(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,const PetscScalar array[],Vec *vv)
{
  if (n == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size of vector");
  PetscSplitOwnership(comm,&n,&N);
  VecCreate(comm,vv);
  VecSetSizes(*vv,n,N);
  VecSetBlockSize(*vv,bs);
  VecCreate_MPI_Private(*vv,PETSC_FALSE,0,array);
  return(0);
}
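
/*
   Editor's note (illustrative sketch, not part of the original pbvec.c): typical use of
   VecCreateMPIWithArray() with user-managed storage. The array must stay alive until the
   vector is destroyed, and PETSc never frees it. Error checking omitted to match this
   listing; the sizes and names below are hypothetical.

     PetscInt    n = 4;                       // hypothetical local length
     PetscScalar *mystorage;
     Vec         x;

     PetscMalloc(n*sizeof(PetscScalar),&mystorage);
     VecCreateMPIWithArray(PETSC_COMM_WORLD,1,n,PETSC_DECIDE,mystorage,&x);
     VecSet(x,2.0);                           // writes into mystorage
     VecDestroy(&x);
     PetscFree(mystorage);                    // the user frees the storage, not PETSc
*/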
/*@C
   VecCreateGhostWithArray - Creates a parallel vector with ghost padding on each processor;
   the caller allocates the array space.

   Collective on MPI_Comm

   Input Parameters:
+  comm   - the MPI communicator to use
.  n      - local vector length
.  N      - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost points
.  ghosts - global indices of ghost points (or PETSC_NULL if not needed)
-  array  - the space to store the vector values (as long as n + nghost)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   This also automatically sets the ISLocalToGlobalMapping() for this vector.

   Level: advanced

   Concepts: vectors^creating with array
   Concepts: vectors^ghosted

.seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
          VecCreateGhostBlock(), VecCreateGhostBlockWithArray(), VecMPISetGhost()

@*/
PetscErrorCode VecCreateGhostWithArray(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
{
  PetscErrorCode         ierr;
  Vec_MPI                *w;
  PetscScalar            *larray;
  IS                     from,to;
  ISLocalToGlobalMapping ltog;
  PetscInt               rstart,i,*indices;

  *vv = 0;

  if (n == PETSC_DECIDE)      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
  if (nghost == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
  if (nghost < 0)             SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
  PetscSplitOwnership(comm,&n,&N);
  /* Create global representation */
  VecCreate(comm,vv);
  VecSetSizes(*vv,n,N);
  VecCreate_MPI_Private(*vv,PETSC_TRUE,nghost,array);
  w = (Vec_MPI*)(*vv)->data;
  /* Create local representation */
  VecGetArray(*vv,&larray);
  VecCreateSeqWithArray(PETSC_COMM_SELF,1,n+nghost,larray,&w->localrep);
  PetscLogObjectParent(*vv,w->localrep);
  VecRestoreArray(*vv,&larray);

  /*
       Create scatter context for scattering (updating) ghost values
  */
  ISCreateGeneral(comm,nghost,ghosts,PETSC_COPY_VALUES,&from);
  ISCreateStride(PETSC_COMM_SELF,nghost,n,1,&to);
  VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);
  PetscLogObjectParent(*vv,w->localupdate);
  ISDestroy(&to);
  ISDestroy(&from);

  /* set local to global mapping for ghosted vector */
  PetscMalloc((n+nghost)*sizeof(PetscInt),&indices);
  VecGetOwnershipRange(*vv,&rstart,PETSC_NULL);
  for (i=0; i<n; i++) {
    indices[i] = rstart + i;
  }
  for (i=0; i<nghost; i++) {
    indices[n+i] = ghosts[i];
  }
  ISLocalToGlobalMappingCreate(comm,n+nghost,indices,PETSC_OWN_POINTER,&ltog);
  VecSetLocalToGlobalMapping(*vv,ltog);
  ISLocalToGlobalMappingDestroy(&ltog);
  return(0);
}
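
/*
   Editor's note (illustrative sketch, not part of the original pbvec.c): creating a ghosted
   vector with caller-provided storage. The array must hold n + nghost entries; the trailing
   nghost slots receive the ghost values when the vector is updated. The ghost indices below
   are hypothetical. Error checking omitted to match this listing.

     PetscInt    n = 6, nghost = 2;
     PetscInt    ghosts[2] = {0, 1};                 // hypothetical global indices of the ghost points
     PetscScalar *storage;
     Vec         x;

     PetscMalloc((n+nghost)*sizeof(PetscScalar),&storage);
     VecCreateGhostWithArray(PETSC_COMM_WORLD,n,PETSC_DECIDE,nghost,ghosts,storage,&x);
     VecSet(x,1.0);
     VecGhostUpdateBegin(x,INSERT_VALUES,SCATTER_FORWARD);   // fill the ghost slots
     VecGhostUpdateEnd(x,INSERT_VALUES,SCATTER_FORWARD);
     VecDestroy(&x);
     PetscFree(storage);
*/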
/*@
   VecCreateGhost - Creates a parallel vector with ghost padding on each processor.

   Collective on MPI_Comm

   Input Parameters:
+  comm   - the MPI communicator to use
.  n      - local vector length
.  N      - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost points
-  ghosts - global indices of ghost points

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   This also automatically sets the ISLocalToGlobalMapping() for this vector.

   Level: advanced

   Concepts: vectors^ghosted

.seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
          VecGhostGetLocalForm(), VecGhostRestoreLocalForm(), VecGhostUpdateBegin(),
          VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecGhostUpdateEnd(),
          VecCreateGhostBlock(), VecCreateGhostBlockWithArray(), VecMPISetGhost()

@*/
PetscErrorCode VecCreateGhost(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
{
  VecCreateGhostWithArray(comm,n,N,nghost,ghosts,0,vv);
  return(0);
}
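
/*
   Editor's note (illustrative sketch, not part of the original pbvec.c): the usual ghost-vector
   workflow built on VecCreateGhost(): update the ghost region, then read owned and ghost entries
   through the sequential local form. The ghost indices below are hypothetical; error checking
   is omitted to match this listing.

     PetscInt    n = 6, nghost = 2;
     PetscInt    ghosts[2] = {0, 1};        // hypothetical global indices of the ghost points
     Vec         x, xlocal;
     PetscScalar *a;

     VecCreateGhost(PETSC_COMM_WORLD,n,PETSC_DECIDE,nghost,ghosts,&x);
     VecSet(x,3.0);
     VecGhostUpdateBegin(x,INSERT_VALUES,SCATTER_FORWARD);
     VecGhostUpdateEnd(x,INSERT_VALUES,SCATTER_FORWARD);

     VecGhostGetLocalForm(x,&xlocal);       // sequential vector of length n + nghost
     VecGetArray(xlocal,&a);                // a[0..n-1] owned values, a[n..n+nghost-1] ghost values
     VecRestoreArray(xlocal,&a);
     VecGhostRestoreLocalForm(x,&xlocal);
     VecDestroy(&x);
*/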
/*@
   VecMPISetGhost - Sets the ghost points for an MPI ghost vector

   Collective on Vec

   Input Parameters:
+  vv     - the MPI vector
.  nghost - number of local ghost points
-  ghosts - global indices of ghost points

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   This also automatically sets the ISLocalToGlobalMapping() for this vector.

   You must call this AFTER you have set the type of the vector (with VecSetType()) and the size (with VecSetSizes()).

   Level: advanced

   Concepts: vectors^ghosted

.seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
          VecGhostGetLocalForm(), VecGhostRestoreLocalForm(), VecGhostUpdateBegin(),
          VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecGhostUpdateEnd(),
          VecCreateGhostBlock(), VecCreateGhostBlockWithArray()

@*/
PetscErrorCode VecMPISetGhost(Vec vv,PetscInt nghost,const PetscInt ghosts[])
{
  PetscBool flg;

  PetscObjectTypeCompare((PetscObject)vv,VECMPI,&flg);
  /* if this is already a fully built VECMPI, destroy it and rebuild it with ghosting */
  if (flg) {
    PetscInt               n,N;
    Vec_MPI                *w;
    PetscScalar            *larray;
    IS                     from,to;
    ISLocalToGlobalMapping ltog;
    PetscInt               rstart,i,*indices;
    MPI_Comm               comm = ((PetscObject)vv)->comm;

    n = vv->map->n;
    N = vv->map->N;
    (*vv->ops->destroy)(vv);
    VecSetSizes(vv,n,N);
    VecCreate_MPI_Private(vv,PETSC_TRUE,nghost,PETSC_NULL);
    w = (Vec_MPI*)(vv)->data;
    /* Create local representation */
    VecGetArray(vv,&larray);
    VecCreateSeqWithArray(PETSC_COMM_SELF,1,n+nghost,larray,&w->localrep);
    PetscLogObjectParent(vv,w->localrep);
    VecRestoreArray(vv,&larray);

    /*
         Create scatter context for scattering (updating) ghost values
    */
    ISCreateGeneral(comm,nghost,ghosts,PETSC_COPY_VALUES,&from);
    ISCreateStride(PETSC_COMM_SELF,nghost,n,1,&to);
    VecScatterCreate(vv,from,w->localrep,to,&w->localupdate);
    PetscLogObjectParent(vv,w->localupdate);
    ISDestroy(&to);
    ISDestroy(&from);

    /* set local to global mapping for ghosted vector */
    PetscMalloc((n+nghost)*sizeof(PetscInt),&indices);
    VecGetOwnershipRange(vv,&rstart,PETSC_NULL);
    for (i=0; i<n; i++) {
      indices[i] = rstart + i;
    }
    for (i=0; i<nghost; i++) {
      indices[n+i] = ghosts[i];
    }
    ISLocalToGlobalMappingCreate(comm,n+nghost,indices,PETSC_OWN_POINTER,&ltog);
    VecSetLocalToGlobalMapping(vv,ltog);
    ISLocalToGlobalMappingDestroy(&ltog);
  } else if (vv->ops->create == VecCreate_MPI) SETERRQ(((PetscObject)vv)->comm,PETSC_ERR_ARG_WRONGSTATE,"Must set local or global size before setting ghosting");
  else if (!((PetscObject)vv)->type_name) SETERRQ(((PetscObject)vv)->comm,PETSC_ERR_ARG_WRONGSTATE,"Must set type to VECMPI before ghosting");
  return(0);
}
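
/*
   Editor's note (illustrative sketch, not part of the original pbvec.c): VecMPISetGhost() adds
   ghosting to a vector whose type and sizes are already set, as the man page above requires.
   The ghost indices and local length are hypothetical; error checking is omitted.

     PetscInt ghosts[2] = {0, 1};   // hypothetical global indices of the ghost points
     Vec      x;

     VecCreate(PETSC_COMM_WORLD,&x);
     VecSetSizes(x,6,PETSC_DECIDE);
     VecSetType(x,VECMPI);
     VecMPISetGhost(x,2,ghosts);    // rebuilds the vector's storage with 2 ghost slots
     VecGhostUpdateBegin(x,INSERT_VALUES,SCATTER_FORWARD);
     VecGhostUpdateEnd(x,INSERT_VALUES,SCATTER_FORWARD);
     VecDestroy(&x);
*/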
/* ------------------------------------------------------------------------------------------*/

/*@C
   VecCreateGhostBlockWithArray - Creates a parallel vector with ghost padding on each processor;
   the caller allocates the array space. Indices in the ghost region are based on blocks.

   Collective on MPI_Comm

   Input Parameters:
+  comm   - the MPI communicator to use
.  bs     - block size
.  n      - local vector length
.  N      - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost blocks
.  ghosts - global indices of ghost blocks (or PETSC_NULL if not needed); counts are by block, not by individual index
-  array  - the space to store the vector values (as long as n + nghost*bs)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   n is the local vector size (the total local size, not the number of blocks) while nghost
   is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
   portion is bs*nghost

   Level: advanced

   Concepts: vectors^creating ghosted
   Concepts: vectors^creating with array

.seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
          VecCreateGhostWithArray(), VecCreateGhostBlock()

@*/
PetscErrorCode VecCreateGhostBlockWithArray(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
{
  PetscErrorCode         ierr;
  Vec_MPI                *w;
  PetscScalar            *larray;
  IS                     from,to;
  ISLocalToGlobalMapping ltog;
  PetscInt               rstart,i,nb,*indices;

  *vv = 0;

  if (n == PETSC_DECIDE)      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
  if (nghost == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
  if (nghost < 0)             SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
  if (n % bs)                 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Local size must be a multiple of block size");
  PetscSplitOwnership(comm,&n,&N);
  /* Create global representation */
  VecCreate(comm,vv);
  VecSetSizes(*vv,n,N);
  VecSetBlockSize(*vv,bs);
  VecCreate_MPI_Private(*vv,PETSC_TRUE,nghost*bs,array);
  w = (Vec_MPI*)(*vv)->data;
  /* Create local representation */
  VecGetArray(*vv,&larray);
  VecCreateSeqWithArray(PETSC_COMM_SELF,bs,n+bs*nghost,larray,&w->localrep);
  PetscLogObjectParent(*vv,w->localrep);
  VecRestoreArray(*vv,&larray);

  /*
       Create scatter context for scattering (updating) ghost values
  */
  ISCreateBlock(comm,bs,nghost,ghosts,PETSC_COPY_VALUES,&from);
  ISCreateStride(PETSC_COMM_SELF,bs*nghost,n,1,&to);
  VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);
  PetscLogObjectParent(*vv,w->localupdate);
  ISDestroy(&to);
  ISDestroy(&from);

  /* set local to global mapping for ghosted vector */
  nb = n/bs;
  PetscMalloc((nb+nghost)*sizeof(PetscInt),&indices);
  VecGetOwnershipRange(*vv,&rstart,PETSC_NULL);
  for (i=0; i<nb; i++) {
    indices[i] = rstart + i*bs;
  }
  for (i=0; i<nghost; i++) {
    indices[nb+i] = ghosts[i];
  }
  ISLocalToGlobalMappingCreate(comm,nb+nghost,indices,PETSC_OWN_POINTER,&ltog);
  VecSetLocalToGlobalMappingBlock(*vv,ltog);
  ISLocalToGlobalMappingDestroy(&ltog);
  return(0);
}
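
/*
   Editor's note (illustrative sketch, not part of the original pbvec.c): block-ghosted creation
   with user storage. With bs = 2 and nghost = 1 ghost block, the array must hold n + bs*nghost
   entries. The ghost block index is hypothetical; error checking is omitted.

     PetscInt    bs = 2, n = 6, nghost = 1;
     PetscInt    ghostblocks[1] = {0};             // hypothetical global index of the ghost block
     PetscScalar *storage;
     Vec         x;

     PetscMalloc((n+bs*nghost)*sizeof(PetscScalar),&storage);
     VecCreateGhostBlockWithArray(PETSC_COMM_WORLD,bs,n,PETSC_DECIDE,nghost,ghostblocks,storage,&x);
     VecGhostUpdateBegin(x,INSERT_VALUES,SCATTER_FORWARD);
     VecGhostUpdateEnd(x,INSERT_VALUES,SCATTER_FORWARD);
     VecDestroy(&x);
     PetscFree(storage);
*/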
/*@
   VecCreateGhostBlock - Creates a parallel vector with ghost padding on each processor.
   The indexing of the ghost points is done with blocks.

   Collective on MPI_Comm

   Input Parameters:
+  comm   - the MPI communicator to use
.  bs     - the block size
.  n      - local vector length
.  N      - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost blocks
-  ghosts - global indices of ghost blocks; counts are by block, not by individual index

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   n is the local vector size (the total local size, not the number of blocks) while nghost
   is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
   portion is bs*nghost

   Level: advanced

   Concepts: vectors^ghosted

.seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
          VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecCreateGhostBlockWithArray()

@*/
PetscErrorCode VecCreateGhostBlock(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
{
  VecCreateGhostBlockWithArray(comm,bs,n,N,nghost,ghosts,0,vv);
  return(0);
}