/* Actual source code: pbvec.c  (petsc-3.5.1, 2014-07-24) */
/*
   This file contains routines for parallel vector operations.
 */
#include <../src/vec/vec/impls/mpi/pvecimpl.h>   /*I  "petscvec.h"   I*/

static PetscErrorCode VecDot_MPI(Vec xin,Vec yin,PetscScalar *z)
{
  PetscScalar    sum,work;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* local dot product, then a global reduction over the vector's communicator */
  ierr = VecDot_Seq(xin,yin,&work);CHKERRQ(ierr);
  ierr = MPI_Allreduce(&work,&sum,1,MPIU_SCALAR,MPIU_SUM,PetscObjectComm((PetscObject)xin));CHKERRQ(ierr);
  *z   = sum;
  PetscFunctionReturn(0);
}

static PetscErrorCode VecTDot_MPI(Vec xin,Vec yin,PetscScalar *z)
{
  PetscScalar    sum,work;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* local (non-conjugated) dot product, then a global reduction */
  ierr = VecTDot_Seq(xin,yin,&work);CHKERRQ(ierr);
  ierr = MPI_Allreduce(&work,&sum,1,MPIU_SCALAR,MPIU_SUM,PetscObjectComm((PetscObject)xin));CHKERRQ(ierr);
  *z   = sum;
  PetscFunctionReturn(0);
}

extern PetscErrorCode VecView_MPI_Draw(Vec,PetscViewer);

static PetscErrorCode VecPlaceArray_MPI(Vec vin,const PetscScalar *a)
{
  PetscErrorCode ierr;
  Vec_MPI        *v = (Vec_MPI*)vin->data;

  PetscFunctionBegin;
  if (v->unplacedarray) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"VecPlaceArray() was already called on this vector, without a call to VecResetArray()");
  v->unplacedarray = v->array;  /* save previous array so reset can bring it back */
  v->array         = (PetscScalar*)a;
  if (v->localrep) {
    ierr = VecPlaceArray(v->localrep,a);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

extern PetscErrorCode VecGetValues_MPI(Vec,PetscInt,const PetscInt[],PetscScalar[]);

static PetscErrorCode VecDuplicate_MPI(Vec win,Vec *v)
{
  PetscErrorCode ierr;
  Vec_MPI        *vw,*w = (Vec_MPI*)win->data;
  PetscScalar    *array;

  PetscFunctionBegin;
  ierr = VecCreate(PetscObjectComm((PetscObject)win),v);CHKERRQ(ierr);
  ierr = PetscLayoutReference(win->map,&(*v)->map);CHKERRQ(ierr);

  ierr = VecCreate_MPI_Private(*v,PETSC_TRUE,w->nghost,0);CHKERRQ(ierr);
  vw   = (Vec_MPI*)(*v)->data;
  ierr = PetscMemcpy((*v)->ops,win->ops,sizeof(struct _VecOps));CHKERRQ(ierr);

  /* save local representation of the parallel vector (and scatter) if it exists */
  if (w->localrep) {
    ierr = VecGetArray(*v,&array);CHKERRQ(ierr);
    ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,PetscAbs(win->map->bs),win->map->n+w->nghost,array,&vw->localrep);CHKERRQ(ierr);
    ierr = PetscMemcpy(vw->localrep->ops,w->localrep->ops,sizeof(struct _VecOps));CHKERRQ(ierr);
    ierr = VecRestoreArray(*v,&array);CHKERRQ(ierr);
    ierr = PetscLogObjectParent((PetscObject)*v,(PetscObject)vw->localrep);CHKERRQ(ierr);

    vw->localupdate = w->localupdate;
    if (vw->localupdate) {
      ierr = PetscObjectReference((PetscObject)vw->localupdate);CHKERRQ(ierr);
    }
  }

  /* the new vector should inherit the stashing property of the parent */
  (*v)->stash.donotstash   = win->stash.donotstash;
  (*v)->stash.ignorenegidx = win->stash.ignorenegidx;

  ierr = PetscObjectListDuplicate(((PetscObject)win)->olist,&((PetscObject)(*v))->olist);CHKERRQ(ierr);
  ierr = PetscFunctionListDuplicate(((PetscObject)win)->qlist,&((PetscObject)(*v))->qlist);CHKERRQ(ierr);

  (*v)->map->bs   = PetscAbs(win->map->bs);
  (*v)->bstash.bs = win->bstash.bs;
  PetscFunctionReturn(0);
}

extern PetscErrorCode VecSetOption_MPI(Vec,VecOption,PetscBool);
extern PetscErrorCode VecResetArray_MPI(Vec);

/*
   Function table for VECMPI. Operations that require communication (dot
   products, norms, max/min, assembly) use the _MPI variants; purely local,
   pointwise operations reuse the sequential _Seq kernels on the local array.
   The numbered comments are position markers within struct _VecOps.
*/
static struct _VecOps DvOps = { VecDuplicate_MPI, /* 1 */
                                VecDuplicateVecs_Default,
                                VecDestroyVecs_Default,
                                VecDot_MPI,
                                VecMDot_MPI,
                                VecNorm_MPI,
                                VecTDot_MPI,
                                VecMTDot_MPI,
                                VecScale_Seq,
                                VecCopy_Seq, /* 10 */
                                VecSet_Seq,
                                VecSwap_Seq,
                                VecAXPY_Seq,
                                VecAXPBY_Seq,
                                VecMAXPY_Seq,
                                VecAYPX_Seq,
                                VecWAXPY_Seq,
                                VecAXPBYPCZ_Seq,
                                VecPointwiseMult_Seq,
                                VecPointwiseDivide_Seq,
                                VecSetValues_MPI, /* 20 */
                                VecAssemblyBegin_MPI,
                                VecAssemblyEnd_MPI,
                                0,
                                VecGetSize_MPI,
                                VecGetSize_Seq,
                                0,
                                VecMax_MPI,
                                VecMin_MPI,
                                VecSetRandom_Seq,
                                VecSetOption_MPI,
                                VecSetValuesBlocked_MPI,
                                VecDestroy_MPI,
                                VecView_MPI,
                                VecPlaceArray_MPI,
                                VecReplaceArray_Seq,
                                VecDot_Seq,
                                VecTDot_Seq,
                                VecNorm_Seq,
                                VecMDot_Seq,
                                VecMTDot_Seq,
                                VecLoad_Default,
                                VecReciprocal_Default,
                                VecConjugate_Seq,
                                0,
                                0,
                                VecResetArray_MPI,
                                0,
                                VecMaxPointwiseDivide_Seq,
                                VecPointwiseMax_Seq,
                                VecPointwiseMaxAbs_Seq,
                                VecPointwiseMin_Seq,
                                VecGetValues_MPI,
                                0,
                                0,
                                0,
                                0,
                                0,
                                0,
                                VecStrideGather_Default,
                                VecStrideScatter_Default,
                                0,
                                0,
                                0,
                                0,
                                0,
                                VecStrideSubSetGather_Default,
                                VecStrideSubSetScatter_Default
};

/*
    VecCreate_MPI_Private - Basic create routine, called by VecCreate_MPI() (i.e. VecCreateMPI()),
    VecCreateMPIWithArray(), VecCreate_Shared() (i.e. VecCreateShared()), VecCreateGhost(),
    VecCreateGhostWithArray(), VecDuplicate_MPI(), and VecDuplicate_Shared()

    If alloc is true and array is NULL then this routine allocates the space, otherwise
    no space is allocated.
*/
PetscErrorCode VecCreate_MPI_Private(Vec v,PetscBool alloc,PetscInt nghost,const PetscScalar array[])
{
  Vec_MPI        *s;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr           = PetscNewLog(v,&s);CHKERRQ(ierr);
  v->data        = (void*)s;
  ierr           = PetscMemcpy(v->ops,&DvOps,sizeof(DvOps));CHKERRQ(ierr);
  s->nghost      = nghost;
  v->petscnative = PETSC_TRUE;

  ierr = PetscLayoutSetUp(v->map);CHKERRQ(ierr);

  s->array           = (PetscScalar*)array;
  s->array_allocated = 0;
  if (alloc && !array) {
    PetscInt n = v->map->n+nghost;
    ierr               = PetscMalloc1(n,&s->array);CHKERRQ(ierr);
    ierr               = PetscLogObjectMemory((PetscObject)v,n*sizeof(PetscScalar));CHKERRQ(ierr);
    ierr               = PetscMemzero(s->array,v->map->n*sizeof(PetscScalar));CHKERRQ(ierr);
    s->array_allocated = s->array;
  }

  /* By default parallel vectors do not have a local representation */
  s->localrep    = 0;
  s->localupdate = 0;

  v->stash.insertmode = NOT_SET_VALUES;
  /* create the stashes. The block size for bstash is set later when
     VecSetValuesBlocked() is called.
  */
  ierr = VecStashCreate_Private(PetscObjectComm((PetscObject)v),1,&v->stash);CHKERRQ(ierr);
  ierr = VecStashCreate_Private(PetscObjectComm((PetscObject)v),PetscAbs(v->map->bs),&v->bstash);CHKERRQ(ierr);

#if defined(PETSC_HAVE_MATLAB_ENGINE)
  ierr = PetscObjectComposeFunction((PetscObject)v,"PetscMatlabEnginePut_C",VecMatlabEnginePut_Default);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)v,"PetscMatlabEngineGet_C",VecMatlabEngineGet_Default);CHKERRQ(ierr);
#endif
  ierr = PetscObjectChangeTypeName((PetscObject)v,VECMPI);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*MC
   VECMPI - VECMPI = "mpi" - The basic parallel vector

   Options Database Keys:
. -vec_type mpi - sets the vector type to VECMPI during a call to VecSetFromOptions()

  Level: beginner

.seealso: VecCreate(), VecSetType(), VecSetFromOptions(), VecCreateMPIWithArray(), VecCreateMPI(), VECMPI, VecType
M*/

PETSC_EXTERN PetscErrorCode VecCreate_MPI(Vec vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecCreate_MPI_Private(vv,PETSC_TRUE,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
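/*
   A minimal usage sketch (illustrative, not part of the original source):
   selecting VECMPI explicitly with VecSetType() inside a program that has
   already called PetscInitialize(). The global size of 100 is hypothetical.

     Vec            v;
     PetscErrorCode ierr;

     ierr = VecCreate(PETSC_COMM_WORLD,&v);CHKERRQ(ierr);
     ierr = VecSetSizes(v,PETSC_DECIDE,100);CHKERRQ(ierr);
     ierr = VecSetType(v,VECMPI);CHKERRQ(ierr);     // equivalent to -vec_type mpi
     ierr = VecDestroy(&v);CHKERRQ(ierr);
*/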

/*MC
   VECSTANDARD = "standard" - A VECSEQ on one process and a VECMPI on more than one process

   Options Database Keys:
. -vec_type standard - sets the vector type to VECSTANDARD during a call to VecSetFromOptions()

  Level: beginner

.seealso: VecCreateSeq(), VecCreateMPI()
M*/

PETSC_EXTERN PetscErrorCode VecCreate_Standard(Vec v)
{
  PetscErrorCode ierr;
  PetscMPIInt    size;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)v),&size);CHKERRQ(ierr);
  if (size == 1) {
    ierr = VecSetType(v,VECSEQ);CHKERRQ(ierr);
  } else {
    ierr = VecSetType(v,VECMPI);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
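/*
   A minimal usage sketch (illustrative, not part of the original source):
   letting the options database pick the type, so -vec_type standard (or
   mpi, seq, ...) selects the implementation at run time. The global size
   of 100 is hypothetical.

     Vec            v;
     PetscErrorCode ierr;

     ierr = VecCreate(PETSC_COMM_WORLD,&v);CHKERRQ(ierr);
     ierr = VecSetSizes(v,PETSC_DECIDE,100);CHKERRQ(ierr);
     ierr = VecSetFromOptions(v);CHKERRQ(ierr);     // honors -vec_type standard
     ierr = VecDestroy(&v);CHKERRQ(ierr);
*/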

/*@C
   VecCreateMPIWithArray - Creates a parallel, array-style vector,
   where the user provides the array space to store the vector values.

   Collective on MPI_Comm

   Input Parameters:
+  comm  - the MPI communicator to use
.  bs    - block size, same meaning as VecSetBlockSize()
.  n     - local vector length, cannot be PETSC_DECIDE
.  N     - global vector length (or PETSC_DECIDE to have it calculated)
-  array - the user-provided array to store the vector values

   Output Parameter:
.  vv - the vector

   Notes:
   Use VecDuplicate() or VecDuplicateVecs() to form additional vectors of the
   same type as an existing vector.

   If the user-provided array is NULL, then VecPlaceArray() can be used
   at a later stage to SET the array for storing the vector values.

   PETSc does NOT free the array when the vector is destroyed via VecDestroy().
   The user should not free the array until the vector is destroyed.

   Level: intermediate

   Concepts: vectors^creating with array

.seealso: VecCreateSeqWithArray(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateGhost(),
          VecCreateMPI(), VecCreateGhostWithArray(), VecPlaceArray()

@*/
PetscErrorCode  VecCreateMPIWithArray(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,const PetscScalar array[],Vec *vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (n == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size of vector");
  ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
  ierr = VecCreate(comm,vv);CHKERRQ(ierr);
  ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
  ierr = VecSetBlockSize(*vv,bs);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(*vv,PETSC_FALSE,0,array);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
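/*
   A minimal usage sketch (illustrative, not part of the original source;
   the local length of 5 is hypothetical): the caller owns the array and,
   as noted above, must not free it until after the vector is destroyed.

     PetscScalar    *a;
     Vec            v;
     PetscInt       n = 5;                          // local length on every rank
     PetscErrorCode ierr;

     ierr = PetscMalloc1(n,&a);CHKERRQ(ierr);
     ierr = VecCreateMPIWithArray(PETSC_COMM_WORLD,1,n,PETSC_DECIDE,a,&v);CHKERRQ(ierr);
     // ... use v; its entries live in a ...
     ierr = VecDestroy(&v);CHKERRQ(ierr);
     ierr = PetscFree(a);CHKERRQ(ierr);             // freed by the caller, not by PETSc
*/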

/*@C
   VecCreateGhostWithArray - Creates a parallel vector with ghost padding on each processor;
   the caller allocates the array space.

   Collective on MPI_Comm

   Input Parameters:
+  comm - the MPI communicator to use
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost points
.  ghosts - global indices of ghost points (or NULL if not needed); these do not need to be in increasing (sorted) order
-  array - the space to store the vector values (of length at least n+nghost)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of the vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   This also automatically sets the ISLocalToGlobalMapping() for this vector.

   Level: advanced

   Concepts: vectors^creating with array
   Concepts: vectors^ghosted

.seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
          VecCreateGhostBlock(), VecCreateGhostBlockWithArray(), VecMPISetGhost()

@*/
PetscErrorCode  VecCreateGhostWithArray(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
{
  PetscErrorCode         ierr;
  Vec_MPI                *w;
  PetscScalar            *larray;
  IS                     from,to;
  ISLocalToGlobalMapping ltog;
  PetscInt               rstart,i,*indices;

  PetscFunctionBegin;
  *vv = 0;

  if (n == PETSC_DECIDE)      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
  if (nghost == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
  if (nghost < 0)             SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
  ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
  /* Create global representation */
  ierr = VecCreate(comm,vv);CHKERRQ(ierr);
  ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(*vv,PETSC_TRUE,nghost,array);CHKERRQ(ierr);
  w    = (Vec_MPI*)(*vv)->data;
  /* Create local representation */
  ierr = VecGetArray(*vv,&larray);CHKERRQ(ierr);
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,n+nghost,larray,&w->localrep);CHKERRQ(ierr);
  ierr = PetscLogObjectParent((PetscObject)*vv,(PetscObject)w->localrep);CHKERRQ(ierr);
  ierr = VecRestoreArray(*vv,&larray);CHKERRQ(ierr);

  /*
       Create scatter context for scattering (updating) ghost values
  */
  ierr = ISCreateGeneral(comm,nghost,ghosts,PETSC_COPY_VALUES,&from);CHKERRQ(ierr);
  ierr = ISCreateStride(PETSC_COMM_SELF,nghost,n,1,&to);CHKERRQ(ierr);
  ierr = VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
  ierr = PetscLogObjectParent((PetscObject)*vv,(PetscObject)w->localupdate);CHKERRQ(ierr);
  ierr = ISDestroy(&to);CHKERRQ(ierr);
  ierr = ISDestroy(&from);CHKERRQ(ierr);

  /* set local to global mapping for ghosted vector */
  ierr = PetscMalloc1((n+nghost),&indices);CHKERRQ(ierr);
  ierr = VecGetOwnershipRange(*vv,&rstart,NULL);CHKERRQ(ierr);
  for (i=0; i<n; i++) {
    indices[i] = rstart + i;
  }
  for (i=0; i<nghost; i++) {
    indices[n+i] = ghosts[i];
  }
  ierr = ISLocalToGlobalMappingCreate(comm,1,n+nghost,indices,PETSC_OWN_POINTER,&ltog);CHKERRQ(ierr);
  ierr = VecSetLocalToGlobalMapping(*vv,ltog);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
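/*
   A usage sketch (illustrative, not part of the original source; it assumes
   exactly two MPI ranks): each rank owns two entries and ghosts one entry
   owned by the other rank, supplying storage of length n+nghost = 3.

     PetscScalar    buf[3];
     PetscInt       ghost;
     PetscMPIInt    rank;
     Vec            v,lv;
     PetscErrorCode ierr;

     ierr  = MPI_Comm_rank(PETSC_COMM_WORLD,&rank);CHKERRQ(ierr);
     ghost = rank ? 0 : 2;                          // global index owned by the other rank
     ierr  = VecCreateGhostWithArray(PETSC_COMM_WORLD,2,PETSC_DECIDE,1,&ghost,buf,&v);CHKERRQ(ierr);
     // ... set owned values, then fill the ghost slots from the owning ranks ...
     ierr  = VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
     ierr  = VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
     ierr  = VecGhostGetLocalForm(v,&lv);CHKERRQ(ierr);   // sequential, length n+nghost
     ierr  = VecGhostRestoreLocalForm(v,&lv);CHKERRQ(ierr);
     ierr  = VecDestroy(&v);CHKERRQ(ierr);
*/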

/*@
   VecCreateGhost - Creates a parallel vector with ghost padding on each processor.

   Collective on MPI_Comm

   Input Parameters:
+  comm - the MPI communicator to use
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost points
-  ghosts - global indices of ghost points; these do not need to be in increasing (sorted) order

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of the vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   This also automatically sets the ISLocalToGlobalMapping() for this vector.

   Level: advanced

   Concepts: vectors^ghosted

.seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
          VecGhostGetLocalForm(), VecGhostRestoreLocalForm(), VecGhostUpdateBegin(),
          VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecGhostUpdateEnd(),
          VecCreateGhostBlock(), VecCreateGhostBlockWithArray(), VecMPISetGhost()

@*/
PetscErrorCode  VecCreateGhost(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecCreateGhostWithArray(comm,n,N,nghost,ghosts,0,vv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
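/*
   A usage sketch (illustrative, not part of the original source; sizes and
   the ghost index are hypothetical): as above, but PETSc allocates the
   storage, which is the common case.

     Vec            v;
     PetscInt       ghosts[1] = {0};
     PetscErrorCode ierr;

     ierr = VecCreateGhost(PETSC_COMM_WORLD,4,PETSC_DECIDE,1,ghosts,&v);CHKERRQ(ierr);
     ierr = VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
     ierr = VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
     ierr = VecDestroy(&v);CHKERRQ(ierr);
*/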

/*@
   VecMPISetGhost - Sets the ghost points for an MPI ghost vector

   Collective on Vec

   Input Parameters:
+  vv - the MPI vector
.  nghost - number of local ghost points
-  ghosts - global indices of ghost points; these do not need to be in increasing (sorted) order

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   This also automatically sets the ISLocalToGlobalMapping() for this vector.

   You must call this AFTER you have set the type of the vector (with VecSetType()) and the size (with VecSetSizes()).

   Level: advanced

   Concepts: vectors^ghosted

.seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
          VecGhostGetLocalForm(), VecGhostRestoreLocalForm(), VecGhostUpdateBegin(),
          VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecGhostUpdateEnd(),
          VecCreateGhostBlock(), VecCreateGhostBlockWithArray()

@*/
PetscErrorCode  VecMPISetGhost(Vec vv,PetscInt nghost,const PetscInt ghosts[])
{
  PetscErrorCode ierr;
  PetscBool      flg;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)vv,VECMPI,&flg);CHKERRQ(ierr);
  /* if this is already a fully built VECMPI, then essentially destroy it and rebuild it with ghosting */
  if (flg) {
    PetscInt               n,N;
    Vec_MPI                *w;
    PetscScalar            *larray;
    IS                     from,to;
    ISLocalToGlobalMapping ltog;
    PetscInt               rstart,i,*indices;
    MPI_Comm               comm;

    ierr = PetscObjectGetComm((PetscObject)vv,&comm);CHKERRQ(ierr);
    n    = vv->map->n;
    N    = vv->map->N;
    ierr = (*vv->ops->destroy)(vv);CHKERRQ(ierr);
    ierr = VecSetSizes(vv,n,N);CHKERRQ(ierr);
    ierr = VecCreate_MPI_Private(vv,PETSC_TRUE,nghost,NULL);CHKERRQ(ierr);
    w    = (Vec_MPI*)(vv)->data;
    /* Create local representation */
    ierr = VecGetArray(vv,&larray);CHKERRQ(ierr);
    ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,n+nghost,larray,&w->localrep);CHKERRQ(ierr);
    ierr = PetscLogObjectParent((PetscObject)vv,(PetscObject)w->localrep);CHKERRQ(ierr);
    ierr = VecRestoreArray(vv,&larray);CHKERRQ(ierr);

    /*
     Create scatter context for scattering (updating) ghost values
     */
    ierr = ISCreateGeneral(comm,nghost,ghosts,PETSC_COPY_VALUES,&from);CHKERRQ(ierr);
    ierr = ISCreateStride(PETSC_COMM_SELF,nghost,n,1,&to);CHKERRQ(ierr);
    ierr = VecScatterCreate(vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
    ierr = PetscLogObjectParent((PetscObject)vv,(PetscObject)w->localupdate);CHKERRQ(ierr);
    ierr = ISDestroy(&to);CHKERRQ(ierr);
    ierr = ISDestroy(&from);CHKERRQ(ierr);

    /* set local to global mapping for ghosted vector */
    ierr = PetscMalloc1((n+nghost),&indices);CHKERRQ(ierr);
    ierr = VecGetOwnershipRange(vv,&rstart,NULL);CHKERRQ(ierr);

    for (i=0; i<n; i++)      indices[i]   = rstart + i;
    for (i=0; i<nghost; i++) indices[n+i] = ghosts[i];

    ierr = ISLocalToGlobalMappingCreate(comm,1,n+nghost,indices,PETSC_OWN_POINTER,&ltog);CHKERRQ(ierr);
    ierr = VecSetLocalToGlobalMapping(vv,ltog);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
  } else if (vv->ops->create == VecCreate_MPI) SETERRQ(PetscObjectComm((PetscObject)vv),PETSC_ERR_ARG_WRONGSTATE,"Must set local or global size before setting ghosting");
  else if (!((PetscObject)vv)->type_name) SETERRQ(PetscObjectComm((PetscObject)vv),PETSC_ERR_ARG_WRONGSTATE,"Must set type to VECMPI before ghosting");
  PetscFunctionReturn(0);
}
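/*
   A usage sketch (illustrative, not part of the original source; sizes and
   the ghost index are hypothetical): ghosting is added only after the type
   and sizes are set, in the order the Notes above require.

     Vec            v;
     PetscInt       ghosts[1] = {0};
     PetscErrorCode ierr;

     ierr = VecCreate(PETSC_COMM_WORLD,&v);CHKERRQ(ierr);
     ierr = VecSetSizes(v,4,PETSC_DECIDE);CHKERRQ(ierr);
     ierr = VecSetType(v,VECMPI);CHKERRQ(ierr);      // type must be set first
     ierr = VecMPISetGhost(v,1,ghosts);CHKERRQ(ierr);
     ierr = VecDestroy(&v);CHKERRQ(ierr);
*/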


/* ------------------------------------------------------------------------------------------*/
/*@C
   VecCreateGhostBlockWithArray - Creates a parallel vector with ghost padding on each processor;
   the caller allocates the array space. Indices in the ghost region are based on blocks.

   Collective on MPI_Comm

   Input Parameters:
+  comm - the MPI communicator to use
.  bs - block size
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost blocks
.  ghosts - global indices of ghost blocks (or NULL if not needed); counts are by block, not by individual index, and these do not need to be in increasing (sorted) order
-  array - the space to store the vector values (of length at least n+nghost*bs)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of the vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   n is the local vector size (the total local size, not the number of blocks) while nghost
   is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
   portion is bs*nghost

   Level: advanced

   Concepts: vectors^creating ghosted
   Concepts: vectors^creating with array

.seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
          VecCreateGhostWithArray(), VecCreateGhostBlock()

@*/
PetscErrorCode  VecCreateGhostBlockWithArray(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
{
  PetscErrorCode         ierr;
  Vec_MPI                *w;
  PetscScalar            *larray;
  IS                     from,to;
  ISLocalToGlobalMapping ltog;
  PetscInt               rstart,i,nb,*indices;

  PetscFunctionBegin;
  *vv = 0;

  if (n == PETSC_DECIDE)      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
  if (nghost == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
  if (nghost < 0)             SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
  if (n % bs)                 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Local size must be a multiple of block size");
  ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
  /* Create global representation */
  ierr = VecCreate(comm,vv);CHKERRQ(ierr);
  ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
  ierr = VecSetBlockSize(*vv,bs);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(*vv,PETSC_TRUE,nghost*bs,array);CHKERRQ(ierr);
  w    = (Vec_MPI*)(*vv)->data;
  /* Create local representation */
  ierr = VecGetArray(*vv,&larray);CHKERRQ(ierr);
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,bs,n+bs*nghost,larray,&w->localrep);CHKERRQ(ierr);
  ierr = PetscLogObjectParent((PetscObject)*vv,(PetscObject)w->localrep);CHKERRQ(ierr);
  ierr = VecRestoreArray(*vv,&larray);CHKERRQ(ierr);

  /*
       Create scatter context for scattering (updating) ghost values
  */
  ierr = ISCreateBlock(comm,bs,nghost,ghosts,PETSC_COPY_VALUES,&from);CHKERRQ(ierr);
  ierr = ISCreateStride(PETSC_COMM_SELF,bs*nghost,n,1,&to);CHKERRQ(ierr);
  ierr = VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
  ierr = PetscLogObjectParent((PetscObject)*vv,(PetscObject)w->localupdate);CHKERRQ(ierr);
  ierr = ISDestroy(&to);CHKERRQ(ierr);
  ierr = ISDestroy(&from);CHKERRQ(ierr);

  /* set local to global mapping for ghosted vector; the mapping is in block units,
     so the owned block indices start at rstart/bs, not at the scalar index rstart */
  nb   = n/bs;
  ierr = PetscMalloc1((nb+nghost),&indices);CHKERRQ(ierr);
  ierr = VecGetOwnershipRange(*vv,&rstart,NULL);CHKERRQ(ierr);

  for (i=0; i<nb; i++)      indices[i]    = rstart/bs + i;
  for (i=0; i<nghost; i++)  indices[nb+i] = ghosts[i];

  ierr = ISLocalToGlobalMappingCreate(comm,bs,nb+nghost,indices,PETSC_OWN_POINTER,&ltog);CHKERRQ(ierr);
  ierr = VecSetLocalToGlobalMapping(*vv,ltog);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
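/*
   A usage sketch (illustrative, not part of the original source; bs, n, and
   the ghost block index are hypothetical): with bs=2 the local length n=6
   must be a multiple of bs, ghosts are counted in blocks, and the caller's
   array must hold n+bs*nghost = 8 entries.

     PetscScalar    buf[8];
     PetscInt       ghostblock = 0;                  // global BLOCK index to ghost
     Vec            v;
     PetscErrorCode ierr;

     ierr = VecCreateGhostBlockWithArray(PETSC_COMM_WORLD,2,6,PETSC_DECIDE,1,&ghostblock,buf,&v);CHKERRQ(ierr);
     // ... use v, VecGhostUpdateBegin/End, etc. ...
     ierr = VecDestroy(&v);CHKERRQ(ierr);            // buf remains owned by the caller
*/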

/*@
   VecCreateGhostBlock - Creates a parallel vector with ghost padding on each processor.
        The indexing of the ghost points is done with blocks.

   Collective on MPI_Comm

   Input Parameters:
+  comm - the MPI communicator to use
.  bs - the block size
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost blocks
-  ghosts - global indices of ghost blocks; counts are by block, not by individual index, and these do not need to be in increasing (sorted) order

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of the vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   n is the local vector size (the total local size, not the number of blocks) while nghost
   is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
   portion is bs*nghost

   Level: advanced

   Concepts: vectors^ghosted

.seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
          VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecCreateGhostBlockWithArray()

@*/
PetscErrorCode  VecCreateGhostBlock(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecCreateGhostBlockWithArray(comm,bs,n,N,nghost,ghosts,0,vv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
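/*
   A usage sketch (illustrative, not part of the original source; bs, n, and
   the ghost block index are hypothetical): as above, but PETSc allocates the
   storage; the local form is bs*nghost scalar entries longer than the owned part.

     Vec            v,lv;
     PetscInt       ghostblock = 0;                  // global BLOCK index to ghost
     PetscErrorCode ierr;

     ierr = VecCreateGhostBlock(PETSC_COMM_WORLD,2,6,PETSC_DECIDE,1,&ghostblock,&v);CHKERRQ(ierr);
     ierr = VecGhostGetLocalForm(v,&lv);CHKERRQ(ierr);    // length n + bs*nghost = 8
     ierr = VecGhostRestoreLocalForm(v,&lv);CHKERRQ(ierr);
     ierr = VecDestroy(&v);CHKERRQ(ierr);
*/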