Actual source code: isltog.c

petsc-3.5.2 2014-09-08
  2: #include <petsc-private/isimpl.h>    /*I "petscis.h"  I*/
  3: #include <petscsf.h>
  4: #include <petscviewer.h>

  6: PetscClassId IS_LTOGM_CLASSID;


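/*
   ISG2LMapApply - undocumented helper that applies the global-to-local lookup
   table: it builds mapping->globals on first use (via ISGlobalToLocalMappingApply()),
   passes negative input indices through unchanged, and maps global indices
   outside [globalstart,globalend] to -1.
*/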
 11: PetscErrorCode ISG2LMapApply(ISLocalToGlobalMapping mapping,PetscInt n,const PetscInt in[],PetscInt out[])
 12: {
 14:   PetscInt       i,start,end;

 17:   if (!mapping->globals) {
 18:     ISGlobalToLocalMappingApply(mapping,IS_GTOLM_MASK,0,0,0,0);
 19:   }
 20:   start = mapping->globalstart;
 21:   end = mapping->globalend;
 22:   for (i=0; i<n; i++) {
 23:     if (in[i] < 0)          out[i] = in[i];
 24:     else if (in[i] < start) out[i] = -1;
 25:     else if (in[i] > end)   out[i] = -1;
 26:     else                    out[i] = mapping->globals[in[i] - start];
 27:   }
 28:   return(0);
 29: }


 34: /*@
 35:     ISLocalToGlobalMappingGetSize - Gets the local size of a local to global mapping

 37:     Not Collective

 39:     Input Parameter:
 40: .   ltog - local to global mapping

 42:     Output Parameter:
 43: .   n - the number of entries in the local mapping, ISLocalToGlobalMappingGetIndices() returns an array of this length

 45:     Level: advanced

 47:     Concepts: mapping^local to global

 49: .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
 50: @*/
 51: PetscErrorCode  ISLocalToGlobalMappingGetSize(ISLocalToGlobalMapping mapping,PetscInt *n)
 52: {
 56:   *n = mapping->bs*mapping->n;
 57:   return(0);
 58: }
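
/*
   Usage sketch (illustrative; not part of the library source): for a mapping
   with bs = 2 and n = 3 blocks the reported size is bs*n = 6, which is also
   the length of the array returned by ISLocalToGlobalMappingGetIndices():

     ISLocalToGlobalMapping ltog;
     PetscInt               blockidx[3] = {0,3,5},nsz;
     ISLocalToGlobalMappingCreate(PETSC_COMM_SELF,2,3,blockidx,PETSC_COPY_VALUES,&ltog);
     ISLocalToGlobalMappingGetSize(ltog,&nsz);                      nsz is now 6
     ISLocalToGlobalMappingDestroy(&ltog);
*/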

 62: /*@C
 63:     ISLocalToGlobalMappingView - View a local to global mapping

 65:     Not Collective

 67:     Input Parameters:
 68: +   ltog - local to global mapping
 69: -   viewer - viewer used to display the mapping; if NULL, the default ASCII viewer (stdout) on the mapping's communicator is used

 71:     Level: advanced

 73:     Concepts: mapping^local to global

 75: .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
 76: @*/
 77: PetscErrorCode  ISLocalToGlobalMappingView(ISLocalToGlobalMapping mapping,PetscViewer viewer)
 78: {
 79:   PetscInt       i;
 80:   PetscMPIInt    rank;
 81:   PetscBool      iascii;

 86:   if (!viewer) {
 87:     PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)mapping),&viewer);
 88:   }

 91:   MPI_Comm_rank(PetscObjectComm((PetscObject)mapping),&rank);
 92:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
 93:   if (iascii) {
 94:     PetscObjectPrintClassNamePrefixType((PetscObject)mapping,viewer);
 95:     PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);
 96:     for (i=0; i<mapping->n; i++) {
 97:       PetscViewerASCIISynchronizedPrintf(viewer,"[%d] %D %D\n",rank,i,mapping->indices[i]);
 98:     }
 99:     PetscViewerFlush(viewer);
100:     PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);
101:   } else SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Viewer type %s not supported for ISLocalToGlobalMapping",((PetscObject)viewer)->type_name);
102:   return(0);
103: }
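
/*
   Usage sketch (illustrative; ltog is an existing mapping): as the code above
   shows, a NULL viewer selects the default ASCII viewer (stdout) on the
   mapping's communicator; an explicit viewer may also be passed:

     ISLocalToGlobalMappingView(ltog,NULL);
     ISLocalToGlobalMappingView(ltog,PETSC_VIEWER_STDOUT_WORLD);
*/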

107: /*@
108:     ISLocalToGlobalMappingCreateIS - Creates a mapping between a local (0 to n)
109:     ordering and a global parallel ordering.

111:     Not collective

113:     Input Parameter:
114: .   is - index set containing the global numbers for each local number

116:     Output Parameter:
117: .   mapping - new mapping data structure

119:     Notes: the block size of the IS determines the block size of the mapping
120:     Level: advanced

122:     Concepts: mapping^local to global

124: .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
125: @*/
126: PetscErrorCode  ISLocalToGlobalMappingCreateIS(IS is,ISLocalToGlobalMapping *mapping)
127: {
129:   PetscInt       n,bs;
130:   const PetscInt *indices;
131:   MPI_Comm       comm;
132:   PetscBool      isblock;


138:   PetscObjectGetComm((PetscObject)is,&comm);
139:   ISGetLocalSize(is,&n);
140:   PetscObjectTypeCompare((PetscObject)is,ISBLOCK,&isblock);
141:   if (!isblock) {
142:     ISGetIndices(is,&indices);
143:     ISLocalToGlobalMappingCreate(comm,1,n,indices,PETSC_COPY_VALUES,mapping);
144:     ISRestoreIndices(is,&indices);
145:   } else {
146:     ISGetBlockSize(is,&bs);
147:     ISBlockGetIndices(is,&indices);
148:     ISLocalToGlobalMappingCreate(comm,bs,n/bs,indices,PETSC_COPY_VALUES,mapping);
149:     ISBlockRestoreIndices(is,&indices);
150:   }
151:   return(0);
152: }
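
/*
   Usage sketch (illustrative; not part of the library source): per the Notes
   above, a blocked IS hands its block size to the new mapping:

     IS                     is;
     ISLocalToGlobalMapping ltog;
     PetscInt               blocks[2] = {0,3};
     ISCreateBlock(PETSC_COMM_SELF,2,2,blocks,PETSC_COPY_VALUES,&is);
     ISLocalToGlobalMappingCreateIS(is,&ltog);    mapping with bs 2 and 2 blocks
     ISDestroy(&is);
*/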

156: /*@C
157:     ISLocalToGlobalMappingCreateSF - Creates a mapping between a local (0 to n)
158:     ordering and a global parallel ordering.

160:     Collective

 162:     Input Parameters:
163: +   sf - star forest mapping contiguous local indices to (rank, offset)
164: -   start - first global index on this process

166:     Output Parameter:
167: .   mapping - new mapping data structure

169:     Level: advanced

171:     Concepts: mapping^local to global

173: .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingCreateIS()
174: @*/
175: PetscErrorCode ISLocalToGlobalMappingCreateSF(PetscSF sf,PetscInt start,ISLocalToGlobalMapping *mapping)
176: {
178:   PetscInt       i,maxlocal,nroots,nleaves,*globals,*ltog;
179:   const PetscInt *ilocal;
180:   MPI_Comm       comm;


186:   PetscObjectGetComm((PetscObject)sf,&comm);
187:   PetscSFGetGraph(sf,&nroots,&nleaves,&ilocal,NULL);
188:   if (ilocal) {
189:     for (i=0,maxlocal=0; i<nleaves; i++) maxlocal = PetscMax(maxlocal,ilocal[i]+1);
190:   }
191:   else maxlocal = nleaves;
192:   PetscMalloc1(nroots,&globals);
193:   PetscMalloc1(maxlocal,&ltog);
194:   for (i=0; i<nroots; i++) globals[i] = start + i;
195:   for (i=0; i<maxlocal; i++) ltog[i] = -1;
196:   PetscSFBcastBegin(sf,MPIU_INT,globals,ltog);
197:   PetscSFBcastEnd(sf,MPIU_INT,globals,ltog);
198:   ISLocalToGlobalMappingCreate(comm,1,maxlocal,ltog,PETSC_OWN_POINTER,mapping);
199:   PetscFree(globals);
200:   return(0);
201: }
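
/*
   Usage sketch (illustrative only; nroots and start are assumed to describe
   this process's owned range): leaves that reference a remote root receive
   the owner's global index through the broadcast above. Here a single leaf
   references root 0 on rank 0:

     PetscSF     sf;
     PetscSFNode remote;
     remote.rank  = 0;
     remote.index = 0;
     PetscSFCreate(PETSC_COMM_WORLD,&sf);
     PetscSFSetGraph(sf,nroots,1,NULL,PETSC_COPY_VALUES,&remote,PETSC_COPY_VALUES);
     ISLocalToGlobalMappingCreateSF(sf,start,&ltog);
     PetscSFDestroy(&sf);
*/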

205: /*@
 206:     ISLocalToGlobalMappingGetBlockSize - Gets the block size of the mapping

209:     Not Collective

 211:     Input Parameter:
212: .   mapping - mapping data structure

214:     Output Parameter:
215: .   bs - the blocksize

217:     Level: advanced

219:     Concepts: mapping^local to global

221: .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS()
222: @*/
223: PetscErrorCode  ISLocalToGlobalMappingGetBlockSize(ISLocalToGlobalMapping mapping,PetscInt *bs)
224: {
226:   *bs = mapping->bs;
227:   return(0);
228: }

232: /*@
233:     ISLocalToGlobalMappingCreate - Creates a mapping between a local (0 to n)
234:     ordering and a global parallel ordering.

236:     Not Collective, but communicator may have more than one process

238:     Input Parameters:
239: +   comm - MPI communicator
240: .   bs - the block size
241: .   n - the number of local elements divided by the block size, or equivalently the number of block indices
 242: .   indices - the global block index for each local block; these need not be in increasing order (sorted), and they should not be scaled (i.e. multiplied) by the block size bs
243: -   mode - see PetscCopyMode

245:     Output Parameter:
246: .   mapping - new mapping data structure

 248:     Notes: There is one integer value in indices per block and it represents the actual indices bs*idx + j, where j=0,...,bs-1
249:     Level: advanced

251:     Concepts: mapping^local to global

253: .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS()
254: @*/
255: PetscErrorCode  ISLocalToGlobalMappingCreate(MPI_Comm cm,PetscInt bs,PetscInt n,const PetscInt indices[],PetscCopyMode mode,ISLocalToGlobalMapping *mapping)
256: {
258:   PetscInt       *in;


264:   *mapping = NULL;
265:   ISInitializePackage();

267:   PetscHeaderCreate(*mapping,_p_ISLocalToGlobalMapping,int,IS_LTOGM_CLASSID,"ISLocalToGlobalMapping","Local to global mapping","IS",
268:                            cm,ISLocalToGlobalMappingDestroy,ISLocalToGlobalMappingView);
269:   (*mapping)->n  = n;
270:   (*mapping)->bs = bs;
271:   /*
272:     Do not create the global to local mapping. This is only created if
 273:     ISGlobalToLocalMappingApply() is called
274:   */
275:   (*mapping)->globals = 0;
276:   if (mode == PETSC_COPY_VALUES) {
277:     PetscMalloc1(n,&in);
278:     PetscMemcpy(in,indices,n*sizeof(PetscInt));
279:     (*mapping)->indices = in;
280:     PetscLogObjectMemory((PetscObject)*mapping,n*sizeof(PetscInt));
281:   } else if (mode == PETSC_OWN_POINTER) {
282:     (*mapping)->indices = (PetscInt*)indices;
283:     PetscLogObjectMemory((PetscObject)*mapping,n*sizeof(PetscInt));
284:   }
285:   else SETERRQ(cm,PETSC_ERR_SUP,"Cannot currently use PETSC_USE_POINTER");
286:   return(0);
287: }
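
/*
   Usage sketch (illustrative; not part of the library source): per the Notes
   above, with bs = 2 the block indices {0,3} represent the unblocked global
   indices {0,1,6,7}:

     ISLocalToGlobalMapping ltog;
     PetscInt               blockidx[2] = {0,3};
     ISLocalToGlobalMappingCreate(PETSC_COMM_SELF,2,2,blockidx,PETSC_COPY_VALUES,&ltog);
     ...
     ISLocalToGlobalMappingDestroy(&ltog);
*/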

291: /*@
292:    ISLocalToGlobalMappingDestroy - Destroys a mapping between a local (0 to n)
293:    ordering and a global parallel ordering.

 295:    Not Collective

 297:    Input Parameter:
298: .  mapping - mapping data structure

300:    Level: advanced

302: .seealso: ISLocalToGlobalMappingCreate()
303: @*/
304: PetscErrorCode  ISLocalToGlobalMappingDestroy(ISLocalToGlobalMapping *mapping)
305: {

309:   if (!*mapping) return(0);
311:   if (--((PetscObject)(*mapping))->refct > 0) {*mapping = 0;return(0);}
312:   PetscFree((*mapping)->indices);
313:   PetscFree((*mapping)->globals);
314:   PetscHeaderDestroy(mapping);
315:   *mapping = 0;
316:   return(0);
317: }

321: /*@
322:     ISLocalToGlobalMappingApplyIS - Creates from an IS in the local numbering
323:     a new index set using the global numbering defined in an ISLocalToGlobalMapping
324:     context.

326:     Not collective

328:     Input Parameters:
329: +   mapping - mapping between local and global numbering
330: -   is - index set in local numbering

 332:     Output Parameter:
333: .   newis - index set in global numbering

335:     Level: advanced

337:     Concepts: mapping^local to global

339: .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),
340:           ISLocalToGlobalMappingDestroy(), ISGlobalToLocalMappingApply()
341: @*/
342: PetscErrorCode  ISLocalToGlobalMappingApplyIS(ISLocalToGlobalMapping mapping,IS is,IS *newis)
343: {
345:   PetscInt       n,*idxout;
346:   const PetscInt *idxin;


353:   ISGetLocalSize(is,&n);
354:   ISGetIndices(is,&idxin);
355:   PetscMalloc1(n,&idxout);
356:   ISLocalToGlobalMappingApply(mapping,n,idxin,idxout);
357:   ISRestoreIndices(is,&idxin);
358:   ISCreateGeneral(PetscObjectComm((PetscObject)is),n,idxout,PETSC_OWN_POINTER,newis);
359:   return(0);
360: }
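
/*
   Usage sketch (illustrative; ltog is an existing mapping): translate an IS
   of local indices to the global numbering; newis is created above with
   PETSC_OWN_POINTER, so only the two index sets need to be destroyed:

     IS       locis,globis;
     PetscInt loc[3] = {0,1,2};
     ISCreateGeneral(PETSC_COMM_SELF,3,loc,PETSC_COPY_VALUES,&locis);
     ISLocalToGlobalMappingApplyIS(ltog,locis,&globis);
     ISDestroy(&locis);
     ISDestroy(&globis);
*/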

364: /*@
365:    ISLocalToGlobalMappingApply - Takes a list of integers in a local numbering
366:    and converts them to the global numbering.

368:    Not collective

370:    Input Parameters:
371: +  mapping - the local to global mapping context
372: .  N - number of integers
373: -  in - input indices in local numbering

375:    Output Parameter:
376: .  out - indices in global numbering

378:    Notes:
379:    The in and out array parameters may be identical.

381:    Level: advanced

383: .seealso: ISLocalToGlobalMappingApplyBlock(), ISLocalToGlobalMappingCreate(),ISLocalToGlobalMappingDestroy(),
384:           ISLocalToGlobalMappingApplyIS(),AOCreateBasic(),AOApplicationToPetsc(),
385:           AOPetscToApplication(), ISGlobalToLocalMappingApply()

387:     Concepts: mapping^local to global
388: @*/
389: PetscErrorCode ISLocalToGlobalMappingApply(ISLocalToGlobalMapping mapping,PetscInt N,const PetscInt in[],PetscInt out[])
390: {
391:   PetscInt       i,bs = mapping->bs,Nmax = bs*mapping->n;
392:   const PetscInt *idx = mapping->indices;

395:   if (bs == 1) {
396:     for (i=0; i<N; i++) {
397:       if (in[i] < 0) {
398:         out[i] = in[i];
399:         continue;
400:       }
401:       if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %D too large %D (max) at %D",in[i],Nmax-1,i);
402:       out[i] = idx[in[i]];
403:     }
404:   } else {
405:     for (i=0; i<N; i++) {
406:       if (in[i] < 0) {
407:         out[i] = in[i];
408:         continue;
409:       }
410:       if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %D too large %D (max) at %D",in[i],Nmax-1,i);
411:       out[i] = idx[in[i]/bs]*bs + (in[i] % bs);
412:     }
413:   }
414:   return(0);
415: }
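
/*
   Usage sketch (illustrative; reusing the bs = 2 mapping with block indices
   {0,3} from the ISLocalToGlobalMappingCreate() sketch): point index 3 lies
   in local block 3/2 = 1 at offset 3%2 = 1, so it maps to idx[1]*2 + 1 = 7:

     PetscInt in[4] = {0,1,2,3},out[4];
     ISLocalToGlobalMappingApply(ltog,4,in,out);         out is now {0,1,6,7}
*/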

419: /*@
420:    ISLocalToGlobalMappingApplyBlock - Takes a list of integers in a local block numbering  and converts them to the global block numbering

422:    Not collective

424:    Input Parameters:
425: +  mapping - the local to global mapping context
426: .  N - number of integers
427: -  in - input indices in local block numbering

429:    Output Parameter:
430: .  out - indices in global block numbering

432:    Notes:
433:    The in and out array parameters may be identical.

435:    Example:
 436:      If the mapping was created with ISLocalToGlobalMappingCreate(PETSC_COMM_SELF,2,2,{0,3},PETSC_COPY_VALUES,&mapping), so that it represents
 437:      the unblocked index values {0,1,6,7}, then the mapping applied to 0 (the first block) produces 0 and applied to 1 (the second block) produces 3.

439:    Level: advanced

441: .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),ISLocalToGlobalMappingDestroy(),
442:           ISLocalToGlobalMappingApplyIS(),AOCreateBasic(),AOApplicationToPetsc(),
443:           AOPetscToApplication(), ISGlobalToLocalMappingApply()

445:     Concepts: mapping^local to global
446: @*/
447: PetscErrorCode ISLocalToGlobalMappingApplyBlock(ISLocalToGlobalMapping mapping,PetscInt N,const PetscInt in[],PetscInt out[])
448: {
449:   PetscInt       i,Nmax = mapping->n;
450:   const PetscInt *idx = mapping->indices;

453:   for (i=0; i<N; i++) {
454:     if (in[i] < 0) {
455:       out[i] = in[i];
456:       continue;
457:     }
458:     if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local block index %D too large %D (max) at %D",in[i],Nmax-1,i);
459:     out[i] = idx[in[i]];
460:   }
461:   return(0);
462: }
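
/*
   Usage sketch (the Example above in code form; ltog is the bs = 2 mapping
   with block indices {0,3}):

     PetscInt bin[2] = {0,1},bout[2];
     ISLocalToGlobalMappingApplyBlock(ltog,2,bin,bout);    bout is now {0,3}
*/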

464: /* -----------------------------------------------------------------------------------------*/

468: /*
469:     Creates the global fields in the ISLocalToGlobalMapping structure
470: */
471: static PetscErrorCode ISGlobalToLocalMappingSetUp_Private(ISLocalToGlobalMapping mapping)
472: {
474:   PetscInt       i,*idx = mapping->indices,n = mapping->n,end,start,*globals;

477:   end   = 0;
478:   start = PETSC_MAX_INT;

480:   for (i=0; i<n; i++) {
481:     if (idx[i] < 0) continue;
482:     if (idx[i] < start) start = idx[i];
483:     if (idx[i] > end)   end   = idx[i];
484:   }
485:   if (start > end) {start = 0; end = -1;}
486:   mapping->globalstart = start;
487:   mapping->globalend   = end;

489:   PetscMalloc1((end-start+2),&globals);
490:   mapping->globals = globals;
491:   for (i=0; i<end-start+1; i++) globals[i] = -1;
492:   for (i=0; i<n; i++) {
493:     if (idx[i] < 0) continue;
494:     globals[idx[i] - start] = i;
495:   }

497:   PetscLogObjectMemory((PetscObject)mapping,(end-start+1)*sizeof(PetscInt));
498:   return(0);
499: }

503: /*@
504:     ISGlobalToLocalMappingApply - Provides the local numbering for a list of integers
505:     specified with a global numbering.

507:     Not collective

509:     Input Parameters:
510: +   mapping - mapping between local and global numbering
511: .   type - IS_GTOLM_MASK - replaces global indices with no local value with -1
512:            IS_GTOLM_DROP - drops the indices with no local value from the output list
513: .   n - number of global indices to map
514: -   idx - global indices to map

516:     Output Parameters:
517: +   nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n)
518: -   idxout - local index of each global index, one must pass in an array long enough
519:              to hold all the indices. You can call ISGlobalToLocalMappingApply() with
520:              idxout == NULL to determine the required length (returned in nout)
521:              and then allocate the required space and call ISGlobalToLocalMappingApply()
522:              a second time to set the values.

524:     Notes:
525:     Either nout or idxout may be NULL. idx and idxout may be identical.

527:     This is not scalable in memory usage. Each processor requires O(Nglobal) size
528:     array to compute these.

530:     Level: advanced

532:     Developer Note: The manual page states that idx and idxout may be identical but the calling
533:        sequence declares idx as const so it cannot be the same as idxout.

535:     Concepts: mapping^global to local

537: .seealso: ISLocalToGlobalMappingApply(), ISGlobalToLocalMappingApplyBlock(), ISLocalToGlobalMappingCreate(),
538:           ISLocalToGlobalMappingDestroy()
539: @*/
540: PetscErrorCode  ISGlobalToLocalMappingApply(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingType type,
541:                                             PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[])
542: {
543:   PetscInt       i,*globals,nf = 0,tmp,start,end,bs;

548:   if (!mapping->globals) {
549:     ISGlobalToLocalMappingSetUp_Private(mapping);
550:   }
551:   globals = mapping->globals;
552:   start   = mapping->globalstart;
553:   end     = mapping->globalend;
554:   bs      = mapping->bs;

556:   if (type == IS_GTOLM_MASK) {
557:     if (idxout) {
558:       for (i=0; i<n; i++) {
559:         if (idx[i] < 0) idxout[i] = idx[i];
560:         else if (idx[i] < bs*start) idxout[i] = -1;
 561:         else if (idx[i] > bs*end+bs-1) idxout[i] = -1; /* beyond the last point of block end */
562:         else                        idxout[i] = bs*globals[idx[i]/bs - start] + (idx[i] % bs);
563:       }
564:     }
565:     if (nout) *nout = n;
566:   } else {
567:     if (idxout) {
568:       for (i=0; i<n; i++) {
569:         if (idx[i] < 0) continue;
570:         if (idx[i] < bs*start) continue;
 571:         if (idx[i] > bs*end+bs-1) continue;
572:         tmp = bs*globals[idx[i]/bs - start] + (idx[i] % bs);
573:         if (tmp < 0) continue;
574:         idxout[nf++] = tmp;
575:       }
576:     } else {
577:       for (i=0; i<n; i++) {
578:         if (idx[i] < 0) continue;
579:         if (idx[i] < bs*start) continue;
 580:         if (idx[i] > bs*end+bs-1) continue;
581:         tmp = bs*globals[idx[i]/bs - start] + (idx[i] % bs);
582:         if (tmp < 0) continue;
583:         nf++;
584:       }
585:     }
586:     if (nout) *nout = nf;
587:   }
588:   return(0);
589: }
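
/*
   Usage sketch (illustrative; nglob and glob are assumed inputs): the
   two-pass IS_GTOLM_DROP pattern described above, sizing the output with
   idxout == NULL before allocating it:

     PetscInt nout,*local;
     ISGlobalToLocalMappingApply(ltog,IS_GTOLM_DROP,nglob,glob,&nout,NULL);
     PetscMalloc1(nout,&local);
     ISGlobalToLocalMappingApply(ltog,IS_GTOLM_DROP,nglob,glob,&nout,local);
     PetscFree(local);
*/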

593: /*@
594:     ISGlobalToLocalMappingApplyBlock - Provides the local block numbering for a list of integers
595:     specified with a block global numbering.

597:     Not collective

599:     Input Parameters:
600: +   mapping - mapping between local and global numbering
601: .   type - IS_GTOLM_MASK - replaces global indices with no local value with -1
602:            IS_GTOLM_DROP - drops the indices with no local value from the output list
603: .   n - number of global indices to map
604: -   idx - global indices to map

606:     Output Parameters:
607: +   nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n)
608: -   idxout - local index of each global index, one must pass in an array long enough
609:              to hold all the indices. You can call ISGlobalToLocalMappingApplyBlock() with
610:              idxout == NULL to determine the required length (returned in nout)
611:              and then allocate the required space and call ISGlobalToLocalMappingApplyBlock()
612:              a second time to set the values.

614:     Notes:
615:     Either nout or idxout may be NULL. idx and idxout may be identical.

617:     This is not scalable in memory usage. Each processor requires O(Nglobal) size
618:     array to compute these.

620:     Level: advanced

622:     Developer Note: The manual page states that idx and idxout may be identical but the calling
623:        sequence declares idx as const so it cannot be the same as idxout.

625:     Concepts: mapping^global to local

627: .seealso: ISLocalToGlobalMappingApply(), ISGlobalToLocalMappingApply(), ISLocalToGlobalMappingCreate(),
628:           ISLocalToGlobalMappingDestroy()
629: @*/
630: PetscErrorCode  ISGlobalToLocalMappingApplyBlock(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingType type,
631:                                   PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[])
632: {
633:   PetscInt       i,*globals,nf = 0,tmp,start,end;

638:   if (!mapping->globals) {
639:     ISGlobalToLocalMappingSetUp_Private(mapping);
640:   }
641:   globals = mapping->globals;
642:   start   = mapping->globalstart;
643:   end     = mapping->globalend;

645:   if (type == IS_GTOLM_MASK) {
646:     if (idxout) {
647:       for (i=0; i<n; i++) {
648:         if (idx[i] < 0) idxout[i] = idx[i];
649:         else if (idx[i] < start) idxout[i] = -1;
650:         else if (idx[i] > end)   idxout[i] = -1;
651:         else                     idxout[i] = globals[idx[i] - start];
652:       }
653:     }
654:     if (nout) *nout = n;
655:   } else {
656:     if (idxout) {
657:       for (i=0; i<n; i++) {
658:         if (idx[i] < 0) continue;
659:         if (idx[i] < start) continue;
660:         if (idx[i] > end) continue;
661:         tmp = globals[idx[i] - start];
662:         if (tmp < 0) continue;
663:         idxout[nf++] = tmp;
664:       }
665:     } else {
666:       for (i=0; i<n; i++) {
667:         if (idx[i] < 0) continue;
668:         if (idx[i] < start) continue;
669:         if (idx[i] > end) continue;
670:         tmp = globals[idx[i] - start];
671:         if (tmp < 0) continue;
672:         nf++;
673:       }
674:     }
675:     if (nout) *nout = nf;
676:   }
677:   return(0);
678: }

682: /*@C
683:     ISLocalToGlobalMappingGetBlockInfo - Gets the neighbor information for each processor and
684:      each index shared by more than one processor

686:     Collective on ISLocalToGlobalMapping

 688:     Input Parameter:
689: .   mapping - the mapping from local to global indexing

 691:     Output Parameters:
692: +   nproc - number of processors that are connected to this one
693: .   proc - neighboring processors
694: .   numproc - number of indices for each subdomain (processor)
695: -   indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)

697:     Level: advanced

699:     Concepts: mapping^local to global

701:     Fortran Usage:
702: $        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
703: $        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
704:           PetscInt indices[nproc][numprocmax],ierr)
705:         There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and
706:         indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.


709: .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
710:           ISLocalToGlobalMappingRestoreInfo()
711: @*/
712: PetscErrorCode  ISLocalToGlobalMappingGetBlockInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
713: {
715:   PetscMPIInt    size,rank,tag1,tag2,tag3,*len,*source,imdex;
716:   PetscInt       i,n = mapping->n,Ng,ng,max = 0,*lindices = mapping->indices;
717:   PetscInt       *nprocs,*owner,nsends,*sends,j,*starts,nmax,nrecvs,*recvs,proc;
718:   PetscInt       cnt,scale,*ownedsenders,*nownedsenders,rstart,nowned;
719:   PetscInt       node,nownedm,nt,*sends2,nsends2,*starts2,*lens2,*dest,nrecvs2,*starts3,*recvs2,k,*bprocs,*tmp;
720:   PetscInt       first_procs,first_numprocs,*first_indices;
721:   MPI_Request    *recv_waits,*send_waits;
722:   MPI_Status     recv_status,*send_status,*recv_statuses;
723:   MPI_Comm       comm;
724:   PetscBool      debug = PETSC_FALSE;

728:   PetscObjectGetComm((PetscObject)mapping,&comm);
729:   MPI_Comm_size(comm,&size);
730:   MPI_Comm_rank(comm,&rank);
731:   if (size == 1) {
732:     *nproc         = 0;
733:     *procs         = NULL;
734:     PetscMalloc(sizeof(PetscInt),numprocs);
735:     (*numprocs)[0] = 0;
736:     PetscMalloc(sizeof(PetscInt*),indices);
737:     (*indices)[0]  = NULL;
738:     return(0);
739:   }

741:   PetscOptionsGetBool(NULL,"-islocaltoglobalmappinggetinfo_debug",&debug,NULL);

743:   /*
744:     Notes on ISLocalToGlobalMappingGetBlockInfo

746:     globally owned node - the nodes that have been assigned to this processor in global
747:            numbering, just for this routine.

749:     nontrivial globally owned node - node assigned to this processor that is on a subdomain
 750:            boundary (i.e. it has more than one local owner)

752:     locally owned node - node that exists on this processors subdomain

 754:     nontrivial locally owned node - node that is not in the interior (i.e. it belongs to
 755:            more than one local subdomain)
756:   */
757:   PetscObjectGetNewTag((PetscObject)mapping,&tag1);
758:   PetscObjectGetNewTag((PetscObject)mapping,&tag2);
759:   PetscObjectGetNewTag((PetscObject)mapping,&tag3);

761:   for (i=0; i<n; i++) {
762:     if (lindices[i] > max) max = lindices[i];
763:   }
764:   MPI_Allreduce(&max,&Ng,1,MPIU_INT,MPI_MAX,comm);
765:   Ng++;
766:   MPI_Comm_size(comm,&size);
767:   MPI_Comm_rank(comm,&rank);
768:   scale  = Ng/size + 1;
769:   ng     = scale; if (rank == size-1) ng = Ng - scale*(size-1); ng = PetscMax(1,ng);
770:   rstart = scale*rank;

772:   /* determine ownership ranges of global indices */
773:   PetscMalloc1(2*size,&nprocs);
774:   PetscMemzero(nprocs,2*size*sizeof(PetscInt));

776:   /* determine owners of each local node  */
777:   PetscMalloc1(n,&owner);
778:   for (i=0; i<n; i++) {
779:     proc             = lindices[i]/scale; /* processor that globally owns this index */
780:     nprocs[2*proc+1] = 1;                 /* processor globally owns at least one of ours */
781:     owner[i]         = proc;
782:     nprocs[2*proc]++;                     /* count of how many that processor globally owns of ours */
783:   }
784:   nsends = 0; for (i=0; i<size; i++) nsends += nprocs[2*i+1];
785:   PetscInfo1(mapping,"Number of global owners for my local data %D\n",nsends);

787:   /* inform other processors of number of messages and max length*/
788:   PetscMaxSum(comm,nprocs,&nmax,&nrecvs);
789:   PetscInfo1(mapping,"Number of local owners for my global data %D\n",nrecvs);

791:   /* post receives for owned rows */
792:   PetscMalloc1((2*nrecvs+1)*(nmax+1),&recvs);
793:   PetscMalloc1((nrecvs+1),&recv_waits);
794:   for (i=0; i<nrecvs; i++) {
795:     MPI_Irecv(recvs+2*nmax*i,2*nmax,MPIU_INT,MPI_ANY_SOURCE,tag1,comm,recv_waits+i);
796:   }

798:   /* pack messages containing lists of local nodes to owners */
799:   PetscMalloc1((2*n+1),&sends);
800:   PetscMalloc1((size+1),&starts);
801:   starts[0] = 0;
802:   for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];
803:   for (i=0; i<n; i++) {
804:     sends[starts[owner[i]]++] = lindices[i];
805:     sends[starts[owner[i]]++] = i;
806:   }
807:   PetscFree(owner);
808:   starts[0] = 0;
809:   for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];

811:   /* send the messages */
812:   PetscMalloc1((nsends+1),&send_waits);
813:   PetscMalloc1((nsends+1),&dest);
814:   cnt = 0;
815:   for (i=0; i<size; i++) {
816:     if (nprocs[2*i]) {
817:       MPI_Isend(sends+starts[i],2*nprocs[2*i],MPIU_INT,i,tag1,comm,send_waits+cnt);
818:       dest[cnt] = i;
819:       cnt++;
820:     }
821:   }
822:   PetscFree(starts);

824:   /* wait on receives */
825:   PetscMalloc1((nrecvs+1),&source);
826:   PetscMalloc1((nrecvs+1),&len);
827:   cnt  = nrecvs;
828:   PetscMalloc1((ng+1),&nownedsenders);
829:   PetscMemzero(nownedsenders,ng*sizeof(PetscInt));
830:   while (cnt) {
831:     MPI_Waitany(nrecvs,recv_waits,&imdex,&recv_status);
832:     /* unpack receives into our local space */
833:     MPI_Get_count(&recv_status,MPIU_INT,&len[imdex]);
834:     source[imdex] = recv_status.MPI_SOURCE;
835:     len[imdex]    = len[imdex]/2;
836:     /* count how many local owners for each of my global owned indices */
837:     for (i=0; i<len[imdex]; i++) nownedsenders[recvs[2*imdex*nmax+2*i]-rstart]++;
838:     cnt--;
839:   }
840:   PetscFree(recv_waits);

842:   /* count how many globally owned indices are on an edge multiplied by how many processors own them. */
843:   nowned  = 0;
844:   nownedm = 0;
845:   for (i=0; i<ng; i++) {
846:     if (nownedsenders[i] > 1) {nownedm += nownedsenders[i]; nowned++;}
847:   }

849:   /* create single array to contain rank of all local owners of each globally owned index */
850:   PetscMalloc1((nownedm+1),&ownedsenders);
851:   PetscMalloc1((ng+1),&starts);
852:   starts[0] = 0;
853:   for (i=1; i<ng; i++) {
854:     if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
855:     else starts[i] = starts[i-1];
856:   }

 858:   /* for each nontrivial globally owned node list all arriving processors */
859:   for (i=0; i<nrecvs; i++) {
860:     for (j=0; j<len[i]; j++) {
861:       node = recvs[2*i*nmax+2*j]-rstart;
862:       if (nownedsenders[node] > 1) ownedsenders[starts[node]++] = source[i];
863:     }
864:   }

866:   if (debug) { /* -----------------------------------  */
867:     starts[0] = 0;
868:     for (i=1; i<ng; i++) {
869:       if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
870:       else starts[i] = starts[i-1];
871:     }
872:     for (i=0; i<ng; i++) {
873:       if (nownedsenders[i] > 1) {
874:         PetscSynchronizedPrintf(comm,"[%d] global node %D local owner processors: ",rank,i+rstart);
875:         for (j=0; j<nownedsenders[i]; j++) {
876:           PetscSynchronizedPrintf(comm,"%D ",ownedsenders[starts[i]+j]);
877:         }
878:         PetscSynchronizedPrintf(comm,"\n");
879:       }
880:     }
881:     PetscSynchronizedFlush(comm,PETSC_STDOUT);
882:   } /* -----------------------------------  */

884:   /* wait on original sends */
885:   if (nsends) {
886:     PetscMalloc1(nsends,&send_status);
887:     MPI_Waitall(nsends,send_waits,send_status);
888:     PetscFree(send_status);
889:   }
890:   PetscFree(send_waits);
891:   PetscFree(sends);
892:   PetscFree(nprocs);

894:   /* pack messages to send back to local owners */
895:   starts[0] = 0;
896:   for (i=1; i<ng; i++) {
897:     if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
898:     else starts[i] = starts[i-1];
899:   }
900:   nsends2 = nrecvs;
901:   PetscMalloc1((nsends2+1),&nprocs); /* length of each message */
902:   for (i=0; i<nrecvs; i++) {
903:     nprocs[i] = 1;
904:     for (j=0; j<len[i]; j++) {
905:       node = recvs[2*i*nmax+2*j]-rstart;
906:       if (nownedsenders[node] > 1) nprocs[i] += 2 + nownedsenders[node];
907:     }
908:   }
909:   nt = 0;
910:   for (i=0; i<nsends2; i++) nt += nprocs[i];

912:   PetscMalloc1((nt+1),&sends2);
913:   PetscMalloc1((nsends2+1),&starts2);

915:   starts2[0] = 0;
916:   for (i=1; i<nsends2; i++) starts2[i] = starts2[i-1] + nprocs[i-1];
917:   /*
 918:      Each message is nprocs[i] entries long and consists of
 919:        (0) the number of nodes being sent back, then, for each such node,
 920:        (1) the local node number,
 921:        (2) the number of processors sharing it, and
 922:        (3) the list of processors sharing it
923:   */
924:   for (i=0; i<nsends2; i++) {
925:     cnt = 1;
926:     sends2[starts2[i]] = 0;
927:     for (j=0; j<len[i]; j++) {
928:       node = recvs[2*i*nmax+2*j]-rstart;
929:       if (nownedsenders[node] > 1) {
930:         sends2[starts2[i]]++;
931:         sends2[starts2[i]+cnt++] = recvs[2*i*nmax+2*j+1];
932:         sends2[starts2[i]+cnt++] = nownedsenders[node];
933:         PetscMemcpy(&sends2[starts2[i]+cnt],&ownedsenders[starts[node]],nownedsenders[node]*sizeof(PetscInt));
934:         cnt += nownedsenders[node];
935:       }
936:     }
937:   }

939:   /* receive the message lengths */
940:   nrecvs2 = nsends;
941:   PetscMalloc1((nrecvs2+1),&lens2);
942:   PetscMalloc1((nrecvs2+1),&starts3);
943:   PetscMalloc1((nrecvs2+1),&recv_waits);
944:   for (i=0; i<nrecvs2; i++) {
945:     MPI_Irecv(&lens2[i],1,MPIU_INT,dest[i],tag2,comm,recv_waits+i);
946:   }

948:   /* send the message lengths */
949:   for (i=0; i<nsends2; i++) {
950:     MPI_Send(&nprocs[i],1,MPIU_INT,source[i],tag2,comm);
951:   }

953:   /* wait on receives of lens */
954:   if (nrecvs2) {
955:     PetscMalloc1(nrecvs2,&recv_statuses);
956:     MPI_Waitall(nrecvs2,recv_waits,recv_statuses);
957:     PetscFree(recv_statuses);
958:   }
959:   PetscFree(recv_waits);

961:   starts3[0] = 0;
962:   nt         = 0;
963:   for (i=0; i<nrecvs2-1; i++) {
964:     starts3[i+1] = starts3[i] + lens2[i];
965:     nt          += lens2[i];
966:   }
967:   if (nrecvs2) nt += lens2[nrecvs2-1];

969:   PetscMalloc1((nt+1),&recvs2);
970:   PetscMalloc1((nrecvs2+1),&recv_waits);
971:   for (i=0; i<nrecvs2; i++) {
972:     MPI_Irecv(recvs2+starts3[i],lens2[i],MPIU_INT,dest[i],tag3,comm,recv_waits+i);
973:   }

975:   /* send the messages */
976:   PetscMalloc1((nsends2+1),&send_waits);
977:   for (i=0; i<nsends2; i++) {
978:     MPI_Isend(sends2+starts2[i],nprocs[i],MPIU_INT,source[i],tag3,comm,send_waits+i);
979:   }

981:   /* wait on receives */
982:   if (nrecvs2) {
983:     PetscMalloc1(nrecvs2,&recv_statuses);
984:     MPI_Waitall(nrecvs2,recv_waits,recv_statuses);
985:     PetscFree(recv_statuses);
986:   }
987:   PetscFree(recv_waits);
988:   PetscFree(nprocs);

990:   if (debug) { /* -----------------------------------  */
991:     cnt = 0;
992:     for (i=0; i<nrecvs2; i++) {
993:       nt = recvs2[cnt++];
994:       for (j=0; j<nt; j++) {
995:         PetscSynchronizedPrintf(comm,"[%d] local node %D number of subdomains %D: ",rank,recvs2[cnt],recvs2[cnt+1]);
996:         for (k=0; k<recvs2[cnt+1]; k++) {
997:           PetscSynchronizedPrintf(comm,"%D ",recvs2[cnt+2+k]);
998:         }
999:         cnt += 2 + recvs2[cnt+1];
1000:         PetscSynchronizedPrintf(comm,"\n");
1001:       }
1002:     }
1003:     PetscSynchronizedFlush(comm,PETSC_STDOUT);
1004:   } /* -----------------------------------  */

1006:   /* count the number of subdomains for each local node */
1007:   PetscMalloc1(size,&nprocs);
1008:   PetscMemzero(nprocs,size*sizeof(PetscInt));
1009:   cnt  = 0;
1010:   for (i=0; i<nrecvs2; i++) {
1011:     nt = recvs2[cnt++];
1012:     for (j=0; j<nt; j++) {
1013:       for (k=0; k<recvs2[cnt+1]; k++) nprocs[recvs2[cnt+2+k]]++;
1014:       cnt += 2 + recvs2[cnt+1];
1015:     }
1016:   }
1017:   nt = 0; for (i=0; i<size; i++) nt += (nprocs[i] > 0);
1018:   *nproc    = nt;
1019:   PetscMalloc1((nt+1),procs);
1020:   PetscMalloc1((nt+1),numprocs);
1021:   PetscMalloc1((nt+1),indices);
1022:   for (i=0;i<nt+1;i++) (*indices)[i]=NULL;
1023:   PetscMalloc1(size,&bprocs);
1024:   cnt       = 0;
1025:   for (i=0; i<size; i++) {
1026:     if (nprocs[i] > 0) {
1027:       bprocs[i]        = cnt;
1028:       (*procs)[cnt]    = i;
1029:       (*numprocs)[cnt] = nprocs[i];
1030:       PetscMalloc1(nprocs[i],&(*indices)[cnt]);
1031:       cnt++;
1032:     }
1033:   }

1035:   /* make the list of subdomains for each nontrivial local node */
1036:   PetscMemzero(*numprocs,nt*sizeof(PetscInt));
1037:   cnt  = 0;
1038:   for (i=0; i<nrecvs2; i++) {
1039:     nt = recvs2[cnt++];
1040:     for (j=0; j<nt; j++) {
1041:       for (k=0; k<recvs2[cnt+1]; k++) (*indices)[bprocs[recvs2[cnt+2+k]]][(*numprocs)[bprocs[recvs2[cnt+2+k]]]++] = recvs2[cnt];
1042:       cnt += 2 + recvs2[cnt+1];
1043:     }
1044:   }
1045:   PetscFree(bprocs);
1046:   PetscFree(recvs2);

1048:   /* sort the node indexing by their global numbers */
1049:   nt = *nproc;
1050:   for (i=0; i<nt; i++) {
1051:     PetscMalloc1(((*numprocs)[i]),&tmp);
1052:     for (j=0; j<(*numprocs)[i]; j++) tmp[j] = lindices[(*indices)[i][j]];
1053:     PetscSortIntWithArray((*numprocs)[i],tmp,(*indices)[i]);
1054:     PetscFree(tmp);
1055:   }

1057:   if (debug) { /* -----------------------------------  */
1058:     nt = *nproc;
1059:     for (i=0; i<nt; i++) {
1060:       PetscSynchronizedPrintf(comm,"[%d] subdomain %D number of indices %D: ",rank,(*procs)[i],(*numprocs)[i]);
1061:       for (j=0; j<(*numprocs)[i]; j++) {
1062:         PetscSynchronizedPrintf(comm,"%D ",(*indices)[i][j]);
1063:       }
1064:       PetscSynchronizedPrintf(comm,"\n");
1065:     }
1066:     PetscSynchronizedFlush(comm,PETSC_STDOUT);
1067:   } /* -----------------------------------  */

1069:   /* wait on sends */
1070:   if (nsends2) {
1071:     PetscMalloc1(nsends2,&send_status);
1072:     MPI_Waitall(nsends2,send_waits,send_status);
1073:     PetscFree(send_status);
1074:   }

1076:   PetscFree(starts3);
1077:   PetscFree(dest);
1078:   PetscFree(send_waits);

1080:   PetscFree(nownedsenders);
1081:   PetscFree(ownedsenders);
1082:   PetscFree(starts);
1083:   PetscFree(starts2);
1084:   PetscFree(lens2);

1086:   PetscFree(source);
1087:   PetscFree(len);
1088:   PetscFree(recvs);
1089:   PetscFree(nprocs);
1090:   PetscFree(sends2);

1092:   /* put the information about myself as the first entry in the list */
1093:   first_procs    = (*procs)[0];
1094:   first_numprocs = (*numprocs)[0];
1095:   first_indices  = (*indices)[0];
1096:   for (i=0; i<*nproc; i++) {
1097:     if ((*procs)[i] == rank) {
1098:       (*procs)[0]    = (*procs)[i];
1099:       (*numprocs)[0] = (*numprocs)[i];
1100:       (*indices)[0]  = (*indices)[i];
1101:       (*procs)[i]    = first_procs;
1102:       (*numprocs)[i] = first_numprocs;
1103:       (*indices)[i]  = first_indices;
1104:       break;
1105:     }
1106:   }
1107:   return(0);
1108: }

1112: /*@C
1113:     ISLocalToGlobalMappingRestoreBlockInfo - Frees the memory allocated by ISLocalToGlobalMappingGetBlockInfo()

1115:     Collective on ISLocalToGlobalMapping

1117:     Input Parameter:
1118: .   mapping - the mapping from local to global indexing

1120:    Output Parameters:
1121: +   nproc - number of processors that are connected to this one
1122: .   proc - neighboring processors
1123: .   numproc - number of indices for each processor
1124: -   indices - indices of local nodes shared with neighbor (sorted by global numbering)

1126:     Level: advanced

1128: .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1129:           ISLocalToGlobalMappingGetBlockInfo()
1130: @*/
1131: PetscErrorCode  ISLocalToGlobalMappingRestoreBlockInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1132: {
1134:   PetscInt       i;

1137:   PetscFree(*procs);
1138:   PetscFree(*numprocs);
1139:   if (*indices) {
1140:     PetscFree((*indices)[0]);
1141:     for (i=1; i<*nproc; i++) {
1142:       PetscFree((*indices)[i]);
1143:     }
1144:     PetscFree(*indices);
1145:   }
1146:   return(0);
1147: }

1151: /*@C
1152:     ISLocalToGlobalMappingGetInfo - Gets the neighbor information for each processor and
1153:      each index shared by more than one processor

1155:     Collective on ISLocalToGlobalMapping

1157:     Input Parameter:
1158: .   mapping - the mapping from local to global indexing

1160:    Output Parameters:
1161: +   nproc - number of processors that are connected to this one
1162: .   proc - neighboring processors
1163: .   numproc - number of indices for each subdomain (processor)
1164: -   indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)

1166:     Level: advanced

1168:     Concepts: mapping^local to global

1170:     Fortran Usage:
1171: $        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
1172: $        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
1173:           PetscInt indices[nproc][numprocmax],ierr)
1174:         There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and
1175:         indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.


1178: .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1179:           ISLocalToGlobalMappingRestoreInfo()
1180: @*/
1181: PetscErrorCode  ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1182: {
1184:   PetscInt       **bindices = NULL,bs = mapping->bs,i,j,k;

1188:   ISLocalToGlobalMappingGetBlockInfo(mapping,nproc,procs,numprocs,&bindices);
1189:   PetscCalloc1(*nproc,&*indices);
1190:   for (i=0; i<*nproc; i++) {
1191:     PetscMalloc1(bs*(*numprocs)[i],&(*indices)[i]);
1192:     for (j=0; j<(*numprocs)[i]; j++) {
1193:       for (k=0; k<bs; k++) {
1194:         (*indices)[i][j*bs+k] = bs*bindices[i][j] + k;
1195:       }
1196:     }
1197:     (*numprocs)[i] *= bs;
1198:   }
1199:   if (bindices) {
1200:     PetscFree(bindices[0]);
1201:     for (i=1; i<*nproc; i++) {
1202:       PetscFree(bindices[i]);
1203:     }
1204:     PetscFree(bindices);
1205:   }
1206:   return(0);
1207: }
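
/*
   Usage sketch (illustrative; ltog is an existing mapping): the Get/Restore
   calls must be paired so the arrays allocated above are freed:

     PetscInt nproc,*procs,*numprocs,**idxs;
     ISLocalToGlobalMappingGetInfo(ltog,&nproc,&procs,&numprocs,&idxs);
     ... use procs[0..nproc-1], numprocs[i] and idxs[i][0..numprocs[i]-1] ...
     ISLocalToGlobalMappingRestoreInfo(ltog,&nproc,&procs,&numprocs,&idxs);
*/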

1211: /*@C
1212:     ISLocalToGlobalMappingRestoreInfo - Frees the memory allocated by ISLocalToGlobalMappingGetInfo()

1214:     Collective on ISLocalToGlobalMapping

1216:     Input Parameter:
1217: .   mapping - the mapping from local to global indexing

1219:    Output Parameters:
1220: +   nproc - number of processors that are connected to this one
1221: .   proc - neighboring processors
1222: .   numproc - number of indices for each processor
1223: -   indices - indices of local nodes shared with neighbor (sorted by global numbering)

1225:     Level: advanced

1227: .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1228:           ISLocalToGlobalMappingGetInfo()
1229: @*/
1230: PetscErrorCode  ISLocalToGlobalMappingRestoreInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1231: {

1235:   ISLocalToGlobalMappingRestoreBlockInfo(mapping,nproc,procs,numprocs,indices);
1236:   return(0);
1237: }

1241: /*@C
1242:    ISLocalToGlobalMappingGetIndices - Get global indices for every local point that is mapped

1244:    Not Collective

1246:    Input Arguments:
1247: . ltog - local to global mapping

1249:    Output Arguments:
1250: . array - array of indices, the length of this array may be obtained with ISLocalToGlobalMappingGetSize()

1252:    Level: advanced

1254:    Notes: ISLocalToGlobalMappingGetSize() returns the length of this array

1256: .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingRestoreIndices(), ISLocalToGlobalMappingGetBlockIndices(), ISLocalToGlobalMappingRestoreBlockIndices()
1257: @*/
1258: PetscErrorCode  ISLocalToGlobalMappingGetIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1259: {
1263:   if (ltog->bs == 1) {
1264:     *array = ltog->indices;
1265:   } else {
1266:     PetscInt       *jj,k,i,j,n = ltog->n, bs = ltog->bs;
1267:     const PetscInt *ii;

1270:     PetscMalloc1(bs*n,&jj);
1271:     *array = jj;
1272:     k    = 0;
1273:     ii   = ltog->indices;
1274:     for (i=0; i<n; i++)
1275:       for (j=0; j<bs; j++)
1276:         jj[k++] = bs*ii[i] + j;
1277:   }
1278:   return(0);
1279: }

1283: /*@C
1284:    ISLocalToGlobalMappingRestoreIndices - Restore indices obtained with ISLocalToGlobalMappingGetIndices()

1286:    Not Collective

1288:    Input Arguments:
1289: + ltog - local to global mapping
1290: - array - array of indices

1292:    Level: advanced

1294: .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingGetIndices()
1295: @*/
1296: PetscErrorCode  ISLocalToGlobalMappingRestoreIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1297: {
1301:   if (ltog->bs == 1 && *array != ltog->indices) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer");

1303:   if (ltog->bs > 1) {
1305:     PetscFree(*(void**)array);
1306:   }
1307:   return(0);
1308: }
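
/*
   Usage sketch (illustrative; ltog is an existing mapping): read the
   flattened global index array; the Restore call is required because for
   bs > 1 GetIndices allocates a temporary array (see above):

     const PetscInt *garray;
     PetscInt       len;
     ISLocalToGlobalMappingGetSize(ltog,&len);
     ISLocalToGlobalMappingGetIndices(ltog,&garray);
     ... use garray[0..len-1] ...
     ISLocalToGlobalMappingRestoreIndices(ltog,&garray);
*/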

1312: /*@C
1313:    ISLocalToGlobalMappingGetBlockIndices - Get global indices for every local block

1315:    Not Collective

1317:    Input Arguments:
1318: . ltog - local to global mapping

1320:    Output Arguments:
1321: . array - array of indices

1323:    Level: advanced

1325: .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingRestoreBlockIndices()
1326: @*/
1327: PetscErrorCode  ISLocalToGlobalMappingGetBlockIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1328: {
1332:   *array = ltog->indices;
1333:   return(0);
1334: }

1338: /*@C
1339:    ISLocalToGlobalMappingRestoreBlockIndices - Restore indices obtained with ISLocalToGlobalMappingGetBlockIndices()

1341:    Not Collective

1343:    Input Arguments:
1344: + ltog - local to global mapping
1345: - array - array of indices

1347:    Level: advanced

1349: .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingGetIndices()
1350: @*/
1351: PetscErrorCode  ISLocalToGlobalMappingRestoreBlockIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1352: {
1356:   if (*array != ltog->indices) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer");
1357:   *array = NULL;
1358:   return(0);
1359: }

1363: /*@C
1364:    ISLocalToGlobalMappingConcatenate - Create a new mapping that concatenates a list of mappings

1366:    Not Collective

1368:    Input Arguments:
1369: + comm - communicator for the new mapping, must contain the communicator of every mapping to concatenate
1370: . n - number of mappings to concatenate
1371: - ltogs - local to global mappings

1373:    Output Arguments:
1374: . ltogcat - new mapping

1376:    Note: this currently always returns a mapping with block size of 1

1378:    Developer Note: If all the input mappings have the same block size we could easily handle that as a special case

1380:    Level: advanced

1382: .seealso: ISLocalToGlobalMappingCreate()
1383: @*/
1384: PetscErrorCode ISLocalToGlobalMappingConcatenate(MPI_Comm comm,PetscInt n,const ISLocalToGlobalMapping ltogs[],ISLocalToGlobalMapping *ltogcat)
1385: {
1386:   PetscInt       i,cnt,m,*idx;

1390:   if (n < 0) SETERRQ1(comm,PETSC_ERR_ARG_OUTOFRANGE,"Must have a non-negative number of mappings, given %D",n);
1394:   for (cnt=0,i=0; i<n; i++) {
1395:     ISLocalToGlobalMappingGetSize(ltogs[i],&m);
1396:     cnt += m;
1397:   }
1398:   PetscMalloc1(cnt,&idx);
1399:   for (cnt=0,i=0; i<n; i++) {
1400:     const PetscInt *subidx;
1401:     ISLocalToGlobalMappingGetSize(ltogs[i],&m);
1402:     ISLocalToGlobalMappingGetIndices(ltogs[i],&subidx);
1403:     PetscMemcpy(&idx[cnt],subidx,m*sizeof(PetscInt));
1404:     ISLocalToGlobalMappingRestoreIndices(ltogs[i],&subidx);
1405:     cnt += m;
1406:   }
1407:   ISLocalToGlobalMappingCreate(comm,1,cnt,idx,PETSC_OWN_POINTER,ltogcat);
1408:   return(0);
1409: }
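
/*
   Usage sketch (illustrative; pair[0] and pair[1] are existing mappings):
   concatenate two mappings; per the Note above the result always has block
   size 1:

     ISLocalToGlobalMapping pair[2],cat;
     ISLocalToGlobalMappingConcatenate(PETSC_COMM_SELF,2,pair,&cat);
     ISLocalToGlobalMappingDestroy(&cat);
*/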