Actual source code: shvec.c

petsc-3.3-p7 2013-05-11
/*
   This file contains routines for Parallel vector operations that use shared memory
 */
#include <../src/vec/vec/impls/mpi/pvecimpl.h>   /*I  "petscvec.h"   I*/

#if defined(PETSC_USE_SHARED_MEMORY)

extern PetscErrorCode PetscSharedMalloc(MPI_Comm,PetscInt,PetscInt,void**);

PetscErrorCode VecDuplicate_Shared(Vec win,Vec *v)
{
  Vec_MPI        *w = (Vec_MPI *)win->data;
  PetscScalar    *array;

  /* first processor allocates entire array and sends its address to the others */
  PetscSharedMalloc(((PetscObject)win)->comm,win->map->n*sizeof(PetscScalar),win->map->N*sizeof(PetscScalar),(void**)&array);

  VecCreate(((PetscObject)win)->comm,v);
  VecSetSizes(*v,win->map->n,win->map->N);
  VecCreate_MPI_Private(*v,PETSC_FALSE,w->nghost,array);
  PetscLayoutReference(win->map,&(*v)->map);

  /* New vector should inherit stashing property of parent */
  (*v)->stash.donotstash   = win->stash.donotstash;
  (*v)->stash.ignorenegidx = win->stash.ignorenegidx;

  PetscOListDuplicate(((PetscObject)win)->olist,&((PetscObject)*v)->olist);
  PetscFListDuplicate(((PetscObject)win)->qlist,&((PetscObject)*v)->qlist);

  (*v)->ops->duplicate = VecDuplicate_Shared;
  (*v)->bstash.bs = win->bstash.bs;
  return(0);
}


EXTERN_C_BEGIN
PetscErrorCode  VecCreate_Shared(Vec vv)
{
  PetscScalar    *array;

  PetscSplitOwnership(((PetscObject)vv)->comm,&vv->map->n,&vv->map->N);
  PetscSharedMalloc(((PetscObject)vv)->comm,vv->map->n*sizeof(PetscScalar),vv->map->N*sizeof(PetscScalar),(void**)&array);

  VecCreate_MPI_Private(vv,PETSC_FALSE,0,array);
  vv->ops->duplicate = VecDuplicate_Shared;

  return(0);
}
EXTERN_C_END


/* ----------------------------------------------------------------------------------------
     Code to manage shared memory allocation using standard Unix shared memory
*/
#include <petscsys.h>
#if defined(PETSC_HAVE_PWD_H)
#include <pwd.h>
#endif
#include <ctype.h>
#include <sys/types.h>
#include <sys/stat.h>
#if defined(PETSC_HAVE_UNISTD_H)
#include <unistd.h>
#endif
#if defined(PETSC_HAVE_STDLIB_H)
#include <stdlib.h>
#endif
#if defined(PETSC_HAVE_SYS_PARAM_H)
#include <sys/param.h>
#endif
#if defined(PETSC_HAVE_SYS_UTSNAME_H)
#include <sys/utsname.h>
#endif
#include <fcntl.h>
#include <time.h>
#if defined(PETSC_HAVE_SYS_SYSTEMINFO_H)
#include <sys/systeminfo.h>
#endif
#include <sys/shm.h>
#include <sys/mman.h>


static PetscMPIInt Petsc_Shared_keyval = MPI_KEYVAL_INVALID;

/*
   Private routine to delete internal storage when a communicator is freed.
   This is called by MPI, not by users.

   The binding for the first argument changed from MPI 1.0 to 1.1; in 1.0
   it was MPI_Comm *comm.
*/
static PetscErrorCode Petsc_DeleteShared(MPI_Comm comm,PetscInt keyval,void* attr_val,void* extra_state)
{
  PetscFree(attr_val);
  PetscFunctionReturn(MPI_SUCCESS);
}
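
/*
   Illustrative sketch (not part of the original shvec.c): how a delete callback such as
   Petsc_DeleteShared() is typically attached to a communicator through an MPI attribute
   keyval.  The function name PetscSharedRegisterKeyval_Sketch() is hypothetical; the cast
   is needed only because Petsc_DeleteShared() is declared with PETSc types rather than the
   exact MPI_Comm_delete_attr_function signature.
*/
static PetscErrorCode PetscSharedRegisterKeyval_Sketch(void)
{
  if (Petsc_Shared_keyval == MPI_KEYVAL_INVALID) {
    /* create the keyval once; MPI will call Petsc_DeleteShared() when a communicator carrying this attribute is freed */
    MPI_Comm_create_keyval(MPI_COMM_NULL_COPY_FN,(MPI_Comm_delete_attr_function*)Petsc_DeleteShared,&Petsc_Shared_keyval,(void*)0);
  }
  return(0);
}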

/*
    This routine is still incomplete and needs work.

    For this to work on Apple Mac OS X you will likely need to add something like the following to the file /etc/sysctl.conf
cat /etc/sysctl.conf
kern.sysv.shmmax=67108864
kern.sysv.shmmin=1
kern.sysv.shmmni=32
kern.sysv.shmseg=512
kern.sysv.shmall=1024

  This routine does not currently free the shared memory after the program runs. Use the Unix command ipcs to see the shared
  memory segments in use and ipcrm to remove them; an illustrative cleanup sketch follows this routine.
*/
PetscErrorCode PetscSharedMalloc(MPI_Comm comm,PetscInt llen,PetscInt len,void **result)
{
  PetscInt       shift;
  PetscMPIInt    rank,flag;
  int            *arena,id,key = 0;
  char           *value;

  *result = 0;

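  /* MPI_Scan() forms the inclusive prefix sum of the local lengths; subtracting llen below leaves this process's byte offset into the shared block */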
  MPI_Scan(&llen,&shift,1,MPI_INT,MPI_SUM,comm);
  shift -= llen;

  MPI_Comm_rank(comm,&rank);
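  /* The first process creates the System V segment; the remaining processes try to attach to the same key.
     (Since key is 0, i.e. IPC_PRIVATE, each shmget() call actually creates a private segment, which is part of
     why this routine is marked as incomplete above.) */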
  if (!rank) {
    id = shmget(key,len, 0666 |IPC_CREAT);
    if (id == -1) {
      perror("Unable to malloc shared memory");
      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"Unable to malloc shared memory");
    }
  } else {
    id = shmget(key,len, 0666);
    if (id == -1) {
      perror("Unable to malloc shared memory");
      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"Unable to malloc shared memory");
    }
  }
  value = shmat(id,(void*)0,0);
  if (value == (char*)-1) {
    perror("Unable to access shared memory allocated");
    SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"Unable to access shared memory allocated");
  }
  *result = (void*) (value + shift);

  return(0);
}
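
/*
   Illustrative sketch (not part of the original shvec.c): releasing the System V segment
   that PetscSharedMalloc() leaves behind, since, as noted above, it is not freed when the
   program ends.  The names PetscSharedFree_Sketch, addr, and id are hypothetical: addr must
   be the base address returned by shmat() (not the shifted pointer placed in *result) and id
   the identifier returned by shmget(); typically only the rank that created the segment
   would mark it for removal with IPC_RMID.
*/
static PetscErrorCode PetscSharedFree_Sketch(void *addr,int id)
{
  if (shmdt(addr) == -1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"Unable to detach shared memory");
  if (shmctl(id,IPC_RMID,NULL) == -1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"Unable to remove shared memory segment");
  return(0);
}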

#else

EXTERN_C_BEGIN
extern PetscErrorCode  VecCreate_Seq(Vec);
EXTERN_C_END

EXTERN_C_BEGIN
PetscErrorCode  VecCreate_Shared(Vec vv)
{
  PetscMPIInt    size;

  MPI_Comm_size(((PetscObject)vv)->comm,&size);
  if (size > 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP_SYS,"Not supported for shared memory vector objects on this machine");
  VecCreate_Seq(vv);
  return(0);
}
EXTERN_C_END

#endif

/*@
   VecCreateShared - Creates a parallel vector that uses shared memory.

   Input Parameters:
.  comm - the MPI communicator to use
.  n - local vector length (or PETSC_DECIDE to have it calculated if N is given)
.  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)

   Output Parameter:
.  vv - the vector

   Collective on MPI_Comm

   Notes:
   Currently VecCreateShared() is available only on the SGI; otherwise,
   this routine is the same as VecCreateMPI().

   Use VecDuplicate() or VecDuplicateVecs() to form additional vectors of the
   same type as an existing vector.

   Level: advanced

   Concepts: vectors^creating with shared memory

.seealso: VecCreateSeq(), VecCreate(), VecCreateMPI(), VecDuplicate(), VecDuplicateVecs(),
          VecCreateGhost(), VecCreateMPIWithArray(), VecCreateGhostWithArray()

@*/
PetscErrorCode  VecCreateShared(MPI_Comm comm,PetscInt n,PetscInt N,Vec *v)
{
  VecCreate(comm,v);
  VecSetSizes(*v,n,N);
  VecSetType(*v,VECSHARED);
  return(0);
}
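
/*
   Example usage (illustrative, not part of the original shvec.c): create a shared-memory
   vector with 100 global entries, let PETSc decide the local sizes, fill it, and destroy it.
   The function name VecCreateSharedExample() is hypothetical.
*/
static PetscErrorCode VecCreateSharedExample(void)
{
  Vec x;

  VecCreateShared(PETSC_COMM_WORLD,PETSC_DECIDE,100,&x);
  VecSet(x,1.0);
  VecDestroy(&x);
  return(0);
}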