Actual source code: shvec.c

petsc-3.4.5 2014-06-29
/*
   This file contains routines for parallel vector operations that use shared memory
 */
#include <../src/vec/vec/impls/mpi/pvecimpl.h>   /*I  "petscvec.h"   I*/

#if defined(PETSC_USE_SHARED_MEMORY)

extern PetscErrorCode PetscSharedMalloc(MPI_Comm,PetscInt,PetscInt,void**);

PetscErrorCode VecDuplicate_Shared(Vec win,Vec *v)
{
  Vec_MPI        *w = (Vec_MPI*)win->data;
  PetscScalar    *array;

  /* The first processor allocates the entire array and sends its address to the others */
  PetscSharedMalloc(PetscObjectComm((PetscObject)win),win->map->n*sizeof(PetscScalar),win->map->N*sizeof(PetscScalar),(void**)&array);

  VecCreate(PetscObjectComm((PetscObject)win),v);
  VecSetSizes(*v,win->map->n,win->map->N);
  VecCreate_MPI_Private(*v,PETSC_FALSE,w->nghost,array);
  PetscLayoutReference(win->map,&(*v)->map);

  /* New vector should inherit stashing property of parent */
  (*v)->stash.donotstash   = win->stash.donotstash;
  (*v)->stash.ignorenegidx = win->stash.ignorenegidx;

  PetscObjectListDuplicate(((PetscObject)win)->olist,&((PetscObject)*v)->olist);
  PetscFunctionListDuplicate(((PetscObject)win)->qlist,&((PetscObject)*v)->qlist);

  (*v)->ops->duplicate = VecDuplicate_Shared;
  (*v)->bstash.bs      = win->bstash.bs;
  return(0);
}
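/*
   A minimal usage sketch (not part of the library): because the duplicate hook is
   installed above, VecDuplicate() on a shared vector dispatches here, so the copy
   is also backed by shared memory.

     Vec x,y;
     VecCreateShared(PETSC_COMM_WORLD,PETSC_DECIDE,100,&x);
     VecDuplicate(x,&y);       (dispatches to VecDuplicate_Shared())
     VecDestroy(&y);
     VecDestroy(&x);
*/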


PETSC_EXTERN PetscErrorCode VecCreate_Shared(Vec vv)
{
  PetscScalar    *array;

  PetscSplitOwnership(PetscObjectComm((PetscObject)vv),&vv->map->n,&vv->map->N);
  PetscSharedMalloc(PetscObjectComm((PetscObject)vv),vv->map->n*sizeof(PetscScalar),vv->map->N*sizeof(PetscScalar),(void**)&array);

  VecCreate_MPI_Private(vv,PETSC_FALSE,0,array);
  vv->ops->duplicate = VecDuplicate_Shared;
  return(0);
}

/* ----------------------------------------------------------------------------------------
     Code to manage shared memory allocation using standard Unix shared memory
*/
#include <petscsys.h>
#if defined(PETSC_HAVE_PWD_H)
#include <pwd.h>
#endif
#include <ctype.h>
#include <sys/stat.h>
#if defined(PETSC_HAVE_UNISTD_H)
#include <unistd.h>
#endif
#if defined(PETSC_HAVE_SYS_UTSNAME_H)
#include <sys/utsname.h>
#endif
#include <fcntl.h>
#include <time.h>
#if defined(PETSC_HAVE_SYS_SYSTEMINFO_H)
#include <sys/systeminfo.h>
#endif
#include <sys/shm.h>
#include <sys/mman.h>


static PetscMPIInt Petsc_Shared_keyval = MPI_KEYVAL_INVALID;

/*
   Private routine to delete internal storage when a communicator is freed.
   This is called by MPI, not by users.

   The binding for the first argument changed from MPI 1.0 to 1.1; in 1.0
   it was MPI_Comm *comm.
*/
static PetscErrorCode Petsc_DeleteShared(MPI_Comm comm,PetscInt keyval,void *attr_val,void *extra_state)
{
  PetscFree(attr_val);
  PetscFunctionReturn(MPI_SUCCESS);
}
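/*
   A hedged sketch (not shown in this listing): a delete callback of this form is
   normally registered once per process so that MPI invokes it when the communicator
   carrying the attribute is freed, filling in the keyval declared above, e.g.

     MPI_Comm_create_keyval(MPI_COMM_NULL_COPY_FN,
                            (MPI_Comm_delete_attr_function*)Petsc_DeleteShared,
                            &Petsc_Shared_keyval,NULL);

   The cast assumes PetscErrorCode and PetscInt are plain ints (the default PETSc
   configuration); no such registration appears in this file.
*/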

/*
    This routine is still incomplete and needs work.

    For this to work on Apple Mac OS X you will likely need to add something like
    the following to the file /etc/sysctl.conf:

      kern.sysv.shmmax=67108864
      kern.sysv.shmmin=1
      kern.sysv.shmmni=32
      kern.sysv.shmseg=512
      kern.sysv.shmall=1024

    This does not currently free the shared memory after the program runs. Use the
    Unix command ipcs to list the shared memory segments in use and ipcrm to remove them.
*/
PetscErrorCode PetscSharedMalloc(MPI_Comm comm,PetscInt llen,PetscInt len,void **result)
{
  PetscInt       shift;
  PetscMPIInt    rank;
  int            id,key = 0;
  char           *value;

  *result = 0;

  /* this rank's byte offset is the exclusive prefix sum of the local lengths (MPIU_INT matches PetscInt) */
  MPI_Scan(&llen,&shift,1,MPIU_INT,MPI_SUM,comm);
  shift -= llen;

  MPI_Comm_rank(comm,&rank);
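  /*
     Note: key is 0, i.e. IPC_PRIVATE, so each shmget() call below creates a brand
     new segment rather than attaching every rank to a common one, and the id
     obtained on rank 0 is never communicated to the other ranks. For the ranks to
     genuinely share the array they would need a common key (e.g. from ftok()) or a
     broadcast of id; this is part of why the routine is marked incomplete above.
  */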
  if (!rank) {
    id = shmget(key,len,0666 | IPC_CREAT);
    if (id == -1) {
      perror("Unable to malloc shared memory");
      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"Unable to malloc shared memory");
    }
  } else {
    id = shmget(key,len,0666);
    if (id == -1) {
      perror("Unable to malloc shared memory");
      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"Unable to malloc shared memory");
    }
  }
  value = shmat(id,(void*)0,0);
  if (value == (char*)-1) {
    perror("Unable to access shared memory allocated");
    SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"Unable to access shared memory allocated");
  }
  *result = (void*)(value + shift);
  return(0);
}
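/*
   A sketch (not part of the library) of the intended addressing: each rank attaches
   to the segment and then offsets its own attach address by the exclusive prefix
   sum of the local byte lengths, so a caller that wants N PetscScalars split as n
   per rank would do

     PetscScalar *slice;
     PetscSharedMalloc(comm,n*sizeof(PetscScalar),N*sizeof(PetscScalar),(void**)&slice);

   after which slice[0] .. slice[n-1] is that rank's portion of the global array,
   exactly as VecDuplicate_Shared() and VecCreate_Shared() use it above.
*/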

#else

PETSC_EXTERN PetscErrorCode VecCreate_Seq(Vec);

PETSC_EXTERN PetscErrorCode VecCreate_Shared(Vec vv)
{
  PetscMPIInt    size;

  MPI_Comm_size(PetscObjectComm((PetscObject)vv),&size);
  if (size > 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP_SYS,"Not supported for shared memory vector objects on this machine");
  VecCreate_Seq(vv);
  return(0);
}

#endif

/*@
   VecCreateShared - Creates a parallel vector that uses shared memory.

   Input Parameters:
+  comm - the MPI communicator to use
.  n - local vector length (or PETSC_DECIDE to have it calculated if N is given)
-  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)

   Output Parameter:
.  v - the vector

   Collective on MPI_Comm

   Notes:
   Currently VecCreateShared() is available only on the SGI; otherwise,
   this routine is the same as VecCreateMPI().

   Use VecDuplicate() or VecDuplicateVecs() to form additional vectors of the
   same type as an existing vector.

   Level: advanced

   Concepts: vectors^creating with shared memory

.seealso: VecCreateSeq(), VecCreate(), VecCreateMPI(), VecDuplicate(), VecDuplicateVecs(),
          VecCreateGhost(), VecCreateMPIWithArray(), VecCreateGhostWithArray()

@*/
PetscErrorCode  VecCreateShared(MPI_Comm comm,PetscInt n,PetscInt N,Vec *v)
{
  VecCreate(comm,v);
  VecSetSizes(*v,n,N);
  VecSetType(*v,VECSHARED);
  return(0);
}
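/*
   A minimal usage sketch (not part of the library):

     Vec x;
     VecCreateShared(PETSC_COMM_WORLD,PETSC_DECIDE,100,&x);
     VecSet(x,1.0);
     VecView(x,PETSC_VIEWER_STDOUT_WORLD);
     VecDestroy(&x);

   On builds without PETSC_USE_SHARED_MEMORY this reaches the fallback
   VecCreate_Shared() above, which errors for more than one process and otherwise
   creates an ordinary sequential vector.
*/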