/* mpiu.c */
#include "petsc.h"
6: PetscErrorCode PetscSequentialPhaseBegin_Private(MPI_Comm comm,int ng)
7: {
9: PetscMPIInt rank,size,tag = 0;
10: MPI_Status status;
13: MPI_Comm_size(comm,&size);
14: if (size == 1) return(0);
15: MPI_Comm_rank(comm,&rank);
16: if (rank) {
17: MPI_Recv(0,0,MPI_INT,rank-1,tag,comm,&status);
18: }
19: /* Send to the next process in the group unless we are the last process */
20: if ((rank % ng) < ng - 1 && rank != size - 1) {
21: MPI_Send(0,0,MPI_INT,rank + 1,tag,comm);
22: }
23: return(0);
24: }
28: PetscErrorCode PetscSequentialPhaseEnd_Private(MPI_Comm comm,int ng)
29: {
31: PetscMPIInt rank,size,tag = 0;
32: MPI_Status status;
35: MPI_Comm_rank(comm,&rank);
36: MPI_Comm_size(comm,&size);
37: if (size == 1) return(0);
39: /* Send to the first process in the next group */
40: if ((rank % ng) == ng - 1 || rank == size - 1) {
41: MPI_Send(0,0,MPI_INT,(rank + 1) % size,tag,comm);
42: }
43: if (!rank) {
44: MPI_Recv(0,0,MPI_INT,size-1,tag,comm,&status);
45: }
46: return(0);
47: }
49: /* ---------------------------------------------------------------------*/
50: /*
51: The variable Petsc_Seq_keyval is used to indicate an MPI attribute that
52: is attached to a communicator that manages the sequential phase code below.
53: */
54: static int Petsc_Seq_keyval = MPI_KEYVAL_INVALID;
58: /*@C
59: PetscSequentialPhaseBegin - Begins a sequential section of code.
61: Collective on MPI_Comm
63: Input Parameters:
64: + comm - Communicator to sequentialize.
65: - ng - Number in processor group. This many processes are allowed to execute
66: at the same time (usually 1)
68: Level: intermediate
70: Notes:
71: PetscSequentialPhaseBegin() and PetscSequentialPhaseEnd() provide a
72: way to force a section of code to be executed by the processes in
73: rank order. Typically, this is done with
74: .vb
75: PetscSequentialPhaseBegin(comm, 1);
76: <code to be executed sequentially>
77: PetscSequentialPhaseEnd(comm, 1);
78: .ve
80: Often, the sequential code contains output statements (e.g., printf) to
81: be executed. Note that you may need to flush the I/O buffers before
82: calling PetscSequentialPhaseEnd(). Also, note that some systems do
83: not propagate I/O in any order to the controlling terminal (in other words,
84: even if you flush the output, you may not get the data in the order
85: that you want).
87: .seealso: PetscSequentialPhaseEnd()
89: Concepts: sequential stage
91: @*/
92: PetscErrorCode PetscSequentialPhaseBegin(MPI_Comm comm,int ng)
93: {
95: PetscMPIInt size;
96: MPI_Comm local_comm,*addr_local_comm;
99: MPI_Comm_size(comm,&size);
100: if (size == 1) return(0);
102: /* Get the private communicator for the sequential operations */
103: if (Petsc_Seq_keyval == MPI_KEYVAL_INVALID) {
104: MPI_Keyval_create(MPI_NULL_COPY_FN,MPI_NULL_DELETE_FN,&Petsc_Seq_keyval,0);
105: }
107: MPI_Comm_dup(comm,&local_comm);
108: PetscMalloc(sizeof(MPI_Comm),&addr_local_comm);
109: *addr_local_comm = local_comm;
110: MPI_Attr_put(comm,Petsc_Seq_keyval,(void*)addr_local_comm);
111: PetscSequentialPhaseBegin_Private(local_comm,ng);
112: return(0);
113: }
117: /*@C
118: PetscSequentialPhaseEnd - Ends a sequential section of code.
120: Collective on MPI_Comm
122: Input Parameters:
123: + comm - Communicator to sequentialize.
124: - ng - Number in processor group. This many processes are allowed to execute
125: at the same time (usually 1)
127: Level: intermediate
129: Notes:
130: See PetscSequentialPhaseBegin() for more details.
132: .seealso: PetscSequentialPhaseBegin()
134: Concepts: sequential stage
136: @*/
137: PetscErrorCode PetscSequentialPhaseEnd(MPI_Comm comm,int ng)
138: {
140: PetscMPIInt size,flag;
141: MPI_Comm local_comm,*addr_local_comm;
144: MPI_Comm_size(comm,&size);
145: if (size == 1) return(0);
147: MPI_Attr_get(comm,Petsc_Seq_keyval,(void **)&addr_local_comm,&flag);
148: if (!flag) {
149: SETERRQ(PETSC_ERR_ARG_INCOMP,"Wrong MPI communicator; must pass in one used with PetscSequentialPhaseBegin()");
150: }
151: local_comm = *addr_local_comm;
153: PetscSequentialPhaseEnd_Private(local_comm,ng);
155: PetscFree(addr_local_comm);
156: MPI_Comm_free(&local_comm);
157: MPI_Attr_delete(comm,Petsc_Seq_keyval);
158: return(0);
159: }