Actual source code: ex24.c
static char help[] = "Test that MatPartitioning and PetscPartitioner interfaces are equivalent when using PETSCPARTITIONERMATPARTITIONING\n\n";
static char FILENAME[] = "ex24.c";

#include <petscdmplex.h>
#include <petscviewerhdf5.h>

#if defined(PETSC_HAVE_PTSCOTCH)
EXTERN_C_BEGIN
#include <ptscotch.h>
EXTERN_C_END
#endif

typedef struct {
  PetscBool compare_is; /* Compare ISs and PetscSections */
  PetscBool compare_dm; /* Compare DMs */
  PetscBool tpw;        /* Use target partition weights */
  char      partitioning[64];
  char      repartitioning[64];
} AppCtx;

static PetscErrorCode ProcessOptions(MPI_Comm comm, AppCtx *options)
{
  PetscBool repartition = PETSC_TRUE;

  PetscFunctionBegin;
  options->compare_is = PETSC_FALSE;
  options->compare_dm = PETSC_FALSE;
  options->tpw        = PETSC_FALSE; /* must be initialized: PetscOptionsBool() reads it as the default */

  PetscOptionsBegin(comm, "", "Meshing Interpolation Test Options", "DMPLEX");
  PetscCall(PetscOptionsBool("-compare_is", "Compare ISs and PetscSections?", FILENAME, options->compare_is, &options->compare_is, NULL));
  PetscCall(PetscOptionsBool("-compare_dm", "Compare DMs?", FILENAME, options->compare_dm, &options->compare_dm, NULL));
  PetscCall(PetscStrncpy(options->partitioning, MATPARTITIONINGPARMETIS, sizeof(options->partitioning)));
  PetscCall(PetscOptionsString("-partitioning", "The mat partitioning type to test", "None", options->partitioning, options->partitioning, sizeof(options->partitioning), NULL));
  PetscCall(PetscOptionsBool("-repartition", "Partition again after the first partition?", FILENAME, repartition, &repartition, NULL));
  if (repartition) {
    PetscCall(PetscStrncpy(options->repartitioning, MATPARTITIONINGPARMETIS, sizeof(options->repartitioning)));
    PetscCall(PetscOptionsString("-repartitioning", "The mat partitioning type to test (second partitioning)", "None", options->repartitioning, options->repartitioning, sizeof(options->repartitioning), NULL));
  } else {
    options->repartitioning[0] = '\0';
  }
  PetscCall(PetscOptionsBool("-tpweight", "Use target partition weights", FILENAME, options->tpw, &options->tpw, NULL));
  PetscOptionsEnd();
  PetscFunctionReturn(PETSC_SUCCESS);
}
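
/* PT-Scotch draws on a global pseudo-random generator; resetting the seed before
   every partitioning call below keeps its output deterministic, so the two
   interfaces can be compared run-to-run. */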
static PetscErrorCode ScotchResetRandomSeed(void)
{
  PetscFunctionBegin;
#if defined(PETSC_HAVE_PTSCOTCH)
  SCOTCH_randomReset();
#endif
  PetscFunctionReturn(PETSC_SUCCESS);
}
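
/* Create a DM from command-line options; automatic distribution during
   DMSetFromOptions() is disabled so that this test drives partitioning and
   distribution explicitly. */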
static PetscErrorCode CreateMesh(MPI_Comm comm, AppCtx *user, DM *dm)
{
  PetscFunctionBegin;
  PetscCall(DMCreate(comm, dm));
  PetscCall(DMSetType(*dm, DMPLEX));
  PetscCall(DMPlexDistributeSetDefault(*dm, PETSC_FALSE));
  PetscCall(DMSetFromOptions(*dm));
  PetscCall(DMViewFromOptions(*dm, NULL, "-dm_view"));
  PetscFunctionReturn(PETSC_SUCCESS);
}
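
/* The test builds the same mesh twice, partitions one copy with a native
   PetscPartitioner and the other with PETSCPARTITIONERMATPARTITIONING delegating
   to the analogous MatPartitioning, then checks that the resulting ISs,
   PetscSections, and (re)distributed DMs agree. */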
int main(int argc, char **argv)
{
  MPI_Comm               comm;
  DM                     dm1, dm2, dmdist1, dmdist2;
  DMPlexInterpolatedFlag interp;
  MatPartitioning        mp;
  PetscPartitioner       part1, part2;
  AppCtx                 user;
  IS                     is1 = NULL, is2 = NULL;
  IS                     is1g, is2g;
  PetscSection           s1 = NULL, s2 = NULL, tpws = NULL;
  PetscInt               i;
  PetscBool              flg;
  PetscMPIInt            size;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, NULL, help));
  comm = PETSC_COMM_WORLD;
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscCall(ProcessOptions(comm, &user));
  PetscCall(CreateMesh(comm, &user, &dm1));
  PetscCall(CreateMesh(comm, &user, &dm2));
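
  /* Optionally build a target-partition-weight PetscSection: the chart has one
     point per rank, and the dof on point r is rank r's relative share of the mesh */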
  if (user.tpw) {
    PetscCall(PetscSectionCreate(comm, &tpws));
    PetscCall(PetscSectionSetChart(tpws, 0, size));
    for (i = 0; i < size; i++) {
      PetscInt tdof = i % 2 ? 2 * i - 1 : i + 2;
      PetscCall(PetscSectionSetDof(tpws, i, tdof));
    }
    if (size > 1) { /* test zero tpw entry */
      PetscCall(PetscSectionSetDof(tpws, 0, 0));
    }
    PetscCall(PetscSectionSetUp(tpws));
  }

  /* partition dm1 using the PetscPartitioner interface directly (type taken from -partitioning, PARMETIS by default) */
  PetscCall(ScotchResetRandomSeed());
  PetscCall(DMPlexGetPartitioner(dm1, &part1));
  PetscCall(PetscObjectSetOptionsPrefix((PetscObject)part1, "p1_"));
  PetscCall(PetscPartitionerSetType(part1, user.partitioning));
  PetscCall(PetscPartitionerSetFromOptions(part1));
  PetscCall(PetscSectionCreate(comm, &s1));
  PetscCall(PetscPartitionerDMPlexPartition(part1, dm1, tpws, s1, &is1));

  /* partition dm2 using PETSCPARTITIONERMATPARTITIONING wrapping a MatPartitioning of the same type */
  PetscCall(ScotchResetRandomSeed());
  PetscCall(DMPlexGetPartitioner(dm2, &part2));
  PetscCall(PetscObjectSetOptionsPrefix((PetscObject)part2, "p2_"));
  PetscCall(PetscPartitionerSetType(part2, PETSCPARTITIONERMATPARTITIONING));
  PetscCall(PetscPartitionerMatPartitioningGetMatPartitioning(part2, &mp));
  PetscCall(MatPartitioningSetType(mp, user.partitioning));
  PetscCall(PetscPartitionerSetFromOptions(part2));
  PetscCall(PetscSectionCreate(comm, &s2));
  PetscCall(PetscPartitionerDMPlexPartition(part2, dm2, tpws, s2, &is2));
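
  /* ISOnComm() with PETSC_USE_POINTER wraps each partition IS on the global
     communicator without copying its data, so the two can be compared collectively */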
  PetscCall(ISOnComm(is1, comm, PETSC_USE_POINTER, &is1g));
  PetscCall(ISOnComm(is2, comm, PETSC_USE_POINTER, &is2g));
  PetscCall(ISViewFromOptions(is1g, NULL, "-seq_is1_view"));
  PetscCall(ISViewFromOptions(is2g, NULL, "-seq_is2_view"));
  /* compare the two ISs */
  if (user.compare_is) {
    PetscCall(ISEqualUnsorted(is1g, is2g, &flg));
    if (!flg) PetscCall(PetscPrintf(comm, "ISs are not equal with type %s with size %d.\n", user.partitioning, size));
  }
  PetscCall(ISDestroy(&is1g));
  PetscCall(ISDestroy(&is2g));

  /* compare the two PetscSections */
  PetscCall(PetscSectionViewFromOptions(s1, NULL, "-seq_s1_view"));
  PetscCall(PetscSectionViewFromOptions(s2, NULL, "-seq_s2_view"));
  if (user.compare_is) {
    PetscCall(PetscSectionCompare(s1, s2, &flg));
    if (!flg) PetscCall(PetscPrintf(comm, "PetscSections are not equal with %s with size %d.\n", user.partitioning, size));
  }

  /* distribute both DMs */
  PetscCall(ScotchResetRandomSeed());
  PetscCall(DMPlexDistribute(dm1, 0, NULL, &dmdist1));
  PetscCall(ScotchResetRandomSeed());
  PetscCall(DMPlexDistribute(dm2, 0, NULL, &dmdist2));

  /* cleanup */
  PetscCall(PetscSectionDestroy(&tpws));
  PetscCall(PetscSectionDestroy(&s1));
  PetscCall(PetscSectionDestroy(&s2));
  PetscCall(ISDestroy(&is1));
  PetscCall(ISDestroy(&is2));
  PetscCall(DMDestroy(&dm1));
  PetscCall(DMDestroy(&dm2));

  /* if distributed DMs are NULL (sequential case), then quit */
  if (!dmdist1 && !dmdist2) {
    PetscCall(PetscFinalize());
    return 0;
  }

  PetscCall(DMViewFromOptions(dmdist1, NULL, "-dm_dist1_view"));
  PetscCall(DMViewFromOptions(dmdist2, NULL, "-dm_dist2_view"));

  /* compare the two distributed DMs */
  if (user.compare_dm) {
    PetscCall(DMPlexEqual(dmdist1, dmdist2, &flg));
    if (!flg) PetscCall(PetscPrintf(comm, "Distributed DMs are not equal %s with size %d.\n", user.partitioning, size));
  }

  /* if repartitioning is disabled, then quit */
  if (user.repartitioning[0] == '\0') {
    PetscCall(DMDestroy(&dmdist1));
    PetscCall(DMDestroy(&dmdist2));
    PetscCall(PetscFinalize());
    return 0;
  }
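
  /* build a fresh set of target partition weights for the repartitioning step */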
  if (user.tpw) {
    PetscCall(PetscSectionCreate(comm, &tpws));
    PetscCall(PetscSectionSetChart(tpws, 0, size));
    for (i = 0; i < size; i++) {
      PetscInt tdof = i % 2 ? i + 1 : size - i;
      PetscCall(PetscSectionSetDof(tpws, i, tdof));
    }
    PetscCall(PetscSectionSetUp(tpws));
  }

  /* repartition distributed DM dmdist1 */
  PetscCall(ScotchResetRandomSeed());
  PetscCall(DMPlexGetPartitioner(dmdist1, &part1));
  PetscCall(PetscObjectSetOptionsPrefix((PetscObject)part1, "dp1_"));
  PetscCall(PetscPartitionerSetType(part1, user.repartitioning));
  PetscCall(PetscPartitionerSetFromOptions(part1));
  PetscCall(PetscSectionCreate(comm, &s1));
  PetscCall(PetscPartitionerDMPlexPartition(part1, dmdist1, tpws, s1, &is1));

  /* repartition distributed DM dmdist2 */
  PetscCall(ScotchResetRandomSeed());
  PetscCall(DMPlexGetPartitioner(dmdist2, &part2));
  PetscCall(PetscObjectSetOptionsPrefix((PetscObject)part2, "dp2_"));
  PetscCall(PetscPartitionerSetType(part2, PETSCPARTITIONERMATPARTITIONING));
  PetscCall(PetscPartitionerMatPartitioningGetMatPartitioning(part2, &mp));
  PetscCall(MatPartitioningSetType(mp, user.repartitioning));
  PetscCall(PetscPartitionerSetFromOptions(part2));
  PetscCall(PetscSectionCreate(comm, &s2));
  PetscCall(PetscPartitionerDMPlexPartition(part2, dmdist2, tpws, s2, &is2));

  /* compare the two ISs */
  PetscCall(ISOnComm(is1, comm, PETSC_USE_POINTER, &is1g));
  PetscCall(ISOnComm(is2, comm, PETSC_USE_POINTER, &is2g));
  PetscCall(ISViewFromOptions(is1g, NULL, "-dist_is1_view"));
  PetscCall(ISViewFromOptions(is2g, NULL, "-dist_is2_view"));
  if (user.compare_is) {
    PetscCall(ISEqualUnsorted(is1g, is2g, &flg));
    if (!flg) PetscCall(PetscPrintf(comm, "Distributed ISs are not equal, with %s with size %d.\n", user.repartitioning, size));
  }
  PetscCall(ISDestroy(&is1g));
  PetscCall(ISDestroy(&is2g));

  /* compare the two PetscSections */
  PetscCall(PetscSectionViewFromOptions(s1, NULL, "-dist_s1_view"));
  PetscCall(PetscSectionViewFromOptions(s2, NULL, "-dist_s2_view"));
  if (user.compare_is) {
    PetscCall(PetscSectionCompare(s1, s2, &flg));
    if (!flg) PetscCall(PetscPrintf(comm, "Distributed PetscSections are not equal, with %s with size %d.\n", user.repartitioning, size));
  }

  /* redistribute both distributed DMs */
  PetscCall(ScotchResetRandomSeed());
  PetscCall(DMPlexDistribute(dmdist1, 0, NULL, &dm1));
  PetscCall(ScotchResetRandomSeed());
  PetscCall(DMPlexDistribute(dmdist2, 0, NULL, &dm2));

  /* compare the two redistributed DMs (only checked for non-interpolated meshes) */
  PetscCall(DMPlexIsInterpolated(dm1, &interp));
  if (interp == DMPLEX_INTERPOLATED_NONE) {
    PetscCall(DMPlexEqual(dm1, dm2, &flg));
    if (!flg) PetscCall(PetscPrintf(comm, "Redistributed DMs are not equal, with %s with size %d.\n", user.repartitioning, size));
  }

  /* cleanup */
  PetscCall(PetscSectionDestroy(&tpws));
  PetscCall(PetscSectionDestroy(&s1));
  PetscCall(PetscSectionDestroy(&s2));
  PetscCall(ISDestroy(&is1));
  PetscCall(ISDestroy(&is2));
  PetscCall(DMDestroy(&dm1));
  PetscCall(DMDestroy(&dm2));
  PetscCall(DMDestroy(&dmdist1));
  PetscCall(DMDestroy(&dmdist2));
  PetscCall(PetscFinalize());
  return 0;
}
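
/* Example manual invocation (illustrative only; assumes a PETSc build with ParMETIS
   and PT-Scotch, and uses a generated box mesh):

     mpiexec -n 4 ./ex24 -dm_plex_dim 3 -dm_plex_box_faces 2,2,2 \
       -partitioning parmetis -repartitioning ptscotch -compare_is -compare_dm

   Success is silent: the "not equal" messages above print only when the two
   interfaces disagree. */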

/*TEST

  test:
    # partition sequential mesh loaded from Exodus file
    suffix: 0
    nsize: {{1 2 3 4 8}}
    requires: chaco parmetis ptscotch exodusii
    args: -dm_plex_filename ${PETSC_DIR}/share/petsc/datafiles/meshes/blockcylinder-50.exo
    args: -partitioning {{chaco parmetis ptscotch}} -repartitioning {{parmetis ptscotch}} -tpweight {{0 1}}

  test:
    # repartition mesh already partitioned naively by MED loader
    suffix: 1
    nsize: {{1 2 3 4 8}}
    TODO: MED
    requires: parmetis ptscotch med
    args: -dm_plex_filename ${PETSC_DIR}/share/petsc/datafiles/meshes/cylinder.med
    args: -repartition 0 -partitioning {{parmetis ptscotch}}

  test:
    # partition mesh generated by ctetgen using scotch, then repartition with scotch, diff view
    suffix: 3
    nsize: 4
    requires: ptscotch ctetgen
    args: -dm_plex_dim 3 -dm_plex_box_faces 2,3,2 -partitioning ptscotch -repartitioning ptscotch
    args: -p1_petscpartitioner_view -p2_petscpartitioner_view -dp1_petscpartitioner_view -dp2_petscpartitioner_view -tpweight {{0 1}}

  test:
    # partition mesh generated by ctetgen using partitioners supported both by MatPartitioning and PetscPartitioner
    suffix: 4
    nsize: {{1 2 3 4 8}}
    requires: chaco parmetis ptscotch ctetgen
    args: -dm_plex_dim 3 -dm_plex_box_faces {{2,3,4 5,4,3 7,11,5}} -partitioning {{chaco parmetis ptscotch}} -repartitioning {{parmetis ptscotch}} -tpweight {{0 1}}

TEST*/