Actual source code: ex55.c
static char help[] = "Load and save the mesh and fields to HDF5 and ExodusII\n\n";

#include <petsc/private/dmpleximpl.h>
#include <petscviewerhdf5.h>
#include <petscsf.h>
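
/* Illustrative usage (all options are defined in ProcessOptions() below; mesh.exo is a placeholder filename):
     mpiexec -n 2 ./ex55 -filename mesh.exo -format hdf5_petsc -compare -compare_labels
   loads mesh.exo, writes it to ex55.h5 in PETSC_VIEWER_HDF5_PETSC format, reads it back,
   and checks that the reloaded mesh and its labels match the original. */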

typedef struct {
  PetscBool         compare;                      /* Compare the meshes using DMPlexEqual() */
  PetscBool         compare_labels;               /* Compare labels in the meshes using DMCompareLabels() */
  PetscBool         distribute;                   /* Distribute the mesh */
  PetscBool         interpolate;                  /* Generate intermediate mesh elements */
  char              fname[PETSC_MAX_PATH_LEN];    /* Mesh filename */
  char              ofname[PETSC_MAX_PATH_LEN];   /* Output mesh filename */
  char              meshname[PETSC_MAX_PATH_LEN]; /* Mesh name */
  PetscViewerFormat format;                       /* Format to write and read */
  PetscBool         second_write_read;            /* Write and read for the 2nd time */
  PetscBool         use_low_level_functions;      /* Use low level functions for viewing and loading */
} AppCtx;

static PetscErrorCode ProcessOptions(MPI_Comm comm, AppCtx *options)
{
  PetscFunctionBeginUser;
  options->compare                 = PETSC_FALSE;
  options->compare_labels          = PETSC_FALSE;
  options->distribute              = PETSC_TRUE;
  options->interpolate             = PETSC_FALSE;
  options->fname[0]                = '\0';
  options->meshname[0]             = '\0';
  options->format                  = PETSC_VIEWER_DEFAULT;
  options->second_write_read       = PETSC_FALSE;
  options->use_low_level_functions = PETSC_FALSE;
  PetscCall(PetscStrncpy(options->ofname, "ex55.h5", sizeof(options->ofname)));

  PetscOptionsBegin(comm, "", "Meshing Problem Options", "DMPLEX");
  PetscCall(PetscOptionsBool("-compare", "Compare the meshes using DMPlexEqual()", "ex55.c", options->compare, &options->compare, NULL));
  PetscCall(PetscOptionsBool("-compare_labels", "Compare labels in the meshes using DMCompareLabels()", "ex55.c", options->compare_labels, &options->compare_labels, NULL));
  PetscCall(PetscOptionsBool("-distribute", "Distribute the mesh", "ex55.c", options->distribute, &options->distribute, NULL));
  PetscCall(PetscOptionsBool("-interpolate", "Generate intermediate mesh elements", "ex55.c", options->interpolate, &options->interpolate, NULL));
  PetscCall(PetscOptionsString("-filename", "The mesh file", "ex55.c", options->fname, options->fname, sizeof(options->fname), NULL));
  PetscCall(PetscOptionsString("-ofilename", "The output mesh file", "ex55.c", options->ofname, options->ofname, sizeof(options->ofname), NULL));
41: PetscCall(PetscOptionsString("-meshname", "The mesh file", "ex55.c", options->meshname, options->meshname, sizeof(options->meshname), NULL));
  PetscCall(PetscOptionsEnum("-format", "Format to write and read", "ex55.c", PetscViewerFormats, (PetscEnum)options->format, (PetscEnum *)&options->format, NULL));
  PetscCall(PetscOptionsBool("-second_write_read", "Write and read for the 2nd time", "ex55.c", options->second_write_read, &options->second_write_read, NULL));
  PetscCall(PetscOptionsBool("-use_low_level_functions", "Use low level functions for viewing and loading", "ex55.c", options->use_low_level_functions, &options->use_low_level_functions, NULL));
  PetscOptionsEnd();
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode CheckDistributed(DM dm, PetscBool expectedDistributed)
{
  PetscMPIInt size;
  PetscBool   distributed;
  const char  YES[] = "DISTRIBUTED";
  const char  NO[]  = "NOT DISTRIBUTED";

  PetscFunctionBeginUser;
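  /* The distribution check is only meaningful on two or more ranks, so skip it for serial runs */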
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)dm), &size));
  if (size < 2) PetscFunctionReturn(PETSC_SUCCESS);
  PetscCall(DMPlexIsDistributed(dm, &distributed));
  PetscCheck(distributed == expectedDistributed, PetscObjectComm((PetscObject)dm), PETSC_ERR_PLIB, "Expected the DM to be %s but it is %s", expectedDistributed ? YES : NO, distributed ? YES : NO);
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode CheckInterpolated(DM dm, PetscBool expectedInterpolated)
{
  DMPlexInterpolatedFlag iflg;
  PetscBool              interpolated;
  const char             YES[] = "INTERPOLATED";
  const char             NO[]  = "NOT INTERPOLATED";

  PetscFunctionBeginUser;
  PetscCall(DMPlexIsInterpolatedCollective(dm, &iflg));
  interpolated = (PetscBool)(iflg == DMPLEX_INTERPOLATED_FULL);
  PetscCheck(interpolated == expectedInterpolated, PetscObjectComm((PetscObject)dm), PETSC_ERR_PLIB, "Expected the DM to be %s but it is %s", expectedInterpolated ? YES : NO, interpolated ? YES : NO);
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode CheckDistributedInterpolated(DM dm, PetscViewer v, AppCtx *user)
{
  PetscViewerFormat format;
  PetscBool         distributed, interpolated;

  PetscFunctionBeginUser;
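  /* The expected state of the loaded DM depends on the viewer format: XDMF/VIZ loading
     yields a distributed, uninterpolated mesh, while for the native PETSc formats the
     mesh loads distributed only with storage version 3.0.0 or later, and interpolation
     follows the -interpolate option */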
  PetscCall(PetscViewerGetFormat(v, &format));
  switch (format) {
  case PETSC_VIEWER_HDF5_XDMF:
  case PETSC_VIEWER_HDF5_VIZ: {
    distributed  = PETSC_TRUE;
    interpolated = PETSC_FALSE;
  } break;
  case PETSC_VIEWER_HDF5_PETSC:
  case PETSC_VIEWER_DEFAULT:
  case PETSC_VIEWER_NATIVE: {
    DMPlexStorageVersion version;

    PetscCall(PetscViewerHDF5GetDMPlexStorageVersionReading(v, &version));
    distributed  = (PetscBool)(version->major >= 3);
    interpolated = user->interpolate;
  } break;
  default: {
    distributed  = PETSC_FALSE;
    interpolated = user->interpolate;
  }
  }
  PetscCall(CheckDistributed(dm, distributed));
  PetscCall(CheckInterpolated(dm, interpolated));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode DMPlexWriteAndReadHDF5(DM dm, const char filename[], const char prefix[], AppCtx *user, DM *dm_new)
{
  DM          dmnew;
  const char  savedName[]  = "Mesh";
  const char  loadedName[] = "Mesh_new";
  PetscViewer v;

  PetscFunctionBeginUser;
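  /* Write phase: save dm under the name "Mesh", either with the high-level DMView()
     or with the low-level DMPlexTopologyView()/DMPlexCoordinatesView()/DMPlexLabelsView() triple */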
  PetscCall(PetscViewerHDF5Open(PetscObjectComm((PetscObject)dm), filename, FILE_MODE_WRITE, &v));
  PetscCall(PetscViewerPushFormat(v, user->format));
  PetscCall(PetscObjectSetName((PetscObject)dm, savedName));
  if (user->use_low_level_functions) {
    PetscCall(DMPlexTopologyView(dm, v));
    PetscCall(DMPlexCoordinatesView(dm, v));
    PetscCall(DMPlexLabelsView(dm, v));
  } else {
    PetscCall(DMView(dm, v));
  }
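  /* Read phase: reuse the same viewer switched to read mode, and load the file into a fresh DM.
     The new DM must carry the saved name "Mesh" so the corresponding group is found in the file. */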
  PetscCall(PetscViewerFileSetMode(v, FILE_MODE_READ));
  PetscCall(DMCreate(PETSC_COMM_WORLD, &dmnew));
  PetscCall(DMSetType(dmnew, DMPLEX));
  PetscCall(DMPlexDistributeSetDefault(dmnew, PETSC_FALSE));
  PetscCall(PetscObjectSetName((PetscObject)dmnew, savedName));
  PetscCall(DMSetOptionsPrefix(dmnew, prefix));
  if (user->use_low_level_functions) {
    PetscSF sfXC;

    PetscCall(DMPlexTopologyLoad(dmnew, v, &sfXC));
    PetscCall(DMPlexCoordinatesLoad(dmnew, v, sfXC));
    PetscCall(DMPlexLabelsLoad(dmnew, v, sfXC));
    PetscCall(PetscSFDestroy(&sfXC));
  } else {
    PetscCall(DMLoad(dmnew, v));
  }
  PetscCall(CheckDistributedInterpolated(dmnew, v, user));
  PetscCall(PetscObjectSetName((PetscObject)dmnew, loadedName));
  PetscCall(PetscViewerPopFormat(v));
  PetscCall(PetscViewerDestroy(&v));
  *dm_new = dmnew;
  PetscFunctionReturn(PETSC_SUCCESS);
}

int main(int argc, char **argv)
{
  DM               dm, dmnew;
  PetscPartitioner part;
  AppCtx           user;
  PetscBool        flg;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, NULL, help));
  PetscCall(ProcessOptions(PETSC_COMM_WORLD, &user));
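  /* Create the mesh from the given file; DMPlexCreateFromFile() infers the file format
     from the extension (the tests below also use -dm_plex_create_from_hdf5_xdmf to force
     XDMF-layout HDF5 loading) */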
  PetscCall(DMPlexCreateFromFile(PETSC_COMM_WORLD, user.fname, user.meshname, user.interpolate, &dm));
  PetscCall(DMSetOptionsPrefix(dm, "orig_"));
  PetscCall(DMViewFromOptions(dm, NULL, "-dm_view"));
  PetscCall(CheckInterpolated(dm, user.interpolate));
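
  /* Optionally distribute the mesh with the simple partitioner; DMPlexDistribute()
     returns a NULL DM when the mesh was not actually redistributed (e.g. on one rank) */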
  if (user.distribute) {
    DM dmdist;

    PetscCall(DMPlexGetPartitioner(dm, &part));
    PetscCall(PetscPartitionerSetType(part, PETSCPARTITIONERSIMPLE));
    PetscCall(PetscPartitionerSetFromOptions(part));
    PetscCall(DMPlexDistribute(dm, 0, NULL, &dmdist));
    if (dmdist) {
      PetscCall(DMDestroy(&dm));
      dm = dmdist;
      PetscCall(CheckDistributed(dm, PETSC_TRUE));
      PetscCall(CheckInterpolated(dm, user.interpolate));
    }
  }
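
  /* Clear the options prefix and disable default distribution, so that DMSetFromOptions()
     does not redistribute the mesh on its own */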
  PetscCall(DMSetOptionsPrefix(dm, NULL));
  PetscCall(DMPlexDistributeSetDefault(dm, PETSC_FALSE));
  PetscCall(DMSetFromOptions(dm));
  PetscCall(DMViewFromOptions(dm, NULL, "-dm_view"));

  PetscCall(DMPlexWriteAndReadHDF5(dm, user.ofname, "new_", &user, &dmnew));
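
  /* Optionally write and read a second time, testing idempotence of the save/load cycle */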
  if (user.second_write_read) {
    PetscCall(DMDestroy(&dm));
    dm = dmnew;
    PetscCall(DMPlexWriteAndReadHDF5(dm, user.ofname, "new_", &user, &dmnew));
  }

  PetscCall(DMViewFromOptions(dmnew, NULL, "-dm_view"));

  /* This currently makes sense only for sequential meshes. */
  if (user.compare) {
    PetscCall(DMPlexEqual(dmnew, dm, &flg));
    PetscCheck(flg, PETSC_COMM_WORLD, PETSC_ERR_ARG_INCOMP, "DMs are not equal");
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "DMs equal\n"));
  }
  if (user.compare_labels) {
    PetscCall(DMCompareLabels(dmnew, dm, NULL, NULL));
    PetscCall(PetscPrintf(PETSC_COMM_WORLD, "DMLabels equal\n"));
  }

  PetscCall(DMDestroy(&dm));
  PetscCall(DMDestroy(&dmnew));
  PetscCall(PetscFinalize());
  return 0;
}

/*TEST
  build:
    requires: hdf5
  # Idempotence of saving/loading
  # Have to replace the Exodus file, which creates uninterpolated edges
  test:
    suffix: 0
    TODO: broken
    requires: exodusii
    args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/Rect-tri3.exo -dm_view ascii::ascii_info_detail
    args: -format hdf5_petsc -compare
  test:
    suffix: 1
    TODO: broken
    requires: exodusii parmetis !defined(PETSC_USE_64BIT_INDICES)
    nsize: 2
    args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/Rect-tri3.exo -dm_view ascii::ascii_info_detail
    args: -petscpartitioner_type parmetis
    args: -format hdf5_petsc -new_dm_view ascii::ascii_info_detail

  testset:
    requires: exodusii
    args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/blockcylinder-50.exo
    test:
      suffix: 2
      nsize: {{1 2 4 8}separate output}
      args: -format {{default hdf5_petsc}separate output}
      args: -dm_plex_view_hdf5_storage_version {{1.0.0 2.0.0 3.0.0}}
      args: -interpolate {{0 1}separate output}
    test:
      suffix: 2a
      nsize: {{1 2 4 8}separate output}
      args: -format {{hdf5_xdmf hdf5_viz}separate output}

  test:
    suffix: 3
    requires: exodusii
    args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/blockcylinder-50.exo -compare -compare_labels
    args: -dm_plex_view_hdf5_storage_version {{1.0.0 2.0.0 3.0.0}}

  # Load HDF5 file in XDMF format in parallel, write, read dm1, write, read dm2, and compare dm1 and dm2
  testset:
    suffix: 4
    requires: !complex
    args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/blockcylinder-50.h5 -dm_plex_create_from_hdf5_xdmf
    args: -distribute 0 -second_write_read -compare
    test:
      suffix: hdf5_petsc
      nsize: {{1 2}}
      args: -format hdf5_petsc -compare_labels
      args: -dm_plex_view_hdf5_storage_version {{1.0.0 2.0.0 3.0.0}}
    test:
      suffix: hdf5_xdmf
      nsize: {{1 3 8}}
      args: -format hdf5_xdmf

  # Use low-level functions, DMPlexTopologyView()/Load(), DMPlexCoordinatesView()/Load(), and DMPlexLabelsView()/Load()
  # TODO: The output is very long, so we keep just the 1.0.0 version. This test should be redesigned or removed.
  test:
    suffix: 5
    requires: exodusii
    nsize: 2
    args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/blockcylinder-50.exo
    args: -dm_view ascii::ascii_info_detail
    args: -new_dm_view ascii::ascii_info_detail
    args: -format hdf5_petsc -use_low_level_functions {{0 1}}
    args: -dm_plex_view_hdf5_storage_version 1.0.0

  testset:
    suffix: 6
    requires: hdf5 !complex datafilespath
    nsize: {{1 3}}
    args: -dm_plex_check_symmetry -dm_plex_check_skeleton -dm_plex_check_geometry
    args: -filename ${DATAFILESPATH}/meshes/cube-hexahedra-refined.h5 -dm_plex_create_from_hdf5_xdmf -dm_plex_hdf5_topology_path /cells -dm_plex_hdf5_geometry_path /coordinates
    args: -format hdf5_petsc -second_write_read -compare -compare_labels
    args: -interpolate {{0 1}} -distribute {{0 1}}
    args: -dm_plex_view_hdf5_storage_version {{1.0.0 2.0.0 3.0.0}}

  testset:
    # the same data and settings as dm_impls_plex_tests-ex18_9%
    suffix: 9
    requires: hdf5 !complex datafilespath
    nsize: {{1 2 4}}
    args: -dm_plex_check_symmetry -dm_plex_check_skeleton -dm_plex_check_geometry
    args: -filename ${DATAFILESPATH}/meshes/cube-hexahedra-refined.h5 -dm_plex_create_from_hdf5_xdmf -dm_plex_hdf5_topology_path /cells -dm_plex_hdf5_geometry_path /coordinates
    args: -format {{hdf5_petsc hdf5_xdmf}} -second_write_read -compare
    args: -dm_plex_view_hdf5_storage_version 3.0.0
    test:
      suffix: hdf5_seqload
      args: -distribute
      args: -interpolate {{0 1}}
      args: -dm_plex_hdf5_force_sequential
    test:
      suffix: hdf5_seqload_metis
      requires: parmetis
      args: -distribute -petscpartitioner_type parmetis
      args: -interpolate 1
      args: -dm_plex_hdf5_force_sequential
    test:
      suffix: hdf5
      args: -interpolate 1
    test:
      suffix: hdf5_repart
      requires: parmetis
      args: -distribute -petscpartitioner_type parmetis
      args: -interpolate 1
    test:
      TODO: Parallel partitioning of uninterpolated meshes not supported
      suffix: hdf5_repart_ppu
      requires: parmetis
      args: -distribute -petscpartitioner_type parmetis
      args: -interpolate 0

  # reproduce PetscSFView() crash - fixed, left as regression test
  test:
    suffix: new_dm_view
    requires: exodusii
    nsize: 2
    args: -filename ${wPETSC_DIR}/share/petsc/datafiles/meshes/TwoQuads.exo -new_dm_view ascii:ex5_new_dm_view.log:ascii_info_detail
    args: -dm_plex_view_hdf5_storage_version {{1.0.0 2.0.0 3.0.0}}

  # test backward compatibility with petsc_hdf5 format version 1.0.0, serial idempotence
  testset:
    suffix: 10-v3.16.0-v1.0.0
    requires: hdf5 !complex datafilespath
    args: -dm_plex_check_all -compare -compare_labels
    args: -dm_plex_view_hdf5_storage_version {{1.0.0 2.0.0 3.0.0}} -use_low_level_functions {{0 1}}
    test:
      suffix: a
      args: -filename ${DATAFILESPATH}/meshes/hdf5-petsc/petsc-v3.16.0/v1.0.0/annulus-20.h5
    test:
      suffix: b
      TODO: broken
      args: -filename ${DATAFILESPATH}/meshes/hdf5-petsc/petsc-v3.16.0/v1.0.0/barycentricallyrefinedcube.h5
    test:
      suffix: c
      args: -filename ${DATAFILESPATH}/meshes/hdf5-petsc/petsc-v3.16.0/v1.0.0/blockcylinder-50.h5
    test:
      suffix: d
      args: -filename ${DATAFILESPATH}/meshes/hdf5-petsc/petsc-v3.16.0/v1.0.0/cube-hexahedra-refined.h5
    test:
      suffix: e
      args: -filename ${DATAFILESPATH}/meshes/hdf5-petsc/petsc-v3.16.0/v1.0.0/hybrid_hexwedge.h5
    test:
      suffix: f
      args: -filename ${DATAFILESPATH}/meshes/hdf5-petsc/petsc-v3.16.0/v1.0.0/square.h5

TEST*/