Actual source code: xmlviewer.c
1: /*************************************************************************************
2: * M A R I T I M E R E S E A R C H I N S T I T U T E N E T H E R L A N D S *
3: *************************************************************************************
4: * authors: Koos Huijssen, Christiaan M. Klaij *
5: *************************************************************************************
6: * content: Viewer for writing XML output *
7: *************************************************************************************/
8: #include <petscviewer.h>
9: #include <petsc/private/logimpl.h>
10: #include <petsc/private/fortranimpl.h>
11: #include "xmlviewer.h"
12: #include "lognested.h"

static PetscErrorCode PetscViewerXMLStartSection(PetscViewer viewer, const char *name, const char *desc)
{
  PetscInt XMLSectionDepthPetsc;
  int      XMLSectionDepth;

  PetscFunctionBegin;
  PetscCall(PetscViewerASCIIGetTab(viewer, &XMLSectionDepthPetsc));
  XMLSectionDepth = (int)XMLSectionDepthPetsc;
  if (!desc) {
    PetscCall(PetscViewerASCIIPrintf(viewer, "%*s<%s>\n", 2 * XMLSectionDepth, "", name));
  } else {
    PetscCall(PetscViewerASCIIPrintf(viewer, "%*s<%s desc=\"%s\">\n", 2 * XMLSectionDepth, "", name, desc));
  }
  PetscCall(PetscViewerASCIIPushTab(viewer));
  PetscFunctionReturn(PETSC_SUCCESS);
}
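
/* Example (an illustrative sketch, not from the original source): with the viewer
 * at tab depth 0, the call
 *   PetscViewerXMLStartSection(viewer, "events", "Event list");
 * writes
 *   <events desc="Event list">
 * and pushes one tab level, so children printed afterwards indent by two more
 * spaces until the matching PetscViewerXMLEndSection(viewer, "events") writes
 * </events>. */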

/* Initialize a viewer for XML output: write the XML header and stylesheet line, and open the root section */
static PetscErrorCode PetscViewerInitASCII_XML(PetscViewer viewer)
{
  MPI_Comm comm;
  char     PerfScript[PETSC_MAX_PATH_LEN + 40];

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)viewer, &comm));
  PetscCall(PetscViewerASCIIPrintf(viewer, "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n"));
  PetscCall(PetscStrreplace(comm, "<?xml-stylesheet type=\"text/xsl\" href=\"performance_xml2html.xsl\"?>", PerfScript, sizeof(PerfScript)));
  PetscCall(PetscViewerASCIIPrintf(viewer, "%s\n", PerfScript));
  PetscCall(PetscViewerXMLStartSection(viewer, "root", NULL));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PetscViewerXMLEndSection(PetscViewer viewer, const char *name)
{
  PetscInt XMLSectionDepthPetsc;
  int      XMLSectionDepth;

  PetscFunctionBegin;
  PetscCall(PetscViewerASCIIGetTab(viewer, &XMLSectionDepthPetsc));
  XMLSectionDepth = (int)XMLSectionDepthPetsc;
  if (XMLSectionDepth > 0) PetscCall(PetscViewerASCIIPopTab(viewer));
  PetscCall(PetscViewerASCIIGetTab(viewer, &XMLSectionDepthPetsc));
  XMLSectionDepth = (int)XMLSectionDepthPetsc;
  PetscCall(PetscViewerASCIIPrintf(viewer, "%*s</%s>\n", 2 * XMLSectionDepth, "", name));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Finalize the XML output: close the root section opened in PetscViewerInitASCII_XML() */
static PetscErrorCode PetscViewerFinalASCII_XML(PetscViewer viewer)
{
  PetscFunctionBegin;
  PetscCall(PetscViewerXMLEndSection(viewer, "root"));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PetscViewerXMLPutString(PetscViewer viewer, const char *name, const char *desc, const char *value)
{
  PetscInt XMLSectionDepthPetsc;
  int      XMLSectionDepth;

  PetscFunctionBegin;
  PetscCall(PetscViewerASCIIGetTab(viewer, &XMLSectionDepthPetsc));
  XMLSectionDepth = (int)XMLSectionDepthPetsc;
  if (!desc) {
    PetscCall(PetscViewerASCIIPrintf(viewer, "%*s<%s>%s</%s>\n", 2 * XMLSectionDepth, "", name, value, name));
  } else {
    PetscCall(PetscViewerASCIIPrintf(viewer, "%*s<%s desc=\"%s\">%s</%s>\n", 2 * XMLSectionDepth, "", name, desc, value, name));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
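
/* Example (illustrative; "node042" is a made-up value): at tab depth 2,
 *   PetscViewerXMLPutString(viewer, "hostname", "Host", "node042");
 * emits
 *     <hostname desc="Host">node042</hostname> */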

static PetscErrorCode PetscViewerXMLPutInt(PetscViewer viewer, const char *name, const char *desc, int value)
{
  PetscInt XMLSectionDepthPetsc;
  int      XMLSectionDepth;

  PetscFunctionBegin;
  PetscCall(PetscViewerASCIIGetTab(viewer, &XMLSectionDepthPetsc));
  XMLSectionDepth = (int)XMLSectionDepthPetsc;
  if (!desc) {
    PetscCall(PetscViewerASCIIPrintf(viewer, "%*s<%s>%d</%s>\n", 2 * XMLSectionDepth, "", name, value, name));
  } else {
    PetscCall(PetscViewerASCIIPrintf(viewer, "%*s<%s desc=\"%s\">%d</%s>\n", 2 * XMLSectionDepth, "", name, desc, value, name));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PetscViewerXMLPutDouble(PetscViewer viewer, const char *name, PetscLogDouble value, const char *format)
{
  PetscInt XMLSectionDepthPetsc;
  int      XMLSectionDepth;
  char     buffer[1024];

  PetscFunctionBegin;
  PetscCall(PetscViewerASCIIGetTab(viewer, &XMLSectionDepthPetsc));
  XMLSectionDepth = (int)XMLSectionDepthPetsc;
  PetscCall(PetscSNPrintf(buffer, sizeof(buffer), "%*s<%s>%s</%s>\n", 2 * XMLSectionDepth, "", name, format, name));
  PetscCall(PetscViewerASCIIPrintf(viewer, buffer, value));
  PetscFunctionReturn(PETSC_SUCCESS);
}
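
/* Example (illustrative): PetscViewerXMLPutDouble(viewer, "max", 3.14, "%e") works
 * in two steps: PetscSNPrintf first builds the template "  <max>%e</max>\n" (here
 * at tab depth 1), and PetscViewerASCIIPrintf then substitutes the value, producing
 *   <max>3.140000e+00</max> */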

static PetscErrorCode PetscPrintExeSpecs(PetscViewer viewer)
{
  char        arch[128], hostname[128], username[128], pname[PETSC_MAX_PATH_LEN], date[128];
  char        version[256], buildoptions[128] = "";
  PetscMPIInt size;
  size_t      len;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)viewer), &size));
  PetscCall(PetscGetArchType(arch, sizeof(arch)));
  PetscCall(PetscGetHostName(hostname, sizeof(hostname)));
  PetscCall(PetscGetUserName(username, sizeof(username)));
  PetscCall(PetscGetProgramName(pname, sizeof(pname)));
  PetscCall(PetscGetDate(date, sizeof(date)));
  PetscCall(PetscGetVersion(version, sizeof(version)));

  PetscCall(PetscViewerXMLStartSection(viewer, "runspecification", "Run Specification"));
  PetscCall(PetscViewerXMLPutString(viewer, "executable", "Executable", pname));
  PetscCall(PetscViewerXMLPutString(viewer, "architecture", "Architecture", arch));
  PetscCall(PetscViewerXMLPutString(viewer, "hostname", "Host", hostname));
  PetscCall(PetscViewerXMLPutInt(viewer, "nprocesses", "Number of processes", size));
  PetscCall(PetscViewerXMLPutString(viewer, "user", "Run by user", username));
  PetscCall(PetscViewerXMLPutString(viewer, "date", "Started at", date));
  PetscCall(PetscViewerXMLPutString(viewer, "petscrelease", "Petsc Release", version));

  if (PetscDefined(USE_DEBUG)) PetscCall(PetscStrlcat(buildoptions, "Debug ", sizeof(buildoptions)));
  if (PetscDefined(USE_COMPLEX)) PetscCall(PetscStrlcat(buildoptions, "Complex ", sizeof(buildoptions)));
  if (PetscDefined(USE_REAL_SINGLE)) {
    PetscCall(PetscStrlcat(buildoptions, "Single ", sizeof(buildoptions)));
  } else if (PetscDefined(USE_REAL___FLOAT128)) {
    PetscCall(PetscStrlcat(buildoptions, "Quadruple ", sizeof(buildoptions)));
  } else if (PetscDefined(USE_REAL___FP16)) {
    PetscCall(PetscStrlcat(buildoptions, "Half ", sizeof(buildoptions)));
  }
  if (PetscDefined(USE_64BIT_INDICES)) PetscCall(PetscStrlcat(buildoptions, "Int64 ", sizeof(buildoptions)));
#if defined(__cplusplus)
  PetscCall(PetscStrlcat(buildoptions, "C++ ", sizeof(buildoptions)));
#endif
  PetscCall(PetscStrlen(buildoptions, &len));
  if (len) PetscCall(PetscViewerXMLPutString(viewer, "petscbuildoptions", "Petsc build options", buildoptions));
  PetscCall(PetscViewerXMLEndSection(viewer, "runspecification"));
  PetscFunctionReturn(PETSC_SUCCESS);
}
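
/* Example of the emitted section (illustrative; all values made up):
 *   <runspecification desc="Run Specification">
 *     <executable desc="Executable">./ex19</executable>
 *     <architecture desc="Architecture">arch-linux-c-debug</architecture>
 *     <hostname desc="Host">node042</hostname>
 *     <nprocesses desc="Number of processes">4</nprocesses>
 *     <user desc="Run by user">jdoe</user>
 *     <date desc="Started at">Tue Jan  1 12:00:00 2030</date>
 *     <petscrelease desc="Petsc Release">3.20.0</petscrelease>
 *   </runspecification> */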

static PetscErrorCode PetscPrintXMLGlobalPerformanceElement(PetscViewer viewer, const char *name, const char *desc, PetscLogDouble local_val, const PetscBool print_average, const PetscBool print_total)
{
  PetscLogDouble min, tot, ratio, avg;
  MPI_Comm       comm;
  PetscMPIInt    rank, size;
  PetscLogDouble valrank[2], max[2];

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)viewer, &comm));
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)viewer), &size));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));

  valrank[0] = local_val;
  valrank[1] = (PetscLogDouble)rank;
  PetscCall(MPIU_Allreduce(&local_val, &min, 1, MPIU_PETSCLOGDOUBLE, MPI_MIN, comm));
  PetscCall(MPIU_Allreduce(valrank, &max, 1, MPIU_2PETSCLOGDOUBLE, MPI_MAXLOC, comm));
  PetscCall(MPIU_Allreduce(&local_val, &tot, 1, MPIU_PETSCLOGDOUBLE, MPI_SUM, comm));
  avg = tot / ((PetscLogDouble)size);
  if (min != 0.0) ratio = max[0] / min;
  else ratio = 0.0;

  PetscCall(PetscViewerXMLStartSection(viewer, name, desc));
  PetscCall(PetscViewerXMLPutDouble(viewer, "max", max[0], "%e"));
  PetscCall(PetscViewerXMLPutInt(viewer, "maxrank", "rank at which max was found", (PetscMPIInt)max[1]));
  PetscCall(PetscViewerXMLPutDouble(viewer, "ratio", ratio, "%f"));
  if (print_average) PetscCall(PetscViewerXMLPutDouble(viewer, "average", avg, "%e"));
  if (print_total) PetscCall(PetscViewerXMLPutDouble(viewer, "total", tot, "%e"));
  PetscCall(PetscViewerXMLEndSection(viewer, name));
  PetscFunctionReturn(PETSC_SUCCESS);
}
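
/* The MPI_MAXLOC reduction above works on (value, rank) pairs: slot 0 of the result
 * is the global maximum and slot 1 the rank that owns it. A minimal standalone
 * sketch of the same pattern (assumes comm, rank, and local_val are already set up):
 *
 *   PetscLogDouble in[2] = {local_val, (PetscLogDouble)rank}, out[2];
 *   PetscCall(MPIU_Allreduce(in, out, 1, MPIU_2PETSCLOGDOUBLE, MPI_MAXLOC, comm));
 *   // out[0] == max over all ranks, out[1] == rank holding that max
 */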

/* Print the global performance: max, max/min, average and total of
 * time, objects, flops, flops/sec, memory, MPI messages, MPI message lengths, MPI reductions.
 */
static PetscErrorCode PetscPrintGlobalPerformance(PetscViewer viewer, PetscLogDouble locTotalTime, PetscLogHandler default_handler)
{
  PetscLogDouble  flops, mem, red, mess;
  PetscInt        num_objects;
  const PetscBool print_total_yes = PETSC_TRUE, print_total_no = PETSC_FALSE, print_average_no = PETSC_FALSE, print_average_yes = PETSC_TRUE;

  PetscFunctionBegin;
  /* Must preserve reduction count before we go on */
  red = petsc_allreduce_ct + petsc_gather_ct + petsc_scatter_ct;

  /* Calculate summary information */
  PetscCall(PetscViewerXMLStartSection(viewer, "globalperformance", "Global performance"));

  /* Time */
  PetscCall(PetscPrintXMLGlobalPerformanceElement(viewer, "time", "Time (sec)", locTotalTime, print_average_yes, print_total_no));

  /* Objects */
  PetscCall(PetscLogHandlerGetNumObjects(default_handler, &num_objects));
  PetscCall(PetscPrintXMLGlobalPerformanceElement(viewer, "objects", "Objects", (PetscLogDouble)num_objects, print_average_yes, print_total_no));

  /* Flop */
  PetscCall(PetscPrintXMLGlobalPerformanceElement(viewer, "mflop", "MFlop", petsc_TotalFlops / 1.0E6, print_average_yes, print_total_yes));

  /* Flop/sec -- Must talk to Barry here */
  if (locTotalTime != 0.0) flops = petsc_TotalFlops / locTotalTime;
  else flops = 0.0;
  PetscCall(PetscPrintXMLGlobalPerformanceElement(viewer, "mflops", "MFlop/sec", flops / 1.0E6, print_average_yes, print_total_yes));

  /* Memory */
  PetscCall(PetscMallocGetMaximumUsage(&mem));
  if (mem > 0.0) PetscCall(PetscPrintXMLGlobalPerformanceElement(viewer, "memory", "Memory (MiB)", mem / 1024.0 / 1024.0, print_average_yes, print_total_yes));

  /* Messages */
  mess = 0.5 * (petsc_irecv_ct + petsc_isend_ct + petsc_recv_ct + petsc_send_ct);
  PetscCall(PetscPrintXMLGlobalPerformanceElement(viewer, "messagetransfers", "MPI Message Transfers", mess, print_average_yes, print_total_yes));

  /* Message Volume */
  mess = 0.5 * (petsc_irecv_len + petsc_isend_len + petsc_recv_len + petsc_send_len);
  PetscCall(PetscPrintXMLGlobalPerformanceElement(viewer, "messagevolume", "MPI Message Volume (MiB)", mess / 1024.0 / 1024.0, print_average_yes, print_total_yes));

  /* Reductions */
  PetscCall(PetscPrintXMLGlobalPerformanceElement(viewer, "reductions", "MPI Reductions", red, print_average_no, print_total_no));
  PetscCall(PetscViewerXMLEndSection(viewer, "globalperformance"));
  PetscFunctionReturn(PETSC_SUCCESS);
}
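
/* Counting note: every point-to-point transfer is logged once on the sending rank
 * and once on the receiving rank, which is why the global sums above are halved.
 * For example (illustrative), a single MPI_Send matched by one MPI_Recv gives
 * petsc_send_ct = 1 on one rank and petsc_recv_ct = 1 on another; the sum is 2 and
 * 0.5 * 2 = 1 message transfer. */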

/* Print one performance result (time, call count, flop rate, ...) for a line of the
 * nested timing tree: the average over the ranks, plus the min/max values and their
 * ranks when the spread between ranks exceeds the given thresholds.
 */
static PetscErrorCode PetscPrintXMLNestedLinePerfResults(PetscViewer viewer, const char *name, PetscLogDouble value, PetscLogDouble minthreshold, PetscLogDouble maxthreshold, PetscLogDouble minmaxthreshold)
{
  MPI_Comm       comm; /* MPI communicator in reduction */
  PetscMPIInt    rank; /* rank of this process */
  PetscLogDouble val_in[2], max[2], min[2];
  PetscLogDouble minvalue, maxvalue, tot;
  PetscMPIInt    size;
  PetscMPIInt    minLoc, maxLoc;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)viewer, &comm));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  val_in[0] = value;
  val_in[1] = (PetscLogDouble)rank;
  PetscCall(MPIU_Allreduce(val_in, max, 1, MPIU_2PETSCLOGDOUBLE, MPI_MAXLOC, comm));
  PetscCall(MPIU_Allreduce(val_in, min, 1, MPIU_2PETSCLOGDOUBLE, MPI_MINLOC, comm));
  maxvalue = max[0];
  maxLoc   = (PetscMPIInt)max[1];
  minvalue = min[0];
  minLoc   = (PetscMPIInt)min[1];
  PetscCall(MPIU_Allreduce(&value, &tot, 1, MPIU_PETSCLOGDOUBLE, MPI_SUM, comm));

  if (maxvalue < maxthreshold && minvalue >= minthreshold) {
    /* One call per parent or NO value: don't print */
  } else {
    PetscCall(PetscViewerXMLStartSection(viewer, name, NULL));
    if (maxvalue > minvalue * minmaxthreshold) {
      PetscCall(PetscViewerXMLPutDouble(viewer, "avgvalue", tot / size, "%g"));
      PetscCall(PetscViewerXMLPutDouble(viewer, "minvalue", minvalue, "%g"));
      PetscCall(PetscViewerXMLPutDouble(viewer, "maxvalue", maxvalue, "%g"));
      PetscCall(PetscViewerXMLPutInt(viewer, "minloc", NULL, minLoc));
      PetscCall(PetscViewerXMLPutInt(viewer, "maxloc", NULL, maxLoc));
    } else {
      PetscCall(PetscViewerXMLPutDouble(viewer, "value", tot / size, "%g"));
    }
    PetscCall(PetscViewerXMLEndSection(viewer, name));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
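
/* Example (illustrative): for name = "time" with an average of 1.2, minvalue = 1.0
 * (on rank 3), maxvalue = 1.5 (on rank 0), and minmaxthreshold = 1.02, the spread
 * exceeds the threshold and the full statistics are written:
 *   <time>
 *     <avgvalue>1.2</avgvalue>
 *     <minvalue>1</minvalue>
 *     <maxvalue>1.5</maxvalue>
 *     <minloc>3</minloc>
 *     <maxloc>0</maxloc>
 *   </time>
 * With a spread below the threshold only a single <value> element is emitted. */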

static PetscErrorCode PetscLogNestedTreePrintLine(PetscViewer viewer, const PetscEventPerfInfo *perfInfo, int childCount, int parentCount, const char *name, PetscLogDouble totalTime)
{
  PetscLogDouble time = perfInfo->time;
  PetscLogDouble timeMx;
  MPI_Comm       comm;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)viewer, &comm));
  PetscCall(MPIU_Allreduce(&time, &timeMx, 1, MPIU_PETSCLOGDOUBLE, MPI_MAX, comm));
  PetscCall(PetscViewerXMLPutString(viewer, "name", NULL, name));
  PetscCall(PetscPrintXMLNestedLinePerfResults(viewer, "time", time / totalTime * 100.0, 0, 0, 1.02));
  PetscCall(PetscPrintXMLNestedLinePerfResults(viewer, "ncalls", parentCount > 0 ? ((PetscLogDouble)childCount) / ((PetscLogDouble)parentCount) : 1.0, 0.99, 1.01, 1.02));
  PetscCall(PetscPrintXMLNestedLinePerfResults(viewer, "mflops", time >= timeMx * 0.001 ? 1e-6 * perfInfo->flops / time : 0, 0, 0.01, 1.05));
  PetscCall(PetscPrintXMLNestedLinePerfResults(viewer, "mbps", time >= timeMx * 0.001 ? perfInfo->messageLength / (1024 * 1024 * time) : 0, 0, 0.01, 1.05));
  PetscCall(PetscPrintXMLNestedLinePerfResults(viewer, "nreductsps", time >= timeMx * 0.001 ? perfInfo->numReductions / time : 0, 0, 0.01, 1.05));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PetscNestedNameGetBase(const char name[], const char *base[])
{
  size_t n;
  PetscFunctionBegin;
  PetscCall(PetscStrlen(name, &n));
  while (n > 0 && name[n - 1] != ';') n--;
  *base = &name[n];
  PetscFunctionReturn(PETSC_SUCCESS);
}
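
/* Example (illustrative names): nested event names are ';'-separated paths, so
 *   PetscNestedNameGetBase("MatMult;MatMultAdd", &base);
 * leaves base pointing at "MatMultAdd", while a name containing no ';' is
 * returned unchanged. */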

static PetscErrorCode PetscLogNestedTreePrint(PetscViewer viewer, double total_time, double threshold_time, const PetscNestedEventNode *parent_node, PetscEventPerfInfo *parent_info, const PetscNestedEventNode tree[], PetscEventPerfInfo perf[], PetscLogNestedType type, PetscBool print_events)
{
  PetscInt           num_children = 0, num_printed;
  PetscInt           num_nodes = parent_node->num_descendants;
  PetscInt          *perm;
  PetscReal         *times;
  PetscEventPerfInfo other;

  PetscFunctionBegin;
  for (PetscInt node = 0; node < num_nodes; node += 1 + tree[node].num_descendants) {
    PetscAssert(tree[node].parent == parent_node->id, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Failed tree invariant");
    num_children++;
  }
  num_printed = num_children;
  PetscCall(PetscMemzero(&other, sizeof(other)));
  PetscCall(PetscMalloc2(num_children + 2, &times, num_children + 2, &perm));
  for (PetscInt i = 0, node = 0; node < num_nodes; i++, node += 1 + tree[node].num_descendants) {
    PetscLogDouble child_time = perf[node].time;

    perm[i] = node;
    PetscCall(MPIU_Allreduce(MPI_IN_PLACE, &child_time, 1, MPI_DOUBLE, MPI_MAX, PetscObjectComm((PetscObject)viewer)));
    times[i] = -child_time;

    parent_info->time -= perf[node].time;
    parent_info->flops -= perf[node].flops;
    parent_info->numMessages -= perf[node].numMessages;
    parent_info->messageLength -= perf[node].messageLength;
    parent_info->numReductions -= perf[node].numReductions;
    if (child_time < threshold_time) {
      PetscEventPerfInfo *add_to = (type == PETSC_LOG_NESTED_XML) ? &other : parent_info;

      add_to->time += perf[node].time;
      add_to->flops += perf[node].flops;
      add_to->numMessages += perf[node].numMessages;
      add_to->messageLength += perf[node].messageLength;
      add_to->numReductions += perf[node].numReductions;
      add_to->count += perf[node].count;
      num_printed--;
    }
  }
  perm[num_children]      = -1;
  times[num_children]     = -parent_info->time;
  perm[num_children + 1]  = -2;
  times[num_children + 1] = -other.time;
  PetscCall(MPIU_Allreduce(MPI_IN_PLACE, &times[num_children], 2, MPI_DOUBLE, MPI_MIN, PetscObjectComm((PetscObject)viewer)));
  if (type == PETSC_LOG_NESTED_FLAMEGRAPH) {
    /* The output is given as an integer in microseconds because otherwise the file cannot be read
     * by apps such as speedscope (https://speedscope.app/). */
    PetscCall(PetscViewerASCIIPrintf(viewer, "%s %" PetscInt64_FMT "\n", parent_node->name, (PetscInt64)(-times[num_children] * 1e6)));
  }
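  /* Example of the folded-stack format (illustrative event names): a "KSPSolve"
   * event nested under "main" would yield lines such as
   *   main 812345
   *   main;KSPSolve 640021
   * i.e. the full ';'-separated path followed by the time in whole microseconds. */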
  if (other.time > 0.0) num_printed++;
  // number of items other than "self" that will be printed
  if (num_printed == 0) {
    PetscCall(PetscFree2(times, perm));
    PetscFunctionReturn(PETSC_SUCCESS);
  }
  // sort descending by time
  PetscCall(PetscSortRealWithArrayInt(num_children + 2, times, perm));
  if (type == PETSC_LOG_NESTED_XML && print_events) PetscCall(PetscViewerXMLStartSection(viewer, "events", NULL));
  for (PetscInt i = 0; i < num_children + 2; i++) {
    PetscInt       node       = perm[i];
    PetscLogDouble child_time = -times[i];

    if (child_time >= threshold_time || (node < 0 && child_time > 0.0)) {
      if (type == PETSC_LOG_NESTED_XML) {
        PetscCall(PetscViewerXMLStartSection(viewer, "event", NULL));
        if (node == -1) {
          PetscCall(PetscLogNestedTreePrintLine(viewer, parent_info, 0, 0, "self", total_time));
        } else if (node == -2) {
          PetscCall(PetscLogNestedTreePrintLine(viewer, &other, other.count, parent_info->count, "other", total_time));
        } else {
          const char *base_name = NULL;
          PetscCall(PetscNestedNameGetBase(tree[node].name, &base_name));
          PetscCall(PetscLogNestedTreePrintLine(viewer, &perf[node], perf[node].count, parent_info->count, base_name, total_time));
          PetscCall(PetscLogNestedTreePrint(viewer, total_time, threshold_time, &tree[node], &perf[node], &tree[node + 1], &perf[node + 1], type, PETSC_TRUE));
        }
        PetscCall(PetscViewerXMLEndSection(viewer, "event"));
      } else if (node >= 0) {
        PetscCall(PetscLogNestedTreePrint(viewer, total_time, threshold_time, &tree[node], &perf[node], &tree[node + 1], &perf[node + 1], type, PETSC_FALSE));
      }
    }
  }
  if (type == PETSC_LOG_NESTED_XML && print_events) PetscCall(PetscViewerXMLEndSection(viewer, "events"));
  PetscCall(PetscFree2(times, perm));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PetscLogNestedTreePrintTop(PetscViewer viewer, PetscNestedEventTree *tree, PetscLogDouble threshold, PetscLogNestedType type)
{
  PetscNestedEventNode *main_stage;
  PetscNestedEventNode *tree_rem;
  PetscEventPerfInfo   *main_stage_perf;
  PetscEventPerfInfo   *perf_rem;
  PetscLogDouble        time;
  PetscLogDouble        threshold_time;

  PetscFunctionBegin;
  main_stage      = &tree->nodes[0];
  tree_rem        = &tree->nodes[1];
  main_stage_perf = &tree->perf[0];
  perf_rem        = &tree->perf[1];
  time            = main_stage_perf->time;
  PetscCall(MPIU_Allreduce(MPI_IN_PLACE, &time, 1, MPI_DOUBLE, MPI_MAX, tree->comm));
  /* Print (or ignore) the children in ascending order of total time */
  if (type == PETSC_LOG_NESTED_XML) {
    PetscCall(PetscViewerXMLStartSection(viewer, "timertree", "Timings tree"));
    PetscCall(PetscViewerXMLPutDouble(viewer, "totaltime", time, "%f"));
    PetscCall(PetscViewerXMLPutDouble(viewer, "timethreshold", threshold, "%f"));
  }
  threshold_time = time * (threshold / 100.0 + 1.e-12);
  PetscCall(PetscLogNestedTreePrint(viewer, time, threshold_time, main_stage, main_stage_perf, tree_rem, perf_rem, type, PETSC_FALSE));
  if (type == PETSC_LOG_NESTED_XML) PetscCall(PetscViewerXMLEndSection(viewer, "timertree"));
  PetscFunctionReturn(PETSC_SUCCESS);
}
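
/* Example (illustrative): with a max total runtime of 100 s and threshold = 1
 * (interpreted as a percentage above), threshold_time = 100 * (1/100 + 1e-12),
 * i.e. about 1 s, so events cheaper than that are folded into "other"/"self" by
 * PetscLogNestedTreePrint(). */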

PETSC_INTERN PetscErrorCode PetscLogHandlerView_Nested_XML(PetscLogHandler_Nested nested, PetscNestedEventTree *tree, PetscViewer viewer)
{
  PetscFunctionBegin;
  PetscCall(PetscViewerInitASCII_XML(viewer));
  PetscCall(PetscViewerASCIIPrintf(viewer, "<!-- PETSc Performance Summary: -->\n"));
  PetscCall(PetscViewerXMLStartSection(viewer, "petscroot", NULL));

  // Print global information about this run
  PetscCall(PetscPrintExeSpecs(viewer));

  if (PetscDefined(USE_LOG)) {
    PetscEventPerfInfo *main_stage_info;
    PetscLogDouble      locTotalTime;

    PetscCall(PetscLogHandlerGetEventPerfInfo(nested->handler, 0, 0, &main_stage_info));
    locTotalTime = main_stage_info->time;
    PetscCall(PetscPrintGlobalPerformance(viewer, locTotalTime, nested->handler));
  }
  PetscCall(PetscLogNestedTreePrintTop(viewer, tree, nested->threshold, PETSC_LOG_NESTED_XML));
  PetscCall(PetscViewerXMLEndSection(viewer, "petscroot"));
  PetscCall(PetscViewerFinalASCII_XML(viewer));
  PetscFunctionReturn(PETSC_SUCCESS);
}

PETSC_INTERN PetscErrorCode PetscLogHandlerView_Nested_Flamegraph(PetscLogHandler_Nested nested, PetscNestedEventTree *tree, PetscViewer viewer)
{
  PetscFunctionBegin;
  PetscCall(PetscLogNestedTreePrintTop(viewer, tree, nested->threshold, PETSC_LOG_NESTED_FLAMEGRAPH));
  PetscFunctionReturn(PETSC_SUCCESS);
}
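
/* Usage sketch (not part of this file): these viewers are normally reached through
 * PETSc's logging options for the nested log handler, e.g.
 *   ./app -log_view :performance.xml:ascii_xml
 *   ./app -log_view :profile.txt:ascii_flamegraph
 * which attach a nested log handler and route its output through
 * PetscLogHandlerView_Nested_XML() or PetscLogHandlerView_Nested_Flamegraph(). */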