Actual source code: partptscotch.c

#include <petsc/private/partitionerimpl.h>

#if defined(PETSC_HAVE_PTSCOTCH)
EXTERN_C_BEGIN
#include <ptscotch.h>
EXTERN_C_END
#endif

PetscBool  PTScotchPartitionerCite = PETSC_FALSE;
const char PTScotchPartitionerCitation[] =
  "@article{PTSCOTCH,\n"
  "  author  = {C. Chevalier and F. Pellegrini},\n"
  "  title   = {{PT-SCOTCH}: a tool for efficient parallel graph ordering},\n"
  "  journal = {Parallel Computing},\n"
  "  volume  = {34},\n"
  "  number  = {6},\n"
  "  pages   = {318--331},\n"
  "  year    = {2008},\n"
  "  doi     = {https://doi.org/10.1016/j.parco.2007.12.001}\n"
  "}\n";

typedef struct {
  MPI_Comm  pcomm;
  PetscInt  strategy;
  PetscReal imbalance;
} PetscPartitioner_PTScotch;

#if defined(PETSC_HAVE_PTSCOTCH)

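/* Translate the PETSc-side strategy index (the order of PTScotchStrategyList below) into the corresponding SCOTCH strategy flag */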
static int PTScotch_Strategy(PetscInt strategy)
{
  switch (strategy) {
  case  0: return SCOTCH_STRATDEFAULT;
  case  1: return SCOTCH_STRATQUALITY;
  case  2: return SCOTCH_STRATSPEED;
  case  3: return SCOTCH_STRATBALANCE;
  case  4: return SCOTCH_STRATSAFETY;
  case  5: return SCOTCH_STRATSCALABILITY;
  case  6: return SCOTCH_STRATRECURSIVE;
  case  7: return SCOTCH_STRATREMAP;
  default: return SCOTCH_STRATDEFAULT;
  }
}

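/* Partition a graph that lives entirely on one process with sequential Scotch: build the graph and a
   mapping strategy, map onto a complete target architecture (weighted when target partition weights
   are supplied), and return the assignment in part[] */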
static PetscErrorCode PTScotch_PartGraph_Seq(SCOTCH_Num strategy, double imbalance, SCOTCH_Num n, SCOTCH_Num xadj[], SCOTCH_Num adjncy[],
                                             SCOTCH_Num vtxwgt[], SCOTCH_Num adjwgt[], SCOTCH_Num nparts, SCOTCH_Num tpart[], SCOTCH_Num part[])
{
  SCOTCH_Arch  archdat;
  SCOTCH_Graph grafdat;
  SCOTCH_Strat stradat;
  SCOTCH_Num   vertnbr = n;
  SCOTCH_Num   edgenbr = xadj[n];
  SCOTCH_Num  *velotab = vtxwgt;
  SCOTCH_Num  *edlotab = adjwgt;
  SCOTCH_Num   flagval = strategy;
  double       kbalval = imbalance;

  { /* deprecated option: the misspelled "partititoner" prefix is the historical option name */
    PetscBool flg = PETSC_TRUE;

    PetscOptionsDeprecatedNoObject("-petscpartititoner_ptscotch_vertex_weight", NULL, "3.13", "Use -petscpartitioner_use_vertex_weights");
    PetscOptionsGetBool(NULL, NULL, "-petscpartititoner_ptscotch_vertex_weight", &flg, NULL);
    if (!flg) velotab = NULL;
  }
  SCOTCH_graphInit(&grafdat);
  SCOTCH_graphBuild(&grafdat, 0, vertnbr, xadj, xadj + 1, velotab, NULL, edgenbr, adjncy, edlotab);
  SCOTCH_stratInit(&stradat);
  SCOTCH_stratGraphMapBuild(&stradat, flagval, nparts, kbalval);
  SCOTCH_archInit(&archdat);
  if (tpart) { /* target partition weights */
    SCOTCH_archCmpltw(&archdat, nparts, tpart);
  } else {
    SCOTCH_archCmplt(&archdat, nparts);
  }
  SCOTCH_graphMap(&grafdat, &archdat, &stradat, part);
  SCOTCH_archExit(&archdat);
  SCOTCH_stratExit(&stradat);
  SCOTCH_graphExit(&grafdat);
  return 0;
}

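/* Parallel analogue of PTScotch_PartGraph_Seq(): partition a distributed graph with PT-Scotch using
   SCOTCH_Dgraph/SCOTCH_Dmapping; collective on comm */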
static PetscErrorCode PTScotch_PartGraph_MPI(SCOTCH_Num strategy, double imbalance, SCOTCH_Num vtxdist[], SCOTCH_Num xadj[], SCOTCH_Num adjncy[],
                                             SCOTCH_Num vtxwgt[], SCOTCH_Num adjwgt[], SCOTCH_Num nparts, SCOTCH_Num tpart[], SCOTCH_Num part[], MPI_Comm comm)
{
  PetscMPIInt     procglbnbr;
  PetscMPIInt     proclocnum;
  SCOTCH_Arch     archdat;
  SCOTCH_Dgraph   grafdat;
  SCOTCH_Dmapping mappdat;
  SCOTCH_Strat    stradat;
  SCOTCH_Num      vertlocnbr;
  SCOTCH_Num      edgelocnbr;
  SCOTCH_Num     *veloloctab = vtxwgt;
  SCOTCH_Num     *edloloctab = adjwgt;
  SCOTCH_Num      flagval    = strategy;
  double          kbalval    = imbalance;

  { /* deprecated option: see the note in PTScotch_PartGraph_Seq() */
    PetscBool flg = PETSC_TRUE;

    PetscOptionsDeprecatedNoObject("-petscpartititoner_ptscotch_vertex_weight", NULL, "3.13", "Use -petscpartitioner_use_vertex_weights");
    PetscOptionsGetBool(NULL, NULL, "-petscpartititoner_ptscotch_vertex_weight", &flg, NULL);
    if (!flg) veloloctab = NULL;
  }
  MPI_Comm_size(comm, &procglbnbr);
  MPI_Comm_rank(comm, &proclocnum);
  vertlocnbr = vtxdist[proclocnum + 1] - vtxdist[proclocnum];
  edgelocnbr = xadj[vertlocnbr];

  SCOTCH_dgraphInit(&grafdat, comm);
  SCOTCH_dgraphBuild(&grafdat, 0, vertlocnbr, vertlocnbr, xadj, xadj + 1, veloloctab, NULL, edgelocnbr, edgelocnbr, adjncy, NULL, edloloctab);
  SCOTCH_stratInit(&stradat);
  SCOTCH_stratDgraphMapBuild(&stradat, flagval, procglbnbr, nparts, kbalval);
  SCOTCH_archInit(&archdat);
  if (tpart) { /* target partition weights */
    SCOTCH_archCmpltw(&archdat, nparts, tpart);
  } else {
    SCOTCH_archCmplt(&archdat, nparts);
  }
  SCOTCH_dgraphMapInit(&grafdat, &mappdat, &archdat, part);
  SCOTCH_dgraphMapCompute(&grafdat, &mappdat, &stradat);
  SCOTCH_dgraphMapExit(&grafdat, &mappdat);
  SCOTCH_archExit(&archdat);
  SCOTCH_stratExit(&stradat);
  SCOTCH_dgraphExit(&grafdat);
  return 0;
}

#endif /* PETSC_HAVE_PTSCOTCH */

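/* User-visible names for -petscpartitioner_ptscotch_strategy; the ordering must match the switch in PTScotch_Strategy() */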
static const char *const PTScotchStrategyList[] = {
  "DEFAULT",
  "QUALITY",
  "SPEED",
  "BALANCE",
  "SAFETY",
  "SCALABILITY",
  "RECURSIVE",
  "REMAP"
};

static PetscErrorCode PetscPartitionerDestroy_PTScotch(PetscPartitioner part)
{
  PetscPartitioner_PTScotch *p = (PetscPartitioner_PTScotch *) part->data;

  MPI_Comm_free(&p->pcomm);
  PetscFree(part->data);
  return 0;
}

static PetscErrorCode PetscPartitionerView_PTScotch_ASCII(PetscPartitioner part, PetscViewer viewer)
{
  PetscPartitioner_PTScotch *p = (PetscPartitioner_PTScotch *) part->data;

  PetscViewerASCIIPushTab(viewer);
  PetscViewerASCIIPrintf(viewer, "using partitioning strategy %s\n", PTScotchStrategyList[p->strategy]);
  PetscViewerASCIIPrintf(viewer, "using load imbalance ratio %g\n", (double)p->imbalance);
  PetscViewerASCIIPopTab(viewer);
  return 0;
}

static PetscErrorCode PetscPartitionerView_PTScotch(PetscPartitioner part, PetscViewer viewer)
{
  PetscBool iascii;

  PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);
  if (iascii) PetscPartitionerView_PTScotch_ASCII(part, viewer);
  return 0;
}

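/* Process the strategy and imbalance options documented under PETSCPARTITIONERPTSCOTCH below */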
static PetscErrorCode PetscPartitionerSetFromOptions_PTScotch(PetscOptionItems *PetscOptionsObject, PetscPartitioner part)
{
  PetscPartitioner_PTScotch *p     = (PetscPartitioner_PTScotch *) part->data;
  const char *const         *slist = PTScotchStrategyList;
  PetscInt                   nlist = (PetscInt)(sizeof(PTScotchStrategyList)/sizeof(PTScotchStrategyList[0]));
  PetscBool                  flag;

  PetscOptionsHead(PetscOptionsObject, "PetscPartitioner PTScotch Options");
  PetscOptionsEList("-petscpartitioner_ptscotch_strategy", "Partitioning strategy", "", slist, nlist, slist[p->strategy], &p->strategy, &flag);
  PetscOptionsReal("-petscpartitioner_ptscotch_imbalance", "Load imbalance ratio", "", p->imbalance, &p->imbalance, &flag);
  PetscOptionsTail();
  return 0;
}

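/* Partition the distributed CSR graph (start[]/adjacency[]) into nparts pieces, recording the number of
   points per part in partSection and the points themselves, grouped by part, in the returned IS */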
static PetscErrorCode PetscPartitionerPartition_PTScotch(PetscPartitioner part, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection vertSection, PetscSection targetSection, PetscSection partSection, IS *partition)
{
#if defined(PETSC_HAVE_PTSCOTCH)
  MPI_Comm     comm;
  PetscInt     nvtxs  = numVertices; /* The number of vertices owned by this process */
  PetscInt    *vtxdist;              /* Distribution of vertices across processes */
  PetscInt    *xadj   = start;       /* Start of edge list for each vertex */
  PetscInt    *adjncy = adjacency;   /* Edge lists for all vertices */
  PetscInt    *vwgt   = NULL;        /* Vertex weights */
  PetscInt    *adjwgt = NULL;        /* Edge weights */
  PetscInt    *tpwgts = NULL;        /* Target partition weights */
  PetscInt     v, i, *assignment, *points;
  PetscMPIInt  size, rank, p;
  PetscBool    hasempty = PETSC_FALSE;

  PetscObjectGetComm((PetscObject)part, &comm);
  MPI_Comm_size(comm, &size);
  MPI_Comm_rank(comm, &rank);
  PetscMalloc2(size+1, &vtxdist, PetscMax(nvtxs,1), &assignment);
  /* Calculate vertex distribution */
  vtxdist[0] = 0;
  MPI_Allgather(&nvtxs, 1, MPIU_INT, &vtxdist[1], 1, MPIU_INT, comm);
  for (p = 2; p <= size; ++p) {
    hasempty = (PetscBool)(hasempty || !vtxdist[p-1] || !vtxdist[p]);
    vtxdist[p] += vtxdist[p-1];
  }
  /* null graph */
  if (vtxdist[size] == 0) {
    PetscFree2(vtxdist, assignment);
    ISCreateGeneral(comm, 0, NULL, PETSC_OWN_POINTER, partition);
    return 0;
  }

  /* Calculate vertex weights */
  if (vertSection) {
    PetscMalloc1(nvtxs, &vwgt);
    for (v = 0; v < nvtxs; ++v) PetscSectionGetDof(vertSection, v, &vwgt[v]);
  }

  /* Calculate partition weights */
  if (targetSection) {
    PetscInt sumw;

    PetscCalloc1(nparts, &tpwgts);
    for (p = 0, sumw = 0; p < nparts; ++p) {
      PetscSectionGetDof(targetSection, p, &tpwgts[p]);
      sumw += tpwgts[p];
    }
    if (!sumw) PetscFree(tpwgts);
  }

  {
    PetscPartitioner_PTScotch *pts   = (PetscPartitioner_PTScotch *) part->data;
    int                        strat = PTScotch_Strategy(pts->strategy);
    double                     imbal = (double)pts->imbalance;

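    /* If the whole graph lives on a single process, partition it there with sequential Scotch;
       otherwise partition in parallel with PT-Scotch */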
    for (p = 0; p < size && !vtxdist[p+1]; ++p);
    if (vtxdist[p+1] == vtxdist[size]) {
      if (rank == p) PTScotch_PartGraph_Seq(strat, imbal, nvtxs, xadj, adjncy, vwgt, adjwgt, nparts, tpwgts, assignment);
    } else {
      MPI_Comm pcomm = pts->pcomm;

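      /* PT-Scotch is run on a subcommunicator that excludes ranks owning no vertices, so the
         vertex distribution must be compressed to match */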
      if (hasempty) {
        PetscInt cnt;

        MPI_Comm_split(pts->pcomm, !!nvtxs, rank, &pcomm);
        for (p = 0, cnt = 0; p < size; p++) {
          if (vtxdist[p+1] != vtxdist[p]) {
            vtxdist[cnt+1] = vtxdist[p+1];
            cnt++;
          }
        }
      }
      if (nvtxs) PTScotch_PartGraph_MPI(strat, imbal, vtxdist, xadj, adjncy, vwgt, adjwgt, nparts, tpwgts, assignment, pcomm);
      if (hasempty) MPI_Comm_free(&pcomm);
    }
  }
  PetscFree(vwgt);
  PetscFree(tpwgts);

  /* Convert to PetscSection+IS */
  for (v = 0; v < nvtxs; ++v) PetscSectionAddDof(partSection, assignment[v], 1);
  PetscMalloc1(nvtxs, &points);
  for (p = 0, i = 0; p < nparts; ++p) {
    for (v = 0; v < nvtxs; ++v) {
      if (assignment[v] == p) points[i++] = v;
    }
  }
  ISCreateGeneral(comm, nvtxs, points, PETSC_OWN_POINTER, partition);

  PetscFree2(vtxdist, assignment);
  return 0;
#else
  SETERRQ(PetscObjectComm((PetscObject) part), PETSC_ERR_SUP, "Mesh partitioning needs external package support.\nPlease reconfigure with --download-ptscotch.");
#endif
}

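/* Install the PT-Scotch implementations of the PetscPartitioner operations; noGraph stays PETSC_FALSE
   since this partitioner consumes the adjacency graph */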
static PetscErrorCode PetscPartitionerInitialize_PTScotch(PetscPartitioner part)
{
  part->noGraph             = PETSC_FALSE;
  part->ops->view           = PetscPartitionerView_PTScotch;
  part->ops->destroy        = PetscPartitionerDestroy_PTScotch;
  part->ops->partition      = PetscPartitionerPartition_PTScotch;
  part->ops->setfromoptions = PetscPartitionerSetFromOptions_PTScotch;
  return 0;
}

/*MC
  PETSCPARTITIONERPTSCOTCH = "ptscotch" - A PetscPartitioner object using the PT-Scotch library

  Level: intermediate

  Options Database Keys:
+  -petscpartitioner_ptscotch_strategy <string> - PT-Scotch strategy. Choose one of default, quality, speed, balance, safety, scalability, recursive, or remap
-  -petscpartitioner_ptscotch_imbalance <val> - Load imbalance ratio

  Note:
  When the entire graph resides on a single process, this partitioner actually uses sequential Scotch rather than PT-Scotch.
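
  Example:
  A minimal usage sketch (assuming an existing DMPlex dm whose partitioner is being selected; error
  checking omitted to match this file):
.vb
  PetscPartitioner part;

  DMPlexGetPartitioner(dm, &part);
  PetscPartitionerSetType(part, PETSCPARTITIONERPTSCOTCH);
  PetscPartitionerSetFromOptions(part);
.ve
  The strategy and imbalance can then be chosen on the command line, e.g. -petscpartitioner_ptscotch_strategy quality.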

.seealso: PetscPartitionerType, PetscPartitionerCreate(), PetscPartitionerSetType()
M*/

PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_PTScotch(PetscPartitioner part)
{
  PetscPartitioner_PTScotch *p;

  PetscNewLog(part, &p);
  part->data = p;

  MPI_Comm_dup(PetscObjectComm((PetscObject)part), &p->pcomm);
  p->strategy  = 0;
  p->imbalance = 0.01;

  PetscPartitionerInitialize_PTScotch(part);
  PetscCitationsRegister(PTScotchPartitionerCitation, &PTScotchPartitionerCite);
  return 0;
}