Actual source code: partition.c
#include <petsc/private/matimpl.h>

/* Logging support */
PetscClassId MAT_PARTITIONING_CLASSID;

/*
   Simplest partitioning, keeps the current partitioning.
*/
static PetscErrorCode MatPartitioningApply_Current(MatPartitioning part, IS *partitioning)
{
  PetscInt    m;
  PetscMPIInt rank, size;

  MPI_Comm_size(PetscObjectComm((PetscObject)part), &size);
  if (part->n != size) {
    const char *prefix;
    PetscObjectGetOptionsPrefix((PetscObject)part, &prefix);
    SETERRQ(PetscObjectComm((PetscObject)part), PETSC_ERR_SUP, "This is the DEFAULT NO-OP partitioner, it currently only supports one domain per processor\nuse -%smat_partitioning_type parmetis or chaco or ptscotch for more than one subdomain per processor", prefix ? prefix : "");
  }
  MPI_Comm_rank(PetscObjectComm((PetscObject)part), &rank);

  MatGetLocalSize(part->adj, &m, NULL);
  ISCreateStride(PetscObjectComm((PetscObject)part), m, rank, 0, partitioning);
  return 0;
}

/*
   Partitions the rows as evenly as possible across the processes, to rebalance the computation.
*/
static PetscErrorCode MatPartitioningApply_Average(MatPartitioning part, IS *partitioning)
{
  PetscInt m, M, nparts, *indices, r, d, *parts, i, start, end, loc;

  MatGetSize(part->adj, &M, NULL);
  MatGetLocalSize(part->adj, &m, NULL);
  nparts = part->n;
  PetscMalloc1(nparts, &parts);
  d = M / nparts;
  for (i = 0; i < nparts; i++) parts[i] = d;
  r = M % nparts;
  for (i = 0; i < r; i++) parts[i] += 1;
  for (i = 1; i < nparts; i++) parts[i] += parts[i - 1];
  PetscMalloc1(m, &indices);
  MatGetOwnershipRange(part->adj, &start, &end);
  for (i = start; i < end; i++) {
    PetscFindInt(i, nparts, parts, &loc);
    if (loc < 0) loc = -(loc + 1);
    else loc = loc + 1;
    indices[i - start] = loc;
  }
  PetscFree(parts);
  ISCreateGeneral(PetscObjectComm((PetscObject)part), m, indices, PETSC_OWN_POINTER, partitioning);
  return 0;
}
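
/*
   A worked sketch of the splitting above (not from the source): with M = 10 rows and nparts = 3,
   d = 3 and r = 1, so the target sizes are 4, 3, 3 and the prefix sums stored in parts[] are
   4, 7, 10. PetscFindInt() then maps a global row i to the first prefix sum exceeding it:
   rows 0-3 go to part 0, rows 4-6 to part 1, and rows 7-9 to part 2.
*/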

static PetscErrorCode MatPartitioningApply_Square(MatPartitioning part, IS *partitioning)
{
  PetscInt    cell, n, N, p, rstart, rend, *color;
  PetscMPIInt size;

  MPI_Comm_size(PetscObjectComm((PetscObject)part), &size);

  p = (PetscInt)PetscSqrtReal((PetscReal)part->n);

  MatGetSize(part->adj, &N, NULL);
  n = (PetscInt)PetscSqrtReal((PetscReal)N);

  MatGetOwnershipRange(part->adj, &rstart, &rend);
  PetscMalloc1(rend - rstart, &color);
  /* for (int cell=rstart; cell<rend; cell++) color[cell-rstart] = ((cell%n) < (n/2)) + 2 * ((cell/n) < (n/2)); */
  for (cell = rstart; cell < rend; cell++) color[cell - rstart] = ((cell % n) / (n / p)) + p * ((cell / n) / (n / p));
  ISCreateGeneral(PetscObjectComm((PetscObject)part), rend - rstart, color, PETSC_OWN_POINTER, partitioning);
  return 0;
}
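
/*
   A small sketch of the coloring above (assumption: part->n and the global size N are perfect
   squares, since their integer square roots p and n are used directly). For N = 16 (n = 4) and
   part->n = 4 (p = 2), cell (r,c) of the 4x4 grid gets color (c/2) + 2*(r/2): the grid is cut
   into four 2x2 blocks, one per part.
*/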

PETSC_EXTERN PetscErrorCode MatPartitioningCreate_Current(MatPartitioning part)
{
  part->ops->apply   = MatPartitioningApply_Current;
  part->ops->view    = NULL;
  part->ops->destroy = NULL;
  return 0;
}

PETSC_EXTERN PetscErrorCode MatPartitioningCreate_Average(MatPartitioning part)
{
  part->ops->apply   = MatPartitioningApply_Average;
  part->ops->view    = NULL;
  part->ops->destroy = NULL;
  return 0;
}

PETSC_EXTERN PetscErrorCode MatPartitioningCreate_Square(MatPartitioning part)
{
  part->ops->apply   = MatPartitioningApply_Square;
  part->ops->view    = NULL;
  part->ops->destroy = NULL;
  return 0;
}

/* Gets as input the "sizes" array computed by ParMetis_*_NodeND and returns
       seps[  0 :           2*p) : the start and end node of each subdomain
       seps[2*p : 2*p + 2*(p-1)) : the start and end node of each separator
     levels[  0 :           p-1) : level in the tree for each separator (-1 root, -2 and -3 first level and so on)
   The arrays must be large enough.
*/
PETSC_INTERN PetscErrorCode MatPartitioningSizesToSep_Private(PetscInt p, PetscInt sizes[], PetscInt seps[], PetscInt level[])
{
  PetscInt l2p, i, pTree, pStartTree;

  l2p = PetscLog2Real(p);

  if (!p) return 0;
  PetscArrayzero(seps, 2 * p - 2);
  PetscArrayzero(level, p - 1);
  seps[2 * p - 2] = sizes[2 * p - 2];
  pTree           = p;
  pStartTree      = 0;
  while (pTree != 1) {
    for (i = pStartTree; i < pStartTree + pTree; i++) {
      seps[i] += sizes[i];
      seps[pStartTree + pTree + (i - pStartTree) / 2] += seps[i];
    }
    pStartTree += pTree;
    pTree = pTree / 2;
  }
  seps[2 * p - 2] -= sizes[2 * p - 2];

  pStartTree = 2 * p - 2;
  pTree      = 1;
  while (pStartTree > 0) {
    for (i = pStartTree; i < pStartTree + pTree; i++) {
      PetscInt k = 2 * i - (pStartTree + 2 * pTree);
      PetscInt n = seps[k + 1];

      seps[k + 1]  = seps[i] - sizes[k + 1];
      seps[k]      = seps[k + 1] + sizes[k + 1] - n - sizes[k];
      level[i - p] = -pTree - i + pStartTree;
    }
    pTree *= 2;
    pStartTree -= pTree;
  }
  /* I know there should be a formula */
  PetscSortIntWithArrayPair(p - 1, seps + p, sizes + p, level);
  for (i = 2 * p - 2; i >= 0; i--) {
    seps[2 * i]     = seps[i];
    seps[2 * i + 1] = seps[i] + PetscMax(sizes[i] - 1, 0);
  }
  return 0;
}
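
/*
   A worked sketch of the output layout (not from the source; p is assumed to be a power of two,
   which is why l2p = log2(p) is computed above). For p = 2 and ParMETIS-style input
   sizes[] = {s0, s1, ssep}, the routine produces
     seps[]  = {0, s0 - 1, s0, s0 + s1 - 1, s0 + s1, s0 + s1 + ssep - 1}
   i.e. the start/end nodes of the two subdomains followed by those of the single separator, and
     level[] = {-1}
   marking that separator as the root of the separator tree.
*/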

/* ===========================================================================================*/

PetscFunctionList MatPartitioningList              = NULL;
PetscBool         MatPartitioningRegisterAllCalled = PETSC_FALSE;

/*@C
  MatPartitioningRegister - Adds a new sparse matrix partitioning to the matrix package.

  Not Collective

  Input Parameters:
+ sname - name of partitioning (for example `MATPARTITIONINGCURRENT` or `MATPARTITIONINGPARMETIS`)
- function - function pointer that creates the partitioning type

  Level: developer

  Sample usage:
.vb
   MatPartitioningRegister("my_part", MyPartCreate);
.ve

  Then, your partitioner can be chosen with the procedural interface via
$     MatPartitioningSetType(part, "my_part")
  or at runtime via the option
$     -mat_partitioning_type my_part

.seealso: `MatPartitioning`, `MatPartitioningType`, `MatPartitioningCreate()`, `MatPartitioningRegisterDestroy()`, `MatPartitioningRegisterAll()`
@*/
PetscErrorCode MatPartitioningRegister(const char sname[], PetscErrorCode (*function)(MatPartitioning))
{
  MatInitializePackage();
  PetscFunctionListAdd(&MatPartitioningList, sname, function);
  return 0;
}

/*@C
  MatPartitioningGetType - Gets the Partitioning method type and name (as a string)
  from the partitioning context.

  Not Collective

  Input Parameter:
. partitioning - the partitioning context

  Output Parameter:
. type - partitioner type

  Level: intermediate

.seealso: `MatPartitioning`, `MatPartitioningType`, `MatPartitioningCreate()`, `MatPartitioningRegisterDestroy()`, `MatPartitioningRegisterAll()`
@*/
PetscErrorCode MatPartitioningGetType(MatPartitioning partitioning, MatPartitioningType *type)
{
  *type = ((PetscObject)partitioning)->type_name;
  return 0;
}

/*@C
  MatPartitioningSetNParts - Sets how many partitions need to be created;
  by default this is one per processor. Certain partitioning schemes may
  in fact only support that option.

  Collective on part

  Input Parameters:
+ part - the partitioning context
- n - the number of partitions

  Level: intermediate

.seealso: `MatPartitioning`, `MatPartitioningCreate()`, `MatPartitioningApply()`
@*/
PetscErrorCode MatPartitioningSetNParts(MatPartitioning part, PetscInt n)
{
  part->n = n;
  return 0;
}

/*@
  MatPartitioningApplyND - Gets a nested dissection partitioning for a matrix.

  Collective

  Input Parameter:
. matp - the matrix partitioning object

  Output Parameter:
. partitioning - the partitioning. For each local node, a positive value indicates the processor
                 number the node has been assigned to. Negative x values indicate the separator level -(x+1).

  Level: intermediate

  Note:
  The user can define additional partitionings; see `MatPartitioningRegister()`.

.seealso: `MatPartitioningApply()`, `MatPartitioningRegister()`, `MatPartitioningCreate()`,
          `MatPartitioningDestroy()`, `MatPartitioningSetAdjacency()`, `ISPartitioningToNumbering()`,
          `ISPartitioningCount()`
@*/
PetscErrorCode MatPartitioningApplyND(MatPartitioning matp, IS *partitioning)
{
  PetscLogEventBegin(MAT_PartitioningND, matp, 0, 0, 0);
  PetscUseTypeMethod(matp, applynd, partitioning);
  PetscLogEventEnd(MAT_PartitioningND, matp, 0, 0, 0);

  MatPartitioningViewFromOptions(matp, NULL, "-mat_partitioning_view");
  ISViewFromOptions(*partitioning, NULL, "-mat_partitioning_view");
  return 0;
}
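
/*
   A minimal usage sketch (assuming a parallel adjacency matrix A and an installed partitioner
   with nested dissection support, such as ParMETIS). In the resulting IS, an entry x >= 0 gives
   the part a node was assigned to, while x < 0 marks a separator node of level -(x+1):

     MatPartitioning part;
     IS              nd;
     MatPartitioningCreate(PetscObjectComm((PetscObject)A), &part);
     MatPartitioningSetAdjacency(part, A);
     MatPartitioningSetType(part, MATPARTITIONINGPARMETIS);
     MatPartitioningApplyND(part, &nd);
     ISDestroy(&nd);
     MatPartitioningDestroy(&part);
*/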

/*@
  MatPartitioningApply - Gets a partitioning for the graph represented by a sparse matrix.

  Collective

  Input Parameter:
. matp - the matrix partitioning object

  Output Parameter:
. partitioning - the partitioning. For each local node this tells the processor
                 number that the node is assigned to.

  Options Database Keys:
  To specify the partitioning through the options database, use one of
  the following
$    -mat_partitioning_type parmetis, -mat_partitioning_type current
  To see the partitioning result
$    -mat_partitioning_view

  Level: beginner

  Note:
  The user can define additional partitionings; see `MatPartitioningRegister()`.

.seealso: `MatPartitioning`, `MatPartitioningType`, `MatPartitioningRegister()`, `MatPartitioningCreate()`,
          `MatPartitioningDestroy()`, `MatPartitioningSetAdjacency()`, `ISPartitioningToNumbering()`,
          `ISPartitioningCount()`
@*/
PetscErrorCode MatPartitioningApply(MatPartitioning matp, IS *partitioning)
{
  PetscBool viewbalance, improve;

  PetscLogEventBegin(MAT_Partitioning, matp, 0, 0, 0);
  PetscUseTypeMethod(matp, apply, partitioning);
  PetscLogEventEnd(MAT_Partitioning, matp, 0, 0, 0);

  MatPartitioningViewFromOptions(matp, NULL, "-mat_partitioning_view");
  ISViewFromOptions(*partitioning, NULL, "-mat_partitioning_view");

  PetscObjectOptionsBegin((PetscObject)matp);
  viewbalance = PETSC_FALSE;
  PetscOptionsBool("-mat_partitioning_view_imbalance", "Display imbalance information of a partition", NULL, PETSC_FALSE, &viewbalance, NULL);
  improve = PETSC_FALSE;
  PetscOptionsBool("-mat_partitioning_improve", "Improve the quality of a partition", NULL, PETSC_FALSE, &improve, NULL);
  PetscOptionsEnd();

  if (improve) MatPartitioningImprove(matp, partitioning);

  if (viewbalance) MatPartitioningViewImbalance(matp, *partitioning);
  return 0;
}
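
/*
   A minimal end-to-end sketch (assuming a parallel adjacency matrix A; the type is taken from
   the options database, e.g. -mat_partitioning_type parmetis):

     MatPartitioning part;
     IS              is;
     MatPartitioningCreate(PetscObjectComm((PetscObject)A), &part);
     MatPartitioningSetAdjacency(part, A);
     MatPartitioningSetFromOptions(part);
     MatPartitioningApply(part, &is);
     ISDestroy(&is);
     MatPartitioningDestroy(&part);
*/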

/*@
  MatPartitioningImprove - Improves the quality of a given partition.

  Collective

  Input Parameters:
+ matp - the matrix partitioning object
- partitioning - the partitioning. For each local node this tells the processor
                 number that the node is assigned to.

  Output Parameter:
. partitioning - the improved partitioning. For each local node this tells the processor
                 number that the node is assigned to.

  Options Database Key:
  To improve the quality of the partition
$    -mat_partitioning_improve

  Level: beginner

.seealso: `MatPartitioning`, `MatPartitioningType`, `MatPartitioningApply()`, `MatPartitioningCreate()`,
          `MatPartitioningDestroy()`, `MatPartitioningSetAdjacency()`, `ISPartitioningToNumbering()`,
          `ISPartitioningCount()`
@*/
PetscErrorCode MatPartitioningImprove(MatPartitioning matp, IS *partitioning)
{
  PetscLogEventBegin(MAT_Partitioning, matp, 0, 0, 0);
  PetscTryTypeMethod(matp, improve, partitioning);
  PetscLogEventEnd(MAT_Partitioning, matp, 0, 0, 0);
  return 0;
}

/*@
  MatPartitioningViewImbalance - Display partitioning imbalance information.

  Collective

  Input Parameters:
+ matp - the matrix partitioning object
- partitioning - the partitioning. For each local node this tells the processor
                 number that the node is assigned to.

  Options Database Key:
  To see the partitioning imbalance information
$    -mat_partitioning_view_imbalance

  Level: beginner

.seealso: `MatPartitioning`, `MatPartitioningType`, `MatPartitioningApply()`, `MatPartitioningView()`
@*/
PetscErrorCode MatPartitioningViewImbalance(MatPartitioning matp, IS partitioning)
{
  PetscInt        nparts, *subdomainsizes, *subdomainsizes_tmp, nlocal, i, maxsub, minsub, avgsub;
  const PetscInt *indices;
  PetscViewer     viewer;

  nparts = matp->n;
  PetscCalloc2(nparts, &subdomainsizes, nparts, &subdomainsizes_tmp);
  ISGetLocalSize(partitioning, &nlocal);
  ISGetIndices(partitioning, &indices);
  for (i = 0; i < nlocal; i++) subdomainsizes_tmp[indices[i]] += matp->vertex_weights ? matp->vertex_weights[i] : 1;
  MPI_Allreduce(subdomainsizes_tmp, subdomainsizes, nparts, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)matp));
  ISRestoreIndices(partitioning, &indices);
  minsub = PETSC_MAX_INT, maxsub = PETSC_MIN_INT, avgsub = 0;
  for (i = 0; i < nparts; i++) {
    minsub = PetscMin(minsub, subdomainsizes[i]);
    maxsub = PetscMax(maxsub, subdomainsizes[i]);
    avgsub += subdomainsizes[i];
  }
  avgsub /= nparts;
  PetscFree2(subdomainsizes, subdomainsizes_tmp);
  PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)matp), &viewer);
  MatPartitioningView(matp, viewer);
  PetscViewerASCIIPrintf(viewer, "Partitioning Imbalance Info: Max %" PetscInt_FMT ", Min %" PetscInt_FMT ", Avg %" PetscInt_FMT ", R %g\n", maxsub, minsub, avgsub, (double)(maxsub / (PetscReal)minsub));
  return 0;
}

/*@
  MatPartitioningSetAdjacency - Sets the adjacency graph (matrix) of the thing to be
  partitioned.

  Collective

  Input Parameters:
+ part - the partitioning context
- adj - the adjacency matrix, this can be any `MatType` but the natural representation is `MATMPIADJ`

  Level: beginner

.seealso: `MatPartitioning`, `MatPartitioningType`, `MatPartitioningCreate()`
@*/
PetscErrorCode MatPartitioningSetAdjacency(MatPartitioning part, Mat adj)
{
  part->adj = adj;
  return 0;
}

/*@
  MatPartitioningDestroy - Destroys the partitioning context.

  Collective

  Input Parameter:
. part - the partitioning context

  Level: beginner

.seealso: `MatPartitioning`, `MatPartitioningType`, `MatPartitioningCreate()`
@*/
PetscErrorCode MatPartitioningDestroy(MatPartitioning *part)
{
  if (!*part) return 0;

  if (--((PetscObject)(*part))->refct > 0) {
    *part = NULL;
    return 0;
  }

  if ((*part)->ops->destroy) (*(*part)->ops->destroy)((*part));
  PetscFree((*part)->vertex_weights);
  PetscFree((*part)->part_weights);
  PetscHeaderDestroy(part);
  return 0;
}

/*@C
  MatPartitioningSetVertexWeights - Sets the weights for vertices for a partitioning.

  Logically Collective

  Input Parameters:
+ part - the partitioning context
- weights - the weights; on each process this array must have the same size as the number of local rows
            times the value passed with `MatPartitioningSetNumberVertexWeights()` (or 1 if that is not provided)

  Level: beginner

  Notes:
  The array weights is freed by PETSc so the user should not free the array. In C/C++
  the array must be obtained with a call to `PetscMalloc()`, not malloc().

  The weights may not be used by some partitioners.

.seealso: `MatPartitioning`, `MatPartitioningCreate()`, `MatPartitioningSetType()`, `MatPartitioningSetPartitionWeights()`, `MatPartitioningSetNumberVertexWeights()`
@*/
PetscErrorCode MatPartitioningSetVertexWeights(MatPartitioning part, const PetscInt weights[])
{
  PetscFree(part->vertex_weights);
  part->vertex_weights = (PetscInt *)weights;
  return 0;
}
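
/*
   A short sketch (hypothetical weight values) honoring the PetscMalloc() requirement above,
   since PETSc takes ownership of the array and frees it:

     PetscInt *w, m, i;
     MatGetLocalSize(adj, &m, NULL);
     PetscMalloc1(m, &w);
     for (i = 0; i < m; i++) w[i] = 1 + i % 3;
     MatPartitioningSetVertexWeights(part, w);

   After this call w must not be freed by the caller.
*/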

/*@C
  MatPartitioningSetPartitionWeights - Sets the weights for each partition.

  Logically Collective

  Input Parameters:
+ part - the partitioning context
- weights - An array of size nparts that is used to specify the fraction of
            vertex weight that should be distributed to each sub-domain for
            the balance constraint. If all of the sub-domains are to be of
            the same size, then each of the nparts elements should be set
            to a value of 1/nparts. Note that the sum of all of the weights
            should be one.

  Level: beginner

  Note:
  The array weights is freed by PETSc so the user should not free the array. In C/C++
  the array must be obtained with a call to `PetscMalloc()`, not malloc().

.seealso: `MatPartitioning`, `MatPartitioningSetVertexWeights()`, `MatPartitioningCreate()`, `MatPartitioningSetType()`
@*/
PetscErrorCode MatPartitioningSetPartitionWeights(MatPartitioning part, const PetscReal weights[])
{
  PetscFree(part->part_weights);
  part->part_weights = (PetscReal *)weights;
  return 0;
}
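
/*
   A short sketch for equal-sized parts (assuming nparts holds the number of parts set on the
   partitioner; the entries must sum to one, as noted above, and PETSc takes ownership of the
   PetscMalloc()'d array):

     PetscReal *pw;
     PetscInt   i;
     PetscMalloc1(nparts, &pw);
     for (i = 0; i < nparts; i++) pw[i] = 1.0 / nparts;
     MatPartitioningSetPartitionWeights(part, pw);
*/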

/*@
  MatPartitioningSetUseEdgeWeights - Set a flag to indicate whether or not to use edge weights.

  Logically Collective

  Input Parameters:
+ part - the partitioning context
- use_edge_weights - the flag indicating whether or not to use edge weights. By default no edge weights will be used,
                     that is, use_edge_weights is set to FALSE. If use_edge_weights is set to TRUE, users need to make
                     sure legal edge weights are stored in an ADJ matrix.

  Level: beginner

  Options Database Key:
. -mat_partitioning_use_edge_weights - (true or false)

.seealso: `MatPartitioning`, `MatPartitioningCreate()`, `MatPartitioningSetType()`, `MatPartitioningSetVertexWeights()`, `MatPartitioningSetPartitionWeights()`
@*/
PetscErrorCode MatPartitioningSetUseEdgeWeights(MatPartitioning part, PetscBool use_edge_weights)
{
  part->use_edge_weights = use_edge_weights;
  return 0;
}

/*@
  MatPartitioningGetUseEdgeWeights - Get a flag that indicates whether or not edge weights are used.

  Logically Collective

  Input Parameter:
. part - the partitioning context

  Output Parameter:
. use_edge_weights - the flag indicating whether or not edge weights are used

  Level: beginner

.seealso: `MatPartitioning`, `MatPartitioningCreate()`, `MatPartitioningSetType()`, `MatPartitioningSetVertexWeights()`, `MatPartitioningSetPartitionWeights()`,
          `MatPartitioningSetUseEdgeWeights`
@*/
PetscErrorCode MatPartitioningGetUseEdgeWeights(MatPartitioning part, PetscBool *use_edge_weights)
{
  *use_edge_weights = part->use_edge_weights;
  return 0;
}

/*@
  MatPartitioningCreate - Creates a partitioning context.

  Collective

  Input Parameter:
. comm - MPI communicator

  Output Parameter:
. newp - location to put the context

  Level: beginner

.seealso: `MatPartitioning`, `MatPartitioningSetType()`, `MatPartitioningApply()`, `MatPartitioningDestroy()`,
          `MatPartitioningSetAdjacency()`
@*/
PetscErrorCode MatPartitioningCreate(MPI_Comm comm, MatPartitioning *newp)
{
  MatPartitioning part;
  PetscMPIInt     size;

  *newp = NULL;

  MatInitializePackage();
  PetscHeaderCreate(part, MAT_PARTITIONING_CLASSID, "MatPartitioning", "Matrix/graph partitioning", "MatOrderings", comm, MatPartitioningDestroy, MatPartitioningView);
  part->vertex_weights   = NULL;
  part->part_weights     = NULL;
  part->use_edge_weights = PETSC_FALSE; /* By default we don't use edge weights */

  MPI_Comm_size(comm, &size);
  part->n    = (PetscInt)size;
  part->ncon = 1;

  *newp = part;
  return 0;
}

/*@C
  MatPartitioningViewFromOptions - View a partitioning context from the options database

  Collective

  Input Parameters:
+ A - the partitioning context
. obj - Optional object
- name - command line option

  Level: intermediate

  Options Database:
. -mat_partitioning_view [viewertype]:... - the viewer and its options

  Note:
.vb
   If no value is provided ascii:stdout is used
   ascii[:[filename][:[format][:append]]]   defaults to stdout - format can be one of ascii_info, ascii_info_detail, or ascii_matlab,
                                            for example ascii::ascii_info prints just the information about the object not all details
                                            unless :append is given filename opens in write mode, overwriting what was already there
   binary[:[filename][:[format][:append]]]  defaults to the file binaryoutput
   draw[:drawtype[:filename]]               for example, draw:tikz, draw:tikz:figure.tex or draw:x
   socket[:port]                            defaults to the standard output port
   saws[:communicatorname]                  publishes object to the Scientific Application Webserver (SAWs)
.ve

.seealso: `MatPartitioning`, `MatPartitioningView()`, `PetscObjectViewFromOptions()`, `MatPartitioningCreate()`
@*/
PetscErrorCode MatPartitioningViewFromOptions(MatPartitioning A, PetscObject obj, const char name[])
{
  PetscObjectViewFromOptions((PetscObject)A, obj, name);
  return 0;
}

/*@C
  MatPartitioningView - Prints the partitioning data structure.

  Collective

  Input Parameters:
+ part - the partitioning context
- viewer - optional visualization context

  Level: intermediate

  Note:
  The available visualization contexts include
+   `PETSC_VIEWER_STDOUT_SELF` - standard output (default)
-   `PETSC_VIEWER_STDOUT_WORLD` - synchronized standard output where only the first processor opens
    the file. All other processors send their data to the first processor to print.

  The user can open alternative visualization contexts with
.   `PetscViewerASCIIOpen()` - output to a specified file

.seealso: `MatPartitioning`, `PetscViewer`, `PetscViewerASCIIOpen()`
@*/
PetscErrorCode MatPartitioningView(MatPartitioning part, PetscViewer viewer)
{
  PetscBool iascii;

  if (!viewer) PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)part), &viewer);

  PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii);
  if (iascii) {
    PetscObjectPrintClassNamePrefixType((PetscObject)part, viewer);
    if (part->vertex_weights) PetscViewerASCIIPrintf(viewer, "  Using vertex weights\n");
  }
  PetscViewerASCIIPushTab(viewer);
  PetscTryTypeMethod(part, view, viewer);
  PetscViewerASCIIPopTab(viewer);
  return 0;
}

/*@C
  MatPartitioningSetType - Sets the type of partitioner to use

  Collective

  Input Parameters:
+ part - the partitioning context.
- type - a known method

  Options Database Key:
. -mat_partitioning_type <type> - (for instance, parmetis), use -help for a list of available methods

  Level: intermediate

.seealso: `MatPartitioning`, `MatPartitioningCreate()`, `MatPartitioningApply()`, `MatPartitioningType`
@*/
PetscErrorCode MatPartitioningSetType(MatPartitioning part, MatPartitioningType type)
{
  PetscBool match;
  PetscErrorCode (*r)(MatPartitioning);

  PetscObjectTypeCompare((PetscObject)part, type, &match);
  if (match) return 0;

  PetscTryTypeMethod(part, destroy);
  part->ops->destroy = NULL;

  part->setupcalled = 0;
  part->data        = NULL;
  PetscMemzero(part->ops, sizeof(struct _MatPartitioningOps));

  PetscFunctionListFind(MatPartitioningList, type, &r);

  (*r)(part);

  PetscFree(((PetscObject)part)->type_name);
  PetscStrallocpy(type, &((PetscObject)part)->type_name);
  return 0;
}

/*@
  MatPartitioningSetFromOptions - Sets various partitioning options from the
  options database for the partitioning object

  Collective

  Input Parameter:
. part - the partitioning context.

  Options Database Keys:
+ -mat_partitioning_type <type> - (for instance, parmetis), use -help for a list of available methods
- -mat_partitioning_nparts - number of subgraphs

  Level: beginner

  Note:
  If the partitioner has not been set by the user it uses one of the installed partitioners, such as
  ParMETIS. If there are no installed partitioners it uses `MATPARTITIONINGCURRENT`, which means no repartitioning.

.seealso: `MatPartitioning`
@*/
PetscErrorCode MatPartitioningSetFromOptions(MatPartitioning part)
{
  PetscBool   flag;
  char        type[256];
  const char *def;

  PetscObjectOptionsBegin((PetscObject)part);
  if (!((PetscObject)part)->type_name) {
#if defined(PETSC_HAVE_PARMETIS)
    def = MATPARTITIONINGPARMETIS;
#elif defined(PETSC_HAVE_CHACO)
    def = MATPARTITIONINGCHACO;
#elif defined(PETSC_HAVE_PARTY)
    def = MATPARTITIONINGPARTY;
#elif defined(PETSC_HAVE_PTSCOTCH)
    def = MATPARTITIONINGPTSCOTCH;
#else
    def = MATPARTITIONINGCURRENT;
#endif
  } else {
    def = ((PetscObject)part)->type_name;
  }
  PetscOptionsFList("-mat_partitioning_type", "Type of partitioner", "MatPartitioningSetType", MatPartitioningList, def, type, 256, &flag);
  if (flag) MatPartitioningSetType(part, type);

  PetscOptionsInt("-mat_partitioning_nparts", "number of fine parts", NULL, part->n, &part->n, &flag);

  PetscOptionsBool("-mat_partitioning_use_edge_weights", "whether or not to use edge weights", NULL, part->use_edge_weights, &part->use_edge_weights, &flag);

  /*
     Set the type if it was never set.
  */
  if (!((PetscObject)part)->type_name) MatPartitioningSetType(part, def);

  PetscTryTypeMethod(part, setfromoptions, PetscOptionsObject);
  PetscOptionsEnd();
  return 0;
}

/*@C
  MatPartitioningSetNumberVertexWeights - Sets the number of weights per vertex

  Not Collective

  Input Parameters:
+ partitioning - the partitioning context
- ncon - the number of weights

  Level: intermediate

.seealso: `MatPartitioning`, `MatPartitioningSetVertexWeights()`
@*/
PetscErrorCode MatPartitioningSetNumberVertexWeights(MatPartitioning partitioning, PetscInt ncon)
{
  partitioning->ncon = ncon;
  return 0;
}
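
/*
   A short sketch with two weights per vertex (hypothetical values; with ncon = 2 the array passed
   to MatPartitioningSetVertexWeights() is assumed to be interleaved per vertex, of length 2*m):

     PetscInt *w, m, i;
     MatGetLocalSize(adj, &m, NULL);
     MatPartitioningSetNumberVertexWeights(part, 2);
     PetscMalloc1(2 * m, &w);
     for (i = 0; i < m; i++) {
       w[2 * i]     = 1;
       w[2 * i + 1] = 1;
     }
     MatPartitioningSetVertexWeights(part, w);
*/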