Actual source code: sbaijcholmod.c
/*
   Provides an interface to the CHOLMOD sparse solver available through SuiteSparse version 4.2.1

   When built with PETSC_USE_64BIT_INDICES this will use SuiteSparse_long as the
   integer type in CHOLMOD, otherwise it will use int. This means
   all integers in this file are simply declared as PetscInt. Also it means
   that one cannot use 64BIT_INDICES on 32-bit machines [as SuiteSparse_long is 32 bits only]
*/
#include <../src/mat/impls/sbaij/seq/sbaij.h>
#include <../src/mat/impls/sbaij/seq/cholmod/cholmodimpl.h>

/*
   This is a terrible hack, but it allows the error handler to retain a context.
   Note that this hack really cannot be made both reentrant and concurrent.
*/
static Mat static_F;
static void CholmodErrorHandler(int status, const char *file, int line, const char *message)
{
  if (status > CHOLMOD_OK) {
    PetscInfo(static_F, "CHOLMOD warning %d at %s:%d: %s\n", status, file, line, message);
  } else if (status == CHOLMOD_OK) { /* Documentation says this can happen, but why? */
    PetscInfo(static_F, "CHOLMOD OK at %s:%d: %s\n", file, line, message);
  } else {
    PetscErrorPrintf("CHOLMOD error %d at %s:%d: %s\n", status, file, line, message);
  }
  return;
}
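
/*
   The CHOLMOD_OPTION_* macros below expose fields of cholmod_common in the PETSc options
   database as -mat_cholmod_<field>, converting between the PETSc and CHOLMOD types in
   each direction.
*/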
#define CHOLMOD_OPTION_DOUBLE(name, help) \
  do { \
    PetscReal tmp = (PetscReal)c->name; \
    PetscOptionsReal("-mat_cholmod_" #name, help, "None", tmp, &tmp, NULL); \
    c->name = (double)tmp; \
  } while (0)
#define CHOLMOD_OPTION_INT(name, help) \
  do { \
    PetscInt tmp = (PetscInt)c->name; \
    PetscOptionsInt("-mat_cholmod_" #name, help, "None", tmp, &tmp, NULL); \
    c->name = (int)tmp; \
  } while (0)
#define CHOLMOD_OPTION_SIZE_T(name, help) \
  do { \
    PetscReal tmp = (PetscReal)c->name; /* cast via PetscReal, not PetscInt, to avoid truncating a size_t field */ \
    PetscOptionsReal("-mat_cholmod_" #name, help, "None", tmp, &tmp, NULL); \
    c->name = (size_t)tmp; \
  } while (0)
#define CHOLMOD_OPTION_BOOL(name, help) \
  do { \
    PetscBool tmp = (PetscBool)!!c->name; \
    PetscOptionsBool("-mat_cholmod_" #name, help, "None", tmp, &tmp, NULL); \
    c->name = (int)tmp; \
  } while (0)
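
/* Query the options database and copy the requested -mat_cholmod_* values into chol->common */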
static PetscErrorCode CholmodSetOptions(Mat F)
{
  Mat_CHOLMOD *chol = (Mat_CHOLMOD *)F->data;
  cholmod_common *c = chol->common;
  PetscBool flg;

  PetscOptionsBegin(PetscObjectComm((PetscObject)F), ((PetscObject)F)->prefix, "CHOLMOD Options", "Mat");
  CHOLMOD_OPTION_INT(nmethods, "Number of different ordering methods to try");

#if defined(PETSC_USE_SUITESPARSE_GPU)
  c->useGPU = 1;
  CHOLMOD_OPTION_INT(useGPU, "Use GPU for BLAS 1, otherwise 0");
  CHOLMOD_OPTION_SIZE_T(maxGpuMemBytes, "Maximum memory to allocate on the GPU");
  CHOLMOD_OPTION_DOUBLE(maxGpuMemFraction, "Fraction of available GPU memory to allocate");
#endif

  /* CHOLMOD handles first-time packing and refactor-packing separately, but we usually want them to be the same. */
  chol->pack = (PetscBool)c->final_pack;
  PetscOptionsBool("-mat_cholmod_pack", "Pack factors after factorization [disable for frequent repeat factorization]", "None", chol->pack, &chol->pack, NULL);
  c->final_pack = (int)chol->pack;

  CHOLMOD_OPTION_DOUBLE(dbound, "Minimum absolute value of diagonal entries of D");
  CHOLMOD_OPTION_DOUBLE(grow0, "Global growth ratio when factors are modified");
  CHOLMOD_OPTION_DOUBLE(grow1, "Column growth ratio when factors are modified");
  CHOLMOD_OPTION_SIZE_T(grow2, "Affine column growth constant when factors are modified");
  CHOLMOD_OPTION_SIZE_T(maxrank, "Max rank of update, larger values are faster but use more memory [2,4,8]");
  {
    static const char *const list[] = {"SIMPLICIAL", "AUTO", "SUPERNODAL", "MatCholmodFactorType", "MAT_CHOLMOD_FACTOR_", 0};
    PetscOptionsEnum("-mat_cholmod_factor", "Factorization method", "None", list, (PetscEnum)c->supernodal, (PetscEnum *)&c->supernodal, NULL);
  }
  if (c->supernodal) CHOLMOD_OPTION_DOUBLE(supernodal_switch, "flop/nnz_L threshold for switching to supernodal factorization");
  CHOLMOD_OPTION_BOOL(final_asis, "Leave factors \"as is\"");
  CHOLMOD_OPTION_BOOL(final_pack, "Pack the columns when finished (use FALSE if the factors will be updated later)");
  if (!c->final_asis) {
    CHOLMOD_OPTION_BOOL(final_super, "Leave supernodal factors instead of converting to simplicial");
    CHOLMOD_OPTION_BOOL(final_ll, "Turn LDL' factorization into LL'");
    CHOLMOD_OPTION_BOOL(final_monotonic, "Ensure columns are monotonic when done");
    CHOLMOD_OPTION_BOOL(final_resymbol, "Remove numerically zero values resulting from relaxed supernodal amalgamation");
  }
  {
    PetscReal tmp[] = {(PetscReal)c->zrelax[0], (PetscReal)c->zrelax[1], (PetscReal)c->zrelax[2]};
    PetscInt n = 3;
    PetscOptionsRealArray("-mat_cholmod_zrelax", "3 real supernodal relaxed amalgamation parameters", "None", tmp, &n, &flg);
    if (flg)
      while (n--) c->zrelax[n] = (double)tmp[n];
  }
  {
    PetscInt n = 3, tmp[] = {(PetscInt)c->nrelax[0], (PetscInt)c->nrelax[1], (PetscInt)c->nrelax[2]}; /* n must be initialized: on input it is the array capacity */
    PetscOptionsIntArray("-mat_cholmod_nrelax", "3 size_t supernodal relaxed amalgamation parameters", "None", tmp, &n, &flg);
    if (flg)
      while (n--) c->nrelax[n] = (size_t)tmp[n];
  }
  CHOLMOD_OPTION_BOOL(prefer_upper, "Work with upper triangular form [faster when using fill-reducing ordering, slower in natural ordering]");
  CHOLMOD_OPTION_BOOL(default_nesdis, "Use NESDIS instead of METIS for nested dissection");
  CHOLMOD_OPTION_INT(print, "Verbosity level");
  PetscOptionsEnd();
  return 0;
}
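
/*
   Allocate and initialize the cholmod_common workspace on first use. The cholmod_X_* names
   are macros (see cholmodimpl.h) that resolve to cholmod_* or cholmod_l_* depending on the
   configured index size.
*/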
PetscErrorCode CholmodStart(Mat F)
{
  Mat_CHOLMOD *chol = (Mat_CHOLMOD *)F->data;
  cholmod_common *c;

  if (chol->common) return 0;
  PetscMalloc1(1, &chol->common);
  /* cholmod_X_start() returns TRUE on success, so negate to get the zero-on-success convention PetscCallExternal expects */
  PetscCallExternal(!cholmod_X_start, chol->common);

  c = chol->common;
  c->error_handler = CholmodErrorHandler;
  return 0;
}
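
/*
   Present a SeqSBAIJ matrix to CHOLMOD as a cholmod_sparse view. Nothing is copied except,
   in complex builds, the values, which are conjugated so that passing the transposed
   triangle still represents the same Hermitian matrix.
*/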
static PetscErrorCode MatWrapCholmod_seqsbaij(Mat A, PetscBool values, cholmod_sparse *C, PetscBool *aijalloc, PetscBool *valloc)
{
  Mat_SeqSBAIJ *sbaij = (Mat_SeqSBAIJ *)A->data;
  PetscBool vallocin = PETSC_FALSE;

  PetscMemzero(C, sizeof(*C));
  /* CHOLMOD uses compressed-column storage and SBAIJ stores the upper triangle by rows, so we pass it on as the lower triangle, swapping the meaning of row and column */
  C->nrow = (size_t)A->cmap->n;
  C->ncol = (size_t)A->rmap->n;
  C->nzmax = (size_t)sbaij->maxnz;
  C->p = sbaij->i;
  C->i = sbaij->j;
  if (values) {
#if defined(PETSC_USE_COMPLEX)
    /* we need to pass CHOLMOD the conjugate matrix */
    PetscScalar *v;
    PetscInt i;

    PetscMalloc1(sbaij->maxnz, &v);
    for (i = 0; i < sbaij->maxnz; i++) v[i] = PetscConj(sbaij->a[i]);
    C->x = v;
    vallocin = PETSC_TRUE;
#else
    C->x = sbaij->a;
#endif
  }
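  /* stype = -1 declares the matrix symmetric with entries stored in the lower triangle, consistent with the row/column swap above */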
  C->stype = -1;
  C->itype = CHOLMOD_INT_TYPE;
  C->xtype = values ? CHOLMOD_SCALAR_TYPE : CHOLMOD_PATTERN;
  C->dtype = CHOLMOD_DOUBLE;
  C->sorted = 1;
  C->packed = 1;
  *aijalloc = PETSC_FALSE;
  *valloc = vallocin;
  return 0;
}
#define GET_ARRAY_READ  0
#define GET_ARRAY_WRITE 1
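
/* Wrap a PETSc Vec as a CHOLMOD dense n-by-1 matrix; no values are copied, and rw selects read or write access to the Vec's array */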
PetscErrorCode VecWrapCholmod(Vec X, PetscInt rw, cholmod_dense *Y)
{
  PetscScalar *x;
  PetscInt n;

  PetscMemzero(Y, sizeof(*Y));
  switch (rw) {
  case GET_ARRAY_READ:
    VecGetArrayRead(X, (const PetscScalar **)&x);
    break;
  case GET_ARRAY_WRITE:
    VecGetArrayWrite(X, &x);
    break;
  default:
    SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Case %" PetscInt_FMT " not handled", rw);
    break;
  }
  VecGetSize(X, &n);

  Y->x = x;
  Y->nrow = n;
  Y->ncol = 1;
  Y->nzmax = n;
  Y->d = n;
  Y->xtype = CHOLMOD_SCALAR_TYPE;
  Y->dtype = CHOLMOD_DOUBLE;
  return 0;
}
PetscErrorCode VecUnWrapCholmod(Vec X, PetscInt rw, cholmod_dense *Y)
{
  switch (rw) {
  case GET_ARRAY_READ:
    VecRestoreArrayRead(X, (const PetscScalar **)&Y->x);
    break;
  case GET_ARRAY_WRITE:
    VecRestoreArrayWrite(X, (PetscScalar **)&Y->x);
    break;
  default:
    SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Case %" PetscInt_FMT " not handled", rw);
    break;
  }
  return 0;
}
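
/* Wrap a dense PETSc Mat as a CHOLMOD dense matrix without copying; the leading dimension is preserved so padded column storage is handled correctly */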
PetscErrorCode MatDenseWrapCholmod(Mat X, PetscInt rw, cholmod_dense *Y)
{
  PetscScalar *x;
  PetscInt m, n, lda;

  PetscMemzero(Y, sizeof(*Y));
  switch (rw) {
  case GET_ARRAY_READ:
    MatDenseGetArrayRead(X, (const PetscScalar **)&x);
    break;
  case GET_ARRAY_WRITE:
    MatDenseGetArrayWrite(X, &x);
    break;
  default:
    SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Case %" PetscInt_FMT " not handled", rw);
    break;
  }
  MatDenseGetLDA(X, &lda);
  MatGetLocalSize(X, &m, &n);

  Y->x = x;
  Y->nrow = m;
  Y->ncol = n;
  Y->nzmax = lda * n;
  Y->d = lda;
  Y->xtype = CHOLMOD_SCALAR_TYPE;
  Y->dtype = CHOLMOD_DOUBLE;
  return 0;
}
PetscErrorCode MatDenseUnWrapCholmod(Mat X, PetscInt rw, cholmod_dense *Y)
{
  switch (rw) {
  case GET_ARRAY_READ:
    MatDenseRestoreArrayRead(X, (const PetscScalar **)&Y->x);
    break;
  case GET_ARRAY_WRITE:
    /* we don't have MatDenseRestoreArrayWrite */
    MatDenseRestoreArray(X, (PetscScalar **)&Y->x);
    break;
  default:
    SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Case %" PetscInt_FMT " not handled", rw);
    break;
  }
  return 0;
}
PETSC_INTERN PetscErrorCode MatDestroy_CHOLMOD(Mat F)
{
  Mat_CHOLMOD *chol = (Mat_CHOLMOD *)F->data;

  if (chol->spqrfact) PetscCallExternal(!SuiteSparseQR_C_free, &chol->spqrfact, chol->common);
  if (chol->factor) PetscCallExternal(!cholmod_X_free_factor, &chol->factor, chol->common);
  if (chol->common->itype == CHOLMOD_INT) {
    PetscCallExternal(!cholmod_finish, chol->common);
  } else {
    PetscCallExternal(!cholmod_l_finish, chol->common);
  }
  PetscFree(chol->common);
  PetscFree(chol->matrix);
  PetscObjectComposeFunction((PetscObject)F, "MatFactorGetSolverType_C", NULL);
  PetscObjectComposeFunction((PetscObject)F, "MatQRFactorSymbolic_C", NULL);
  PetscObjectComposeFunction((PetscObject)F, "MatQRFactorNumeric_C", NULL);
  PetscFree(F->data);
  return 0;
}
static PetscErrorCode MatSolve_CHOLMOD(Mat, Vec, Vec);
static PetscErrorCode MatMatSolve_CHOLMOD(Mat, Mat, Mat);

/*static const char *const CholmodOrderingMethods[] = {"User","AMD","METIS","NESDIS(default)","Natural","NESDIS(small=20000)","NESDIS(small=4,no constrained)","NESDIS()"};*/
static PetscErrorCode MatView_Info_CHOLMOD(Mat F, PetscViewer viewer)
{
  Mat_CHOLMOD *chol = (Mat_CHOLMOD *)F->data;
  const cholmod_common *c = chol->common;
  PetscInt i;

  if (F->ops->solve != MatSolve_CHOLMOD) return 0;
  PetscViewerASCIIPrintf(viewer, "CHOLMOD run parameters:\n");
  PetscViewerASCIIPushTab(viewer);
  PetscViewerASCIIPrintf(viewer, "Pack factors after symbolic factorization: %s\n", chol->pack ? "TRUE" : "FALSE");
  PetscViewerASCIIPrintf(viewer, "Common.dbound %g (Smallest absolute value of diagonal entries of D)\n", c->dbound);
  PetscViewerASCIIPrintf(viewer, "Common.grow0 %g\n", c->grow0);
  PetscViewerASCIIPrintf(viewer, "Common.grow1 %g\n", c->grow1);
  PetscViewerASCIIPrintf(viewer, "Common.grow2 %u\n", (unsigned)c->grow2);
  PetscViewerASCIIPrintf(viewer, "Common.maxrank %u\n", (unsigned)c->maxrank);
  PetscViewerASCIIPrintf(viewer, "Common.supernodal_switch %g\n", c->supernodal_switch);
  PetscViewerASCIIPrintf(viewer, "Common.supernodal %d\n", c->supernodal);
  PetscViewerASCIIPrintf(viewer, "Common.final_asis %d\n", c->final_asis);
  PetscViewerASCIIPrintf(viewer, "Common.final_super %d\n", c->final_super);
  PetscViewerASCIIPrintf(viewer, "Common.final_ll %d\n", c->final_ll);
  PetscViewerASCIIPrintf(viewer, "Common.final_pack %d\n", c->final_pack);
  PetscViewerASCIIPrintf(viewer, "Common.final_monotonic %d\n", c->final_monotonic);
  PetscViewerASCIIPrintf(viewer, "Common.final_resymbol %d\n", c->final_resymbol);
  PetscViewerASCIIPrintf(viewer, "Common.zrelax [%g,%g,%g]\n", c->zrelax[0], c->zrelax[1], c->zrelax[2]);
  PetscViewerASCIIPrintf(viewer, "Common.nrelax [%u,%u,%u]\n", (unsigned)c->nrelax[0], (unsigned)c->nrelax[1], (unsigned)c->nrelax[2]);
  PetscViewerASCIIPrintf(viewer, "Common.prefer_upper %d\n", c->prefer_upper);
  PetscViewerASCIIPrintf(viewer, "Common.print %d\n", c->print);
  for (i = 0; i < c->nmethods; i++) {
    PetscViewerASCIIPrintf(viewer, "Ordering method %" PetscInt_FMT "%s:\n", i, i == c->selected ? " [SELECTED]" : "");
    PetscViewerASCIIPrintf(viewer, " lnz %g, fl %g, prune_dense %g, prune_dense2 %g\n", c->method[i].lnz, c->method[i].fl, c->method[i].prune_dense, c->method[i].prune_dense2);
  }
  PetscViewerASCIIPrintf(viewer, "Common.postorder %d\n", c->postorder);
  PetscViewerASCIIPrintf(viewer, "Common.default_nesdis %d (use NESDIS instead of METIS for nested dissection)\n", c->default_nesdis);
  /* Statistics */
  PetscViewerASCIIPrintf(viewer, "Common.fl %g (flop count from most recent analysis)\n", c->fl);
  PetscViewerASCIIPrintf(viewer, "Common.lnz %g (fundamental nz in L)\n", c->lnz);
  PetscViewerASCIIPrintf(viewer, "Common.anz %g\n", c->anz);
  PetscViewerASCIIPrintf(viewer, "Common.modfl %g (flop count from most recent update)\n", c->modfl);
  PetscViewerASCIIPrintf(viewer, "Common.malloc_count %g (number of live objects)\n", (double)c->malloc_count);
  PetscViewerASCIIPrintf(viewer, "Common.memory_usage %g (peak memory usage in bytes)\n", (double)c->memory_usage);
  PetscViewerASCIIPrintf(viewer, "Common.memory_inuse %g (current memory usage in bytes)\n", (double)c->memory_inuse);
  PetscViewerASCIIPrintf(viewer, "Common.nrealloc_col %g (number of column reallocations)\n", c->nrealloc_col);
  PetscViewerASCIIPrintf(viewer, "Common.nrealloc_factor %g (number of factor reallocations due to column reallocations)\n", c->nrealloc_factor);
  PetscViewerASCIIPrintf(viewer, "Common.ndbounds_hit %g (number of times diagonal was modified by dbound)\n", c->ndbounds_hit);
  PetscViewerASCIIPrintf(viewer, "Common.rowfacfl %g (number of flops in last call to cholmod_rowfac)\n", c->rowfacfl);
  PetscViewerASCIIPrintf(viewer, "Common.aatfl %g (number of flops to compute A(:,f)*A(:,f)')\n", c->aatfl);
#if defined(PETSC_USE_SUITESPARSE_GPU)
  PetscViewerASCIIPrintf(viewer, "Common.useGPU %d\n", c->useGPU);
#endif
  PetscViewerASCIIPopTab(viewer);
  return 0;
}
PETSC_INTERN PetscErrorCode MatView_CHOLMOD(Mat F, PetscViewer viewer)
{
  PetscBool iascii;
  PetscViewerFormat format;

  PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii);
  if (iascii) {
    PetscViewerGetFormat(viewer, &format);
    if (format == PETSC_VIEWER_ASCII_INFO) MatView_Info_CHOLMOD(F, viewer);
  }
  return 0;
}
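
/*
   Solve A x = b using the stored CHOLMOD factorization. The *_solve2 interface lets CHOLMOD
   reuse workspace (Y_handle, E_handle) across repeated solves; here the workspace is freed
   immediately after each solve.
*/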
static PetscErrorCode MatSolve_CHOLMOD(Mat F, Vec B, Vec X)
{
  Mat_CHOLMOD *chol = (Mat_CHOLMOD *)F->data;
  cholmod_dense cholB, cholX, *X_handle, *Y_handle = NULL, *E_handle = NULL;

  static_F = F;
  VecWrapCholmod(B, GET_ARRAY_READ, &cholB);
  VecWrapCholmod(X, GET_ARRAY_WRITE, &cholX);
  X_handle = &cholX;
  PetscCallExternal(!cholmod_X_solve2, CHOLMOD_A, chol->factor, &cholB, NULL, &X_handle, NULL, &Y_handle, &E_handle, chol->common);
  PetscCallExternal(!cholmod_X_free_dense, &Y_handle, chol->common);
  PetscCallExternal(!cholmod_X_free_dense, &E_handle, chol->common);
  VecUnWrapCholmod(B, GET_ARRAY_READ, &cholB);
  VecUnWrapCholmod(X, GET_ARRAY_WRITE, &cholX);
  PetscLogFlops(4.0 * chol->common->lnz);
  return 0;
}
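
/* Multiple right-hand-side variant of MatSolve_CHOLMOD, operating column-wise on dense matrices */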
static PetscErrorCode MatMatSolve_CHOLMOD(Mat F, Mat B, Mat X)
{
  Mat_CHOLMOD *chol = (Mat_CHOLMOD *)F->data;
  cholmod_dense cholB, cholX, *X_handle, *Y_handle = NULL, *E_handle = NULL;

  static_F = F;
  MatDenseWrapCholmod(B, GET_ARRAY_READ, &cholB);
  MatDenseWrapCholmod(X, GET_ARRAY_WRITE, &cholX);
  X_handle = &cholX;
  PetscCallExternal(!cholmod_X_solve2, CHOLMOD_A, chol->factor, &cholB, NULL, &X_handle, NULL, &Y_handle, &E_handle, chol->common);
  PetscCallExternal(!cholmod_X_free_dense, &Y_handle, chol->common);
  PetscCallExternal(!cholmod_X_free_dense, &E_handle, chol->common);
  MatDenseUnWrapCholmod(B, GET_ARRAY_READ, &cholB);
  MatDenseUnWrapCholmod(X, GET_ARRAY_WRITE, &cholX);
  PetscLogFlops(4.0 * B->cmap->n * chol->common->lnz);
  return 0;
}
static PetscErrorCode MatCholeskyFactorNumeric_CHOLMOD(Mat F, Mat A, const MatFactorInfo *info)
{
  Mat_CHOLMOD *chol = (Mat_CHOLMOD *)F->data;
  cholmod_sparse cholA;
  PetscBool aijalloc, valloc;
  int err;

  (*chol->Wrap)(A, PETSC_TRUE, &cholA, &aijalloc, &valloc);
  static_F = F;
  err = !cholmod_X_factorize(&cholA, chol->factor, chol->common);
  PetscCheck(!err, PetscObjectComm((PetscObject)F), PETSC_ERR_LIB, "CHOLMOD factorization failed with status %d", chol->common->status);

  PetscLogFlops(chol->common->fl);
  if (aijalloc) PetscFree2(cholA.p, cholA.i);
  if (valloc) PetscFree(cholA.x);
#if defined(PETSC_USE_SUITESPARSE_GPU)
  PetscLogGpuTimeAdd(chol->common->CHOLMOD_GPU_GEMM_TIME + chol->common->CHOLMOD_GPU_SYRK_TIME + chol->common->CHOLMOD_GPU_TRSM_TIME + chol->common->CHOLMOD_GPU_POTRF_TIME);
#endif

  F->ops->solve = MatSolve_CHOLMOD;
  F->ops->solvetranspose = MatSolve_CHOLMOD;
  F->ops->matsolve = MatMatSolve_CHOLMOD;
  F->ops->matsolvetranspose = MatMatSolve_CHOLMOD;
  return 0;
}
PETSC_INTERN PetscErrorCode MatCholeskyFactorSymbolic_CHOLMOD(Mat F, Mat A, IS perm, const MatFactorInfo *info)
{
  Mat_CHOLMOD *chol = (Mat_CHOLMOD *)F->data;
  int err;
  cholmod_sparse cholA;
  PetscBool aijalloc, valloc;
  PetscInt *fset = 0;
  size_t fsize = 0;

  /* Set options to F */
  CholmodSetOptions(F);

  (*chol->Wrap)(A, PETSC_FALSE, &cholA, &aijalloc, &valloc);
  static_F = F;
  if (chol->factor) {
    err = !cholmod_X_resymbol(&cholA, fset, fsize, (int)chol->pack, chol->factor, chol->common);
    PetscCheck(!err, PetscObjectComm((PetscObject)F), PETSC_ERR_LIB, "CHOLMOD analysis failed with status %d", chol->common->status);
  } else if (perm) {
    const PetscInt *ip;
    ISGetIndices(perm, &ip);
    chol->factor = cholmod_X_analyze_p(&cholA, (PetscInt *)ip, fset, fsize, chol->common);
    PetscCheck(chol->factor, PetscObjectComm((PetscObject)F), PETSC_ERR_LIB, "CHOLMOD analysis failed using PETSc ordering with status %d", chol->common->status);
    ISRestoreIndices(perm, &ip);
  } else {
    chol->factor = cholmod_X_analyze(&cholA, chol->common);
    PetscCheck(chol->factor, PetscObjectComm((PetscObject)F), PETSC_ERR_LIB, "CHOLMOD analysis failed using internal ordering with status %d", chol->common->status);
  }

  if (aijalloc) PetscFree2(cholA.p, cholA.i);
  if (valloc) PetscFree(cholA.x);

  F->ops->choleskyfactornumeric = MatCholeskyFactorNumeric_CHOLMOD;
  return 0;
}
static PetscErrorCode MatFactorGetSolverType_seqsbaij_cholmod(Mat A, MatSolverType *type)
{
  *type = MATSOLVERCHOLMOD;
  return 0;
}
PETSC_INTERN PetscErrorCode MatGetInfo_CHOLMOD(Mat F, MatInfoType flag, MatInfo *info)
{
  Mat_CHOLMOD *chol = (Mat_CHOLMOD *)F->data;

  info->block_size = 1.0;
  info->nz_allocated = chol->common->lnz;
  info->nz_used = chol->common->lnz;
  info->nz_unneeded = 0.0;
  info->assemblies = 0.0;
  info->mallocs = 0.0;
  info->memory = chol->common->memory_inuse;
  info->fill_ratio_given = 0;
  info->fill_ratio_needed = 0;
  info->factor_mallocs = chol->common->malloc_count;
  return 0;
}
/*MC
  MATSOLVERCHOLMOD

  A matrix type providing direct solvers (Cholesky) for sequential matrices
  via the external package CHOLMOD.

  Use ./configure --download-suitesparse to install PETSc with CHOLMOD support

  Use -pc_type cholesky -pc_factor_mat_solver_type cholmod to use this direct solver

  Consult the CHOLMOD documentation for more information about the Common parameters
  which correspond to the options database keys below.

  Options Database Keys:
+ -mat_cholmod_dbound <0> - Minimum absolute value of diagonal entries of D (None)
. -mat_cholmod_grow0 <1.2> - Global growth ratio when factors are modified (None)
. -mat_cholmod_grow1 <1.2> - Column growth ratio when factors are modified (None)
. -mat_cholmod_grow2 <5> - Affine column growth constant when factors are modified (None)
. -mat_cholmod_maxrank <8> - Max rank of update, larger values are faster but use more memory [2,4,8] (None)
. -mat_cholmod_factor <AUTO> - (choose one of) SIMPLICIAL AUTO SUPERNODAL
. -mat_cholmod_supernodal_switch <40> - flop/nnz_L threshold for switching to supernodal factorization (None)
. -mat_cholmod_final_asis <TRUE> - Leave factors "as is" (None)
. -mat_cholmod_final_pack <TRUE> - Pack the columns when finished (use FALSE if the factors will be updated later) (None)
. -mat_cholmod_zrelax <0.8> - 3 real supernodal relaxed amalgamation parameters (None)
. -mat_cholmod_nrelax <4> - 3 size_t supernodal relaxed amalgamation parameters (None)
. -mat_cholmod_prefer_upper <TRUE> - Work with upper triangular form (faster when using fill-reducing ordering, slower in natural ordering) (None)
. -mat_cholmod_print <3> - Verbosity level (None)
- -mat_ordering_type internal - Use the ordering provided by CHOLMOD

  Level: beginner

  Note: CHOLMOD is part of SuiteSparse http://faculty.cse.tamu.edu/davis/suitesparse.html

.seealso: `PCCHOLESKY`, `PCFactorSetMatSolverType()`, `MatSolverType`
M*/
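
/*
   Typical usage (a sketch, not part of this file): select this solver at the command line with
     -pc_type cholesky -pc_factor_mat_solver_type cholmod
   or programmatically, before the preconditioner is set up:
     PCSetType(pc, PCCHOLESKY);
     PCFactorSetMatSolverType(pc, MATSOLVERCHOLMOD);
*/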
PETSC_INTERN PetscErrorCode MatGetFactor_seqsbaij_cholmod(Mat A, MatFactorType ftype, Mat *F)
{
  Mat B;
  Mat_CHOLMOD *chol;
  PetscInt m = A->rmap->n, n = A->cmap->n, bs;
  MatGetBlockSize(A, &bs);
  PetscCheck(bs == 1, PETSC_COMM_SELF, PETSC_ERR_SUP, "CHOLMOD only supports block size 1, got %" PetscInt_FMT, bs);
#if defined(PETSC_USE_COMPLEX)
  /* Complex CHOLMOD factorization is only valid for Hermitian matrices */
  PetscCheck(A->hermitian == PETSC_BOOL3_TRUE, PETSC_COMM_SELF, PETSC_ERR_SUP, "Only for Hermitian matrices");
#endif
  /* Create the factorization matrix F */
  MatCreate(PetscObjectComm((PetscObject)A), &B);
  MatSetSizes(B, PETSC_DECIDE, PETSC_DECIDE, m, n);
  PetscStrallocpy("cholmod", &((PetscObject)B)->type_name);
  MatSetUp(B);
  PetscNew(&chol);

  chol->Wrap = MatWrapCholmod_seqsbaij;
  B->data = chol;

  B->ops->getinfo = MatGetInfo_CHOLMOD;
  B->ops->view = MatView_CHOLMOD;
  B->ops->choleskyfactorsymbolic = MatCholeskyFactorSymbolic_CHOLMOD;
  B->ops->destroy = MatDestroy_CHOLMOD;
  PetscObjectComposeFunction((PetscObject)B, "MatFactorGetSolverType_C", MatFactorGetSolverType_seqsbaij_cholmod);
  B->factortype = MAT_FACTOR_CHOLESKY;
  B->assembled = PETSC_TRUE;
  B->preallocated = PETSC_TRUE;

  CholmodStart(B);

  PetscFree(B->solvertype);
  PetscStrallocpy(MATSOLVERCHOLMOD, &B->solvertype);
  B->canuseordering = PETSC_TRUE;
  PetscStrallocpy(MATORDERINGEXTERNAL, (char **)&B->preferredordering[MAT_FACTOR_CHOLESKY]);
  *F = B;
  return 0;
}