Actual source code: pod.c
1: #include <petsc/private/kspimpl.h>
2: #include <petsc/private/matimpl.h>
3: #include <petscblaslapack.h>
4: static PetscBool cited = PETSC_FALSE;
5: static const char citation[] = "@phdthesis{zampini2010non,\n"
6: " title={Non-overlapping Domain Decomposition Methods for Cardiac Reaction-Diffusion Models and Applications},\n"
7: " author={Zampini, S},\n"
8: " year={2010},\n"
9: " school={PhD thesis, Universita degli Studi di Milano}\n"
10: "}\n";
12: typedef struct {
13: PetscInt maxn; /* maximum number of snapshots */
14: PetscInt n; /* number of active snapshots */
15: PetscInt curr; /* current tip of snapshots set */
16: Vec *xsnap; /* snapshots */
17: Vec *bsnap; /* rhs snapshots */
18: Vec *work; /* parallel work vectors */
19: PetscScalar *dots_iallreduce;
20: MPI_Request req_iallreduce;
21: PetscInt ndots_iallreduce; /* if we have iallreduce we can hide the VecMDot communications */
22: PetscReal tol; /* relative tolerance to retain eigenvalues */
23: PetscBool Aspd; /* if true, uses the SPD operator as inner product */
24: PetscScalar *corr; /* correlation matrix */
25: PetscReal *eigs; /* eigenvalues */
26: PetscScalar *eigv; /* eigenvectors */
27: PetscBLASInt nen; /* dimension of lower dimensional system */
28: PetscInt st; /* first eigenvector of correlation matrix to be retained */
29: PetscBLASInt *iwork; /* integer work vector */
30: PetscScalar *yhay; /* Y^H * A * Y */
31: PetscScalar *low; /* lower dimensional linear system */
32: #if defined(PETSC_USE_COMPLEX)
33: PetscReal *rwork;
34: #endif
35: PetscBLASInt lwork;
36: PetscScalar *swork;
37: PetscBool monitor;
38: } KSPGuessPOD;
40: static PetscErrorCode KSPGuessReset_POD(KSPGuess guess)
41: {
42: KSPGuessPOD *pod = (KSPGuessPOD *)guess->data;
43: PetscLayout Alay = NULL, vlay = NULL;
44: PetscBool cong;
46: pod->nen = 0;
47: pod->n = 0;
48: pod->curr = 0;
49: /* need to wait for completion of outstanding requests */
50: if (pod->ndots_iallreduce) MPI_Wait(&pod->req_iallreduce, MPI_STATUS_IGNORE);
51: pod->ndots_iallreduce = 0;
52: /* destroy vectors if the size of the linear system has changed */
53: if (guess->A) MatGetLayouts(guess->A, &Alay, NULL);
54: if (pod->xsnap) VecGetLayout(pod->xsnap[0], &vlay);
55: cong = PETSC_FALSE;
56: if (vlay && Alay) PetscLayoutCompare(Alay, vlay, &cong);
57: if (!cong) {
58: VecDestroyVecs(pod->maxn, &pod->xsnap);
59: VecDestroyVecs(pod->maxn, &pod->bsnap);
60: VecDestroyVecs(1, &pod->work);
61: }
62: return 0;
63: }
65: static PetscErrorCode KSPGuessSetUp_POD(KSPGuess guess)
66: {
67: KSPGuessPOD *pod = (KSPGuessPOD *)guess->data;
69: if (!pod->corr) {
70: PetscScalar sdummy;
71: PetscReal rdummy = 0;
72: PetscBLASInt bN, lierr, idummy;
74: PetscCalloc6(pod->maxn * pod->maxn, &pod->corr, pod->maxn, &pod->eigs, pod->maxn * pod->maxn, &pod->eigv, 6 * pod->maxn, &pod->iwork, pod->maxn * pod->maxn, &pod->yhay, pod->maxn * pod->maxn, &pod->low);
75: #if defined(PETSC_USE_COMPLEX)
76: PetscMalloc1(7 * pod->maxn, &pod->rwork);
77: #endif
78: #if defined(PETSC_HAVE_MPI_NONBLOCKING_COLLECTIVES)
79: PetscMalloc1(3 * pod->maxn, &pod->dots_iallreduce);
80: #endif
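/* LAPACK workspace query: calling syevx with lwork = -1 only computes the optimal workspace
   size, returned in sdummy, which is then used to allocate swork below */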
81: pod->lwork = -1;
82: PetscBLASIntCast(pod->maxn, &bN);
83: #if !defined(PETSC_USE_COMPLEX)
84: PetscCallBLAS("LAPACKsyevx", LAPACKsyevx_("V", "A", "L", &bN, pod->corr, &bN, &rdummy, &rdummy, &idummy, &idummy, &rdummy, &idummy, pod->eigs, pod->eigv, &bN, &sdummy, &pod->lwork, pod->iwork, pod->iwork + 5 * bN, &lierr));
85: #else
86: PetscCallBLAS("LAPACKsyevx", LAPACKsyevx_("V", "A", "L", &bN, pod->corr, &bN, &rdummy, &rdummy, &idummy, &idummy, &rdummy, &idummy, pod->eigs, pod->eigv, &bN, &sdummy, &pod->lwork, pod->rwork, pod->iwork, pod->iwork + 5 * bN, &lierr));
87: #endif
88: PetscCheck(!lierr, PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in query to SYEVX Lapack routine %d", (int)lierr);
89: PetscBLASIntCast((PetscInt)PetscRealPart(sdummy), &pod->lwork);
90: PetscMalloc1(pod->lwork + PetscMax(bN * bN, 6 * bN), &pod->swork);
91: }
92: /* the snapshot vectors are sequential (process-local); global reductions are performed explicitly with MPI_Allreduce */
93: if (!pod->xsnap) {
94: Vec *v, vseq;
96: KSPCreateVecs(guess->ksp, 1, &v, 0, NULL);
97: VecCreateLocalVector(v[0], &vseq);
98: VecDestroyVecs(1, &v);
99: VecDuplicateVecs(vseq, pod->maxn, &pod->xsnap);
100: VecDestroy(&vseq);
101: }
102: if (!pod->bsnap) {
103: Vec *v, vseq;
105: KSPCreateVecs(guess->ksp, 0, NULL, 1, &v);
106: VecCreateLocalVector(v[0], &vseq);
107: VecDestroyVecs(1, &v);
108: VecDuplicateVecs(vseq, pod->maxn, &pod->bsnap);
109: VecDestroy(&vseq);
110: }
111: if (!pod->work) KSPCreateVecs(guess->ksp, 1, &pod->work, 0, NULL);
112: return 0;
113: }
115: static PetscErrorCode KSPGuessDestroy_POD(KSPGuess guess)
116: {
117: KSPGuessPOD *pod = (KSPGuessPOD *)guess->data;
119: PetscFree6(pod->corr, pod->eigs, pod->eigv, pod->iwork, pod->yhay, pod->low);
120: #if defined(PETSC_USE_COMPLEX)
121: PetscFree(pod->rwork);
122: #endif
123: /* need to wait for completion before destroying dots_iallreduce */
124: if (pod->ndots_iallreduce) MPI_Wait(&pod->req_iallreduce, MPI_STATUS_IGNORE);
125: PetscFree(pod->dots_iallreduce);
126: PetscFree(pod->swork);
127: VecDestroyVecs(pod->maxn, &pod->bsnap);
128: VecDestroyVecs(pod->maxn, &pod->xsnap);
129: VecDestroyVecs(1, &pod->work);
130: PetscFree(pod);
131: return 0;
132: }
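/* forward declaration: KSPGuessFormGuess_POD() calls KSPGuessUpdate_POD(guess, NULL, NULL) to
   complete an outstanding non-blocking reduction before forming the guess */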
134: static PetscErrorCode KSPGuessUpdate_POD(KSPGuess, Vec, Vec);
136: static PetscErrorCode KSPGuessFormGuess_POD(KSPGuess guess, Vec b, Vec x)
137: {
138: KSPGuessPOD *pod = (KSPGuessPOD *)guess->data;
139: PetscScalar one = 1, zero = 0;
140: PetscBLASInt bN, ione = 1, bNen, lierr;
141: PetscInt i;
143: PetscCitationsRegister(citation, &cited);
144: if (pod->ndots_iallreduce) { /* complete communication and project the linear system */
145: KSPGuessUpdate_POD(guess, NULL, NULL);
146: }
147: if (!pod->nen) return 0;
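/* Layout of pod->swork in this routine: the local dot products of b against the snapshots go
   in swork[0, n), their globally reduced values in swork[n, 2n); the projected right-hand side
   b_low (overwritten by x_low after the low-dimensional solve) occupies swork[0, nen), and the
   n expansion coefficients passed to VecMAXPY end up in swork[n, 2n) */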
148: /* b_low = S * V^T * X^T * b */
149: VecGetLocalVectorRead(b, pod->bsnap[pod->curr]);
150: VecMDot(pod->bsnap[pod->curr], pod->n, pod->xsnap, pod->swork);
151: VecRestoreLocalVectorRead(b, pod->bsnap[pod->curr]);
152: MPIU_Allreduce(pod->swork, pod->swork + pod->n, pod->n, MPIU_SCALAR, MPIU_SUM, PetscObjectComm((PetscObject)guess));
153: PetscBLASIntCast(pod->n, &bN);
154: PetscBLASIntCast(pod->nen, &bNen);
155: PetscCallBLAS("BLASgemv", BLASgemv_("T", &bN, &bNen, &one, pod->eigv + pod->st * pod->n, &bN, pod->swork + pod->n, &ione, &zero, pod->swork, &ione));
156: if (pod->monitor) {
157: PetscPrintf(PetscObjectComm((PetscObject)guess), " KSPGuessPOD alphas = ");
158: for (i = 0; i < pod->nen; i++) {
159: #if defined(PETSC_USE_COMPLEX)
160: PetscPrintf(PetscObjectComm((PetscObject)guess), "%g + %g i", (double)PetscRealPart(pod->swork[i]), (double)PetscImaginaryPart(pod->swork[i]));
161: #else
162: PetscPrintf(PetscObjectComm((PetscObject)guess), "%g ", (double)pod->swork[i]);
163: #endif
164: }
165: PetscPrintf(PetscObjectComm((PetscObject)guess), "\n");
166: }
167: /* A_low x_low = b_low */
168: if (!pod->Aspd) { /* if A is SPD and used as the inner product, the projected operator is the identity and no low-dimensional solve is needed */
169: KSP pksp = guess->ksp;
170: PetscBool tsolve, symm, set;
172: if (pod->monitor) {
173: PetscMPIInt rank;
174: Mat L;
176: MPI_Comm_rank(PetscObjectComm((PetscObject)guess), &rank);
177: if (rank == 0) {
178: PetscPrintf(PetscObjectComm((PetscObject)guess), " L = ");
179: MatCreateSeqDense(PETSC_COMM_SELF, pod->nen, pod->nen, pod->low, &L);
180: MatView(L, NULL);
181: MatDestroy(&L);
182: }
183: }
184: MatIsSymmetricKnown(guess->A, &set, &symm);
185: tsolve = (set && symm) ? PETSC_FALSE : pksp->transpose_solve;
186: PetscCallBLAS("LAPACKgetrf", LAPACKgetrf_(&bNen, &bNen, pod->low, &bNen, pod->iwork, &lierr));
188: PetscCallBLAS("LAPACKgetrs", LAPACKgetrs_(tsolve ? "T" : "N", &bNen, &ione, pod->low, &bNen, pod->iwork, pod->swork, &bNen, &lierr));
190: }
191: /* x = X * V * S * x_low */
192: PetscCallBLAS("BLASgemv", BLASgemv_("N", &bN, &bNen, &one, pod->eigv + pod->st * pod->n, &bN, pod->swork, &ione, &zero, pod->swork + pod->n, &ione));
193: if (pod->monitor) {
194: PetscPrintf(PetscObjectComm((PetscObject)guess), " KSPGuessPOD sol = ");
195: for (i = 0; i < pod->nen; i++) {
196: #if defined(PETSC_USE_COMPLEX)
197: PetscPrintf(PetscObjectComm((PetscObject)guess), "%g + %g i", (double)PetscRealPart(pod->swork[i + pod->n]), (double)PetscImaginaryPart(pod->swork[i + pod->n]));
198: #else
199: PetscPrintf(PetscObjectComm((PetscObject)guess), "%g ", (double)pod->swork[i + pod->n]);
200: #endif
201: }
202: PetscPrintf(PetscObjectComm((PetscObject)guess), "\n");
203: }
204: VecGetLocalVector(x, pod->bsnap[pod->curr]);
205: VecSet(pod->bsnap[pod->curr], 0);
206: VecMAXPY(pod->bsnap[pod->curr], pod->n, pod->swork + pod->n, pod->xsnap);
207: VecRestoreLocalVector(x, pod->bsnap[pod->curr]);
208: return 0;
209: }
211: static PetscErrorCode KSPGuessUpdate_POD(KSPGuess guess, Vec b, Vec x)
212: {
213: KSPGuessPOD *pod = (KSPGuessPOD *)guess->data;
214: PetscScalar one = 1, zero = 0;
215: PetscReal toten, parten, reps = 0; /* abstol for syevx: 0 falls back to the LAPACK default (dlamch could be used to set it explicitly) */
216: PetscBLASInt bN, lierr, idummy;
217: PetscInt i;
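/* Layout of pod->swork in this routine: the local partial dot products are accumulated in
   swork[0, 3n) and the globally reduced values end up in swork[3n, 6n): the correlation
   entries at offset 3n and, when the operator is not used as the inner product, the two
   sets of dots needed to assemble Y^H * A * Y at offsets 4n and 5n */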
219: if (pod->ndots_iallreduce) goto complete_request;
220: pod->n = pod->n < pod->maxn ? pod->n + 1 : pod->maxn;
221: VecCopy(x, pod->xsnap[pod->curr]);
222: KSP_MatMult(guess->ksp, guess->A, x, pod->work[0]);
223: VecCopy(pod->work[0], pod->bsnap[pod->curr]);
224: if (pod->Aspd) {
225: VecMDot(pod->xsnap[pod->curr], pod->n, pod->bsnap, pod->swork);
226: #if !defined(PETSC_HAVE_MPI_NONBLOCKING_COLLECTIVES)
227: MPIU_Allreduce(pod->swork, pod->swork + 3 * pod->n, pod->n, MPIU_SCALAR, MPIU_SUM, PetscObjectComm((PetscObject)guess));
228: #else
229: MPI_Iallreduce(pod->swork, pod->dots_iallreduce, pod->n, MPIU_SCALAR, MPIU_SUM, PetscObjectComm((PetscObject)guess), &pod->req_iallreduce);
230: pod->ndots_iallreduce = 1;
231: #endif
232: } else {
233: PetscInt off;
234: PetscBool set, herm;
236: #if defined(PETSC_USE_COMPLEX)
237: MatIsHermitianKnown(guess->A, &set, &herm);
238: #else
239: MatIsSymmetricKnown(guess->A, &set, &herm);
240: #endif
241: off = (guess->ksp->transpose_solve && (!set || !herm)) ? 2 * pod->n : pod->n;
243: /* TODO: we may want to use a user-defined dot for the correlation matrix */
244: VecMDot(pod->xsnap[pod->curr], pod->n, pod->xsnap, pod->swork);
245: VecMDot(pod->bsnap[pod->curr], pod->n, pod->xsnap, pod->swork + off);
246: if (!set || !herm) {
247: off = (off == pod->n) ? 2 * pod->n : pod->n;
248: VecMDot(pod->xsnap[pod->curr], pod->n, pod->bsnap, pod->swork + off);
249: #if !defined(PETSC_HAVE_MPI_NONBLOCKING_COLLECTIVES)
250: MPIU_Allreduce(pod->swork, pod->swork + 3 * pod->n, 3 * pod->n, MPIU_SCALAR, MPIU_SUM, PetscObjectComm((PetscObject)guess));
251: #else
252: MPI_Iallreduce(pod->swork, pod->dots_iallreduce, 3 * pod->n, MPIU_SCALAR, MPIU_SUM, PetscObjectComm((PetscObject)guess), &pod->req_iallreduce);
253: pod->ndots_iallreduce = 3;
254: #endif
255: } else {
256: #if !defined(PETSC_HAVE_MPI_NONBLOCKING_COLLECTIVES)
257: MPIU_Allreduce(pod->swork, pod->swork + 3 * pod->n, 2 * pod->n, MPIU_SCALAR, MPIU_SUM, PetscObjectComm((PetscObject)guess));
258: for (i = 0; i < pod->n; i++) pod->swork[5 * pod->n + i] = pod->swork[4 * pod->n + i];
259: #else
260: MPI_Iallreduce(pod->swork, pod->dots_iallreduce, 2 * pod->n, MPIU_SCALAR, MPIU_SUM, PetscObjectComm((PetscObject)guess), &pod->req_iallreduce);
261: pod->ndots_iallreduce = 2;
262: #endif
263: }
264: }
265: if (pod->ndots_iallreduce) return 0;
267: complete_request:
268: if (pod->ndots_iallreduce) {
269: MPI_Wait(&pod->req_iallreduce, MPI_STATUS_IGNORE);
270: switch (pod->ndots_iallreduce) {
271: case 3:
272: for (i = 0; i < pod->n; i++) pod->swork[3 * pod->n + i] = pod->dots_iallreduce[i];
273: for (i = 0; i < pod->n; i++) pod->swork[4 * pod->n + i] = pod->dots_iallreduce[pod->n + i];
274: for (i = 0; i < pod->n; i++) pod->swork[5 * pod->n + i] = pod->dots_iallreduce[2 * pod->n + i];
275: break;
276: case 2:
277: for (i = 0; i < pod->n; i++) pod->swork[3 * pod->n + i] = pod->dots_iallreduce[i];
278: for (i = 0; i < pod->n; i++) pod->swork[4 * pod->n + i] = pod->dots_iallreduce[pod->n + i];
279: for (i = 0; i < pod->n; i++) pod->swork[5 * pod->n + i] = pod->dots_iallreduce[pod->n + i];
280: break;
281: case 1:
282: for (i = 0; i < pod->n; i++) pod->swork[3 * pod->n + i] = pod->dots_iallreduce[i];
283: break;
284: default:
285: SETERRQ(PetscObjectComm((PetscObject)guess), PETSC_ERR_PLIB, "Invalid number of outstanding dots operations: %" PetscInt_FMT, pod->ndots_iallreduce);
286: }
287: }
288: pod->ndots_iallreduce = 0;
290: /* correlation matrix and Y^H A Y (Galerkin) */
291: for (i = 0; i < pod->n; i++) {
292: pod->corr[pod->curr * pod->maxn + i] = pod->swork[3 * pod->n + i];
293: pod->corr[i * pod->maxn + pod->curr] = PetscConj(pod->swork[3 * pod->n + i]);
294: if (!pod->Aspd) {
295: pod->yhay[pod->curr * pod->maxn + i] = pod->swork[4 * pod->n + i];
296: pod->yhay[i * pod->maxn + pod->curr] = PetscConj(pod->swork[5 * pod->n + i]);
297: }
298: }
299: /* syevx changes the input matrix */
300: for (i = 0; i < pod->n; i++) {
301: PetscInt j;
302: for (j = i; j < pod->n; j++) pod->swork[i * pod->n + j] = pod->corr[i * pod->maxn + j];
303: }
304: PetscBLASIntCast(pod->n, &bN);
305: #if !defined(PETSC_USE_COMPLEX)
306: PetscCallBLAS("LAPACKsyevx", LAPACKsyevx_("V", "A", "L", &bN, pod->swork, &bN, &reps, &reps, &idummy, &idummy, &reps, &idummy, pod->eigs, pod->eigv, &bN, pod->swork + bN * bN, &pod->lwork, pod->iwork, pod->iwork + 5 * bN, &lierr));
307: #else
308: PetscCallBLAS("LAPACKsyevx", LAPACKsyevx_("V", "A", "L", &bN, pod->swork, &bN, &reps, &reps, &idummy, &idummy, &reps, &idummy, pod->eigs, pod->eigv, &bN, pod->swork + bN * bN, &pod->lwork, pod->rwork, pod->iwork, pod->iwork + 5 * bN, &lierr));
309: #endif
310: PetscCheck(!lierr, PETSC_COMM_SELF, PETSC_ERR_LIB, "Error in SYEVX Lapack routine %d", (int)lierr);
313: /* dimension of lower dimensional system */
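/* syevx returns the eigenvalues in ascending order: the loop below keeps the trailing
   eigenvectors whose cumulative energy reaches a (1 - tol) fraction of the total energy,
   and pod->st becomes the index of the first retained eigenvector */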
314: pod->st = -1;
315: for (i = 0, toten = 0; i < pod->n; i++) {
316: pod->eigs[i] = PetscMax(pod->eigs[i], 0.0);
317: toten += pod->eigs[i];
318: if (!pod->eigs[i]) pod->st = i;
319: }
320: pod->nen = 0;
321: for (i = pod->n - 1, parten = 0; i > pod->st && toten > 0; i--) {
322: pod->nen++;
323: parten += pod->eigs[i];
324: if (parten + toten * pod->tol >= toten) break;
325: }
326: pod->st = pod->n - pod->nen;
328: /* Compute eigv = V * S, with S = diag(1/sqrt(lambda_i)), so that the combinations X * V * S form an orthonormal basis in the chosen inner product */
329: for (i = pod->st; i < pod->n; i++) {
330: const PetscReal v = 1.0 / PetscSqrtReal(pod->eigs[i]);
331: const PetscInt st = pod->n * i;
332: PetscInt j;
334: for (j = 0; j < pod->n; j++) pod->eigv[st + j] *= v;
335: }
337: /* compute S * V^T * X^T * A * X * V * S if needed */
338: if (pod->nen && !pod->Aspd) {
339: PetscBLASInt bNen, bMaxN;
340: PetscInt st = pod->st * pod->n;
341: PetscBLASIntCast(pod->nen, &bNen);
342: PetscBLASIntCast(pod->maxn, &bMaxN);
343: PetscCallBLAS("BLASgemm", BLASgemm_("T", "N", &bNen, &bN, &bN, &one, pod->eigv + st, &bN, pod->yhay, &bMaxN, &zero, pod->swork, &bNen));
344: PetscCallBLAS("BLASgemm", BLASgemm_("N", "N", &bNen, &bNen, &bN, &one, pod->swork, &bNen, pod->eigv + st, &bN, &zero, pod->low, &bNen));
345: }
347: if (pod->monitor) {
348: PetscMPIInt rank;
349: Mat C;
351: MPI_Comm_rank(PetscObjectComm((PetscObject)guess), &rank);
352: if (rank == 0) {
353: PetscPrintf(PetscObjectComm((PetscObject)guess), " C = ");
354: MatCreateSeqDense(PETSC_COMM_SELF, pod->n, pod->n, pod->corr, &C);
355: MatDenseSetLDA(C, pod->maxn);
356: MatView(C, NULL);
357: MatDestroy(&C);
358: PetscPrintf(PetscObjectComm((PetscObject)guess), " YHAY = ");
359: MatCreateSeqDense(PETSC_COMM_SELF, pod->n, pod->n, pod->yhay, &C);
360: MatDenseSetLDA(C, pod->maxn);
361: MatView(C, NULL);
362: MatDestroy(&C);
363: }
364: PetscPrintf(PetscObjectComm((PetscObject)guess), " KSPGuessPOD: basis %" PetscBLASInt_FMT ", energy fractions = ", pod->nen);
365: for (i = pod->n - 1; i >= 0; i--) PetscPrintf(PetscObjectComm((PetscObject)guess), "%1.6e (%d) ", (double)(pod->eigs[i] / toten), i >= pod->st ? 1 : 0);
366: PetscPrintf(PetscObjectComm((PetscObject)guess), "\n");
367: if (PetscDefined(USE_DEBUG)) {
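/* consistency check: reconstruct each snapshot from the retained POD basis and verify that
   the squared reconstruction error stays below the discarded energy (toten - parten) */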
368: for (i = 0; i < pod->n; i++) {
369: Vec v;
370: PetscInt j;
371: PetscBLASInt bNen, ione = 1;
373: VecDuplicate(pod->xsnap[i], &v);
374: VecCopy(pod->xsnap[i], v);
375: PetscBLASIntCast(pod->nen, &bNen);
376: PetscCallBLAS("BLASgemv", BLASgemv_("T", &bN, &bNen, &one, pod->eigv + pod->st * pod->n, &bN, pod->corr + pod->maxn * i, &ione, &zero, pod->swork, &ione));
377: PetscCallBLAS("BLASgemv", BLASgemv_("N", &bN, &bNen, &one, pod->eigv + pod->st * pod->n, &bN, pod->swork, &ione, &zero, pod->swork + pod->n, &ione));
378: for (j = 0; j < pod->n; j++) pod->swork[j] = -pod->swork[pod->n + j];
379: VecMAXPY(v, pod->n, pod->swork, pod->xsnap);
380: VecDot(v, v, pod->swork);
381: MPIU_Allreduce(pod->swork, pod->swork + 1, 1, MPIU_SCALAR, MPIU_SUM, PetscObjectComm((PetscObject)guess));
382: PetscPrintf(PetscObjectComm((PetscObject)guess), " Error projection %" PetscInt_FMT ": %g (expected lower than %g)\n", i, (double)PetscRealPart(pod->swork[1]), (double)(toten - parten));
383: VecDestroy(&v);
384: }
385: }
386: }
387: /* new tip */
388: pod->curr = (pod->curr + 1) % pod->maxn;
389: return 0;
390: }
392: static PetscErrorCode KSPGuessSetFromOptions_POD(KSPGuess guess)
393: {
394: KSPGuessPOD *pod = (KSPGuessPOD *)guess->data;
396: PetscOptionsBegin(PetscObjectComm((PetscObject)guess), ((PetscObject)guess)->prefix, "POD initial guess options", "KSPGuess");
397: PetscOptionsInt("-ksp_guess_pod_size", "Number of snapshots", NULL, pod->maxn, &pod->maxn, NULL);
398: PetscOptionsBool("-ksp_guess_pod_monitor", "Monitor initial guess generator", NULL, pod->monitor, &pod->monitor, NULL);
399: PetscOptionsReal("-ksp_guess_pod_tol", "Tolerance to retain eigenvectors", "KSPGuessSetTolerance", pod->tol, &pod->tol, NULL);
400: PetscOptionsBool("-ksp_guess_pod_Ainner", "Use the operator as inner product (must be SPD)", NULL, pod->Aspd, &pod->Aspd, NULL);
401: PetscOptionsEnd();
402: return 0;
403: }
405: static PetscErrorCode KSPGuessSetTolerance_POD(KSPGuess guess, PetscReal tol)
406: {
407: KSPGuessPOD *pod = (KSPGuessPOD *)guess->data;
409: pod->tol = tol;
410: return 0;
411: }
413: static PetscErrorCode KSPGuessView_POD(KSPGuess guess, PetscViewer viewer)
414: {
415: KSPGuessPOD *pod = (KSPGuessPOD *)guess->data;
416: PetscBool isascii;
418: PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii);
419: if (isascii) PetscViewerASCIIPrintf(viewer, "Max size %" PetscInt_FMT ", tolerance %g, Ainner %d\n", pod->maxn, (double)pod->tol, pod->Aspd);
420: return 0;
421: }
423: /*MC
424: KSPGUESSPOD - Implements a proper orthogonal decomposition based Galerkin scheme for repeated linear system solves.
426: The initial guess is obtained by solving a small, dense linear system that results from a Galerkin projection of the original problem onto a low-dimensional space spanned by the previous solutions.
427: The maximum number of snapshots to retain and the energy tolerance used to truncate the low-dimensional basis can be specified at the command line with -ksp_guess_pod_size <int> and -ksp_guess_pod_tol <real>.
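   Options Database Keys (registered by KSPGuessSetFromOptions_POD() below):
+  -ksp_guess_pod_size <int>     - Number of snapshots
.  -ksp_guess_pod_monitor <bool> - Monitor initial guess generator
.  -ksp_guess_pod_tol <real>     - Tolerance to retain eigenvectors
-  -ksp_guess_pod_Ainner <bool>  - Use the operator as inner product (must be SPD)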
429: References:
430: . * - http://www.math.uni-konstanz.de/numerik/personen/volkwein/teaching/POD-Book.pdf
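   Example usage (a minimal sketch, assuming an already configured KSP object ksp):
.vb
   KSPGuess guess;
   KSPGetGuess(ksp, &guess);
   KSPGuessSetType(guess, KSPGUESSPOD);
   KSPGuessSetTolerance(guess, 1.e-6);
.ve
   or, from the command line, -ksp_guess_type pod -ksp_guess_pod_size 10 -ksp_guess_pod_tol 1.e-6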
432: Level: intermediate
434: .seealso: [](chapter_ksp), `KSPGuess`, `KSPGuessType`, `KSPGuessCreate()`, `KSPSetGuess()`, `KSPGetGuess()`
435: M*/
436: PetscErrorCode KSPGuessCreate_POD(KSPGuess guess)
437: {
438: KSPGuessPOD *pod;
440: PetscNew(&pod);
441: pod->maxn = 10;
442: pod->tol = PETSC_MACHINE_EPSILON;
443: guess->data = pod;
445: guess->ops->setfromoptions = KSPGuessSetFromOptions_POD;
446: guess->ops->destroy = KSPGuessDestroy_POD;
447: guess->ops->settolerance = KSPGuessSetTolerance_POD;
448: guess->ops->setup = KSPGuessSetUp_POD;
449: guess->ops->view = KSPGuessView_POD;
450: guess->ops->reset = KSPGuessReset_POD;
451: guess->ops->update = KSPGuessUpdate_POD;
452: guess->ops->formguess = KSPGuessFormGuess_POD;
453: return 0;
454: }