Actual source code: xyt.c


/*
Module Name: xyt
Module Info:

author:  Henry M. Tufo III
e-mail:  hmt@asci.uchicago.edu
contact:
+--------------------------------+--------------------------------+
|MCS Division - Building 221     |Department of Computer Science  |
|Argonne National Laboratory     |Ryerson 152                     |
|9700 S. Cass Avenue             |The University of Chicago       |
|Argonne, IL  60439              |Chicago, IL  60637              |
|(630) 252-5354/5986 ph/fx       |(773) 702-6019/8487 ph/fx       |
+--------------------------------+--------------------------------+

Last Modification: 3.20.01
*/
#include <../src/ksp/pc/impls/tfs/tfs.h>

#define LEFT  -1
#define RIGHT 1
#define BOTH  0

typedef struct xyt_solver_info {
  PetscInt      n, m, n_global, m_global;
  PetscInt      nnz, max_nnz, msg_buf_sz;
  PetscInt     *nsep, *lnsep, *fo, nfo, *stages;
  PetscInt     *xcol_sz, *xcol_indices;
  PetscScalar **xcol_vals, *x, *solve_uu, *solve_w;
  PetscInt     *ycol_sz, *ycol_indices;
  PetscScalar **ycol_vals, *y;
  PetscInt      nsolves;
  PetscScalar   tot_solve_time;
} xyt_info;
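/* Note on the storage scheme above (explanatory comment, not in the original
   source): each of the m columns of X is stored as the dense run between its
   first and last nonzero.  xcol_indices holds (offset, length) pairs,
   terminated by a -1 sentinel, and xcol_vals[i] points at column i's run
   inside the x array.  For example, a column whose nonzeros lie in local rows
   3..5 is recorded as xcol_indices[2*i] = 3, xcol_indices[2*i+1] = 3, with the
   three values at xcol_vals[i][0..2].  Y uses the identical layout. */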

typedef struct matvec_info {
  PetscInt     n, m, n_global, m_global;
  PetscInt    *local2global;
  PCTFS_gs_ADT PCTFS_gs_handle;
  PetscErrorCode (*matvec)(struct matvec_info *, PetscScalar *, PetscScalar *);
  void *grid_data;
} mv_info;

struct xyt_CDT {
  PetscInt  id;
  PetscInt  ns;
  PetscInt  level;
  xyt_info *info;
  mv_info  *mvi;
};

static PetscInt n_xyt         = 0;
static PetscInt n_xyt_handles = 0;

/* prototypes */
static PetscErrorCode do_xyt_solve(xyt_ADT xyt_handle, PetscScalar *rhs);
static PetscErrorCode check_handle(xyt_ADT xyt_handle);
static PetscErrorCode det_separators(xyt_ADT xyt_handle);
static PetscErrorCode do_matvec(mv_info *A, PetscScalar *v, PetscScalar *u);
static PetscErrorCode xyt_generate(xyt_ADT xyt_handle);
static PetscErrorCode do_xyt_factor(xyt_ADT xyt_handle);
static mv_info       *set_mvi(PetscInt *local2global, PetscInt n, PetscInt m, PetscErrorCode (*matvec)(mv_info *, PetscScalar *, PetscScalar *), void *grid_data);

xyt_ADT XYT_new(void)
{
  xyt_ADT xyt_handle;

  /* rolling count on n_xyt ... potential overflow problem here */
  n_xyt_handles++;
  xyt_handle       = (xyt_ADT)malloc(sizeof(struct xyt_CDT));
  xyt_handle->id   = ++n_xyt;
  xyt_handle->info = NULL;
  xyt_handle->mvi  = NULL;

  return (xyt_handle);
}

PetscErrorCode XYT_factor(xyt_ADT   xyt_handle,                                           /* prev. allocated xyt  handle */
                          PetscInt *local2global,                                         /* global column mapping       */
                          PetscInt  n,                                                    /* local num rows              */
                          PetscInt  m,                                                    /* local num cols              */
                          PetscErrorCode (*matvec)(void *, PetscScalar *, PetscScalar *), /* b_loc=A_local.x_loc         */
                          void *grid_data)                                                /* grid data for matvec        */
{
  PCTFS_comm_init();
  check_handle(xyt_handle);

  /* only 2^k processor counts supported for now, and all nodes participate */

  /* space for X info */
  xyt_handle->info = (xyt_info *)malloc(sizeof(xyt_info));

  /* set up matvec handles */
  xyt_handle->mvi = set_mvi(local2global, n, m, (PetscErrorCode(*)(mv_info *, PetscScalar *, PetscScalar *))matvec, grid_data);

  /* matrix is assumed to be of full rank */
  /* LATER we can reset to indicate rank deficiency */
  xyt_handle->ns = 0;

  /* determine separators and generate firing order - NB xyt info set here */
  det_separators(xyt_handle);

  return (do_xyt_factor(xyt_handle));
}

PetscErrorCode XYT_solve(xyt_ADT xyt_handle, PetscScalar *x, PetscScalar *b)
{
  PCTFS_comm_init();
  check_handle(xyt_handle);

  /* need to copy b into x? */
  if (b) PCTFS_rvec_copy(x, b, xyt_handle->mvi->n);
  return do_xyt_solve(xyt_handle, x);
}
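/* A minimal usage sketch (added comment, not part of the original source);
   my_matvec, my_local2global, n_loc, m_loc, and my_grid are hypothetical
   caller-supplied names:

     xyt_ADT handle = XYT_new();
     XYT_factor(handle, my_local2global, n_loc, m_loc, my_matvec, my_grid);
     XYT_solve(handle, x_loc, b_loc);    <-- once per right-hand side
     XYT_free(handle);
*/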

PetscErrorCode XYT_free(xyt_ADT xyt_handle)
{
  PCTFS_comm_init();
  check_handle(xyt_handle);
  n_xyt_handles--;

  free(xyt_handle->info->nsep);
  free(xyt_handle->info->lnsep);
  free(xyt_handle->info->fo);
  free(xyt_handle->info->stages);
  free(xyt_handle->info->solve_uu);
  free(xyt_handle->info->solve_w);
  free(xyt_handle->info->x);
  free(xyt_handle->info->xcol_vals);
  free(xyt_handle->info->xcol_sz);
  free(xyt_handle->info->xcol_indices);
  free(xyt_handle->info->y);
  free(xyt_handle->info->ycol_vals);
  free(xyt_handle->info->ycol_sz);
  free(xyt_handle->info->ycol_indices);
  free(xyt_handle->info);
  free(xyt_handle->mvi->local2global);
  PCTFS_gs_free(xyt_handle->mvi->PCTFS_gs_handle);
  free(xyt_handle->mvi);
  free(xyt_handle);

  /* if the check fails we nuke */
  /* if NULL pointer passed to free we nuke */
  /* if the calls to free fail that's not my problem */
  return (0);
}

/* This function is currently not used */
PetscErrorCode XYT_stats(xyt_ADT xyt_handle)
{
  PetscInt    op[]  = {NON_UNIFORM, GL_MIN, GL_MAX, GL_ADD, GL_MIN, GL_MAX, GL_ADD, GL_MIN, GL_MAX, GL_ADD};
  PetscInt    fop[] = {NON_UNIFORM, GL_MIN, GL_MAX, GL_ADD};
  PetscInt    vals[9], work[9];
  PetscScalar fvals[3], fwork[3];

  PCTFS_comm_init();
  check_handle(xyt_handle);

  /* if factorization not done there are no stats */
  if (!xyt_handle->info || !xyt_handle->mvi) {
    if (!PCTFS_my_id) PetscPrintf(PETSC_COMM_WORLD, "XYT_stats() :: no stats available!\n");
    return 0;
  }

  vals[0] = vals[1] = vals[2] = xyt_handle->info->nnz;
  vals[3] = vals[4] = vals[5] = xyt_handle->mvi->n;
  vals[6] = vals[7] = vals[8] = xyt_handle->info->msg_buf_sz;
  PCTFS_giop(vals, work, PETSC_STATIC_ARRAY_LENGTH(op) - 1, op);

  fvals[0] = fvals[1] = fvals[2] = xyt_handle->info->tot_solve_time / xyt_handle->info->nsolves++;
  PCTFS_grop(fvals, fwork, PETSC_STATIC_ARRAY_LENGTH(fop) - 1, fop);

  if (!PCTFS_my_id) {
    PetscPrintf(PETSC_COMM_WORLD, "%d :: min   xyt_nnz=%" PetscInt_FMT "\n", PCTFS_my_id, vals[0]);
    PetscPrintf(PETSC_COMM_WORLD, "%d :: max   xyt_nnz=%" PetscInt_FMT "\n", PCTFS_my_id, vals[1]);
    PetscPrintf(PETSC_COMM_WORLD, "%d :: avg   xyt_nnz=%g\n", PCTFS_my_id, (double)(1.0 * vals[2] / PCTFS_num_nodes));
    PetscPrintf(PETSC_COMM_WORLD, "%d :: tot   xyt_nnz=%" PetscInt_FMT "\n", PCTFS_my_id, vals[2]);
    PetscPrintf(PETSC_COMM_WORLD, "%d :: xyt   C(2d)  =%g\n", PCTFS_my_id, (double)(vals[2] / (PetscPowReal(1.0 * vals[5], 1.5))));
    PetscPrintf(PETSC_COMM_WORLD, "%d :: xyt   C(3d)  =%g\n", PCTFS_my_id, (double)(vals[2] / (PetscPowReal(1.0 * vals[5], 1.6667))));
    PetscPrintf(PETSC_COMM_WORLD, "%d :: min   xyt_n  =%" PetscInt_FMT "\n", PCTFS_my_id, vals[3]);
    PetscPrintf(PETSC_COMM_WORLD, "%d :: max   xyt_n  =%" PetscInt_FMT "\n", PCTFS_my_id, vals[4]);
    PetscPrintf(PETSC_COMM_WORLD, "%d :: avg   xyt_n  =%g\n", PCTFS_my_id, (double)(1.0 * vals[5] / PCTFS_num_nodes));
    PetscPrintf(PETSC_COMM_WORLD, "%d :: tot   xyt_n  =%" PetscInt_FMT "\n", PCTFS_my_id, vals[5]);
    PetscPrintf(PETSC_COMM_WORLD, "%d :: min   xyt_buf=%" PetscInt_FMT "\n", PCTFS_my_id, vals[6]);
    PetscPrintf(PETSC_COMM_WORLD, "%d :: max   xyt_buf=%" PetscInt_FMT "\n", PCTFS_my_id, vals[7]);
    PetscPrintf(PETSC_COMM_WORLD, "%d :: avg   xyt_buf=%g\n", PCTFS_my_id, (double)(1.0 * vals[8] / PCTFS_num_nodes));
    PetscPrintf(PETSC_COMM_WORLD, "%d :: min   xyt_slv=%g\n", PCTFS_my_id, (double)PetscRealPart(fvals[0]));
    PetscPrintf(PETSC_COMM_WORLD, "%d :: max   xyt_slv=%g\n", PCTFS_my_id, (double)PetscRealPart(fvals[1]));
    PetscPrintf(PETSC_COMM_WORLD, "%d :: avg   xyt_slv=%g\n", PCTFS_my_id, (double)PetscRealPart(fvals[2] / PCTFS_num_nodes));
  }
  return 0;
}

/*

Description: get A_local, the local portion of the global coarse matrix,
which is a row-distributed n x m matrix with n < m.
   o my_ml holds the address of the ML struct associated with A_local and the coarse grid
   o local2global holds the global number of column i (i=0,...,m-1)
   o local2global holds the global number of row    i (i=0,...,n-1)
   o mylocmatvec performs A_local . vec_local (note that gs is performed using
   PCTFS_gs_init/gop).

mylocmatvec = my_ml->Amat[grid_tag].matvec->external;
mylocmatvec (void :: void *data, double *in, double *out)
*/
static PetscErrorCode do_xyt_factor(xyt_ADT xyt_handle)
{
  return xyt_generate(xyt_handle);
}
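/* Explanatory note (added, not in the original source): xyt_generate() below
   builds X and Y = A.X column by column, orthogonalizing each new A.v_l
   against the previous columns of Y and normalizing so that the columns of Y
   are orthonormal.  Hence Y^T.A.X = Y^T.Y = I and, in exact arithmetic,
   A^{-1} = X.Y^T once all m columns are generated; do_xyt_solve() applies
   exactly this as x = X.(Y^T.b). */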

static PetscErrorCode xyt_generate(xyt_ADT xyt_handle)
{
  PetscInt      i, j, k, idx;
  PetscInt      dim, col;
  PetscScalar  *u, *uu, *v, *z, *w, alpha, alpha_w;
  PetscInt     *segs;
  PetscInt      op[] = {GL_ADD, 0};
  PetscInt      off, len;
  PetscScalar  *x_ptr, *y_ptr;
  PetscInt     *iptr, flag;
  PetscInt      start = 0, end, work;
  PetscInt      op2[] = {GL_MIN, 0};
  PCTFS_gs_ADT  PCTFS_gs_handle;
  PetscInt     *nsep, *lnsep, *fo;
  PetscInt      a_n            = xyt_handle->mvi->n;
  PetscInt      a_m            = xyt_handle->mvi->m;
  PetscInt     *a_local2global = xyt_handle->mvi->local2global;
  PetscInt      level;
  PetscInt      n, m;
  PetscInt     *xcol_sz, *xcol_indices, *stages;
  PetscScalar **xcol_vals, *x;
  PetscInt     *ycol_sz, *ycol_indices;
  PetscScalar **ycol_vals, *y;
  PetscInt      n_global;
  PetscInt      xt_nnz = 0, xt_max_nnz = 0;
  PetscInt      yt_nnz = 0, yt_max_nnz = 0;
  PetscBLASInt  i1  = 1, dlen;
  PetscScalar   dm1 = -1.0;

  n               = xyt_handle->mvi->n;
  nsep            = xyt_handle->info->nsep;
  lnsep           = xyt_handle->info->lnsep;
  fo              = xyt_handle->info->fo;
  end             = lnsep[0];
  level           = xyt_handle->level;
  PCTFS_gs_handle = xyt_handle->mvi->PCTFS_gs_handle;

  /* is there a null space? */
  /* LATER add in ability to detect null space by checking alpha */
  for (i = 0, j = 0; i <= level; i++) j += nsep[i];

  m = j - xyt_handle->ns;
  if (m != j) PetscPrintf(PETSC_COMM_WORLD, "xyt_generate() :: null space exists %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT "\n", m, j, xyt_handle->ns);

  PetscInfo(0, "xyt_generate() :: X(%" PetscInt_FMT ",%" PetscInt_FMT ")\n", n, m);

  /* get and initialize storage for x local         */
  /* note that x local is nxm and stored by columns */
  xcol_sz      = (PetscInt *)malloc(m * sizeof(PetscInt));
  xcol_indices = (PetscInt *)malloc((2 * m + 1) * sizeof(PetscInt));
  xcol_vals    = (PetscScalar **)malloc(m * sizeof(PetscScalar *));
  for (i = j = 0; i < m; i++, j += 2) {
    xcol_indices[j] = xcol_indices[j + 1] = xcol_sz[i] = -1;
    xcol_vals[i]                                       = NULL;
  }
  xcol_indices[j] = -1;

  /* get and initialize storage for y local         */
  /* note that y local is nxm and stored by columns */
  ycol_sz      = (PetscInt *)malloc(m * sizeof(PetscInt));
  ycol_indices = (PetscInt *)malloc((2 * m + 1) * sizeof(PetscInt));
  ycol_vals    = (PetscScalar **)malloc(m * sizeof(PetscScalar *));
  for (i = j = 0; i < m; i++, j += 2) {
    ycol_indices[j] = ycol_indices[j + 1] = ycol_sz[i] = -1;
    ycol_vals[i]                                       = NULL;
  }
  ycol_indices[j] = -1;

  /* size of separators for each sub-hc working from bottom of tree to top */
  /* this looks like nsep[]=segments */
  stages = (PetscInt *)malloc((level + 1) * sizeof(PetscInt));
  segs   = (PetscInt *)malloc((level + 1) * sizeof(PetscInt));
  PCTFS_ivec_zero(stages, level + 1);
  PCTFS_ivec_copy(segs, nsep, level + 1);
  for (i = 0; i < level; i++) segs[i + 1] += segs[i];
  stages[0] = segs[0];
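  /* For illustration (added comment): with level = 2 and nsep = {4, 2, 1},
     the prefix sums give segs = {4, 6, 7} - columns 0..3 come from the
     on-processor separators, 4..5 from the first sub-hc separator, and 6
     from the top-level separator; stages[] records where each stage ends so
     the solve phase can stage its communication in PCTFS_ssgl_radd(). */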

  /* temporary vectors  */
  u  = (PetscScalar *)malloc(n * sizeof(PetscScalar));
  z  = (PetscScalar *)malloc(n * sizeof(PetscScalar));
  v  = (PetscScalar *)malloc(a_m * sizeof(PetscScalar));
  uu = (PetscScalar *)malloc(m * sizeof(PetscScalar));
  w  = (PetscScalar *)malloc(m * sizeof(PetscScalar));

  /* extra nnz due to replication of vertices across separators */
  for (i = 1, j = 0; i <= level; i++) j += nsep[i];

  /* storage for sparse x values */
  n_global   = xyt_handle->info->n_global;
  xt_max_nnz = yt_max_nnz = (PetscInt)(2.5 * PetscPowReal(1.0 * n_global, 1.6667) + j * n / 2) / PCTFS_num_nodes;
  x                       = (PetscScalar *)malloc(xt_max_nnz * sizeof(PetscScalar));
  y                       = (PetscScalar *)malloc(yt_max_nnz * sizeof(PetscScalar));
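  /* Note (added comment): this allocation is a heuristic first guess, not a
     bound - the 2.5 * n_global^(5/3) term models separator fill for a 3D
     problem (compare the C(3d) statistic in XYT_stats()), the j*n/2 term
     covers vertices replicated across separators, and the total is divided
     among the PCTFS_num_nodes processors.  If the guess is too small the
     arrays are doubled on demand below. */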

  /* LATER - can embed next sep to fire in gs */
  /* time to make the donuts - generate X factor */
  for (dim = i = j = 0; i < m; i++) {
    /* time to move to the next level? */
    while (i == segs[dim]) {
      PetscCheck(dim != level, PETSC_COMM_SELF, PETSC_ERR_PLIB, "dim about to exceed level");
      stages[dim++] = i;
      end += lnsep[dim];
    }
    stages[dim] = i;

    /* which column are we firing? */
    /* i.e. set v_l */
    /* use new seps and do global min across hc to determine which one to fire */
    (start < end) ? (col = fo[start]) : (col = INT_MAX);
    PCTFS_giop_hc(&col, &work, 1, op2, dim);

    /* shouldn't need this */
    if (col == INT_MAX) {
      PetscInfo(0, "hey ... col==INT_MAX??\n");
      continue;
    }

    /* do I own it? I should */
    PCTFS_rvec_zero(v, a_m);
    if (col == fo[start]) {
      start++;
      idx = PCTFS_ivec_linear_search(col, a_local2global, a_n);
      if (idx != -1) {
        v[idx] = 1.0;
        j++;
      } else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_PLIB, "NOT FOUND!");
    } else {
      idx = PCTFS_ivec_linear_search(col, a_local2global, a_m);
      if (idx != -1) v[idx] = 1.0;
    }

    /* perform u = A.v_l */
    PCTFS_rvec_zero(u, n);
    do_matvec(xyt_handle->mvi, v, u);

    /* uu = Y^T.u_l (local portion) */
    /* technically only need to zero out first i entries */
    /* later turn this into an XYT_solve call ? */
    PCTFS_rvec_zero(uu, m);
    y_ptr = y;
    iptr  = ycol_indices;
    for (k = 0; k < i; k++) {
      off = *iptr++;
      len = *iptr++;
      PetscBLASIntCast(len, &dlen);
      PetscCallBLAS("BLASdot", uu[k] = BLASdot_(&dlen, u + off, &i1, y_ptr, &i1));
      y_ptr += len;
    }

    /* uu = Y^T.u_l (comm portion) */
    PCTFS_ssgl_radd(uu, w, dim, stages);

    /* z = X.uu */
    PCTFS_rvec_zero(z, n);
    x_ptr = x;
    iptr  = xcol_indices;
    for (k = 0; k < i; k++) {
      off = *iptr++;
      len = *iptr++;
      PetscBLASIntCast(len, &dlen);
      PetscCallBLAS("BLASaxpy", BLASaxpy_(&dlen, &uu[k], x_ptr, &i1, z + off, &i1));
      x_ptr += len;
    }

    /* compute v_l = v_l - z */
    PCTFS_rvec_zero(v + a_n, a_m - a_n);
    PetscBLASIntCast(n, &dlen);
    PetscCallBLAS("BLASaxpy", BLASaxpy_(&dlen, &dm1, z, &i1, v, &i1));

    /* compute u_l = A.v_l */
    if (a_n != a_m) PCTFS_gs_gop_hc(PCTFS_gs_handle, v, "+\0", dim);
    PCTFS_rvec_zero(u, n);
    do_matvec(xyt_handle->mvi, v, u);

    /* compute sqrt(alpha) = sqrt(u_l^T.u_l) - local portion */
    PetscBLASIntCast(n, &dlen);
    PetscCallBLAS("BLASdot", alpha = BLASdot_(&dlen, u, &i1, u, &i1));
    /* compute sqrt(alpha) = sqrt(u_l^T.u_l) - comm portion */
    PCTFS_grop_hc(&alpha, &alpha_w, 1, op, dim);

    alpha = (PetscScalar)PetscSqrtReal((PetscReal)alpha);

    /* check for small alpha                             */
    /* LATER use this to detect and determine null space */
    PetscCheck(PetscAbsScalar(alpha) >= 1.0e-14, PETSC_COMM_SELF, PETSC_ERR_PLIB, "bad alpha! %g", (double)PetscAbsScalar(alpha));

    /* compute v_l = v_l/sqrt(alpha), u_l = u_l/sqrt(alpha) */
    PCTFS_rvec_scale(v, 1.0 / alpha, n);
    PCTFS_rvec_scale(u, 1.0 / alpha, n);

    /* add newly generated column, v_l, to X */
    flag = 1;
    off = len = 0;
    for (k = 0; k < n; k++) {
      if (v[k] != 0.0) {
        len = k;
        if (flag) {
          off  = k;
          flag = 0;
        }
      }
    }

    len -= (off - 1);
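    /* Worked example (added comment): for n = 6 and v = {0, 0, a, 0, b, 0}
       the scan above leaves off = 2 (first nonzero) and len = 4 (index of the
       last nonzero), so len -= (off - 1) yields 3, the size of the dense run
       v[2..4] - interior zeros included - that is copied into x below. */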

    if (len > 0) {
      if ((xt_nnz + len) > xt_max_nnz) {
        PetscInfo(0, "increasing space for X by 2x!\n");
        xt_max_nnz *= 2;
        x_ptr = (PetscScalar *)malloc(xt_max_nnz * sizeof(PetscScalar));
        PCTFS_rvec_copy(x_ptr, x, xt_nnz);
        free(x);
        x = x_ptr;
        x_ptr += xt_nnz;
      }
      xt_nnz += len;
      PCTFS_rvec_copy(x_ptr, v + off, len);

      xcol_indices[2 * i] = off;
      xcol_sz[i] = xcol_indices[2 * i + 1] = len;
      xcol_vals[i]                         = x_ptr;
    } else {
      xcol_indices[2 * i] = 0;
      xcol_sz[i] = xcol_indices[2 * i + 1] = 0;
      xcol_vals[i]                         = x_ptr;
    }

    /* add newly generated column, u_l, to Y */
    flag = 1;
    off = len = 0;
    for (k = 0; k < n; k++) {
      if (u[k] != 0.0) {
        len = k;
        if (flag) {
          off  = k;
          flag = 0;
        }
      }
    }

    len -= (off - 1);

    if (len > 0) {
      if ((yt_nnz + len) > yt_max_nnz) {
        PetscInfo(0, "increasing space for Y by 2x!\n");
        yt_max_nnz *= 2;
        y_ptr = (PetscScalar *)malloc(yt_max_nnz * sizeof(PetscScalar));
        PCTFS_rvec_copy(y_ptr, y, yt_nnz);
        free(y);
        y = y_ptr;
        y_ptr += yt_nnz;
      }
      yt_nnz += len;
      PCTFS_rvec_copy(y_ptr, u + off, len);

      ycol_indices[2 * i] = off;
      ycol_sz[i] = ycol_indices[2 * i + 1] = len;
      ycol_vals[i]                         = y_ptr;
    } else {
      ycol_indices[2 * i] = 0;
      ycol_sz[i] = ycol_indices[2 * i + 1] = 0;
      ycol_vals[i]                         = y_ptr;
    }
  }

  /* close off stages for execution phase */
  while (dim != level) {
    stages[dim++] = i;
    PetscInfo(0, "disconnected!!! dim(%" PetscInt_FMT ")!=level(%" PetscInt_FMT ")\n", dim, level);
  }
  stages[dim] = i;

  xyt_handle->info->n            = xyt_handle->mvi->n;
  xyt_handle->info->m            = m;
  xyt_handle->info->nnz          = xt_nnz + yt_nnz;
  xyt_handle->info->max_nnz      = xt_max_nnz + yt_max_nnz;
  xyt_handle->info->msg_buf_sz   = stages[level] - stages[0];
  xyt_handle->info->solve_uu     = (PetscScalar *)malloc(m * sizeof(PetscScalar));
  xyt_handle->info->solve_w      = (PetscScalar *)malloc(m * sizeof(PetscScalar));
  xyt_handle->info->x            = x;
  xyt_handle->info->xcol_vals    = xcol_vals;
  xyt_handle->info->xcol_sz      = xcol_sz;
  xyt_handle->info->xcol_indices = xcol_indices;
  xyt_handle->info->stages       = stages;
  xyt_handle->info->y            = y;
  xyt_handle->info->ycol_vals    = ycol_vals;
  xyt_handle->info->ycol_sz      = ycol_sz;
  xyt_handle->info->ycol_indices = ycol_indices;

  free(segs);
  free(u);
  free(v);
  free(uu);
  free(z);
  free(w);

  return (0);
}

static PetscErrorCode do_xyt_solve(xyt_ADT xyt_handle, PetscScalar *uc)
{
  PetscInt     off, len, *iptr;
  PetscInt     level        = xyt_handle->level;
  PetscInt     n            = xyt_handle->info->n;
  PetscInt     m            = xyt_handle->info->m;
  PetscInt    *stages       = xyt_handle->info->stages;
  PetscInt    *xcol_indices = xyt_handle->info->xcol_indices;
  PetscInt    *ycol_indices = xyt_handle->info->ycol_indices;
  PetscScalar *x_ptr, *y_ptr, *uu_ptr;
  PetscScalar *solve_uu = xyt_handle->info->solve_uu;
  PetscScalar *solve_w  = xyt_handle->info->solve_w;
  PetscScalar *x        = xyt_handle->info->x;
  PetscScalar *y        = xyt_handle->info->y;
  PetscBLASInt i1       = 1, dlen;

  uu_ptr = solve_uu;
  PCTFS_rvec_zero(uu_ptr, m);

  /* x  = X.Y^T.b */
  /* uu = Y^T.b */
  for (y_ptr = y, iptr = ycol_indices; *iptr != -1; y_ptr += len) {
    off = *iptr++;
    len = *iptr++;
    PetscBLASIntCast(len, &dlen);
    PetscCallBLAS("BLASdot", *uu_ptr++ = BLASdot_(&dlen, uc + off, &i1, y_ptr, &i1));
  }

  /* communication of beta */
  uu_ptr = solve_uu;
  if (level) PCTFS_ssgl_radd(uu_ptr, solve_w, level, stages);
  PCTFS_rvec_zero(uc, n);

  /* x = X.uu */
  for (x_ptr = x, iptr = xcol_indices; *iptr != -1; x_ptr += len) {
    off = *iptr++;
    len = *iptr++;
    PetscBLASIntCast(len, &dlen);
    PetscCallBLAS("BLASaxpy", BLASaxpy_(&dlen, uu_ptr++, x_ptr, &i1, uc + off, &i1));
  }
  return 0;
}

static PetscErrorCode check_handle(xyt_ADT xyt_handle)
{
  PetscInt vals[2], work[2], op[] = {NON_UNIFORM, GL_MIN, GL_MAX};

  PetscCheck(xyt_handle, PETSC_COMM_SELF, PETSC_ERR_PLIB, "check_handle() :: bad handle :: NULL %p", (void *)xyt_handle);

  vals[0] = vals[1] = xyt_handle->id;
  PCTFS_giop(vals, work, PETSC_STATIC_ARRAY_LENGTH(op) - 1, op);
  PetscCheck(vals[0] == vals[1] && xyt_handle->id > 0, PETSC_COMM_SELF, PETSC_ERR_PLIB, "check_handle() :: bad handle :: id mismatch min/max %" PetscInt_FMT "/%" PetscInt_FMT " %" PetscInt_FMT, vals[0], vals[1], xyt_handle->id);
  return 0;
}

static PetscErrorCode det_separators(xyt_ADT xyt_handle)
{
  PetscInt     i, ct, id;
  PetscInt     mask, edge, *iptr;
  PetscInt    *dir, *used;
  PetscInt     sum[4], w[4];
  PetscScalar  rsum[4], rw[4];
  PetscInt     op[] = {GL_ADD, 0};
  PetscScalar *lhs, *rhs;
  PetscInt    *nsep, *lnsep, *fo, nfo = 0;
  PCTFS_gs_ADT PCTFS_gs_handle = xyt_handle->mvi->PCTFS_gs_handle;
  PetscInt    *local2global    = xyt_handle->mvi->local2global;
  PetscInt     n               = xyt_handle->mvi->n;
  PetscInt     m               = xyt_handle->mvi->m;
  PetscInt     level           = xyt_handle->level;
  PetscInt     shared          = 0;

  dir   = (PetscInt *)malloc(sizeof(PetscInt) * (level + 1));
  nsep  = (PetscInt *)malloc(sizeof(PetscInt) * (level + 1));
  lnsep = (PetscInt *)malloc(sizeof(PetscInt) * (level + 1));
  fo    = (PetscInt *)malloc(sizeof(PetscInt) * (n + 1));
  used  = (PetscInt *)malloc(sizeof(PetscInt) * n);

  PCTFS_ivec_zero(dir, level + 1);
  PCTFS_ivec_zero(nsep, level + 1);
  PCTFS_ivec_zero(lnsep, level + 1);
  PCTFS_ivec_set(fo, -1, n + 1);
  PCTFS_ivec_zero(used, n);

  lhs = (PetscScalar *)malloc(sizeof(PetscScalar) * m);
  rhs = (PetscScalar *)malloc(sizeof(PetscScalar) * m);

  /* determine the # of unique dof */
  PCTFS_rvec_zero(lhs, m);
  PCTFS_rvec_set(lhs, 1.0, n);
  PCTFS_gs_gop_hc(PCTFS_gs_handle, lhs, "+\0", level);
  PetscInfo(0, "done first PCTFS_gs_gop_hc\n");
  PCTFS_rvec_zero(rsum, 2);
  for (i = 0; i < n; i++) {
    if (lhs[i] != 0.0) {
      rsum[0] += 1.0 / lhs[i];
      rsum[1] += lhs[i];
    }
    if (lhs[i] != 1.0) shared = 1;
  }

  PCTFS_grop_hc(rsum, rw, 2, op, level);
  rsum[0] += 0.1;
  rsum[1] += 0.1;

  xyt_handle->info->n_global = xyt_handle->info->m_global = (PetscInt)rsum[0];
  xyt_handle->mvi->n_global = xyt_handle->mvi->m_global = (PetscInt)rsum[0];
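  /* Note (added comment, not in the original source): after the gather-scatter
     above, lhs[i] holds the number of processors sharing dof i, so globally
     summing 1.0/lhs[i] over all local instances counts each unique dof exactly
     once.  The 0.1 added to rsum[] guards the truncating cast to PetscInt
     against floating-point round-off. */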

  /* determine separator sets top down */
  if (shared) {
    /* solution is to do as in the symmetric shared case but then */
    /* pick the sub-hc with the most free dofs and do a mat-vec   */
    /* and pick up the responses on the other sub-hc from the     */
    /* initial separator set obtained from the symm. shared case  */
    SETERRQ(PETSC_COMM_SELF, PETSC_ERR_PLIB, "shared dof separator determination not ready ... see hmt!!!");
    /* [dead code deleted since it is unlikely to be completed] */
  } else {
    for (iptr = fo + n, id = PCTFS_my_id, mask = PCTFS_num_nodes >> 1, edge = level; edge > 0; edge--, mask >>= 1) {
      /* set rsh of hc, fire, and collect lhs responses */
      (id < mask) ? PCTFS_rvec_zero(lhs, m) : PCTFS_rvec_set(lhs, 1.0, m);
      PCTFS_gs_gop_hc(PCTFS_gs_handle, lhs, "+\0", edge);

      /* set lsh of hc, fire, and collect rhs responses */
      (id < mask) ? PCTFS_rvec_set(rhs, 1.0, m) : PCTFS_rvec_zero(rhs, m);
      PCTFS_gs_gop_hc(PCTFS_gs_handle, rhs, "+\0", edge);

      /* count number of dofs I own that have signal and not in sep set */
      for (PCTFS_ivec_zero(sum, 4), ct = i = 0; i < n; i++) {
        if (!used[i]) {
          /* number of unmarked dofs on node */
          ct++;
          /* number of dofs to be marked on lhs hc */
          if ((id < mask) && (lhs[i] != 0.0)) sum[0]++;
          /* number of dofs to be marked on rhs hc */
          if ((id >= mask) && (rhs[i] != 0.0)) sum[1]++;
        }
      }

      /* for the non-symmetric case we need separators of width 2 */
      /* so take both sides */
      (id < mask) ? (sum[2] = ct) : (sum[3] = ct);
      PCTFS_giop_hc(sum, w, 4, op, edge);

      ct = 0;
      if (id < mask) {
        /* mark dofs I own that have signal and not in sep set */
        for (i = 0; i < n; i++) {
          if ((!used[i]) && (lhs[i] != 0.0)) {
            ct++;
            nfo++;
            *--iptr = local2global[i];
            used[i] = edge;
          }
        }
        /* LSH hc summation of ct should be sum[0] */
      } else {
        /* mark dofs I own that have signal and not in sep set */
        for (i = 0; i < n; i++) {
          if ((!used[i]) && (rhs[i] != 0.0)) {
            ct++;
            nfo++;
            *--iptr = local2global[i];
            used[i] = edge;
          }
        }
        /* RSH hc summation of ct should be sum[1] */
      }

      if (ct > 1) PCTFS_ivec_sort(iptr, ct);
      lnsep[edge] = ct;
      nsep[edge]  = sum[0] + sum[1];
      dir[edge]   = BOTH;

      /* LATER or we can recur on these to order seps at this level */
      /* do we need full set of separators for this?                */

      /* fold rhs hc into lower */
      if (id >= mask) id -= mask;
    }
  }

  /* level 0 is on processor case - so mark the remainder */
  for (ct = i = 0; i < n; i++) {
    if (!used[i]) {
      ct++;
      nfo++;
      *--iptr = local2global[i];
      used[i] = edge;
    }
  }
  if (ct > 1) PCTFS_ivec_sort(iptr, ct);
  lnsep[edge] = ct;
  nsep[edge]  = ct;
  dir[edge]   = BOTH;

  xyt_handle->info->nsep  = nsep;
  xyt_handle->info->lnsep = lnsep;
  xyt_handle->info->fo    = fo;
  xyt_handle->info->nfo   = nfo;

  free(dir);
  free(lhs);
  free(rhs);
  free(used);
  return 0;
}

static mv_info *set_mvi(PetscInt *local2global, PetscInt n, PetscInt m, PetscErrorCode (*matvec)(mv_info *, PetscScalar *, PetscScalar *), void *grid_data)
{
  mv_info *mvi;

  mvi               = (mv_info *)malloc(sizeof(mv_info));
  mvi->n            = n;
  mvi->m            = m;
  mvi->n_global     = -1;
  mvi->m_global     = -1;
  mvi->local2global = (PetscInt *)malloc((m + 1) * sizeof(PetscInt));

  PCTFS_ivec_copy(mvi->local2global, local2global, m);
  mvi->local2global[m] = INT_MAX;
  mvi->matvec          = matvec;
  mvi->grid_data       = grid_data;

  /* set xyt communication handle to perform restricted matvec */
  mvi->PCTFS_gs_handle = PCTFS_gs_init(local2global, m, PCTFS_num_nodes);

  return (mvi);
}

static PetscErrorCode do_matvec(mv_info *A, PetscScalar *v, PetscScalar *u)
{
  /* the caller's matvec receives grid_data as its context pointer; the
     mv_info cast merely satisfies the stored function-pointer type */
  A->matvec((mv_info *)A->grid_data, v, u);
  return 0;
}