Actual source code: xxt.c
/*
Module Name: xxt
Module Info:

author:  Henry M. Tufo III
e-mail:  hmt@asci.uchicago.edu
contact:
+--------------------------------+--------------------------------+
|MCS Division - Building 221     |Department of Computer Science  |
|Argonne National Laboratory     |Ryerson 152                     |
|9700 S. Cass Avenue             |The University of Chicago       |
|Argonne, IL 60439               |Chicago, IL 60637               |
|(630) 252-5354/5986 ph/fx       |(773) 702-6019/8487 ph/fx       |
+--------------------------------+--------------------------------+

Last Modification: 3.20.01
*/

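/*
   Lifecycle sketch (illustrative only; mylocmatvec and grid_data stand in
   for the caller's local matvec routine and its context):

     xxt_ADT handle = XXT_new();
     XXT_factor(handle, local2global, n, m, mylocmatvec, grid_data);
     XXT_solve(handle, x, b);     x is overwritten with the coarse solve of b
     XXT_free(handle);
*/
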
#include <../src/ksp/pc/impls/tfs/tfs.h>

#define LEFT  -1
#define RIGHT 1
#define BOTH  0

typedef struct xxt_solver_info {
  PetscInt      n, m, n_global, m_global;
  PetscInt      nnz, max_nnz, msg_buf_sz;
  PetscInt     *nsep, *lnsep, *fo, nfo, *stages;
  PetscInt     *col_sz, *col_indices;
  PetscScalar **col_vals, *x, *solve_uu, *solve_w;
  PetscInt      nsolves;
  PetscScalar   tot_solve_time;
} xxt_info;

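/*
   Storage note: the factor X is held column-sparse. col_indices stores an
   (offset, length) pair per column, terminated by a -1 entry; col_vals[i]
   points at column i's values inside the contiguous buffer x.
*/
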
typedef struct matvec_info {
  PetscInt     n, m, n_global, m_global;
  PetscInt    *local2global;
  PCTFS_gs_ADT PCTFS_gs_handle;
  PetscErrorCode (*matvec)(struct matvec_info *, PetscScalar *, PetscScalar *);
  void *grid_data;
} mv_info;

struct xxt_CDT {
  PetscInt  id;
  PetscInt  ns;
  PetscInt  level;
  xxt_info *info;
  mv_info  *mvi;
};

static PetscInt n_xxt = 0;

/* prototypes */
static PetscErrorCode do_xxt_solve(xxt_ADT xxt_handle, PetscScalar *rhs);
static PetscErrorCode check_handle(xxt_ADT xxt_handle);
static PetscErrorCode det_separators(xxt_ADT xxt_handle);
static PetscErrorCode do_matvec(mv_info *A, PetscScalar *v, PetscScalar *u);
static PetscErrorCode xxt_generate(xxt_ADT xxt_handle);
static PetscErrorCode do_xxt_factor(xxt_ADT xxt_handle);
static mv_info *set_mvi(PetscInt *local2global, PetscInt n, PetscInt m, PetscErrorCode (*matvec)(mv_info *, PetscScalar *, PetscScalar *), void *grid_data);

xxt_ADT XXT_new(void)
{
  xxt_ADT xxt_handle;

  /* rolling count on n_xxt ... potential overflow problem here */
  xxt_handle       = (xxt_ADT)malloc(sizeof(struct xxt_CDT));
  xxt_handle->id   = ++n_xxt;
  xxt_handle->info = NULL;
  xxt_handle->mvi  = NULL;

  return xxt_handle;
}

PetscErrorCode XXT_factor(xxt_ADT xxt_handle,     /* prev. allocated xxt handle */
                          PetscInt *local2global, /* global column mapping */
                          PetscInt n,             /* local num rows */
                          PetscInt m,             /* local num cols */
                          PetscErrorCode (*matvec)(void *, PetscScalar *, PetscScalar *), /* b_loc=A_local.x_loc */
                          void *grid_data)        /* grid data for matvec */
{
  PetscFunctionBegin;
  PetscCall(PCTFS_comm_init());
  PetscCall(check_handle(xxt_handle));

  /* only 2^k for now and all nodes participating */
  PetscCheck((1 << (xxt_handle->level = PCTFS_i_log2_num_nodes)) == PCTFS_num_nodes, PETSC_COMM_SELF, PETSC_ERR_PLIB, "only 2^k for now and MPI_COMM_WORLD!!! %d != %d", 1 << PCTFS_i_log2_num_nodes, PCTFS_num_nodes);

  /* space for X info */
  xxt_handle->info = (xxt_info *)malloc(sizeof(xxt_info));

  /* set up matvec handles */
  xxt_handle->mvi = set_mvi(local2global, n, m, (PetscErrorCode (*)(mv_info *, PetscScalar *, PetscScalar *))matvec, grid_data);

  /* matrix is assumed to be of full rank */
  /* LATER we can reset to indicate rank def. */
  xxt_handle->ns = 0;

  /* determine separators and generate firing order - NB xxt info set here */
  PetscCall(det_separators(xxt_handle));

  PetscCall(do_xxt_factor(xxt_handle));
  PetscFunctionReturn(PETSC_SUCCESS);
}

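/*
   After XXT_factor() the handle holds a sparse factor X built so that,
   given the full-rank assumption made above, X^T.A.X = I over the
   separator ordering; the solve phase can then apply the inverse of A
   as X.X^T (see do_xxt_solve()).
*/
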
PetscErrorCode XXT_solve(xxt_ADT xxt_handle, PetscScalar *x, PetscScalar *b)
{
  PetscFunctionBegin;
  PetscCall(PCTFS_comm_init());
  PetscCall(check_handle(xxt_handle));

  /* need to copy b into x? */
  if (b) PetscCall(PCTFS_rvec_copy(x, b, xxt_handle->mvi->n));
  PetscCall(do_xxt_solve(xxt_handle, x));
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode XXT_free(xxt_ADT xxt_handle)
{
  PetscFunctionBegin;
  PetscCall(PCTFS_comm_init());
  PetscCall(check_handle(xxt_handle));

  free(xxt_handle->info->nsep);
  free(xxt_handle->info->lnsep);
  free(xxt_handle->info->fo);
  free(xxt_handle->info->stages);
  free(xxt_handle->info->solve_uu);
  free(xxt_handle->info->solve_w);
  free(xxt_handle->info->x);
  free(xxt_handle->info->col_vals);
  free(xxt_handle->info->col_sz);
  free(xxt_handle->info->col_indices);
  free(xxt_handle->info);
  free(xxt_handle->mvi->local2global);
  PetscCall(PCTFS_gs_free(xxt_handle->mvi->PCTFS_gs_handle));
  free(xxt_handle->mvi);
  free(xxt_handle);

  /* if the check fails we nuke */
  /* if NULL pointer passed to free we nuke */
  /* if the calls to free fail that's not my problem */
  PetscFunctionReturn(PETSC_SUCCESS);
}

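/*
   XXT_free() mirrors the allocations made in det_separators() (nsep, lnsep,
   fo), xxt_generate() (stages, solve_uu, solve_w, x, col_vals, col_sz,
   col_indices), and set_mvi() (local2global and the gs handle).
*/
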
/*
   Description: get A_local, the local portion of the global coarse matrix,
   which is a row-distributed n x m matrix with n < m.
   o my_ml holds the address of the ML struct associated with A_local and the coarse grid
   o local2global holds the global number of column i (i=0,...,m-1); its
     first n entries likewise give the global number of row i (i=0,...,n-1)
   o mylocmatvec performs A_local . vec_local (note that gs is performed using
     PCTFS_gs_init/gop)

   mylocmatvec = my_ml->Amat[grid_tag].matvec->external;
   mylocmatvec(void *data, double *in, double *out)
*/
static PetscErrorCode do_xxt_factor(xxt_ADT xxt_handle)
{
  return xxt_generate(xxt_handle);
}

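/*
   xxt_generate() builds X one column at a time, walking the separator
   firing order fo[]. For each fired (global) column:

     v     <- e_col              unit vector for the fired column
     u     <- A.v
     uu    <- X^T.u              local BLASdot plus PCTFS_ssgl_radd
     z     <- X.uu               the part of v already captured by X
     v     <- v - z
     alpha <- v^T.A.v            local dot plus PCTFS_grop_hc
     v     <- v/sqrt(alpha)

   and the nonzero run [off, off+len) of the new column v is appended to
   the sparse buffer x.
*/
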
static PetscErrorCode xxt_generate(xxt_ADT xxt_handle)
{
  PetscInt     i, j, k, idex;
  PetscInt     dim, col;
  PetscScalar *u, *uu, *v, *z, *w, alpha, alpha_w;
  PetscInt    *segs;
  PetscInt     op[] = {GL_ADD, 0};
  PetscInt     off, len;
  PetscScalar *x_ptr;
  PetscInt    *iptr, flag;
  PetscInt     start = 0, end, work;
  PetscInt     op2[] = {GL_MIN, 0};
  PCTFS_gs_ADT PCTFS_gs_handle;
  PetscInt    *nsep, *lnsep, *fo;
  PetscInt     a_n            = xxt_handle->mvi->n;
  PetscInt     a_m            = xxt_handle->mvi->m;
  PetscInt    *a_local2global = xxt_handle->mvi->local2global;
  PetscInt     level;
  PetscInt     xxt_nnz = 0, xxt_max_nnz = 0;
  PetscInt     n, m;
  PetscInt    *col_sz, *col_indices, *stages;
  PetscScalar **col_vals, *x;
  PetscInt     n_global;
  PetscBLASInt i1 = 1, dlen;
  PetscScalar  dm1 = -1.0;

  n               = xxt_handle->mvi->n;
  nsep            = xxt_handle->info->nsep;
  lnsep           = xxt_handle->info->lnsep;
  fo              = xxt_handle->info->fo;
  end             = lnsep[0];
  level           = xxt_handle->level;
  PCTFS_gs_handle = xxt_handle->mvi->PCTFS_gs_handle;

  /* is there a null space? */
  /* LATER add in ability to detect null space by checking alpha */
  for (i = 0, j = 0; i <= level; i++) j += nsep[i];

  m = j - xxt_handle->ns;
  if (m != j) PetscCall(PetscPrintf(PETSC_COMM_WORLD, "xxt_generate() :: null space exists %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT "\n", m, j, xxt_handle->ns));

  /* get and initialize storage for x local */
  /* note that x local is nxm and stored by columns */
  col_sz      = (PetscInt *)malloc(m * sizeof(PetscInt));
  col_indices = (PetscInt *)malloc((2 * m + 1) * sizeof(PetscInt));
  col_vals    = (PetscScalar **)malloc(m * sizeof(PetscScalar *));
  for (i = j = 0; i < m; i++, j += 2) {
    col_indices[j] = col_indices[j + 1] = col_sz[i] = -1;
    col_vals[i]    = NULL;
  }
  col_indices[j] = -1;

  /* size of separators for each sub-hc working from bottom of tree to top */
  /* this looks like nsep[]=segments */
  stages = (PetscInt *)malloc((level + 1) * sizeof(PetscInt));
  segs   = (PetscInt *)malloc((level + 1) * sizeof(PetscInt));
  PetscCall(PCTFS_ivec_zero(stages, level + 1));
  PCTFS_ivec_copy(segs, nsep, level + 1);
  for (i = 0; i < level; i++) segs[i + 1] += segs[i];
  stages[0] = segs[0];

  /* temporary vectors */
  u  = (PetscScalar *)malloc(n * sizeof(PetscScalar));
  z  = (PetscScalar *)malloc(n * sizeof(PetscScalar));
  v  = (PetscScalar *)malloc(a_m * sizeof(PetscScalar));
  uu = (PetscScalar *)malloc(m * sizeof(PetscScalar));
  w  = (PetscScalar *)malloc(m * sizeof(PetscScalar));

  /* extra nnz due to replication of vertices across separators */
  for (i = 1, j = 0; i <= level; i++) j += nsep[i];

  /* storage for sparse x values */
  n_global    = xxt_handle->info->n_global;
  xxt_max_nnz = (PetscInt)(2.5 * PetscPowReal(1.0 * n_global, 1.6667) + j * n / 2) / PCTFS_num_nodes; /* heuristic estimate; doubled on demand below */
  x           = (PetscScalar *)malloc(xxt_max_nnz * sizeof(PetscScalar));
  xxt_nnz     = 0;

  /* LATER - can embed next sep to fire in gs */
  /* time to make the donuts - generate X factor */
  for (dim = i = j = 0; i < m; i++) {
    /* time to move to the next level? */
    while (i == segs[dim]) {
      PetscCheck(dim != level, PETSC_COMM_SELF, PETSC_ERR_PLIB, "dim about to exceed level");
      stages[dim++] = i;
      end += lnsep[dim];
    }
    stages[dim] = i;

    /* which column are we firing? */
    /* i.e. set v_l */
    /* use new seps and do global min across hc to determine which one to fire */
    col = (start < end) ? fo[start] : INT_MAX;
    PetscCall(PCTFS_giop_hc(&col, &work, 1, op2, dim));

    /* shouldn't need this */
    if (col == INT_MAX) {
      PetscCall(PetscInfo(0, "hey ... col==INT_MAX??\n"));
      continue;
    }

    /* do I own it? I should */
    PetscCall(PCTFS_rvec_zero(v, a_m));
    if (col == fo[start]) {
      start++;
      idex = PCTFS_ivec_linear_search(col, a_local2global, a_n);
      PetscCheck(idex != -1, PETSC_COMM_SELF, PETSC_ERR_PLIB, "NOT FOUND!");
      v[idex] = 1.0;
      j++;
    } else {
      idex = PCTFS_ivec_linear_search(col, a_local2global, a_m);
      if (idex != -1) v[idex] = 1.0;
    }

    /* perform u = A.v_l */
    PetscCall(PCTFS_rvec_zero(u, n));
    PetscCall(do_matvec(xxt_handle->mvi, v, u));

    /* uu = X^T.u_l (local portion) */
    /* technically only need to zero out first i entries */
    /* later turn this into an XXT_solve call ? */
    PetscCall(PCTFS_rvec_zero(uu, m));
    x_ptr = x;
    iptr  = col_indices;
    for (k = 0; k < i; k++) {
      off = *iptr++;
      len = *iptr++;
      PetscCall(PetscBLASIntCast(len, &dlen));
      PetscCallBLAS("BLASdot", uu[k] = BLASdot_(&dlen, u + off, &i1, x_ptr, &i1));
      x_ptr += len;
    }

    /* uu = X^T.u_l (comm portion) */
    PetscCall(PCTFS_ssgl_radd(uu, w, dim, stages));

    /* z = X.uu */
    PetscCall(PCTFS_rvec_zero(z, n));
    x_ptr = x;
    iptr  = col_indices;
    for (k = 0; k < i; k++) {
      off = *iptr++;
      len = *iptr++;
      PetscCall(PetscBLASIntCast(len, &dlen));
      PetscCallBLAS("BLASaxpy", BLASaxpy_(&dlen, &uu[k], x_ptr, &i1, z + off, &i1));
      x_ptr += len;
    }

    /* compute v_l = v_l - z */
    PetscCall(PCTFS_rvec_zero(v + a_n, a_m - a_n));
    PetscCall(PetscBLASIntCast(n, &dlen));
    PetscCallBLAS("BLASaxpy", BLASaxpy_(&dlen, &dm1, z, &i1, v, &i1));

    /* compute u_l = A.v_l */
    if (a_n != a_m) PetscCall(PCTFS_gs_gop_hc(PCTFS_gs_handle, v, "+\0", dim));
    PetscCall(PCTFS_rvec_zero(u, n));
    PetscCall(do_matvec(xxt_handle->mvi, v, u));

    /* compute sqrt(alpha) = sqrt(v_l^T.u_l) - local portion */
    PetscCall(PetscBLASIntCast(n, &dlen));
    PetscCallBLAS("BLASdot", alpha = BLASdot_(&dlen, u, &i1, v, &i1));
    /* compute sqrt(alpha) = sqrt(v_l^T.u_l) - comm portion */
    PetscCall(PCTFS_grop_hc(&alpha, &alpha_w, 1, op, dim));

    alpha = (PetscScalar)PetscSqrtReal((PetscReal)alpha);

    /* check for small alpha */
    /* LATER use this to detect and determine null space */
    PetscCheck(PetscAbsScalar(alpha) >= 1.0e-14, PETSC_COMM_SELF, PETSC_ERR_PLIB, "bad alpha! %g", (double)PetscAbsScalar(alpha));

    /* compute v_l = v_l/sqrt(alpha) */
    PetscCall(PCTFS_rvec_scale(v, 1.0 / alpha, n));

    /* add newly generated column, v_l, to X */
    /* locate the nonzero run of v: off = first nonzero index, len = last */
    flag = 1;
    off = len = 0;
    for (k = 0; k < n; k++) {
      if (v[k] != 0.0) {
        len = k;
        if (flag) {
          off  = k;
          flag = 0;
        }
      }
    }

    len -= (off - 1); /* len is now the run length, last - first + 1 */

    if (len > 0) {
      if ((xxt_nnz + len) > xxt_max_nnz) {
        PetscCall(PetscInfo(0, "increasing space for X by 2x!\n"));
        xxt_max_nnz *= 2;
        x_ptr = (PetscScalar *)malloc(xxt_max_nnz * sizeof(PetscScalar));
        PetscCall(PCTFS_rvec_copy(x_ptr, x, xxt_nnz));
        free(x);
        x = x_ptr;
        x_ptr += xxt_nnz;
      }
      xxt_nnz += len;
      PetscCall(PCTFS_rvec_copy(x_ptr, v + off, len));

      col_indices[2 * i] = off;
      col_sz[i] = col_indices[2 * i + 1] = len;
      col_vals[i]                        = x_ptr;
    } else {
      col_indices[2 * i] = 0;
      col_sz[i] = col_indices[2 * i + 1] = 0;
      col_vals[i]                        = x_ptr;
    }
  }

  /* close off stages for execution phase */
  while (dim != level) {
    stages[dim++] = i;
    PetscCall(PetscInfo(0, "disconnected!!! dim(%" PetscInt_FMT ")!=level(%" PetscInt_FMT ")\n", dim, level));
  }
  stages[dim] = i;

  xxt_handle->info->n              = xxt_handle->mvi->n;
  xxt_handle->info->m              = m;
  xxt_handle->info->nnz            = xxt_nnz;
  xxt_handle->info->max_nnz        = xxt_max_nnz;
  xxt_handle->info->msg_buf_sz     = stages[level] - stages[0];
  xxt_handle->info->solve_uu       = (PetscScalar *)malloc(m * sizeof(PetscScalar));
  xxt_handle->info->solve_w        = (PetscScalar *)malloc(m * sizeof(PetscScalar));
  xxt_handle->info->x              = x;
  xxt_handle->info->col_vals       = col_vals;
  xxt_handle->info->col_sz         = col_sz;
  xxt_handle->info->col_indices    = col_indices;
  xxt_handle->info->stages         = stages;
  xxt_handle->info->nsolves        = 0;
  xxt_handle->info->tot_solve_time = 0.0;

  free(segs);
  free(u);
  free(v);
  free(uu);
  free(z);
  free(w);

  return PETSC_SUCCESS;
}

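/*
   do_xxt_solve() applies uc <- X.X^T.uc in place: dot products against the
   columns of X gather uu = X^T.uc, PCTFS_ssgl_radd() sums uu across the
   hypercube stages, and axpys scatter uc = X.uu back out.
*/
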
static PetscErrorCode do_xxt_solve(xxt_ADT xxt_handle, PetscScalar *uc)
{
  PetscInt     off, len, *iptr;
  PetscInt     level       = xxt_handle->level;
  PetscInt     n           = xxt_handle->info->n;
  PetscInt     m           = xxt_handle->info->m;
  PetscInt    *stages      = xxt_handle->info->stages;
  PetscInt    *col_indices = xxt_handle->info->col_indices;
  PetscScalar *x_ptr, *uu_ptr;
  PetscScalar *solve_uu = xxt_handle->info->solve_uu;
  PetscScalar *solve_w  = xxt_handle->info->solve_w;
  PetscScalar *x        = xxt_handle->info->x;
  PetscBLASInt i1 = 1, dlen;

  PetscFunctionBegin;
  uu_ptr = solve_uu;
  PetscCall(PCTFS_rvec_zero(uu_ptr, m));

  /* x = X.X^T.b */
  /* uu = X^T.b */
  for (x_ptr = x, iptr = col_indices; *iptr != -1; x_ptr += len) {
    off = *iptr++;
    len = *iptr++;
    PetscCall(PetscBLASIntCast(len, &dlen));
    PetscCallBLAS("BLASdot", *uu_ptr++ = BLASdot_(&dlen, uc + off, &i1, x_ptr, &i1));
  }

  /* communication of beta */
  uu_ptr = solve_uu;
  if (level) PetscCall(PCTFS_ssgl_radd(uu_ptr, solve_w, level, stages));

  PetscCall(PCTFS_rvec_zero(uc, n));

  /* x = X.uu */
  for (x_ptr = x, iptr = col_indices; *iptr != -1; x_ptr += len) {
    off = *iptr++;
    len = *iptr++;
    PetscCall(PetscBLASIntCast(len, &dlen));
    PetscCallBLAS("BLASaxpy", BLASaxpy_(&dlen, uu_ptr++, x_ptr, &i1, uc + off, &i1));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode check_handle(xxt_ADT xxt_handle)
{
  PetscInt vals[2], work[2], op[] = {NON_UNIFORM, GL_MIN, GL_MAX};

  PetscFunctionBegin;
  PetscCheck(xxt_handle, PETSC_COMM_SELF, PETSC_ERR_PLIB, "check_handle() :: bad handle :: NULL %p", (void *)xxt_handle);

  vals[0] = vals[1] = xxt_handle->id;
  PetscCall(PCTFS_giop(vals, work, PETSC_STATIC_ARRAY_LENGTH(op) - 1, op));
  PetscCheck(!(vals[0] != vals[1]) && !(xxt_handle->id <= 0), PETSC_COMM_SELF, PETSC_ERR_PLIB, "check_handle() :: bad handle :: id mismatch min/max %" PetscInt_FMT "/%" PetscInt_FMT " %" PetscInt_FMT, vals[0], vals[1], xxt_handle->id);
  PetscFunctionReturn(PETSC_SUCCESS);
}

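/*
   det_separators() peels the processor hypercube one dimension at a time
   (edge = level, ..., 1). Each pass fires a vector of ones from one half of
   the current sub-hypercube through PCTFS_gs_gop_hc(), and the opposite half
   marks the dofs that receive a signal across the cut as that level's
   separator set; the firing order is written into fo[] back to front.
*/
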
static PetscErrorCode det_separators(xxt_ADT xxt_handle)
{
  PetscInt     i, ct, id;
  PetscInt     mask, edge, *iptr;
  PetscInt    *dir, *used;
  PetscInt     sum[4], w[4];
  PetscScalar  rsum[4], rw[4];
  PetscInt     op[] = {GL_ADD, 0};
  PetscScalar *lhs, *rhs;
  PetscInt    *nsep, *lnsep, *fo, nfo = 0;
  PCTFS_gs_ADT PCTFS_gs_handle = xxt_handle->mvi->PCTFS_gs_handle;
  PetscInt    *local2global    = xxt_handle->mvi->local2global;
  PetscInt     n               = xxt_handle->mvi->n;
  PetscInt     m               = xxt_handle->mvi->m;
  PetscInt     level           = xxt_handle->level;
  PetscInt     shared          = 0;

  PetscFunctionBegin;
  dir   = (PetscInt *)malloc(sizeof(PetscInt) * (level + 1));
  nsep  = (PetscInt *)malloc(sizeof(PetscInt) * (level + 1));
  lnsep = (PetscInt *)malloc(sizeof(PetscInt) * (level + 1));
  fo    = (PetscInt *)malloc(sizeof(PetscInt) * (n + 1));
  used  = (PetscInt *)malloc(sizeof(PetscInt) * n);

  PetscCall(PCTFS_ivec_zero(dir, level + 1));
  PetscCall(PCTFS_ivec_zero(nsep, level + 1));
  PetscCall(PCTFS_ivec_zero(lnsep, level + 1));
  PetscCall(PCTFS_ivec_set(fo, -1, n + 1));
  PetscCall(PCTFS_ivec_zero(used, n));

  lhs = (PetscScalar *)malloc(sizeof(PetscScalar) * m);
  rhs = (PetscScalar *)malloc(sizeof(PetscScalar) * m);

  /* determine the # of unique dof */
  PetscCall(PCTFS_rvec_zero(lhs, m));
  PetscCall(PCTFS_rvec_set(lhs, 1.0, n));
  PetscCall(PCTFS_gs_gop_hc(PCTFS_gs_handle, lhs, "+\0", level));
  PetscCall(PCTFS_rvec_zero(rsum, 2));
  for (i = 0; i < n; i++) {
    if (lhs[i] != 0.0) {
      rsum[0] += 1.0 / lhs[i];
      rsum[1] += lhs[i];
    }
  }
  PetscCall(PCTFS_grop_hc(rsum, rw, 2, op, level));
  rsum[0] += 0.1; /* guard against rounding before the integer casts below */
  rsum[1] += 0.1;

  if (PetscAbsScalar(rsum[0] - rsum[1]) > EPS) shared = 1;

  xxt_handle->info->n_global = xxt_handle->info->m_global = (PetscInt)rsum[0];
  xxt_handle->mvi->n_global = xxt_handle->mvi->m_global = (PetscInt)rsum[0];

  /* determine separator sets top down */
  if (shared) {
    for (iptr = fo + n, id = PCTFS_my_id, mask = PCTFS_num_nodes >> 1, edge = level; edge > 0; edge--, mask >>= 1) {
      /* set rsh of hc, fire, and collect lhs responses */
      PetscCall((id < mask) ? PCTFS_rvec_zero(lhs, m) : PCTFS_rvec_set(lhs, 1.0, m));
      PetscCall(PCTFS_gs_gop_hc(PCTFS_gs_handle, lhs, "+\0", edge));

      /* set lsh of hc, fire, and collect rhs responses */
      PetscCall((id < mask) ? PCTFS_rvec_set(rhs, 1.0, m) : PCTFS_rvec_zero(rhs, m));
      PetscCall(PCTFS_gs_gop_hc(PCTFS_gs_handle, rhs, "+\0", edge));

      for (i = 0; i < n; i++) {
        if (id < mask) {
          if (lhs[i] != 0.0) lhs[i] = 1.0;
        }
        if (id >= mask) {
          if (rhs[i] != 0.0) rhs[i] = 1.0;
        }
      }

      if (id < mask) PetscCall(PCTFS_gs_gop_hc(PCTFS_gs_handle, lhs, "+\0", edge - 1));
      else PetscCall(PCTFS_gs_gop_hc(PCTFS_gs_handle, rhs, "+\0", edge - 1));

      /* count number of dofs I own that have signal and not in sep set */
      PetscCall(PCTFS_rvec_zero(rsum, 4));
      PetscCall(PCTFS_ivec_zero(sum, 4));
      for (ct = i = 0; i < n; i++) {
        if (!used[i]) {
          /* number of unmarked dofs on node */
          ct++;
          /* number of dofs to be marked on lhs hc */
          if (id < mask) {
            if (lhs[i] != 0.0) {
              sum[0]++;
              rsum[0] += 1.0 / lhs[i];
            }
          }
          /* number of dofs to be marked on rhs hc */
          if (id >= mask) {
            if (rhs[i] != 0.0) {
              sum[1]++;
              rsum[1] += 1.0 / rhs[i];
            }
          }
        }
      }

      /* go for load balance - choose half with most unmarked dofs, bias LHS */
      (id < mask) ? (sum[2] = ct) : (sum[3] = ct);
      (id < mask) ? (rsum[2] = ct) : (rsum[3] = ct);
      PetscCall(PCTFS_giop_hc(sum, w, 4, op, edge));
      PetscCall(PCTFS_grop_hc(rsum, rw, 4, op, edge));
      rsum[0] += 0.1;
      rsum[1] += 0.1;
      rsum[2] += 0.1;
      rsum[3] += 0.1;

      if (id < mask) {
        /* mark dofs I own that have signal and not in sep set */
        for (ct = i = 0; i < n; i++) {
          if ((!used[i]) && (lhs[i] != 0.0)) {
            ct++;
            nfo++;

            PetscCheck(nfo <= n, PETSC_COMM_SELF, PETSC_ERR_PLIB, "nfo about to exceed n");

            *--iptr = local2global[i];
            used[i] = edge;
          }
        }
        if (ct > 1) PetscCall(PCTFS_ivec_sort(iptr, ct));

        lnsep[edge] = ct;
        nsep[edge]  = (PetscInt)rsum[0];
        dir[edge]   = LEFT;
      }

      if (id >= mask) {
        /* mark dofs I own that have signal and not in sep set */
        for (ct = i = 0; i < n; i++) {
          if ((!used[i]) && (rhs[i] != 0.0)) {
            ct++;
            nfo++;

            PetscCheck(nfo <= n, PETSC_COMM_SELF, PETSC_ERR_PLIB, "nfo about to exceed n");

            *--iptr = local2global[i];
            used[i] = edge;
          }
        }
        if (ct > 1) PetscCall(PCTFS_ivec_sort(iptr, ct));

        lnsep[edge] = ct;
        nsep[edge]  = (PetscInt)rsum[1];
        dir[edge]   = RIGHT;
      }

      /* LATER or we can recur on these to order seps at this level */
      /* do we need full set of separators for this? */

      /* fold rhs hc into lower */
      if (id >= mask) id -= mask;
    }
  } else {
    for (iptr = fo + n, id = PCTFS_my_id, mask = PCTFS_num_nodes >> 1, edge = level; edge > 0; edge--, mask >>= 1) {
      /* set rsh of hc, fire, and collect lhs responses */
      PetscCall((id < mask) ? PCTFS_rvec_zero(lhs, m) : PCTFS_rvec_set(lhs, 1.0, m));
      PetscCall(PCTFS_gs_gop_hc(PCTFS_gs_handle, lhs, "+\0", edge));

      /* set lsh of hc, fire, and collect rhs responses */
      PetscCall((id < mask) ? PCTFS_rvec_set(rhs, 1.0, m) : PCTFS_rvec_zero(rhs, m));
      PetscCall(PCTFS_gs_gop_hc(PCTFS_gs_handle, rhs, "+\0", edge));

      /* count number of dofs I own that have signal and not in sep set */
      PetscCall(PCTFS_ivec_zero(sum, 4));
      for (ct = i = 0; i < n; i++) {
        if (!used[i]) {
          /* number of unmarked dofs on node */
          ct++;
          /* number of dofs to be marked on lhs hc */
          if ((id < mask) && (lhs[i] != 0.0)) sum[0]++;
          /* number of dofs to be marked on rhs hc */
          if ((id >= mask) && (rhs[i] != 0.0)) sum[1]++;
        }
      }

      /* go for load balance - choose half with most unmarked dofs, bias LHS */
      (id < mask) ? (sum[2] = ct) : (sum[3] = ct);
      PetscCall(PCTFS_giop_hc(sum, w, 4, op, edge));

      /* lhs hc wins */
      if (sum[2] >= sum[3]) {
        if (id < mask) {
          /* mark dofs I own that have signal and not in sep set */
          for (ct = i = 0; i < n; i++) {
            if ((!used[i]) && (lhs[i] != 0.0)) {
              ct++;
              nfo++;
              *--iptr = local2global[i];
              used[i] = edge;
            }
          }
          if (ct > 1) PetscCall(PCTFS_ivec_sort(iptr, ct));
          lnsep[edge] = ct;
        }
        nsep[edge] = sum[0];
        dir[edge]  = LEFT;
      } else { /* rhs hc wins */
        if (id >= mask) {
          /* mark dofs I own that have signal and not in sep set */
          for (ct = i = 0; i < n; i++) {
            if ((!used[i]) && (rhs[i] != 0.0)) {
              ct++;
              nfo++;
              *--iptr = local2global[i];
              used[i] = edge;
            }
          }
          if (ct > 1) PetscCall(PCTFS_ivec_sort(iptr, ct));
          lnsep[edge] = ct;
        }
        nsep[edge] = sum[1];
        dir[edge]  = RIGHT;
      }
      /* LATER or we can recur on these to order seps at this level */
      /* do we need full set of separators for this? */

      /* fold rhs hc into lower */
      if (id >= mask) id -= mask;
    }
  }

  /* level 0 is the on-processor case - so mark the remainder */
  for (ct = i = 0; i < n; i++) {
    if (!used[i]) {
      ct++;
      nfo++;
      *--iptr = local2global[i];
      used[i] = edge;
    }
  }
  if (ct > 1) PetscCall(PCTFS_ivec_sort(iptr, ct));
  lnsep[edge] = ct;
  nsep[edge]  = ct;
  dir[edge]   = LEFT;

  xxt_handle->info->nsep  = nsep;
  xxt_handle->info->lnsep = lnsep;
  xxt_handle->info->fo    = fo;
  xxt_handle->info->nfo   = nfo;

  free(dir);
  free(lhs);
  free(rhs);
  free(used);
  PetscFunctionReturn(PETSC_SUCCESS);
}

static mv_info *set_mvi(PetscInt *local2global, PetscInt n, PetscInt m, PetscErrorCode (*matvec)(mv_info *, PetscScalar *, PetscScalar *), void *grid_data)
{
  mv_info *mvi = (mv_info *)malloc(sizeof(mv_info));

  mvi->n            = n;
  mvi->m            = m;
  mvi->n_global     = -1;
  mvi->m_global     = -1;
  mvi->local2global = (PetscInt *)malloc((m + 1) * sizeof(PetscInt));
  PCTFS_ivec_copy(mvi->local2global, local2global, m);
  mvi->local2global[m] = INT_MAX;
  mvi->matvec          = matvec;
  mvi->grid_data       = grid_data;

  /* set xxt communication handle to perform restricted matvec */
  mvi->PCTFS_gs_handle = PCTFS_gs_init(local2global, m, PCTFS_num_nodes);

  return mvi;
}

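/*
   do_matvec() hands the user's grid_data to the stored callback as its
   context argument; the (mv_info *) cast below only reconciles the stored
   function-pointer type with the void * context that the callback
   registered through XXT_factor() actually expects.
*/
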
static PetscErrorCode do_matvec(mv_info *A, PetscScalar *v, PetscScalar *u)
{
  PetscFunctionBegin;
  PetscCall(A->matvec((mv_info *)A->grid_data, v, u));
  PetscFunctionReturn(PETSC_SUCCESS);
}