Actual source code: xxt.c
/*
Module Name: xxt
Module Info:

author:  Henry M. Tufo III
e-mail:  hmt@asci.uchicago.edu
contact:
+--------------------------------+--------------------------------+
|MCS Division - Building 221     |Department of Computer Science  |
|Argonne National Laboratory     |Ryerson 152                     |
|9700 S. Cass Avenue             |The University of Chicago       |
|Argonne, IL 60439               |Chicago, IL 60637               |
|(630) 252-5354/5986 ph/fx       |(773) 702-6019/8487 ph/fx       |
+--------------------------------+--------------------------------+

Last Modification: 3.20.01
*/
#include <../src/ksp/pc/impls/tfs/tfs.h>

#define LEFT  -1
#define RIGHT 1
#define BOTH  0

typedef struct xxt_solver_info {
  PetscInt      n, m, n_global, m_global;
  PetscInt      nnz, max_nnz, msg_buf_sz;
  PetscInt     *nsep, *lnsep, *fo, nfo, *stages;
  PetscInt     *col_sz, *col_indices;
  PetscScalar **col_vals, *x, *solve_uu, *solve_w;
  PetscInt      nsolves;
  PetscScalar   tot_solve_time;
} xxt_info;

typedef struct matvec_info {
  PetscInt     n, m, n_global, m_global;
  PetscInt    *local2global;
  PCTFS_gs_ADT PCTFS_gs_handle;
  PetscErrorCode (*matvec)(struct matvec_info *, PetscScalar *, PetscScalar *);
  void *grid_data;
} mv_info;

struct xxt_CDT {
  PetscInt  id;
  PetscInt  ns;
  PetscInt  level;
  xxt_info *info;
  mv_info  *mvi;
};

static PetscInt n_xxt         = 0; /* total number of handles ever created */
static PetscInt n_xxt_handles = 0; /* number of handles currently allocated */

/* prototypes */
static PetscErrorCode do_xxt_solve(xxt_ADT xxt_handle, PetscScalar *rhs);
static PetscErrorCode check_handle(xxt_ADT xxt_handle);
static PetscErrorCode det_separators(xxt_ADT xxt_handle);
static PetscErrorCode do_matvec(mv_info *A, PetscScalar *v, PetscScalar *u);
static PetscErrorCode xxt_generate(xxt_ADT xxt_handle);
static PetscErrorCode do_xxt_factor(xxt_ADT xxt_handle);
static mv_info       *set_mvi(PetscInt *local2global, PetscInt n, PetscInt m, PetscErrorCode (*matvec)(mv_info *, PetscScalar *, PetscScalar *), void *grid_data);

xxt_ADT XXT_new(void)
{
  xxt_ADT xxt_handle;

  /* rolling count on n_xxt ... potential overflow problem here */
  n_xxt_handles++;
  xxt_handle       = (xxt_ADT)malloc(sizeof(struct xxt_CDT));
  xxt_handle->id   = ++n_xxt;
  xxt_handle->info = NULL;
  xxt_handle->mvi  = NULL;

  return xxt_handle;
}
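
/* A minimal usage sketch (illustrative only, not part of this file): factor
   once, then solve repeatedly with the same handle. The names my_map,
   my_matvec, and my_grid are hypothetical placeholders for caller data.

     xxt_ADT handle = XXT_new();
     XXT_factor(handle, my_map, n, m, my_matvec, my_grid);
     XXT_solve(handle, x, b);     // x = A^{-1} b
     XXT_solve(handle, x, NULL);  // b == NULL: x already holds the rhs
     XXT_free(handle);
*/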

PetscErrorCode XXT_factor(xxt_ADT xxt_handle,                                             /* prev. allocated xxt handle */
                          PetscInt *local2global,                                         /* global column mapping */
                          PetscInt n,                                                     /* local num rows */
                          PetscInt m,                                                     /* local num cols */
                          PetscErrorCode (*matvec)(void *, PetscScalar *, PetscScalar *), /* b_loc = A_local . x_loc */
                          void *grid_data)                                                /* grid data for matvec */
{
  PetscFunctionBegin;
  PetscCall(PCTFS_comm_init());
  PetscCall(check_handle(xxt_handle));

  /* only 2^k for now and all nodes participating */
  PetscCheck((1 << (xxt_handle->level = PCTFS_i_log2_num_nodes)) == PCTFS_num_nodes, PETSC_COMM_SELF, PETSC_ERR_PLIB, "only 2^k for now and MPI_COMM_WORLD!!! %d != %d", 1 << PCTFS_i_log2_num_nodes, PCTFS_num_nodes);

  /* space for X info */
  xxt_handle->info = (xxt_info *)malloc(sizeof(xxt_info));

  /* set up matvec handles */
  xxt_handle->mvi = set_mvi(local2global, n, m, (PetscErrorCode(*)(mv_info *, PetscScalar *, PetscScalar *))matvec, grid_data);

  /* matrix is assumed to be of full rank */
  /* LATER we can reset to indicate rank deficiency */
  xxt_handle->ns = 0;

  /* determine separators and generate firing order - NB xxt info set here */
  PetscCall(det_separators(xxt_handle));

  PetscCall(do_xxt_factor(xxt_handle));
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode XXT_solve(xxt_ADT xxt_handle, PetscScalar *x, PetscScalar *b)
{
  PetscFunctionBegin;
  PetscCall(PCTFS_comm_init());
  PetscCall(check_handle(xxt_handle));

  /* need to copy b into x? */
  if (b) PetscCall(PCTFS_rvec_copy(x, b, xxt_handle->mvi->n));
  PetscCall(do_xxt_solve(xxt_handle, x));
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode XXT_free(xxt_ADT xxt_handle)
{
  PetscFunctionBegin;
  PetscCall(PCTFS_comm_init());
  PetscCall(check_handle(xxt_handle));
  n_xxt_handles--;

  free(xxt_handle->info->nsep);
  free(xxt_handle->info->lnsep);
  free(xxt_handle->info->fo);
  free(xxt_handle->info->stages);
  free(xxt_handle->info->solve_uu);
  free(xxt_handle->info->solve_w);
  free(xxt_handle->info->x);
  free(xxt_handle->info->col_vals);
  free(xxt_handle->info->col_sz);
  free(xxt_handle->info->col_indices);
  free(xxt_handle->info);
  free(xxt_handle->mvi->local2global);
  PetscCall(PCTFS_gs_free(xxt_handle->mvi->PCTFS_gs_handle));
  free(xxt_handle->mvi);
  free(xxt_handle);

  /* if the check fails we nuke */
  /* if NULL pointer passed to free we nuke */
  /* if the calls to free fail that's not my problem */
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
Description: get A_local, the local portion of the global coarse matrix,
which is a row-distributed n x m matrix with n < m.
o my_ml holds the address of the ML struct associated with A_local and the coarse grid
o local2global holds the global number of column i (i=0,...,m-1)
o local2global holds the global number of row i    (i=0,...,n-1)
o mylocmatvec performs A_local . vec_local (note that gs is performed using
  PCTFS_gs_init/gop).

mylocmatvec = my_ml->Amat[grid_tag].matvec->external;
mylocmatvec (void :: void *data, double *in, double *out)
*/
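
/* A conforming local matvec might look like the sketch below (hypothetical;
   MyGrid and apply_A_local are illustrative names, not part of this file).
   do_matvec() hands the callback the grid_data pointer as its first argument
   and expects u[0..n-1] = A_local . v[0..m-1]:

     PetscErrorCode my_matvec(void *data, PetscScalar *v, PetscScalar *u)
     {
       MyGrid *grid = (MyGrid *)data;

       PetscFunctionBegin;
       apply_A_local(grid, v, u);
       PetscFunctionReturn(PETSC_SUCCESS);
     }
*/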
static PetscErrorCode do_xxt_factor(xxt_ADT xxt_handle)
{
  return xxt_generate(xxt_handle);
}
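
/* Why this works (cf. do_xxt_solve() below): xxt_generate() builds a sparse
   factor X whose columns are A-orthonormal, i.e. X^T.A.X = I. Since the
   matrix is assumed to be of full rank, A^{-1} = X.X^T, and a solve reduces
   to x = X.(X^T.b): two passes over the columns of X plus one staged
   reduction for the shared coefficients. */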

static PetscErrorCode xxt_generate(xxt_ADT xxt_handle)
{
  PetscInt      i, j, k, idex;
  PetscInt      dim, col;
  PetscScalar  *u, *uu, *v, *z, *w, alpha, alpha_w;
  PetscInt     *segs;
  PetscInt      op[] = {GL_ADD, 0};
  PetscInt      off, len;
  PetscScalar  *x_ptr;
  PetscInt     *iptr, flag;
  PetscInt      start = 0, end, work;
  PetscInt      op2[] = {GL_MIN, 0};
  PCTFS_gs_ADT  PCTFS_gs_handle;
  PetscInt     *nsep, *lnsep, *fo;
  PetscInt      a_n            = xxt_handle->mvi->n;
  PetscInt      a_m            = xxt_handle->mvi->m;
  PetscInt     *a_local2global = xxt_handle->mvi->local2global;
  PetscInt      level;
  PetscInt      xxt_nnz = 0, xxt_max_nnz = 0;
  PetscInt      n, m;
  PetscInt     *col_sz, *col_indices, *stages;
  PetscScalar **col_vals, *x;
  PetscInt      n_global;
  PetscBLASInt  i1 = 1, dlen;
  PetscScalar   dm1 = -1.0;

  n               = xxt_handle->mvi->n;
  nsep            = xxt_handle->info->nsep;
  lnsep           = xxt_handle->info->lnsep;
  fo              = xxt_handle->info->fo;
  end             = lnsep[0];
  level           = xxt_handle->level;
  PCTFS_gs_handle = xxt_handle->mvi->PCTFS_gs_handle;

  /* is there a null space? */
  /* LATER add in ability to detect null space by checking alpha */
  for (i = 0, j = 0; i <= level; i++) j += nsep[i];

  m = j - xxt_handle->ns;
  if (m != j) PetscCall(PetscPrintf(PETSC_COMM_WORLD, "xxt_generate() :: null space exists %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT "\n", m, j, xxt_handle->ns));

  /* get and initialize storage for x local */
  /* note that x local is n x m and stored by columns */
  col_sz      = (PetscInt *)malloc(m * sizeof(PetscInt));
  col_indices = (PetscInt *)malloc((2 * m + 1) * sizeof(PetscInt));
  col_vals    = (PetscScalar **)malloc(m * sizeof(PetscScalar *));
  for (i = j = 0; i < m; i++, j += 2) {
    col_indices[j] = col_indices[j + 1] = col_sz[i] = -1;
    col_vals[i]                         = NULL;
  }
  col_indices[j] = -1;

  /* size of separators for each sub-hc working from bottom of tree to top */
  /* this looks like nsep[] = segments */
  stages = (PetscInt *)malloc((level + 1) * sizeof(PetscInt));
  segs   = (PetscInt *)malloc((level + 1) * sizeof(PetscInt));
  PetscCall(PCTFS_ivec_zero(stages, level + 1));
  PCTFS_ivec_copy(segs, nsep, level + 1);
  for (i = 0; i < level; i++) segs[i + 1] += segs[i];
  stages[0] = segs[0];

  /* temporary vectors */
  u  = (PetscScalar *)malloc(n * sizeof(PetscScalar));
  z  = (PetscScalar *)malloc(n * sizeof(PetscScalar));
  v  = (PetscScalar *)malloc(a_m * sizeof(PetscScalar));
  uu = (PetscScalar *)malloc(m * sizeof(PetscScalar));
  w  = (PetscScalar *)malloc(m * sizeof(PetscScalar));

  /* extra nnz due to replication of vertices across separators */
  for (i = 1, j = 0; i <= level; i++) j += nsep[i];

  /* storage for sparse x values */
  n_global    = xxt_handle->info->n_global;
  xxt_max_nnz = (PetscInt)(2.5 * PetscPowReal(1.0 * n_global, 1.6667) + j * n / 2) / PCTFS_num_nodes;
  x           = (PetscScalar *)malloc(xxt_max_nnz * sizeof(PetscScalar));
  xxt_nnz     = 0;

  /* LATER - can embed next sep to fire in gs */
  /* time to make the donuts - generate X factor */
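  /* Each iteration fires one unit vector v = e_col and A-orthogonalizes it
     against the columns already in X (classical Gram-Schmidt in the A inner
     product):
       u  = A.v
       uu = X^T.u   (local dots, then PCTFS_ssgl_radd for the comm portion)
       v  = (v - X.uu)/sqrt(v^T.A.v)
     so that the stored columns satisfy x_k^T.A.x_j = delta_kj. */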
  for (dim = i = j = 0; i < m; i++) {
    /* time to move to the next level? */
    while (i == segs[dim]) {
      PetscCheck(dim != level, PETSC_COMM_SELF, PETSC_ERR_PLIB, "dim about to exceed level");
      stages[dim++] = i;
      end += lnsep[dim];
    }
    stages[dim] = i;

    /* which column are we firing? */
    /* i.e. set v_l */
    /* use new seps and do global min across hc to determine which one to fire */
    (start < end) ? (col = fo[start]) : (col = INT_MAX);
    PetscCall(PCTFS_giop_hc(&col, &work, 1, op2, dim));

    /* shouldn't need this */
    if (col == INT_MAX) {
      PetscCall(PetscInfo(0, "hey ... col==INT_MAX??\n"));
      continue;
    }

    /* do I own it? I should */
    PetscCall(PCTFS_rvec_zero(v, a_m));
    if (col == fo[start]) {
      start++;
      idex = PCTFS_ivec_linear_search(col, a_local2global, a_n);
      PetscCheck(idex != -1, PETSC_COMM_SELF, PETSC_ERR_PLIB, "NOT FOUND!");
      v[idex] = 1.0;
      j++;
    } else {
      idex = PCTFS_ivec_linear_search(col, a_local2global, a_m);
      if (idex != -1) v[idex] = 1.0;
    }

    /* perform u = A.v_l */
    PetscCall(PCTFS_rvec_zero(u, n));
    PetscCall(do_matvec(xxt_handle->mvi, v, u));

    /* uu = X^T.u_l (local portion) */
    /* technically only need to zero out first i entries */
    /* later turn this into an XXT_solve call ? */
    PetscCall(PCTFS_rvec_zero(uu, m));
    x_ptr = x;
    iptr  = col_indices;
    for (k = 0; k < i; k++) {
      off = *iptr++;
      len = *iptr++;
      PetscCall(PetscBLASIntCast(len, &dlen));
      PetscCallBLAS("BLASdot", uu[k] = BLASdot_(&dlen, u + off, &i1, x_ptr, &i1));
      x_ptr += len;
    }

    /* uu = X^T.u_l (comm portion) */
    PetscCall(PCTFS_ssgl_radd(uu, w, dim, stages));

    /* z = X.uu */
    PetscCall(PCTFS_rvec_zero(z, n));
    x_ptr = x;
    iptr  = col_indices;
    for (k = 0; k < i; k++) {
      off = *iptr++;
      len = *iptr++;
      PetscCall(PetscBLASIntCast(len, &dlen));
      PetscCallBLAS("BLASaxpy", BLASaxpy_(&dlen, &uu[k], x_ptr, &i1, z + off, &i1));
      x_ptr += len;
    }

    /* compute v_l = v_l - z */
    PetscCall(PCTFS_rvec_zero(v + a_n, a_m - a_n));
    PetscCall(PetscBLASIntCast(n, &dlen));
    PetscCallBLAS("BLASaxpy", BLASaxpy_(&dlen, &dm1, z, &i1, v, &i1));

    /* compute u_l = A.v_l */
    if (a_n != a_m) PetscCall(PCTFS_gs_gop_hc(PCTFS_gs_handle, v, "+\0", dim));
    PetscCall(PCTFS_rvec_zero(u, n));
    PetscCall(do_matvec(xxt_handle->mvi, v, u));

    /* compute sqrt(alpha) = sqrt(v_l^T.u_l) - local portion */
    PetscCall(PetscBLASIntCast(n, &dlen));
    PetscCallBLAS("BLASdot", alpha = BLASdot_(&dlen, u, &i1, v, &i1));
    /* compute sqrt(alpha) = sqrt(v_l^T.u_l) - comm portion */
    PetscCall(PCTFS_grop_hc(&alpha, &alpha_w, 1, op, dim));

    alpha = (PetscScalar)PetscSqrtReal((PetscReal)alpha);

    /* check for small alpha */
    /* LATER use this to detect and determine null space */
    PetscCheck(PetscAbsScalar(alpha) >= 1.0e-14, PETSC_COMM_SELF, PETSC_ERR_PLIB, "bad alpha! %g", (double)PetscAbsScalar(alpha));

    /* compute v_l = v_l/sqrt(alpha) */
    PetscCall(PCTFS_rvec_scale(v, 1.0 / alpha, n));

    /* add newly generated column, v_l, to X */
    flag = 1;
    off = len = 0;
    for (k = 0; k < n; k++) {
      if (v[k] != 0.0) {
        len = k; /* index of the last nonzero seen so far */
        if (flag) {
          off  = k; /* index of the first nonzero */
          flag = 0;
        }
      }
    }

    len -= (off - 1); /* inclusive length of the nonzero run v[off .. off+len-1] */

    if (len > 0) {
      if ((xxt_nnz + len) > xxt_max_nnz) {
        PetscCall(PetscInfo(0, "increasing space for X by 2x!\n"));
        xxt_max_nnz *= 2;
        x_ptr = (PetscScalar *)malloc(xxt_max_nnz * sizeof(PetscScalar));
        PetscCall(PCTFS_rvec_copy(x_ptr, x, xxt_nnz));
        free(x);
        x = x_ptr;
        x_ptr += xxt_nnz;
      }
      xxt_nnz += len;
      PetscCall(PCTFS_rvec_copy(x_ptr, v + off, len));

      col_indices[2 * i] = off;
      col_sz[i] = col_indices[2 * i + 1] = len;
      col_vals[i]                        = x_ptr;
    } else {
      col_indices[2 * i] = 0;
      col_sz[i] = col_indices[2 * i + 1] = 0;
      col_vals[i]                        = x_ptr;
    }
  }

  /* close off stages for execution phase */
  while (dim != level) {
    stages[dim++] = i;
    PetscCall(PetscInfo(0, "disconnected!!! dim(%" PetscInt_FMT ")!=level(%" PetscInt_FMT ")\n", dim, level));
  }
  stages[dim] = i;

  xxt_handle->info->n              = xxt_handle->mvi->n;
  xxt_handle->info->m              = m;
  xxt_handle->info->nnz            = xxt_nnz;
  xxt_handle->info->max_nnz        = xxt_max_nnz;
  xxt_handle->info->msg_buf_sz     = stages[level] - stages[0];
  xxt_handle->info->solve_uu       = (PetscScalar *)malloc(m * sizeof(PetscScalar));
  xxt_handle->info->solve_w        = (PetscScalar *)malloc(m * sizeof(PetscScalar));
  xxt_handle->info->x              = x;
  xxt_handle->info->col_vals       = col_vals;
  xxt_handle->info->col_sz         = col_sz;
  xxt_handle->info->col_indices    = col_indices;
  xxt_handle->info->stages         = stages;
  xxt_handle->info->nsolves        = 0;
  xxt_handle->info->tot_solve_time = 0.0;

  free(segs);
  free(u);
  free(v);
  free(uu);
  free(z);
  free(w);

  return PETSC_SUCCESS;
}
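
/* The solve applies x = X.(X^T.b): one pass of dots against the stored
   columns of X to form uu = X^T.b, a fan-in/fan-out reduction of uu across
   the hypercube (PCTFS_ssgl_radd, staged by the stages[] array built above),
   then one pass of axpys accumulating x = X.uu. */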

static PetscErrorCode do_xxt_solve(xxt_ADT xxt_handle, PetscScalar *uc)
{
  PetscInt     off, len, *iptr;
  PetscInt     level       = xxt_handle->level;
  PetscInt     n           = xxt_handle->info->n;
  PetscInt     m           = xxt_handle->info->m;
  PetscInt    *stages      = xxt_handle->info->stages;
  PetscInt    *col_indices = xxt_handle->info->col_indices;
  PetscScalar *x_ptr, *uu_ptr;
  PetscScalar *solve_uu = xxt_handle->info->solve_uu;
  PetscScalar *solve_w  = xxt_handle->info->solve_w;
  PetscScalar *x        = xxt_handle->info->x;
  PetscBLASInt i1 = 1, dlen;

  PetscFunctionBegin;
  uu_ptr = solve_uu;
  PetscCall(PCTFS_rvec_zero(uu_ptr, m));

  /* x  = X.X^T.b */
  /* uu = X^T.b */
  for (x_ptr = x, iptr = col_indices; *iptr != -1; x_ptr += len) {
    off = *iptr++;
    len = *iptr++;
    PetscCall(PetscBLASIntCast(len, &dlen));
    PetscCallBLAS("BLASdot", *uu_ptr++ = BLASdot_(&dlen, uc + off, &i1, x_ptr, &i1));
  }

  /* communication of beta */
  uu_ptr = solve_uu;
  if (level) PetscCall(PCTFS_ssgl_radd(uu_ptr, solve_w, level, stages));

  PetscCall(PCTFS_rvec_zero(uc, n));

  /* x = X.uu */
  for (x_ptr = x, iptr = col_indices; *iptr != -1; x_ptr += len) {
    off = *iptr++;
    len = *iptr++;
    PetscCall(PetscBLASIntCast(len, &dlen));
    PetscCallBLAS("BLASaxpy", BLASaxpy_(&dlen, uu_ptr++, x_ptr, &i1, uc + off, &i1));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode check_handle(xxt_ADT xxt_handle)
{
  PetscInt vals[2], work[2], op[] = {NON_UNIFORM, GL_MIN, GL_MAX};

  PetscFunctionBegin;
  PetscCheck(xxt_handle, PETSC_COMM_SELF, PETSC_ERR_PLIB, "check_handle() :: bad handle :: NULL %p", (void *)xxt_handle);

  vals[0] = vals[1] = xxt_handle->id;
  PetscCall(PCTFS_giop(vals, work, PETSC_STATIC_ARRAY_LENGTH(op) - 1, op));
  PetscCheck(!(vals[0] != vals[1]) && !(xxt_handle->id <= 0), PETSC_COMM_SELF, PETSC_ERR_PLIB, "check_handle() :: bad handle :: id mismatch min/max %" PetscInt_FMT "/%" PetscInt_FMT " %" PetscInt_FMT, vals[0], vals[1], xxt_handle->id);
  PetscFunctionReturn(PETSC_SUCCESS);
}
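
/* det_separators() bisects the processor hypercube one dimension at a time:
   at each level one half fires an indicator vector through the gather/scatter
   handle, and any locally owned dof that receives signal from the opposite
   half lies on the separator for that level. Separator dofs are pushed onto
   the firing order fo[] from the back, so xxt_generate() consumes them
   bottom-up: purely local dofs first, the top-level separator last. */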

static PetscErrorCode det_separators(xxt_ADT xxt_handle)
{
  PetscInt     i, ct, id;
  PetscInt     mask, edge, *iptr;
  PetscInt    *dir, *used;
  PetscInt     sum[4], w[4];
  PetscScalar  rsum[4], rw[4];
  PetscInt     op[] = {GL_ADD, 0};
  PetscScalar *lhs, *rhs;
  PetscInt    *nsep, *lnsep, *fo, nfo = 0;
  PCTFS_gs_ADT PCTFS_gs_handle = xxt_handle->mvi->PCTFS_gs_handle;
  PetscInt    *local2global    = xxt_handle->mvi->local2global;
  PetscInt     n               = xxt_handle->mvi->n;
  PetscInt     m               = xxt_handle->mvi->m;
  PetscInt     level           = xxt_handle->level;
  PetscInt     shared          = 0;

  PetscFunctionBegin;
  dir   = (PetscInt *)malloc(sizeof(PetscInt) * (level + 1));
  nsep  = (PetscInt *)malloc(sizeof(PetscInt) * (level + 1));
  lnsep = (PetscInt *)malloc(sizeof(PetscInt) * (level + 1));
  fo    = (PetscInt *)malloc(sizeof(PetscInt) * (n + 1));
  used  = (PetscInt *)malloc(sizeof(PetscInt) * n);

  PetscCall(PCTFS_ivec_zero(dir, level + 1));
  PetscCall(PCTFS_ivec_zero(nsep, level + 1));
  PetscCall(PCTFS_ivec_zero(lnsep, level + 1));
  PetscCall(PCTFS_ivec_set(fo, -1, n + 1));
  PetscCall(PCTFS_ivec_zero(used, n));

  lhs = (PetscScalar *)malloc(sizeof(PetscScalar) * m);
  rhs = (PetscScalar *)malloc(sizeof(PetscScalar) * m);

  /* determine the # of unique dof */
  /* rsum[0] counts each dof once (1/multiplicity per copy); rsum[1] counts
     copies, so the two differ exactly when some dof is replicated */
  PetscCall(PCTFS_rvec_zero(lhs, m));
  PetscCall(PCTFS_rvec_set(lhs, 1.0, n));
  PetscCall(PCTFS_gs_gop_hc(PCTFS_gs_handle, lhs, "+\0", level));
  PetscCall(PCTFS_rvec_zero(rsum, 2));
  for (i = 0; i < n; i++) {
    if (lhs[i] != 0.0) {
      rsum[0] += 1.0 / lhs[i];
      rsum[1] += lhs[i];
    }
  }
  PetscCall(PCTFS_grop_hc(rsum, rw, 2, op, level));
  rsum[0] += 0.1; /* guard so the integer casts below truncate correctly */
  rsum[1] += 0.1;

  if (PetscAbsScalar(rsum[0] - rsum[1]) > EPS) shared = 1;

  xxt_handle->info->n_global = xxt_handle->info->m_global = (PetscInt)rsum[0];
  xxt_handle->mvi->n_global = xxt_handle->mvi->m_global = (PetscInt)rsum[0];

  /* determine separator sets top down */
  if (shared) {
    for (iptr = fo + n, id = PCTFS_my_id, mask = PCTFS_num_nodes >> 1, edge = level; edge > 0; edge--, mask >>= 1) {
      /* set rsh of hc, fire, and collect lhs responses */
      PetscCall((id < mask) ? PCTFS_rvec_zero(lhs, m) : PCTFS_rvec_set(lhs, 1.0, m));
      PetscCall(PCTFS_gs_gop_hc(PCTFS_gs_handle, lhs, "+\0", edge));

      /* set lsh of hc, fire, and collect rhs responses */
      PetscCall((id < mask) ? PCTFS_rvec_set(rhs, 1.0, m) : PCTFS_rvec_zero(rhs, m));
      PetscCall(PCTFS_gs_gop_hc(PCTFS_gs_handle, rhs, "+\0", edge));

      for (i = 0; i < n; i++) {
        if (id < mask) {
          if (lhs[i] != 0.0) lhs[i] = 1.0;
        }
        if (id >= mask) {
          if (rhs[i] != 0.0) rhs[i] = 1.0;
        }
      }

      if (id < mask) PetscCall(PCTFS_gs_gop_hc(PCTFS_gs_handle, lhs, "+\0", edge - 1));
      else PetscCall(PCTFS_gs_gop_hc(PCTFS_gs_handle, rhs, "+\0", edge - 1));

      /* count number of dofs I own that have signal and not in sep set */
      PetscCall(PCTFS_rvec_zero(rsum, 4));
      PetscCall(PCTFS_ivec_zero(sum, 4));
      for (ct = i = 0; i < n; i++) {
        if (!used[i]) {
          /* number of unmarked dofs on node */
          ct++;
          /* number of dofs to be marked on lhs hc */
          if (id < mask) {
            if (lhs[i] != 0.0) {
              sum[0]++;
              rsum[0] += 1.0 / lhs[i];
            }
          }
          /* number of dofs to be marked on rhs hc */
          if (id >= mask) {
            if (rhs[i] != 0.0) {
              sum[1]++;
              rsum[1] += 1.0 / rhs[i];
            }
          }
        }
      }

      /* go for load balance - choose half with most unmarked dofs, bias LHS */
      (id < mask) ? (sum[2] = ct) : (sum[3] = ct);
      (id < mask) ? (rsum[2] = ct) : (rsum[3] = ct);
      PetscCall(PCTFS_giop_hc(sum, w, 4, op, edge));
      PetscCall(PCTFS_grop_hc(rsum, rw, 4, op, edge));
      rsum[0] += 0.1;
      rsum[1] += 0.1;
      rsum[2] += 0.1;
      rsum[3] += 0.1;

      if (id < mask) {
        /* mark dofs I own that have signal and not in sep set */
        for (ct = i = 0; i < n; i++) {
          if ((!used[i]) && (lhs[i] != 0.0)) {
            ct++;
            nfo++;

            PetscCheck(nfo <= n, PETSC_COMM_SELF, PETSC_ERR_PLIB, "nfo about to exceed n");

            *--iptr = local2global[i];
            used[i] = edge;
          }
        }
        if (ct > 1) PetscCall(PCTFS_ivec_sort(iptr, ct));

        lnsep[edge] = ct;
        nsep[edge]  = (PetscInt)rsum[0];
        dir[edge]   = LEFT;
      }

      if (id >= mask) {
        /* mark dofs I own that have signal and not in sep set */
        for (ct = i = 0; i < n; i++) {
          if ((!used[i]) && (rhs[i] != 0.0)) {
            ct++;
            nfo++;

            PetscCheck(nfo <= n, PETSC_COMM_SELF, PETSC_ERR_PLIB, "nfo about to exceed n");

            *--iptr = local2global[i];
            used[i] = edge;
          }
        }
        if (ct > 1) PetscCall(PCTFS_ivec_sort(iptr, ct));

        lnsep[edge] = ct;
        nsep[edge]  = (PetscInt)rsum[1];
        dir[edge]   = RIGHT;
      }

      /* LATER or we can recur on these to order seps at this level */
      /* do we need full set of separators for this? */

      /* fold rhs hc into lower */
      if (id >= mask) id -= mask;
    }
  } else {
    for (iptr = fo + n, id = PCTFS_my_id, mask = PCTFS_num_nodes >> 1, edge = level; edge > 0; edge--, mask >>= 1) {
      /* set rsh of hc, fire, and collect lhs responses */
      PetscCall((id < mask) ? PCTFS_rvec_zero(lhs, m) : PCTFS_rvec_set(lhs, 1.0, m));
      PetscCall(PCTFS_gs_gop_hc(PCTFS_gs_handle, lhs, "+\0", edge));

      /* set lsh of hc, fire, and collect rhs responses */
      PetscCall((id < mask) ? PCTFS_rvec_set(rhs, 1.0, m) : PCTFS_rvec_zero(rhs, m));
      PetscCall(PCTFS_gs_gop_hc(PCTFS_gs_handle, rhs, "+\0", edge));

      /* count number of dofs I own that have signal and not in sep set */
      PetscCall(PCTFS_ivec_zero(sum, 4));
      for (ct = i = 0; i < n; i++) {
        if (!used[i]) {
          /* number of unmarked dofs on node */
          ct++;
          /* number of dofs to be marked on lhs hc */
          if ((id < mask) && (lhs[i] != 0.0)) sum[0]++;
          /* number of dofs to be marked on rhs hc */
          if ((id >= mask) && (rhs[i] != 0.0)) sum[1]++;
        }
      }

      /* go for load balance - choose half with most unmarked dofs, bias LHS */
      (id < mask) ? (sum[2] = ct) : (sum[3] = ct);
      PetscCall(PCTFS_giop_hc(sum, w, 4, op, edge));

      /* lhs hc wins */
      if (sum[2] >= sum[3]) {
        if (id < mask) {
          /* mark dofs I own that have signal and not in sep set */
          for (ct = i = 0; i < n; i++) {
            if ((!used[i]) && (lhs[i] != 0.0)) {
              ct++;
              nfo++;
              *--iptr = local2global[i];
              used[i] = edge;
            }
          }
          if (ct > 1) PetscCall(PCTFS_ivec_sort(iptr, ct));
          lnsep[edge] = ct;
        }
        nsep[edge] = sum[0];
        dir[edge]  = LEFT;
      } else { /* rhs hc wins */
        if (id >= mask) {
          /* mark dofs I own that have signal and not in sep set */
          for (ct = i = 0; i < n; i++) {
            if ((!used[i]) && (rhs[i] != 0.0)) {
              ct++;
              nfo++;
              *--iptr = local2global[i];
              used[i] = edge;
            }
          }
          if (ct > 1) PetscCall(PCTFS_ivec_sort(iptr, ct));
          lnsep[edge] = ct;
        }
        nsep[edge] = sum[1];
        dir[edge]  = RIGHT;
      }
      /* LATER or we can recur on these to order seps at this level */
      /* do we need full set of separators for this? */

      /* fold rhs hc into lower */
      if (id >= mask) id -= mask;
    }
  }

  /* level 0 is on processor case - so mark the remainder */
  for (ct = i = 0; i < n; i++) {
    if (!used[i]) {
      ct++;
      nfo++;
      *--iptr = local2global[i];
      used[i] = edge;
    }
  }
  if (ct > 1) PetscCall(PCTFS_ivec_sort(iptr, ct));
  lnsep[edge] = ct;
  nsep[edge]  = ct;
  dir[edge]   = LEFT;

  xxt_handle->info->nsep  = nsep;
  xxt_handle->info->lnsep = lnsep;
  xxt_handle->info->fo    = fo;
  xxt_handle->info->nfo   = nfo;

  free(dir);
  free(lhs);
  free(rhs);
  free(used);
  PetscFunctionReturn(PETSC_SUCCESS);
}

static mv_info *set_mvi(PetscInt *local2global, PetscInt n, PetscInt m, PetscErrorCode (*matvec)(mv_info *, PetscScalar *, PetscScalar *), void *grid_data)
{
  mv_info *mvi = (mv_info *)malloc(sizeof(mv_info));

  mvi->n            = n;
  mvi->m            = m;
  mvi->n_global     = -1;
  mvi->m_global     = -1;
  mvi->local2global = (PetscInt *)malloc((m + 1) * sizeof(PetscInt));
  PCTFS_ivec_copy(mvi->local2global, local2global, m);
  mvi->local2global[m] = INT_MAX;
  mvi->matvec          = matvec;
  mvi->grid_data       = grid_data;

  /* set xxt communication handle to perform restricted matvec */
  mvi->PCTFS_gs_handle = PCTFS_gs_init(local2global, m, PCTFS_num_nodes);

  return mvi;
}

static PetscErrorCode do_matvec(mv_info *A, PetscScalar *v, PetscScalar *u)
{
  PetscFunctionBegin;
  /* the callback receives grid_data as its first argument; the cast merely
     satisfies the mv_info* signature stored in A->matvec */
  PetscCall(A->matvec((mv_info *)A->grid_data, v, u));
  PetscFunctionReturn(PETSC_SUCCESS);
}