Actual source code: bddcschurs.c
petsc-3.8.4 2018-03-24
1: #include <../src/ksp/pc/impls/bddc/bddc.h>
2: #include <../src/ksp/pc/impls/bddc/bddcprivate.h>
3: #include <../src/mat/impls/dense/seq/dense.h>
4: #include <petscblaslapack.h>
6: PETSC_STATIC_INLINE PetscErrorCode PCBDDCAdjGetNextLayer_Private(PetscInt*,PetscInt,PetscBT,PetscInt*,PetscInt*,PetscInt*);
7: static PetscErrorCode PCBDDCComputeExplicitSchur(Mat,PetscBool,MatReuse,Mat*);
8: static PetscErrorCode PCBDDCReuseSolvers_Interior(PC,Vec,Vec);
9: static PetscErrorCode PCBDDCReuseSolvers_Correction(PC,Vec,Vec);
11: /* if v2 is not present, the correction is applied in place on v */
12: PetscErrorCode PCBDDCReuseSolversBenignAdapt(PCBDDCReuseSolvers ctx, Vec v, Vec v2, PetscBool sol, PetscBool full)
13: {
14: PetscScalar *array;
15: PetscScalar *array2;
19: if (!ctx->benign_n) return(0);
20: if (sol && full) {
21: PetscInt n_I,size_schur;
23: /* get sizes */
24: MatGetSize(ctx->benign_csAIB,&size_schur,NULL);
25: VecGetSize(v,&n_I);
26: n_I = n_I - size_schur;
27: /* get schur sol from array */
28: VecGetArray(v,&array);
29: VecPlaceArray(ctx->benign_dummy_schur_vec,array+n_I);
30: VecRestoreArray(v,&array);
31: /* apply interior sol correction */
32: MatMultTranspose(ctx->benign_csAIB,ctx->benign_dummy_schur_vec,ctx->benign_corr_work);
33: VecResetArray(ctx->benign_dummy_schur_vec);
34: MatMultAdd(ctx->benign_AIIm1ones,ctx->benign_corr_work,v,v);
35: }
36: if (v2) {
37: PetscInt nl;
39: VecGetArrayRead(v,(const PetscScalar**)&array);
40: VecGetLocalSize(v2,&nl);
41: VecGetArray(v2,&array2);
42: PetscMemcpy(array2,array,nl*sizeof(PetscScalar));
43: } else {
44: VecGetArray(v,&array);
45: array2 = array;
46: }
47: if (!sol) { /* change rhs */
48: PetscInt n;
49: for (n=0;n<ctx->benign_n;n++) {
50: PetscScalar sum = 0.;
51: const PetscInt *cols;
52: PetscInt nz,i;
54: ISGetLocalSize(ctx->benign_zerodiag_subs[n],&nz);
55: ISGetIndices(ctx->benign_zerodiag_subs[n],&cols);
56: for (i=0;i<nz-1;i++) sum += array[cols[i]];
57: #if defined(PETSC_USE_COMPLEX)
58: sum = -(PetscRealPart(sum)/nz + PETSC_i*(PetscImaginaryPart(sum)/nz));
59: #else
60: sum = -sum/nz;
61: #endif
62: for (i=0;i<nz-1;i++) array2[cols[i]] += sum;
63: ctx->benign_save_vals[n] = array2[cols[nz-1]];
64: array2[cols[nz-1]] = sum;
65: ISRestoreIndices(ctx->benign_zerodiag_subs[n],&cols);
66: }
67: } else {
68: PetscInt n;
69: for (n=0;n<ctx->benign_n;n++) {
70: PetscScalar sum = 0.;
71: const PetscInt *cols;
72: PetscInt nz,i;
73: ISGetLocalSize(ctx->benign_zerodiag_subs[n],&nz);
74: ISGetIndices(ctx->benign_zerodiag_subs[n],&cols);
75: for (i=0;i<nz-1;i++) sum += array[cols[i]];
76: #if defined(PETSC_USE_COMPLEX)
77: sum = -(PetscRealPart(sum)/nz + PETSC_i*(PetscImaginaryPart(sum)/nz));
78: #else
79: sum = -sum/nz;
80: #endif
81: for (i=0;i<nz-1;i++) array2[cols[i]] += sum;
82: array2[cols[nz-1]] = ctx->benign_save_vals[n];
83: ISRestoreIndices(ctx->benign_zerodiag_subs[n],&cols);
84: }
85: }
86: if (v2) {
87: VecRestoreArrayRead(v,(const PetscScalar**)&array);
88: VecRestoreArray(v2,&array2);
89: } else {
90: VecRestoreArray(v,&array);
91: }
92: if (!sol && full) {
93: Vec usedv;
94: PetscInt n_I,size_schur;
96: /* get sizes */
97: MatGetSize(ctx->benign_csAIB,&size_schur,NULL);
98: VecGetSize(v,&n_I);
99: n_I = n_I - size_schur;
100: /* compute schur rhs correction */
101: if (v2) {
102: usedv = v2;
103: } else {
104: usedv = v;
105: }
106: /* apply schur rhs correction */
107: MatMultTranspose(ctx->benign_AIIm1ones,usedv,ctx->benign_corr_work);
108: VecGetArrayRead(usedv,(const PetscScalar**)&array);
109: VecPlaceArray(ctx->benign_dummy_schur_vec,array+n_I);
110: VecRestoreArrayRead(usedv,(const PetscScalar**)&array);
111: MatMultAdd(ctx->benign_csAIB,ctx->benign_corr_work,ctx->benign_dummy_schur_vec,ctx->benign_dummy_schur_vec);
112: VecResetArray(ctx->benign_dummy_schur_vec);
113: }
114: return(0);
115: }
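/*
   Editorial sketch (not part of the original source): minimal usage of
   PCBDDCReuseSolversBenignAdapt() above, assuming a fully populated
   PCBDDCReuseSolvers context "ctx" with a factored matrix ctx->F and the
   full correction requested.  Passing NULL as the second vector applies the
   correction in place.  The names example_benign_solve, rhs and sol are
   placeholders; the block is guarded out of compilation.
*/
#if 0
static PetscErrorCode example_benign_solve(PCBDDCReuseSolvers ctx, Vec rhs, Vec sol)
{
  /* adapt the right-hand side before the factored solve (sol flag = PETSC_FALSE) */
  PCBDDCReuseSolversBenignAdapt(ctx,rhs,NULL,PETSC_FALSE,PETSC_TRUE);
  /* solve with the factor held by the context */
  MatSolve(ctx->F,rhs,sol);
  /* undo the adaptation on the solution, in place (sol flag = PETSC_TRUE) */
  PCBDDCReuseSolversBenignAdapt(ctx,sol,NULL,PETSC_TRUE,PETSC_TRUE);
  return(0);
}
#endif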
117: static PetscErrorCode PCBDDCReuseSolvers_Solve_Private(PC pc, Vec rhs, Vec sol, PetscBool transpose, PetscBool full)
118: {
119: PCBDDCReuseSolvers ctx;
120: PetscBool copy = PETSC_FALSE;
121: PetscErrorCode ierr;
124: PCShellGetContext(pc,(void **)&ctx);
125: if (full) {
126: #if defined(PETSC_HAVE_MUMPS)
127: MatMumpsSetIcntl(ctx->F,26,-1);
128: #endif
129: #if defined(PETSC_HAVE_MKL_PARDISO)
130: MatMkl_PardisoSetCntl(ctx->F,70,0);
131: #endif
132: copy = ctx->has_vertices;
133: } else { /* interior solver */
134: #if defined(PETSC_HAVE_MUMPS)
135: MatMumpsSetIcntl(ctx->F,26,0);
136: #endif
137: #if defined(PETSC_HAVE_MKL_PARDISO)
138: MatMkl_PardisoSetCntl(ctx->F,70,1);
139: #endif
140: copy = PETSC_TRUE;
141: }
142: /* copy rhs into factored matrix workspace */
143: if (copy) {
144: PetscInt n;
145: PetscScalar *array,*array_solver;
147: VecGetLocalSize(rhs,&n);
148: VecGetArrayRead(rhs,(const PetscScalar**)&array);
149: VecGetArray(ctx->rhs,&array_solver);
150: PetscMemcpy(array_solver,array,n*sizeof(PetscScalar));
151: VecRestoreArray(ctx->rhs,&array_solver);
152: VecRestoreArrayRead(rhs,(const PetscScalar**)&array);
154: PCBDDCReuseSolversBenignAdapt(ctx,ctx->rhs,NULL,PETSC_FALSE,full);
155: if (transpose) {
156: MatSolveTranspose(ctx->F,ctx->rhs,ctx->sol);
157: } else {
158: MatSolve(ctx->F,ctx->rhs,ctx->sol);
159: }
160: PCBDDCReuseSolversBenignAdapt(ctx,ctx->sol,NULL,PETSC_TRUE,full);
162: /* copy data back to the caller's workspace */
163: VecGetArrayRead(ctx->sol,(const PetscScalar**)&array_solver);
164: VecGetArray(sol,&array);
165: PetscMemcpy(array,array_solver,n*sizeof(PetscScalar));
166: VecRestoreArray(sol,&array);
167: VecRestoreArrayRead(ctx->sol,(const PetscScalar**)&array_solver);
168: } else {
169: if (ctx->benign_n) {
170: PCBDDCReuseSolversBenignAdapt(ctx,rhs,ctx->rhs,PETSC_FALSE,full);
171: if (transpose) {
172: MatSolveTranspose(ctx->F,ctx->rhs,sol);
173: } else {
174: MatSolve(ctx->F,ctx->rhs,sol);
175: }
176: PCBDDCReuseSolversBenignAdapt(ctx,sol,NULL,PETSC_TRUE,full);
177: } else {
178: if (transpose) {
179: MatSolveTranspose(ctx->F,rhs,sol);
180: } else {
181: MatSolve(ctx->F,rhs,sol);
182: }
183: }
184: }
185: /* restore defaults */
186: #if defined(PETSC_HAVE_MUMPS)
187: MatMumpsSetIcntl(ctx->F,26,-1);
188: #endif
189: #if defined(PETSC_HAVE_MKL_PARDISO)
190: MatMkl_PardisoSetCntl(ctx->F,70,0);
191: #endif
192: return(0);
193: }
195: static PetscErrorCode PCBDDCReuseSolvers_Correction(PC pc, Vec rhs, Vec sol)
196: {
197: PetscErrorCode ierr;
200: PCBDDCReuseSolvers_Solve_Private(pc,rhs,sol,PETSC_FALSE,PETSC_TRUE);
201: return(0);
202: }
204: static PetscErrorCode PCBDDCReuseSolvers_CorrectionTranspose(PC pc, Vec rhs, Vec sol)
205: {
206: PetscErrorCode ierr;
209: PCBDDCReuseSolvers_Solve_Private(pc,rhs,sol,PETSC_TRUE,PETSC_TRUE);
210: return(0);
211: }
213: static PetscErrorCode PCBDDCReuseSolvers_Interior(PC pc, Vec rhs, Vec sol)
214: {
215: PetscErrorCode ierr;
218: PCBDDCReuseSolvers_Solve_Private(pc,rhs,sol,PETSC_FALSE,PETSC_FALSE);
219: return(0);
220: }
222: static PetscErrorCode PCBDDCReuseSolvers_InteriorTranspose(PC pc, Vec rhs, Vec sol)
223: {
224: PetscErrorCode ierr;
227: PCBDDCReuseSolvers_Solve_Private(pc,rhs,sol,PETSC_TRUE,PETSC_FALSE);
228: return(0);
229: }
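/*
   Editorial note (not part of the original source): the four thin wrappers
   above are installed as apply/applytranspose callbacks of PCSHELL objects
   in the reuse_solvers branch of PCBDDCSubSchursSetUp() further below.
   A condensed sketch of that wiring, assuming "msolv_ctx" is an already
   populated PCBDDCReuseSolvers context and "A_II" the interior operator:
*/
#if 0
  PC pc;
  PCCreate(PetscObjectComm((PetscObject)A_II),&pc);
  PCSetOperators(pc,A_II,A_II);
  PCSetType(pc,PCSHELL);
  PCShellSetContext(pc,msolv_ctx);
  PCShellSetApply(pc,PCBDDCReuseSolvers_Interior);
  PCShellSetApplyTranspose(pc,PCBDDCReuseSolvers_InteriorTranspose);
#endif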
231: static PetscErrorCode PCBDDCReuseSolversReset(PCBDDCReuseSolvers reuse)
232: {
233: PetscInt i;
237: MatDestroy(&reuse->F);
238: VecDestroy(&reuse->sol);
239: VecDestroy(&reuse->rhs);
240: PCDestroy(&reuse->interior_solver);
241: PCDestroy(&reuse->correction_solver);
242: ISDestroy(&reuse->is_R);
243: ISDestroy(&reuse->is_B);
244: VecScatterDestroy(&reuse->correction_scatter_B);
245: VecDestroy(&reuse->sol_B);
246: VecDestroy(&reuse->rhs_B);
247: for (i=0;i<reuse->benign_n;i++) {
248: ISDestroy(&reuse->benign_zerodiag_subs[i]);
249: }
250: PetscFree(reuse->benign_zerodiag_subs);
251: PetscFree(reuse->benign_save_vals);
252: MatDestroy(&reuse->benign_csAIB);
253: MatDestroy(&reuse->benign_AIIm1ones);
254: VecDestroy(&reuse->benign_corr_work);
255: VecDestroy(&reuse->benign_dummy_schur_vec);
256: return(0);
257: }
259: static PetscErrorCode PCBDDCComputeExplicitSchur(Mat M, PetscBool issym, MatReuse reuse, Mat *S)
260: {
261: Mat B, C, D, Bd, Cd, AinvBd;
262: KSP ksp;
263: PC pc;
264: PetscBool isLU, isILU, isCHOL, Bdense, Cdense;
265: PetscReal fill = 2.0;
266: PetscInt n_I;
267: PetscMPIInt size;
271: MPI_Comm_size(PetscObjectComm((PetscObject)M),&size);
272: if (size != 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Not for parallel matrices");
273: if (reuse == MAT_REUSE_MATRIX) {
274: PetscBool Sdense;
276: PetscObjectTypeCompare((PetscObject)*S, MATSEQDENSE, &Sdense);
277: if (!Sdense) SETERRQ(PetscObjectComm((PetscObject)M),PETSC_ERR_SUP,"S should be dense");
278: }
279: MatSchurComplementGetSubMatrices(M, NULL, NULL, &B, &C, &D);
280: MatSchurComplementGetKSP(M, &ksp);
281: KSPGetPC(ksp, &pc);
282: PetscObjectTypeCompare((PetscObject) pc, PCLU, &isLU);
283: PetscObjectTypeCompare((PetscObject) pc, PCILU, &isILU);
284: PetscObjectTypeCompare((PetscObject) pc, PCCHOLESKY, &isCHOL);
285: PetscObjectTypeCompare((PetscObject) B, MATSEQDENSE, &Bdense);
286: PetscObjectTypeCompare((PetscObject) C, MATSEQDENSE, &Cdense);
287: MatGetSize(B,&n_I,NULL);
288: if (n_I) {
289: if (!Bdense) {
290: MatConvert(B, MATSEQDENSE, MAT_INITIAL_MATRIX, &Bd);
291: } else {
292: Bd = B;
293: }
295: if (isLU || isILU || isCHOL) {
296: Mat fact;
297: KSPSetUp(ksp);
298: PCFactorGetMatrix(pc, &fact);
299: MatDuplicate(Bd, MAT_DO_NOT_COPY_VALUES, &AinvBd);
300: MatMatSolve(fact, Bd, AinvBd);
301: } else {
302: PetscBool ex = PETSC_TRUE;
304: if (ex) {
305: Mat Ainvd;
307: PCComputeExplicitOperator(pc, &Ainvd);
308: MatMatMult(Ainvd, Bd, MAT_INITIAL_MATRIX, fill, &AinvBd);
309: MatDestroy(&Ainvd);
310: } else {
311: Vec sol,rhs;
312: PetscScalar *arrayrhs,*arraysol;
313: PetscInt i,nrhs,n;
315: MatDuplicate(Bd, MAT_DO_NOT_COPY_VALUES, &AinvBd);
316: MatGetSize(Bd,&n,&nrhs);
317: MatDenseGetArray(Bd,&arrayrhs);
318: MatDenseGetArray(AinvBd,&arraysol);
319: KSPGetSolution(ksp,&sol);
320: KSPGetRhs(ksp,&rhs);
321: for (i=0;i<nrhs;i++) {
322: VecPlaceArray(rhs,arrayrhs+i*n);
323: VecPlaceArray(sol,arraysol+i*n);
324: KSPSolve(ksp,rhs,sol);
325: VecResetArray(rhs);
326: VecResetArray(sol);
327: }
328: MatDenseRestoreArray(Bd,&arrayrhs);
329: MatDenseRestoreArray(AinvBd,&arraysol);
330: }
331: }
332: if (!Bdense && !issym) {
333: MatDestroy(&Bd);
334: }
336: if (!issym) {
337: if (!Cdense) {
338: MatConvert(C, MATSEQDENSE, MAT_INITIAL_MATRIX, &Cd);
339: } else {
340: Cd = C;
341: }
342: MatMatMult(Cd, AinvBd, reuse, fill, S);
343: if (!Cdense) {
344: MatDestroy(&Cd);
345: }
346: } else {
347: MatTransposeMatMult(Bd, AinvBd, reuse, fill, S);
348: if (!Bdense) {
349: MatDestroy(&Bd);
350: }
351: }
352: MatDestroy(&AinvBd);
353: }
355: if (D) {
356: Mat Dd;
357: PetscBool Ddense;
359: PetscObjectTypeCompare((PetscObject)D,MATSEQDENSE,&Ddense);
360: if (!Ddense) {
361: MatConvert(D, MATSEQDENSE, MAT_INITIAL_MATRIX, &Dd);
362: } else {
363: Dd = D;
364: }
365: if (n_I) {
366: MatAYPX(*S,-1.0,Dd,SAME_NONZERO_PATTERN);
367: } else {
368: if (reuse == MAT_INITIAL_MATRIX) {
369: MatDuplicate(Dd,MAT_COPY_VALUES,S);
370: } else {
371: MatCopy(Dd,*S,SAME_NONZERO_PATTERN);
372: }
373: }
374: if (!Ddense) {
375: MatDestroy(&Dd);
376: }
377: } else {
378: MatScale(*S,-1.0);
379: }
380: return(0);
381: }
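/*
   Editorial sketch (not part of the original source): how the routine above
   is typically driven.  An implicit Schur complement S = A_EE - A_EI A_II^-1 A_IE
   is built with MatCreateSchurComplement() and then made explicit as a dense
   matrix.  Matrix names are placeholders; with MAT_REUSE_MATRIX the caller
   must instead pass an existing SEQDENSE matrix, as done in the subset loop
   further below.
*/
#if 0
  Mat S_impl,S_expl;
  MatCreateSchurComplement(A_II,A_II,A_IE,A_EI,A_EE,&S_impl);
  PCBDDCComputeExplicitSchur(S_impl,PETSC_FALSE,MAT_INITIAL_MATRIX,&S_expl);
  MatDestroy(&S_impl);
  /* ... use the dense S_expl ... */
  MatDestroy(&S_expl);
#endif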
383: PetscErrorCode PCBDDCSubSchursSetUp(PCBDDCSubSchurs sub_schurs, Mat Ain, Mat Sin, PetscBool exact_schur, PetscInt xadj[], PetscInt adjncy[], PetscInt nlayers, Vec scaling, PetscBool compute_Stilda, PetscBool reuse_solvers, PetscBool benign_trick, PetscInt benign_n, PetscInt benign_p0_lidx[], IS benign_zerodiag_subs[], Mat change, IS change_primal)
384: {
385: Mat F,A_II,A_IB,A_BI,A_BB,AE_II;
386: Mat S_all;
387: Mat global_schur_subsets,work_mat,*submats;
388: ISLocalToGlobalMapping l2gmap_subsets;
389: IS is_I,is_I_layer;
390: IS all_subsets,all_subsets_mult,all_subsets_n;
391: PetscInt *nnz,*all_local_idx_N;
392: PetscInt *auxnum1,*auxnum2;
393: PetscInt i,subset_size,max_subset_size;
394: PetscInt n_B,extra,local_size,global_size;
395: PetscBLASInt B_N,B_ierr,B_lwork,*pivots;
396: PetscScalar *Bwork;
397: PetscSubcomm subcomm;
398: PetscMPIInt color,rank;
399: MPI_Comm comm_n;
400: PetscBool deluxe = PETSC_TRUE, use_getr = PETSC_FALSE;
401: PetscErrorCode ierr;
404: MatDestroy(&sub_schurs->A);
405: MatDestroy(&sub_schurs->S);
407: sub_schurs->is_hermitian = PETSC_TRUE;
408: sub_schurs->is_posdef = PETSC_TRUE;
409: if (benign_trick) sub_schurs->is_posdef = PETSC_FALSE;
410: PetscOptionsBegin(PetscObjectComm((PetscObject)sub_schurs->l2gmap),sub_schurs->prefix,"BDDC sub_schurs options","PC");
411: PetscOptionsBool("-sub_schurs_hermitian","Hermitian problem",NULL,sub_schurs->is_hermitian,&sub_schurs->is_hermitian,NULL);
412: PetscOptionsBool("-sub_schurs_posdef","Positive definite problem",NULL,sub_schurs->is_posdef,&sub_schurs->is_posdef,NULL);
413: PetscOptionsEnd();
415: /* convert matrix if needed */
416: if (Ain) {
417: PetscBool isseqaij;
418: PetscObjectTypeCompare((PetscObject)Ain,MATSEQAIJ,&isseqaij);
419: if (isseqaij) {
420: PetscObjectReference((PetscObject)Ain);
421: sub_schurs->A = Ain;
422: } else {
423: MatConvert(Ain,MATSEQAIJ,MAT_INITIAL_MATRIX,&sub_schurs->A);
424: }
425: }
427: PetscObjectReference((PetscObject)Sin);
428: sub_schurs->S = Sin;
429: if (sub_schurs->schur_explicit) {
430: sub_schurs->schur_explicit = (PetscBool)(!!sub_schurs->A);
431: }
433: /* preliminary checks */
434: if (!sub_schurs->schur_explicit && compute_Stilda) SETERRQ(PetscObjectComm((PetscObject)sub_schurs->l2gmap),PETSC_ERR_SUP,"Adaptive selection of constraints requires MUMPS and/or MKL_PARDISO");
436: /* restrict work on active processes */
437: color = 0;
438: if (!sub_schurs->n_subs) color = 1; /* this can happen if we are in a multilevel case or if the subdomain is disconnected */
439: MPI_Comm_rank(PetscObjectComm((PetscObject)sub_schurs->l2gmap),&rank);
440: PetscSubcommCreate(PetscObjectComm((PetscObject)sub_schurs->l2gmap),&subcomm);
441: PetscSubcommSetNumber(subcomm,2);
442: PetscSubcommSetTypeGeneral(subcomm,color,rank);
443: PetscCommDuplicate(PetscSubcommChild(subcomm),&comm_n,NULL);
444: PetscSubcommDestroy(&subcomm);
445: if (!sub_schurs->n_subs) {
446: PetscCommDestroy(&comm_n);
447: return(0);
448: }
449: /* PetscCommDuplicate(PetscObjectComm((PetscObject)sub_schurs->l2gmap),&comm_n,NULL); */
451: /* get Schur complement matrices */
452: if (!sub_schurs->schur_explicit) {
453: Mat tA_IB,tA_BI,tA_BB;
454: PetscBool isseqsbaij;
455: MatSchurComplementGetSubMatrices(sub_schurs->S,&A_II,NULL,&tA_IB,&tA_BI,&tA_BB);
456: PetscObjectTypeCompare((PetscObject)tA_BB,MATSEQSBAIJ,&isseqsbaij);
457: if (isseqsbaij) {
458: MatConvert(tA_BB,MATSEQAIJ,MAT_INITIAL_MATRIX,&A_BB);
459: MatConvert(tA_IB,MATSEQAIJ,MAT_INITIAL_MATRIX,&A_IB);
460: MatConvert(tA_BI,MATSEQAIJ,MAT_INITIAL_MATRIX,&A_BI);
461: } else {
462: PetscObjectReference((PetscObject)tA_BB);
463: A_BB = tA_BB;
464: PetscObjectReference((PetscObject)tA_IB);
465: A_IB = tA_IB;
466: PetscObjectReference((PetscObject)tA_BI);
467: A_BI = tA_BI;
468: }
469: } else {
470: A_II = NULL;
471: A_IB = NULL;
472: A_BI = NULL;
473: A_BB = NULL;
474: }
475: S_all = NULL;
477: /* determine interior problems */
478: ISGetLocalSize(sub_schurs->is_I,&i);
479: if (nlayers >= 0 && i) { /* Interior problems can be different from the original one */
480: PetscBT touched;
481: const PetscInt* idx_B;
482: PetscInt n_I,n_B,n_local_dofs,n_prev_added,j,layer,*local_numbering;
484: if (!xadj) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Cannot request layering without adjacency");
485: /* get sizes */
486: ISGetLocalSize(sub_schurs->is_I,&n_I);
487: ISGetLocalSize(sub_schurs->is_B,&n_B);
489: PetscMalloc1(n_I+n_B,&local_numbering);
490: PetscBTCreate(n_I+n_B,&touched);
491: PetscBTMemzero(n_I+n_B,touched);
493: /* all boundary dofs must be skipped when adding layers */
494: ISGetIndices(sub_schurs->is_B,&idx_B);
495: for (j=0;j<n_B;j++) {
496: PetscBTSet(touched,idx_B[j]);
497: }
498: PetscMemcpy(local_numbering,idx_B,n_B*sizeof(PetscInt));
499: ISRestoreIndices(sub_schurs->is_B,&idx_B);
501: /* add prescribed number of layers of dofs */
502: n_local_dofs = n_B;
503: n_prev_added = n_B;
504: for (layer=0;layer<nlayers;layer++) {
505: PetscInt n_added;
506: if (n_local_dofs == n_I+n_B) break;
507: if (n_local_dofs > n_I+n_B) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Error querying layer %D. Out-of-bounds access (%D > %D)",layer,n_local_dofs,n_I+n_B);
508: PCBDDCAdjGetNextLayer_Private(local_numbering+n_local_dofs,n_prev_added,touched,xadj,adjncy,&n_added);
509: n_prev_added = n_added;
510: n_local_dofs += n_added;
511: if (!n_added) break;
512: }
513: PetscBTDestroy(&touched);
515: /* IS for I layer dofs in original numbering */
516: ISCreateGeneral(PetscObjectComm((PetscObject)sub_schurs->is_I),n_local_dofs-n_B,local_numbering+n_B,PETSC_COPY_VALUES,&is_I_layer);
517: PetscFree(local_numbering);
518: ISSort(is_I_layer);
519: /* IS for I layer dofs in I numbering */
520: if (!sub_schurs->schur_explicit) {
521: ISLocalToGlobalMapping ItoNmap;
522: ISLocalToGlobalMappingCreateIS(sub_schurs->is_I,&ItoNmap);
523: ISGlobalToLocalMappingApplyIS(ItoNmap,IS_GTOLM_DROP,is_I_layer,&is_I);
524: ISLocalToGlobalMappingDestroy(&ItoNmap);
526: /* II block */
527: MatCreateSubMatrix(A_II,is_I,is_I,MAT_INITIAL_MATRIX,&AE_II);
528: }
529: } else {
530: PetscInt n_I;
532: /* IS for I dofs in original numbering */
533: PetscObjectReference((PetscObject)sub_schurs->is_I);
534: is_I_layer = sub_schurs->is_I;
536: /* IS for I dofs in I numbering (strided 1) */
537: if (!sub_schurs->schur_explicit) {
538: ISGetSize(sub_schurs->is_I,&n_I);
539: ISCreateStride(PetscObjectComm((PetscObject)sub_schurs->is_I),n_I,0,1,&is_I);
541: /* II block is the same */
542: PetscObjectReference((PetscObject)A_II);
543: AE_II = A_II;
544: }
545: }
547: /* Get info on subset sizes and sum of all subsets sizes */
548: max_subset_size = 0;
549: local_size = 0;
550: for (i=0;i<sub_schurs->n_subs;i++) {
551: ISGetLocalSize(sub_schurs->is_subs[i],&subset_size);
552: max_subset_size = PetscMax(subset_size,max_subset_size);
553: local_size += subset_size;
554: }
556: /* Work arrays for local indices */
557: extra = 0;
558: ISGetLocalSize(sub_schurs->is_B,&n_B);
559: if (sub_schurs->schur_explicit && is_I_layer) {
560: ISGetLocalSize(is_I_layer,&extra);
561: }
562: PetscMalloc1(n_B+extra,&all_local_idx_N);
563: if (extra) {
564: const PetscInt *idxs;
565: ISGetIndices(is_I_layer,&idxs);
566: PetscMemcpy(all_local_idx_N,idxs,extra*sizeof(PetscInt));
567: ISRestoreIndices(is_I_layer,&idxs);
568: }
569: PetscMalloc1(local_size,&nnz);
570: PetscMalloc1(sub_schurs->n_subs,&auxnum1);
571: PetscMalloc1(sub_schurs->n_subs,&auxnum2);
573: /* Get local indices in local numbering */
574: local_size = 0;
575: for (i=0;i<sub_schurs->n_subs;i++) {
576: PetscInt j;
577: const PetscInt *idxs;
579: ISGetLocalSize(sub_schurs->is_subs[i],&subset_size);
580: ISGetIndices(sub_schurs->is_subs[i],&idxs);
581: /* start (smallest in global ordering) and multiplicity */
582: auxnum1[i] = idxs[0];
583: auxnum2[i] = subset_size;
584: /* subset indices in local numbering */
585: PetscMemcpy(all_local_idx_N+local_size+extra,idxs,subset_size*sizeof(PetscInt));
586: ISRestoreIndices(sub_schurs->is_subs[i],&idxs);
587: for (j=0;j<subset_size;j++) nnz[local_size+j] = subset_size;
588: local_size += subset_size;
589: }
591: /* allocate extra workspace needed only for GETRI */
592: if (local_size && !benign_trick && (!sub_schurs->is_hermitian || !sub_schurs->is_posdef)) {
593: PetscScalar lwork,dummyscalar = 0.;
594: PetscBLASInt dummyint = 0;
596: use_getr = PETSC_TRUE;
597: B_lwork = -1;
598: PetscBLASIntCast(local_size,&B_N);
599: PetscFPTrapPush(PETSC_FP_TRAP_OFF);
600: PetscStackCallBLAS("LAPACKgetri",LAPACKgetri_(&B_N,&dummyscalar,&B_N,&dummyint,&lwork,&B_lwork,&B_ierr));
601: PetscFPTrapPop();
602: if (B_ierr) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error in query to GETRI Lapack routine %d",(int)B_ierr);
603: PetscBLASIntCast((PetscInt)PetscRealPart(lwork),&B_lwork);
604: PetscMalloc2(B_lwork,&Bwork,B_N,&pivots);
605: } else {
606: Bwork = NULL;
607: pivots = NULL;
608: }
610: /* prepare parallel matrices for properly summing up the Schur complements on subsets */
611: ISCreateGeneral(comm_n,sub_schurs->n_subs,auxnum1,PETSC_OWN_POINTER,&all_subsets_n);
612: ISLocalToGlobalMappingApplyIS(sub_schurs->l2gmap,all_subsets_n,&all_subsets);
613: ISDestroy(&all_subsets_n);
614: ISCreateGeneral(comm_n,sub_schurs->n_subs,auxnum2,PETSC_OWN_POINTER,&all_subsets_mult);
615: ISRenumber(all_subsets,all_subsets_mult,&global_size,&all_subsets_n);
616: ISDestroy(&all_subsets);
617: ISDestroy(&all_subsets_mult);
618: ISGetLocalSize(all_subsets_n,&i);
619: if (i != local_size) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Invalid size of new subset! %D != %D",i,local_size);
620: ISLocalToGlobalMappingCreateIS(all_subsets_n,&l2gmap_subsets);
621: MatCreateIS(comm_n,1,PETSC_DECIDE,PETSC_DECIDE,global_size,global_size,l2gmap_subsets,NULL,&work_mat);
622: ISLocalToGlobalMappingDestroy(&l2gmap_subsets);
623: MatCreate(PetscObjectComm((PetscObject)work_mat),&global_schur_subsets);
624: MatSetSizes(global_schur_subsets,PETSC_DECIDE,PETSC_DECIDE,global_size,global_size);
625: MatSetType(global_schur_subsets,MATMPIAIJ);
627: /* subset indices in local boundary numbering */
628: if (!sub_schurs->is_Ej_all) {
629: PetscInt *all_local_idx_B;
631: PetscMalloc1(local_size,&all_local_idx_B);
632: ISGlobalToLocalMappingApply(sub_schurs->BtoNmap,IS_GTOLM_DROP,local_size,all_local_idx_N+extra,&subset_size,all_local_idx_B);
633: if (subset_size != local_size) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Error in sub_schurs serial (BtoNmap)! %D != %D\n",subset_size,local_size);
634: ISCreateGeneral(PETSC_COMM_SELF,local_size,all_local_idx_B,PETSC_OWN_POINTER,&sub_schurs->is_Ej_all);
635: }
637: if (change) {
638: ISLocalToGlobalMapping BtoS;
639: IS change_primal_B;
640: IS change_primal_all;
642: if (sub_schurs->change_primal_sub) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"This should not happen");
643: if (sub_schurs->change) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"This should not happen");
644: PetscMalloc1(sub_schurs->n_subs,&sub_schurs->change_primal_sub);
645: for (i=0;i<sub_schurs->n_subs;i++) {
646: ISLocalToGlobalMapping NtoS;
647: ISLocalToGlobalMappingCreateIS(sub_schurs->is_subs[i],&NtoS);
648: ISGlobalToLocalMappingApplyIS(NtoS,IS_GTOLM_DROP,change_primal,&sub_schurs->change_primal_sub[i]);
649: ISLocalToGlobalMappingDestroy(&NtoS);
650: }
651: ISGlobalToLocalMappingApplyIS(sub_schurs->BtoNmap,IS_GTOLM_DROP,change_primal,&change_primal_B);
652: ISLocalToGlobalMappingCreateIS(sub_schurs->is_Ej_all,&BtoS);
653: ISGlobalToLocalMappingApplyIS(BtoS,IS_GTOLM_DROP,change_primal_B,&change_primal_all);
654: ISLocalToGlobalMappingDestroy(&BtoS);
655: ISDestroy(&change_primal_B);
656: PetscMalloc1(sub_schurs->n_subs,&sub_schurs->change);
657: for (i=0;i<sub_schurs->n_subs;i++) {
658: Mat change_sub;
660: ISGetLocalSize(sub_schurs->is_subs[i],&subset_size);
661: KSPCreate(PETSC_COMM_SELF,&sub_schurs->change[i]);
662: KSPSetType(sub_schurs->change[i],KSPPREONLY);
663: if (!sub_schurs->change_with_qr) {
664: MatCreateSubMatrix(change,sub_schurs->is_subs[i],sub_schurs->is_subs[i],MAT_INITIAL_MATRIX,&change_sub);
665: } else {
666: Mat change_subt;
667: MatCreateSubMatrix(change,sub_schurs->is_subs[i],sub_schurs->is_subs[i],MAT_INITIAL_MATRIX,&change_subt);
668: MatConvert(change_subt,MATSEQDENSE,MAT_INITIAL_MATRIX,&change_sub);
669: MatDestroy(&change_subt);
670: }
671: KSPSetOperators(sub_schurs->change[i],change_sub,change_sub);
672: MatDestroy(&change_sub);
673: KSPSetOptionsPrefix(sub_schurs->change[i],sub_schurs->prefix);
674: KSPAppendOptionsPrefix(sub_schurs->change[i],"sub_schurs_change_");
675: }
676: ISDestroy(&change_primal_all);
677: }
679: /* Local matrix of all local Schur complements on subsets (transposed) */
680: if (!sub_schurs->S_Ej_all) {
681: MatCreate(PETSC_COMM_SELF,&sub_schurs->S_Ej_all);
682: MatSetSizes(sub_schurs->S_Ej_all,PETSC_DECIDE,PETSC_DECIDE,local_size,local_size);
683: MatSetType(sub_schurs->S_Ej_all,MATAIJ);
684: MatSeqAIJSetPreallocation(sub_schurs->S_Ej_all,0,nnz);
685: }
687: /* Compute Schur complements explicitly */
688: F = NULL;
689: if (!sub_schurs->schur_explicit) { /* this code branch is used when a MatFactor with Schur complement support is not available; it is not very efficient, unless the economic version of the scaling is required */
690: Mat S_Ej_expl;
691: PetscScalar *work;
692: PetscInt j,*dummy_idx;
693: PetscBool Sdense;
695: PetscMalloc2(max_subset_size,&dummy_idx,max_subset_size*max_subset_size,&work);
696: local_size = 0;
697: for (i=0;i<sub_schurs->n_subs;i++) {
698: IS is_subset_B;
699: Mat AE_EE,AE_IE,AE_EI,S_Ej;
701: /* subsets in original and boundary numbering */
702: ISGlobalToLocalMappingApplyIS(sub_schurs->BtoNmap,IS_GTOLM_DROP,sub_schurs->is_subs[i],&is_subset_B);
703: /* EE block */
704: MatCreateSubMatrix(A_BB,is_subset_B,is_subset_B,MAT_INITIAL_MATRIX,&AE_EE);
705: /* IE block */
706: MatCreateSubMatrix(A_IB,is_I,is_subset_B,MAT_INITIAL_MATRIX,&AE_IE);
707: /* EI block */
708: if (sub_schurs->is_hermitian) {
709: MatCreateTranspose(AE_IE,&AE_EI);
710: } else {
711: MatCreateSubMatrix(A_BI,is_subset_B,is_I,MAT_INITIAL_MATRIX,&AE_EI);
712: }
713: ISDestroy(&is_subset_B);
714: MatCreateSchurComplement(AE_II,AE_II,AE_IE,AE_EI,AE_EE,&S_Ej);
715: MatDestroy(&AE_EE);
716: MatDestroy(&AE_IE);
717: MatDestroy(&AE_EI);
718: if (AE_II == A_II) { /* we can reuse the same ksp */
719: KSP ksp;
720: MatSchurComplementGetKSP(sub_schurs->S,&ksp);
721: MatSchurComplementSetKSP(S_Ej,ksp);
722: } else { /* build a new KSP object that inherits the KSP and PC types from the original one */
723: KSP origksp,schurksp;
724: PC origpc,schurpc;
725: KSPType ksp_type;
726: PetscInt n_internal;
727: PetscBool ispcnone;
729: MatSchurComplementGetKSP(sub_schurs->S,&origksp);
730: MatSchurComplementGetKSP(S_Ej,&schurksp);
731: KSPGetType(origksp,&ksp_type);
732: KSPSetType(schurksp,ksp_type);
733: KSPGetPC(schurksp,&schurpc);
734: KSPGetPC(origksp,&origpc);
735: PetscObjectTypeCompare((PetscObject)origpc,PCNONE,&ispcnone);
736: if (!ispcnone) {
737: PCType pc_type;
738: PCGetType(origpc,&pc_type);
739: PCSetType(schurpc,pc_type);
740: } else {
741: PCSetType(schurpc,PCLU);
742: }
743: ISGetSize(is_I,&n_internal);
744: if (n_internal) { /* UMFPACK errors out on zero-sized problems */
745: MatSolverPackage solver=NULL;
746: PCFactorGetMatSolverPackage(origpc,(const MatSolverPackage*)&solver);
747: if (solver) {
748: PCFactorSetMatSolverPackage(schurpc,solver);
749: }
750: }
751: KSPSetUp(schurksp);
752: }
753: ISGetLocalSize(sub_schurs->is_subs[i],&subset_size);
754: MatCreateSeqDense(PETSC_COMM_SELF,subset_size,subset_size,work,&S_Ej_expl);
755: PCBDDCComputeExplicitSchur(S_Ej,sub_schurs->is_hermitian,MAT_REUSE_MATRIX,&S_Ej_expl);
756: PetscObjectTypeCompare((PetscObject)S_Ej_expl,MATSEQDENSE,&Sdense);
757: if (Sdense) {
758: for (j=0;j<subset_size;j++) {
759: dummy_idx[j]=local_size+j;
760: }
761: MatSetValues(sub_schurs->S_Ej_all,subset_size,dummy_idx,subset_size,dummy_idx,work,INSERT_VALUES);
762: } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Not yet implemented for sparse matrices");
763: MatDestroy(&S_Ej);
764: MatDestroy(&S_Ej_expl);
765: local_size += subset_size;
766: }
767: PetscFree2(dummy_idx,work);
768: /* free */
769: ISDestroy(&is_I);
770: MatDestroy(&AE_II);
771: PetscFree(all_local_idx_N);
772: } else {
773: Mat A,cs_AIB_mat = NULL,benign_AIIm1_ones_mat = NULL;
774: Vec Dall = NULL;
775: IS is_A_all,*is_p_r = NULL;
776: PetscScalar *work,*S_data,*schur_factor,infty = PETSC_MAX_REAL;
777: PetscInt n,n_I,*dummy_idx,size_schur,size_active_schur,cum,cum2;
778: PetscBool economic,solver_S,S_lower_triangular = PETSC_FALSE;
779: PetscBool schur_has_vertices,factor_workaround;
780: char solver[256];
782: /* get sizes */
783: n_I = 0;
784: if (is_I_layer) {
785: ISGetLocalSize(is_I_layer,&n_I);
786: }
787: economic = PETSC_FALSE;
788: ISGetLocalSize(sub_schurs->is_I,&cum);
789: if (cum != n_I) economic = PETSC_TRUE;
790: MatGetLocalSize(sub_schurs->A,&n,NULL);
791: size_active_schur = local_size;
793: /* import scaling vector (wrong formulation if we have 3D edges) */
794: if (scaling && compute_Stilda) {
795: const PetscScalar *array;
796: PetscScalar *array2;
797: const PetscInt *idxs;
798: PetscInt i;
800: ISGetIndices(sub_schurs->is_Ej_all,&idxs);
801: VecCreateSeq(PETSC_COMM_SELF,size_active_schur,&Dall);
802: VecGetArrayRead(scaling,&array);
803: VecGetArray(Dall,&array2);
804: for (i=0;i<size_active_schur;i++) array2[i] = array[idxs[i]];
805: VecRestoreArray(Dall,&array2);
806: VecRestoreArrayRead(scaling,&array);
807: ISRestoreIndices(sub_schurs->is_Ej_all,&idxs);
808: deluxe = PETSC_FALSE;
809: }
811: /* size_active_schur does not count any Dirichlet or vertex dofs on the interface */
812: factor_workaround = PETSC_FALSE;
813: schur_has_vertices = PETSC_FALSE;
814: cum = n_I+size_active_schur;
815: if (sub_schurs->is_dir) {
816: const PetscInt* idxs;
817: PetscInt n_dir;
819: ISGetLocalSize(sub_schurs->is_dir,&n_dir);
820: ISGetIndices(sub_schurs->is_dir,&idxs);
821: PetscMemcpy(all_local_idx_N+cum,idxs,n_dir*sizeof(PetscInt));
822: ISRestoreIndices(sub_schurs->is_dir,&idxs);
823: cum += n_dir;
824: factor_workaround = PETSC_TRUE;
825: }
826: /* include the primal vertices in the Schur complement */
827: if (exact_schur && sub_schurs->is_vertices && (compute_Stilda || benign_n)) {
828: PetscInt n_v;
830: ISGetLocalSize(sub_schurs->is_vertices,&n_v);
831: if (n_v) {
832: const PetscInt* idxs;
834: ISGetIndices(sub_schurs->is_vertices,&idxs);
835: PetscMemcpy(all_local_idx_N+cum,idxs,n_v*sizeof(PetscInt));
836: ISRestoreIndices(sub_schurs->is_vertices,&idxs);
837: cum += n_v;
838: factor_workaround = PETSC_TRUE;
839: schur_has_vertices = PETSC_TRUE;
840: }
841: }
842: size_schur = cum - n_I;
843: ISCreateGeneral(PETSC_COMM_SELF,cum,all_local_idx_N,PETSC_OWN_POINTER,&is_A_all);
844: /* get working mat (TODO: factorize without actually permuting) */
845: if (cum == n) {
846: ISSetPermutation(is_A_all);
847: MatPermute(sub_schurs->A,is_A_all,is_A_all,&A);
848: } else {
849: MatCreateSubMatrix(sub_schurs->A,is_A_all,is_A_all,MAT_INITIAL_MATRIX,&A);
850: }
851: MatSetOptionsPrefix(A,sub_schurs->prefix);
852: MatAppendOptionsPrefix(A,"sub_schurs_");
854: /* if we actually change the basis for the pressures, the LDL^T factors use a lot of memory,
855: roughly n^2 instead of n^1.5. This is a workaround */
856: if (benign_n) {
857: Vec v;
858: ISLocalToGlobalMapping N_to_reor;
859: IS is_p0,is_p0_p;
860: PetscScalar *cs_AIB,*AIIm1_data;
861: PetscInt sizeA;
863: ISLocalToGlobalMappingCreateIS(is_A_all,&N_to_reor);
864: ISCreateGeneral(PETSC_COMM_SELF,benign_n,benign_p0_lidx,PETSC_COPY_VALUES,&is_p0);
865: ISGlobalToLocalMappingApplyIS(N_to_reor,IS_GTOLM_DROP,is_p0,&is_p0_p);
866: ISDestroy(&is_p0);
867: MatCreateVecs(A,&v,NULL);
868: VecGetSize(v,&sizeA);
869: MatCreateSeqDense(PETSC_COMM_SELF,sizeA,benign_n,NULL,&benign_AIIm1_ones_mat);
870: MatCreateSeqDense(PETSC_COMM_SELF,size_schur,benign_n,NULL,&cs_AIB_mat);
871: MatDenseGetArray(cs_AIB_mat,&cs_AIB);
872: MatDenseGetArray(benign_AIIm1_ones_mat,&AIIm1_data);
873: PetscMalloc1(benign_n,&is_p_r);
874: /* compute the column sum of A_IB restricted to the pressures */
875: for (i=0;i<benign_n;i++) {
876: Vec benign_AIIm1_ones;
877: PetscScalar *array;
878: const PetscInt *idxs;
879: PetscInt j,nz;
881: ISGlobalToLocalMappingApplyIS(N_to_reor,IS_GTOLM_DROP,benign_zerodiag_subs[i],&is_p_r[i]);
882: ISGetLocalSize(is_p_r[i],&nz);
883: ISGetIndices(is_p_r[i],&idxs);
884: for (j=0;j<nz;j++) AIIm1_data[idxs[j]+sizeA*i] = 1.;
885: ISRestoreIndices(is_p_r[i],&idxs);
886: VecCreateSeqWithArray(PETSC_COMM_SELF,1,sizeA,AIIm1_data+sizeA*i,&benign_AIIm1_ones);
887: MatMult(A,benign_AIIm1_ones,v);
888: VecDestroy(&benign_AIIm1_ones);
889: VecGetArray(v,&array);
890: for (j=0;j<size_schur;j++) {
891: #if defined(PETSC_USE_COMPLEX)
892: cs_AIB[i*size_schur + j] = (PetscRealPart(array[j+n_I])/nz + PETSC_i*(PetscImaginaryPart(array[j+n_I])/nz));
893: #else
894: cs_AIB[i*size_schur + j] = array[j+n_I]/nz;
895: #endif
896: }
897: VecRestoreArray(v,&array);
898: }
899: MatDenseRestoreArray(cs_AIB_mat,&cs_AIB);
900: MatDenseRestoreArray(benign_AIIm1_ones_mat,&AIIm1_data);
901: VecDestroy(&v);
902: MatSetOption(A,MAT_KEEP_NONZERO_PATTERN,PETSC_FALSE);
903: MatSetOption(A,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_FALSE);
904: MatSetOption(A,MAT_NEW_NONZERO_ALLOCATION_ERR,PETSC_FALSE);
905: MatZeroRowsColumnsIS(A,is_p0_p,1.0,NULL,NULL);
906: ISDestroy(&is_p0_p);
907: ISLocalToGlobalMappingDestroy(&N_to_reor);
908: }
909: MatSetOption(A,MAT_SYMMETRIC,sub_schurs->is_hermitian);
910: MatSetOption(A,MAT_HERMITIAN,sub_schurs->is_hermitian);
911: MatSetOption(A,MAT_SPD,sub_schurs->is_posdef);
912: #if defined(PETSC_HAVE_MUMPS)
913: PetscStrcpy(solver,MATSOLVERMUMPS);
914: #else
915: PetscStrcpy(solver,MATSOLVERMKL_PARDISO);
916: #endif
917: PetscOptionsGetString(NULL,((PetscObject)A)->prefix,"-mat_solver_package",solver,256,NULL);
919: /* when using the benign subspace trick, the local Schur complements are SPD */
920: if (benign_trick) sub_schurs->is_posdef = PETSC_TRUE;
922: if (n_I) { /* TODO add ordering from options */
923: IS is_schur;
925: if (sub_schurs->is_hermitian && sub_schurs->is_posdef) {
926: MatGetFactor(A,solver,MAT_FACTOR_CHOLESKY,&F);
927: } else {
928: MatGetFactor(A,solver,MAT_FACTOR_LU,&F);
929: }
930: MatSetErrorIfFailure(A,PETSC_TRUE);
932: /* subsets ordered last */
933: ISCreateStride(PETSC_COMM_SELF,size_schur,n_I,1,&is_schur);
934: MatFactorSetSchurIS(F,is_schur);
935: ISDestroy(&is_schur);
937: /* factorization step */
938: if (sub_schurs->is_hermitian && sub_schurs->is_posdef) {
939: MatCholeskyFactorSymbolic(F,A,NULL,NULL);
940: #if defined(PETSC_HAVE_MUMPS) /* be sure that icntl 19 is not set by command line */
941: MatMumpsSetIcntl(F,19,2);
942: #endif
943: MatCholeskyFactorNumeric(F,A,NULL);
944: S_lower_triangular = PETSC_TRUE;
945: } else {
946: MatLUFactorSymbolic(F,A,NULL,NULL,NULL);
947: #if defined(PETSC_HAVE_MUMPS) /* be sure that icntl 19 is not set by command line */
948: MatMumpsSetIcntl(F,19,3);
949: #endif
950: MatLUFactorNumeric(F,A,NULL);
951: }
952: MatViewFromOptions(F,(PetscObject)A,"-mat_factor_view");
954: /* get explicit Schur Complement computed during numeric factorization */
955: MatFactorGetSchurComplement(F,&S_all,NULL);
956: MatSetOption(S_all,MAT_SPD,sub_schurs->is_posdef);
957: MatSetOption(S_all,MAT_HERMITIAN,sub_schurs->is_hermitian);
959: /* we can reuse the solvers if we are not using the economic version */
960: reuse_solvers = (PetscBool)(reuse_solvers && !economic);
961: factor_workaround = (PetscBool)(reuse_solvers && factor_workaround);
962: if (!sub_schurs->is_posdef && factor_workaround && compute_Stilda && size_active_schur)
963: reuse_solvers = factor_workaround = PETSC_FALSE;
965: solver_S = PETSC_TRUE;
967: /* update the Schur complement with the change of basis on the pressures */
968: if (benign_n) {
969: PetscScalar *S_data,*cs_AIB,*AIIm1_data;
970: Vec v;
971: PetscInt sizeA;
973: MatDenseGetArray(S_all,&S_data);
974: MatCreateVecs(A,&v,NULL);
975: VecGetSize(v,&sizeA);
976: #if defined(PETSC_HAVE_MUMPS)
977: MatMumpsSetIcntl(F,26,0);
978: #endif
979: #if defined(PETSC_HAVE_MKL_PARDISO)
980: MatMkl_PardisoSetCntl(F,70,1);
981: #endif
982: MatDenseGetArray(cs_AIB_mat,&cs_AIB);
983: MatDenseGetArray(benign_AIIm1_ones_mat,&AIIm1_data);
984: for (i=0;i<benign_n;i++) {
985: Vec benign_AIIm1_ones;
986: PetscScalar *array,sum = 0.,one = 1.;
987: const PetscInt *idxs;
988: PetscInt j,nz;
989: PetscBLASInt B_k,B_n;
991: VecCreateSeqWithArray(PETSC_COMM_SELF,1,sizeA,AIIm1_data+sizeA*i,&benign_AIIm1_ones);
992: VecCopy(benign_AIIm1_ones,v);
993: MatSolve(F,v,benign_AIIm1_ones);
994: ISGetLocalSize(is_p_r[i],&nz);
995: ISGetIndices(is_p_r[i],&idxs);
996: /* p0 dof (eliminated) is excluded from the sum */
997: for (j=0;j<nz-1;j++) sum -= AIIm1_data[idxs[j]+sizeA*i];
998: ISRestoreIndices(is_p_r[i],&idxs);
999: MatMult(A,benign_AIIm1_ones,v);
1000: VecGetArray(v,&array);
1001: /* perform sparse rank updates on symmetric Schur (TODO: move outside of the loop?) */
1002: /* cs_AIB already scaled by 1./nz */
1003: B_k = 1;
1004: PetscBLASIntCast(size_schur,&B_n);
1005: PetscStackCallBLAS("BLASsyrk",BLASsyrk_("L","N",&B_n,&B_k,&sum,cs_AIB+i*size_schur,&B_n,&one,S_data,&B_n));
1006: sum = 1.;
1007: PetscStackCallBLAS("BLASsyr2k",BLASsyr2k_("L","N",&B_n,&B_k,&sum,array+n_I,&B_n,cs_AIB+i*size_schur,&B_n,&one,S_data,&B_n));
1008: VecRestoreArray(v,&array);
1009: /* set p0 entry of AIIm1_ones to zero */
1010: ISGetIndices(is_p_r[i],&idxs);
1011: for (j=0;j<benign_n;j++) AIIm1_data[idxs[nz-1]+sizeA*j] = 0.;
1012: ISRestoreIndices(is_p_r[i],&idxs);
1013: VecDestroy(&benign_AIIm1_ones);
1014: }
1015: if (!S_lower_triangular) { /* I need to expand the upper triangular data (column oriented) */
1016: PetscInt k,j;
1017: for (k=0;k<size_schur;k++) {
1018: for (j=k;j<size_schur;j++) {
1019: S_data[j*size_schur+k] = PetscConj(S_data[k*size_schur+j]);
1020: }
1021: }
1022: }
1024: /* restore defaults */
1025: #if defined(PETSC_HAVE_MUMPS)
1026: MatMumpsSetIcntl(F,26,-1);
1027: #endif
1028: #if defined(PETSC_HAVE_MKL_PARDISO)
1029: MatMkl_PardisoSetCntl(F,70,0);
1030: #endif
1031: MatDenseRestoreArray(cs_AIB_mat,&cs_AIB);
1032: MatDenseRestoreArray(benign_AIIm1_ones_mat,&AIIm1_data);
1033: VecDestroy(&v);
1034: MatDenseRestoreArray(S_all,&S_data);
1035: }
1036: if (!reuse_solvers) {
1037: for (i=0;i<benign_n;i++) {
1038: ISDestroy(&is_p_r[i]);
1039: }
1040: PetscFree(is_p_r);
1041: MatDestroy(&cs_AIB_mat);
1042: MatDestroy(&benign_AIIm1_ones_mat);
1043: }
1044: } else { /* we can't use MatFactor when size_schur == size_of_the_problem */
1045: MatConvert(A,MATSEQDENSE,MAT_INITIAL_MATRIX,&S_all);
1046: reuse_solvers = PETSC_FALSE; /* TODO: why can't we reuse the solvers here? */
1047: factor_workaround = PETSC_FALSE;
1048: solver_S = PETSC_FALSE;
1049: }
1051: if (reuse_solvers) {
1052: Mat A_II,Afake;
1053: Vec vec1_B;
1054: PCBDDCReuseSolvers msolv_ctx;
1055: PetscInt n_R;
1057: if (sub_schurs->reuse_solver) {
1058: PCBDDCReuseSolversReset(sub_schurs->reuse_solver);
1059: } else {
1060: PetscNew(&sub_schurs->reuse_solver);
1061: }
1062: msolv_ctx = sub_schurs->reuse_solver;
1063: MatSchurComplementGetSubMatrices(sub_schurs->S,&A_II,NULL,NULL,NULL,NULL);
1064: PetscObjectReference((PetscObject)F);
1065: msolv_ctx->F = F;
1066: MatCreateVecs(F,&msolv_ctx->sol,NULL);
1067: /* currently PETSc has no support for MatSolve(F,x,x), so cheat and let rhs and sol share the same memory */
1068: {
1069: PetscScalar *array;
1070: PetscInt n;
1072: VecGetLocalSize(msolv_ctx->sol,&n);
1073: VecGetArray(msolv_ctx->sol,&array);
1074: VecCreateSeqWithArray(PetscObjectComm((PetscObject)msolv_ctx->sol),1,n,array,&msolv_ctx->rhs);
1075: VecRestoreArray(msolv_ctx->sol,&array);
1076: }
1077: msolv_ctx->has_vertices = schur_has_vertices;
1079: /* interior solver */
1080: PCCreate(PetscObjectComm((PetscObject)A_II),&msolv_ctx->interior_solver);
1081: PCSetOperators(msolv_ctx->interior_solver,A_II,A_II);
1082: PCSetType(msolv_ctx->interior_solver,PCSHELL);
1083: PCShellSetContext(msolv_ctx->interior_solver,msolv_ctx);
1084: PCShellSetApply(msolv_ctx->interior_solver,PCBDDCReuseSolvers_Interior);
1085: PCShellSetApplyTranspose(msolv_ctx->interior_solver,PCBDDCReuseSolvers_InteriorTranspose);
1087: /* correction solver */
1088: PCCreate(PetscObjectComm((PetscObject)A_II),&msolv_ctx->correction_solver);
1089: PCSetType(msolv_ctx->correction_solver,PCSHELL);
1090: PCShellSetContext(msolv_ctx->correction_solver,msolv_ctx);
1091: PCShellSetApply(msolv_ctx->correction_solver,PCBDDCReuseSolvers_Correction);
1092: PCShellSetApplyTranspose(msolv_ctx->correction_solver,PCBDDCReuseSolvers_CorrectionTranspose);
1094: /* scatter and vecs for Schur complement solver */
1095: MatCreateVecs(S_all,&msolv_ctx->sol_B,&msolv_ctx->rhs_B);
1096: MatCreateVecs(sub_schurs->S,&vec1_B,NULL);
1097: if (!schur_has_vertices) {
1098: ISGlobalToLocalMappingApplyIS(sub_schurs->BtoNmap,IS_GTOLM_DROP,is_A_all,&msolv_ctx->is_B);
1099: VecScatterCreate(vec1_B,msolv_ctx->is_B,msolv_ctx->sol_B,NULL,&msolv_ctx->correction_scatter_B);
1100: PetscObjectReference((PetscObject)is_A_all);
1101: msolv_ctx->is_R = is_A_all;
1102: } else {
1103: IS is_B_all;
1104: const PetscInt* idxs;
1105: PetscInt dual,n_v,n;
1107: ISGetLocalSize(sub_schurs->is_vertices,&n_v);
1108: dual = size_schur - n_v;
1109: ISGetLocalSize(is_A_all,&n);
1110: ISGetIndices(is_A_all,&idxs);
1111: ISCreateGeneral(PetscObjectComm((PetscObject)is_A_all),dual,idxs+n_I,PETSC_COPY_VALUES,&is_B_all);
1112: ISGlobalToLocalMappingApplyIS(sub_schurs->BtoNmap,IS_GTOLM_DROP,is_B_all,&msolv_ctx->is_B);
1113: ISDestroy(&is_B_all);
1114: ISCreateStride(PetscObjectComm((PetscObject)is_A_all),dual,0,1,&is_B_all);
1115: VecScatterCreate(vec1_B,msolv_ctx->is_B,msolv_ctx->sol_B,is_B_all,&msolv_ctx->correction_scatter_B);
1116: ISDestroy(&is_B_all);
1117: ISCreateGeneral(PetscObjectComm((PetscObject)is_A_all),n-n_v,idxs,PETSC_COPY_VALUES,&msolv_ctx->is_R);
1118: ISRestoreIndices(is_A_all,&idxs);
1119: }
1120: ISGetLocalSize(msolv_ctx->is_R,&n_R);
1121: MatCreateSeqAIJ(PETSC_COMM_SELF,n_R,n_R,0,NULL,&Afake);
1122: MatAssemblyBegin(Afake,MAT_FINAL_ASSEMBLY);
1123: MatAssemblyEnd(Afake,MAT_FINAL_ASSEMBLY);
1124: PCSetOperators(msolv_ctx->correction_solver,Afake,Afake);
1125: MatDestroy(&Afake);
1126: VecDestroy(&vec1_B);
1128: /* communicate benign info to solver context */
1129: if (benign_n) {
1130: PetscScalar *array;
1132: msolv_ctx->benign_n = benign_n;
1133: msolv_ctx->benign_zerodiag_subs = is_p_r;
1134: PetscMalloc1(benign_n,&msolv_ctx->benign_save_vals);
1135: msolv_ctx->benign_csAIB = cs_AIB_mat;
1136: MatCreateVecs(cs_AIB_mat,&msolv_ctx->benign_corr_work,NULL);
1137: VecGetArray(msolv_ctx->benign_corr_work,&array);
1138: VecCreateSeqWithArray(PETSC_COMM_SELF,1,size_schur,array,&msolv_ctx->benign_dummy_schur_vec);
1139: VecRestoreArray(msolv_ctx->benign_corr_work,&array);
1140: msolv_ctx->benign_AIIm1ones = benign_AIIm1_ones_mat;
1141: }
1142: } else {
1143: if (sub_schurs->reuse_solver) {
1144: PCBDDCReuseSolversReset(sub_schurs->reuse_solver);
1145: }
1146: PetscFree(sub_schurs->reuse_solver);
1147: }
1148: MatDestroy(&A);
1149: ISDestroy(&is_A_all);
1151: /* Work arrays */
1152: PetscMalloc2(max_subset_size,&dummy_idx,max_subset_size*max_subset_size,&work);
1154: /* matrices for deluxe scaling and adaptive selection */
1155: if (compute_Stilda) {
1156: if (!sub_schurs->sum_S_Ej_tilda_all) {
1157: MatDuplicate(sub_schurs->S_Ej_all,MAT_SHARE_NONZERO_PATTERN,&sub_schurs->sum_S_Ej_tilda_all);
1158: }
1159: if (!sub_schurs->sum_S_Ej_inv_all && deluxe) {
1160: MatDuplicate(sub_schurs->S_Ej_all,MAT_SHARE_NONZERO_PATTERN,&sub_schurs->sum_S_Ej_inv_all);
1161: }
1162: }
1164: /* S_Ej_all */
1165: cum = cum2 = 0;
1166: MatDenseGetArray(S_all,&S_data);
1167: for (i=0;i<sub_schurs->n_subs;i++) {
1168: PetscInt j;
1170: /* get S_E */
1171: ISGetLocalSize(sub_schurs->is_subs[i],&subset_size);
1172: if (S_lower_triangular) { /* I need to expand the upper triangular data (column oriented) */
1173: PetscInt k;
1174: for (k=0;k<subset_size;k++) {
1175: for (j=k;j<subset_size;j++) {
1176: work[k*subset_size+j] = S_data[cum2+k*size_schur+j];
1177: work[j*subset_size+k] = PetscConj(S_data[cum2+k*size_schur+j]);
1178: }
1179: }
1180: } else { /* just copy to workspace */
1181: PetscInt k;
1182: for (k=0;k<subset_size;k++) {
1183: for (j=0;j<subset_size;j++) {
1184: work[k*subset_size+j] = S_data[cum2+k*size_schur+j];
1185: }
1186: }
1187: }
1188: /* insert S_E values */
1189: for (j=0;j<subset_size;j++) dummy_idx[j] = cum+j;
1190: if (sub_schurs->change) {
1191: Mat change_sub,SEj,T;
1193: /* change basis */
1194: KSPGetOperators(sub_schurs->change[i],&change_sub,NULL);
1195: MatCreateSeqDense(PETSC_COMM_SELF,subset_size,subset_size,work,&SEj);
1196: if (!sub_schurs->change_with_qr) { /* currently there's no support for PtAP with P SeqAIJ */
1197: Mat T2;
1198: MatTransposeMatMult(change_sub,SEj,MAT_INITIAL_MATRIX,1.0,&T2);
1199: MatMatMult(T2,change_sub,MAT_INITIAL_MATRIX,1.0,&T);
1200: MatConvert(T,MATSEQDENSE,MAT_INPLACE_MATRIX,&T);
1201: MatDestroy(&T2);
1202: } else {
1203: MatPtAP(SEj,change_sub,MAT_INITIAL_MATRIX,1.0,&T);
1204: }
1205: MatCopy(T,SEj,SAME_NONZERO_PATTERN);
1206: MatDestroy(&T);
1207: MatZeroRowsColumnsIS(SEj,sub_schurs->change_primal_sub[i],1.0,NULL,NULL);
1208: MatDestroy(&SEj);
1209: }
1210: if (deluxe) {
1211: MatSetValues(sub_schurs->S_Ej_all,subset_size,dummy_idx,subset_size,dummy_idx,work,INSERT_VALUES);
1212: /* if adaptivity is requested, invert S_E blocks */
1213: if (compute_Stilda) {
1214: PetscBLASIntCast(subset_size,&B_N);
1215: PetscFPTrapPush(PETSC_FP_TRAP_OFF);
1216: if (!use_getr) { /* TODO add sytrf/i for symmetric non hermitian */
1217: PetscInt k;
1218: PetscStackCallBLAS("LAPACKpotrf",LAPACKpotrf_("L",&B_N,work,&B_N,&B_ierr));
1219: if (B_ierr) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error in POTRF Lapack routine %d",(int)B_ierr);
1220: PetscStackCallBLAS("LAPACKpotri",LAPACKpotri_("L",&B_N,work,&B_N,&B_ierr));
1221: if (B_ierr) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error in POTRI Lapack routine %d",(int)B_ierr);
1222: for (k=0;k<subset_size;k++) {
1223: for (j=k;j<subset_size;j++) {
1224: work[j*subset_size+k] = work[k*subset_size+j];
1225: }
1226: }
1227: } else {
1228: PetscStackCallBLAS("LAPACKgetrf",LAPACKgetrf_(&B_N,&B_N,work,&B_N,pivots,&B_ierr));
1229: if (B_ierr) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error in GETRF Lapack routine %d",(int)B_ierr);
1230: PetscStackCallBLAS("LAPACKgetri",LAPACKgetri_(&B_N,work,&B_N,pivots,Bwork,&B_lwork,&B_ierr));
1231: if (B_ierr) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error in GETRI Lapack routine %d",(int)B_ierr);
1232: }
1233: PetscFPTrapPop();
1234: MatSetValues(sub_schurs->sum_S_Ej_inv_all,subset_size,dummy_idx,subset_size,dummy_idx,work,INSERT_VALUES);
1235: }
1236: } else if (compute_Stilda) { /* not using deluxe */
1237: Mat SEj;
1238: Vec D;
1239: PetscScalar *array;
1241: MatCreateSeqDense(PETSC_COMM_SELF,subset_size,subset_size,work,&SEj);
1242: VecGetArray(Dall,&array);
1243: VecCreateSeqWithArray(PETSC_COMM_SELF,1,subset_size,array+cum,&D);
1244: VecRestoreArray(Dall,&array);
1245: VecShift(D,-1.);
1246: MatDiagonalScale(SEj,D,D);
1247: MatDestroy(&SEj);
1248: VecDestroy(&D);
1249: MatSetValues(sub_schurs->S_Ej_all,subset_size,dummy_idx,subset_size,dummy_idx,work,INSERT_VALUES);
1250: }
1251: cum += subset_size;
1252: cum2 += subset_size*(size_schur + 1);
1253: }
1254: MatDenseRestoreArray(S_all,&S_data);
1256: if (solver_S) {
1257: MatFactorRestoreSchurComplement(F,&S_all,MAT_FACTOR_SCHUR_UNFACTORED);
1258: }
1260: schur_factor = NULL;
1261: if (compute_Stilda && size_active_schur) {
1263: if (sub_schurs->n_subs == 1 && size_schur == size_active_schur && deluxe) { /* we already computed the inverse */
1264: PetscInt j;
1265: for (j=0;j<size_schur;j++) dummy_idx[j] = j;
1266: MatSetValues(sub_schurs->sum_S_Ej_tilda_all,size_schur,dummy_idx,size_schur,dummy_idx,work,INSERT_VALUES);
1267: } else {
1268: Mat S_all_inv=NULL;
1269: if (solver_S) {
1270: /* for adaptive selection we need S^-1; for solver reuse we need S_\Delta\Delta^-1.
1271: The latter is not the leading principal submatrix of S^-1. However, the factors can be reused since S_\Delta\Delta is the leading principal submatrix of S */
1272: if (factor_workaround) {/* invert without calling MatFactorInvertSchurComplement, since we are hacking */
1273: PetscScalar *data;
1274: PetscInt nd = 0;
1276: if (!sub_schurs->is_posdef) {
1277: SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Factor update not yet implemented for non SPD matrices");
1278: }
1279: MatFactorGetSchurComplement(F,&S_all_inv,NULL);
1280: MatDenseGetArray(S_all_inv,&data);
1281: if (sub_schurs->is_dir) { /* dirichlet dofs could have different scalings */
1282: ISGetLocalSize(sub_schurs->is_dir,&nd);
1283: }
1285: /* factor and invert the active dofs and vertices (Dirichlet dofs do not contribute) */
1286: if (schur_has_vertices) {
1287: Mat M;
1288: PetscScalar *tdata;
1289: PetscInt nv = 0, news;
1291: ISGetLocalSize(sub_schurs->is_vertices,&nv);
1292: news = size_active_schur + nv;
1293: PetscCalloc1(news*news,&tdata);
1294: for (i=0;i<size_active_schur;i++) {
1295: PetscMemcpy(tdata+i*(news+1),data+i*(size_schur+1),(size_active_schur-i)*sizeof(PetscScalar));
1296: PetscMemcpy(tdata+i*(news+1)+size_active_schur-i,data+i*size_schur+size_active_schur+nd,nv*sizeof(PetscScalar));
1297: }
1298: for (i=0;i<nv;i++) {
1299: PetscInt k = i+size_active_schur;
1300: PetscMemcpy(tdata+k*(news+1),data+(k+nd)*(size_schur+1),(nv-i)*sizeof(PetscScalar));
1301: }
1303: MatCreateSeqDense(PETSC_COMM_SELF,news,news,tdata,&M);
1304: MatSetOption(M,MAT_SPD,PETSC_TRUE);
1305: MatCholeskyFactor(M,NULL,NULL);
1306: /* save the factors */
1307: cum = 0;
1308: PetscMalloc1((size_active_schur*(size_active_schur +1))/2+nd,&schur_factor);
1309: for (i=0;i<size_active_schur;i++) {
1310: PetscMemcpy(schur_factor+cum,tdata+i*(news+1),(size_active_schur-i)*sizeof(PetscScalar));
1311: cum += size_active_schur - i;
1312: }
1313: for (i=0;i<nd;i++) schur_factor[cum+i] = PetscSqrtReal(PetscRealPart(data[(i+size_active_schur)*(size_schur+1)]));
1314: MatSeqDenseInvertFactors_Private(M);
1315: /* move back just the active dofs to the Schur complement */
1316: for (i=0;i<size_active_schur;i++) {
1317: PetscMemcpy(data+i*size_schur,tdata+i*news,size_active_schur*sizeof(PetscScalar));
1318: }
1319: PetscFree(tdata);
1320: MatDestroy(&M);
1321: } else { /* we can factorize and invert just the active dofs */
1322: Mat M;
1324: MatCreateSeqDense(PETSC_COMM_SELF,size_active_schur,size_active_schur,data,&M);
1325: MatSeqDenseSetLDA(M,size_schur);
1326: MatSetOption(M,MAT_SPD,PETSC_TRUE);
1327: MatCholeskyFactor(M,NULL,NULL);
1328: MatSeqDenseInvertFactors_Private(M);
1329: MatDestroy(&M);
1330: for (i=0;i<nd;i++) data[(i+size_active_schur)*(size_schur+1)] = 1.0/data[(i+size_active_schur)*(size_schur+1)];
1331: }
1332: MatDenseRestoreArray(S_all_inv,&data);
1333: } else { /* use MatFactor calls to invert S */
1334: MatFactorInvertSchurComplement(F);
1335: MatFactorGetSchurComplement(F,&S_all_inv,NULL);
1336: }
1337: } else { /* we need to invert explicitly since we are not using MatFactor for S */
1338: PetscObjectReference((PetscObject)S_all);
1339: S_all_inv = S_all;
1340: MatDenseGetArray(S_all_inv,&S_data);
1341: PetscBLASIntCast(size_schur,&B_N);
1342: PetscFPTrapPush(PETSC_FP_TRAP_OFF);
1343: if (!use_getr) { /* TODO add sytrf/i for symmetric non hermitian */
1344: PetscStackCallBLAS("LAPACKpotrf",LAPACKpotrf_("L",&B_N,S_data,&B_N,&B_ierr));
1345: if (B_ierr) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error in POTRF Lapack routine %d",(int)B_ierr);
1346: PetscStackCallBLAS("LAPACKpotri",LAPACKpotri_("L",&B_N,S_data,&B_N,&B_ierr));
1347: if (B_ierr) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error in POTRI Lapack routine %d",(int)B_ierr);
1348: } else {
1349: PetscStackCallBLAS("LAPACKgetrf",LAPACKgetrf_(&B_N,&B_N,S_data,&B_N,pivots,&B_ierr));
1350: if (B_ierr) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error in GETRF Lapack routine %d",(int)B_ierr);
1351: PetscStackCallBLAS("LAPACKgetri",LAPACKgetri_(&B_N,S_data,&B_N,pivots,Bwork,&B_lwork,&B_ierr));
1352: if (B_ierr) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error in GETRI Lapack routine %d",(int)B_ierr);
1353: }
1354: PetscFPTrapPop();
1355: MatDenseRestoreArray(S_all_inv,&S_data);
1356: }
1357: /* S_Ej_tilda_all */
1358: cum = cum2 = 0;
1359: MatDenseGetArray(S_all_inv,&S_data);
1360: for (i=0;i<sub_schurs->n_subs;i++) {
1361: PetscInt j;
1363: ISGetLocalSize(sub_schurs->is_subs[i],&subset_size);
1364: /* get (St^-1)_E */
1365: /* Unless we are changing the variables, I don't need to expand to upper triangular since St^-1
1366: will be properly accessed later during adaptive selection */
1367: if (S_lower_triangular) {
1368: PetscInt k;
1369: if (sub_schurs->change) {
1370: for (k=0;k<subset_size;k++) {
1371: for (j=k;j<subset_size;j++) {
1372: work[k*subset_size+j] = S_data[cum2+k*size_schur+j];
1373: work[j*subset_size+k] = work[k*subset_size+j];
1374: }
1375: }
1376: } else {
1377: for (k=0;k<subset_size;k++) {
1378: for (j=k;j<subset_size;j++) {
1379: work[k*subset_size+j] = S_data[cum2+k*size_schur+j];
1380: }
1381: }
1382: }
1383: } else {
1384: PetscInt k;
1385: for (k=0;k<subset_size;k++) {
1386: for (j=0;j<subset_size;j++) {
1387: work[k*subset_size+j] = S_data[cum2+k*size_schur+j];
1388: }
1389: }
1390: }
1391: if (sub_schurs->change) {
1392: Mat change_sub,SEj,T;
1394: /* change basis */
1395: KSPGetOperators(sub_schurs->change[i],&change_sub,NULL);
1396: MatCreateSeqDense(PETSC_COMM_SELF,subset_size,subset_size,work,&SEj);
1397: if (!sub_schurs->change_with_qr) { /* currently there's no support for PtAP with P SeqAIJ */
1398: Mat T2;
1399: MatTransposeMatMult(change_sub,SEj,MAT_INITIAL_MATRIX,1.0,&T2);
1400: MatMatMult(T2,change_sub,MAT_INITIAL_MATRIX,1.0,&T);
1401: MatDestroy(&T2);
1402: MatConvert(T,MATSEQDENSE,MAT_INPLACE_MATRIX,&T);
1403: } else {
1404: MatPtAP(SEj,change_sub,MAT_INITIAL_MATRIX,1.0,&T);
1405: }
1406: MatCopy(T,SEj,SAME_NONZERO_PATTERN);
1407: MatDestroy(&T);
1408: /* set the diagonal entries to a very large value so that the basis we are eliminating is picked as the first eigenvectors during adaptive selection */
1409: MatZeroRowsColumnsIS(SEj,sub_schurs->change_primal_sub[i],1./PETSC_SMALL,NULL,NULL);
1410: MatDestroy(&SEj);
1411: }
1412: for (j=0;j<subset_size;j++) dummy_idx[j] = cum+j;
1413: MatSetValues(sub_schurs->sum_S_Ej_tilda_all,subset_size,dummy_idx,subset_size,dummy_idx,work,INSERT_VALUES);
1414: cum += subset_size;
1415: cum2 += subset_size*(size_schur + 1);
1416: }
1417: MatDenseRestoreArray(S_all_inv,&S_data);
1418: if (solver_S) {
1419: if (schur_has_vertices) {
1420: MatFactorRestoreSchurComplement(F,&S_all_inv,MAT_FACTOR_SCHUR_FACTORED);
1421: } else {
1422: MatFactorRestoreSchurComplement(F,&S_all_inv,MAT_FACTOR_SCHUR_INVERTED);
1423: }
1424: }
1425: MatDestroy(&S_all_inv);
1426: }
1428: /* move back factors if needed */
1429: if (schur_has_vertices) {
1430: Mat S_tmp;
1431: PetscInt nd = 0;
1433: if (!solver_S) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"This should not happen");
1434: MatFactorGetSchurComplement(F,&S_tmp,NULL);
1435: if (sub_schurs->is_posdef) {
1436: PetscScalar *data;
1438: MatDenseGetArray(S_tmp,&data);
1439: PetscMemzero(data,size_schur*size_schur*sizeof(PetscScalar));
1441: if (S_lower_triangular) {
1442: cum = 0;
1443: for (i=0;i<size_active_schur;i++) {
1444: PetscMemcpy(data+i*(size_schur+1),schur_factor+cum,(size_active_schur-i)*sizeof(PetscScalar));
1445: cum += size_active_schur-i;
1446: }
1447: } else {
1448: PetscMemcpy(data,schur_factor,size_schur*size_schur*sizeof(PetscScalar));
1449: }
1450: if (sub_schurs->is_dir) {
1451: ISGetLocalSize(sub_schurs->is_dir,&nd);
1452: for (i=0;i<nd;i++) {
1453: data[(i+size_active_schur)*(size_schur+1)] = schur_factor[cum+i];
1454: }
1455: }
1456: /* workaround: since I cannot modify the matrices used inside the solvers for the forward and backward substitutions,
1457: set the diagonal entry of the Schur factor to a very large value */
1458: for (i=size_active_schur+nd;i<size_schur;i++) {
1459: data[i*(size_schur+1)] = infty;
1460: }
1461: MatDenseRestoreArray(S_tmp,&data);
1462: } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Factor update not yet implemented for non SPD matrices");
1463: MatFactorRestoreSchurComplement(F,&S_tmp,MAT_FACTOR_SCHUR_FACTORED);
1464: }
1465: } else if (factor_workaround) { /* we need to eliminate any unneeded coupling */
1466: PetscScalar *data;
1467: PetscInt nd = 0;
1469: if (sub_schurs->is_dir) { /* dirichlet dofs could have different scalings */
1470: ISGetLocalSize(sub_schurs->is_dir,&nd);
1471: }
1472: MatFactorGetSchurComplement(F,&S_all,NULL);
1473: MatDenseGetArray(S_all,&data);
1474: for (i=0;i<size_active_schur;i++) {
1475: PetscMemzero(data+i*size_schur+size_active_schur,(size_schur-size_active_schur)*sizeof(PetscScalar));
1476: }
1477: for (i=size_active_schur+nd;i<size_schur;i++) {
1478: PetscMemzero(data+i*size_schur+size_active_schur,(size_schur-size_active_schur)*sizeof(PetscScalar));
1479: data[i*(size_schur+1)] = infty;
1480: }
1481: MatDenseRestoreArray(S_all,&data);
1482: MatFactorRestoreSchurComplement(F,&S_all,MAT_FACTOR_SCHUR_UNFACTORED);
1483: }
1484: PetscFree2(dummy_idx,work);
1485: PetscFree(schur_factor);
1486: VecDestroy(&Dall);
1487: }
1488: PetscFree(nnz);
1489: MatDestroy(&F);
1490: ISDestroy(&is_I_layer);
1491: MatDestroy(&S_all);
1492: MatDestroy(&A_BB);
1493: MatDestroy(&A_IB);
1494: MatDestroy(&A_BI);
1495: MatAssemblyBegin(sub_schurs->S_Ej_all,MAT_FINAL_ASSEMBLY);
1496: MatAssemblyEnd(sub_schurs->S_Ej_all,MAT_FINAL_ASSEMBLY);
1497: if (compute_Stilda) {
1498: MatAssemblyBegin(sub_schurs->sum_S_Ej_tilda_all,MAT_FINAL_ASSEMBLY);
1499: MatAssemblyEnd(sub_schurs->sum_S_Ej_tilda_all,MAT_FINAL_ASSEMBLY);
1500: if (deluxe) {
1501: MatAssemblyBegin(sub_schurs->sum_S_Ej_inv_all,MAT_FINAL_ASSEMBLY);
1502: MatAssemblyEnd(sub_schurs->sum_S_Ej_inv_all,MAT_FINAL_ASSEMBLY);
1503: }
1504: }
1506: /* Global matrix of all assembled Schur complements on subsets */
1507: MatISSetLocalMat(work_mat,sub_schurs->S_Ej_all);
1508: MatISSetMPIXAIJPreallocation_Private(work_mat,global_schur_subsets,PETSC_TRUE);
1509: MatISGetMPIXAIJ(work_mat,MAT_REUSE_MATRIX,&global_schur_subsets);
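  /* the MATIS to parallel AIJ conversion above sums the subdomain contributions on subsets shared by neighboring subdomains */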
1511: /* Get local part of (\sum_j S_Ej) */
1512: MatCreateSubMatrices(global_schur_subsets,1,&all_subsets_n,&all_subsets_n,MAT_INITIAL_MATRIX,&submats);
1513: if (!sub_schurs->sum_S_Ej_all) {
1514: MatDuplicate(submats[0],MAT_COPY_VALUES,&sub_schurs->sum_S_Ej_all);
1515: } else {
1516: MatCopy(submats[0],sub_schurs->sum_S_Ej_all,SAME_NONZERO_PATTERN);
1517: }
1519: /* Get local parts of (\sum_j S^-1_Ej) and (\sum_j St^-1_Ej) */
1520: if (compute_Stilda) {
1521: MatISSetLocalMat(work_mat,sub_schurs->sum_S_Ej_tilda_all);
1522: MatISGetMPIXAIJ(work_mat,MAT_REUSE_MATRIX,&global_schur_subsets);
1523: MatCreateSubMatrices(global_schur_subsets,1,&all_subsets_n,&all_subsets_n,MAT_REUSE_MATRIX,&submats);
1524: MatCopy(submats[0],sub_schurs->sum_S_Ej_tilda_all,SAME_NONZERO_PATTERN);
1525: if (deluxe) {
1526: MatISSetLocalMat(work_mat,sub_schurs->sum_S_Ej_inv_all);
1527: MatISGetMPIXAIJ(work_mat,MAT_REUSE_MATRIX,&global_schur_subsets);
1528: MatCreateSubMatrices(global_schur_subsets,1,&all_subsets_n,&all_subsets_n,MAT_REUSE_MATRIX,&submats);
1529: MatCopy(submats[0],sub_schurs->sum_S_Ej_inv_all,SAME_NONZERO_PATTERN);
1530: } else {
1531: PetscScalar *array;
1532: PetscInt cum;
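      /* without deluxe scaling, invert each subset block of \sum_j St_Ej in place via dense Cholesky, and simply reuse \sum_j S_Ej as sum_S_Ej_inv_all */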
1534: MatSeqAIJGetArray(sub_schurs->sum_S_Ej_tilda_all,&array);
1535: cum = 0;
1536: for (i=0;i<sub_schurs->n_subs;i++) {
1537: ISGetLocalSize(sub_schurs->is_subs[i],&subset_size);
1538: PetscBLASIntCast(subset_size,&B_N);
1539: PetscFPTrapPush(PETSC_FP_TRAP_OFF);
1540: PetscStackCallBLAS("LAPACKpotrf",LAPACKpotrf_("L",&B_N,array+cum,&B_N,&B_ierr));
1541: if (B_ierr) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error in POTRF Lapack routine %d",(int)B_ierr);
1542: PetscStackCallBLAS("LAPACKpotri",LAPACKpotri_("L",&B_N,array+cum,&B_N,&B_ierr));
1543: if (B_ierr) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error in POTRI Lapack routine %d",(int)B_ierr);
1544: PetscFPTrapPop();
1545: cum += subset_size*subset_size;
1546: }
1547: MatSeqAIJRestoreArray(sub_schurs->sum_S_Ej_tilda_all,&array);
1548: PetscObjectReference((PetscObject)sub_schurs->sum_S_Ej_all);
1549: MatDestroy(&sub_schurs->sum_S_Ej_inv_all);
1550: sub_schurs->sum_S_Ej_inv_all = sub_schurs->sum_S_Ej_all;
1551: }
1552: }
1553: MatDestroySubMatrices(1,&submats);
1555: /* free workspace */
1556: PetscFree(submats);
1557: PetscFree2(Bwork,pivots);
1558: MatDestroy(&global_schur_subsets);
1559: MatDestroy(&work_mat);
1560: ISDestroy(&all_subsets_n);
1561: PetscCommDestroy(&comm_n);
1562: return(0);
1563: }
1565: PetscErrorCode PCBDDCSubSchursInit(PCBDDCSubSchurs sub_schurs, IS is_I, IS is_B, PCBDDCGraph graph, ISLocalToGlobalMapping BtoNmap, PetscBool copycc)
1566: {
1567: IS *faces,*edges,*all_cc,vertices;
1568: PetscInt i,n_faces,n_edges,n_all_cc;
1569: PetscBool is_sorted;
1570: PetscErrorCode ierr;
1573: ISSorted(is_I,&is_sorted);
1574: if (!is_sorted) SETERRQ(PetscObjectComm((PetscObject)is_I),PETSC_ERR_PLIB,"IS for I dofs should be sorted");
1575: ISSorted(is_B,&is_sorted);
1576: if (!is_sorted) SETERRQ(PetscObjectComm((PetscObject)is_B),PETSC_ERR_PLIB,"IS for B dofs should be sorted");
1578: /* reset any previous data */
1579: PCBDDCSubSchursReset(sub_schurs);
1581: /* get index sets for faces and edges (already sorted by global ordering) */
1582: PCBDDCGraphGetCandidatesIS(graph,&n_faces,&faces,&n_edges,&edges,&vertices);
1583: n_all_cc = n_faces+n_edges;
1584: PetscBTCreate(n_all_cc,&sub_schurs->is_edge);
1585: PetscMalloc1(n_all_cc,&all_cc);
1586: for (i=0;i<n_faces;i++) {
1587: if (copycc) {
1588: ISDuplicate(faces[i],&all_cc[i]);
1589: } else {
1590: PetscObjectReference((PetscObject)faces[i]);
1591: all_cc[i] = faces[i];
1592: }
1593: }
1594: for (i=0;i<n_edges;i++) {
1595: if (copycc) {
1596: ISDuplicate(edges[i],&all_cc[n_faces+i]);
1597: } else {
1598: PetscObjectReference((PetscObject)edges[i]);
1599: all_cc[n_faces+i] = edges[i];
1600: }
1601: PetscBTSet(sub_schurs->is_edge,n_faces+i);
1602: }
1603: PetscObjectReference((PetscObject)vertices);
1604: sub_schurs->is_vertices = vertices;
1605: PCBDDCGraphRestoreCandidatesIS(graph,&n_faces,&faces,&n_edges,&edges,&vertices);
1606: sub_schurs->is_dir = NULL;
1607: PCBDDCGraphGetDirichletDofsB(graph,&sub_schurs->is_dir);
1609: /* Determine if MatFactor can be used */
1610: sub_schurs->schur_explicit = PETSC_FALSE;
1611: #if defined(PETSC_HAVE_MUMPS)
1612: sub_schurs->schur_explicit = PETSC_TRUE;
1613: #endif
1614: #if defined(PETSC_HAVE_MKL_PARDISO)
1615: sub_schurs->schur_explicit = PETSC_TRUE;
1616: #endif
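  /* the explicit approach needs a sparse factorization package able to return the Schur complement; MUMPS and MKL_PARDISO provide this capability */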
1618: PetscObjectReference((PetscObject)is_I);
1619: sub_schurs->is_I = is_I;
1620: PetscObjectReference((PetscObject)is_B);
1621: sub_schurs->is_B = is_B;
1622: PetscObjectReference((PetscObject)graph->l2gmap);
1623: sub_schurs->l2gmap = graph->l2gmap;
1624: PetscObjectReference((PetscObject)BtoNmap);
1625: sub_schurs->BtoNmap = BtoNmap;
1626: sub_schurs->n_subs = n_all_cc;
1627: sub_schurs->is_subs = all_cc;
1628: if (!sub_schurs->schur_explicit) { /* sort by local ordering if we are not using MatFactor */
1629: for (i=0;i<sub_schurs->n_subs;i++) {
1630: ISSort(sub_schurs->is_subs[i]);
1631: }
1632: }
1633: sub_schurs->S_Ej_all = NULL;
1634: sub_schurs->sum_S_Ej_all = NULL;
1635: sub_schurs->sum_S_Ej_inv_all = NULL;
1636: sub_schurs->sum_S_Ej_tilda_all = NULL;
1637: sub_schurs->is_Ej_all = NULL;
1638: return(0);
1639: }
1641: PetscErrorCode PCBDDCSubSchursCreate(PCBDDCSubSchurs *sub_schurs)
1642: {
1643: PCBDDCSubSchurs schurs_ctx;
1644: PetscErrorCode ierr;
1647: PetscNew(&schurs_ctx);
1648: schurs_ctx->n_subs = 0;
1649: *sub_schurs = schurs_ctx;
1650: return(0);
1651: }
1653: PetscErrorCode PCBDDCSubSchursReset(PCBDDCSubSchurs sub_schurs)
1654: {
1655: PetscInt i;
1659: if (!sub_schurs) return(0);
1660: MatDestroy(&sub_schurs->A);
1661: MatDestroy(&sub_schurs->S);
1662: ISDestroy(&sub_schurs->is_I);
1663: ISDestroy(&sub_schurs->is_B);
1664: ISLocalToGlobalMappingDestroy(&sub_schurs->l2gmap);
1665: ISLocalToGlobalMappingDestroy(&sub_schurs->BtoNmap);
1666: MatDestroy(&sub_schurs->S_Ej_all);
1667: MatDestroy(&sub_schurs->sum_S_Ej_all);
1668: MatDestroy(&sub_schurs->sum_S_Ej_inv_all);
1669: MatDestroy(&sub_schurs->sum_S_Ej_tilda_all);
1670: ISDestroy(&sub_schurs->is_Ej_all);
1671: ISDestroy(&sub_schurs->is_vertices);
1672: ISDestroy(&sub_schurs->is_dir);
1673: PetscBTDestroy(&sub_schurs->is_edge);
1674: for (i=0;i<sub_schurs->n_subs;i++) {
1675: ISDestroy(&sub_schurs->is_subs[i]);
1676: }
1677: if (sub_schurs->n_subs) {
1678: PetscFree(sub_schurs->is_subs);
1679: }
1680: if (sub_schurs->reuse_solver) {
1681: PCBDDCReuseSolversReset(sub_schurs->reuse_solver);
1682: }
1683: PetscFree(sub_schurs->reuse_solver);
1684: if (sub_schurs->change) {
1685: for (i=0;i<sub_schurs->n_subs;i++) {
1686: KSPDestroy(&sub_schurs->change[i]);
1687: ISDestroy(&sub_schurs->change_primal_sub[i]);
1688: }
1689: }
1690: PetscFree(sub_schurs->change);
1691: PetscFree(sub_schurs->change_primal_sub);
1692: sub_schurs->n_subs = 0;
1693: return(0);
1694: }
1696: PetscErrorCode PCBDDCSubSchursDestroy(PCBDDCSubSchurs* sub_schurs)
1697: {
1701: PCBDDCSubSchursReset(*sub_schurs);
1702: PetscFree(*sub_schurs);
1703: return(0);
1704: }
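/*
   A minimal usage sketch (not part of the PETSc sources) of the lifecycle implemented by the
   routines above; is_I, is_B, graph and BtoNmap stand for objects already owned by the caller:

     PCBDDCSubSchurs sub_schurs;

     PCBDDCSubSchursCreate(&sub_schurs);
     PCBDDCSubSchursInit(sub_schurs,is_I,is_B,graph,BtoNmap,PETSC_TRUE);
     ... the subset Schur complements are then computed by the setup routine
         (PCBDDCSubSchursSetUp) defined earlier in this file ...
     PCBDDCSubSchursDestroy(&sub_schurs);
*/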
1706: PETSC_STATIC_INLINE PetscErrorCode PCBDDCAdjGetNextLayer_Private(PetscInt* queue_tip,PetscInt n_prev,PetscBT touched,PetscInt* xadj,PetscInt* adjncy,PetscInt* n_added)
1707: {
1708: PetscInt i,j,n;
1712: n = 0;
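  /* the n_prev dofs of the previous layer sit right before queue_tip; append their untouched neighbors at queue_tip and mark them in the touched bitmap */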
1713: for (i=-n_prev;i<0;i++) {
1714: PetscInt start_dof = queue_tip[i];
1715: for (j=xadj[start_dof];j<xadj[start_dof+1];j++) {
1716: PetscInt dof = adjncy[j];
1717: if (!PetscBTLookup(touched,dof)) {
1718: PetscBTSet(touched,dof);
1719: queue_tip[n] = dof;
1720: n++;
1721: }
1722: }
1723: }
1724: *n_added = n;
1725: return(0);
1726: }