Actual source code: ex7.c
static char help[] = "Block Jacobi preconditioner for solving a linear system in parallel with KSP.\n\
The code indicates the\n\
procedures for setting the particular block sizes and for using different\n\
linear solvers on the individual blocks.\n\n";
/*
   Note: This example focuses on ways to customize the block Jacobi
   preconditioner. See ex1.c and ex2.c for more detailed comments on
   the basic usage of KSP (including working with matrices and vectors).

   Recall: The block Jacobi method is equivalent to the ASM preconditioner
   with zero overlap.
*/

/*T
   Concepts: KSP^customizing the block Jacobi preconditioner
   Processors: n
T*/
/*
  Include "petscksp.h" so that we can use KSP solvers.  Note that this file
  automatically includes:
     petsc.h       - base PETSc routines   petscvec.h - vectors
     petscsys.h    - system routines       petscmat.h - matrices
     petscis.h     - index sets            petscksp.h - Krylov subspace methods
     petscviewer.h - viewers               petscpc.h  - preconditioners
*/
#include "petscksp.h"
int main(int argc,char **args)
{
  Vec            x,b,u;      /* approx solution, RHS, exact solution */
  Mat            A;          /* linear system matrix */
  KSP            ksp;        /* KSP context */
  KSP            *subksp;    /* array of local KSP contexts on this processor */
  PC             pc;         /* PC context */
  PC             subpc;      /* PC context for subdomain */
  PetscReal      norm;       /* norm of solution error */
  PetscInt       i,j,Ii,J,*blks,m = 8,n;
  PetscMPIInt    rank,size;
  PetscInt       its,nlocal,first,Istart,Iend;
  PetscScalar    v,one = 1.0,none = -1.0;
  PetscTruth     isbjacobi,flg;

  PetscInitialize(&argc,&args,(char *)0,help);
  PetscOptionsGetInt(PETSC_NULL,"-m",&m,PETSC_NULL);
  MPI_Comm_rank(PETSC_COMM_WORLD,&rank);
  MPI_Comm_size(PETSC_COMM_WORLD,&size);
  n = m+2;
  /* -------------------------------------------------------------------
         Compute the matrix and right-hand-side vector that define
         the linear system, Ax = b.
     ------------------------------------------------------------------- */

  /*
     Create and assemble parallel matrix
  */
  MatCreate(PETSC_COMM_WORLD,&A);
  MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,m*n,m*n);
  MatSetFromOptions(A);
  MatGetOwnershipRange(A,&Istart,&Iend);
  for (Ii=Istart; Ii<Iend; Ii++) {
    v = -1.0; i = Ii/n; j = Ii - i*n;
    if (i>0)   {J = Ii - n; MatSetValues(A,1,&Ii,1,&J,&v,ADD_VALUES);}
    if (i<m-1) {J = Ii + n; MatSetValues(A,1,&Ii,1,&J,&v,ADD_VALUES);}
    if (j>0)   {J = Ii - 1; MatSetValues(A,1,&Ii,1,&J,&v,ADD_VALUES);}
    if (j<n-1) {J = Ii + 1; MatSetValues(A,1,&Ii,1,&J,&v,ADD_VALUES);}
    v = 4.0; MatSetValues(A,1,&Ii,1,&Ii,&v,ADD_VALUES);
  }
  MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);
  /*
     Create parallel vectors
  */
  VecCreate(PETSC_COMM_WORLD,&u);
  VecSetSizes(u,PETSC_DECIDE,m*n);
  VecSetFromOptions(u);
  VecDuplicate(u,&b);
  VecDuplicate(b,&x);

  /*
     Set exact solution; then compute right-hand-side vector.
  */
  VecSet(u,one);
  MatMult(A,u,b);
  /*
     Create linear solver context
  */
  KSPCreate(PETSC_COMM_WORLD,&ksp);

  /*
     Set operators. Here the matrix that defines the linear system
     also serves as the preconditioning matrix.
  */
  KSPSetOperators(ksp,A,A,DIFFERENT_NONZERO_PATTERN);
  /*
     Set the default preconditioner for this program to be block Jacobi.
     This choice can be overridden at runtime with the option
        -pc_type <type>
  */
  KSPGetPC(ksp,&pc);
  PCSetType(pc,PCBJACOBI);
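  /*
     Illustrative aside (an assumption added here, not part of the original
     example): since block Jacobi is equivalent to ASM with zero overlap, a
     comparable default could be set as sketched below.  Left commented out
     so that PCBJACOBI above remains the preconditioner actually used.

        PCSetType(pc,PCASM);
        PCASMSetOverlap(pc,0);
  */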
  /* -------------------------------------------------------------------
                   Define the problem decomposition
     ------------------------------------------------------------------- */

  /*
     Call PCBJacobiSetTotalBlocks() to set individually the size of
     each block in the preconditioner.  This could also be done with
     the runtime option
        -pc_bjacobi_blocks <blocks>
     Also, see the command PCBJacobiSetLocalBlocks() to set the
     local blocks (a sketch follows below).

     Note: The default decomposition is 1 block per processor.
  */
  PetscMalloc(m*sizeof(PetscInt),&blks);
  for (i=0; i<m; i++) blks[i] = n;
  PCBJacobiSetTotalBlocks(pc,m,blks);
  PetscFree(blks);
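  /*
     Minimal sketch of the per-process alternative mentioned above (added
     for illustration; not part of the original example).  Instead of
     describing all blocks globally, PCBJacobiSetLocalBlocks() sets only
     the number and sizes of the blocks owned by this process; the sizes
     should sum to the number of locally owned rows.  Kept inside a comment
     so it does not interfere with PCBJacobiSetTotalBlocks() above.

        PetscInt nloc,*lblks;
        MatGetLocalSize(A,&nloc,PETSC_NULL);      number of locally owned rows
        PetscMalloc(sizeof(PetscInt),&lblks);
        lblks[0] = nloc;                          one block = all local rows
        PCBJacobiSetLocalBlocks(pc,1,lblks);
        PetscFree(lblks);
  */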
  /* -------------------------------------------------------------------
               Set the linear solvers for the subblocks
     ------------------------------------------------------------------- */

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
       Basic method, should be sufficient for the needs of most users.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

     By default, the block Jacobi method uses the same solver on each
     block of the problem.  To set the same solver options on all blocks,
     use the prefix -sub before the usual PC and KSP options, e.g.,
        -sub_pc_type <pc> -sub_ksp_type <ksp> -sub_ksp_rtol 1.e-4
  */
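  /*
     For instance, a typical invocation might look like the line below
     (illustrative only; the executable name and MPI launcher depend on
     the local build and MPI installation):
        mpiexec -n 4 ./ex7 -sub_pc_type ilu -sub_ksp_type gmres -sub_ksp_rtol 1.e-4
  */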
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
        Advanced method, setting different solvers for various blocks.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

     Note that each block's KSP context is completely independent of
     the others, and the full range of uniprocessor KSP options is
     available for each block.  The following section of code is intended
     to be a simple illustration of setting different linear solvers for
     the individual blocks.  These choices are obviously not recommended
     for solving this particular problem.
  */
  PetscTypeCompare((PetscObject)pc,PCBJACOBI,&isbjacobi);
  if (isbjacobi) {
    /*
       Call KSPSetUp() to set the block Jacobi data structures (including
       creation of an internal KSP context for each block).

       Note: KSPSetUp() MUST be called before PCBJacobiGetSubKSP().
    */
    KSPSetUp(ksp);

    /*
       Extract the array of KSP contexts for the local blocks
    */
    PCBJacobiGetSubKSP(pc,&nlocal,&first,&subksp);

    /*
       Loop over the local blocks, setting various KSP options
       for each block.
    */
    for (i=0; i<nlocal; i++) {
      KSPGetPC(subksp[i],&subpc);
      if (!rank) {
        if (i%2) {
          PCSetType(subpc,PCILU);
        } else {
          PCSetType(subpc,PCNONE);
          KSPSetType(subksp[i],KSPBCGS);
          KSPSetTolerances(subksp[i],1.e-6,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT);
        }
      } else {
        PCSetType(subpc,PCJACOBI);
        KSPSetType(subksp[i],KSPGMRES);
        KSPSetTolerances(subksp[i],1.e-7,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT);
      }
    }
  }
  /* -------------------------------------------------------------------
                      Solve the linear system
     ------------------------------------------------------------------- */

  /*
     Set runtime options
  */
  KSPSetFromOptions(ksp);

  /*
     Solve the linear system
  */
  KSPSolve(ksp,b,x);

  /*
     View info about the solver
  */
  PetscOptionsHasName(PETSC_NULL,"-nokspview",&flg);
  if (!flg) {
    KSPView(ksp,PETSC_VIEWER_STDOUT_WORLD);
  }
  /* -------------------------------------------------------------------
                      Check solution and clean up
     ------------------------------------------------------------------- */

  /*
     Check the error
  */
  VecAXPY(x,none,u);
  VecNorm(x,NORM_2,&norm);
  KSPGetIterationNumber(ksp,&its);
  PetscPrintf(PETSC_COMM_WORLD,"Norm of error %A iterations %D\n",norm,its);

  /*
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
  */
  KSPDestroy(ksp);
  VecDestroy(u);  VecDestroy(x);
  VecDestroy(b);  MatDestroy(A);
  PetscFinalize();
  return 0;
}