Actual source code: ex4.c

static char help[] = "Uses a different preconditioner matrix and linear system matrix in the KSP solvers.\n\
Note that different storage formats\n\
can be used for the different matrices.\n\n";

/*T
   Concepts: KSP^different matrices for linear system and preconditioner;
   Processors: n
T*/

/*
  Include "petscksp.h" so that we can use KSP solvers.  Note that this file
  automatically includes:
     petsc.h       - base PETSc routines   petscvec.h - vectors
     petscsys.h    - system routines       petscmat.h - matrices
     petscis.h     - index sets            petscksp.h - Krylov subspace methods
     petscviewer.h - viewers               petscpc.h  - preconditioners
*/
#include "petscksp.h"

int main(int argc,char **args)
{
  KSP            ksp;       /* linear solver context */
  Mat            A,B;       /* linear system matrix, preconditioning matrix */
  PetscRandom    rctx;      /* random number generator context */
  Vec            x,b,u;     /* approx solution, RHS, exact solution */
  Vec            tmp;       /* work vector */
  PetscScalar    v,one = 1.0,scale = 0.0;
  PetscInt       i,j,m = 15,n = 17,Ii,J,Istart,Iend;

  PetscInitialize(&argc,&args,(char *)0,help);
  PetscOptionsGetInt(PETSC_NULL,"-m",&m,PETSC_NULL);
  PetscOptionsGetInt(PETSC_NULL,"-n",&n,PETSC_NULL);
  PetscOptionsGetScalar(PETSC_NULL,"-scale",&scale,PETSC_NULL);
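
  /*
     The grid dimensions and the noise scaling are runtime options; a
     hypothetical invocation (adjust the MPI launcher for your system):
         mpiexec -n 2 ./ex4 -m 20 -n 20 -scale 0.5
  */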

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
         Compute the matrix and right-hand-side vector that define
         the linear system, Ax = b.  Also, create a different
         preconditioner matrix.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Create the linear system matrix (A).
      - Here we use a block diagonal matrix format (MATBDIAG) and
        specify only the global size.  The parallel partitioning of
        the matrix will be determined at runtime by PETSc.
  */
  MatCreate(PETSC_COMM_WORLD,&A);
  MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,m*n,m*n);
  MatSetType(A,MATBDIAG);
  MatSeqBDiagSetPreallocation(A,0,1,PETSC_NULL,PETSC_NULL);
  MatMPIBDiagSetPreallocation(A,0,1,PETSC_NULL,PETSC_NULL);

  /*
     Create a different preconditioner matrix (B).  This is usually
     done to form a cheaper (or sparser) preconditioner matrix
     compared to the linear system matrix.
      - Here we use MatCreate() followed by MatSetFromOptions(),
        so that the matrix format and parallel partitioning will be
        determined at runtime.
  */
  MatCreate(PETSC_COMM_WORLD,&B);
  MatSetSizes(B,PETSC_DECIDE,PETSC_DECIDE,m*n,m*n);
  MatSetFromOptions(B);
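
  /*
     For example, B's format could be selected at runtime from the PETSc
     options database with something like
         -mat_type mpiaij
     which chooses the parallel AIJ (compressed sparse row) format.
  */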

  /*
     Currently, all PETSc parallel matrix formats are partitioned by
     contiguous chunks of rows across the processors.  Determine which
     rows of the matrix are locally owned.
  */
  MatGetOwnershipRange(A,&Istart,&Iend);
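
  /*
     For instance, with the default m = 15, n = 17 (255 rows) on two
     processes, the default partitioning would typically give rows
     0..127 to process 0 and rows 128..254 to process 1.
  */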

  /*
     Set entries within the two matrices
  */
  for (Ii=Istart; Ii<Iend; Ii++) {
    v = -1.0; i = Ii/n; j = Ii - i*n;   /* grid row i, grid column j of matrix row Ii */
    if (i>0) {
      J = Ii - n;                       /* neighbor in the previous grid row */
      MatSetValues(A,1,&Ii,1,&J,&v,INSERT_VALUES);
      MatSetValues(B,1,&Ii,1,&J,&v,INSERT_VALUES);
    }
    if (i<m-1) {
      J = Ii + n;                       /* neighbor in the next grid row */
      MatSetValues(A,1,&Ii,1,&J,&v,INSERT_VALUES);
      MatSetValues(B,1,&Ii,1,&J,&v,INSERT_VALUES);
    }
    if (j>0) {
      J = Ii - 1;                       /* left neighbor: set in A only */
      MatSetValues(A,1,&Ii,1,&J,&v,INSERT_VALUES);
    }
    if (j<n-1) {
      J = Ii + 1;                       /* right neighbor: set in A only */
      MatSetValues(A,1,&Ii,1,&J,&v,INSERT_VALUES);
    }
    v = 5.0; MatSetValues(A,1,&Ii,1,&Ii,&v,INSERT_VALUES);
    v = 3.0; MatSetValues(B,1,&Ii,1,&Ii,&v,INSERT_VALUES);
  }
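
  /*
     The loop above gives A a standard 5-point stencil on the m x n grid
     (off-diagonal entries -1.0, diagonal 5.0), while B receives only the
     couplings to the adjacent grid rows plus a diagonal of 3.0, making B
     a sparser approximation of A.
  */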

  /*
     Assemble the preconditioner matrix (B), using the 2-step process
       MatAssemblyBegin(), MatAssemblyEnd()
     Note that computations can be done while messages are in
     transit by placing code between these two statements; here we
     insert some additional entries into A during B's assembly.
  */
  MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);
  for (Ii=Istart; Ii<Iend; Ii++) {
    v = -0.5; i = Ii/n;
    if (i>1) {
      J = Ii - (n+1); MatSetValues(A,1,&Ii,1,&J,&v,INSERT_VALUES);
    }
    if (i<m-2) {
      J = Ii + n + 1; MatSetValues(A,1,&Ii,1,&J,&v,INSERT_VALUES);
    }
  }
  MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);

  /*
     Assemble the linear system matrix (A)
  */
  MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);

  /*
     Create parallel vectors.
      - When using VecSetSizes(), we specify only the vector's global
        dimension; the parallel partitioning is determined at runtime.
      - Note: We form 1 vector from scratch and then duplicate as needed.
  */
  VecCreate(PETSC_COMM_WORLD,&b);
  VecSetSizes(b,PETSC_DECIDE,m*n);
  VecSetFromOptions(b);
  VecDuplicate(b,&u);
  VecDuplicate(b,&x);

  /*
     Set the exact solution vector u to all ones, then perturb it with
     random noise scaled by the -scale option: u = 1 + scale*noise
  */
  VecSet(u,one);
  VecDuplicate(u,&tmp);
  PetscRandomCreate(PETSC_COMM_WORLD,&rctx);
  PetscRandomSetFromOptions(rctx);
  VecSetRandom(tmp,rctx);
  PetscRandomDestroy(rctx);
  VecAXPY(u,scale,tmp);
  VecDestroy(tmp);

  /*
     Compute the right-hand-side vector b = A*u, so that u is the known
     exact solution of the system
  */
  MatMult(A,u,b);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                Create the linear solver and set various options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Create linear solver context
  */
  KSPCreate(PETSC_COMM_WORLD,&ksp);

  /*
     Set operators.  Note that we use different matrices to define the
     linear system and to precondition it.
  */
  KSPSetOperators(ksp,A,B,DIFFERENT_NONZERO_PATTERN);
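
  /*
     The final argument describes how the preconditioner matrix's nonzero
     structure changes between successive solves; alternatives to
     DIFFERENT_NONZERO_PATTERN in this version of PETSc include
     SAME_NONZERO_PATTERN and SAME_PRECONDITIONER.
  */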

  /*
     Set runtime options (e.g., -ksp_type <type> -pc_type <type>)
  */
  KSPSetFromOptions(ksp);
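
  /*
     For example, one might run with standard options such as
         -ksp_type gmres -pc_type jacobi -ksp_monitor -ksp_rtol 1.e-8
     to select the Krylov method and preconditioner and monitor convergence.
  */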

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                      Solve the linear system
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  KSPSolve(ksp,b,x);
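
  /*
     Check the error (a minimal sketch, not part of the original example):
     since b was formed as A*u above, u is the exact solution; overwrite
     x with x - u and print its 2-norm along with the iteration count.
  */
  {
    PetscReal norm;
    PetscInt  its;
    VecAXPY(x,-1.0,u);
    VecNorm(x,NORM_2,&norm);
    KSPGetIterationNumber(ksp,&its);
    PetscPrintf(PETSC_COMM_WORLD,"Norm of error = %g, iterations = %d\n",(double)norm,(int)its);
  }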

  /*
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
  */
  KSPDestroy(ksp); VecDestroy(u);
  MatDestroy(B);   VecDestroy(x);
  MatDestroy(A);   VecDestroy(b);

  /*
     Always call PetscFinalize() before exiting a program.  This routine
       - finalizes the PETSc libraries as well as MPI
       - provides summary and diagnostic information if certain runtime
         options are chosen (e.g., -log_summary).
  */
  PetscFinalize();
  return 0;
}