Actual source code: ex23.c

/* Program usage:  mpirun ex23 [-help] [all PETSc options] */

static char help[] = "Solves a tridiagonal linear system.\n\n";

/*T
   Concepts: KSP^basic parallel example;
   Processors: n
T*/

/*
  Include "petscksp.h" so that we can use KSP solvers.  Note that this file
  automatically includes:
     petsc.h       - base PETSc routines   petscvec.h - vectors
     petscsys.h    - system routines       petscmat.h - matrices
     petscis.h     - index sets            petscksp.h - Krylov subspace methods
     petscviewer.h - viewers               petscpc.h  - preconditioners

  Note:  The corresponding uniprocessor example is ex1.c
*/
#include "petscksp.h"

int main(int argc,char **args)
{
  Vec            x, b, u;      /* approx solution, RHS, exact solution */
  Mat            A;            /* linear system matrix */
  KSP            ksp;          /* linear solver context */
  PC             pc;           /* preconditioner context */
  PetscReal      norm;         /* norm of solution error */
  PetscInt       i,n = 10,col[3],its,rstart,rend,nlocal;
  PetscScalar    neg_one = -1.0,one = 1.0,value[3];

  PetscInitialize(&argc,&args,(char *)0,help);
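  /* Allow the global problem size to be overridden at runtime with -n <size> */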
  PetscOptionsGetInt(PETSC_NULL,"-n",&n,PETSC_NULL);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
         Compute the matrix and right-hand-side vector that define
         the linear system, Ax = b.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Create vectors.  Note that we form 1 vector from scratch and
     then duplicate as needed. For this simple case let PETSc decide how
     many elements of the vector are stored on each processor. The second
     argument to VecSetSizes() below causes PETSc to decide.
  */
  VecCreate(PETSC_COMM_WORLD,&x);
  VecSetSizes(x,PETSC_DECIDE,n);
  VecSetFromOptions(x);
  VecDuplicate(x,&b);
  VecDuplicate(x,&u);

  /* Identify the starting and ending mesh points on each
     processor for the interior part of the mesh. We let PETSc decide
     above. */

  VecGetOwnershipRange(x,&rstart,&rend);
  VecGetLocalSize(x,&nlocal);

  /*
     Create matrix.  When using MatCreate(), the matrix format can
     be specified at runtime.

     Performance tuning note:  For problems of substantial size,
     preallocation of matrix memory is crucial for attaining good
     performance. See the matrix chapter of the users manual for details.

     We pass in nlocal as the "local" size of the matrix to force it
     to have the same parallel layout as the vector created above.
  */
  MatCreate(PETSC_COMM_WORLD,&A);
  MatSetSizes(A,nlocal,nlocal,n,n);
  MatSetFromOptions(A);

  /*
     Assemble matrix.

     The linear system is distributed across the processors by
     chunks of contiguous rows, which correspond to contiguous
     sections of the mesh on which the problem is discretized.
     For matrix assembly, each processor contributes entries for
     the part that it owns locally.
  */
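
  /* The first and last rows of the tridiagonal matrix have only two
     nonzero entries.  Set them on the processes that own them, and shrink
     [rstart,rend) so that the interior loop below skips these rows. */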


  if (!rstart) {
    rstart = 1;
    i = 0; col[0] = 0; col[1] = 1; value[0] = 2.0; value[1] = -1.0;
    MatSetValues(A,1,&i,2,col,value,INSERT_VALUES);
  }
  if (rend == n) {
    rend = n-1;
    i = n-1; col[0] = n-2; col[1] = n-1; value[0] = -1.0; value[1] = 2.0;
    MatSetValues(A,1,&i,2,col,value,INSERT_VALUES);
  }

  /* Set entries corresponding to the mesh interior */
  value[0] = -1.0; value[1] = 2.0; value[2] = -1.0;
  for (i=rstart; i<rend; i++) {
    col[0] = i-1; col[1] = i; col[2] = i+1;
    MatSetValues(A,1,&i,3,col,value,INSERT_VALUES);
  }

  /* Assemble the matrix */
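  /* Once all entries have been contributed, MatAssemblyBegin()/MatAssemblyEnd()
     must be called before the matrix can be used; values set for rows owned by
     other processes are communicated during this step. */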
  MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);

  /*
     Set exact solution; then compute right-hand-side vector.
  */
  VecSet(u,one);
  MatMult(A,u,b);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                Create the linear solver and set various options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Create linear solver context
  */
  KSPCreate(PETSC_COMM_WORLD,&ksp);

  /*
     Set operators. Here the matrix that defines the linear system
     also serves as the preconditioning matrix.
  */
  KSPSetOperators(ksp,A,A,DIFFERENT_NONZERO_PATTERN);

  /*
     Set linear solver defaults for this problem (optional).
     - By extracting the KSP and PC contexts from the KSP context,
       we can then directly call any KSP and PC routines to set
       various options.
     - The following three statements are optional; all of these
       parameters could alternatively be specified at runtime via
       KSPSetFromOptions();
  */
  KSPGetPC(ksp,&pc);
  PCSetType(pc,PCJACOBI);
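  /* The arguments to KSPSetTolerances() are the relative, absolute, and
     divergence tolerances followed by the maximum iteration count;
     PETSC_DEFAULT keeps the default value for everything but rtol. */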
  KSPSetTolerances(ksp,1.e-7,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT);

  /*
    Set runtime options, e.g.,
        -ksp_type <type> -pc_type <type> -ksp_monitor -ksp_rtol <rtol>
    These options will override those specified above as long as
    KSPSetFromOptions() is called _after_ any other customization
    routines.
  */
  KSPSetFromOptions(ksp);
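  /* An illustrative invocation (the MPI launcher name and process count
     depend on the local installation):
         mpirun -np 2 ex23 -n 100 -ksp_type gmres -pc_type bjacobi -ksp_monitor */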

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                      Solve the linear system
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Solve linear system
  */
  KSPSolve(ksp,b,x);

  /*
     View solver info; we could instead use the option -ksp_view to
     print this info to the screen at the conclusion of KSPSolve().
  */
  KSPView(ksp,PETSC_VIEWER_STDOUT_WORLD);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                      Check solution and clean up
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Check the error
  */
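  /* VecAXPY() overwrites x with x - u, so x now holds the error vector;
     its 2-norm is reported together with the iteration count. */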
  VecAXPY(x,neg_one,u);
  VecNorm(x,NORM_2,&norm);
  KSPGetIterationNumber(ksp,&its);
  PetscPrintf(PETSC_COMM_WORLD,"Norm of error %A, Iterations %D\n",norm,its);
  /*
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
  */
  VecDestroy(x); VecDestroy(u);
  VecDestroy(b); MatDestroy(A);
  KSPDestroy(ksp);

  /*
     Always call PetscFinalize() before exiting a program.  This routine
       - finalizes the PETSc libraries as well as MPI
       - provides summary and diagnostic information if certain runtime
         options are chosen (e.g., -log_summary).
  */
  PetscFinalize();
  return 0;
}