Actual source code: ex2f.F

!
!  Description: Solves a linear system in parallel with KSP (Fortran code).
!               Also shows how to set a user-defined monitoring routine.
!
!  Program usage: mpirun -np <procs> ex2f [-help] [all PETSc options]
!
!/*T
!  Concepts: KSP^basic parallel example
!  Concepts: KSP^setting a user-defined monitoring routine
!  Processors: n
!T*/
!
! -----------------------------------------------------------------------

      program main
      implicit none

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                    Include files
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!
!  This program uses CPP for preprocessing, as indicated by the use of
!  PETSc include files in the directory petsc/include/finclude.  This
!  convention enables use of the CPP preprocessor, which allows the use
!  of the #include statements that define PETSc objects and variables.
!
!  Use of the conventional Fortran include statements is also supported.
!  In this case, the PETSc include files are located in the directory
!  petsc/include/foldinclude; a commented sketch of this form follows
!  the #include statements below.
!
!  Since one must be very careful to include each file no more than once
!  in a Fortran routine, application programmers must explicitly list
!  each file needed for the various PETSc components within their
!  program (unlike the C/C++ interface).
!
!  See the Fortran section of the PETSc users manual for details.
!
!  The following include statements are required for KSP Fortran programs:
!     petsc.h       - base PETSc routines
!     petscvec.h    - vectors
!     petscmat.h    - matrices
!     petscpc.h     - preconditioners
!     petscksp.h    - Krylov subspace methods
!  Include the following to use PETSc random numbers:
!     petscsys.h    - system routines
!  Additional include statements may be needed if using additional
!  PETSc routines in a Fortran program, e.g.,
!     petscviewer.h - viewers
!     petscis.h     - index sets
!
#include "include/finclude/petsc.h"
#include "include/finclude/petscvec.h"
#include "include/finclude/petscmat.h"
#include "include/finclude/petscpc.h"
#include "include/finclude/petscksp.h"
#include "include/finclude/petscsys.h"
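
!  For reference, the conventional Fortran include form mentioned above
!  would look roughly as follows (a sketch only; this program uses the
!  CPP #include form instead):
!
!      include 'include/foldinclude/petsc.h'
!      include 'include/foldinclude/petscvec.h'
!      include 'include/foldinclude/petscmat.h'
!      include 'include/foldinclude/petscpc.h'
!      include 'include/foldinclude/petscksp.h'
!      include 'include/foldinclude/petscsys.h'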
!
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                   Variable declarations
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!
!  Variables:
!     ksp      - Krylov subspace method (linear solver) context
!     pc       - preconditioner context
!     x, b, u  - approx solution, right-hand-side, exact solution vectors
!     A        - matrix that defines linear system
!     its      - iterations for convergence
!     norm     - norm of error in solution
!     rctx     - random number generator context
!
!  Note that vectors are declared as PETSc "Vec" objects.  These vectors
!  are mathematical objects that contain more than just an array of
!  double precision numbers.  I.e., vectors in PETSc are not just
!        double precision x(*).
!  However, local vector data can be easily accessed via VecGetArray(),
!  as sketched below.  See the Fortran section of the PETSc users manual
!  for details.
!
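!  A minimal sketch of the Fortran VecGetArray() idiom (illustrative
!  only; not used in this example).  The local data of a vector x is
!  addressed through an array/offset pair, where nlocal denotes the
!  local length (e.g., from VecGetLocalSize()):
!
!      PetscScalar x_array(1)
!      PetscOffset i_x
!      call VecGetArray(x,x_array,i_x,ierr)
!      ... work with x_array(i_x+1) ... x_array(i_x+nlocal) ...
!      call VecRestoreArray(x,x_array,i_x,ierr)
!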
      double precision  norm
      PetscInt  i,j,II,JJ,m,n,its
      PetscInt  Istart,Iend,ione
      PetscErrorCode ierr
      PetscMPIInt     rank,size
      PetscTruth  flg
      PetscScalar v,one,neg_one
      Vec         x,b,u
      Mat         A
      KSP         ksp
      PetscRandom rctx

!  These variables are not currently used.
!      PC          pc
!      PCType      ptype
!      double precision tol


!  Note: Any user-defined Fortran routines (such as MyKSPMonitor)
!  MUST be declared as external.

      external MyKSPMonitor,MyKSPConverged

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                 Beginning of program
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

      call PetscInitialize(PETSC_NULL_CHARACTER,ierr)
      m = 3
      n = 3
      one     = 1.0
      neg_one = -1.0
      ione    = 1
      call PetscOptionsGetInt(PETSC_NULL_CHARACTER,'-m',m,flg,ierr)
      call PetscOptionsGetInt(PETSC_NULL_CHARACTER,'-n',n,flg,ierr)
      call MPI_Comm_rank(PETSC_COMM_WORLD,rank,ierr)
      call MPI_Comm_size(PETSC_COMM_WORLD,size,ierr)

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!      Compute the matrix and right-hand-side vector that define
!      the linear system, Ax = b.
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Create parallel matrix, specifying only its global dimensions.
!  When using MatCreate(), the matrix format can be specified at
!  runtime.  Also, the parallel partitioning of the matrix is
!  determined by PETSc at runtime.

      call MatCreate(PETSC_COMM_WORLD,A,ierr)
      call MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,m*n,m*n,ierr)
      call MatSetFromOptions(A,ierr)
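
!  (For illustration: with MatSetFromOptions() in place, a runtime
!  option such as -mat_type mpiaij selects the standard parallel
!  AIJ sparse format.)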

!  Currently, all PETSc parallel matrix formats are partitioned by
!  contiguous chunks of rows across the processors.  Determine which
!  rows of the matrix are locally owned.

      call MatGetOwnershipRange(A,Istart,Iend,ierr)

!  Set matrix elements for the 2-D, five-point stencil in parallel.
!   - Each processor needs to insert only elements that it owns
!     locally (but any non-local elements will be sent to the
!     appropriate processor during matrix assembly).
!   - Always specify global rows and columns of matrix entries.
!   - Note that MatSetValues() uses 0-based row and column numbers
!     in Fortran as well as in C.

!     Note: this uses the less common natural ordering that orders first
!     all the unknowns for x = h, then for x = 2h, etc.; hence you see
!     JJ = II +- n instead of JJ = II +- m as you might expect.  The more
!     standard ordering would first do all variables for y = h, then
!     y = 2h, etc.  A worked index example follows.
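
!     For example, with the default m = n = 3, the interior row II = 4
!     (i = 1, j = 1) couples to rows 1 (II-n), 7 (II+n), 3 (II-1), and
!     5 (II+1), and receives the value 4.0 on the diagonal.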

      do 10, II=Istart,Iend-1
        v = -1.0
        i = II/n
        j = II - i*n
        if (i.gt.0) then
          JJ = II - n
          call MatSetValues(A,ione,II,ione,JJ,v,INSERT_VALUES,ierr)
        endif
        if (i.lt.m-1) then
          JJ = II + n
          call MatSetValues(A,ione,II,ione,JJ,v,INSERT_VALUES,ierr)
        endif
        if (j.gt.0) then
          JJ = II - 1
          call MatSetValues(A,ione,II,ione,JJ,v,INSERT_VALUES,ierr)
        endif
        if (j.lt.n-1) then
          JJ = II + 1
          call MatSetValues(A,ione,II,ione,JJ,v,INSERT_VALUES,ierr)
        endif
        v = 4.0
        call MatSetValues(A,ione,II,ione,II,v,INSERT_VALUES,ierr)
 10   continue

!  Assemble matrix, using the 2-step process:
!       MatAssemblyBegin(), MatAssemblyEnd()
!  Computations can be done while messages are in transition,
!  by placing code between these two statements.

      call MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY,ierr)
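!     (Any independent local computation could be placed here,
!     overlapping with the communication started above.)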
      call MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY,ierr)

!  Create parallel vectors.
!   - Here, the parallel partitioning of the vector is determined by
!     PETSc at runtime.  We could also specify the local dimensions
!     if desired -- or use the more general routine VecCreate(), as
!     sketched after the code below.
!   - When solving a linear system, the vectors and matrices MUST
!     be partitioned accordingly.  PETSc automatically generates
!     appropriately partitioned matrices and vectors when MatCreate()
!     and VecCreate() are used with the same communicator.
!   - Note: We form 1 vector from scratch and then duplicate as needed.

      call VecCreateMPI(PETSC_COMM_WORLD,PETSC_DECIDE,m*n,u,ierr)
      call VecSetFromOptions(u,ierr)
      call VecDuplicate(u,b,ierr)
      call VecDuplicate(b,x,ierr)
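
!  The more general VecCreate() sequence mentioned above would look
!  roughly like this (a sketch; not used here):
!
!      call VecCreate(PETSC_COMM_WORLD,u,ierr)
!      call VecSetSizes(u,PETSC_DECIDE,m*n,ierr)
!      call VecSetFromOptions(u,ierr)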

!  Set exact solution; then compute right-hand-side vector.
!  By default we use an exact solution of a vector with all
!  elements of 1.0; alternatively, the runtime option
!  -random_exact_sol forms a solution vector with random components.

      call PetscOptionsHasName(PETSC_NULL_CHARACTER,                    &
     &             "-random_exact_sol",flg,ierr)
      if (flg .eq. PETSC_TRUE) then
         call PetscRandomCreate(PETSC_COMM_WORLD,rctx,ierr)
         call PetscRandomSetFromOptions(rctx,ierr)
         call VecSetRandom(u,rctx,ierr)
         call PetscRandomDestroy(rctx,ierr)
      else
         call VecSet(u,one,ierr)
      endif
      call MatMult(A,u,b,ierr)

!  View the exact solution vector if desired

      call PetscOptionsHasName(PETSC_NULL_CHARACTER,                    &
     &             "-view_exact_sol",flg,ierr)
      if (flg .eq. PETSC_TRUE) then
         call VecView(u,PETSC_VIEWER_STDOUT_WORLD,ierr)
      endif

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!         Create the linear solver and set various options
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Create linear solver context

      call KSPCreate(PETSC_COMM_WORLD,ksp,ierr)

!  Set operators. Here the matrix that defines the linear system
!  also serves as the preconditioning matrix.

      call KSPSetOperators(ksp,A,A,DIFFERENT_NONZERO_PATTERN,ierr)

!  Set linear solver defaults for this problem (optional).
!   - By extracting the KSP and PC contexts from the KSP context,
!     we can then directly call any KSP and PC routines
!     to set various options.
!   - The following statements are optional; all of these
!     parameters could alternatively be specified at runtime via
!     KSPSetFromOptions().  All of these defaults can be
!     overridden at runtime, as indicated below.

!     We comment out this section of code since the Jacobi
!     preconditioner is not a good general default.

!      call KSPGetPC(ksp,pc,ierr)
!      ptype = PCJACOBI
!      call PCSetType(pc,ptype,ierr)
!      tol = 1.e-7
!      call KSPSetTolerances(ksp,tol,PETSC_DEFAULT_DOUBLE_PRECISION,
!     &     PETSC_DEFAULT_DOUBLE_PRECISION,PETSC_DEFAULT_INTEGER,ierr)

!  Set user-defined monitoring routine if desired

      call PetscOptionsHasName(PETSC_NULL_CHARACTER,'-my_ksp_monitor',  &
     &                    flg,ierr)
      if (flg .eq. PETSC_TRUE) then
        call KSPMonitorSet(ksp,MyKSPMonitor,PETSC_NULL_OBJECT,          &
     &                     PETSC_NULL_FUNCTION,ierr)
      endif


!  Set runtime options, e.g.,
!      -ksp_type <type> -pc_type <type> -ksp_monitor -ksp_rtol <rtol>
!  These options will override those specified above as long as
!  KSPSetFromOptions() is called _after_ any other customization
!  routines.  An illustrative invocation follows.
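
!  For example (illustrative only):
!      mpirun -np 4 ex2f -ksp_type cg -pc_type jacobi -ksp_monitor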

      call KSPSetFromOptions(ksp,ierr)

!  Set convergence test routine if desired

      call PetscOptionsHasName(PETSC_NULL_CHARACTER,                    &
     &     '-my_ksp_convergence',flg,ierr)
      if (flg .eq. PETSC_TRUE) then
        call KSPSetConvergenceTest(ksp,MyKSPConverged,                  &
     &          PETSC_NULL_OBJECT,ierr)
      endif
!
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                      Solve the linear system
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

      call KSPSolve(ksp,b,x,ierr)

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                     Check solution and clean up
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Check the error

      call VecAXPY(x,neg_one,u,ierr)
      call VecNorm(x,NORM_2,norm,ierr)
      call KSPGetIterationNumber(ksp,its,ierr)
      if (rank .eq. 0) then
        if (norm .gt. 1.e-12) then
           write(6,100) norm,its
        else
           write(6,110) its
        endif
      endif
  100 format('Norm of error ',e10.4,' iterations ',i5)
  110 format('Norm of error < 1.e-12, iterations ',i5)

!  Free work space.  All PETSc objects should be destroyed when they
!  are no longer needed.

      call KSPDestroy(ksp,ierr)
      call VecDestroy(u,ierr)
      call VecDestroy(x,ierr)
      call VecDestroy(b,ierr)
      call MatDestroy(A,ierr)

!  Always call PetscFinalize() before exiting a program.  This routine
!    - finalizes the PETSc libraries as well as MPI
!    - provides summary and diagnostic information if certain runtime
!      options are chosen (e.g., -log_summary).  See the PetscFinalize()
!      manpage for more information.

      call PetscFinalize(ierr)
      end

! --------------------------------------------------------------
!
!  MyKSPMonitor - This is a user-defined routine for monitoring
!  the KSP iterative solvers.
!
!  Input Parameters:
!    ksp   - iterative context
!    n     - iteration number
!    rnorm - 2-norm (preconditioned) residual value (may be estimated)
!    dummy - optional user-defined monitor context (unused here)
!
      subroutine MyKSPMonitor(ksp,n,rnorm,dummy,ierr)

      implicit none

#include "include/finclude/petsc.h"
#include "include/finclude/petscvec.h"
#include "include/finclude/petscksp.h"

      KSP              ksp
      Vec              x
      PetscErrorCode   ierr
      PetscInt         n,dummy
      PetscMPIInt      rank
      double precision rnorm

!  Build the solution vector

      call KSPBuildSolution(ksp,PETSC_NULL_OBJECT,x,ierr)

!  Write the solution vector and residual norm to stdout
!   - Note that the parallel viewer PETSC_VIEWER_STDOUT_WORLD
!     handles data from multiple processors so that the
!     output is not jumbled.

      call MPI_Comm_rank(PETSC_COMM_WORLD,rank,ierr)
      if (rank .eq. 0) write(6,100) n
      call VecView(x,PETSC_VIEWER_STDOUT_WORLD,ierr)
      if (rank .eq. 0) write(6,200) n,rnorm

 100  format('iteration ',i5,' solution vector:')
 200  format('iteration ',i5,' residual norm ',e10.4)
      ierr = 0
      end

! --------------------------------------------------------------
!
!  MyKSPConverged - This is a user-defined routine for testing
!  convergence of the KSP iterative solvers.
!
!  Input Parameters:
!    ksp   - iterative context
!    n     - iteration number
!    rnorm - 2-norm (preconditioned) residual value (may be estimated)
!    dummy - optional user-defined convergence context (unused here)
!
!  Output Parameter:
!    flag  - positive if convergence has been achieved, zero otherwise
!
      subroutine MyKSPConverged(ksp,n,rnorm,flag,dummy,ierr)

      implicit none

#include "include/finclude/petsc.h"
#include "include/finclude/petscvec.h"
#include "include/finclude/petscksp.h"

      KSP                ksp
      PetscErrorCode     ierr
      PetscInt           n,dummy
      KSPConvergedReason flag
      double precision   rnorm
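
!  A positive KSPConvergedReason value tells KSP that the iteration
!  has converged; zero means "keep iterating".  Here we simply declare
!  convergence once the (preconditioned) residual norm drops below 0.05.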

      if (rnorm .le. .05) then
        flag = 1
      else
        flag = 0
      endif
      ierr = 0

      end