Actual source code: ex2f.F
1: !
2: !/*T
3: ! Concepts: TS^time-dependent nonlinear problems
4: ! Processors: n
5: !T*/
6: !
7: ! ------------------------------------------------------------------------
8: !
9: ! This program solves a simple time-dependent nonlinear PDE using implicit
10: ! timestepping:
11: !                   u * u_xx
12: !             u_t = ---------
13: !                   2*(t+1)^2
14: !
15: ! u(0,x) = 1 + x*x; u(t,0) = t + 1; u(t,1) = 2*t + 2
16: !
17: ! The exact solution is u(t,x) = (1 + x*x) * (1 + t).
18: !
19: ! Note that since the solution is linear in time and quadratic in x,
20: ! the finite difference scheme actually computes the "exact" solution.
21: !
22: ! We use the backward Euler method.
23: !
24: ! --------------------------------------------------------------------------
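!
!  As a quick check that the stated exact solution satisfies the PDE:
!  for u(t,x) = (1 + x*x)*(1 + t),
!        u_t  = 1 + x*x        and        u_xx = 2*(1 + t),
!  so
!        u * u_xx / (2*(t+1)^2) = (1 + x*x)*(1 + t)*2*(1 + t) / (2*(t+1)^2)
!                               = 1 + x*x = u_t.
!  The initial and boundary data above follow by setting t = 0, x = 0,
!  and x = 1 in the exact solution.
!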
26: program main
27: implicit none
29: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
30: ! Include files
31: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
32: !
33: ! Each routine within this program uses the include file 'ex2f.h',
34: ! which itself includes the various PETSc include files as well as
35: ! problem-specific data in several common blocks.
36: !
37: ! This program uses CPP for preprocessing, as indicated by the use of
38: ! PETSc include files in the directory petsc/include/finclude. This
39: ! convention enables use of the #include statements that define PETSc
40: ! objects and variables.
41: !
42: ! Use of the conventional Fortran include statements is also supported.
43: ! In this case, the PETSc include files are located in the directory
44: ! petsc/include/foldinclude.
45: !
46: ! Since one must be very careful to include each file no more than once
47: ! in a Fortran routine, application programmers must explicitly list
48: ! each file needed for the various PETSc components within their
49: ! program (unlike the C/C++ interface).
50: !
51: ! See the Fortran section of the PETSc users manual for details.
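!
!  As a rough sketch of this convention (the names below are illustrative,
!  not the actual contents of ex2f.h), such an include file combines the
!  needed PETSc #include lines with common blocks that hold the shared
!  problem data, along the lines of
!      PetscReal h
!      PetscInt  m
!      common /params/ h,m
!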
53: #include "ex2f.h"
55: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
56: ! Variable declarations
57: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
58: !
59: ! Variables:
60: ! ts - timestepping solver
61: ! A - Jacobian matrix context
62: ! u_local - local vector
63: ! u - approximate solution vector
64: ! ftime - final time
65: ! time_total_max - default total max time
66: ! time_steps_max - default max timesteps
67: !
68: ! Note that vectors are declared as PETSc "Vec" objects. These vectors
69: ! are mathematical objects that contain more than just an array of
70: ! double precision numbers. I.e., vectors in PETSc are not just
71: ! double precision x(*).
72: ! However, local vector data can be easily accessed via VecGetArray().
73: ! See the Fortran section of the PETSc users manual for details.
75: TS ts
76: Vec u
77: Mat A
78: PetscTruth flg,monitor
79: PetscErrorCode ierr
80: PetscInt time_steps_max,steps,i1
81: PetscReal dt,ftime,time_total_max
83: ! Note: Any user-defined Fortran routines (such as RHSFunction)
84: ! MUST be declared as external.
86: external MyMonitor,RHSFunction
87: external InitialConditions,RHSJacobian
89: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
90: ! Beginning of program
91: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
93: call PetscInitialize(PETSC_NULL_CHARACTER,ierr)
94: comm = PETSC_COMM_WORLD
95: call MPI_Comm_size(comm,size,ierr)
96: call MPI_Comm_rank(comm,rank,ierr)
98: ! Initialize problem parameters
100: time_steps_max = 1000
101: time_total_max = 100.0
102: m = 60
103: debug = 0
104: call PetscOptionsGetInt(PETSC_NULL_CHARACTER,'-m',M,flg,ierr)
105: call PetscOptionsHasName(PETSC_NULL_CHARACTER,'-debug',debug,ierr)
106: call PetscOptionsHasName(PETSC_NULL_CHARACTER,'-monitor',monitor, &
107: & ierr)
108: one_d0 = 1.0d0
109: two_d0 = 2.0d0
110: four_d0 = 4.0d0
! zero_d0 is referenced in RHSJacobian() below, so initialize it here too
zero_d0 = 0.0d0
111: h = one_d0/(m-one_d0)
113: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
114: ! Create vector data structures
115: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
117: ! Create distributed array (DA) to manage parallel grid and vectors
118: ! Set up the ghost point communication pattern. There are m total
119: ! grid values spread equally among all the processors.
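!  (The two i1 arguments below give one degree of freedom per node and a
!  stencil width of 1, so each processor's ghosted local vector holds its
!  owned grid values plus one ghost value from each neighboring processor.)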
120: i1 = 1
121: call DACreate1d(PETSC_COMM_WORLD,DA_NONPERIODIC,m,i1,i1, &
122: & PETSC_NULL_INTEGER,da,ierr)
124: ! Extract global and local vectors from DA; then duplicate for remaining
125: ! vectors that are the same types.
127: call DACreateGlobalVector(da,u,ierr)
128: call DACreateLocalVector(da,u_local,ierr)
130: ! Make local work vector for evaluating right-hand-side function
131: call VecDuplicate(u_local,localwork,ierr)
133: ! Make global work vector for storing exact solution
134: call VecDuplicate(u,solution,ierr)
136: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
137: ! Create timestepping solver context; set callback routine for
138: ! right-hand-side function evaluation.
139: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
141: call TSCreate(comm,ts,ierr)
142: call TSSetProblemType(ts,TS_NONLINEAR,ierr)
143: call TSSetRHSFunction(ts,RHSFunction,PETSC_NULL_OBJECT,ierr)
145: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
146: ! Set optional user-defined monitoring routine
147: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
150: if (monitor .eq. 1) then
151: call TSMonitorSet(ts,MyMonitor,PETSC_NULL_OBJECT, &
152: & PETSC_NULL_FUNCTION,ierr)
153: endif
155: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
156: ! For nonlinear problems, the user can provide a Jacobian evaluation
157: ! routine (or use a finite differencing approximation).
158: !
159: ! Create matrix data structure; set Jacobian evaluation routine.
160: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
162: call MatCreate(comm,A,ierr)
163: call MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,m,m,ierr)
164: call MatSetFromOptions(A,ierr)
165: call PetscOptionsHasName(PETSC_NULL_CHARACTER,'-fdjac',flg,ierr)
166: if (flg .eq. 1) then
167: call SetCRoutineFromFortran(ts,A,A,ierr)
168: else
169: call TSSetRHSJacobian(ts,A,A,RHSJacobian,PETSC_NULL_OBJECT, &
170: & ierr)
171: endif
173: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
174: ! Set solution vector and initial timestep
175: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
177: dt = h/two_d0
178: call TSSetInitialTimeStep(ts,0.d0,dt,ierr)
179: call TSSetSolution(ts,u,ierr)
181: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
182: ! Customize timestepping solver:
183: ! - Set the solution method to be the Backward Euler method.
184: ! - Set timestepping duration info
185: ! Then set runtime options, which can override these defaults.
186: ! For example,
187: ! -ts_max_steps <maxsteps> -ts_max_time <maxtime>
188: ! to override the defaults set by TSSetDuration().
189: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
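!
!  For reference, the backward Euler update applied by TS_BEULER is
!      u^{n+1} = u^n + dt * F(t^{n+1}, u^{n+1}),
!  which requires a nonlinear solve at each timestep (hence the Jacobian
!  set up above).  An illustrative run overriding the duration defaults
!  (launcher and executable name may differ on your system) is
!      mpiexec -n 2 ./ex2f -ts_max_steps 50 -ts_max_time 10.0 -monitor
!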
191: call TSSetType(ts,TS_BEULER,ierr)
192: call TSSetDuration(ts,time_steps_max,time_total_max,ierr)
193: call TSSetFromOptions(ts,ierr)
195: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
196: ! Solve the problem
197: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
199: ! Evaluate initial conditions
201: call InitialConditions(u)
203: ! Run the timestepping solver
205: call TSStep(ts,steps,ftime,ierr)
207: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
208: ! Free work space. All PETSc objects should be destroyed when they
209: ! are no longer needed.
210: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
212: call TSDestroy(ts,ierr)
213: call VecDestroy(localwork,ierr)
214: call VecDestroy(solution,ierr)
215: call VecDestroy(u_local,ierr)
216: call VecDestroy(u,ierr)
217: call DADestroy(da,ierr)
218: call MatDestroy(A,ierr)
220: ! Always call PetscFinalize() before exiting a program. This routine
221: ! - finalizes the PETSc libraries as well as MPI
222: ! - provides summary and diagnostic information if certain runtime
223: ! options are chosen (e.g., -log_summary).
225: call PetscFinalize(ierr)
226: end
228: ! ------------------------------------------------------------------------
229: !
230: ! InitialConditions - Computes the solution at the initial time.
231: !
232: ! Input Parameters:
233: ! u - uninitialized solution vector (global)
234: ! (problem data is passed via the common blocks in ex2f.h, not an appctx)
235: !
236: ! Output Parameter:
237: ! u - vector with solution at initial time (global)
238: !
239: subroutine InitialConditions(u)
240: implicit none
241: #include "ex2f.h"
243: ! Input/output parameters:
244: Vec u
246: ! Local variables:
247: PetscScalar localptr(1),x
248: PetscInt i,mybase,myend
249: PetscErrorCode ierr
250: PetscOffset idx
252: ! Determine starting and ending point of each processor's range of
253: ! grid values. Note that we shift by 1 to convert from the 0-based
254: ! C convention of starting indices to the 1-based Fortran convention.
256: call VecGetOwnershipRange(u,mybase,myend,ierr)
257: mybase = mybase + 1
259: ! Get a pointer to vector data.
260: ! - For default PETSc vectors, VecGetArray() returns a pointer to
261: ! the data array. Otherwise, the routine is implementation dependent.
262: ! - You MUST call VecRestoreArray() when you no longer need access to
263: ! the array.
264: ! - Note that the Fortran interface to VecGetArray() differs from the
265: ! C version. See the users manual for details.
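!
!  Sketch of the Fortran calling sequence used here (names are generic):
!      call VecGetArray(v,array,offset,ierr)
!      ... array(offset+j) is the j-th locally owned entry, j = 1,2,... ...
!      call VecRestoreArray(v,array,offset,ierr)
!  This is why the loop below indexes localptr(i-mybase+idx+1).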
267: call VecGetArray(u,localptr,idx,ierr)
269: ! We initialize the solution array by simply writing the solution
270: ! directly into the array locations. Alternatively, we could use
271: ! VecSetValues() or VecSetValuesLocal().
273: do 10, i=mybase,myend
274: ! x - current location in global grid
275: x = h*(i-1)
276: localptr(i-mybase+idx+1) = one_d0 + x*x
277: 10 continue
279: ! Restore vector
281: call VecRestoreArray(u,localptr,idx,ierr)
283: ! Print debugging information if desired
284: if (debug .eq. 1) then
285: if (rank .eq. 0) write(6,*) 'initial guess vector'
286: call VecView(u,PETSC_VIEWER_STDOUT_WORLD,ierr)
287: endif
289: return
290: end
292: ! ------------------------------------------------------------------------
293: !
294: ! ExactSolution - Computes the exact solution at a given time.
295: !
296: ! Input Parameters:
297: ! t - current time
298: ! (problem data is passed via the common blocks in ex2f.h, not an appctx)
299: !
300: ! Output Parameter:
301: ! ierr - error code
302: !
303: ! Note: The solution vector is stored in the common block!
304: !
305: subroutine ExactSolution(t,ierr)
306: implicit none
307: #include "ex2f.h"
309: ! Input/output parameters:
310: PetscReal t
311: PetscScalar x
312: PetscErrorCode ierr
314: ! Local variables:
315: PetscScalar localptr(1)
316: PetscInt i,mybase,myend
317: PetscOffset idx
319: ! Determine starting and ending point of each processor's range of
320: ! grid values. Note that we shift by 1 to convert from the 0-based
321: ! C convention of starting indices to the 1-based Fortran convention.
323: call VecGetOwnershipRange(solution,mybase,myend,ierr)
324: mybase = mybase + 1
326: ! Get a pointer to vector data
327: call VecGetArray(solution,localptr,idx,ierr)
329: ! Simply write the solution directly into the array locations
330: ! Alternatively, we could use VecSetValues() or VecSetValuesLocal().
332: do 10, i=mybase,myend
333: ! x - current location in global grid
334: x = h*(i-1)
335: localptr(i-mybase+idx+1) = (t + one_d0)*(one_d0 + x*x)
336: 10 continue
338: ! Restore vector
340: call VecRestoreArray(solution,localptr,idx,ierr)
342: return
343: end
345: ! ------------------------------------------------------------------------
346: !
347: ! MyMonitor - A user-provided routine to monitor the solution computed at
348: ! each time-step. This example plots the solution and computes the
349: ! error in two different norms.
350: !
351: ! Input Parameters:
352: ! ts - the time-step context
353: ! step - the count of the current step (with 0 meaning the
354: ! initial condition)
355: ! time - the current time
356: ! u - the solution at this timestep
357: ! dummy - optional user-provided context for this monitoring
358: ! routine (not used here)
359: !
360: subroutine MyMonitor(ts,step,time,u,dummy,ierr)
361: implicit none
362: #include "ex2f.h"
364: ! Input/output parameters:
365: TS ts
366: PetscInt step
367: PetscObject dummy
368: PetscReal time
369: Vec u
370: PetscErrorCode ierr
372: ! Local variables:
373: PetscReal en2,en2s,enmax
374: PetscScalar mone
375: PetscDraw draw
377: mone = -1.0d0
379: ! We use the default X windows viewer
380: ! PETSC_VIEWER_DRAW_WORLD
381: ! that is associated with the PETSC_COMM_WORLD communicator. This
382: ! saves the effort of calling PetscViewerDrawOpen() to create the window.
383: ! Note that if we wished to plot several items in separate windows we
384: ! would create each viewer with PetscViewerDrawOpen() and store them in
385: ! the application context, appctx.
386: !
387: ! Double buffering makes graphics look better.
389: call PetscViewerDrawGetDraw(PETSC_VIEWER_DRAW_WORLD,0,draw,ierr)
390: call PetscDrawSetDoubleBuffer(draw,ierr)
391: call VecView(u,PETSC_VIEWER_DRAW_WORLD,ierr)
393: ! Compute the exact solution at this time-step. Note that the
394: ! solution vector is passed via the user-defined common block.
395: call ExactSolution(time,ierr)
397: ! Print debugging information if desired
398: if (debug .eq. 1) then
399: if (rank .eq. 0) write(6,*) 'Computed solution vector'
400: call VecView(u,PETSC_VIEWER_STDOUT_WORLD,ierr)
401: if (rank .eq. 0) write(6,*) 'Exact solution vector'
402: call VecView(solution,PETSC_VIEWER_STDOUT_WORLD,ierr)
403: endif
405: ! Compute the 2-norm and max-norm of the error
406: call VecAXPY(solution,mone,u,ierr)
407: call VecNorm(solution,NORM_2,en2,ierr)
409: ! Scale the 2-norm by sqrt(h) so it approximates the continuous L2 norm
410: en2s = dsqrt(h)*en2
411: call VecNorm(solution,NORM_MAX,enmax,ierr)
413: ! Print only from processor 0
414: if (rank .eq. 0) write(6,100) step,time,en2s,enmax
415: 100 format('Timestep = ',i5,', time = ',f8.3, &
416: & ' sec, error [2-norm] = ',e9.3, &
417: & ', error [max-norm] = ',e9.3)
419: ! Print debugging information if desired
420: if (debug .eq. 1) then
421: if (rank .eq. 0) write(6,*) 'Error vector'
422: call VecView(solution,PETSC_VIEWER_STDOUT_WORLD,ierr)
423: endif
425: return
426: end
428: ! ------------------------------------------------------------------------
429: !
430: ! RHSFunction - User-provided routine that evaluates the RHS function
431: ! in the ODE. This routine is set in the main program by calling
432: ! TSSetRHSFunction(). We compute:
433: ! global_out = F(global_in)
434: !
435: ! Input Parameters:
436: ! ts - timestep context
437: ! t - current time
438: ! global_in - input vector to function
439: ! dummy - (optional) user-provided context for function evaluation
440: ! (not used here because we use a common block instead)
441: !
442: ! Output Parameter:
443: ! global_out - value of function
444: !
445: subroutine RHSFunction(ts,t,global_in,global_out,dummy)
446: implicit none
447: #include "ex2f.h"
449: ! Input/output parameters:
450: TS ts
451: PetscReal t
452: Vec global_in,global_out
453: integer dummy
455: ! Local variables:
456: PetscScalar localptr(1),copyptr(1),sc
457: PetscErrorCode ierr
458: PetscInt i,localsize
459: PetscOffset idx_c,idx_l,il
461: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
462: ! Get ready for local function computations
463: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
465: ! Scatter ghost points to local vector, using the 2-step process
466: ! DAGlobalToLocalBegin(), DAGlobalToLocalEnd().
467: ! By placing code between these two statements, computations can be
468: ! done while messages are in transition.
470: call DAGlobalToLocalBegin(da,global_in,INSERT_VALUES,u_local,ierr)
471: call DAGlobalToLocalEnd(da,global_in,INSERT_VALUES,u_local,ierr)
473: ! Access directly the values in our local INPUT work vector
474: call VecGetArray(u_local,localptr,idx_l,ierr)
476: ! Access directly the values in our local OUTPUT work vector
477: call VecGetArray(localwork,copyptr,idx_c,ierr)
479: sc = one_d0/(h*h*two_d0*(one_d0+t)*(one_d0+t))
481: ! Evaluate our function on the nodes owned by this processor
482: call VecGetLocalSize(u_local,localsize,ierr)
484: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
485: ! Compute entries for the locally owned part
486: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
488: ! Handle boundary conditions: This is done by using the boundary condition
489: ! u(t,boundary) = g(t,boundary)
490: ! for some function g. Now take the derivative with respect to t to obtain
491: ! u_{t}(t,boundary) = g_{t}(t,boundary)
492: !
493: ! In our case, u(t,0) = t + 1, so that u_{t}(t,0) = 1
494: ! and u(t,1) = 2*t + 2, so that u_{t}(t,1) = 2
496: if (rank .eq. 0) copyptr(1+idx_c) = one_d0
497: if (rank .eq. size-1) copyptr(localsize+idx_c) = two_d0
499: ! Handle the interior nodes where the PDE is replaced by finite
500: ! difference operators.
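!
!  Sketch of the discretization used below: with the centered difference
!      u_xx(x_i) ~ (u_{i+1} - 2*u_i + u_{i-1}) / h^2
!  the right-hand side at an interior node i becomes
!      F_i = u_i * (u_{i+1} + u_{i-1} - 2*u_i) / (2*h^2*(1+t)^2)
!          = sc * u_i * (u_{i+1} + u_{i-1} - 2*u_i)
!  with sc = 1/(2*h^2*(1+t)^2) as computed above.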
502: do 10, i=2,localsize-1
503: il = i + idx_l
504: copyptr(i+idx_c) = localptr(il) * sc * &
505: & (localptr(il+1) + localptr(il-1) - two_d0*localptr(il))
506: 10 continue
508: call VecRestoreArray(u_local,localptr,idx_l,ierr)
509: call VecRestoreArray(localwork,copyptr,idx_c,ierr)
511: ! Return the values from our local OUTPUT vector into our global
512: ! output vector.
514: call DALocalToGlobal(da,localwork,INSERT_VALUES,global_out,ierr)
516: ! Print debugging information if desired
517: if (debug .eq. 1) then
518: if (rank .eq. 0) write(6,*) 'RHS function vector'
519: call VecView(global_out,PETSC_VIEWER_STDOUT_WORLD,ierr)
520: endif
522: return
523: end
525: ! ------------------------------------------------------------------------
526: !
527: ! RHSJacobian - User-provided routine to compute the Jacobian of the
528: ! right-hand-side function.
529: !
530: ! Input Parameters:
531: ! ts - the TS context
532: ! t - current time
533: ! global_in - global input vector
534: ! dummy - optional user-defined context, as set by TSSetRHSJacobian()
535: !
536: ! Output Parameters:
537: ! A - Jacobian matrix
538: ! B - optionally different preconditioning matrix
539: ! str - flag indicating matrix structure
540: !
541: ! Notes:
542: ! RHSJacobian computes entries for the locally owned part of the Jacobian.
543: ! - Currently, all PETSc parallel matrix formats are partitioned by
544: ! contiguous chunks of rows across the processors. The global row
545: ! index i used in the loop below specifies the row number
546: ! corresponding to each locally owned grid point.
547: ! - Each processor needs to insert only elements that it owns
548: ! locally (but any non-local elements will be sent to the
549: ! appropriate processor during matrix assembly).
550: ! - Always specify global row and columns of matrix entries.
551: ! - Here, we set all entries for a particular row at once.
552: ! - Note that MatSetValues() uses 0-based row and column numbers
553: ! in Fortran as well as in C.
555: subroutine RHSJacobian(ts,t,global_in,A,B,str,dummy)
556: implicit none
557: #include "ex2f.h"
559: ! Input/output parameters:
560: TS ts
561: PetscReal t
562: Vec global_in
563: Mat A,B
564: MatStructure str
565: integer dummy
567: ! Local variables:
568: PetscScalar localptr(1),sc,v(3)
569: PetscInt i,col(3),i1,i3
570: PetscErrorCode ierr
571: PetscInt mstart(1),mend(1)
572: PetscInt mstarts,mends
573: PetscOffset idx,is
575: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
576: ! Get ready for local Jacobian computations
577: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
579: ! Scatter ghost points to local vector, using the 2-step process
580: ! DAGlobalToLocalBegin(), DAGlobalToLocalEnd().
581: ! By placing code between these two statements, computations can be
582: ! done while messages are in transition.
584: call DAGlobalToLocalBegin(da,global_in,INSERT_VALUES,u_local,ierr)
585: call DAGlobalToLocalEnd(da,global_in,INSERT_VALUES,u_local,ierr)
587: ! Get pointer to vector data
588: call VecGetArray(u_local,localptr,idx,ierr)
590: ! Get starting and ending locally owned rows of the matrix
591: call MatGetOwnershipRange(A,mstarts,mends,ierr)
592: mstart(1) = mstarts
593: mend(1) = mends
595: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
596: ! Compute entries for the locally owned part of the Jacobian.
597: ! - Currently, all PETSc parallel matrix formats are partitioned by
598: ! contiguous chunks of rows across the processors.
599: ! - Each processor needs to insert only elements that it owns
600: ! locally (but any non-local elements will be sent to the
601: ! appropriate processor during matrix assembly).
602: ! - Here, we set all entries for a particular row at once.
603: ! - We can set matrix entries using either
604: ! MatSetValuesLocal() or MatSetValues().
605: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
607: ! Set matrix rows corresponding to boundary data
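!  (The boundary equations u_t = 1 and u_t = 2 do not depend on u, so these
!  Jacobian rows are zero; an explicit zero is inserted on the diagonal so
!  that the location is present in the nonzero structure.)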
608: i1 = 1
609: i3 = 3
610: if (mstart(1) .eq. 0) then
611: v(1) = zero_d0
612: call MatSetValues(A,i1,mstart,i1,mstart,v,INSERT_VALUES,ierr)
613: mstart(1) = mstart(1) + 1
614: endif
615: if (mend(1) .eq. m) then
616: mend(1) = mend(1) - 1
617: v(1) = zero_d0
618: call MatSetValues(A,i1,mend,i1,mend,v,INSERT_VALUES,ierr)
619: endif
621: ! Set matrix rows corresponding to interior data.
622: ! We construct the matrix one row at a time.
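!
!  For the interior rows, F_i = sc*u_i*(u_{i+1} + u_{i-1} - 2*u_i), so the
!  nonzero Jacobian entries in row i are
!      dF_i/du_{i-1} = sc*u_i
!      dF_i/du_i     = sc*(u_{i+1} + u_{i-1} - 4*u_i)
!      dF_i/du_{i+1} = sc*u_i
!  which are the three values v(1), v(2), v(3) set below.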
624: sc = one_d0/(h*h*two_d0*(one_d0+t)*(one_d0+t))
625: do 10, i=mstart(1),mend(1)-1
626: col(1) = i-1
627: col(2) = i
628: col(3) = i+1
629: is = i - mstart(1) + 1 + idx + 1
630: v(1) = sc*localptr(is)
631: v(2) = sc*(localptr(is+1) + localptr(is-1) - &
632: & four_d0*localptr(is))
633: v(3) = sc*localptr(is)
634: call MatSetValues(A,i1,i,i3,col,v,INSERT_VALUES,ierr)
635: 10 continue
637: ! Restore vector
638: call VecRestoreArray(u_local,localptr,idx,ierr)
640: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
641: ! Complete the matrix assembly process and set some options
642: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
644: ! Assemble matrix, using the 2-step process:
645: ! MatAssemblyBegin(), MatAssemblyEnd()
646: ! Computations can be done while messages are in transition,
647: ! by placing code between these two statements.
649: call MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY,ierr)
650: call MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY,ierr)
652: ! Set flag to indicate that the Jacobian matrix retains an identical
653: ! nonzero structure throughout all timestepping iterations (although the
654: ! values of the entries change). Thus, we can save some work in setting
655: ! up the preconditioner (e.g., no need to redo symbolic factorization for
656: ! ILU/ICC preconditioners).
657: ! - If the nonzero structure of the matrix is different during
658: ! successive linear solves, then the flag DIFFERENT_NONZERO_PATTERN
659: ! must be used instead. If you are unsure whether the matrix
660: ! structure has changed or not, use the flag DIFFERENT_NONZERO_PATTERN.
661: ! - Caution: If you specify SAME_NONZERO_PATTERN, PETSc
662: ! believes your assertion and does not check the structure
663: ! of the matrix. If you erroneously claim that the structure
664: ! is the same when it actually is not, the new preconditioner
665: ! will not function correctly. Thus, use this optimization
666: ! feature with caution!
668: str = SAME_NONZERO_PATTERN
670: ! Set an option to indicate that we will never add a new nonzero location
671: ! to the matrix. If we do, it will generate an error.
673: call MatSetOption(A,MAT_NEW_NONZERO_LOCATION_ERR,ierr)
675: return
676: end