Actual source code: ex4.c
/* Program usage:  mpirun -np <procs> ex4 [-help] [all PETSc options] */

static char help[] = "Solves a simple time-dependent linear PDE (the heat equation).\n\
Input parameters include:\n\
  -m <points>, where <points> = number of grid points\n\
  -time_dependent_rhs : Treat the problem as having a time-dependent right-hand side\n\
  -debug              : Activate debugging printouts\n\
  -nox                : Deactivate x-window graphics\n\n";
/*
   Concepts: TS^time-dependent linear problems
   Concepts: TS^heat equation
   Concepts: TS^diffusion equation
   Processors: n
*/
/* ------------------------------------------------------------------------

   This program solves the one-dimensional heat equation (also called the
   diffusion equation),
       u_t = u_xx,
   on the domain 0 <= x <= 1, with the boundary conditions
       u(t,0) = 0, u(t,1) = 0,
   and the initial condition
       u(0,x) = sin(6*pi*x) + 3*sin(2*pi*x).
   This is a linear, second-order, parabolic equation.

   We discretize the right-hand side using finite differences with
   uniform grid spacing h:
       u_xx = (u_{i+1} - 2u_{i} + u_{i-1})/(h^2)
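   Collecting these stencils into a matrix (the same matrix assembled in
   RHSMatrixHeat() below) gives the semi-discrete linear system
       u_t = A u,
   where A is tridiagonal with interior rows (1, -2, 1)/h^2.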
   We then demonstrate time evolution using the various TS methods by
   running the program via
       mpirun -np <procs> ex4 -ts_type <timestepping solver>
   We compare the approximate solution with the exact solution, given by
       u_exact(x,t) = exp(-36*pi*pi*t) * sin(6*pi*x) +
                      3*exp(-4*pi*pi*t) * sin(2*pi*x)

   Notes:
   This code demonstrates the TS solver interface to two variants of
   linear problems, u_t = f(u,t), namely
     - time-dependent f:   f(u,t) is a function of t
     - time-independent f: f(u,t) is simply f(u)

   The uniprocessor version of this code is ts/examples/tutorials/ex3.c

  ------------------------------------------------------------------------- */
/*
   Include "petscda.h" so that we can use distributed arrays (DAs) to manage
   the parallel grid.  Include "petscts.h" so that we can use TS solvers.
   Note that this file automatically includes:
     petsc.h       - base PETSc routines   petscvec.h  - vectors
     petscsys.h    - system routines       petscmat.h  - matrices
     petscis.h     - index sets            petscksp.h  - Krylov subspace methods
     petscviewer.h - viewers               petscpc.h   - preconditioners
     petscksp.h    - linear solvers        petscsnes.h - nonlinear solvers
*/
#include "petscda.h"
#include "petscts.h"
/*
   User-defined application context - contains data needed by the
   application-provided call-back routines.
*/
typedef struct {
  MPI_Comm    comm;              /* communicator */
  DA          da;                /* distributed array data structure */
  Vec         localwork;         /* local ghosted work vector */
  Vec         u_local;           /* local ghosted approximate solution vector */
  Vec         solution;          /* global exact solution vector */
  PetscInt    m;                 /* total number of grid points */
  PetscReal   h;                 /* mesh width h = 1/(m-1) */
  PetscTruth  debug;             /* flag (1 indicates activation of debugging printouts) */
  PetscViewer viewer1,viewer2;   /* viewers for the solution and error */
  PetscReal   norm_2,norm_max;   /* error norms */
} AppCtx;
/*
   User-defined routines
*/
extern PetscErrorCode InitialConditions(Vec,AppCtx*);
extern PetscErrorCode RHSMatrixHeat(TS,PetscReal,Mat*,Mat*,MatStructure*,void*);
extern PetscErrorCode Monitor(TS,PetscInt,PetscReal,Vec,void*);
extern PetscErrorCode ExactSolution(PetscReal,Vec,AppCtx*);

int main(int argc,char **argv)
{
  AppCtx      appctx;                 /* user-defined application context */
  TS          ts;                     /* timestepping context */
  Mat         A;                      /* matrix data structure */
  Vec         u;                      /* approximate solution vector */
  PetscReal   time_total_max = 100.0; /* default max total time */
  PetscInt    time_steps_max = 100;   /* default max timesteps */
  PetscDraw   draw;                   /* drawing context */
  PetscInt    steps,m;
  PetscMPIInt size;
  PetscReal   dt,ftime;
  PetscTruth  flg;
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Initialize program and set problem parameters
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscInitialize(&argc,&argv,(char*)0,help);
  appctx.comm = PETSC_COMM_WORLD;

  m    = 60;
  PetscOptionsGetInt(PETSC_NULL,"-m",&m,PETSC_NULL);
  PetscOptionsHasName(PETSC_NULL,"-debug",&appctx.debug);
  appctx.m        = m;
  appctx.h        = 1.0/(m-1.0);
  appctx.norm_2   = 0.0;
  appctx.norm_max = 0.0;
  MPI_Comm_size(PETSC_COMM_WORLD,&size);
  PetscPrintf(PETSC_COMM_WORLD,"Solving a linear TS problem, number of processors = %d\n",size);
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create vector data structures
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Create distributed array (DA) to manage parallel grid and vectors
     and to set up the ghost point communication pattern.  There are M
     total grid values spread equally among all the processors.
  */
  DACreate1d(PETSC_COMM_WORLD,DA_NONPERIODIC,m,1,1,PETSC_NULL,&appctx.da);
  /*
     Extract global and local vectors from DA; we use these to store the
     approximate solution.  Then duplicate these for remaining vectors that
     have the same types.
  */
  DACreateGlobalVector(appctx.da,&u);
  DACreateLocalVector(appctx.da,&appctx.u_local);

  /*
     Create local work vector for use in evaluating right-hand-side function;
     create global work vector for storing exact solution.
  */
  VecDuplicate(appctx.u_local,&appctx.localwork);
  VecDuplicate(u,&appctx.solution);
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set up displays to show graphs of the solution and error
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscViewerDrawOpen(PETSC_COMM_WORLD,0,"",80,380,400,160,&appctx.viewer1);
  PetscViewerDrawGetDraw(appctx.viewer1,0,&draw);
  PetscDrawSetDoubleBuffer(draw);
  PetscViewerDrawOpen(PETSC_COMM_WORLD,0,"",80,0,400,160,&appctx.viewer2);
  PetscViewerDrawGetDraw(appctx.viewer2,0,&draw);
  PetscDrawSetDoubleBuffer(draw);
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create timestepping solver context
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  TSCreate(PETSC_COMM_WORLD,&ts);
  TSSetProblemType(ts,TS_LINEAR);
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set optional user-defined monitoring routine
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  TSMonitorSet(ts,Monitor,&appctx,PETSC_NULL);
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create matrix data structure; set matrix evaluation routine.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  MatCreate(PETSC_COMM_WORLD,&A);
  MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,m,m);
  MatSetFromOptions(A);

  PetscOptionsHasName(PETSC_NULL,"-time_dependent_rhs",&flg);
  if (flg) {
    /*
       For linear problems with a time-dependent f(u,t) in the equation
       u_t = f(u,t), the user provides the discretized right-hand-side
       as a time-dependent matrix.
    */
    TSSetRHSMatrix(ts,A,A,RHSMatrixHeat,&appctx);
  } else {
    /*
       For linear problems with a time-independent f(u) in the equation
       u_t = f(u), the user provides the discretized right-hand-side
       as a matrix only once, and then sets a null matrix evaluation
       routine.
    */
    MatStructure A_structure;
    RHSMatrixHeat(ts,0.0,&A,&A,&A_structure,&appctx);
    TSSetRHSMatrix(ts,A,A,PETSC_NULL,&appctx);
  }
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set solution vector and initial timestep
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
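  /*
     Note: h^2/2 is the classical explicit (forward) Euler stability limit
     for u_t = u_xx, so it is a conservative default initial step for
     whichever TS method is selected at runtime.
  */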
  dt = appctx.h*appctx.h/2.0;
  TSSetInitialTimeStep(ts,0.0,dt);
  TSSetSolution(ts,u);
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Customize timestepping solver:
       - Set the solution method to be the Backward Euler method.
       - Set timestepping duration info
     Then set runtime options, which can override these defaults.
     For example,
         -ts_max_steps <maxsteps> -ts_max_time <maxtime>
     to override the defaults set by TSSetDuration().
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  TSSetDuration(ts,time_steps_max,time_total_max);
  TSSetFromOptions(ts);
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Solve the problem
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Evaluate initial conditions
  */
  InitialConditions(u,&appctx);

  /*
     Run the timestepping solver
  */
  TSStep(ts,&steps,&ftime);
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     View timestepping solver info
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscPrintf(PETSC_COMM_WORLD,"avg. error (2 norm) = %G, avg. error (max norm) = %G\n",
              appctx.norm_2/steps,appctx.norm_max/steps);
  TSView(ts,PETSC_VIEWER_STDOUT_WORLD);
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  TSDestroy(ts);
  MatDestroy(A);
  VecDestroy(u);
  PetscViewerDestroy(appctx.viewer1);
  PetscViewerDestroy(appctx.viewer2);
  VecDestroy(appctx.localwork);
  VecDestroy(appctx.solution);
  VecDestroy(appctx.u_local);
  DADestroy(appctx.da);
  /*
     Always call PetscFinalize() before exiting a program.  This routine
       - finalizes the PETSc libraries as well as MPI
       - provides summary and diagnostic information if certain runtime
         options are chosen (e.g., -log_summary).
  */
  PetscFinalize();
  return 0;
}
/* --------------------------------------------------------------------- */
/*
   InitialConditions - Computes the solution at the initial time.

   Input Parameters:
   u - uninitialized solution vector (global)
   appctx - user-defined application context

   Output Parameter:
   u - vector with solution at initial time (global)
*/
PetscErrorCode InitialConditions(Vec u,AppCtx *appctx)
{
  PetscScalar *u_localptr,h = appctx->h;
  PetscInt    i,mybase,myend;
  /*
     Determine starting point of each processor's range of
     grid values.
  */
  VecGetOwnershipRange(u,&mybase,&myend);

  /*
     Get a pointer to vector data.
     - For default PETSc vectors, VecGetArray() returns a pointer to
       the data array.  Otherwise, the routine is implementation dependent.
     - You MUST call VecRestoreArray() when you no longer need access to
       the array.
     - Note that the Fortran interface to VecGetArray() differs from the
       C version.  See the users manual for details.
  */
  VecGetArray(u,&u_localptr);
  /*
     We initialize the solution array by simply writing the solution
     directly into the array locations.  Alternatively, we could use
     VecSetValues() or VecSetValuesLocal().
  */
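  /*
     Each process owns the global entries [mybase,myend); u_localptr is
     indexed from 0, hence the i-mybase offset, while x_i = i*h is the global
     grid coordinate used in u(0,x) = sin(6*pi*x) + 3*sin(2*pi*x).
  */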
  for (i=mybase; i<myend; i++) {
    u_localptr[i-mybase] = PetscSinScalar(PETSC_PI*i*6.*h) + 3.*PetscSinScalar(PETSC_PI*i*2.*h);
  }
  /*
     Restore vector
  */
  VecRestoreArray(u,&u_localptr);

  /*
     Print debugging information if desired
  */
  if (appctx->debug) {
    PetscPrintf(appctx->comm,"initial guess vector\n");
    VecView(u,PETSC_VIEWER_STDOUT_WORLD);
  }

  return 0;
}
/* --------------------------------------------------------------------- */
/*
   ExactSolution - Computes the exact solution at a given time.

   Input Parameters:
   t - current time
   solution - vector in which exact solution will be computed
   appctx - user-defined application context

   Output Parameter:
   solution - vector with the newly computed exact solution
*/
PetscErrorCode ExactSolution(PetscReal t,Vec solution,AppCtx *appctx)
{
  PetscScalar *s_localptr,h = appctx->h,ex1,ex2,sc1,sc2;
  PetscInt    i,mybase,myend;

  /*
     Determine starting and ending points of each processor's
     range of grid values
  */
  VecGetOwnershipRange(solution,&mybase,&myend);

  /*
     Get a pointer to vector data.
  */
  VecGetArray(solution,&s_localptr);
  /*
     Simply write the solution directly into the array locations.
     Alternatively, we could use VecSetValues() or VecSetValuesLocal().
  */
  ex1 = exp(-36.*PETSC_PI*PETSC_PI*t); ex2 = exp(-4.*PETSC_PI*PETSC_PI*t);
  sc1 = PETSC_PI*6.*h;                 sc2 = PETSC_PI*2.*h;
  for (i=mybase; i<myend; i++) {
    s_localptr[i-mybase] = PetscSinScalar(sc1*(PetscReal)i)*ex1 + 3.*PetscSinScalar(sc2*(PetscReal)i)*ex2;
  }
  /*
     Restore vector
  */
  VecRestoreArray(solution,&s_localptr);
  return 0;
}
/* --------------------------------------------------------------------- */
/*
   Monitor - User-provided routine to monitor the solution computed at
   each timestep.  This example plots the solution and computes the
   error in two different norms.

   Input Parameters:
   ts   - the timestep context
   step - the count of the current step (with 0 meaning the
          initial condition)
   time - the current time
   u    - the solution at this timestep
   ctx  - the user-provided context for this monitoring routine.
          In this case we use the application context which contains
          information about the problem size, workspace and the exact
          solution.
*/
PetscErrorCode Monitor(TS ts,PetscInt step,PetscReal time,Vec u,void *ctx)
{
  AppCtx    *appctx = (AppCtx*) ctx;   /* user-defined application context */
  PetscReal norm_2,norm_max;

  /*
     View a graph of the current iterate
  */
  VecView(u,appctx->viewer2);

  /*
     Compute the exact solution
  */
  ExactSolution(time,appctx->solution,appctx);

  /*
     Print debugging information if desired
  */
  if (appctx->debug) {
    PetscPrintf(appctx->comm,"Computed solution vector\n");
    VecView(u,PETSC_VIEWER_STDOUT_WORLD);
    PetscPrintf(appctx->comm,"Exact solution vector\n");
    VecView(appctx->solution,PETSC_VIEWER_STDOUT_WORLD);
  }
  /*
     Compute the 2-norm and max-norm of the error
  */
  VecAXPY(appctx->solution,-1.0,u);
  VecNorm(appctx->solution,NORM_2,&norm_2);
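  /*
     Scale the discrete 2-norm by sqrt(h) so that it approximates the
     continuous L2 norm of the error on the interval [0,1].
  */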
  norm_2 = sqrt(appctx->h)*norm_2;
  VecNorm(appctx->solution,NORM_MAX,&norm_max);

  /*
     PetscPrintf() causes only the first processor in this
     communicator to print the timestep information.
  */
  PetscPrintf(appctx->comm,"Timestep %D: time = %G, 2-norm error = %G, max norm error = %G\n",
              step,time,norm_2,norm_max);
  appctx->norm_2   += norm_2;
  appctx->norm_max += norm_max;
  /*
     View a graph of the error
  */
  VecView(appctx->solution,appctx->viewer1);

  /*
     Print debugging information if desired
  */
  if (appctx->debug) {
    PetscPrintf(appctx->comm,"Error vector\n");
    VecView(appctx->solution,PETSC_VIEWER_STDOUT_WORLD);
  }

  return 0;
}
/* --------------------------------------------------------------------- */
/*
   RHSMatrixHeat - User-provided routine to compute the right-hand-side
   matrix for the heat equation.

   Input Parameters:
   ts  - the TS context
   t   - current time
   ctx - optional user-defined context, as set by TSSetRHSMatrix()

   Output Parameters:
   AA  - Jacobian matrix
   BB  - optionally different preconditioning matrix
   str - flag indicating matrix structure
   Notes:
   RHSMatrixHeat computes entries for the locally owned part of the system.
    - Currently, all PETSc parallel matrix formats are partitioned by
      contiguous chunks of rows across the processors.
    - Each processor needs to insert only elements that it owns
      locally (but any non-local elements will be sent to the
      appropriate processor during matrix assembly).
    - Always specify global row and columns of matrix entries when
      using MatSetValues(); we could alternatively use MatSetValuesLocal().
    - Here, we set all entries for a particular row at once.
    - Note that MatSetValues() uses 0-based row and column numbers
      in Fortran as well as in C.
*/
PetscErrorCode RHSMatrixHeat(TS ts,PetscReal t,Mat *AA,Mat *BB,MatStructure *str,void *ctx)
{
  Mat         A       = *AA;             /* Jacobian matrix */
  AppCtx      *appctx = (AppCtx*)ctx;    /* user-defined application context */
  PetscInt    i,mstart,mend,idx[3];
  PetscScalar v[3],stwo = -2./(appctx->h*appctx->h),sone = -.5*stwo;
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Compute entries for the locally owned part of the matrix
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  MatGetOwnershipRange(A,&mstart,&mend);

  /*
     Set matrix rows corresponding to boundary data
  */
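  /*
     The two boundary rows are set to the identity, so the boundary unknowns
     evolve as du/dt = u; since their initial (and exact) values are zero,
     they remain zero, which enforces the homogeneous Dirichlet conditions.
  */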
  if (mstart == 0) {  /* first processor only */
    v[0] = 1.0;
    MatSetValues(A,1,&mstart,1,&mstart,v,INSERT_VALUES);
    mstart++;
  }

  if (mend == appctx->m) { /* last processor only */
    mend--;
    v[0] = 1.0;
    MatSetValues(A,1,&mend,1,&mend,v,INSERT_VALUES);
  }

  /*
     Set matrix rows corresponding to interior data.  We construct the
     matrix one row at a time.
  */
  v[0] = sone; v[1] = stwo; v[2] = sone;
  for (i=mstart; i<mend; i++) {
    idx[0] = i-1; idx[1] = i; idx[2] = i+1;
    MatSetValues(A,1,&i,3,idx,v,INSERT_VALUES);
  }
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Complete the matrix assembly process and set some options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Assemble matrix, using the 2-step process:
       MatAssemblyBegin(), MatAssemblyEnd()
     Computations can be done while messages are in transition
     by placing code between these two statements.
  */
  MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);
  /*
     Set flag to indicate that the Jacobian matrix retains an identical
     nonzero structure throughout all timestepping iterations (although the
     values of the entries change).  Thus, we can save some work in setting
     up the preconditioner (e.g., no need to redo symbolic factorization for
     ILU/ICC preconditioners).
      - If the nonzero structure of the matrix is different during
        successive linear solves, then the flag DIFFERENT_NONZERO_PATTERN
        must be used instead.  If you are unsure whether the matrix
        structure has changed or not, use the flag DIFFERENT_NONZERO_PATTERN.
      - Caution:  If you specify SAME_NONZERO_PATTERN, PETSc
        believes your assertion and does not check the structure
        of the matrix.  If you erroneously claim that the structure
        is the same when it actually is not, the new preconditioner
        will not function correctly.  Thus, use this optimization
        feature with caution!
  */
  *str = SAME_NONZERO_PATTERN;
  /*
     Set an option to indicate that we will never add a new nonzero location
     to the matrix.  If we do, it will generate an error.
  */
  MatSetOption(A,MAT_NEW_NONZERO_LOCATION_ERR);
  return 0;
}
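
/*
   Example invocations (illustrative only; the option names are those
   referenced above, and "euler"/"beuler" are the standard explicit and
   backward Euler TS type names):

     mpirun -np 2 ex4 -ts_type beuler -ts_max_steps 50
     mpirun -np 4 ex4 -m 120 -ts_type euler -ts_max_time 1.0
     mpirun -np 2 ex4 -time_dependent_rhs -debug
*/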