/* Actual source code: ex96.c */

/* Help text printed by PetscInitialize() when run with -help;
   documents the command-line options this test understands. */
static char help[] ="Tests sequential and parallel DAGetMatrix(), MatMatMult() and MatPtAP()\n\
  -Mx <xg>, where <xg> = number of coarse grid points in the x-direction\n\
  -My <yg>, where <yg> = number of coarse grid points in the y-direction\n\
  -Mz <zg>, where <zg> = number of coarse grid points in the z-direction\n\
  -Npx <npx>, where <npx> = number of processors in the x-direction\n\
  -Npy <npy>, where <npy> = number of processors in the y-direction\n\
  -Npz <npz>, where <npz> = number of processors in the z-direction\n\n";

 10: /*  
 11:     This test is modified from ~src/ksp/examples/tests/ex19.c. 
 12:     Example of usage: mpirun -np 3 ex96 -Mx 10 -My 10 -Mz 10
 13: */

 #include "petscksp.h"
 #include "petscda.h"
 #include "petscmg.h"
 #include "src/mat/impls/aij/seq/aij.h"
 #include "src/mat/impls/aij/mpi/mpiaij.h"

 21: /* User-defined application contexts */
/* Per-level grid context: the distributed array describing one
   discretization level plus associated work objects.  Only mx/my/mz and
   da are exercised by the visible test code; the vectors and Jacobian
   appear to be carried over from the ksp example this test was derived
   from (see file header) -- NOTE(review): confirm before removing. */
typedef struct {
   PetscInt   mx,my,mz;         /* number grid points in x, y and z direction */
   Vec        localX,localF;    /* local vectors with ghost region */
   DA         da;               /* distributed array for this level */
   Vec        x,b,r;            /* global vectors */
   Mat        J;                /* Jacobian on grid */
} GridCtx;
/* Application context: a two-level (coarse/fine) grid hierarchy.
   The visible test uses fine, coarse and ratio; ksp_coarse and Ii are
   unused here (main builds its own interpolation matrix P). */
typedef struct {
   GridCtx     fine;            /* fine-level grid */
   GridCtx     coarse;          /* coarse-level grid */
   KSP         ksp_coarse;      /* coarse-grid solver (unused in this test) */
   PetscInt    ratio;           /* refinement ratio between the two levels */
   Mat         Ii;              /* interpolation from coarse to fine (unused in this test) */
} AppCtx;

/* Level indices (unused in the code visible here; kept for parity with
   the multigrid example this test was derived from). */
#define COARSE_LEVEL 0
#define FINE_LEVEL   1

/*
      Mm_ratio - ratio of grid lines between fine and coarse grids.
*/
 45: int main(int argc,char **argv)
 46: {
 48:   AppCtx         user;
 49:   PetscInt       Npx=PETSC_DECIDE,Npy=PETSC_DECIDE,Npz=PETSC_DECIDE;
 50:   PetscMPIInt    size,rank;
 51:   PetscInt       m,n,M,N,i,nrows,*ia,*ja;
 52:   PetscScalar    one = 1.0;
 53:   PetscReal      fill=2.0;
 54:   Mat            A,A_tmp,P,C,C1,C2;
 55:   PetscScalar    *array,none = -1.0,alpha;
 56:   Vec           x,v1,v2,v3,v4;
 57:   PetscReal     norm,norm_tmp,norm_tmp1,tol=1.e-12;
 58:   PetscRandom   rdm;
 59:   PetscTruth    Test_MatMatMult=PETSC_TRUE,Test_MatPtAP=PETSC_TRUE,Test_3D=PETSC_FALSE,flg;

 61:   PetscInitialize(&argc,&argv,PETSC_NULL,help);
 62:   PetscOptionsGetReal(PETSC_NULL,"-tol",&tol,PETSC_NULL);

 64:   user.ratio = 2;
 65:   user.coarse.mx = 2; user.coarse.my = 2; user.coarse.mz = 0;
 66:   PetscOptionsGetInt(PETSC_NULL,"-Mx",&user.coarse.mx,PETSC_NULL);
 67:   PetscOptionsGetInt(PETSC_NULL,"-My",&user.coarse.my,PETSC_NULL);
 68:   PetscOptionsGetInt(PETSC_NULL,"-Mz",&user.coarse.mz,PETSC_NULL);
 69:   PetscOptionsGetInt(PETSC_NULL,"-ratio",&user.ratio,PETSC_NULL);
 70:   if (user.coarse.mz) Test_3D = PETSC_TRUE;

 72:   user.fine.mx = user.ratio*(user.coarse.mx-1)+1;
 73:   user.fine.my = user.ratio*(user.coarse.my-1)+1;
 74:   user.fine.mz = user.ratio*(user.coarse.mz-1)+1;
 75:   MPI_Comm_size(PETSC_COMM_WORLD,&size);
 76:   MPI_Comm_rank(PETSC_COMM_WORLD,&rank);
 77:   PetscOptionsGetInt(PETSC_NULL,"-Npx",&Npx,PETSC_NULL);
 78:   PetscOptionsGetInt(PETSC_NULL,"-Npy",&Npy,PETSC_NULL);
 79:   PetscOptionsGetInt(PETSC_NULL,"-Npz",&Npz,PETSC_NULL);

 81:   /* Set up distributed array for fine grid */
 82:   if (!Test_3D){
 83:     DACreate2d(PETSC_COMM_WORLD,DA_NONPERIODIC,DA_STENCIL_STAR,user.fine.mx,
 84:                     user.fine.my,Npx,Npy,1,1,PETSC_NULL,PETSC_NULL,&user.fine.da);
 85:   } else {
 86:     DACreate3d(PETSC_COMM_WORLD,DA_NONPERIODIC,DA_STENCIL_STAR,
 87:                     user.fine.mx,user.fine.my,user.fine.mz,Npx,Npy,Npz,
 88:                     1,1,PETSC_NULL,PETSC_NULL,PETSC_NULL,&user.fine.da);
 89:   }

 91:   /* Test DAGetMatrix()                                         */
 92:   /*------------------------------------------------------------*/
 93:   DAGetMatrix(user.fine.da,MATAIJ,&A);
 94:   DAGetMatrix(user.fine.da,MATBAIJ,&C);
 95: 
 96:   MatConvert(C,MATAIJ,MAT_INITIAL_MATRIX,&A_tmp); /* not work for mpisbaij matrix! */
 97:   MatEqual(A,A_tmp,&flg);
 98:   if (!flg) {
 99:     SETERRQ(PETSC_ERR_ARG_NOTSAMETYPE,"A != C");
100:   }
101:   MatDestroy(C);
102:   MatDestroy(A_tmp);
103: 
104:   /*------------------------------------------------------------*/
105: 
106:   MatGetLocalSize(A,&m,&n);
107:   MatGetSize(A,&M,&N);
108:   /* set val=one to A */
109:   if (size == 1){
110:     MatGetRowIJ(A,0,PETSC_FALSE,&nrows,&ia,&ja,&flg);
111:     if (flg){
112:       MatGetArray(A,&array);
113:       for (i=0; i<ia[nrows]; i++) array[i] = one;
114:       MatRestoreArray(A,&array);
115:     }
116:     MatRestoreRowIJ(A,0,PETSC_FALSE,&nrows,&ia,&ja,&flg);
117:   } else {
118:     Mat_MPIAIJ *aij = (Mat_MPIAIJ*)A->data;
119:     Mat_SeqAIJ *a=(Mat_SeqAIJ*)(aij->A)->data, *b=(Mat_SeqAIJ*)(aij->B)->data;
120:     /* A_part */
121:     for (i=0; i<a->i[m]; i++) a->a[i] = one;
122:     /* B_part */
123:     for (i=0; i<b->i[m]; i++) b->a[i] = one;
124: 
125:   }
126:   /* MatView(A, PETSC_VIEWER_STDOUT_WORLD); */

128:   /* Set up distributed array for coarse grid */
129:   if (!Test_3D){
130:     DACreate2d(PETSC_COMM_WORLD,DA_NONPERIODIC,DA_STENCIL_STAR,user.coarse.mx,
131:                     user.coarse.my,Npx,Npy,1,1,PETSC_NULL,PETSC_NULL,&user.coarse.da);
132:   } else {
133:     DACreate3d(PETSC_COMM_WORLD,DA_NONPERIODIC,DA_STENCIL_STAR,
134:                     user.coarse.mx,user.coarse.my,user.coarse.mz,Npx,Npy,Npz,
135:                     1,1,PETSC_NULL,PETSC_NULL,PETSC_NULL,&user.coarse.da);
136:   }

138:   /* Create interpolation between the levels */
139:   DAGetInterpolation(user.coarse.da,user.fine.da,&P,PETSC_NULL);
140: 
141:   MatGetLocalSize(P,&m,&n);
142:   MatGetSize(P,&M,&N);

144:   /* Create vectors v1 and v2 that are compatible with A */
145:   VecCreate(PETSC_COMM_WORLD,&v1);
146:   MatGetLocalSize(A,&m,PETSC_NULL);
147:   VecSetSizes(v1,m,PETSC_DECIDE);
148:   VecSetFromOptions(v1);
149:   VecDuplicate(v1,&v2);
150:   PetscRandomCreate(PETSC_COMM_WORLD,&rdm);
151:   PetscRandomSetFromOptions(rdm);

153:   /* Test MatMatMult(): C = A*P */
154:   /*----------------------------*/
155:   if (Test_MatMatMult){
156:     MatDuplicate(A,MAT_COPY_VALUES,&A_tmp);
157:     MatMatMult(A_tmp,P,MAT_INITIAL_MATRIX,fill,&C);
158: 
159:     /* Test MAT_REUSE_MATRIX - reuse symbolic C */
160:     alpha=1.0;
161:     for (i=0; i<2; i++){
162:       alpha -=0.1;
163:       MatScale(A_tmp,alpha);
164:       MatMatMult(A_tmp,P,MAT_REUSE_MATRIX,fill,&C);
165:     }

167:     /* Test MatDuplicate()        */
168:     /*----------------------------*/
169:     MatDuplicate(C,MAT_COPY_VALUES,&C1);
170:     MatDuplicate(C1,MAT_COPY_VALUES,&C2);
171:     MatDestroy(C1);
172:     MatDestroy(C2);

174:     /* Create vector x that is compatible with P */
175:     VecCreate(PETSC_COMM_WORLD,&x);
176:     MatGetLocalSize(P,PETSC_NULL,&n);
177:     VecSetSizes(x,n,PETSC_DECIDE);
178:     VecSetFromOptions(x);

180:     norm = 0.0;
181:     for (i=0; i<10; i++) {
182:       VecSetRandom(x,rdm);
183:       MatMult(P,x,v1);
184:       MatMult(A_tmp,v1,v2);  /* v2 = A*P*x */
185:       MatMult(C,x,v1);       /* v1 = C*x   */
186:       VecAXPY(v1,none,v2);
187:       VecNorm(v1,NORM_1,&norm_tmp);
188:       VecNorm(v2,NORM_1,&norm_tmp1);
189:       norm_tmp /= norm_tmp1;
190:       if (norm_tmp > norm) norm = norm_tmp;
191:     }
192:     if (norm >= tol && !rank) {
193:       PetscPrintf(PETSC_COMM_SELF,"Error: MatMatMult(), |v1 - v2|/|v2|: %G\n",norm);
194:     }
195: 
196:     VecDestroy(x);
197:     MatDestroy(C);
198:     MatDestroy(A_tmp);
199:   }

201:   /* Test P^T * A * P - MatPtAP() */
202:   /*------------------------------*/
203:   if (Test_MatPtAP){
204:     MatPtAP(A,P,MAT_INITIAL_MATRIX,fill,&C);
205:     MatGetLocalSize(C,&m,&n);
206: 
207:     /* Test MAT_REUSE_MATRIX - reuse symbolic C */
208:     alpha=1.0;
209:     for (i=0; i<1; i++){
210:       alpha -=0.1;
211:       MatScale(A,alpha);
212:       MatPtAP(A,P,MAT_REUSE_MATRIX,fill,&C);
213:     }

215:         /* Test MatDuplicate()        */
216:     /*----------------------------*/
217:     MatDuplicate(C,MAT_COPY_VALUES,&C1);
218:     MatDuplicate(C1,MAT_COPY_VALUES,&C2);
219:     MatDestroy(C1);
220:     MatDestroy(C2);

222:     /* Create vector x that is compatible with P */
223:     VecCreate(PETSC_COMM_WORLD,&x);
224:     MatGetLocalSize(P,&m,&n);
225:     VecSetSizes(x,n,PETSC_DECIDE);
226:     VecSetFromOptions(x);
227: 
228:     VecCreate(PETSC_COMM_WORLD,&v3);
229:     VecSetSizes(v3,n,PETSC_DECIDE);
230:     VecSetFromOptions(v3);
231:     VecDuplicate(v3,&v4);

233:     norm = 0.0;
234:     for (i=0; i<10; i++) {
235:       VecSetRandom(x,rdm);
236:       MatMult(P,x,v1);
237:       MatMult(A,v1,v2);  /* v2 = A*P*x */

239:       MatMultTranspose(P,v2,v3); /* v3 = Pt*A*P*x */
240:       MatMult(C,x,v4);           /* v3 = C*x   */
241:       VecAXPY(v4,none,v3);
242:       VecNorm(v4,NORM_1,&norm_tmp);
243:       VecNorm(v3,NORM_1,&norm_tmp1);
244:       norm_tmp /= norm_tmp1;
245:       if (norm_tmp > norm) norm = norm_tmp;
246:     }
247:     if (norm >= tol && !rank) {
248:       PetscPrintf(PETSC_COMM_SELF,"Error: MatPtAP(), |v3 - v4|/|v3|: %G\n",norm);
249:     }
250: 
251:     MatDestroy(C);
252:     VecDestroy(v3);
253:     VecDestroy(v4);
254:     VecDestroy(x);
255: 
256:   }

258:   /* Clean up */
259:    MatDestroy(A);
260:   PetscRandomDestroy(rdm);
261:   VecDestroy(v1);
262:   VecDestroy(v2);
263:   DADestroy(user.fine.da);
264:   DADestroy(user.coarse.da);
265:   MatDestroy(P);

267:   PetscFinalize();

269:   return 0;
270: }