Actual source code: ex40.c

static char help[] = "Tests the parallel case for MatIncreaseOverlap(). Input arguments are:\n\
  -f <input_file> : file to load.  For a 5x5 example of the 5-pt. stencil,\n\
                       use the file petsc/src/mat/examples/matbinary.ex\n\
  -nd <size>      : > 0  number of domains per processor\n\
  -ov <overlap>   : >=0  amount of overlap between domains\n\n";
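
/*
   Example invocation (a sketch; "matbinary.ex" stands in for any PETSc
   binary matrix file, and the launcher name depends on the local MPI):

     mpiexec -n 2 ./ex40 -f matbinary.ex -nd 2 -ov 1
*/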

#include "petscksp.h"

int main(int argc,char **args)
{
  PetscInt       nd = 2,ov = 1,i,start,m,n,end,lsize;
  PetscMPIInt    rank;
  PetscTruth     flg;
  Mat            A,B;
  char           file[PETSC_MAX_PATH_LEN];
  PetscViewer    fd;
  IS             *is1,*is2;
  PetscRandom    r;
  PetscScalar    rand;

  PetscInitialize(&argc,&args,(char *)0,help);
#if defined(PETSC_USE_COMPLEX)
  SETERRQ(1,"This example does not work with complex numbers");
#else

  MPI_Comm_rank(PETSC_COMM_WORLD,&rank);
  PetscOptionsGetString(PETSC_NULL,"-f",file,PETSC_MAX_PATH_LEN-1,&flg);
  if (!flg) SETERRQ(1,"Must use -f filename to indicate a file containing a PETSc binary matrix");
  PetscOptionsGetInt(PETSC_NULL,"-nd",&nd,PETSC_NULL);
  PetscOptionsGetInt(PETSC_NULL,"-ov",&ov,PETSC_NULL);

  /* Read the matrix as a parallel (MPIAIJ) matrix */
  PetscViewerBinaryOpen(PETSC_COMM_WORLD,file,FILE_MODE_READ,&fd);
  MatLoad(fd,MATMPIAIJ,&A);
  PetscViewerDestroy(fd);

  /* Read the matrix again as a sequential matrix */
  PetscViewerBinaryOpen(PETSC_COMM_SELF,file,FILE_MODE_READ,&fd);
  MatLoad(fd,MATSEQAIJ,&B);
  PetscViewerDestroy(fd);

  /* Create the IS corresponding to the subdomains */
  PetscMalloc(nd*sizeof(IS),&is1);
  PetscMalloc(nd*sizeof(IS),&is2);

  /* Create the random index sets: each subdomain is a random contiguous
     block of rows [start,start+lsize) of the matrix */
  MatGetSize(A,&m,&n);
  PetscRandomCreate(PETSC_COMM_SELF,&r);
  PetscRandomSetFromOptions(r);
  for (i=0; i<nd; i++) {
    PetscRandomGetValue(r,&rand);
    start = (PetscInt)(rand*m);
    PetscRandomGetValue(r,&rand);
    end   = (PetscInt)(rand*m);
    lsize = end - start;
    if (start > end) { start = end; lsize = -lsize; }
    ISCreateStride(PETSC_COMM_SELF,lsize,start,1,is1+i);
    ISCreateStride(PETSC_COMM_SELF,lsize,start,1,is2+i);
  }
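
  /* MatIncreaseOverlap() replaces each index set by a larger one that
     includes ov additional levels of overlap, following the nonzero
     structure of the matrix; is1[] and is2[] start out identical, so the
     parallel matrix A and its sequential copy B can be compared below */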
  MatIncreaseOverlap(A,nd,is1,ov);
  MatIncreaseOverlap(B,nd,is2,ov);

  /* Check that the sequential and parallel cases give the same answer */
  for (i=0; i<nd; ++i) {
    ISEqual(is1[i],is2[i],&flg);
    PetscPrintf(PETSC_COMM_SELF,"proc:[%d], i=%D, flg = %d\n",rank,i,(int)flg);
  }

  /* Free allocated memory */
  for (i=0; i<nd; ++i) {
    ISDestroy(is1[i]);
    ISDestroy(is2[i]);
  }
  PetscFree(is1);
  PetscFree(is2);
  PetscRandomDestroy(r);
  MatDestroy(A);
  MatDestroy(B);

  PetscFinalize();
#endif
  return 0;
}