Actual source code: ex41.c

static char help[] = "Tests MatIncreaseOverlap() - the parallel case. This example\n\
is similar to ex40.c; here the index sets used are random. Input arguments are:\n\
  -f <input_file> : file to load.  For a 5x5 example of the 5-pt. stencil,\n\
                       use the file petsc/src/mat/examples/matbinary.ex\n\
  -nd <size>      : > 0  number of domains per processor\n\
  -ov <overlap>   : >= 0 amount of overlap between domains\n\n";
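/* A typical invocation (the process count and option values below are only
   illustrative; the matrix file is the sample mentioned in the help text):
       mpiexec -n 2 ./ex41 -f petsc/src/mat/examples/matbinary.ex -nd 4 -ov 1 */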

#include "petscksp.h"

int main(int argc,char **args)
{
  PetscInt       nd = 2,ov=1,i,j,m,n,*idx,lsize;
  PetscMPIInt    rank;
  PetscTruth     flg;
  Mat            A,B;
  char           file[PETSC_MAX_PATH_LEN];
  PetscViewer    fd;
  IS             *is1,*is2;
  PetscRandom    r;
  PetscScalar    rand;

  PetscInitialize(&argc,&args,(char *)0,help);
#if defined(PETSC_USE_COMPLEX)
  SETERRQ(1,"This example does not work with complex numbers");
#else

  MPI_Comm_rank(PETSC_COMM_WORLD,&rank);
  PetscOptionsGetString(PETSC_NULL,"-f",file,PETSC_MAX_PATH_LEN-1,PETSC_NULL);
  PetscOptionsGetInt(PETSC_NULL,"-nd",&nd,PETSC_NULL);
  PetscOptionsGetInt(PETSC_NULL,"-ov",&ov,PETSC_NULL);

  /* Read the matrix as a parallel matrix */
  PetscViewerBinaryOpen(PETSC_COMM_WORLD,file,FILE_MODE_READ,&fd);
  MatLoad(fd,MATMPIAIJ,&A);
  PetscViewerDestroy(fd);

  /* Read the same matrix again as a sequential matrix */
  PetscViewerBinaryOpen(PETSC_COMM_SELF,file,FILE_MODE_READ,&fd);
  MatLoad(fd,MATSEQAIJ,&B);
  PetscViewerDestroy(fd);

  /* Create the random number generator */
  MatGetSize(A,&m,&n);
  PetscRandomCreate(PETSC_COMM_SELF,&r);
  PetscRandomSetFromOptions(r);

  /* Create the IS corresponding to subdomains */
  PetscMalloc(nd*sizeof(IS),&is1);
  PetscMalloc(nd*sizeof(IS),&is2);
  PetscMalloc(m*sizeof(PetscInt),&idx);
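  /* idx is scratch space sized for the worst case, a subdomain that touches
     all m rows of the matrix; each pass through the loop below fills only its
     first lsize entries. */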

  /* Create the random Index Sets */
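  /* Each process owns its own sequential generator, so before drawing the
     subdomain size every rank first discards 'rank' values; this staggers the
     streams so that different ranks build different random subdomains. */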
  for (i=0; i<nd; i++) {
    for (j=0; j<rank; j++) {
      PetscRandomGetValue(r,&rand);
    }
    PetscRandomGetValue(r,&rand);
    lsize   = (PetscInt)(rand*m);
    for (j=0; j<lsize; j++) {
      PetscRandomGetValue(r,&rand);
      idx[j] = (PetscInt)(rand*m);
    }
    ISCreateGeneral(PETSC_COMM_SELF,lsize,idx,is1+i);
    ISCreateGeneral(PETSC_COMM_SELF,lsize,idx,is2+i);
  }

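  /* MatIncreaseOverlap() enlarges each index set by 'ov' levels of overlap
     using the nonzero structure of the matrix.  It is applied once through the
     parallel matrix A and once through the sequential copy B; the two code
     paths should return identical index sets. */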
  MatIncreaseOverlap(A,nd,is1,ov);
  MatIncreaseOverlap(B,nd,is2,ov);

  /* Now see if the serial and parallel case have the same answers */
  for (i=0; i<nd; ++i) {
    PetscInt sz1,sz2;
    ISEqual(is1[i],is2[i],&flg);
    ISGetSize(is1[i],&sz1);
    ISGetSize(is2[i],&sz2);
    PetscPrintf(PETSC_COMM_SELF,"[%d], i=%D, flg =%d sz1 = %D sz2 = %D\n",rank,i,(int)flg,sz1,sz2);
    /* ISView(is1[i],PETSC_VIEWER_STDOUT_SELF);
       ISView(is2[i],PETSC_VIEWER_STDOUT_SELF); */
  }

  /* Free allocated memory */
  for (i=0; i<nd; ++i) {
    ISDestroy(is1[i]);
    ISDestroy(is2[i]);
  }
  PetscRandomDestroy(r);
  PetscFree(is1);
  PetscFree(is2);
  MatDestroy(A);
  MatDestroy(B);
  PetscFree(idx);

  PetscFinalize();
#endif
  return 0;
}
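/* Expected output (the sizes depend on the matrix file and on the random
   draws): each rank prints one line per subdomain of the form
       [<rank>], i=<i>, flg =1 sz1 = <n> sz2 = <n>
   where flg = 1 indicates that ISEqual() found the index set grown through the
   parallel matrix identical to the one grown through the sequential copy. */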