Actual source code: ex13.c

static char help[] = "Tests PetscObjectPublish().\n\n";

/*T
   Concepts: vectors^assembling vectors;
   Processors: n
T*/
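
/*
   PetscObjectPublish() hands an object to PETSc's runtime publishing
   interface (the AMS/ALICE Memory Snooper in the PETSc generation this
   example targets, assuming PETSc was configured with that support), so an
   external monitoring tool can attach to the running program and watch the
   vector entries change inside the endless loop in main().
*/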

/*
  Include "petscvec.h" so that we can use vectors.  Note that this file
  automatically includes:
     petsc.h       - base PETSc routines   petscis.h     - index sets
     petscsys.h    - system routines       petscviewer.h - viewers
*/
#include "petscvec.h"

int main(int argc,char **argv)
{
  int         i,n,rank;
  PetscScalar one = 1.0,*array;
  Vec         x,xlocal;

  PetscInitialize(&argc,&argv,(char *)0,help);
  MPI_Comm_rank(PETSC_COMM_WORLD,&rank);

  /*
     Create a parallel vector.
      - In this case, we specify the size of each processor's local
        portion, and PETSc computes the global size.  Alternatively,
        if we pass the global size and use PETSC_DECIDE for the
        local size, PETSc will choose a reasonable partition, trying
        to put nearly an equal number of elements on each processor.
  */
  VecCreateMPI(PETSC_COMM_WORLD,rank+4,PETSC_DECIDE,&x);
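
  /*
     For illustration (using a hypothetical global size N that is not defined
     in this example): the alternative described above would let PETSc choose
     the local sizes, e.g.

         VecCreateMPI(PETSC_COMM_WORLD,PETSC_DECIDE,N,&x);
  */
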
  PetscObjectPublish((PetscObject)x);
  VecGetLocalSize(x,&n);
  VecSet(x,one);
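
  /*
     The sequential vector created next lives on PETSC_COMM_SELF, so each
     process owns a private copy of length rank+4; it is published in the
     same way as the parallel vector.
  */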

  VecCreateSeq(PETSC_COMM_SELF,rank+4,&xlocal);
  PetscObjectPublish((PetscObject)xlocal);
  VecSet(xlocal,one);

  while (1) {
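    /*
       Keep updating the published vectors indefinitely, presumably so that an
       attached monitor can watch the values change; note that the loop never
       exits.
    */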

    /*
       Access the vector entries and add to them
    */
    PetscBarrier((PetscObject)x);
    VecGetArray(x,&array);
    for (i=0; i<n; i++) {
      array[i]++;
    }
    VecRestoreArray(x,&array);
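
    /*
       Apply the same update to the sequential vector; its length is rank+4,
       which equals the local length n of x, so n can be reused here.
    */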

    VecGetArray(xlocal,&array);
    for (i=0; i<n; i++) {
      array[i]++;
    }
    VecRestoreArray(xlocal,&array);
  }
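
  /*
     Because the loop above never exits, the cleanup below is never actually
     reached.
  */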

  /*
        Destroy the vectors
  */
  VecDestroy(x);
  VecDestroy(xlocal);

  PetscFinalize();
  return 0;
}