Actual source code: psplit.c

#define PETSC_DLL

#include "petsc.h"

/*@
    PetscSplitOwnershipBlock - Given a global (or local) length, determines a local
        (or global) length via a simple formula. Splits so that each processor's local
        size is divisible by the block size.

   Collective on MPI_Comm (if N is PETSC_DECIDE)

   Input Parameters:
+    comm - MPI communicator that shares the object being divided
.    bs - block size
.    n - local length (or PETSC_DECIDE to have it set)
-    N - global length (or PETSC_DECIDE)

  Level: developer

   Notes:
     n and N cannot both be PETSC_DECIDE

     If one processor calls this with N of PETSC_DECIDE then all processors
     must; otherwise the program will hang.

.seealso: PetscSplitOwnership()

@*/
PetscErrorCode  PetscSplitOwnershipBlock(MPI_Comm comm,PetscInt bs,PetscInt *n,PetscInt *N)
{
  PetscMPIInt    size,rank;

  if (*N == PETSC_DECIDE && *n == PETSC_DECIDE) SETERRQ(PETSC_ERR_ARG_INCOMP,"Both n and N cannot be PETSC_DECIDE");

  if (*N == PETSC_DECIDE) {
    /* Local sizes are given: each must be a multiple of bs; the global size is their sum */
    if (*n % bs != 0) SETERRQ2(PETSC_ERR_ARG_INCOMP,"local size %D not divisible by block size %D",*n,bs);
    MPI_Allreduce(n,N,1,MPIU_INT,MPI_SUM,comm);
  } else if (*n == PETSC_DECIDE) {
    /* Global size is given: distribute the N/bs blocks as evenly as possible,
       giving the first (Nbs % size) ranks one extra block */
    PetscInt Nbs = *N/bs;
    MPI_Comm_size(comm,&size);
    MPI_Comm_rank(comm,&rank);
    *n = bs*(Nbs/size + ((Nbs % size) > rank));
  }
  return(0);
}
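
A minimal usage sketch of the routine above (not part of psplit.c; the wrapper name SplitOwnershipBlockExample and the chosen sizes are illustrative, and petsc.h is assumed to be included as at the top of this file):

PetscErrorCode SplitOwnershipBlockExample(void)
{
  PetscErrorCode ierr;
  PetscInt       bs = 2,n = PETSC_DECIDE,N = 10;

  /* Ask PETSc for a block-aligned local share of the global length 10 */
  ierr = PetscSplitOwnershipBlock(PETSC_COMM_WORLD,bs,&n,&N);CHKERRQ(ierr);
  /* On 3 processes the N/bs = 5 blocks are split 2,2,1, so the local
     sizes n come out as 4, 4, and 2 */
  return(0);
}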
/*@
    PetscSplitOwnership - Given a global (or local) length, determines a local
        (or global) length via a simple formula.

   Collective on MPI_Comm (if N is PETSC_DECIDE)

   Input Parameters:
+    comm - MPI communicator that shares the object being divided
.    n - local length (or PETSC_DECIDE to have it set)
-    N - global length (or PETSC_DECIDE)

  Level: developer

   Notes:
     n and N cannot both be PETSC_DECIDE

     If one processor calls this with N of PETSC_DECIDE then all processors
     must; otherwise the program will hang.

.seealso: PetscSplitOwnershipBlock()

@*/
PetscErrorCode  PetscSplitOwnership(MPI_Comm comm,PetscInt *n,PetscInt *N)
{
  PetscMPIInt    size,rank;

  if (*N == PETSC_DECIDE && *n == PETSC_DECIDE) SETERRQ(PETSC_ERR_ARG_INCOMP,"Both n and N cannot be PETSC_DECIDE\n  likely a call to VecSetSizes() or MatSetSizes() is wrong.\nSee http://www.mcs.anl.gov/petsc/petsc-as/documentation/troubleshooting.html#PetscSplitOwnership");

  if (*N == PETSC_DECIDE) {
    /* Local sizes are given: the global size is their sum */
    MPI_Allreduce(n,N,1,MPIU_INT,MPI_SUM,comm);
  } else if (*n == PETSC_DECIDE) {
    /* Global size is given: split it as evenly as possible, giving the
       first (N % size) ranks one extra entry */
    MPI_Comm_size(comm,&size);
    MPI_Comm_rank(comm,&rank);
    *n = *N/size + ((*N % size) > rank);
#if defined(PETSC_USE_DEBUG)
  } else {
    /* Both sizes were given: in debug builds verify that the local sizes sum to the global size */
    PetscInt tmp;
    MPI_Allreduce(n,&tmp,1,MPIU_INT,MPI_SUM,comm);
    if (tmp != *N) SETERRQ3(PETSC_ERR_ARG_SIZ,"Sum of local lengths %D does not equal global length %D, my local length %D\n  likely a call to VecSetSizes() or MatSetSizes() is wrong.\nSee http://www.mcs.anl.gov/petsc/petsc-as/documentation/troubleshooting.html#PetscSplitOwnership",tmp,*N,*n);
#endif
  }

  return(0);
}
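
A similar usage sketch for PetscSplitOwnership() (again not part of psplit.c; the wrapper name SplitOwnershipExample and the sizes are illustrative, and petsc.h is assumed to be included as at the top of this file):

PetscErrorCode SplitOwnershipExample(void)
{
  PetscErrorCode ierr;
  PetscInt       n = PETSC_DECIDE,N = 10;

  /* Determine this process's share of the global length 10 */
  ierr = PetscSplitOwnership(PETSC_COMM_WORLD,&n,&N);CHKERRQ(ierr);
  /* With 3 processes, N/size = 3 and N % size = 1, so rank 0 gets
     n = 4 and ranks 1 and 2 get n = 3 */
  return(0);
}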