#define PETSCVEC_DLL

#include "petscvec.h"          /*I "petscsys.h" I*/
#include "src/vec/is/isimpl.h" /*I "petscis.h"  I*/

PetscCookie PETSCVEC_DLLEXPORT IS_LTOGM_COOKIE = -1;

#undef __FUNCT__
#define __FUNCT__ "ISLocalToGlobalMappingGetSize"
/*@C
    ISLocalToGlobalMappingGetSize - Gets the local size of a local to global mapping.

    Not Collective

    Input Parameter:
.   ltog - local to global mapping

    Output Parameter:
.   n - the number of entries in the local mapping

    Level: advanced

    Concepts: mapping^local to global

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
@*/
PetscErrorCode PETSCVEC_DLLEXPORT ISLocalToGlobalMappingGetSize(ISLocalToGlobalMapping mapping,PetscInt *n)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_COOKIE,1);
  PetscValidIntPointer(n,2);
  *n = mapping->n;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "ISLocalToGlobalMappingView"
/*@C
    ISLocalToGlobalMappingView - View a local to global mapping

    Not Collective

    Input Parameters:
+   ltog - local to global mapping
-   viewer - viewer

    Level: advanced

    Concepts: mapping^local to global

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
@*/
PetscErrorCode PETSCVEC_DLLEXPORT ISLocalToGlobalMappingView(ISLocalToGlobalMapping mapping,PetscViewer viewer)
{
  PetscInt       i;
  PetscMPIInt    rank;
  PetscTruth     iascii;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_COOKIE,1);
  if (!viewer) {
    ierr = PetscViewerASCIIGetStdout(mapping->comm,&viewer);CHKERRQ(ierr);
  }
  PetscValidHeaderSpecific(viewer,PETSC_VIEWER_COOKIE,2);

  ierr = MPI_Comm_rank(mapping->comm,&rank);CHKERRQ(ierr);
  ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&iascii);CHKERRQ(ierr);
  if (iascii) {
    for (i=0; i<mapping->n; i++) {
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] %d %d\n",rank,i,mapping->indices[i]);CHKERRQ(ierr);
    }
    ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
  } else {
    SETERRQ1(PETSC_ERR_SUP,"Viewer type %s not supported for ISLocalToGlobalMapping",((PetscObject)viewer)->type_name);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "ISLocalToGlobalMappingCreateIS"
/*@
    ISLocalToGlobalMappingCreateIS - Creates a mapping between a local (0 to n)
    ordering and a global parallel ordering.

    Not collective

    Input Parameter:
.   is - index set containing the global numbers for each local node

    Output Parameter:
.   mapping - new mapping data structure

    Level: advanced

    Concepts: mapping^local to global

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
@*/
PetscErrorCode PETSCVEC_DLLEXPORT ISLocalToGlobalMappingCreateIS(IS is,ISLocalToGlobalMapping *mapping)
{
  PetscErrorCode ierr;
  PetscInt       n,*indices;
  MPI_Comm       comm;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(is,IS_COOKIE,1);
  PetscValidPointer(mapping,2);

  ierr = PetscObjectGetComm((PetscObject)is,&comm);CHKERRQ(ierr);
  ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
  ierr = ISGetIndices(is,&indices);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingCreate(comm,n,indices,mapping);CHKERRQ(ierr);
  ierr = ISRestoreIndices(is,&indices);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
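/*
   Illustrative usage sketch (not part of the original source): building a mapping from an
   index set with ISLocalToGlobalMappingCreateIS() and querying it with
   ISLocalToGlobalMappingGetSize().  The variable names and the particular global numbers
   are hypothetical; the surrounding program setup is omitted.

     IS                     is;
     ISLocalToGlobalMapping ltog;
     PetscInt               nlocal = 4,size;
     PetscInt               gidx[4] = {0,3,9,12};
     PetscErrorCode         ierr;

     ierr = ISCreateGeneral(PETSC_COMM_WORLD,nlocal,gidx,&is);CHKERRQ(ierr);
     ierr = ISLocalToGlobalMappingCreateIS(is,&ltog);CHKERRQ(ierr);
     ierr = ISLocalToGlobalMappingGetSize(ltog,&size);CHKERRQ(ierr);   size is now nlocal
     ierr = ISDestroy(is);CHKERRQ(ierr);
     ierr = ISLocalToGlobalMappingDestroy(ltog);CHKERRQ(ierr);
*/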
#undef __FUNCT__
#define __FUNCT__ "ISLocalToGlobalMappingCreate"
/*@
    ISLocalToGlobalMappingCreate - Creates a mapping between a local (0 to n)
    ordering and a global parallel ordering.

    Not Collective, but communicator may have more than one process

    Input Parameters:
+   comm - MPI communicator
.   n - the number of local elements
-   indices - the global index for each local element

    Output Parameter:
.   mapping - new mapping data structure

    Level: advanced

    Concepts: mapping^local to global

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreateNC()
@*/
PetscErrorCode PETSCVEC_DLLEXPORT ISLocalToGlobalMappingCreate(MPI_Comm cm,PetscInt n,const PetscInt indices[],ISLocalToGlobalMapping *mapping)
{
  PetscErrorCode ierr;
  PetscInt       *in;

  PetscFunctionBegin;
  PetscValidIntPointer(indices,3);
  PetscValidPointer(mapping,4);

  ierr = PetscMalloc(n*sizeof(PetscInt),&in);CHKERRQ(ierr);
  ierr = PetscMemcpy(in,indices,n*sizeof(PetscInt));CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingCreateNC(cm,n,in,mapping);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "ISLocalToGlobalMappingCreateNC"
/*@C
    ISLocalToGlobalMappingCreateNC - Creates a mapping between a local (0 to n)
    ordering and a global parallel ordering.

    Not Collective, but communicator may have more than one process

    Input Parameters:
+   comm - MPI communicator
.   n - the number of local elements
-   indices - the global index for each local element

    Output Parameter:
.   mapping - new mapping data structure

    Level: developer

    Notes: Does not copy the indices, just keeps the pointer to the indices. The
    ISLocalToGlobalMappingDestroy() will free the space so it must be obtained with
    PetscMalloc() and it must not be freed elsewhere.

    Concepts: mapping^local to global

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate()
@*/
PetscErrorCode PETSCVEC_DLLEXPORT ISLocalToGlobalMappingCreateNC(MPI_Comm cm,PetscInt n,const PetscInt indices[],ISLocalToGlobalMapping *mapping)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (n) {PetscValidIntPointer(indices,3);}
  PetscValidPointer(mapping,4);
  *mapping = PETSC_NULL;
#ifndef PETSC_USE_DYNAMIC_LIBRARIES
  ierr = VecInitializePackage(PETSC_NULL);CHKERRQ(ierr);
#endif
  if (IS_LTOGM_COOKIE == -1) {
    ierr = PetscLogClassRegister(&IS_LTOGM_COOKIE,"IS Local to global mapping");CHKERRQ(ierr);
  }

  ierr = PetscHeaderCreate(*mapping,_p_ISLocalToGlobalMapping,int,IS_LTOGM_COOKIE,0,"ISLocalToGlobalMapping",
                           cm,ISLocalToGlobalMappingDestroy,ISLocalToGlobalMappingView);CHKERRQ(ierr);
  ierr = PetscLogObjectMemory(*mapping,sizeof(struct _p_ISLocalToGlobalMapping)+n*sizeof(PetscInt));CHKERRQ(ierr);

  (*mapping)->n       = n;
  (*mapping)->indices = (PetscInt*)indices;

  /*
      Do not create the global to local mapping. This is only created if
      ISGlobalToLocalMapping() is called
  */
  (*mapping)->globals = 0;
  PetscFunctionReturn(0);
}
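/*
   Illustrative sketch (not part of the original source) of the ownership difference between
   ISLocalToGlobalMappingCreate(), which copies the indices, and
   ISLocalToGlobalMappingCreateNC(), which keeps the caller's pointer.  Variable names are
   hypothetical.

     PetscInt               idx[3] = {5,7,9},*heapidx;
     ISLocalToGlobalMapping m1,m2;
     PetscErrorCode         ierr;

     ierr = ISLocalToGlobalMappingCreate(PETSC_COMM_WORLD,3,idx,&m1);CHKERRQ(ierr);
     m1 holds its own copy, so idx may live on the stack and may be reused by the caller.

     ierr = PetscMalloc(3*sizeof(PetscInt),&heapidx);CHKERRQ(ierr);
     heapidx[0] = 5; heapidx[1] = 7; heapidx[2] = 9;
     ierr = ISLocalToGlobalMappingCreateNC(PETSC_COMM_WORLD,3,heapidx,&m2);CHKERRQ(ierr);
     m2 keeps heapidx itself; it must come from PetscMalloc() and must not be freed by the
     caller, because ISLocalToGlobalMappingDestroy(m2) frees it.

     ierr = ISLocalToGlobalMappingDestroy(m1);CHKERRQ(ierr);
     ierr = ISLocalToGlobalMappingDestroy(m2);CHKERRQ(ierr);
*/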
#undef __FUNCT__
#define __FUNCT__ "ISLocalToGlobalMappingBlock"
/*@
    ISLocalToGlobalMappingBlock - Creates a blocked index version of an
       ISLocalToGlobalMapping that is appropriate for MatSetLocalToGlobalMappingBlock()
       and VecSetLocalToGlobalMappingBlock().

    Not Collective, but communicator may have more than one process

    Input Parameters:
+    inmap - original point-wise mapping
-    bs - block size

    Output Parameter:
.   outmap - block based mapping; the indices are relative to BLOCKS, not individual vector or matrix entries.

    Level: advanced

    Concepts: mapping^local to global

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingCreateIS()
@*/
PetscErrorCode PETSCVEC_DLLEXPORT ISLocalToGlobalMappingBlock(ISLocalToGlobalMapping inmap,PetscInt bs,ISLocalToGlobalMapping *outmap)
{
  PetscErrorCode ierr;
  PetscInt       *ii,i,n;

  PetscFunctionBegin;
  if (bs > 1) {
    n = inmap->n/bs;
    if (n*bs != inmap->n) SETERRQ(PETSC_ERR_ARG_INCOMP,"Pointwise mapping length is not divisible by block size");
    ierr = PetscMalloc(n*sizeof(PetscInt),&ii);CHKERRQ(ierr);
    for (i=0; i<n; i++) {
      ii[i] = inmap->indices[bs*i]/bs;
    }
    ierr = ISLocalToGlobalMappingCreate(inmap->comm,n,ii,outmap);CHKERRQ(ierr);
    ierr = PetscFree(ii);CHKERRQ(ierr);
  } else {
    *outmap = inmap;
    ierr    = PetscObjectReference((PetscObject)inmap);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "ISLocalToGlobalMappingDestroy"
/*@
   ISLocalToGlobalMappingDestroy - Destroys a mapping between a local (0 to n)
   ordering and a global parallel ordering.

   Not Collective

   Input Parameters:
.  mapping - mapping data structure

   Level: advanced

.seealso: ISLocalToGlobalMappingCreate()
@*/
PetscErrorCode PETSCVEC_DLLEXPORT ISLocalToGlobalMappingDestroy(ISLocalToGlobalMapping mapping)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidPointer(mapping,1);
  if (--mapping->refct > 0) PetscFunctionReturn(0);
  if (mapping->refct < 0) {
    SETERRQ(PETSC_ERR_PLIB,"Mapping already destroyed");
  }

  ierr = PetscFree(mapping->indices);CHKERRQ(ierr);
  ierr = PetscFree(mapping->globals);CHKERRQ(ierr);
  ierr = PetscHeaderDestroy(mapping);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "ISLocalToGlobalMappingApplyIS"
/*@
    ISLocalToGlobalMappingApplyIS - Creates from an IS in the local numbering
    a new index set using the global numbering defined in an ISLocalToGlobalMapping
    context.

    Not collective

    Input Parameters:
+   mapping - mapping between local and global numbering
-   is - index set in local numbering

    Output Parameters:
.   newis - index set in global numbering

    Level: advanced

    Concepts: mapping^local to global

.seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),
          ISLocalToGlobalMappingDestroy(), ISGlobalToLocalMappingApply()
@*/
PetscErrorCode PETSCVEC_DLLEXPORT ISLocalToGlobalMappingApplyIS(ISLocalToGlobalMapping mapping,IS is,IS *newis)
{
  PetscErrorCode ierr;
  PetscInt       n,i,*idxin,*idxmap,*idxout,Nmax = mapping->n;

  PetscFunctionBegin;
  PetscValidPointer(mapping,1);
  PetscValidHeaderSpecific(is,IS_COOKIE,2);
  PetscValidPointer(newis,3);

  ierr   = ISGetLocalSize(is,&n);CHKERRQ(ierr);
  ierr   = ISGetIndices(is,&idxin);CHKERRQ(ierr);
  idxmap = mapping->indices;

  ierr = PetscMalloc(n*sizeof(PetscInt),&idxout);CHKERRQ(ierr);
  for (i=0; i<n; i++) {
    if (idxin[i] >= Nmax) SETERRQ3(PETSC_ERR_ARG_OUTOFRANGE,"Local index %d too large %d (max) at %d",idxin[i],Nmax-1,i);
    idxout[i] = idxmap[idxin[i]];
  }
  ierr = ISRestoreIndices(is,&idxin);CHKERRQ(ierr);
  ierr = ISCreateGeneral(PETSC_COMM_SELF,n,idxout,newis);CHKERRQ(ierr);
  ierr = PetscFree(idxout);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
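/*
   Illustrative sketch (not part of the original source): converting an index set expressed
   in the local numbering to the global numbering with ISLocalToGlobalMappingApplyIS().
   "ltog" stands for a previously created mapping; the local indices are hypothetical.

     IS             islocal,isglobal;
     PetscInt       loc[2] = {0,2};
     PetscErrorCode ierr;

     ierr = ISCreateGeneral(PETSC_COMM_SELF,2,loc,&islocal);CHKERRQ(ierr);
     ierr = ISLocalToGlobalMappingApplyIS(ltog,islocal,&isglobal);CHKERRQ(ierr);
     isglobal now holds the global numbers stored at positions 0 and 2 of the mapping.
     ierr = ISDestroy(islocal);CHKERRQ(ierr);
     ierr = ISDestroy(isglobal);CHKERRQ(ierr);
*/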
/*MC
   ISLocalToGlobalMappingApply - Takes a list of integers in a local numbering
   and converts them to the global numbering.

   Not collective

   Input Parameters:
+  mapping - the local to global mapping context
.  N - number of integers
-  in - input indices in local numbering

   Output Parameter:
.  out - indices in global numbering

   Synopsis:
   PetscErrorCode ISLocalToGlobalMappingApply(ISLocalToGlobalMapping mapping,int N,int in[],int out[])

   Notes:
   The in and out array parameters may be identical.

   Level: advanced

.seealso: ISLocalToGlobalMappingCreate(),ISLocalToGlobalMappingDestroy(),
          ISLocalToGlobalMappingApplyIS(),AOCreateBasic(),AOApplicationToPetsc(),
          AOPetscToApplication(), ISGlobalToLocalMappingApply()

   Concepts: mapping^local to global

M*/

/* -----------------------------------------------------------------------------------------*/

#undef __FUNCT__
#define __FUNCT__ "ISGlobalToLocalMappingSetUp_Private"
/*
    Creates the global fields in the ISLocalToGlobalMapping structure
*/
static PetscErrorCode ISGlobalToLocalMappingSetUp_Private(ISLocalToGlobalMapping mapping)
{
  PetscErrorCode ierr;
  PetscInt       i,*idx = mapping->indices,n = mapping->n,end,start,*globals;

  PetscFunctionBegin;
  end   = 0;
  start = 100000000;
  for (i=0; i<n; i++) {
    if (idx[i] < start) start = idx[i];
    if (idx[i] > end)   end   = idx[i];
  }
  if (start > end) {start = 0; end = -1;}
  mapping->globalstart = start;
  mapping->globalend   = end;

  ierr             = PetscMalloc((end-start+2)*sizeof(PetscInt),&globals);CHKERRQ(ierr);
  mapping->globals = globals;
  for (i=0; i<end-start+1; i++) {
    globals[i] = -1;
  }
  for (i=0; i<n; i++) {
    globals[idx[i] - start] = i;
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "ISGlobalToLocalMappingApply"
PetscErrorCode PETSCVEC_DLLEXPORT ISGlobalToLocalMappingApply(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingType type,
                                                              PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[])
{
  PetscErrorCode ierr;
  PetscInt       i,*globals,nf = 0,tmp,start,end;

  PetscFunctionBegin;
  if (!mapping->globals) {
    ierr = ISGlobalToLocalMappingSetUp_Private(mapping);CHKERRQ(ierr);
  }
  globals = mapping->globals;
  start   = mapping->globalstart;
  end     = mapping->globalend;

  if (type == IS_GTOLM_MASK) {
    if (idxout) {
      for (i=0; i<n; i++) {
        if (idx[i] < start)    idxout[i] = -1;
        else if (idx[i] > end) idxout[i] = -1;
        else                   idxout[i] = globals[idx[i] - start];
      }
    }
    if (nout) *nout = n;
  } else {
    if (idxout) {
      for (i=0; i<n; i++) {
        if (idx[i] < start) continue;
        if (idx[i] > end) continue;
        tmp = globals[idx[i] - start];
        if (tmp < 0) continue;
        idxout[nf++] = tmp;
      }
    } else {
      for (i=0; i<n; i++) {
        if (idx[i] < start) continue;
        if (idx[i] > end) continue;
        tmp = globals[idx[i] - start];
        if (tmp < 0) continue;
        nf++;
      }
    }
    if (nout) *nout = nf;
  }
  PetscFunctionReturn(0);
}
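/*
   Illustrative sketch (not part of the original source) of the two modes accepted by
   ISGlobalToLocalMappingApply() as implemented above.  "ltog" stands for an existing
   mapping and the global indices are hypothetical.

     PetscInt       gidx[3] = {9,3,100},lidx[3],nout;
     PetscErrorCode ierr;

     With IS_GTOLM_MASK one output entry is produced per input entry and indices with no
     local value come back as -1:
     ierr = ISGlobalToLocalMappingApply(ltog,IS_GTOLM_MASK,3,gidx,&nout,lidx);CHKERRQ(ierr);

     With IS_GTOLM_DROP unmapped indices are removed and nout reports how many remain.
     Passing idxout == PETSC_NULL first gives the required output length:
     ierr = ISGlobalToLocalMappingApply(ltog,IS_GTOLM_DROP,3,gidx,&nout,PETSC_NULL);CHKERRQ(ierr);
     ierr = ISGlobalToLocalMappingApply(ltog,IS_GTOLM_DROP,3,gidx,PETSC_NULL,lidx);CHKERRQ(ierr);
*/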
#undef __FUNCT__
#define __FUNCT__ "ISLocalToGlobalMappingGetInfo"
/*@C
    ISLocalToGlobalMappingGetInfo - Gets the neighbor information for each processor and
     each index shared by more than one processor

    Collective on ISLocalToGlobalMapping

    Input Parameters:
.   mapping - the mapping from local to global indexing

    Output Parameters:
+   nproc - number of processors that are connected to this one
.   proc - neighboring processors
.   numproc - number of indices for each subdomain (processor)
-   indices - indices of local nodes shared with neighbor (sorted by global numbering)

    Level: advanced

    Concepts: mapping^local to global

    Fortran Usage:
$        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
$        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
          PetscInt indices[nproc][numprocmax],ierr)
        There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[]
        and indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
          ISLocalToGlobalMappingRestoreInfo()
@*/
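/*
   Illustrative sketch (not part of the original source): calling ISLocalToGlobalMappingGetInfo()
   from C.  "ltog" stands for an existing parallel mapping.  The arrays are allocated by the
   routine; it is assumed here that they are given back with ISLocalToGlobalMappingRestoreInfo(),
   which is referenced above but not defined in this file and is taken to mirror the GetInfo()
   argument list.

     PetscInt       nproc,*procs,*numprocs,**indices;
     PetscErrorCode ierr;

     ierr = ISLocalToGlobalMappingGetInfo(ltog,&nproc,&procs,&numprocs,&indices);CHKERRQ(ierr);
     for each i in [0,nproc) and j in [0,numprocs[i]), local index indices[i][j] is shared
     with the processor procs[i].
     ierr = ISLocalToGlobalMappingRestoreInfo(ltog,&nproc,&procs,&numprocs,&indices);CHKERRQ(ierr);
*/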
PetscErrorCode PETSCVEC_DLLEXPORT ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
{
  PetscErrorCode ierr;
  PetscMPIInt    size,rank,tag1,tag2,tag3,*len,*source,imdex;
  PetscInt       i,n = mapping->n,Ng,ng,max = 0,*lindices = mapping->indices;
  PetscInt       *nprocs,*owner,nsends,*sends,j,*starts,nmax,nrecvs,*recvs,proc;
  PetscInt       cnt,scale,*ownedsenders,*nownedsenders,rstart,nowned;
  PetscInt       node,nownedm,nt,*sends2,nsends2,*starts2,*lens2,*dest,nrecvs2,*starts3,*recvs2,k,*bprocs,*tmp;
  PetscInt       first_procs,first_numprocs,*first_indices;
  MPI_Request    *recv_waits,*send_waits;
  MPI_Status     recv_status,*send_status,*recv_statuses;
  MPI_Comm       comm = mapping->comm;
  PetscTruth     debug = PETSC_FALSE;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  if (size == 1) {
    *nproc         = 0;
    *procs         = PETSC_NULL;
    ierr           = PetscMalloc(sizeof(PetscInt),numprocs);CHKERRQ(ierr);
    (*numprocs)[0] = 0;
    ierr           = PetscMalloc(sizeof(PetscInt*),indices);CHKERRQ(ierr);
    (*indices)[0]  = PETSC_NULL;
    PetscFunctionReturn(0);
  }

  ierr = PetscOptionsHasName(PETSC_NULL,"-islocaltoglobalmappinggetinfo_debug",&debug);CHKERRQ(ierr);

  /*
    Notes on ISLocalToGlobalMappingGetInfo

    globally owned node - the nodes that have been assigned to this processor in global
           numbering, just for this routine.

    nontrivial globally owned node - node assigned to this processor that is on a subdomain
           boundary (i.e. it has more than one local owner)

    locally owned node - node that exists on this processor's subdomain

    nontrivial locally owned node - node that is not in the interior (i.e. has more than one
           local subdomain)
  */
  ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag1);CHKERRQ(ierr);
  ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag2);CHKERRQ(ierr);
  ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag3);CHKERRQ(ierr);

  for (i=0; i<n; i++) {
    if (lindices[i] > max) max = lindices[i];
  }
  ierr = MPI_Allreduce(&max,&Ng,1,MPIU_INT,MPI_MAX,comm);CHKERRQ(ierr);
  Ng++;
  ierr   = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr   = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  scale  = Ng/size + 1;
  ng     = scale; if (rank == size-1) ng = Ng - scale*(size-1); ng = PetscMax(1,ng);
  rstart = scale*rank;

  /* determine ownership ranges of global indices */
  ierr = PetscMalloc(2*size*sizeof(PetscInt),&nprocs);CHKERRQ(ierr);
  ierr = PetscMemzero(nprocs,2*size*sizeof(PetscInt));CHKERRQ(ierr);

  /* determine owners of each local node */
  ierr = PetscMalloc(n*sizeof(PetscInt),&owner);CHKERRQ(ierr);
  for (i=0; i<n; i++) {
    proc             = lindices[i]/scale;   /* processor that globally owns this index */
    nprocs[2*proc+1] = 1;                   /* processor globally owns at least one of ours */
    owner[i]         = proc;
    nprocs[2*proc]++;                       /* count of how many that processor globally owns of ours */
  }
  nsends = 0; for (i=0; i<size; i++) nsends += nprocs[2*i+1];

  /* inform other processors of number of messages and max length */
  ierr = PetscMaxSum(comm,nprocs,&nmax,&nrecvs);CHKERRQ(ierr);

  /* post receives for owned rows */
  ierr = PetscMalloc((2*nrecvs+1)*(nmax+1)*sizeof(PetscInt),&recvs);CHKERRQ(ierr);
  ierr = PetscMalloc((nrecvs+1)*sizeof(MPI_Request),&recv_waits);CHKERRQ(ierr);
  for (i=0; i<nrecvs; i++) {
    ierr = MPI_Irecv(recvs+2*nmax*i,2*nmax,MPIU_INT,MPI_ANY_SOURCE,tag1,comm,recv_waits+i);CHKERRQ(ierr);
  }

  /* pack messages containing lists of local nodes to owners */
  ierr      = PetscMalloc((2*n+1)*sizeof(PetscInt),&sends);CHKERRQ(ierr);
  ierr      = PetscMalloc((size+1)*sizeof(PetscInt),&starts);CHKERRQ(ierr);
  starts[0] = 0;
  for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];
  for (i=0; i<n; i++) {
    sends[starts[owner[i]]++] = lindices[i];
    sends[starts[owner[i]]++] = i;
  }
  ierr      = PetscFree(owner);CHKERRQ(ierr);
  starts[0] = 0;
  for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];

  /* send the messages */
  ierr = PetscMalloc((nsends+1)*sizeof(MPI_Request),&send_waits);CHKERRQ(ierr);
  ierr = PetscMalloc((nsends+1)*sizeof(PetscInt),&dest);CHKERRQ(ierr);
  cnt  = 0;
  for (i=0; i<size; i++) {
    if (nprocs[2*i]) {
      ierr      = MPI_Isend(sends+starts[i],2*nprocs[2*i],MPIU_INT,i,tag1,comm,send_waits+cnt);CHKERRQ(ierr);
      dest[cnt] = i;
      cnt++;
    }
  }
  ierr = PetscFree(starts);CHKERRQ(ierr);

  /* wait on receives */
  ierr = PetscMalloc((nrecvs+1)*sizeof(PetscMPIInt),&source);CHKERRQ(ierr);
  ierr = PetscMalloc((nrecvs+1)*sizeof(PetscMPIInt),&len);CHKERRQ(ierr);
  cnt  = nrecvs;
  ierr = PetscMalloc((ng+1)*sizeof(PetscInt),&nownedsenders);CHKERRQ(ierr);
  ierr = PetscMemzero(nownedsenders,ng*sizeof(PetscInt));CHKERRQ(ierr);
  while (cnt) {
    ierr = MPI_Waitany(nrecvs,recv_waits,&imdex,&recv_status);CHKERRQ(ierr);
    /* unpack receives into our local space */
    ierr          = MPI_Get_count(&recv_status,MPIU_INT,&len[imdex]);CHKERRQ(ierr);
    source[imdex] = recv_status.MPI_SOURCE;
    len[imdex]    = len[imdex]/2;
    /* count how many local owners for each of my global owned indices */
    for (i=0; i<len[imdex]; i++) nownedsenders[recvs[2*imdex*nmax+2*i]-rstart]++;
    cnt--;
  }
  ierr = PetscFree(recv_waits);CHKERRQ(ierr);

  /* count how many globally owned indices are on an edge multiplied by how many processors own them */
  nowned  = 0;
  nownedm = 0;
  for (i=0; i<ng; i++) {
    if (nownedsenders[i] > 1) {nownedm += nownedsenders[i]; nowned++;}
  }

  /* create single array to contain rank of all local owners of each globally owned index */
  ierr      = PetscMalloc((nownedm+1)*sizeof(PetscInt),&ownedsenders);CHKERRQ(ierr);
  ierr      = PetscMalloc((ng+1)*sizeof(PetscInt),&starts);CHKERRQ(ierr);
  starts[0] = 0;
  for (i=1; i<ng; i++) {
    if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
    else starts[i] = starts[i-1];
  }

  /* for each nontrivial globally owned node list all arriving processors */
  for (i=0; i<nrecvs; i++) {
    for (j=0; j<len[i]; j++) {
      node = recvs[2*i*nmax+2*j]-rstart;
      if (nownedsenders[node] > 1) {
        ownedsenders[starts[node]++] = source[i];
      }
    }
  }

  if (debug) { /* -----------------------------------  */
    starts[0] = 0;
    for (i=1; i<ng; i++) {
      if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
      else starts[i] = starts[i-1];
    }
    for (i=0; i<ng; i++) {
      if (nownedsenders[i] > 1) {
        ierr = PetscSynchronizedPrintf(comm,"[%d] global node %d local owner processors: ",rank,i+rstart);CHKERRQ(ierr);
        for (j=0; j<nownedsenders[i]; j++) {
          ierr = PetscSynchronizedPrintf(comm,"%d ",ownedsenders[starts[i]+j]);CHKERRQ(ierr);
        }
        ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
      }
    }
    ierr = PetscSynchronizedFlush(comm);CHKERRQ(ierr);
  } /* -----------------------------------  */

  /* wait on original sends */
  if (nsends) {
    ierr = PetscMalloc(nsends*sizeof(MPI_Status),&send_status);CHKERRQ(ierr);
    ierr = MPI_Waitall(nsends,send_waits,send_status);CHKERRQ(ierr);
    ierr = PetscFree(send_status);CHKERRQ(ierr);
  }
  ierr = PetscFree(send_waits);CHKERRQ(ierr);
  ierr = PetscFree(sends);CHKERRQ(ierr);

  /* pack messages to send back to local owners */
  starts[0] = 0;
  for (i=1; i<ng; i++) {
    if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
    else starts[i] = starts[i-1];
  }
  nsends2 = nrecvs;
  ierr    = PetscMalloc((nsends2+1)*sizeof(PetscInt),&nprocs);CHKERRQ(ierr); /* length of each message */
  for (i=0; i<nrecvs; i++) {
    nprocs[i] = 1;
    for (j=0; j<len[i]; j++) {
      node = recvs[2*i*nmax+2*j]-rstart;
      if (nownedsenders[node] > 1) {
        nprocs[i] += 2 + nownedsenders[node];
      }
    }
  }
  nt = 0; for (i=0; i<nsends2; i++) nt += nprocs[i];

  ierr       = PetscMalloc((nt+1)*sizeof(PetscInt),&sends2);CHKERRQ(ierr);
  ierr       = PetscMalloc((nsends2+1)*sizeof(PetscInt),&starts2);CHKERRQ(ierr);
  starts2[0] = 0;
  for (i=1; i<nsends2; i++) starts2[i] = starts2[i-1] + nprocs[i-1];
  for (i=0; i<nsends2; i++) {
    cnt                = 1;
    sends2[starts2[i]] = 0;
    for (j=0; j<len[i]; j++) {
      node = recvs[2*i*nmax+2*j]-rstart;
      if (nownedsenders[node] > 1) {
        sends2[starts2[i]]++;
        sends2[starts2[i]+cnt++] = recvs[2*i*nmax+2*j+1];
        sends2[starts2[i]+cnt++] = nownedsenders[node];
        ierr = PetscMemcpy(&sends2[starts2[i]+cnt],&ownedsenders[starts[node]],nownedsenders[node]*sizeof(PetscInt));CHKERRQ(ierr);
        cnt += nownedsenders[node];
      }
    }
  }

  /* receive the message lengths */
  nrecvs2 = nsends;
  ierr    = PetscMalloc((nrecvs2+1)*sizeof(PetscInt),&lens2);CHKERRQ(ierr);
  ierr    = PetscMalloc((nrecvs2+1)*sizeof(PetscInt),&starts3);CHKERRQ(ierr);
  ierr    = PetscMalloc((nrecvs2+1)*sizeof(MPI_Request),&recv_waits);CHKERRQ(ierr);
  for (i=0; i<nrecvs2; i++) {
    ierr = MPI_Irecv(&lens2[i],1,MPIU_INT,dest[i],tag2,comm,recv_waits+i);CHKERRQ(ierr);
  }

  /* send the message lengths */
  for (i=0; i<nsends2; i++) {
    ierr = MPI_Send(&nprocs[i],1,MPIU_INT,source[i],tag2,comm);CHKERRQ(ierr);
  }

  /* wait on receives of lens */
  if (nrecvs2) {
    ierr = PetscMalloc(nrecvs2*sizeof(MPI_Status),&recv_statuses);CHKERRQ(ierr);
    ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRQ(ierr);
    ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
  }
  ierr = PetscFree(recv_waits);CHKERRQ(ierr);

  starts3[0] = 0;
  nt         = 0;
  for (i=0; i<nrecvs2-1; i++) {
    starts3[i+1] = starts3[i] + lens2[i];
    nt          += lens2[i];
  }
  if (nrecvs2) nt += lens2[nrecvs2-1];

  /* receive the messages */
  ierr = PetscMalloc((nt+1)*sizeof(PetscInt),&recvs2);CHKERRQ(ierr);
  ierr = PetscMalloc((nrecvs2+1)*sizeof(MPI_Request),&recv_waits);CHKERRQ(ierr);
  for (i=0; i<nrecvs2; i++) {
    ierr = MPI_Irecv(recvs2+starts3[i],lens2[i],MPIU_INT,dest[i],tag3,comm,recv_waits+i);CHKERRQ(ierr);
  }

  /* send the messages */
  ierr = PetscMalloc((nsends2+1)*sizeof(MPI_Request),&send_waits);CHKERRQ(ierr);
  for (i=0; i<nsends2; i++) {
    ierr = MPI_Isend(sends2+starts2[i],nprocs[i],MPIU_INT,source[i],tag3,comm,send_waits+i);CHKERRQ(ierr);
  }

  /* wait on receives */
  if (nrecvs2) {
    ierr = PetscMalloc(nrecvs2*sizeof(MPI_Status),&recv_statuses);CHKERRQ(ierr);
    ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRQ(ierr);
    ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
  }
  ierr = PetscFree(recv_waits);CHKERRQ(ierr);
  ierr = PetscFree(nprocs);CHKERRQ(ierr);

  /* count number of subdomains for each local node */
  ierr = PetscMalloc(size*sizeof(PetscInt),&nprocs);CHKERRQ(ierr);
  ierr = PetscMemzero(nprocs,size*sizeof(PetscInt));CHKERRQ(ierr);
  cnt  = 0;
  for (i=0; i<nrecvs2; i++) {
    nt = recvs2[cnt++];
    for (j=0; j<nt; j++) {
      for (k=0; k<recvs2[cnt+1]; k++) nprocs[recvs2[cnt+2+k]]++;
      cnt += 2 + recvs2[cnt+1];
    }
  }
  nt = 0; for (i=0; i<size; i++) nt += (nprocs[i] > 0);

  *nproc = nt;
  ierr   = PetscMalloc((nt+1)*sizeof(PetscInt),procs);CHKERRQ(ierr);
  ierr   = PetscMalloc((nt+1)*sizeof(PetscInt),numprocs);CHKERRQ(ierr);
  ierr   = PetscMalloc((nt+1)*sizeof(PetscInt*),indices);CHKERRQ(ierr);
  ierr   = PetscMalloc(size*sizeof(PetscInt),&bprocs);CHKERRQ(ierr);
  cnt    = 0;
  for (i=0; i<size; i++) {
    if (nprocs[i] > 0) {
      bprocs[i]        = cnt;
      (*procs)[cnt]    = i;
      (*numprocs)[cnt] = nprocs[i];
      ierr             = PetscMalloc(nprocs[i]*sizeof(PetscInt),&(*indices)[cnt]);CHKERRQ(ierr);
      cnt++;
    }
  }

  /* make the list of subdomains for each nontrivial local node */
  ierr = PetscMemzero(*numprocs,nt*sizeof(PetscInt));CHKERRQ(ierr);
  cnt  = 0;
  for (i=0; i<nrecvs2; i++) {