Actual source code: pmetis.c
#define PETSCMAT_DLL

#include "src/mat/impls/adj/mpi/mpiadj.h"
/*
   Currently using ParMetis-2.0. The following include file has
   to be changed to par_kmetis.h for ParMetis-1.0
*/
#include "parmetis.h"
/*
   The first 5 elements of this structure are the input control array to Metis
*/
typedef struct {
  int      cuts;        /* number of cuts made (output) */
  int      foldfactor;  /* folding factor */
  int      parallel;    /* use parallel partitioner for coarse problem */
  int      indexing;    /* 0 indicates C indexing, 1 Fortran */
  int      printout;    /* indicates if one wishes Metis to print info */
  MPI_Comm comm_pmetis; /* duplicate of the partitioning communicator, handed to ParMETIS */
} MatPartitioning_Parmetis;
/*
   Uses the ParMETIS parallel matrix partitioner to partition the matrix in parallel
*/
static PetscErrorCode MatPartitioningApply_Parmetis(MatPartitioning part,IS *partitioning)
{
  MatPartitioning_Parmetis *parmetis = (MatPartitioning_Parmetis*)part->data;
  PetscErrorCode           ierr;
  int                      *locals,size,rank;
  int                      *vtxdist,*xadj,*adjncy,itmp = 0;
  int                      wgtflag=0, numflag=0, ncon=1, nparts=part->n, options[3], i,j;
  Mat                      mat = part->adj,newmat;
  Mat_MPIAdj               *adj = (Mat_MPIAdj *)mat->data;
  PetscTruth               flg;
  float                    *tpwgts,*ubvec;
  MPI_Comm_size(mat->comm,&size);

  PetscTypeCompare((PetscObject)mat,MATMPIADJ,&flg);
  if (!flg) {
    MatConvert(mat,MATMPIADJ,MAT_INITIAL_MATRIX,&newmat);
    adj = (Mat_MPIAdj *)newmat->data;
  }

  vtxdist = mat->rmap.range;
  xadj    = adj->i;
  adjncy  = adj->j;
  MPI_Comm_rank(part->comm,&rank);
  if (!(vtxdist[rank+1] - vtxdist[rank])) {
    SETERRQ(PETSC_ERR_LIB,"ParMETIS does not support a processor with no local rows");
  }
#if defined(PETSC_USE_DEBUG)
  /* check that matrix has no diagonal entries */
  {
    int rstart;
    MatGetOwnershipRange(mat,&rstart,PETSC_NULL);
    for (i=0; i<mat->rmap.n; i++) {
      for (j=xadj[i]; j<xadj[i+1]; j++) {
        if (adjncy[j] == i+rstart) SETERRQ1(PETSC_ERR_ARG_WRONG,"Row %d has diagonal entry; Parmetis forbids diagonal entry",i+rstart);
      }
    }
  }
#endif
  PetscMalloc((mat->rmap.n+1)*sizeof(int),&locals);

  if (PetscLogPrintInfo) {itmp = parmetis->printout; parmetis->printout = 127;}
  /* target partition weights: user-supplied, or uniform 1/nparts */
  PetscMalloc(ncon*nparts*sizeof(float),&tpwgts);
  for (i=0; i<ncon; i++) {
    for (j=0; j<nparts; j++) {
      if (part->part_weights) {
        tpwgts[i*nparts+j] = part->part_weights[i*nparts+j];
      } else {
        tpwgts[i*nparts+j] = 1./nparts;
      }
    }
  }

  /* allowed load imbalance of 5% for each of the ncon vertex weights */
  PetscMalloc(ncon*sizeof(float),&ubvec);
  for (i=0; i<ncon; i++) {
    ubvec[i] = 1.05;
  }
  options[0] = 0;   /* 0 tells ParMETIS to use its default options */
  /* ParMETIS has no error conditions ??? */
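  /*
     Arguments to ParMETIS_V3_PartKway, in order: vertex distribution across processes
     (vtxdist), local CSR adjacency (xadj, adjncy), vertex and edge weights, the weight
     and numbering flags, number of weights per vertex (ncon), number of parts, target
     partition weights (tpwgts), per-constraint imbalance tolerance (ubvec), options
     array, then the outputs (edge-cut count and local partition assignment) and the
     communicator to partition over.
  */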
  ParMETIS_V3_PartKway(vtxdist,xadj,adjncy,part->vertex_weights,adj->values,&wgtflag,&numflag,&ncon,&nparts,tpwgts,ubvec,
                       options,&parmetis->cuts,locals,&parmetis->comm_pmetis);
  PetscFree(tpwgts);
  PetscFree(ubvec);
  if (PetscLogPrintInfo) {parmetis->printout = itmp;}

  ISCreateGeneral(part->comm,mat->rmap.n,locals,partitioning);
  PetscFree(locals);

  if (!flg) {
    MatDestroy(newmat);
  }
  return(0);
}
PetscErrorCode MatPartitioningView_Parmetis(MatPartitioning part,PetscViewer viewer)
{
  MatPartitioning_Parmetis *parmetis = (MatPartitioning_Parmetis *)part->data;
  int                      rank;
  PetscTruth               iascii;

  MPI_Comm_rank(part->comm,&rank);
  PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&iascii);
  if (iascii) {
    if (parmetis->parallel == 2) {
      PetscViewerASCIIPrintf(viewer," Using parallel coarse grid partitioner\n");
    } else {
      PetscViewerASCIIPrintf(viewer," Using sequential coarse grid partitioner\n");
    }
    PetscViewerASCIIPrintf(viewer," Using %d fold factor\n",parmetis->foldfactor);
    PetscViewerASCIISynchronizedPrintf(viewer," [%d]Number of cuts found %d\n",rank,parmetis->cuts);
    PetscViewerFlush(viewer);
  } else {
    SETERRQ1(PETSC_ERR_SUP,"Viewer type %s not supported for this Parmetis partitioner",((PetscObject)viewer)->type_name);
  }
  return(0);
}
/*@
   MatPartitioningParmetisSetCoarseSequential - Use the sequential code to
   do the partitioning of the coarse grid.

   Collective on MatPartitioning

   Input Parameter:
.  part - the partitioning context

   Level: advanced

@*/
PetscErrorCode MatPartitioningParmetisSetCoarseSequential(MatPartitioning part)
{
  MatPartitioning_Parmetis *parmetis = (MatPartitioning_Parmetis *)part->data;

  parmetis->parallel = 1;
  return(0);
}
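/*
   Illustrative usage sketch (not part of this file): the coarse-grid partitioner can be
   forced to run sequentially either by calling the routine above directly,

     MatPartitioning part;
     MatPartitioningCreate(PETSC_COMM_WORLD,&part);
     MatPartitioningSetType(part,MAT_PARTITIONING_PARMETIS);
     MatPartitioningParmetisSetCoarseSequential(part);

   or through the options database (see MatPartitioningSetFromOptions_Parmetis below).
*/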
PetscErrorCode MatPartitioningSetFromOptions_Parmetis(MatPartitioning part)
{
  PetscTruth flag;

  PetscOptionsHead("Set ParMeTiS partitioning options");
  PetscOptionsName("-mat_partitioning_parmetis_coarse_sequential","Use sequential coarse partitioner","MatPartitioningParmetisSetCoarseSequential",&flag);
  if (flag) {
    MatPartitioningParmetisSetCoarseSequential(part);
  }
  PetscOptionsTail();
  return(0);
}
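/*
   Illustrative: when the calling code invokes MatPartitioningSetFromOptions(), the
   sequential coarse partitioner can be requested from the command line, e.g.

     ./myapp -mat_partitioning_type parmetis -mat_partitioning_parmetis_coarse_sequential

   where "myapp" stands in for the user's executable.
*/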
PetscErrorCode MatPartitioningDestroy_Parmetis(MatPartitioning part)
{
  MatPartitioning_Parmetis *parmetis = (MatPartitioning_Parmetis *)part->data;

  MPI_Comm_free(&(parmetis->comm_pmetis));
  PetscFree(parmetis);
  return(0);
}
/*@C
   MAT_PARTITIONING_PARMETIS - Creates a partitioning context via the external package PARMETIS.

   Collective on MPI_Comm

   Input Parameter:
.  part - the partitioning context

   Options Database Keys:
.  -mat_partitioning_parmetis_coarse_sequential - use sequential PARMETIS coarse partitioner

   Level: beginner

   Notes: See http://www-users.cs.umn.edu/~karypis/metis/

.keywords: Partitioning, create, context

.seealso: MatPartitioningSetType(), MatPartitioningType
@*/
PetscErrorCode MatPartitioningCreate_Parmetis(MatPartitioning part)
{
  MatPartitioning_Parmetis *parmetis;

  PetscNew(MatPartitioning_Parmetis,&parmetis);

  parmetis->cuts       = 0;   /* output variable */
  parmetis->foldfactor = 150; /* folding factor */
  parmetis->parallel   = 2;   /* use parallel partitioner for coarse grid */
  parmetis->indexing   = 0;   /* index numbering starts from 0 */
  parmetis->printout   = 0;   /* print no output while running */

  MPI_Comm_dup(part->comm,&(parmetis->comm_pmetis));

  part->ops->apply          = MatPartitioningApply_Parmetis;
  part->ops->view           = MatPartitioningView_Parmetis;
  part->ops->destroy        = MatPartitioningDestroy_Parmetis;
  part->ops->setfromoptions = MatPartitioningSetFromOptions_Parmetis;
  part->data                = (void*)parmetis;
  return(0);
}
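A minimal usage sketch (not part of pmetis.c), assuming a distributed adjacency
matrix A of type MATMPIADJ already exists; it shows how this partitioner is
typically selected and driven through the generic MatPartitioning interface:

  MatPartitioning part;
  IS              is;

  MatPartitioningCreate(PETSC_COMM_WORLD,&part);
  MatPartitioningSetAdjacency(part,A);                      /* A is the assumed MATMPIADJ adjacency matrix */
  MatPartitioningSetType(part,MAT_PARTITIONING_PARMETIS);
  MatPartitioningSetFromOptions(part);
  MatPartitioningApply(part,&is);                           /* is[i] = target process of local row i */
  ISView(is,PETSC_VIEWER_STDOUT_WORLD);
  ISDestroy(is);
  MatPartitioningDestroy(part);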