Actual source code: mpiadj.c

  1: #define PETSCMAT_DLL

  3: /*
  4:     Defines the basic matrix operations for the ADJ adjacency list matrix data-structure. 
  5: */
 6:  #include "src/mat/impls/adj/mpi/mpiadj.h"
 7:  #include "petscsys.h"
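
/*
    An illustrative sketch of the storage used by Mat_MPIAdj: a->i holds, for each
    local row, the start of that row's entries in a->j, and a->j holds the global
    column (neighbor) indices; diagonal entries are not stored.  The 4-vertex graph
    below is hypothetical and used only to show the layout.  With edges 0-1, 0-2,
    1-3, 2-3 on a single process the arrays would be

        i[] = {0, 2, 4, 6, 8}                  row pointers (length m+1)
        j[] = {1, 2,  0, 3,  0, 3,  1, 2}      neighbors of rows 0, 1, 2, 3
*/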

 11: PetscErrorCode MatView_MPIAdj_ASCII(Mat A,PetscViewer viewer)
 12: {
 13:   Mat_MPIAdj        *a = (Mat_MPIAdj*)A->data;
 14:   PetscErrorCode    ierr;
 15:   PetscInt          i,j,m = A->rmap.n;
 16:   const char        *name;
 17:   PetscViewerFormat format;

 20:   PetscObjectGetName((PetscObject)A,&name);
 21:   PetscViewerGetFormat(viewer,&format);
 22:   if (format == PETSC_VIEWER_ASCII_INFO) {
 23:     return(0);
 24:   } else if (format == PETSC_VIEWER_ASCII_MATLAB) {
 25:     SETERRQ(PETSC_ERR_SUP,"Matlab format not supported");
 26:   } else {
 27:     PetscViewerASCIIUseTabs(viewer,PETSC_NO);
 28:     for (i=0; i<m; i++) {
 29:       PetscViewerASCIISynchronizedPrintf(viewer,"row %D:",i+A->rmap.rstart);
 30:       for (j=a->i[i]; j<a->i[i+1]; j++) {
 31:         PetscViewerASCIISynchronizedPrintf(viewer," %D ",a->j[j]);
 32:       }
 33:       PetscViewerASCIISynchronizedPrintf(viewer,"\n");
 34:     }
 35:     PetscViewerASCIIUseTabs(viewer,PETSC_YES);
 36:   }
 37:   PetscViewerFlush(viewer);
 38:   return(0);
 39: }

 43: PetscErrorCode MatView_MPIAdj(Mat A,PetscViewer viewer)
 44: {
 46:   PetscTruth     iascii;

 49:   PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&iascii);
 50:   if (iascii) {
 51:     MatView_MPIAdj_ASCII(A,viewer);
 52:   } else {
 53:     SETERRQ1(PETSC_ERR_SUP,"Viewer type %s not supported by MPIAdj",((PetscObject)viewer)->type_name);
 54:   }
 55:   return(0);
 56: }

 60: PetscErrorCode MatDestroy_MPIAdj(Mat mat)
 61: {
 62:   Mat_MPIAdj     *a = (Mat_MPIAdj*)mat->data;

 66: #if defined(PETSC_USE_LOG)
 67:   PetscLogObjectState((PetscObject)mat,"Rows=%D, Cols=%D, NZ=%D",mat->rmap.n,mat->cmap.n,a->nz);
 68: #endif
 69:   PetscFree(a->diag);
 70:   if (a->freeaij) {
 71:     PetscFree(a->i);
 72:     PetscFree(a->j);
 73:     PetscFree(a->values);
 74:   }
 75:   PetscFree(a);
 76:   PetscObjectChangeTypeName((PetscObject)mat,0);
 77:   PetscObjectComposeFunction((PetscObject)mat,"MatMPIAdjSetPreallocation_C","",PETSC_NULL);
 78:   return(0);
 79: }

 83: PetscErrorCode MatSetOption_MPIAdj(Mat A,MatOption op)
 84: {
 85:   Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;

 89:   switch (op) {
 90:   case MAT_SYMMETRIC:
 91:   case MAT_STRUCTURALLY_SYMMETRIC:
 92:   case MAT_HERMITIAN:
 93:     a->symmetric = PETSC_TRUE;
 94:     break;
 95:   case MAT_NOT_SYMMETRIC:
 96:   case MAT_NOT_STRUCTURALLY_SYMMETRIC:
 97:   case MAT_NOT_HERMITIAN:
 98:     a->symmetric = PETSC_FALSE;
 99:     break;
100:   case MAT_SYMMETRY_ETERNAL:
101:   case MAT_NOT_SYMMETRY_ETERNAL:
102:     break;
103:   default:
104:     PetscInfo1(A,"Option %s ignored\n",MatOptions[op]);
105:     break;
106:   }
107:   return(0);
108: }


111: /*
112:      Adds diagonal pointers to sparse matrix structure.
113: */

117: PetscErrorCode MatMarkDiagonal_MPIAdj(Mat A)
118: {
119:   Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
121:   PetscInt       i,j,m = A->rmap.n;

124:   PetscMalloc(m*sizeof(PetscInt),&a->diag);
125:   PetscLogObjectMemory(A,m*sizeof(PetscInt));
126:   for (i=0; i<A->rmap.n; i++) {
127:     for (j=a->i[i]; j<a->i[i+1]; j++) {
128:       if (a->j[j] == i) {
129:         a->diag[i] = j;
130:         break;
131:       }
132:     }
133:   }
134:   return(0);
135: }

139: PetscErrorCode MatGetRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
140: {
141:   Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
142:   PetscInt   *itmp;

145:   row -= A->rmap.rstart;

147:   if (row < 0 || row >= A->rmap.n) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Row out of range");

149:   *nz = a->i[row+1] - a->i[row];
150:   if (v) *v = PETSC_NULL;
151:   if (idx) {
152:     itmp = a->j + a->i[row];
153:     if (*nz) {
154:       *idx = itmp;
155:     }
156:     else *idx = 0;
157:   }
158:   return(0);
159: }

163: PetscErrorCode MatRestoreRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
164: {
166:   return(0);
167: }

171: PetscErrorCode MatEqual_MPIAdj(Mat A,Mat B,PetscTruth* flg)
172: {
173:   Mat_MPIAdj     *a = (Mat_MPIAdj *)A->data,*b = (Mat_MPIAdj *)B->data;
175:   PetscTruth     flag;

178:   /* the matrices differ if the local dimensions or the number of nonzeros differ */
179:   if ((A->rmap.n != B->rmap.n) || (a->nz != b->nz)) {
180:     flag = PETSC_FALSE;
181:   } else {
182:     /* otherwise compare the row pointers a->i ... */
183:     PetscMemcmp(a->i,b->i,(A->rmap.n+1)*sizeof(PetscInt),&flag);
184:     /* ... and, only if those match, the column indices a->j */
185:     if (flag) {
186:       PetscMemcmp(a->j,b->j,(a->nz)*sizeof(PetscInt),&flag);
187:     }
188:   }

189:   MPI_Allreduce(&flag,flg,1,MPI_INT,MPI_LAND,A->comm);
190:   return(0);
191: }

195: PetscErrorCode MatGetRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscTruth symmetric,PetscInt *m,PetscInt *ia[],PetscInt *ja[],PetscTruth *done)
196: {
198:   PetscMPIInt    size;
199:   PetscInt       i;
200:   Mat_MPIAdj     *a = (Mat_MPIAdj *)A->data;

203:   MPI_Comm_size(A->comm,&size);
204:   if (size > 1) {*done = PETSC_FALSE; return(0);}
205:   *m    = A->rmap.n;
206:   *ia   = a->i;
207:   *ja   = a->j;
208:   *done = PETSC_TRUE;
209:   if (oshift) {
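    /* the caller requested a nonzero index shift: convert ia and ja to 1-based (Fortran-style) indexing */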
210:     for (i=0; i<(*ia)[*m]; i++) {
211:       (*ja)[i]++;
212:     }
213:     for (i=0; i<=(*m); i++) (*ia)[i]++;
214:   }
215:   return(0);
216: }

220: PetscErrorCode MatRestoreRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscTruth symmetric,PetscInt *m,PetscInt *ia[],PetscInt *ja[],PetscTruth *done)
221: {
222:   PetscInt   i;
223:   Mat_MPIAdj *a = (Mat_MPIAdj *)A->data;

226:   if (ia && a->i != *ia) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"ia passed back is not one obtained with MatGetRowIJ()");
227:   if (ja && a->j != *ja) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"ja passed back is not one obtained with MatGetRowIJ()");
228:   if (oshift) {
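    /* undo the index shift applied in MatGetRowIJ_MPIAdj() */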
229:     for (i=0; i<=(*m); i++) (*ia)[i]--;
230:     for (i=0; i<(*ia)[*m]; i++) {
231:       (*ja)[i]--;
232:     }
233:   }
234:   return(0);
235: }

239: PetscErrorCode  MatConvertFrom_MPIAdj(Mat A,MatType type,MatReuse reuse,Mat *newmat)
240: {
241:   Mat               B;
242:   PetscErrorCode    ierr;
243:   PetscInt          i,m,N,nzeros = 0,*ia,*ja,len,rstart,cnt,j,*a;
244:   const PetscInt    *rj;
245:   const PetscScalar *ra;
246:   MPI_Comm          comm;

249:   MatGetSize(A,PETSC_NULL,&N);
250:   MatGetLocalSize(A,&m,PETSC_NULL);
251:   MatGetOwnershipRange(A,&rstart,PETSC_NULL);
252: 
253:   /* count the number of nonzeros per row */
254:   for (i=0; i<m; i++) {
255:     MatGetRow(A,i+rstart,&len,&rj,PETSC_NULL);
256:     for (j=0; j<len; j++) {
257:       if (rj[j] == i+rstart) {len--; break;}    /* don't count diagonal */
258:     }
259:     MatRestoreRow(A,i+rstart,&len,&rj,PETSC_NULL);
260:     nzeros += len;
261:   }

263:   /* malloc space for nonzeros */
264:   PetscMalloc((nzeros+1)*sizeof(PetscInt),&a);
265:   PetscMalloc((N+1)*sizeof(PetscInt),&ia);
266:   PetscMalloc((nzeros+1)*sizeof(PetscInt),&ja);

268:   nzeros = 0;
269:   ia[0]  = 0;
270:   for (i=0; i<m; i++) {
271:     MatGetRow(A,i+rstart,&len,&rj,&ra);
272:     cnt     = 0;
273:     for (j=0; j<len; j++) {
274:       if (rj[j] != i+rstart) { /* if not diagonal */
275:         a[nzeros+cnt]    = (PetscInt) PetscAbsScalar(ra[j]); /* use the magnitude of the entry as an integer edge weight */
276:         ja[nzeros+cnt++] = rj[j];
277:       }
278:     }
279:     MatRestoreRow(A,i+rstart,&len,&rj,&ra);
280:     nzeros += cnt;
281:     ia[i+1] = nzeros;
282:   }

284:   PetscObjectGetComm((PetscObject)A,&comm);
285:   MatCreate(comm,&B);
286:   MatSetSizes(B,m,N,PETSC_DETERMINE,N);
287:   MatSetType(B,type);
288:   MatMPIAdjSetPreallocation(B,ia,ja,a);

290:   if (reuse == MAT_REUSE_MATRIX) {
291:     MatHeaderReplace(A,B);
292:   } else {
293:     *newmat = B;
294:   }
295:   return(0);
296: }

298: /* -------------------------------------------------------------------*/
299: static struct _MatOps MatOps_Values = {0,
300:        MatGetRow_MPIAdj,
301:        MatRestoreRow_MPIAdj,
302:        0,
303: /* 4*/ 0,
304:        0,
305:        0,
306:        0,
307:        0,
308:        0,
309: /*10*/ 0,
310:        0,
311:        0,
312:        0,
313:        0,
314: /*15*/ 0,
315:        MatEqual_MPIAdj,
316:        0,
317:        0,
318:        0,
319: /*20*/ 0,
320:        0,
321:        0,
322:        MatSetOption_MPIAdj,
323:        0,
324: /*25*/ 0,
325:        0,
326:        0,
327:        0,
328:        0,
329: /*30*/ 0,
330:        0,
331:        0,
332:        0,
333:        0,
334: /*35*/ 0,
335:        0,
336:        0,
337:        0,
338:        0,
339: /*40*/ 0,
340:        0,
341:        0,
342:        0,
343:        0,
344: /*45*/ 0,
345:        0,
346:        0,
347:        0,
348:        0,
349: /*50*/ 0,
350:        MatGetRowIJ_MPIAdj,
351:        MatRestoreRowIJ_MPIAdj,
352:        0,
353:        0,
354: /*55*/ 0,
355:        0,
356:        0,
357:        0,
358:        0,
359: /*60*/ 0,
360:        MatDestroy_MPIAdj,
361:        MatView_MPIAdj,
362:        MatConvertFrom_MPIAdj,
363:        0,
364: /*65*/ 0,
365:        0,
366:        0,
367:        0,
368:        0,
369: /*70*/ 0,
370:        0,
371:        0,
372:        0,
373:        0,
374: /*75*/ 0,
375:        0,
376:        0,
377:        0,
378:        0,
379: /*80*/ 0,
380:        0,
381:        0,
382:        0,
383:        0,
384: /*85*/ 0,
385:        0,
386:        0,
387:        0,
388:        0,
389: /*90*/ 0,
390:        0,
391:        0,
392:        0,
393:        0,
394: /*95*/ 0,
395:        0,
396:        0,
397:        0};

402: PetscErrorCode  MatMPIAdjSetPreallocation_MPIAdj(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
403: {
404:   Mat_MPIAdj     *b = (Mat_MPIAdj *)B->data;
406: #if defined(PETSC_USE_DEBUG)
407:   PetscInt       ii;
408: #endif

411:   B->preallocated = PETSC_TRUE;
412: #if defined(PETSC_USE_DEBUG)
413:   if (i[0] != 0) SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"First i[] index must be zero, instead it is %D\n",i[0]);
414:   for (ii=1; ii<B->rmap.n; ii++) {
415:     if (i[ii] < 0 || i[ii] < i[ii-1]) {
416:       SETERRQ4(PETSC_ERR_ARG_OUTOFRANGE,"i[%D]=%D index is out of range: i[%D]=%D",ii,i[ii],ii-1,i[ii-1]);
417:     }
418:   }
419:   for (ii=0; ii<i[B->rmap.n]; ii++) {
420:     if (j[ii] < 0 || j[ii] >= B->cmap.N) {
421:       SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Column index j[%D]=%D is out of range\n",ii,j[ii]);
422:     }
423:   }
424: #endif

426:   b->j      = j;
427:   b->i      = i;
428:   b->values = values;

430:   b->nz               = i[B->rmap.n];
431:   b->diag             = 0;
432:   b->symmetric        = PETSC_FALSE;
433:   b->freeaij          = PETSC_TRUE;

435:   MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);
436:   MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);
437:   return(0);
438: }

441: /*MC
442:    MATMPIADJ - MATMPIADJ = "mpiadj" - A matrix type to be used for distributed adjacency matrices,
443:    intended for use in constructing orderings and partitionings.

445:   Level: beginner

447: .seealso: MatCreateMPIAdj()
448: M*/
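
/*
    A minimal usage sketch: a MATMPIADJ matrix is typically handed to the partitioning
    interface.  The names adj, part and is below are illustrative; adj is assumed to
    have been created with MatCreateMPIAdj() or by MatConvert() to MATMPIADJ, and comm
    is the communicator adj lives on.

        MatPartitioning part;
        IS              is;
        MatPartitioningCreate(comm,&part);
        MatPartitioningSetAdjacency(part,adj);
        MatPartitioningSetFromOptions(part);
        MatPartitioningApply(part,&is);
        ISDestroy(is);
        MatPartitioningDestroy(part);

    The resulting index set lists, for each local row, the process it is assigned to.
*/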

453: PetscErrorCode  MatCreate_MPIAdj(Mat B)
454: {
455:   Mat_MPIAdj     *b;
457:   PetscMPIInt    size,rank;

460:   MPI_Comm_size(B->comm,&size);
461:   MPI_Comm_rank(B->comm,&rank);

463:   PetscNew(Mat_MPIAdj,&b);
464:   B->data             = (void*)b;
465:   PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));
466:   B->factor           = 0;
467:   B->mapping          = 0;
468:   B->assembled        = PETSC_FALSE;
469: 
470:   PetscMapInitialize(B->comm,&B->rmap);
471:   if (B->cmap.n < 0) B->cmap.n = B->cmap.N;
472:   if (B->cmap.N < 0) B->cmap.N = B->cmap.n;

474:   PetscObjectComposeFunctionDynamic((PetscObject)B,"MatMPIAdjSetPreallocation_C",
475:                                     "MatMPIAdjSetPreallocation_MPIAdj",
476:                                      MatMPIAdjSetPreallocation_MPIAdj);
477:   PetscObjectChangeTypeName((PetscObject)B,MATMPIADJ);
478:   return(0);
479: }

484: /*@C
485:    MatMPIAdjSetPreallocation - Sets the arrays used to store the nonzero structure (and optional edge weights) of the matrix

487:    Collective on MPI_Comm

489:    Input Parameters:
490: +  B - the matrix
491: .  i - the indices into j for the start of each row
492: .  j - the column indices for each row (sorted for each row).
493:        The indices in i and j start with zero (NOT with one).
494: -  values - [optional] edge weights
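
   Example: an illustrative sketch (the graph is hypothetical); on one process, a
   4-vertex graph with edges 0-1, 0-2, 1-3, 2-3 and no edge weights could be set with
.vb
      PetscInt *ii,*jj;
      PetscMalloc(5*sizeof(PetscInt),&ii);
      PetscMalloc(8*sizeof(PetscInt),&jj);
      ii[0] = 0; ii[1] = 2; ii[2] = 4; ii[3] = 6; ii[4] = 8;
      jj[0] = 1; jj[1] = 2;   jj[2] = 0; jj[3] = 3;
      jj[4] = 0; jj[5] = 3;   jj[6] = 1; jj[7] = 2;
      MatMPIAdjSetPreallocation(B,ii,jj,PETSC_NULL);
.ve
   The arrays ii and jj must be obtained with PetscMalloc(), since the matrix takes
   ownership of them and frees them when it is destroyed.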

496:    Level: intermediate

498: .seealso: MatCreate(), MatCreateMPIAdj(), MatSetValues()
499: @*/
500: PetscErrorCode  MatMPIAdjSetPreallocation(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
501: {
502:   PetscErrorCode ierr,(*f)(Mat,PetscInt*,PetscInt*,PetscInt*);

505:   PetscObjectQueryFunction((PetscObject)B,"MatMPIAdjSetPreallocation_C",(void (**)(void))&f);
506:   if (f) {
507:     (*f)(B,i,j,values);
508:   }
509:   return(0);
510: }

514: /*@C
515:    MatCreateMPIAdj - Creates a sparse matrix representing an adjacency list.
516:    The matrix does not have numerical values associated with it, but is
517:    intended for ordering (to reduce bandwidth, etc.) and partitioning.

519:    Collective on MPI_Comm

521:    Input Parameters:
522: +  comm - MPI communicator
523: .  m - number of local rows
524: .  n - number of columns
525: .  i - the indices into j for the start of each row
526: .  j - the column indices for each row (sorted for each row).
527:        The indices in i and j start with zero (NOT with one).
528: -  values -[optional] edge weights

530:    Output Parameter:
531: .  A - the matrix 

533:    Level: intermediate

535:    Notes: This matrix object does not support most matrix operations, including
536:    MatSetValues().
537:    You must NOT free the i, j, and values arrays yourself. PETSc will free them
538:    when the matrix is destroyed; you must allocate them with PetscMalloc(). If you
539:    call from Fortran you need not create the arrays with PetscMalloc().
540:    The arrays should not include the matrix diagonal entries.

542:    If you already have a matrix, you can create its adjacency matrix by a call
543:    to MatConvert, specifying a type of MATMPIADJ.
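   For example, with an existing parallel matrix A (the name is illustrative), something
   like
.vb
      Mat adj;
      MatConvert(A,MATMPIADJ,MAT_INITIAL_MATRIX,&adj);
.ve
   produces the adjacency structure of A with the diagonal entries dropped and integer
   edge weights taken from the magnitudes of the entries.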

545:    Possible values for MatSetOption() - MAT_STRUCTURALLY_SYMMETRIC

547: .seealso: MatCreate(), MatConvert(), MatGetOrdering()
548: @*/
549: PetscErrorCode  MatCreateMPIAdj(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt *i,PetscInt *j,PetscInt *values,Mat *A)
550: {

554:   MatCreate(comm,A);
555:   MatSetSizes(*A,m,n,PETSC_DETERMINE,n);
556:   MatSetType(*A,MATMPIADJ);
557:   MatMPIAdjSetPreallocation(*A,i,j,values);
558:   return(0);
559: }