Actual source code: bdiag.c
#define PETSCMAT_DLL

/* Block diagonal matrix format */

#include "src/mat/impls/bdiag/seq/bdiag.h"
#include "src/inline/ilu.h"
PetscErrorCode MatDestroy_SeqBDiag(Mat A)
{
  Mat_SeqBDiag *a = (Mat_SeqBDiag*)A->data;
  PetscInt     i,bs = A->rmap.bs;

#if defined(PETSC_USE_LOG)
  PetscLogObjectState((PetscObject)A,"Rows=%D, Cols=%D, NZ=%D, BSize=%D, NDiag=%D",A->rmap.N,A->cmap.n,a->nz,A->rmap.bs,a->nd);
#endif
  if (!a->user_alloc) { /* Free the actual diagonals */
    for (i=0; i<a->nd; i++) {
      if (a->diag[i] > 0) {
        PetscScalar *dummy = a->diagv[i] + bs*bs*a->diag[i];
        PetscFree(dummy);
      } else {
        PetscFree(a->diagv[i]);
      }
    }
  }
  PetscFree(a->pivot);
  PetscFree(a->diagv);
  PetscFree(a->diag);
  PetscFree(a->colloc);
  PetscFree(a->dvalue);
  PetscFree(a->solvework);
  PetscFree(a);

  PetscObjectChangeTypeName((PetscObject)A,0);
  PetscObjectComposeFunction((PetscObject)A,"MatSeqBDiagSetPreallocation_C","",PETSC_NULL);
  return(0);
}
PetscErrorCode MatAssemblyEnd_SeqBDiag(Mat A,MatAssemblyType mode)
{
  Mat_SeqBDiag *a = (Mat_SeqBDiag*)A->data;
  PetscInt     i,k,temp,*diag = a->diag,*bdlen = a->bdlen;
  PetscScalar  *dtemp,**dv = a->diagv;

  if (mode == MAT_FLUSH_ASSEMBLY) return(0);

  /* Sort diagonals in descending order of diagonal number */
  for (i=0; i<a->nd; i++) {
    for (k=i+1; k<a->nd; k++) {
      if (diag[i] < diag[k]) {
        temp     = diag[i];
        diag[i]  = diag[k];
        diag[k]  = temp;
        temp     = bdlen[i];
        bdlen[i] = bdlen[k];
        bdlen[k] = temp;
        dtemp    = dv[i];
        dv[i]    = dv[k];
        dv[k]    = dtemp;
      }
    }
  }

  /* Set location of main diagonal */
  for (i=0; i<a->nd; i++) {
    if (!a->diag[i]) {a->mainbd = i; break;}
  }
  PetscInfo3(A,"Number diagonals %D, memory used %D, block size %D\n",a->nd,a->maxnz,A->rmap.bs);
  return(0);
}
PetscErrorCode MatSetOption_SeqBDiag(Mat A,MatOption op)
{
  Mat_SeqBDiag *a = (Mat_SeqBDiag*)A->data;

  switch (op) {
  case MAT_NO_NEW_NONZERO_LOCATIONS:
    a->nonew = 1;
    break;
  case MAT_YES_NEW_NONZERO_LOCATIONS:
    a->nonew = 0;
    break;
  case MAT_NO_NEW_DIAGONALS:
    a->nonew_diag = 1;
    break;
  case MAT_YES_NEW_DIAGONALS:
    a->nonew_diag = 0;
    break;
  case MAT_COLUMN_ORIENTED:
    a->roworiented = PETSC_FALSE;
    break;
  case MAT_ROW_ORIENTED:
    a->roworiented = PETSC_TRUE;
    break;
  case MAT_ROWS_SORTED:
  case MAT_ROWS_UNSORTED:
  case MAT_COLUMNS_SORTED:
  case MAT_COLUMNS_UNSORTED:
  case MAT_IGNORE_OFF_PROC_ENTRIES:
  case MAT_NEW_NONZERO_LOCATION_ERR:
  case MAT_NEW_NONZERO_ALLOCATION_ERR:
  case MAT_USE_HASH_TABLE:
  case MAT_SYMMETRIC:
  case MAT_STRUCTURALLY_SYMMETRIC:
  case MAT_NOT_SYMMETRIC:
  case MAT_NOT_STRUCTURALLY_SYMMETRIC:
  case MAT_HERMITIAN:
  case MAT_NOT_HERMITIAN:
  case MAT_SYMMETRY_ETERNAL:
  case MAT_NOT_SYMMETRY_ETERNAL:
    PetscInfo1(A,"Option %s ignored\n",MatOptions[op]);
    break;
  default:
    SETERRQ1(PETSC_ERR_SUP,"unknown option %d",op);
  }
  return(0);
}
static PetscErrorCode MatGetDiagonal_SeqBDiag_N(Mat A,Vec v)
{
  Mat_SeqBDiag *a = (Mat_SeqBDiag*)A->data;
  PetscInt     i,j,n,len,ibase,bs = A->rmap.bs,iloc;
  PetscScalar  *x,*dd,zero = 0.0;

  if (A->factor) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
  VecSet(v,zero);
  VecGetLocalSize(v,&n);
  if (n != A->rmap.N) SETERRQ(PETSC_ERR_ARG_SIZ,"Nonconforming mat and vec");
  if (a->mainbd == -1) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Main diagonal not set");
  len = PetscMin(a->mblock,a->nblock);
  dd  = a->diagv[a->mainbd];
  VecGetArray(v,&x);
  for (i=0; i<len; i++) {
    ibase = i*bs*bs; iloc = i*bs;
    for (j=0; j<bs; j++) x[j + iloc] = dd[ibase + j*(bs+1)];
  }
  VecRestoreArray(v,&x);
  return(0);
}
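
/*
   Layout note (a note added for clarity, not in the original source): each
   block diagonal is stored as contiguous bs*bs dense blocks, so the (j,j)
   entry of block i sits at offset i*bs*bs + j*(bs+1).  For bs = 2, block i
   occupies dd[4i..4i+3] and its diagonal entries are dd[4i] and dd[4i+3],
   which is exactly what the loop above reads.
*/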
static PetscErrorCode MatGetDiagonal_SeqBDiag_1(Mat A,Vec v)
{
  Mat_SeqBDiag *a = (Mat_SeqBDiag*)A->data;
  PetscInt     i,n,len;
  PetscScalar  *x,*dd,zero = 0.0;

  VecSet(v,zero);
  VecGetLocalSize(v,&n);
  if (n != A->rmap.N) SETERRQ(PETSC_ERR_ARG_SIZ,"Nonconforming mat and vec");
  if (a->mainbd == -1) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Main diagonal not set");
  dd  = a->diagv[a->mainbd];
  len = PetscMin(A->rmap.n,A->cmap.n);
  VecGetArray(v,&x);
  for (i=0; i<len; i++) x[i] = dd[i];
  VecRestoreArray(v,&x);
  return(0);
}
PetscErrorCode MatZeroEntries_SeqBDiag(Mat A)
{
  Mat_SeqBDiag *a = (Mat_SeqBDiag*)A->data;
  PetscInt     d,i,len,bs = A->rmap.bs;
  PetscScalar  *dv;

  for (d=0; d<a->nd; d++) {
    dv = a->diagv[d];
    if (a->diag[d] > 0) {
      dv += bs*bs*a->diag[d];
    }
    len = a->bdlen[d]*bs*bs;
    for (i=0; i<len; i++) dv[i] = 0.0;
  }
  return(0);
}
PetscErrorCode MatZeroRows_SeqBDiag(Mat A,PetscInt N,const PetscInt rows[],PetscScalar diag)
{
  Mat_SeqBDiag *a = (Mat_SeqBDiag*)A->data;
  PetscInt     i,m = A->rmap.N - 1,nz;
  PetscScalar  *dd;
  PetscScalar  *val;

  for (i=0; i<N; i++) {
    if (rows[i]<0 || rows[i]>m) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"row out of range");
    MatGetRow_SeqBDiag(A,rows[i],&nz,PETSC_NULL,&val);
    PetscMemzero((void*)val,nz*sizeof(PetscScalar));
    MatRestoreRow_SeqBDiag(A,rows[i],&nz,PETSC_NULL,&val);
  }
  if (diag != 0.0) {
    if (a->mainbd == -1) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Main diagonal does not exist");
    dd = a->diagv[a->mainbd];
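    /* Note (added for clarity): the scalar indexing dd[rows[i]] below appears
       to assume bs == 1; for bs > 1 the diagonal entry of row r lies inside a
       bs*bs block, at offset (r/bs)*bs*bs + (r%bs)*(bs+1). */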
    for (i=0; i<N; i++) dd[rows[i]] = diag;
  }
  MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);
  return(0);
}
PetscErrorCode MatGetSubMatrix_SeqBDiag(Mat A,IS isrow,IS iscol,MatReuse scall,Mat *submat)
{
  PetscInt    nznew,*smap,i,j,oldcols = A->cmap.n;
  PetscInt    *irow,*icol,newr,newc,*cwork,nz,bs;
  PetscInt    *col;
  PetscScalar *vwork;
  PetscScalar *val;
  Mat         newmat;

  if (scall == MAT_REUSE_MATRIX) { /* no support for reuse, so simply destroy all */
    MatDestroy(*submat);
  }

  ISGetIndices(isrow,&irow);
  ISGetIndices(iscol,&icol);
  ISGetLocalSize(isrow,&newr);
  ISGetLocalSize(iscol,&newc);

  PetscMalloc((oldcols+1)*sizeof(PetscInt),&smap);
  PetscMalloc((newc+1)*sizeof(PetscInt),&cwork);
  PetscMalloc((newc+1)*sizeof(PetscScalar),&vwork);
  PetscMemzero((char*)smap,oldcols*sizeof(PetscInt));
  for (i=0; i<newc; i++) smap[icol[i]] = i+1;

  /* Determine diagonals; then create submatrix */
  bs = A->rmap.bs; /* Default block size remains the same */
  MatCreate(A->comm,&newmat);
  MatSetSizes(newmat,newr,newc,newr,newc);
  MatSetType(newmat,A->type_name);
  MatSeqBDiagSetPreallocation(newmat,0,bs,PETSC_NULL,PETSC_NULL);

  /* Fill new matrix */
  for (i=0; i<newr; i++) {
    MatGetRow_SeqBDiag(A,irow[i],&nz,&col,&val);
    nznew = 0;
    for (j=0; j<nz; j++) {
      if (smap[col[j]]) {
        cwork[nznew]   = smap[col[j]] - 1;
        vwork[nznew++] = val[j];
      }
    }
    MatSetValues(newmat,1,&i,nznew,cwork,vwork,INSERT_VALUES);
    MatRestoreRow_SeqBDiag(A,irow[i],&nz,&col,&val);
  }
  MatAssemblyBegin(newmat,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(newmat,MAT_FINAL_ASSEMBLY);

  /* Free work space */
  PetscFree(smap);
  PetscFree(cwork);
  PetscFree(vwork);
  ISRestoreIndices(isrow,&irow);
  ISRestoreIndices(iscol,&icol);
  *submat = newmat;
  return(0);
}
PetscErrorCode MatGetSubMatrices_SeqBDiag(Mat A,PetscInt n,const IS irow[],const IS icol[],MatReuse scall,Mat *B[])
{
  PetscInt i;

  if (scall == MAT_INITIAL_MATRIX) {
    PetscMalloc((n+1)*sizeof(Mat),B);
  }

  for (i=0; i<n; i++) {
    MatGetSubMatrix_SeqBDiag(A,irow[i],icol[i],scall,&(*B)[i]);
  }
  return(0);
}
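
/*
   Usage sketch (added for illustration; the index sets are caller-built, and
   nidx0/idx0/nidx1/idx1 are hypothetical): extracting two principal
   submatrices of A through the public interface.

       IS  is[2];
       Mat *submats;

       ISCreateGeneral(PETSC_COMM_SELF,nidx0,idx0,&is[0]);
       ISCreateGeneral(PETSC_COMM_SELF,nidx1,idx1,&is[1]);
       MatGetSubMatrices(A,2,is,is,MAT_INITIAL_MATRIX,&submats);
*/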
PetscErrorCode MatScale_SeqBDiag(Mat inA,PetscScalar alpha)
{
  Mat_SeqBDiag *a = (Mat_SeqBDiag*)inA->data;
  PetscInt     i,bs = inA->rmap.bs;
  PetscScalar  oalpha = alpha;
  PetscBLASInt one = 1,len;

  for (i=0; i<a->nd; i++) {
    len = (PetscBLASInt)bs*bs*a->bdlen[i];
    if (a->diag[i] > 0) {
      BLASscal_(&len,&oalpha,a->diagv[i] + bs*bs*a->diag[i],&one);
    } else {
      BLASscal_(&len,&oalpha,a->diagv[i],&one);
    }
  }
  PetscLogFlops(a->nz);
  return(0);
}
PetscErrorCode MatDiagonalScale_SeqBDiag(Mat A,Vec ll,Vec rr)
{
  Mat_SeqBDiag *a = (Mat_SeqBDiag*)A->data;
  PetscScalar  *l,*r,*dv;
  PetscInt     d,j,len;
  PetscInt     nd = a->nd,bs = A->rmap.bs,diag,m,n;

  if (ll) {
    VecGetSize(ll,&m);
    if (m != A->rmap.N) SETERRQ(PETSC_ERR_ARG_SIZ,"Left scaling vector wrong length");
    if (bs == 1) {
      VecGetArray(ll,&l);
      for (d=0; d<nd; d++) {
        dv   = a->diagv[d];
        diag = a->diag[d];
        len  = a->bdlen[d];
        if (diag > 0) for (j=0; j<len; j++) dv[j+diag] *= l[j+diag];
        else          for (j=0; j<len; j++) dv[j]      *= l[j];
      }
      VecRestoreArray(ll,&l);
      PetscLogFlops(a->nz);
    } else SETERRQ(PETSC_ERR_SUP,"Not yet done for bs>1");
  }
  if (rr) {
    VecGetSize(rr,&n);
    if (n != A->cmap.n) SETERRQ(PETSC_ERR_ARG_SIZ,"Right scaling vector wrong length");
    if (bs == 1) {
      VecGetArray(rr,&r);
      for (d=0; d<nd; d++) {
        dv   = a->diagv[d];
        diag = a->diag[d];
        len  = a->bdlen[d];
        if (diag > 0) for (j=0; j<len; j++) dv[j+diag] *= r[j];
        else          for (j=0; j<len; j++) dv[j]      *= r[j-diag];
      }
      VecRestoreArray(rr,&r);
      PetscLogFlops(a->nz);
    } else SETERRQ(PETSC_ERR_SUP,"Not yet done for bs>1");
  }
  return(0);
}
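
/*
   Index-mapping note (added for clarity, not in the original source): with
   bs == 1 and the diagv pointer shift applied at allocation time, entry j of
   diagonal d lives at row j+d, column j when d > 0, and at row j, column j-d
   when d <= 0.  The loops above therefore multiply by l[row] for left scaling
   and r[col] for right scaling, i.e. they compute diag(ll)*A*diag(rr).
*/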
static PetscErrorCode MatDuplicate_SeqBDiag(Mat,MatDuplicateOption,Mat *);

PetscErrorCode MatSetUpPreallocation_SeqBDiag(Mat A)
{
  MatSeqBDiagSetPreallocation(A,PETSC_DEFAULT,PETSC_DEFAULT,0,0);
  return(0);
}
/* -------------------------------------------------------------------*/
static struct _MatOps MatOps_Values = {MatSetValues_SeqBDiag_N,
       MatGetRow_SeqBDiag,
       MatRestoreRow_SeqBDiag,
       MatMult_SeqBDiag_N,
/* 4*/ MatMultAdd_SeqBDiag_N,
       MatMultTranspose_SeqBDiag_N,
       MatMultTransposeAdd_SeqBDiag_N,
       MatSolve_SeqBDiag_N,
       0,
       0,
/*10*/ 0,
       0,
       0,
       MatRelax_SeqBDiag_N,
       MatTranspose_SeqBDiag,
/*15*/ MatGetInfo_SeqBDiag,
       0,
       MatGetDiagonal_SeqBDiag_N,
       MatDiagonalScale_SeqBDiag,
       MatNorm_SeqBDiag,
/*20*/ 0,
       MatAssemblyEnd_SeqBDiag,
       0,
       MatSetOption_SeqBDiag,
       MatZeroEntries_SeqBDiag,
/*25*/ MatZeroRows_SeqBDiag,
       0,
       MatLUFactorNumeric_SeqBDiag_N,
       0,
       0,
/*30*/ MatSetUpPreallocation_SeqBDiag,
       MatILUFactorSymbolic_SeqBDiag,
       0,
       0,
       0,
/*35*/ MatDuplicate_SeqBDiag,
       0,
       0,
       MatILUFactor_SeqBDiag,
       0,
/*40*/ 0,
       MatGetSubMatrices_SeqBDiag,
       0,
       MatGetValues_SeqBDiag_N,
       0,
/*45*/ 0,
       MatScale_SeqBDiag,
       0,
       0,
       0,
/*50*/ 0,
       0,
       0,
       0,
       0,
/*55*/ 0,
       0,
       0,
       0,
       0,
/*60*/ 0,
       MatDestroy_SeqBDiag,
       MatView_SeqBDiag,
       0,
       0,
/*65*/ 0,
       0,
       0,
       0,
       0,
/*70*/ 0,
       0,
       0,
       0,
       0,
/*75*/ 0,
       0,
       0,
       0,
       0,
/*80*/ 0,
       0,
       0,
       0,
       MatLoad_SeqBDiag,
/*85*/ 0,
       0,
       0,
       0,
       0,
/*90*/ 0,
       0,
       0,
       0,
       0,
/*95*/ 0,
       0,
       0,
       0};
/*@C
   MatSeqBDiagSetPreallocation - Sets the nonzero structure and (optionally) the arrays.

   Collective on MPI_Comm

   Input Parameters:
+  B - the matrix
.  nd - number of block diagonals (optional)
.  bs - each element of a diagonal is a bs x bs dense matrix
.  diag - optional array of block diagonal numbers (length nd).
   For a matrix element A[i,j], where i=row and j=column, the
   diagonal number is
$     diag = i/bs - j/bs  (integer division)
   Set diag=PETSC_NULL on input for PETSc to dynamically allocate memory as
   needed (expensive).
-  diagv - pointer to actual diagonals (in same order as diag array),
   if allocated by user. Otherwise, set diagv=PETSC_NULL on input for PETSc
   to control memory allocation.

   Options Database Keys:
.  -mat_block_size <bs> - Sets blocksize
.  -mat_bdiag_diags <s1,s2,s3,...> - Sets diagonal numbers

   Notes:
   See the users manual for further details regarding this storage format.

   Fortran Note:
   Fortran programmers cannot set diagv; this value is ignored.

   Level: intermediate

.keywords: matrix, block, diagonal, sparse

.seealso: MatCreate(), MatCreateMPIBDiag(), MatSetValues()
@*/
PetscErrorCode MatSeqBDiagSetPreallocation(Mat B,PetscInt nd,PetscInt bs,const PetscInt diag[],PetscScalar *diagv[])
{
  PetscErrorCode ierr,(*f)(Mat,PetscInt,PetscInt,const PetscInt[],PetscScalar*[]);

  PetscObjectQueryFunction((PetscObject)B,"MatSeqBDiagSetPreallocation_C",(void (**)(void))&f);
  if (f) {
    (*f)(B,nd,bs,diag,diagv);
  }
  return(0);
}
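
/*
   Usage sketch (added for illustration; the sizes and diagonal numbers are
   made up): preallocating a 10x10 tridiagonal matrix with bs = 1.  By the
   diag = i/bs - j/bs convention, positive diagonal numbers lie below the
   main diagonal, so {1,0,-1} are the sub-, main, and super-diagonal.

       Mat      B;
       PetscInt diag[3] = {1,0,-1};

       MatCreate(PETSC_COMM_SELF,&B);
       MatSetSizes(B,10,10,10,10);
       MatSetType(B,MATSEQBDIAG);
       MatSeqBDiagSetPreallocation(B,3,1,diag,PETSC_NULL);
*/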
PetscErrorCode MatSeqBDiagSetPreallocation_SeqBDiag(Mat B,PetscInt nd,PetscInt bs,PetscInt *diag,PetscScalar **diagv)
{
  Mat_SeqBDiag *b;
  PetscInt     i,nda,sizetot,nd2 = 128,idiag[128];
  PetscTruth   flg1;

  B->preallocated = PETSC_TRUE;
  if (bs == PETSC_DEFAULT) bs = 1;
  if (!bs) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Blocksize cannot be zero");
  if (nd == PETSC_DEFAULT) nd = 0;
  PetscOptionsGetInt(PETSC_NULL,"-mat_block_size",&bs,PETSC_NULL);
  PetscOptionsGetIntArray(PETSC_NULL,"-mat_bdiag_diags",idiag,&nd2,&flg1);
  if (flg1) {
    diag = idiag;
    nd   = nd2;
  }

  B->rmap.bs = B->cmap.bs = bs;
  PetscMapInitialize(B->comm,&B->rmap);
  PetscMapInitialize(B->comm,&B->cmap);

  if ((B->cmap.n%bs) || (B->rmap.N%bs)) SETERRQ(PETSC_ERR_ARG_SIZ,"Invalid block size");
  if (!nd) nda = nd + 1;
  else     nda = nd;
  b = (Mat_SeqBDiag*)B->data;

  PetscOptionsHasName(PETSC_NULL,"-mat_no_unroll",&flg1);
  if (!flg1) {
    switch (bs) {
    case 1:
      B->ops->setvalues        = MatSetValues_SeqBDiag_1;
      B->ops->getvalues        = MatGetValues_SeqBDiag_1;
      B->ops->getdiagonal      = MatGetDiagonal_SeqBDiag_1;
      B->ops->mult             = MatMult_SeqBDiag_1;
      B->ops->multadd          = MatMultAdd_SeqBDiag_1;
      B->ops->multtranspose    = MatMultTranspose_SeqBDiag_1;
      B->ops->multtransposeadd = MatMultTransposeAdd_SeqBDiag_1;
      B->ops->relax            = MatRelax_SeqBDiag_1;
      B->ops->solve            = MatSolve_SeqBDiag_1;
      B->ops->lufactornumeric  = MatLUFactorNumeric_SeqBDiag_1;
      break;
    case 2:
      B->ops->mult    = MatMult_SeqBDiag_2;
      B->ops->multadd = MatMultAdd_SeqBDiag_2;
      B->ops->solve   = MatSolve_SeqBDiag_2;
      break;
    case 3:
      B->ops->mult    = MatMult_SeqBDiag_3;
      B->ops->multadd = MatMultAdd_SeqBDiag_3;
      B->ops->solve   = MatSolve_SeqBDiag_3;
      break;
    case 4:
      B->ops->mult    = MatMult_SeqBDiag_4;
      B->ops->multadd = MatMultAdd_SeqBDiag_4;
      B->ops->solve   = MatSolve_SeqBDiag_4;
      break;
    case 5:
      B->ops->mult    = MatMult_SeqBDiag_5;
      B->ops->multadd = MatMultAdd_SeqBDiag_5;
      B->ops->solve   = MatSolve_SeqBDiag_5;
      break;
    }
  }

  b->mblock  = B->rmap.N/bs;
  b->nblock  = B->cmap.n/bs;
  b->nd      = nd;
  B->rmap.bs = bs;
  b->ndim    = 0;
  b->mainbd  = -1;
  b->pivot   = 0;

  PetscMalloc(2*nda*sizeof(PetscInt),&b->diag);
  b->bdlen = b->diag + nda;
  PetscMalloc((B->cmap.n+1)*sizeof(PetscInt),&b->colloc);
  PetscMalloc(nda*sizeof(PetscScalar*),&b->diagv);
  sizetot = 0;

  if (diagv) { /* user allocated space */
    b->user_alloc = PETSC_TRUE;
    for (i=0; i<nd; i++) b->diagv[i] = diagv[i];
  } else b->user_alloc = PETSC_FALSE;

  for (i=0; i<nd; i++) {
    b->diag[i] = diag[i];
    if (diag[i] > 0) { /* lower triangular */
      b->bdlen[i] = PetscMin(b->nblock,b->mblock - diag[i]);
    } else {           /* upper triangular */
      b->bdlen[i] = PetscMin(b->mblock,b->nblock + diag[i]);
    }
    sizetot += b->bdlen[i];
  }
  sizetot  *= bs*bs;
  b->maxnz  = sizetot;
  PetscMalloc((B->cmap.n+1)*sizeof(PetscScalar),&b->dvalue);
  PetscLogObjectMemory(B,(nda*(bs+2))*sizeof(PetscInt) + bs*nda*sizeof(PetscScalar)
                         + nda*sizeof(PetscScalar*) + sizeof(Mat_SeqBDiag)
                         + sizeof(struct _p_Mat) + sizetot*sizeof(PetscScalar));

  if (!b->user_alloc) {
    for (i=0; i<nd; i++) {
      PetscMalloc(bs*bs*b->bdlen[i]*sizeof(PetscScalar),&b->diagv[i]);
      PetscMemzero(b->diagv[i],bs*bs*b->bdlen[i]*sizeof(PetscScalar));
    }
    b->nonew = 0; b->nonew_diag = 0;
  } else { /* diagonals are set on input; don't allow dynamic allocation */
    b->nonew = 1; b->nonew_diag = 1;
  }

  /* adjust diagv so one may access rows with diagv[diag][row] for all rows */
  for (i=0; i<nd; i++) {
    if (diag[i] > 0) {
      b->diagv[i] -= bs*bs*diag[i];
    }
  }

  b->nz               = b->maxnz; /* Currently not keeping track of exact count */
  b->roworiented      = PETSC_TRUE;
  B->info.nz_unneeded = (double)b->maxnz;
  return(0);
}
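
/*
   Worked example (added for clarity, not in the original source): for a
   square matrix with mblock = nblock = 4 and diag = 1, the first block
   subdiagonal holds bdlen = min(4, 4-1) = 3 blocks.  The pointer shift at the
   end of the routine then makes diagv[i] indexable by block row for the rows
   the diagonal actually touches, at the cost of a base pointer that points
   before the allocation; MatDestroy_SeqBDiag() shifts it back before freeing.
*/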
static PetscErrorCode MatDuplicate_SeqBDiag(Mat A,MatDuplicateOption cpvalues,Mat *matout)
{
  Mat_SeqBDiag *newmat,*a = (Mat_SeqBDiag*)A->data;
  PetscInt     i,len,diag,bs = A->rmap.bs;
  Mat          mat;

  MatCreate(A->comm,matout);
  MatSetSizes(*matout,A->rmap.N,A->cmap.n,A->rmap.N,A->cmap.n);
  MatSetType(*matout,A->type_name);
  MatSeqBDiagSetPreallocation(*matout,a->nd,bs,a->diag,PETSC_NULL);

  /* Copy contents of diagonals */
  mat    = *matout;
  newmat = (Mat_SeqBDiag*)mat->data;
  if (cpvalues == MAT_COPY_VALUES) {
    for (i=0; i<a->nd; i++) {
      len  = a->bdlen[i] * bs * bs * sizeof(PetscScalar);
      diag = a->diag[i];
      if (diag > 0) {
        PetscMemcpy(newmat->diagv[i]+bs*bs*diag,a->diagv[i]+bs*bs*diag,len);
      } else {
        PetscMemcpy(newmat->diagv[i],a->diagv[i],len);
      }
    }
  }
  MatAssemblyBegin(mat,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(mat,MAT_FINAL_ASSEMBLY);
  return(0);
}
PetscErrorCode MatLoad_SeqBDiag(PetscViewer viewer,MatType type,Mat *A)
{
  Mat          B;
  PetscMPIInt  size;
  int          fd;
  PetscInt     *scols,i,nz,header[4],nd = 128;
  PetscInt     bs,*rowlengths = 0,M,N,*cols,extra_rows,*diag = 0;
  PetscInt     idiag[128];
  PetscScalar  *vals,*svals;
  MPI_Comm     comm;
  PetscTruth   flg;

  PetscObjectGetComm((PetscObject)viewer,&comm);
  MPI_Comm_size(comm,&size);
  if (size > 1) SETERRQ(PETSC_ERR_ARG_SIZ,"viewer must have one processor");
  PetscViewerBinaryGetDescriptor(viewer,&fd);
  PetscBinaryRead(fd,header,4,PETSC_INT);
  if (header[0] != MAT_FILE_COOKIE) SETERRQ(PETSC_ERR_FILE_UNEXPECTED,"Not matrix object");
  M = header[1]; N = header[2]; nz = header[3];
  if (M != N) SETERRQ(PETSC_ERR_SUP,"Can only load square matrices");
  if (header[3] < 0) {
    SETERRQ(PETSC_ERR_FILE_UNEXPECTED,"Matrix stored in special format, cannot load as SeqBDiag");
  }

  /*
     This code adds extra rows to make sure the number of rows is
     divisible by the blocksize
  */
  bs = 1;
  PetscOptionsGetInt(PETSC_NULL,"-matload_block_size",&bs,PETSC_NULL);
  extra_rows = bs - M + bs*(M/bs);
  if (extra_rows == bs) extra_rows = 0;
  if (extra_rows) {
    PetscInfo(0,"Padding loaded matrix to match blocksize\n");
  }

  /* read row lengths */
  PetscMalloc((M+extra_rows)*sizeof(PetscInt),&rowlengths);
  PetscBinaryRead(fd,rowlengths,M,PETSC_INT);
  for (i=0; i<extra_rows; i++) rowlengths[M+i] = 1;

  /* load information about diagonals */
  PetscOptionsGetIntArray(PETSC_NULL,"-matload_bdiag_diags",idiag,&nd,&flg);
  if (flg) {
    diag = idiag;
  }

  /* create our matrix */
  MatCreate(comm,A);
  MatSetSizes(*A,M+extra_rows,M+extra_rows,M+extra_rows,M+extra_rows);
  MatSetType(*A,type);
  MatSeqBDiagSetPreallocation(*A,nd,bs,diag,PETSC_NULL);
  B = *A;

  /* read column indices and nonzeros */
  PetscMalloc(nz*sizeof(PetscInt),&scols);
  cols = scols;
  PetscBinaryRead(fd,cols,nz,PETSC_INT);
  PetscMalloc(nz*sizeof(PetscScalar),&svals);
  vals = svals;
  PetscBinaryRead(fd,vals,nz,PETSC_SCALAR);

  /* insert into matrix */
  for (i=0; i<M; i++) {
    MatSetValues(B,1,&i,rowlengths[i],scols,svals,INSERT_VALUES);
    scols += rowlengths[i]; svals += rowlengths[i];
  }
  vals[0] = 1.0;
  for (i=M; i<M+extra_rows; i++) {
    MatSetValues(B,1,&i,1,&i,vals,INSERT_VALUES);
  }

  PetscFree(cols);
  PetscFree(vals);
  PetscFree(rowlengths);

  MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);
  return(0);
}
/*MC
   MATSEQBDIAG - MATSEQBDIAG = "seqbdiag" - A matrix type to be used for sequential block diagonal matrices.

   Options Database Keys:
. -mat_type seqbdiag - sets the matrix type to "seqbdiag" during a call to MatSetFromOptions()

   Level: beginner

.seealso: MatCreateSeqBDiag
M*/
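
/*
   Example (added for illustration; "ex1" is a placeholder executable name):
   a code that calls MatSetFromOptions() can select this format and its
   layout entirely from the command line, e.g.

       ./ex1 -mat_type seqbdiag -mat_block_size 2 -mat_bdiag_diags 1,0,-1
*/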
PetscErrorCode MatCreate_SeqBDiag(Mat B)
{
  Mat_SeqBDiag *b;
  PetscMPIInt  size;

  MPI_Comm_size(B->comm,&size);
  if (size > 1) SETERRQ(PETSC_ERR_ARG_WRONG,"Comm must be of size 1");

  PetscNew(Mat_SeqBDiag,&b);
  B->data = (void*)b;
  PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));
  B->factor  = 0;
  B->mapping = 0;

  b->ndim   = 0;
  b->mainbd = -1;
  b->pivot  = 0;

  b->roworiented = PETSC_TRUE;
  PetscObjectComposeFunctionDynamic((PetscObject)B,"MatSeqBDiagSetPreallocation_C",
                                    "MatSeqBDiagSetPreallocation_SeqBDiag",
                                    MatSeqBDiagSetPreallocation_SeqBDiag);

  PetscObjectChangeTypeName((PetscObject)B,MATSEQBDIAG);
  return(0);
}
/*@C
   MatCreateSeqBDiag - Creates a sequential block diagonal matrix.

   Collective on MPI_Comm

   Input Parameters:
+  comm - MPI communicator, set to PETSC_COMM_SELF
.  m - number of rows
.  n - number of columns
.  nd - number of block diagonals (optional)
.  bs - each element of a diagonal is a bs x bs dense matrix
.  diag - optional array of block diagonal numbers (length nd).
   For a matrix element A[i,j], where i=row and j=column, the
   diagonal number is
$     diag = i/bs - j/bs  (integer division)
   Set diag=PETSC_NULL on input for PETSc to dynamically allocate memory as
   needed (expensive).
-  diagv - pointer to actual diagonals (in same order as diag array),
   if allocated by user. Otherwise, set diagv=PETSC_NULL on input for PETSc
   to control memory allocation.

   Output Parameter:
.  A - the matrix

   Options Database Keys:
.  -mat_block_size <bs> - Sets blocksize
.  -mat_bdiag_diags <s1,s2,s3,...> - Sets diagonal numbers

   Notes:
   See the users manual for further details regarding this storage format.

   Fortran Note:
   Fortran programmers cannot set diagv; this value is ignored.

   Level: intermediate

.keywords: matrix, block, diagonal, sparse

.seealso: MatCreate(), MatCreateMPIBDiag(), MatSetValues()
@*/
PetscErrorCode MatCreateSeqBDiag(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt nd,PetscInt bs,const PetscInt diag[],PetscScalar *diagv[],Mat *A)
{
  MatCreate(comm,A);
  MatSetSizes(*A,m,n,m,n);
  MatSetType(*A,MATSEQBDIAG);
  MatSeqBDiagSetPreallocation(*A,nd,bs,diag,diagv);
  return(0);
}
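
/*
   Usage sketch (added for illustration; the sizes are made up): the one-call
   equivalent of the MatCreate()/MatSetType()/MatSeqBDiagSetPreallocation()
   sequence above, for a 10x10 tridiagonal matrix with PETSc-managed storage:

       Mat      A;
       PetscInt diag[3] = {1,0,-1};

       MatCreateSeqBDiag(PETSC_COMM_SELF,10,10,3,1,diag,PETSC_NULL,&A);
*/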