Actual source code: bdiag.c
#define PETSCMAT_DLL

/* Block diagonal matrix format */

#include "src/mat/impls/bdiag/seq/bdiag.h"
#include "src/inline/ilu.h"
PetscErrorCode MatDestroy_SeqBDiag(Mat A)
{
  Mat_SeqBDiag *a = (Mat_SeqBDiag*)A->data;
  PetscInt     i,bs = A->rmap.bs;

#if defined(PETSC_USE_LOG)
  PetscLogObjectState((PetscObject)A,"Rows=%D, Cols=%D, NZ=%D, BSize=%D, NDiag=%D",A->rmap.N,A->cmap.n,a->nz,A->rmap.bs,a->nd);
#endif
  if (!a->user_alloc) { /* Free the actual diagonals */
    for (i=0; i<a->nd; i++) {
      if (a->diag[i] > 0) {
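        /* For positive (lower) diagonals, diagv[i] was shifted back by
           bs*bs*diag[i] at preallocation time so rows could be addressed by
           global block row; undo that shift here to recover the pointer that
           was actually allocated before freeing it. */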
        PetscScalar *dummy = a->diagv[i] + bs*bs*a->diag[i];
        PetscFree(dummy);
      } else {
        PetscFree(a->diagv[i]);
      }
    }
  }
  PetscFree(a->pivot);
  PetscFree(a->diagv);
  PetscFree(a->diag);
  PetscFree(a->colloc);
  PetscFree(a->dvalue);
  PetscFree(a->solvework);
  PetscFree(a);
  PetscObjectChangeTypeName((PetscObject)A,0);
  PetscObjectComposeFunction((PetscObject)A,"MatSeqBDiagSetPreallocation_C","",PETSC_NULL);
  return(0);
}

PetscErrorCode MatAssemblyEnd_SeqBDiag(Mat A,MatAssemblyType mode)
{
  Mat_SeqBDiag *a = (Mat_SeqBDiag*)A->data;
  PetscInt     i,k,temp,*diag = a->diag,*bdlen = a->bdlen;
  PetscScalar  *dtemp,**dv = a->diagv;

  if (mode == MAT_FLUSH_ASSEMBLY) return(0);
  /* Sort diagonals into descending order (lower diagonals first, then the
     main diagonal, then upper) with a simple exchange sort */
  for (i=0; i<a->nd; i++) {
    for (k=i+1; k<a->nd; k++) {
      if (diag[i] < diag[k]) {
        temp     = diag[i];
        diag[i]  = diag[k];
        diag[k]  = temp;
        temp     = bdlen[i];
        bdlen[i] = bdlen[k];
        bdlen[k] = temp;
        dtemp    = dv[i];
        dv[i]    = dv[k];
        dv[k]    = dtemp;
      }
    }
  }

  /* Set location of main diagonal */
  for (i=0; i<a->nd; i++) {
    if (!a->diag[i]) {a->mainbd = i; break;}
  }
  PetscInfo3(A,"Number of diagonals %D, memory used %D, block size %D\n",a->nd,a->maxnz,A->rmap.bs);
  return(0);
}

PetscErrorCode MatSetOption_SeqBDiag(Mat A,MatOption op)
{
  Mat_SeqBDiag *a = (Mat_SeqBDiag*)A->data;

  switch (op) {
  case MAT_NO_NEW_NONZERO_LOCATIONS:
    a->nonew = 1;
    break;
  case MAT_YES_NEW_NONZERO_LOCATIONS:
    a->nonew = 0;
    break;
  case MAT_NO_NEW_DIAGONALS:
    a->nonew_diag = 1;
    break;
  case MAT_YES_NEW_DIAGONALS:
    a->nonew_diag = 0;
    break;
  case MAT_COLUMN_ORIENTED:
    a->roworiented = PETSC_FALSE;
    break;
  case MAT_ROW_ORIENTED:
    a->roworiented = PETSC_TRUE;
    break;
  case MAT_ROWS_SORTED:
  case MAT_ROWS_UNSORTED:
  case MAT_COLUMNS_SORTED:
  case MAT_COLUMNS_UNSORTED:
  case MAT_IGNORE_OFF_PROC_ENTRIES:
  case MAT_NEW_NONZERO_LOCATION_ERR:
  case MAT_NEW_NONZERO_ALLOCATION_ERR:
  case MAT_USE_HASH_TABLE:
    PetscInfo1(A,"Option %d ignored\n",op);
    break;
  case MAT_SYMMETRIC:
  case MAT_STRUCTURALLY_SYMMETRIC:
  case MAT_NOT_SYMMETRIC:
  case MAT_NOT_STRUCTURALLY_SYMMETRIC:
  case MAT_HERMITIAN:
  case MAT_NOT_HERMITIAN:
  case MAT_SYMMETRY_ETERNAL:
  case MAT_NOT_SYMMETRY_ETERNAL:
    break;
  default:
    SETERRQ1(PETSC_ERR_SUP,"unknown option %d",op);
  }
  return(0);
}

static PetscErrorCode MatGetDiagonal_SeqBDiag_N(Mat A,Vec v)
{
  Mat_SeqBDiag *a = (Mat_SeqBDiag*)A->data;
  PetscInt     i,j,n,len,ibase,bs = A->rmap.bs,iloc;
  PetscScalar  *x,*dd,zero = 0.0;

  if (A->factor) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
  VecSet(v,zero);
  VecGetLocalSize(v,&n);
  if (n != A->rmap.N) SETERRQ(PETSC_ERR_ARG_SIZ,"Nonconforming mat and vec");
  if (a->mainbd == -1) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Main diagonal not set");
  len = PetscMin(a->mblock,a->nblock);
  dd  = a->diagv[a->mainbd];
  VecGetArray(v,&x);
  for (i=0; i<len; i++) {
    ibase = i*bs*bs; iloc = i*bs;
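    /* Within each dense bs x bs block, consecutive entries of the block's own
       main diagonal are separated by a stride of bs+1, so entry j of block i
       sits at offset ibase + j*(bs+1). */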
    for (j=0; j<bs; j++) x[j + iloc] = dd[ibase + j*(bs+1)];
  }
  VecRestoreArray(v,&x);
  return(0);
}

static PetscErrorCode MatGetDiagonal_SeqBDiag_1(Mat A,Vec v)
{
  Mat_SeqBDiag *a = (Mat_SeqBDiag*)A->data;
  PetscInt     i,n,len;
  PetscScalar  *x,*dd,zero = 0.0;

  VecSet(v,zero);
  VecGetLocalSize(v,&n);
  if (n != A->rmap.N) SETERRQ(PETSC_ERR_ARG_SIZ,"Nonconforming mat and vec");
  if (a->mainbd == -1) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Main diagonal not set");
  dd  = a->diagv[a->mainbd];
  len = PetscMin(A->rmap.n,A->cmap.n);
  VecGetArray(v,&x);
  for (i=0; i<len; i++) x[i] = dd[i];
  VecRestoreArray(v,&x);
  return(0);
}

PetscErrorCode MatZeroEntries_SeqBDiag(Mat A)
{
  Mat_SeqBDiag *a = (Mat_SeqBDiag*)A->data;
  PetscInt     d,i,len,bs = A->rmap.bs;
  PetscScalar  *dv;

  for (d=0; d<a->nd; d++) {
    dv = a->diagv[d];
    if (a->diag[d] > 0) {
      dv += bs*bs*a->diag[d];
    }
    len = a->bdlen[d]*bs*bs;
    for (i=0; i<len; i++) dv[i] = 0.0;
  }
  return(0);
}

PetscErrorCode MatZeroRows_SeqBDiag(Mat A,PetscInt N,const PetscInt rows[],PetscScalar diag)
{
  Mat_SeqBDiag *a = (Mat_SeqBDiag*)A->data;
  PetscInt     i,m = A->rmap.N - 1,nz;
  PetscScalar  *dd;
  PetscScalar  *val;

  for (i=0; i<N; i++) {
    if (rows[i]<0 || rows[i]>m) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"row out of range");
    MatGetRow_SeqBDiag(A,rows[i],&nz,PETSC_NULL,&val);
    PetscMemzero((void*)val,nz*sizeof(PetscScalar));
    MatRestoreRow_SeqBDiag(A,rows[i],&nz,PETSC_NULL,&val);
  }
  if (diag != 0.0) {
    if (a->mainbd == -1) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Main diagonal does not exist");
    dd = a->diagv[a->mainbd];
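    /* Note: indexing dd[rows[i]] addresses the diagonal entry of row rows[i]
       directly only in the bs==1 layout; for bs>1 the diagonal entry of a row
       lives inside a dense bs x bs block (cf. MatGetDiagonal_SeqBDiag_N). */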
    for (i=0; i<N; i++) dd[rows[i]] = diag;
  }
  MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);
  return(0);
}

PetscErrorCode MatGetSubMatrix_SeqBDiag(Mat A,IS isrow,IS iscol,MatReuse scall,Mat *submat)
{
  PetscInt    nznew,*smap,i,j,oldcols = A->cmap.n;
  PetscInt    *irow,*icol,newr,newc,*cwork,nz,bs;
  PetscInt    *col;
  PetscScalar *vwork;
  PetscScalar *val;
  Mat         newmat;

  if (scall == MAT_REUSE_MATRIX) { /* no support for reuse so simply destroy all */
    MatDestroy(*submat);
  }

  ISGetIndices(isrow,&irow);
  ISGetIndices(iscol,&icol);
  ISGetLocalSize(isrow,&newr);
  ISGetLocalSize(iscol,&newc);

  PetscMalloc((oldcols+1)*sizeof(PetscInt),&smap);
  PetscMalloc((newc+1)*sizeof(PetscInt),&cwork);
  PetscMalloc((newc+1)*sizeof(PetscScalar),&vwork);
  PetscMemzero((char*)smap,oldcols*sizeof(PetscInt));
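  /* smap maps an old (global) column index to 1 + its new column index when
     the column is selected by iscol, and to 0 otherwise; the +1 lets a zero
     entry mean "column not in the submatrix". */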
  for (i=0; i<newc; i++) smap[icol[i]] = i+1;

  /* Determine diagonals; then create submatrix */
  bs = A->rmap.bs; /* Default block size remains the same */
  MatCreate(A->comm,&newmat);
  MatSetSizes(newmat,newr,newc,newr,newc);
  MatSetType(newmat,A->type_name);
  MatSeqBDiagSetPreallocation(newmat,0,bs,PETSC_NULL,PETSC_NULL);

  /* Fill new matrix */
  for (i=0; i<newr; i++) {
    MatGetRow_SeqBDiag(A,irow[i],&nz,&col,&val);
    nznew = 0;
    for (j=0; j<nz; j++) {
      if (smap[col[j]]) {
        cwork[nznew]   = smap[col[j]] - 1;
        vwork[nznew++] = val[j];
      }
    }
    MatSetValues(newmat,1,&i,nznew,cwork,vwork,INSERT_VALUES);
    MatRestoreRow_SeqBDiag(A,irow[i],&nz,&col,&val); /* restore the same row that was obtained above */
  }
  MatAssemblyBegin(newmat,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(newmat,MAT_FINAL_ASSEMBLY);

  /* Free work space */
  PetscFree(smap);
  PetscFree(cwork);
  PetscFree(vwork);
  ISRestoreIndices(isrow,&irow);
  ISRestoreIndices(iscol,&icol);
  *submat = newmat;
  return(0);
}

PetscErrorCode MatGetSubMatrices_SeqBDiag(Mat A,PetscInt n,const IS irow[],const IS icol[],MatReuse scall,Mat *B[])
{
  PetscInt i;

  if (scall == MAT_INITIAL_MATRIX) {
    PetscMalloc((n+1)*sizeof(Mat),B);
  }

  for (i=0; i<n; i++) {
    MatGetSubMatrix_SeqBDiag(A,irow[i],icol[i],scall,&(*B)[i]);
  }
  return(0);
}

PetscErrorCode MatScale_SeqBDiag(Mat inA,PetscScalar alpha)
{
  Mat_SeqBDiag *a = (Mat_SeqBDiag*)inA->data;
  PetscInt     i,bs = inA->rmap.bs;
  PetscScalar  oalpha = alpha;
  PetscBLASInt one = 1,len;

  for (i=0; i<a->nd; i++) {
    len = (PetscBLASInt)bs*bs*a->bdlen[i];
    if (a->diag[i] > 0) {
      BLASscal_(&len,&oalpha,a->diagv[i] + bs*bs*a->diag[i],&one);
    } else {
      BLASscal_(&len,&oalpha,a->diagv[i],&one);
    }
  }
  PetscLogFlops(a->nz);
  return(0);
}

PetscErrorCode MatDiagonalScale_SeqBDiag(Mat A,Vec ll,Vec rr)
{
  Mat_SeqBDiag *a = (Mat_SeqBDiag*)A->data;
  PetscScalar  *l,*r,*dv;
  PetscInt     d,j,len;
  PetscInt     nd = a->nd,bs = A->rmap.bs,diag,m,n;
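
  /* For bs==1, entry j of a lower diagonal (diag > 0) lies in row j+diag and
     column j, while entry j of the main or an upper diagonal (diag <= 0) lies
     in row j and column j-diag; left scaling therefore indexes ll by the row
     and right scaling indexes rr by the column. */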
  if (ll) {
    VecGetSize(ll,&m);
    if (m != A->rmap.N) SETERRQ(PETSC_ERR_ARG_SIZ,"Left scaling vector wrong length");
    if (bs == 1) {
      VecGetArray(ll,&l);
      for (d=0; d<nd; d++) {
        dv   = a->diagv[d];
        diag = a->diag[d];
        len  = a->bdlen[d];
        if (diag > 0) for (j=0; j<len; j++) dv[j+diag] *= l[j+diag];
        else          for (j=0; j<len; j++) dv[j] *= l[j];
      }
      VecRestoreArray(ll,&l);
      PetscLogFlops(a->nz);
    } else SETERRQ(PETSC_ERR_SUP,"Not yet done for bs>1");
  }
  if (rr) {
    VecGetSize(rr,&n);
    if (n != A->cmap.n) SETERRQ(PETSC_ERR_ARG_SIZ,"Right scaling vector wrong length");
    if (bs == 1) {
      VecGetArray(rr,&r);
      for (d=0; d<nd; d++) {
        dv   = a->diagv[d];
        diag = a->diag[d];
        len  = a->bdlen[d];
        if (diag > 0) for (j=0; j<len; j++) dv[j+diag] *= r[j];
        else          for (j=0; j<len; j++) dv[j] *= r[j-diag];
      }
      VecRestoreArray(rr,&r);
      PetscLogFlops(a->nz);
    } else SETERRQ(PETSC_ERR_SUP,"Not yet done for bs>1");
  }
  return(0);
}

static PetscErrorCode MatDuplicate_SeqBDiag(Mat,MatDuplicateOption,Mat *);

PetscErrorCode MatSetUpPreallocation_SeqBDiag(Mat A)
{
  MatSeqBDiagSetPreallocation(A,PETSC_DEFAULT,PETSC_DEFAULT,0,0);
  return(0);
}

/* -------------------------------------------------------------------*/
static struct _MatOps MatOps_Values = {MatSetValues_SeqBDiag_N,
       MatGetRow_SeqBDiag,
       MatRestoreRow_SeqBDiag,
       MatMult_SeqBDiag_N,
/* 4*/ MatMultAdd_SeqBDiag_N,
       MatMultTranspose_SeqBDiag_N,
       MatMultTransposeAdd_SeqBDiag_N,
       MatSolve_SeqBDiag_N,
       0,
       0,
/*10*/ 0,
       0,
       0,
       MatRelax_SeqBDiag_N,
       MatTranspose_SeqBDiag,
/*15*/ MatGetInfo_SeqBDiag,
       0,
       MatGetDiagonal_SeqBDiag_N,
       MatDiagonalScale_SeqBDiag,
       MatNorm_SeqBDiag,
/*20*/ 0,
       MatAssemblyEnd_SeqBDiag,
       0,
       MatSetOption_SeqBDiag,
       MatZeroEntries_SeqBDiag,
/*25*/ MatZeroRows_SeqBDiag,
       0,
       MatLUFactorNumeric_SeqBDiag_N,
       0,
       0,
/*30*/ MatSetUpPreallocation_SeqBDiag,
       MatILUFactorSymbolic_SeqBDiag,
       0,
       0,
       0,
/*35*/ MatDuplicate_SeqBDiag,
       0,
       0,
       MatILUFactor_SeqBDiag,
       0,
/*40*/ 0,
       MatGetSubMatrices_SeqBDiag,
       0,
       MatGetValues_SeqBDiag_N,
       0,
/*45*/ 0,
       MatScale_SeqBDiag,
       0,
       0,
       0,
/*50*/ 0,
       0,
       0,
       0,
       0,
/*55*/ 0,
       0,
       0,
       0,
       0,
/*60*/ 0,
       MatDestroy_SeqBDiag,
       MatView_SeqBDiag,
       0,
       0,
/*65*/ 0,
       0,
       0,
       0,
       0,
/*70*/ 0,
       0,
       0,
       0,
       0,
/*75*/ 0,
       0,
       0,
       0,
       0,
/*80*/ 0,
       0,
       0,
       0,
       MatLoad_SeqBDiag,
/*85*/ 0,
       0,
       0,
       0,
       0,
/*90*/ 0,
       0,
       0,
       0,
       0,
/*95*/ 0,
       0,
       0,
       0};

/*@C
   MatSeqBDiagSetPreallocation - Sets the nonzero structure and (optionally) arrays.

   Collective on MPI_Comm

   Input Parameters:
+  B - the matrix
.  nd - number of block diagonals (optional)
.  bs - each element of a diagonal is a bs x bs dense matrix
.  diag - optional array of block diagonal numbers (length nd).
   For a matrix element A[i,j], where i=row and j=column, the
   diagonal number is
$     diag = i/bs - j/bs  (integer division)
   Set diag=PETSC_NULL on input for PETSc to dynamically allocate memory as
   needed (expensive).
-  diagv - pointer to actual diagonals (in same order as diag array),
   if allocated by user. Otherwise, set diagv=PETSC_NULL on input for PETSc
   to control memory allocation.

   Options Database Keys:
.  -mat_block_size <bs> - Sets blocksize
.  -mat_bdiag_diags <s1,s2,s3,...> - Sets diagonal numbers

   Notes:
   See the users manual for further details regarding this storage format.

   Fortran Note:
   Fortran programmers cannot set diagv; this value is ignored.

   Level: intermediate

.keywords: matrix, block, diagonal, sparse

.seealso: MatCreate(), MatCreateMPIBDiag(), MatSetValues()
@*/
PetscErrorCode MatSeqBDiagSetPreallocation(Mat B,PetscInt nd,PetscInt bs,const PetscInt diag[],PetscScalar *diagv[])
{
  PetscErrorCode ierr,(*f)(Mat,PetscInt,PetscInt,const PetscInt[],PetscScalar*[]);

  PetscObjectQueryFunction((PetscObject)B,"MatSeqBDiagSetPreallocation_C",(void (**)(void))&f);
  if (f) {
    (*f)(B,nd,bs,diag,diagv);
  }
  return(0);
}
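
/*
   Illustrative sketch (not part of the original source): typical use of
   MatSeqBDiagSetPreallocation() to set up a block tridiagonal matrix with
   2x2 blocks. The helper name ExampleCreateBlockTridiag and the parameter
   mblock (number of block rows) are assumptions made for this example.
   With bs=2, element A[2,0] has diagonal number 2/2 - 0/2 = 1 and element
   A[0,2] has 0/2 - 2/2 = -1, so a block tridiagonal layout uses the
   diagonal numbers {0,1,-1}.
*/
#if 0
static PetscErrorCode ExampleCreateBlockTridiag(MPI_Comm comm,PetscInt mblock,Mat *A)
{
  PetscInt bs = 2,nd = 3;
  PetscInt diag[3] = {0,1,-1};  /* main, first lower, first upper block diagonal */

  MatCreate(comm,A);
  MatSetSizes(*A,bs*mblock,bs*mblock,bs*mblock,bs*mblock);
  MatSetType(*A,MATSEQBDIAG);
  MatSeqBDiagSetPreallocation(*A,nd,bs,diag,PETSC_NULL); /* PETSC_NULL diagv: PETSc allocates storage */
  return(0);
}
#endif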

PetscErrorCode MatSeqBDiagSetPreallocation_SeqBDiag(Mat B,PetscInt nd,PetscInt bs,PetscInt *diag,PetscScalar **diagv)
{
  Mat_SeqBDiag *b;
  PetscInt     i,nda,sizetot,nd2 = 128,idiag[128];
  PetscTruth   flg1;

  B->preallocated = PETSC_TRUE;
  if (bs == PETSC_DEFAULT) bs = 1;
  if (!bs) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Blocksize cannot be zero");
  if (nd == PETSC_DEFAULT) nd = 0;
  PetscOptionsGetInt(PETSC_NULL,"-mat_block_size",&bs,PETSC_NULL);
  PetscOptionsGetIntArray(PETSC_NULL,"-mat_bdiag_diags",idiag,&nd2,&flg1);
  if (flg1) {
    diag = idiag;
    nd   = nd2;
  }

  B->rmap.bs = B->cmap.bs = bs;
  PetscMapInitialize(B->comm,&B->rmap);
  PetscMapInitialize(B->comm,&B->cmap);

  if ((B->cmap.n%bs) || (B->rmap.N%bs)) SETERRQ(PETSC_ERR_ARG_SIZ,"Matrix dimensions must be divisible by the block size");
  if (!nd) nda = nd + 1;
  else     nda = nd;
  b = (Mat_SeqBDiag*)B->data;

  PetscOptionsHasName(PETSC_NULL,"-mat_no_unroll",&flg1);
  if (!flg1) {
    switch (bs) {
    case 1:
      B->ops->setvalues        = MatSetValues_SeqBDiag_1;
      B->ops->getvalues        = MatGetValues_SeqBDiag_1;
      B->ops->getdiagonal      = MatGetDiagonal_SeqBDiag_1;
      B->ops->mult             = MatMult_SeqBDiag_1;
      B->ops->multadd          = MatMultAdd_SeqBDiag_1;
      B->ops->multtranspose    = MatMultTranspose_SeqBDiag_1;
      B->ops->multtransposeadd = MatMultTransposeAdd_SeqBDiag_1;
      B->ops->relax            = MatRelax_SeqBDiag_1;
      B->ops->solve            = MatSolve_SeqBDiag_1;
      B->ops->lufactornumeric  = MatLUFactorNumeric_SeqBDiag_1;
      break;
    case 2:
      B->ops->mult    = MatMult_SeqBDiag_2;
      B->ops->multadd = MatMultAdd_SeqBDiag_2;
      B->ops->solve   = MatSolve_SeqBDiag_2;
      break;
    case 3:
      B->ops->mult    = MatMult_SeqBDiag_3;
      B->ops->multadd = MatMultAdd_SeqBDiag_3;
      B->ops->solve   = MatSolve_SeqBDiag_3;
      break;
    case 4:
      B->ops->mult    = MatMult_SeqBDiag_4;
      B->ops->multadd = MatMultAdd_SeqBDiag_4;
      B->ops->solve   = MatSolve_SeqBDiag_4;
      break;
    case 5:
      B->ops->mult    = MatMult_SeqBDiag_5;
      B->ops->multadd = MatMultAdd_SeqBDiag_5;
      B->ops->solve   = MatSolve_SeqBDiag_5;
      break;
    }
  }

  b->mblock  = B->rmap.N/bs;
  b->nblock  = B->cmap.n/bs;
  b->nd      = nd;
  B->rmap.bs = bs;
  b->ndim    = 0;
  b->mainbd  = -1;
  b->pivot   = 0;

  PetscMalloc(2*nda*sizeof(PetscInt),&b->diag);
  b->bdlen = b->diag + nda;
  PetscMalloc((B->cmap.n+1)*sizeof(PetscInt),&b->colloc);
  PetscMalloc(nda*sizeof(PetscScalar*),&b->diagv);
  sizetot = 0;

  if (diagv) { /* user allocated space */
    b->user_alloc = PETSC_TRUE;
    for (i=0; i<nd; i++) b->diagv[i] = diagv[i];
  } else b->user_alloc = PETSC_FALSE;

  for (i=0; i<nd; i++) {
    b->diag[i] = diag[i];
    if (diag[i] > 0) { /* lower triangular */
      b->bdlen[i] = PetscMin(b->nblock,b->mblock - diag[i]);
    } else { /* upper triangular */
      b->bdlen[i] = PetscMin(b->mblock,b->nblock + diag[i]);
    }
    sizetot += b->bdlen[i];
  }
  sizetot  *= bs*bs;
  b->maxnz  = sizetot;
  PetscMalloc((B->cmap.n+1)*sizeof(PetscScalar),&b->dvalue);
  PetscLogObjectMemory(B,(nda*(bs+2))*sizeof(PetscInt) + bs*nda*sizeof(PetscScalar)
                         + nda*sizeof(PetscScalar*) + sizeof(Mat_SeqBDiag)
                         + sizeof(struct _p_Mat) + sizetot*sizeof(PetscScalar));

  if (!b->user_alloc) {
    for (i=0; i<nd; i++) {
      PetscMalloc(bs*bs*b->bdlen[i]*sizeof(PetscScalar),&b->diagv[i]);
      PetscMemzero(b->diagv[i],bs*bs*b->bdlen[i]*sizeof(PetscScalar));
    }
    b->nonew = 0; b->nonew_diag = 0;
  } else { /* diagonals are set on input; don't allow dynamic allocation */
    b->nonew = 1; b->nonew_diag = 1;
  }

  /* adjust diagv so one may access rows with diagv[diag][row] for all rows */
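  /* (A lower diagonal d > 0 stores its first block in block row d; shifting
     the base pointer back by d blocks of size bs*bs makes diagv[i] indexable
     by global block row. For example, with bs=2 and d=3, the block in block
     row 5 is then found at diagv[i] + 2*2*5.) */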
  for (i=0; i<nd; i++) {
    if (diag[i] > 0) {
      b->diagv[i] -= bs*bs*diag[i];
    }
  }

  b->nz               = b->maxnz; /* Currently not keeping track of exact count */
  b->roworiented      = PETSC_TRUE;
  B->info.nz_unneeded = (double)b->maxnz;
  return(0);
}

static PetscErrorCode MatDuplicate_SeqBDiag(Mat A,MatDuplicateOption cpvalues,Mat *matout)
{
  Mat_SeqBDiag *newmat,*a = (Mat_SeqBDiag*)A->data;
  PetscInt     i,len,diag,bs = A->rmap.bs;
  Mat          mat;

  MatCreate(A->comm,matout);
  MatSetSizes(*matout,A->rmap.N,A->cmap.n,A->rmap.N,A->cmap.n);
  MatSetType(*matout,A->type_name);
  MatSeqBDiagSetPreallocation(*matout,a->nd,bs,a->diag,PETSC_NULL);

  /* Copy contents of diagonals */
  mat    = *matout;
  newmat = (Mat_SeqBDiag*)mat->data;
  if (cpvalues == MAT_COPY_VALUES) {
    for (i=0; i<a->nd; i++) {
      len  = a->bdlen[i] * bs * bs * sizeof(PetscScalar);
      diag = a->diag[i];
      if (diag > 0) {
        PetscMemcpy(newmat->diagv[i]+bs*bs*diag,a->diagv[i]+bs*bs*diag,len);
      } else {
        PetscMemcpy(newmat->diagv[i],a->diagv[i],len);
      }
    }
  }
  MatAssemblyBegin(mat,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(mat,MAT_FINAL_ASSEMBLY);
  return(0);
}

PetscErrorCode MatLoad_SeqBDiag(PetscViewer viewer,MatType type,Mat *A)
{
  Mat         B;
  PetscMPIInt size;
  int         fd;
  PetscInt    *scols,i,nz,header[4],nd = 128;
  PetscInt    bs,*rowlengths = 0,M,N,*cols,extra_rows,*diag = 0;
  PetscInt    idiag[128];
  PetscScalar *vals,*svals;
  MPI_Comm    comm;
  PetscTruth  flg;

  PetscObjectGetComm((PetscObject)viewer,&comm);
  MPI_Comm_size(comm,&size);
  if (size > 1) SETERRQ(PETSC_ERR_ARG_SIZ,"viewer must have one processor");
  PetscViewerBinaryGetDescriptor(viewer,&fd);
  PetscBinaryRead(fd,header,4,PETSC_INT);
  if (header[0] != MAT_FILE_COOKIE) SETERRQ(PETSC_ERR_FILE_UNEXPECTED,"Not matrix object");
  M = header[1]; N = header[2]; nz = header[3];
  if (M != N) SETERRQ(PETSC_ERR_SUP,"Can only load square matrices");
  if (header[3] < 0) {
    SETERRQ(PETSC_ERR_FILE_UNEXPECTED,"Matrix stored in special format, cannot load as SeqBDiag");
  }

  /*
     This code adds extra rows to make sure the number of rows is
     divisible by the blocksize
  */
  bs = 1;
  PetscOptionsGetInt(PETSC_NULL,"-matload_block_size",&bs,PETSC_NULL);
  extra_rows = bs - M + bs*(M/bs);
  if (extra_rows == bs) extra_rows = 0;
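  /* (extra_rows = bs - (M mod bs); e.g. bs=4, M=10 gives 4 - 10 + 4*2 = 2, so
     the matrix is padded from 10 to 12 rows. When M is already divisible by
     bs the formula yields bs, which the line above resets to 0.) */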
  if (extra_rows) {
    PetscInfo(0,"Padding loaded matrix to match blocksize\n");
  }

  /* read row lengths */
  PetscMalloc((M+extra_rows)*sizeof(PetscInt),&rowlengths);
  PetscBinaryRead(fd,rowlengths,M,PETSC_INT);
  for (i=0; i<extra_rows; i++) rowlengths[M+i] = 1;

  /* load information about diagonals */
  PetscOptionsGetIntArray(PETSC_NULL,"-matload_bdiag_diags",idiag,&nd,&flg);
  if (flg) {
    diag = idiag;
  }

  /* create our matrix */
  MatCreate(comm,A);
  MatSetSizes(*A,M+extra_rows,M+extra_rows,M+extra_rows,M+extra_rows);
  MatSetType(*A,type);
  MatSeqBDiagSetPreallocation(*A,nd,bs,diag,PETSC_NULL);
  B = *A;

  /* read column indices and nonzeros */
  PetscMalloc(nz*sizeof(PetscInt),&scols);
  cols = scols;
  PetscBinaryRead(fd,cols,nz,PETSC_INT);
  PetscMalloc(nz*sizeof(PetscScalar),&svals);
  vals = svals;
  PetscBinaryRead(fd,vals,nz,PETSC_SCALAR);

  /* insert into matrix */
  for (i=0; i<M; i++) {
    MatSetValues(B,1,&i,rowlengths[i],scols,svals,INSERT_VALUES);
    scols += rowlengths[i]; svals += rowlengths[i];
  }
  /* give each padding row a single 1.0 on the diagonal */
  vals[0] = 1.0;
  for (i=M; i<M+extra_rows; i++) {
    MatSetValues(B,1,&i,1,&i,vals,INSERT_VALUES);
  }

  PetscFree(cols);
  PetscFree(vals);
  PetscFree(rowlengths);

  MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);
  return(0);
}

/*MC
   MATSEQBDIAG - MATSEQBDIAG = "seqbdiag" - A matrix type to be used for sequential block diagonal matrices.

   Options Database Keys:
.  -mat_type seqbdiag - sets the matrix type to "seqbdiag" during a call to MatSetFromOptions()

   Level: beginner

.seealso: MatCreateSeqBDiag
M*/
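
/*
   Illustrative sketch (not part of the original source): selecting this type
   at run time through the options database. The helper name and the sizes
   used here are assumptions made for this example.
*/
#if 0
static PetscErrorCode ExampleUseSeqBDiagFromOptions(void)
{
  Mat A;

  MatCreate(PETSC_COMM_SELF,&A);
  MatSetSizes(A,8,8,8,8);
  MatSetFromOptions(A);  /* honors -mat_type seqbdiag given on the command line */
  /* values may now be inserted with MatSetValues() and assembled */
  MatDestroy(A);
  return(0);
}
#endif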

PetscErrorCode MatCreate_SeqBDiag(Mat B)
{
  Mat_SeqBDiag *b;
  PetscMPIInt  size;

  MPI_Comm_size(B->comm,&size);
  if (size > 1) SETERRQ(PETSC_ERR_ARG_WRONG,"Comm must be of size 1");

  PetscNew(Mat_SeqBDiag,&b);
  B->data    = (void*)b;
  PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));
  B->factor  = 0;
  B->mapping = 0;

  b->ndim   = 0;
  b->mainbd = -1;
  b->pivot  = 0;

  b->roworiented = PETSC_TRUE;
  PetscObjectComposeFunctionDynamic((PetscObject)B,"MatSeqBDiagSetPreallocation_C",
                                    "MatSeqBDiagSetPreallocation_SeqBDiag",
                                    MatSeqBDiagSetPreallocation_SeqBDiag);
  PetscObjectChangeTypeName((PetscObject)B,MATSEQBDIAG);
  return(0);
}

/*@C
   MatCreateSeqBDiag - Creates a sequential block diagonal matrix.

   Collective on MPI_Comm

   Input Parameters:
+  comm - MPI communicator, set to PETSC_COMM_SELF
.  m - number of rows
.  n - number of columns
.  nd - number of block diagonals (optional)
.  bs - each element of a diagonal is a bs x bs dense matrix
.  diag - optional array of block diagonal numbers (length nd).
   For a matrix element A[i,j], where i=row and j=column, the
   diagonal number is
$     diag = i/bs - j/bs  (integer division)
   Set diag=PETSC_NULL on input for PETSc to dynamically allocate memory as
   needed (expensive).
-  diagv - pointer to actual diagonals (in same order as diag array),
   if allocated by user. Otherwise, set diagv=PETSC_NULL on input for PETSc
   to control memory allocation.

   Output Parameter:
.  A - the matrix

   Options Database Keys:
.  -mat_block_size <bs> - Sets blocksize
.  -mat_bdiag_diags <s1,s2,s3,...> - Sets diagonal numbers

   Notes:
   See the users manual for further details regarding this storage format.

   Fortran Note:
   Fortran programmers cannot set diagv; this value is ignored.

   Level: intermediate

.keywords: matrix, block, diagonal, sparse

.seealso: MatCreate(), MatCreateMPIBDiag(), MatSetValues()
@*/
PetscErrorCode MatCreateSeqBDiag(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt nd,PetscInt bs,const PetscInt diag[],PetscScalar *diagv[],Mat *A)
{
  MatCreate(comm,A);
  MatSetSizes(*A,m,n,m,n);
  MatSetType(*A,MATSEQBDIAG);
  MatSeqBDiagSetPreallocation(*A,nd,bs,diag,diagv);
  return(0);
}
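
/*
   Illustrative sketch (not part of the original source): creating a scalar
   (bs=1) tridiagonal matrix with MatCreateSeqBDiag() and letting PETSc
   allocate the diagonal storage. The helper name and the size n are
   assumptions made for this example; with bs=1 the diagonal number is simply
   i-j, so the subdiagonal is 1 and the superdiagonal is -1.
*/
#if 0
static PetscErrorCode ExampleCreateTridiag(void)
{
  Mat      T;
  PetscInt n = 10,nd = 3;
  PetscInt diag[3] = {0,1,-1};  /* main, first sub-, first superdiagonal */

  MatCreateSeqBDiag(PETSC_COMM_SELF,n,n,nd,1,diag,PETSC_NULL,&T);
  /* fill with MatSetValues(), then MatAssemblyBegin()/MatAssemblyEnd() */
  MatDestroy(T);
  return(0);
}
#endif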