Actual source code: ex73.c
static char help[] = "Reads a PETSc matrix from a file and partitions it\n\n";
/*T
   Concepts: partitioning
   Processors: n
T*/
/*
  Include "petscmat.h" so that we can use matrices. Note that this file
  automatically includes:
     petscsys.h    - base PETSc routines
     petscvec.h    - vectors
     petscmat.h    - matrices
     petscis.h     - index sets
     petscviewer.h - viewers

  Example of usage:
    mpiexec -n 3 ex73 -f <matfile> -mat_partitioning_type parmetis/scotch -viewer_binary_skip_info -nox
*/
#include <petscmat.h>
int main(int argc,char **args)
{
  MatType         mtype = MATMPIAIJ;        /* matrix format */
  Mat             A,B;                      /* matrix */
  PetscViewer     fd;                       /* viewer */
  char            file[PETSC_MAX_PATH_LEN]; /* input file name */
  PetscBool       flg,viewMats,viewIS,viewVecs,useND,noVecLoad = PETSC_FALSE;
  PetscErrorCode  ierr;
  PetscInt        *nlocal,m,n;
  PetscMPIInt     rank,size;
  MatPartitioning part;
  IS              is,isn;
  Vec             xin, xout;
  VecScatter      scat;

  ierr = PetscInitialize(&argc,&args,(char*)0,help);if (ierr) return ierr;
  MPI_Comm_size(PETSC_COMM_WORLD,&size);
  MPI_Comm_rank(PETSC_COMM_WORLD,&rank);
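
  /* Runtime flags: -view_mats and -view_vecs display the matrices and vectors
     before and after partitioning, -view_is displays the intermediate index
     sets, -use_nd requests a nested-dissection partitioning, and -novec_load
     skips reading a vector from the file and uses a random one instead. */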
  PetscOptionsHasName(NULL,NULL, "-view_mats", &viewMats);
  PetscOptionsHasName(NULL,NULL, "-view_is", &viewIS);
  PetscOptionsHasName(NULL,NULL, "-view_vecs", &viewVecs);
  PetscOptionsHasName(NULL,NULL, "-use_nd", &useND);
  PetscOptionsHasName(NULL,NULL, "-novec_load", &noVecLoad);

  /*
     Determine file from which we read the matrix
  */
  PetscOptionsGetString(NULL,NULL,"-f",file,sizeof(file),&flg);

  /*
     Open binary file. Note that we use FILE_MODE_READ to indicate
     reading from this file.
  */
  PetscViewerBinaryOpen(PETSC_COMM_WORLD,file,FILE_MODE_READ,&fd);

  /*
     Load the matrix (and, unless -novec_load is given, the vector);
     then destroy the viewer.
  */
  MatCreate(PETSC_COMM_WORLD,&A);
  MatSetType(A,mtype);
  MatLoad(A,fd);
  if (!noVecLoad) {
    VecCreate(PETSC_COMM_WORLD,&xin);
    VecLoad(xin,fd);
  } else {
    MatCreateVecs(A,&xin,NULL);
    VecSetRandom(xin,NULL);
  }
  PetscViewerDestroy(&fd);
  if (viewMats) {
    PetscPrintf(PETSC_COMM_WORLD,"Original matrix:\n");
    MatView(A,PETSC_VIEWER_DRAW_WORLD);
  }
  if (viewVecs) {
    PetscPrintf(PETSC_COMM_WORLD,"Original vector:\n");
    VecView(xin,PETSC_VIEWER_STDOUT_WORLD);
  }

  /* Partition the graph of the matrix */
  MatPartitioningCreate(PETSC_COMM_WORLD,&part);
  MatPartitioningSetAdjacency(part,A);
  MatPartitioningSetFromOptions(part);

  /* get new processor owner number of each vertex */
  if (useND) {
    MatPartitioningApplyND(part,&is);
  } else {
    MatPartitioningApply(part,&is);
  }
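  /* 'is' now gives, for each local row of A, the rank of the process that
     should own it after repartitioning; with -use_nd the nested-dissection
     variant also encodes separator information (see MatPartitioningApplyND) */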
  if (viewIS) {
    PetscPrintf(PETSC_COMM_WORLD,"IS1 - new processor ownership:\n");
    ISView(is,PETSC_VIEWER_STDOUT_WORLD);
  }

  /* get new global number of each old global number */
  ISPartitioningToNumbering(is,&isn);
  if (viewIS) {
    PetscPrintf(PETSC_COMM_WORLD,"IS2 - new global numbering:\n");
    ISView(isn,PETSC_VIEWER_STDOUT_WORLD);
  }

  /* get number of new vertices for each processor */
  PetscMalloc1(size,&nlocal);
  ISPartitioningCount(is,size,nlocal);
  ISDestroy(&is);

  /* get old global number of each new global number */
  ISInvertPermutation(isn,useND ? PETSC_DECIDE : nlocal[rank],&is);
  if (viewIS) {
    PetscPrintf(PETSC_COMM_WORLD,"IS3=inv(IS2) - old global number of each new global number:\n");
    ISView(is,PETSC_VIEWER_STDOUT_WORLD);
  }

  /* move the matrix rows and columns to the new processes they have been assigned to by the permutation */
  MatCreateSubMatrix(A,is,is,MAT_INITIAL_MATRIX,&B);
  PetscFree(nlocal);
  ISDestroy(&isn);
  MatDestroy(&A);
  MatPartitioningDestroy(&part);
  if (viewMats) {
    PetscPrintf(PETSC_COMM_WORLD,"Partitioned matrix:\n");
    MatView(B,PETSC_VIEWER_DRAW_WORLD);
  }

  /* move the vector rows to the new processes they have been assigned to */
  MatGetLocalSize(B,&m,&n);
  VecCreateMPI(PETSC_COMM_WORLD,m,PETSC_DECIDE,&xout);
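  /* scatter entry is[i] (an old global index) of xin into local entry i of
     xout; a NULL target index set means "all of xout, in natural order" */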
  VecScatterCreate(xin,is,xout,NULL,&scat);
  VecScatterBegin(scat,xin,xout,INSERT_VALUES,SCATTER_FORWARD);
  VecScatterEnd(scat,xin,xout,INSERT_VALUES,SCATTER_FORWARD);
  VecScatterDestroy(&scat);
  if (viewVecs) {
    PetscPrintf(PETSC_COMM_WORLD,"Mapped vector:\n");
    VecView(xout,PETSC_VIEWER_STDOUT_WORLD);
  }
  VecDestroy(&xout);
  ISDestroy(&is);
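
  /* As a further demonstration, build a matrix J that has the same parallel
     layout as B but two degrees of freedom per node: every nonzero of B
     becomes a 2x2 block of (explicitly stored zero) entries in J */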
  {
    PetscInt          rstart,i,*nzd,*nzo,nzl,nzmax = 0,*ncols,nrow,j;
    Mat               J;
    const PetscInt    *cols;
    const PetscScalar *vals;
    PetscScalar       *nvals;

    MatGetOwnershipRange(B,&rstart,NULL);
    PetscCalloc2(2*m,&nzd,2*m,&nzo);
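    /* count diagonal-block and off-diagonal-block nonzeros for each new row:
       each original nonzero contributes two entries to both of the new rows
       2*i and 2*i+1 */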
    for (i=0; i<m; i++) {
      MatGetRow(B,i+rstart,&nzl,&cols,NULL);
      for (j=0; j<nzl; j++) {
        if (cols[j] >= rstart && cols[j] < rstart+n) {
          nzd[2*i]   += 2;
          nzd[2*i+1] += 2;
        } else {
          nzo[2*i]   += 2;
          nzo[2*i+1] += 2;
        }
      }
      nzmax = PetscMax(nzmax,nzd[2*i]+nzo[2*i]);
      MatRestoreRow(B,i+rstart,&nzl,&cols,NULL);
    }
    MatCreateAIJ(PETSC_COMM_WORLD,2*m,2*m,PETSC_DECIDE,PETSC_DECIDE,0,nzd,0,nzo,&J);
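    /* when the per-row preallocation arrays nzd/nzo are supplied, the scalar
       nonzero counts (the two zeros above) are ignored */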
    PetscInfo(0,"Created empty Jacobian matrix\n");
    PetscFree2(nzd,nzo);
    PetscMalloc2(nzmax,&ncols,nzmax,&nvals);
    PetscArrayzero(nvals,nzmax);
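    /* insert explicit zeros so that J picks up the block nonzero structure of
       B; only the sparsity pattern matters here, not the values */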
    for (i=0; i<m; i++) {
      MatGetRow(B,i+rstart,&nzl,&cols,&vals);
      for (j=0; j<nzl; j++) {
        ncols[2*j]   = 2*cols[j];
        ncols[2*j+1] = 2*cols[j]+1;
      }
      nrow = 2*(i+rstart);
      MatSetValues(J,1,&nrow,2*nzl,ncols,nvals,INSERT_VALUES);
      nrow = 2*(i+rstart) + 1;
      MatSetValues(J,1,&nrow,2*nzl,ncols,nvals,INSERT_VALUES);
      MatRestoreRow(B,i+rstart,&nzl,&cols,&vals);
    }
    MatAssemblyBegin(J,MAT_FINAL_ASSEMBLY);
    MatAssemblyEnd(J,MAT_FINAL_ASSEMBLY);
    if (viewMats) {
      PetscPrintf(PETSC_COMM_WORLD,"Jacobian matrix structure:\n");
      MatView(J,PETSC_VIEWER_DRAW_WORLD);
    }
    MatDestroy(&J);
    PetscFree2(ncols,nvals);
  }

  /*
     Free work space. All PETSc objects should be destroyed when they
     are no longer needed.
  */
  MatDestroy(&B);
  VecDestroy(&xin);
  ierr = PetscFinalize();
  return ierr;
}

/*TEST

   test:
      nsize: 3
      requires: parmetis datafilespath !complex double !defined(PETSC_USE_64BIT_INDICES)
      args: -nox -f ${DATAFILESPATH}/matrices/arco1 -mat_partitioning_type parmetis -viewer_binary_skip_info -novec_load

   test:
      requires: parmetis !complex double !defined(PETSC_USE_64BIT_INDICES)
      output_file: output/ex73_1.out
      suffix: parmetis_nd_32
      nsize: 3
      args: -nox -f ${wPETSC_DIR}/share/petsc/datafiles/matrices/spd-real-int32-float64 -mat_partitioning_type parmetis -viewer_binary_skip_info -use_nd -novec_load

   test:
      requires: parmetis !complex double defined(PETSC_USE_64BIT_INDICES)
      output_file: output/ex73_1.out
      suffix: parmetis_nd_64
      nsize: 3
      args: -nox -f ${wPETSC_DIR}/share/petsc/datafiles/matrices/spd-real-int64-float64 -mat_partitioning_type parmetis -viewer_binary_skip_info -use_nd -novec_load

   test:
      requires: ptscotch !complex double !defined(PETSC_USE_64BIT_INDICES) defined(PETSC_HAVE_SCOTCH_PARMETIS_V3_NODEND)
      output_file: output/ex73_1.out
      suffix: ptscotch_nd_32
      nsize: 4
      args: -nox -f ${wPETSC_DIR}/share/petsc/datafiles/matrices/spd-real-int32-float64 -mat_partitioning_type ptscotch -viewer_binary_skip_info -use_nd -novec_load

   test:
      requires: ptscotch !complex double defined(PETSC_USE_64BIT_INDICES) defined(PETSC_HAVE_SCOTCH_PARMETIS_V3_NODEND)
      output_file: output/ex73_1.out
      suffix: ptscotch_nd_64
      nsize: 4
      args: -nox -f ${wPETSC_DIR}/share/petsc/datafiles/matrices/spd-real-int64-float64 -mat_partitioning_type ptscotch -viewer_binary_skip_info -use_nd -novec_load

TEST*/