Actual source code: ex6f.F
!
!  Description: This example demonstrates repeated linear solves as
!  well as the use of different preconditioner and linear system
!  matrices.  This example also illustrates how to save PETSc objects
!  in common blocks.
!
!/*T
!  Concepts: KSP^repeatedly solving linear systems;
!  Concepts: KSP^different matrices for linear system and preconditioner;
!  Processors: n
!T*/
!
!  The following include statements are required for KSP Fortran programs:
!     petsc.h       - base PETSc routines
!     petscvec.h    - vectors
!     petscmat.h    - matrices
!     petscpc.h     - preconditioners
!     petscksp.h    - Krylov subspace methods
!  Other include statements may be needed if using additional PETSc
!  routines in a Fortran program, e.g.,
!     petscviewer.h - viewers
!     petscis.h     - index sets
!
      program main
#include "include/finclude/petsc.h"
#include "include/finclude/petscvec.h"
#include "include/finclude/petscmat.h"
#include "include/finclude/petscpc.h"
#include "include/finclude/petscksp.h"

!  Variables:
!
!     A       - matrix that defines linear system
!     ksp     - KSP context
!     x, b, u - approx solution, RHS, exact solution vectors
!
      Vec              x,u,b
      Mat              A
      KSP              ksp
      integer          i,j,II,JJ,ierr,m,n
      integer          Istart,Iend,flg,nsteps
      PetscScalar      v

      call PetscInitialize(PETSC_NULL_CHARACTER,ierr)
      m      = 3
      n      = 3
      nsteps = 2
      call PetscOptionsGetInt(PETSC_NULL_CHARACTER,'-m',m,flg,ierr)
      call PetscOptionsGetInt(PETSC_NULL_CHARACTER,'-n',n,flg,ierr)
      call PetscOptionsGetInt(PETSC_NULL_CHARACTER,'-nsteps',nsteps,    &
     &                        flg,ierr)

!  Create parallel matrix, specifying only its global dimensions.
!  When using MatCreate(), the matrix format can be specified at
!  runtime.  Also, the parallel partitioning of the matrix is
!  determined by PETSc at runtime.

      call MatCreate(PETSC_COMM_WORLD,A,ierr)
      call MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,m*n,m*n,ierr)
      call MatSetFromOptions(A,ierr)

!  The matrix is partitioned by contiguous chunks of rows across the
!  processors.  Determine which rows of the matrix are locally owned.

      call MatGetOwnershipRange(A,Istart,Iend,ierr)
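
!  (Note: Istart is the global index of the first locally owned row and
!  Iend is one past the last locally owned row, which is why the loop
!  below runs over II = Istart, ..., Iend-1.)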

!  Set matrix elements.
!   - Each processor needs to insert only elements that it owns
!     locally (but any non-local elements will be sent to the
!     appropriate processor during matrix assembly).
!   - Always specify global rows and columns of matrix entries.

      do 10, II=Istart,Iend-1
        v = -1.0
        i = II/n
        j = II - i*n
        if (i.gt.0) then
          JJ = II - n
          call MatSetValues(A,1,II,1,JJ,v,ADD_VALUES,ierr)
        endif
        if (i.lt.m-1) then
          JJ = II + n
          call MatSetValues(A,1,II,1,JJ,v,ADD_VALUES,ierr)
        endif
        if (j.gt.0) then
          JJ = II - 1
          call MatSetValues(A,1,II,1,JJ,v,ADD_VALUES,ierr)
        endif
        if (j.lt.n-1) then
          JJ = II + 1
          call MatSetValues(A,1,II,1,JJ,v,ADD_VALUES,ierr)
        endif
        v = 4.0
        call MatSetValues(A,1,II,1,II,v,ADD_VALUES,ierr)
 10   continue
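
!  (The loop above assembles the standard 5-point finite-difference
!  Laplacian on an m x n grid: 4.0 on the diagonal and -1.0 for each of
!  the up to four grid neighbors of node II.)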

!  Assemble matrix, using the 2-step process:
!       MatAssemblyBegin(), MatAssemblyEnd()
!  Computations can be done while messages are in transition
!  by placing code between these two statements.

      call MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY,ierr)
      call MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY,ierr)

!  Create parallel vectors.
!   - When using VecCreate(), the parallel partitioning of the vector
!     is determined by PETSc at runtime.
!   - Note: We form 1 vector from scratch and then duplicate as needed.

      call VecCreate(PETSC_COMM_WORLD,u,ierr)
      call VecSetSizes(u,PETSC_DECIDE,m*n,ierr)
      call VecSetFromOptions(u,ierr)
      call VecDuplicate(u,b,ierr)
      call VecDuplicate(b,x,ierr)

!  Create linear solver context

      call KSPCreate(PETSC_COMM_WORLD,ksp,ierr)

!  Set runtime options (e.g., -ksp_type <type> -pc_type <type>)

      call KSPSetFromOptions(ksp,ierr)

!  Solve several linear systems in succession

      do 100 i=1,nsteps
        call solve1(ksp,A,x,b,u,i,nsteps,ierr)
 100  continue
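
!  (Each pass through the loop above calls solve1(), which perturbs the
!  system matrix and solves again; solve1() keeps the preconditioning
!  matrix fixed across all of the solves.)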

!  Free work space.  All PETSc objects should be destroyed when they
!  are no longer needed.

      call VecDestroy(u,ierr)
      call VecDestroy(x,ierr)
      call VecDestroy(b,ierr)
      call MatDestroy(A,ierr)
      call KSPDestroy(ksp,ierr)

      call PetscFinalize(ierr)
      end
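
!  A typical invocation (a sketch only; the MPI launcher and executable
!  name depend on the local installation) might look like
!      mpiexec -n 2 ./ex6f -m 4 -n 4 -nsteps 3 -ksp_type gmres -pc_type jacobi
!  where -m, -n, and -nsteps are read by the options calls above and
!  -ksp_type/-pc_type are picked up by KSPSetFromOptions().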

! -----------------------------------------------------------------------
!
      subroutine solve1(ksp,A,x,b,u,count,nsteps,ierr)

#include "include/finclude/petsc.h"
#include "include/finclude/petscvec.h"
#include "include/finclude/petscmat.h"
#include "include/finclude/petscpc.h"
#include "include/finclude/petscksp.h"

!
!  solve1 - This routine is used for repeated linear system solves.
!  We update the linear system matrix each time, but retain the same
!  preconditioning matrix for all linear solves.
!
!     A  - linear system matrix
!     A2 - preconditioning matrix
!
      PetscScalar  v,val
      integer      II,ierr,Istart,Iend,count,nsteps
      Mat          A
      KSP          ksp
      Vec          x,b,u

!  Use common block to retain matrix between successive subroutine calls
      Mat          A2
      integer      rank,pflag
      common /my_data/ A2,pflag,rank

!  First time through: Create new matrix to define the linear system
      if (count .eq. 1) then
        call MPI_Comm_rank(PETSC_COMM_WORLD,rank,ierr)
        pflag = 0
        call PetscOptionsHasName(PETSC_NULL_CHARACTER,'-mat_view',      &
     &                           pflag,ierr)
        if (pflag .ne. 0) then
          if (rank .eq. 0) write(6,100)
        endif
        call MatConvert(A,MATSAME,MAT_INITIAL_MATRIX,A2,ierr)

!  All other times: Set previous solution as initial guess for next solve.
      else
        call KSPSetInitialGuessNonzero(ksp,PETSC_TRUE,ierr)
      endif

!  Alter the matrix A a bit
      call MatGetOwnershipRange(A,Istart,Iend,ierr)
      do 20, II=Istart,Iend-1
        v = 2.0
        call MatSetValues(A,1,II,1,II,v,ADD_VALUES,ierr)
 20   continue
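
!  (Each call adds 2.0 to every locally owned diagonal entry of A, so the
!  system matrix drifts further from the saved copy A2 that continues to
!  define the preconditioner.)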
      call MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY,ierr)
      if (pflag .ne. 0) then
        if (rank .eq. 0) write(6,110)
      endif
      call MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY,ierr)

!  Set the exact solution; compute the right-hand-side vector
      val = 1.0*count
      call VecSet(u,val,ierr)
      call MatMult(A,u,b,ierr)

!  Set operators, keeping the identical preconditioner matrix for
!  all linear solves.  This approach is often effective when the
!  linear systems do not change very much between successive steps.
      call KSPSetOperators(ksp,A,A2,SAME_PRECONDITIONER,ierr)

!  Solve linear system
      call KSPSolve(ksp,b,x,ierr)

!  Destroy the preconditioner matrix on the last time through
      if (count .eq. nsteps) call MatDestroy(A2,ierr)

 100  format('previous matrix: preconditioning')
 110  format('next matrix: defines linear system')

      end