Actual source code: ex8f.F
!
!   Tests PCMGSetResidual
!
! -----------------------------------------------------------------------
      program main
      implicit none
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                          Include files
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!
#include "include/finclude/petsc.h"
#include "include/finclude/petscvec.h"
#include "include/finclude/petscmat.h"
#include "include/finclude/petscpc.h"
#include "include/finclude/petscksp.h"
#include "include/finclude/petscmg.h"
#include "include/finclude/petscsys.h"
!
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                        Variable declarations
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!
!  Variables:
!     ksp     - linear solver context
!     pc      - preconditioner context
!     x, b, u - approx solution, right-hand-side, exact solution vectors
!     A       - matrix that defines linear system
!
      Mat              A
      Vec              x,b,u
      PC               pc
      KSP              ksp
      PetscInt         n,dim,istart,iend
      PetscInt         i,j,jj,ii,one,zero
      PetscErrorCode   ierr
      PetscScalar      v,pfive
      double precision h2
      external         MyResidual
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                        Beginning of program
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
      call PetscInitialize(PETSC_NULL_CHARACTER,ierr)

!  n is the number of grid points in each direction of a regular
!  two-dimensional mesh, so the linear system has dim = n*n unknowns.
      pfive = .5d0
      n     = 6
      dim   = n*n
      one   = 1
      zero  = 0
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!      Compute the matrix and right-hand-side vector that define
!      the linear system, Ax = b.
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!  Create parallel matrix, specifying only its global dimensions.
!  When using MatCreate(), the matrix format can be specified at
!  runtime. Also, the parallel partitioning of the matrix is
!  determined by PETSc at runtime.

      call MatCreate(PETSC_COMM_WORLD,A,ierr)
      call MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,dim,dim,ierr)
      call MatSetFromOptions(A,ierr)
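!  Usage sketch (the executable name ex8f is assumed): the matrix type
!  can be chosen on the command line at run time, since
!  MatSetFromOptions() consumes the standard -mat_type option, e.g.
!      mpirun -np 2 ex8f -mat_type mpiaij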
!  Currently, all PETSc parallel matrix formats are partitioned by
!  contiguous chunks of rows across the processors. Determine which
!  rows of the matrix are locally owned.

      call MatGetOwnershipRange(A,Istart,Iend,ierr)
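!  Illustration (an assumed scenario, not computed by this code): with
!  n = 6 the matrix has 36 rows, so on two processes each would own 18
!  contiguous rows, e.g. rows 0..17 on rank 0 and rows 18..35 on rank 1.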
!  Set matrix elements in parallel.
!   - Each processor needs to insert only elements that it owns
!     locally (but any non-local elements will be sent to the
!     appropriate processor during matrix assembly).
!   - Always specify global rows and columns of matrix entries.

!  h2 is the squared mesh width; it is computed here but the stencil
!  entries below are left unscaled, which is fine for this test.
      h2 = 1.0/((n+1)*(n+1))
      do 10, II=Istart,Iend-1
         v = -1.0
         i = II/n
         j = II - i*n
         if (i.gt.0) then
            JJ = II - n
            call MatSetValues(A,one,II,one,JJ,v,ADD_VALUES,ierr)
         endif
         if (i.lt.n-1) then
            JJ = II + n
            call MatSetValues(A,one,II,one,JJ,v,ADD_VALUES,ierr)
         endif
         if (j.gt.0) then
            JJ = II - 1
            call MatSetValues(A,one,II,one,JJ,v,ADD_VALUES,ierr)
         endif
         if (j.lt.n-1) then
            JJ = II + 1
            call MatSetValues(A,one,II,one,JJ,v,ADD_VALUES,ierr)
         endif
         v = 4.0
         call MatSetValues(A,one,II,one,II,v,ADD_VALUES,ierr)
 10   continue
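!  The loop above assembles the standard 5-point finite-difference
!  Laplacian stencil on the n x n grid: each row II couples to its
!  north/south neighbors (II +/- n) and east/west neighbors (II +/- 1)
!  with -1, and carries 4 on the diagonal. The tests on i and j skip
!  neighbors that would fall outside the grid boundary.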
!  Assemble matrix, using the 2-step process:
!     MatAssemblyBegin(), MatAssemblyEnd()
!  Computations can be done while messages are in transit
!  by placing code between these two statements.

      call MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY,ierr)
      call MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY,ierr)
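!  For instance (an illustrative arrangement, not required here), the
!  vector creation below could be moved between MatAssemblyBegin() and
!  MatAssemblyEnd() so that it overlaps the assembly communication.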
!  Create parallel vectors.
!   - Here, the parallel partitioning of the vector is determined by
!     PETSc at runtime. We could also specify the local dimensions
!     if desired.
!   - Note: We form 1 vector from scratch and then duplicate as needed.

      call VecCreate(PETSC_COMM_WORLD,u,ierr)
      call VecSetSizes(u,PETSC_DECIDE,dim,ierr)
      call VecSetFromOptions(u,ierr)
      call VecDuplicate(u,b,ierr)
      call VecDuplicate(b,x,ierr)
!  Set exact solution; then compute right-hand-side vector.

      call VecSet(u,pfive,ierr)
      call MatMult(A,u,b,ierr)
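!  With every entry of u set to 0.5, b = A*u is a right-hand side whose
!  exact solution is known, so the error could be checked directly if a
!  solve were added to this test.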
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!         Create the linear solver and set various options
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Create linear solver context

      call KSPCreate(PETSC_COMM_WORLD,ksp,ierr)
      call KSPGetPC(ksp,pc,ierr)
      call PCSetType(pc,PCMG,ierr)
      call PCMGSetLevels(pc,one,PETSC_NULL_OBJECT,ierr)
      call PCMGSetResidual(pc,zero,PCMGDefaultResidual,A,ierr)

      call PCMGSetResidual(pc,zero,MyResidual,A,ierr)
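!  The two PCMGSetResidual() calls above are the point of this test:
!  the first registers the library-provided residual routine
!  (PCMGDefaultResidual) for level zero, and the second replaces it
!  with the user-supplied routine MyResidual. The level argument is
!  zero because PCMGSetLevels() created a single level.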
!  Set operators. Here the matrix that defines the linear system
!  also serves as the preconditioning matrix.

      call KSPSetOperators(ksp,A,A,DIFFERENT_NONZERO_PATTERN,ierr)
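!  Note that no KSPSolve() is called: this example only exercises the
!  PCMGSetResidual() interface, so MyResidual is registered but never
!  actually invoked.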
!  Free work space. All PETSc objects should be destroyed when they
!  are no longer needed.

      call KSPDestroy(ksp,ierr)
      call VecDestroy(u,ierr)
      call VecDestroy(x,ierr)
      call VecDestroy(b,ierr)
      call MatDestroy(A,ierr)

      call PetscFinalize(ierr)
      end
! -----------------------------------------------------------------------
!  MyResidual - User-provided residual routine, registered with
!  PCMGSetResidual() above. Deliberately a no-op: this test only checks
!  that the routine can be registered, and never calls KSPSolve().

      subroutine MyResidual(A,b,x,r,ierr)
      implicit none
      Mat            A
      Vec            b,x,r
      PetscErrorCode ierr

      ierr = 0
      return
      end
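! -----------------------------------------------------------------------
!  A minimal sketch (not part of the original test, name MyResidualImpl
!  is hypothetical) of what a working residual routine for
!  PCMGSetResidual() could look like: it computes r = b - A*x using
!  MatMult() and VecAYPX().

      subroutine MyResidualImpl(A,b,x,r,ierr)
      implicit none
      Mat            A
      Vec            b,x,r
      PetscErrorCode ierr
      PetscScalar    mone

      mone = -1.0
!  r = A*x
      call MatMult(A,x,r,ierr)
!  r = b + mone*r = b - A*x
      call VecAYPX(r,mone,b,ierr)
      return
      end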