!  "$Id: ex6f.F,v 1.36 2001/08/07 03:04:00 balay Exp $";
!
!  Description: This example demonstrates repeated linear solves as
!  well as the use of different preconditioner and linear system
!  matrices.  This example also illustrates how to save PETSc objects
!  in a common block.
!
!  Concepts: KSP^repeatedly solving linear systems;
!  Concepts: KSP^different matrices for linear system and preconditioner;
!
!  The following include statements are required for KSP Fortran programs:
!     petsc.h       - base PETSc routines
!     petscvec.h    - vectors
!     petscmat.h    - matrices
!     petscpc.h     - preconditioners
!     petscksp.h    - Krylov subspace methods
!  Other include statements may be needed if using additional PETSc
!  routines in a Fortran program, e.g.,
!     petscviewer.h - viewers
!     petscis.h     - index sets
!
#include "include/finclude/petsc.h"
#include "include/finclude/petscvec.h"
#include "include/finclude/petscmat.h"
#include "include/finclude/petscpc.h"
#include "include/finclude/petscksp.h"

!  Variables:
!     A       - matrix that defines linear system
!     ksp     - linear solver context
!     x, b, u - approx solution, RHS, exact solution vectors
!
      Mat              A
      KSP              ksp
      Vec              x,b,u
      PetscScalar      v
      integer          i,j,II,JJ,ierr,m,n
      integer          Istart,Iend,flg,nsteps

      call PetscInitialize(PETSC_NULL_CHARACTER,ierr)

!  Default problem size and number of solves; all three can be
!  overridden at runtime with -m, -n, and -nsteps.
      m      = 3
      n      = 3
      nsteps = 2
      call PetscOptionsGetInt(PETSC_NULL_CHARACTER,'-m',m,flg,ierr)
      call PetscOptionsGetInt(PETSC_NULL_CHARACTER,'-n',n,flg,ierr)
      call PetscOptionsGetInt(PETSC_NULL_CHARACTER,'-nsteps',nsteps, &
     &                        flg,ierr)

!  Create parallel matrix, specifying only its global dimensions.
!  When using MatCreate(), the matrix format can be specified at
!  runtime.  Also, the parallel partitioning of the matrix is
!  determined by PETSc at runtime.

      call MatCreate(PETSC_COMM_WORLD,PETSC_DECIDE,PETSC_DECIDE,m*n, &
     &               m*n,A,ierr)
      call MatSetFromOptions(A,ierr)

!  The matrix is partitioned by contiguous chunks of rows across the
!  processors.  Determine which rows of the matrix are locally owned.

      call MatGetOwnershipRange(A,Istart,Iend,ierr)
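!  (MatGetOwnershipRange returns the first locally owned global row
!   in Istart and one past the last in Iend, so this process owns
!   rows Istart, ..., Iend-1.)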

!  Set matrix elements.
!   - Each processor needs to insert only elements that it owns
!     locally (but any non-local elements will be sent to the
!     appropriate processor during matrix assembly).
!   - Always specify global rows and columns of matrix entries.
!   - Each row II is coupled to its four grid neighbors and to itself
!     (a standard 5-point stencil on the m x n grid).

      do 10, II=Istart,Iend-1
        v = -1.0
        i = II/n
        j = II - i*n
        if (i.gt.0) then
          JJ = II - n
          call MatSetValues(A,1,II,1,JJ,v,ADD_VALUES,ierr)
        endif
        if (i.lt.m-1) then
          JJ = II + n
          call MatSetValues(A,1,II,1,JJ,v,ADD_VALUES,ierr)
        endif
        if (j.gt.0) then
          JJ = II - 1
          call MatSetValues(A,1,II,1,JJ,v,ADD_VALUES,ierr)
        endif
        if (j.lt.n-1) then
          JJ = II + 1
          call MatSetValues(A,1,II,1,JJ,v,ADD_VALUES,ierr)
        endif
        v = 4.0
        call MatSetValues(A,1,II,1,II,v,ADD_VALUES,ierr)
 10   continue

!  Assemble matrix, using the 2-step process:
!       MatAssemblyBegin(), MatAssemblyEnd()
!  Computations can be done while messages are in transition
!  by placing code between these two statements.

      call MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY,ierr)
      call MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY,ierr)

!  Create parallel vectors.
!   - When using VecCreate(), the parallel partitioning of the vector
!     is determined by PETSc at runtime.
!   - Note: We form 1 vector from scratch and then duplicate as needed.

      call VecCreate(PETSC_COMM_WORLD,u,ierr)
      call VecSetSizes(u,PETSC_DECIDE,m*n,ierr)
      call VecSetFromOptions(u,ierr)
      call VecDuplicate(u,b,ierr)
      call VecDuplicate(b,x,ierr)

!  Create linear solver context

      call KSPCreate(PETSC_COMM_WORLD,ksp,ierr)

!  Set runtime options (e.g., -ksp_type <type> -pc_type <type>)

      call KSPSetFromOptions(ksp,ierr)

!  Solve several linear systems in succession

      do 30 i=1,nsteps
        call solve1(ksp,A,x,b,u,i,nsteps,ierr)
 30   continue

!  Free work space.  All PETSc objects should be destroyed when they
!  are no longer needed.

      call VecDestroy(u,ierr)
      call VecDestroy(x,ierr)
      call VecDestroy(b,ierr)
      call MatDestroy(A,ierr)
      call KSPDestroy(ksp,ierr)

      call PetscFinalize(ierr)
      end

! -----------------------------------------------------------------------
      subroutine solve1(ksp,A,x,b,u,count,nsteps,ierr)

#include "include/finclude/petsc.h"
#include "include/finclude/petscvec.h"
#include "include/finclude/petscmat.h"
#include "include/finclude/petscpc.h"
#include "include/finclude/petscksp.h"

!  solve1 - This routine is used for repeated linear system solves.
!  We update the linear system matrix each time, but retain the same
!  preconditioning matrix for all linear solves.
!
!      A  - linear system matrix
!      A2 - preconditioning matrix
!
      KSP              ksp
      Mat              A,A2
      Vec              x,b,u
      PetscScalar      v,val
      integer          II,ierr,Istart,Iend,count,nsteps
      integer          pflag,rank

!  Use common block to retain matrix between successive subroutine calls
      common /my_data/ A2,pflag,rank
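!  (PETSc objects are opaque handles on the Fortran side, so keeping
!   A2 in a common block retains the same matrix for later calls
!   without copying any matrix data.)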

!  First time through: Create new matrix to define the linear system
      if (count .eq. 1) then
        call MPI_Comm_rank(PETSC_COMM_WORLD,rank,ierr)
        call PetscOptionsHasName(PETSC_NULL_CHARACTER,'-mat_view',  &
     &                           pflag,ierr)
        if (pflag .ne. 0) then
          if (rank .eq. 0) write(6,100)
        endif
        call MatConvert(A,MATSAME,A2,ierr)

!  All other times: Set previous solution as initial guess for next solve.
      else
        call KSPSetInitialGuessNonzero(ksp,PETSC_TRUE,ierr)
      endif

!  Alter the matrix A a bit
      call MatGetOwnershipRange(A,Istart,Iend,ierr)
      do 20, II=Istart,Iend-1
        v = 2.0
        call MatSetValues(A,1,II,1,II,v,ADD_VALUES,ierr)
 20   continue
      call MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY,ierr)
      if (pflag .ne. 0) then
        if (rank .eq. 0) write(6,110)
      endif
      call MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY,ierr)

!  Set the exact solution; compute the right-hand-side vector
      val = 1.0
      call VecSet(val,u,ierr)
      call MatMult(A,u,b,ierr)

!  Set operators, keeping the identical preconditioner matrix for
!  all linear solves.  This approach is often effective when the
!  linear systems do not change very much between successive steps.
      call KSPSetOperators(ksp,A,A2,SAME_PRECONDITIONER,ierr)
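!  (SAME_PRECONDITIONER keeps using the preconditioner built from A2;
!   flags such as SAME_NONZERO_PATTERN or DIFFERENT_NONZERO_PATTERN
!   would rebuild it from the new operator on each solve.)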

!  Solve linear system
      call KSPSetRhs(ksp,b,ierr)
      call KSPSetSolution(ksp,x,ierr)
      call KSPSolve(ksp,ierr)

!  Destroy the preconditioner matrix on the last time through
      if (count .eq. nsteps) call MatDestroy(A2,ierr)

 100  format('previous matrix: preconditioning')
 110  format('next matrix: defines linear system')

      return
      end
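!
!  Example run, using the options queried above (adjust the MPI
!  launcher and executable name for your installation):
!     mpirun -np 2 ex6f -m 10 -n 10 -nsteps 3 -mat_view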