! "$Id: ex6f.F,v 1.36 2001/08/07 03:04:00 balay Exp $";
!
!  Description: This example demonstrates repeated linear solves as
!  well as the use of different preconditioner and linear system
!  matrices.  This example also illustrates how to save PETSc objects
!  in common blocks.
!
!  Concepts: SLES^repeatedly solving linear systems;
!  Concepts: SLES^different matrices for linear system and preconditioner;
!
! -----------------------------------------------------------------------

      program main
      implicit none
!  The following include statements are required for SLES Fortran programs:
!     petsc.h       - base PETSc routines
!     petscvec.h    - vectors
!     petscmat.h    - matrices
!     petscpc.h     - preconditioners
!     petscksp.h    - Krylov subspace methods
!     petscsles.h   - SLES interface
!  Other include statements may be needed if using additional PETSc
!  routines in a Fortran program, e.g.,
!     petscviewer.h - viewers
!     petscis.h     - index sets
#include "include/finclude/petsc.h"
#include "include/finclude/petscvec.h"
#include "include/finclude/petscmat.h"
#include "include/finclude/petscsles.h"
#include "include/finclude/petscpc.h"
#include "include/finclude/petscksp.h"
!  Variables:
!     sles    - linear solver context
!     A       - matrix that defines linear system
!     x, b, u - approx solution, RHS, exact solution vectors
!
      SLES        sles
      Mat         A
      Vec         x,b,u
      PetscScalar v
      integer     i,j,II,JJ,ierr,m,n
      integer     Istart,Iend,flg,nsteps
      call PetscInitialize(PETSC_NULL_CHARACTER,ierr)

!  Set default problem dimensions and number of solves, then check
!  for runtime overrides
      m      = 3
      n      = 3
      nsteps = 2
      call PetscOptionsGetInt(PETSC_NULL_CHARACTER,'-m',m,flg,ierr)
      call PetscOptionsGetInt(PETSC_NULL_CHARACTER,'-n',n,flg,ierr)
      call PetscOptionsGetInt(PETSC_NULL_CHARACTER,'-nsteps',nsteps,   &
     &                        flg,ierr)
!  Create parallel matrix, specifying only its global dimensions.
!  When using MatCreate(), the matrix format can be specified at
!  runtime.  Also, the parallel partitioning of the matrix is
!  determined by PETSc at runtime.

      call MatCreate(PETSC_COMM_WORLD,PETSC_DECIDE,PETSC_DECIDE,m*n,   &
     &               m*n,A,ierr)
      call MatSetFromOptions(A,ierr)
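!  (MatSetFromOptions() is what makes the format selectable at runtime,
!  e.g. via the standard -mat_type option such as -mat_type mpiaij.)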
!  The matrix is partitioned by contiguous chunks of rows across the
!  processors.  Determine which rows of the matrix are locally owned.

      call MatGetOwnershipRange(A,Istart,Iend,ierr)
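!  (For example, with m = n = 4 on two processes, PETSC_DECIDE typically
!  places rows 0..7 on process 0 and rows 8..15 on process 1.)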
!  Set matrix elements.
!   - Each processor needs to insert only elements that it owns
!     locally (but any non-local elements will be sent to the
!     appropriate processor during matrix assembly).
!   - Always specify global rows and columns of matrix entries.
!
!  The loop fills in the standard 5-point Laplacian stencil on an
!  m x n grid: -1 for each existing neighbor, 4 on the diagonal.

      do 10, II=Istart,Iend-1
        v = -1.0
        i = II/n
        j = II - i*n
        if (i.gt.0) then
          JJ = II - n
          call MatSetValues(A,1,II,1,JJ,v,ADD_VALUES,ierr)
        endif
        if (i.lt.m-1) then
          JJ = II + n
          call MatSetValues(A,1,II,1,JJ,v,ADD_VALUES,ierr)
        endif
        if (j.gt.0) then
          JJ = II - 1
          call MatSetValues(A,1,II,1,JJ,v,ADD_VALUES,ierr)
        endif
        if (j.lt.n-1) then
          JJ = II + 1
          call MatSetValues(A,1,II,1,JJ,v,ADD_VALUES,ierr)
        endif
        v = 4.0
        call MatSetValues(A,1,II,1,II,v,ADD_VALUES,ierr)
 10   continue
!  Assemble matrix, using the 2-step process:
!    MatAssemblyBegin(), MatAssemblyEnd()
!  Computations can be done while messages are in transition
!  by placing code between these two statements.

      call MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY,ierr)
      call MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY,ierr)
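!  (The solve1 routine below puts this overlap into practice: it prints
!  a header line between MatAssemblyBegin() and MatAssemblyEnd(), while
!  the assembly messages are still in transit.)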
!  Create parallel vectors.
!   - When using VecCreate(), the parallel partitioning of the vector
!     is determined by PETSc at runtime.
!   - Note: We form 1 vector from scratch and then duplicate as needed.

      call VecCreate(PETSC_COMM_WORLD,u,ierr)
      call VecSetSizes(u,PETSC_DECIDE,m*n,ierr)
      call VecSetFromOptions(u,ierr)
      call VecDuplicate(u,b,ierr)
      call VecDuplicate(b,x,ierr)
!  Create linear solver context

      call SLESCreate(PETSC_COMM_WORLD,sles,ierr)

!  Set runtime options (e.g., -ksp_type <type> -pc_type <type>)

      call SLESSetFromOptions(sles,ierr)
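!  (An illustrative invocation; any standard KSP/PC types can be
!  substituted:
!     mpirun -np 2 ex6f -m 10 -n 10 -nsteps 3 -ksp_type gmres
!            -pc_type jacobi )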
!  Solve several linear systems in succession

      do 100 i=1,nsteps
        call solve1(sles,A,x,b,u,i,nsteps,ierr)
 100  continue
!  Free work space.  All PETSc objects should be destroyed when they
!  are no longer needed.

      call VecDestroy(u,ierr)
      call VecDestroy(x,ierr)
      call VecDestroy(b,ierr)
      call MatDestroy(A,ierr)
      call SLESDestroy(sles,ierr)
      call PetscFinalize(ierr)
      end
! -----------------------------------------------------------------------
!
      subroutine solve1(sles,A,x,b,u,count,nsteps,ierr)
      implicit none

#include "include/finclude/petsc.h"
#include "include/finclude/petscvec.h"
#include "include/finclude/petscmat.h"
#include "include/finclude/petscsles.h"
#include "include/finclude/petscpc.h"
#include "include/finclude/petscksp.h"
!  solve1 - This routine is used for repeated linear system solves.
!  We update the linear system matrix each time, but retain the same
!  preconditioning matrix for all linear solves.
!
!     A  - linear system matrix
!     A2 - preconditioning matrix
!
      SLES        sles
      Mat         A,A2
      Vec         x,b,u
      KSP         ksp
      PetscScalar v,val
      PetscTruth  pflag
      integer     II,ierr,Istart,Iend,count,nsteps,its
      integer     rank

!  Use common block to retain matrix between successive subroutine calls
      common /my_data/ A2,pflag,rank
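!  (PETSc objects such as A2 are opaque handles in the Fortran
!  interface, so keeping them in a common block preserves them across
!  calls; a save statement would be an alternative.)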
!  First time through: Create new matrix to define the linear system
      if (count .eq. 1) then
        call MPI_Comm_rank(PETSC_COMM_WORLD,rank,ierr)
        call PetscOptionsHasName(PETSC_NULL_CHARACTER,'-mat_view',     &
     &                           pflag,ierr)
        if (pflag .ne. 0) then
          if (rank .eq. 0) write(6,100)
        endif
        call MatConvert(A,MATSAME,A2,ierr)

!  All other times: Set previous solution as initial guess for next solve.
      else
        call SLESGetKSP(sles,ksp,ierr)
        call KSPSetInitialGuessNonzero(ksp,PETSC_TRUE,ierr)
      endif
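!  (By default the KSP solvers zero the initial guess before solving;
!  flagging it as nonzero lets the previous solution left in x seed the
!  next solve, which typically reduces iterations when successive
!  systems are close to one another.)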
!  Alter the matrix A a bit by shifting its diagonal
!  (the value added is arbitrary; the point is just to change A)
      call MatGetOwnershipRange(A,Istart,Iend,ierr)
      do 20, II=Istart,Iend-1
        v = 2.0
        call MatSetValues(A,1,II,1,II,v,ADD_VALUES,ierr)
 20   continue

!  Assemble the altered matrix.  The header line is printed between
!  MatAssemblyBegin() and MatAssemblyEnd() while messages are in
!  transit; if -mat_view is set, the matrix itself is printed during
!  assembly, after the header.
      call MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY,ierr)
      if (pflag .ne. 0) then
        if (rank .eq. 0) write(6,110)
      endif
      call MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY,ierr)
!  Set the exact solution; compute the right-hand-side vector
      val = 1.0*count
      call VecSet(val,u,ierr)
      call MatMult(A,u,b,ierr)
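!  (Since u holds a known exact solution and b = A*u, the difference
!  x - u after the solve measures the solver error directly.)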
!  Set operators, keeping the identical preconditioner matrix for
!  all linear solves.  This approach is often effective when the
!  linear systems do not change very much between successive steps.
      call SLESSetOperators(sles,A,A2,SAME_PRECONDITIONER,ierr)
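!  (SAME_PRECONDITIONER tells SLES to keep reusing the preconditioner
!  built from A2.  The other MatStructure flags, SAME_NONZERO_PATTERN
!  and DIFFERENT_NONZERO_PATTERN, would rebuild it from the updated
!  operator instead.)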
!  Solve linear system
      call SLESSolve(sles,b,x,its,ierr)
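!  (Not in the original example: a minimal sketch of reporting the
!  iteration count that SLESSolve() returned in its.)
      if (rank .eq. 0) write(6,*) 'iterations for convergence:',its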
!  Destroy the preconditioner matrix on the last time through
      if (count .eq. nsteps) call MatDestroy(A2,ierr)
 100  format('previous matrix: preconditioning')
 110  format('next matrix: defines linear system')

      return
      end