#include "EpetraExt_config.h"

#include <stdio.h>
#include <stdlib.h>

#if defined(HAVE_PETSC) && defined(HAVE_ML_EPETRA) && defined(HAVE_ML_TEUCHOS) && defined(HAVE_ML_AZTECOO) && defined(HAVE_MPI)

#include "petscksp.h"

#include "EpetraExt_PETScAIJMatrix.h"
#include "Epetra_LinearProblem.h"
#include "Epetra_Map.h"
#include "Epetra_Vector.h"
#include "AztecOO.h"
#include "ml_MultiLevelPreconditioner.h"
/*
   This example demonstrates how to apply a Trilinos preconditioner to a PETSc
   linear system.

   For information on configuring and building Trilinos with the PETSc aij
   interface enabled, please see EpetraExt's doxygen documentation at
   http://trilinos.sandia.gov/packages/epetraext, development version
   or release 9.0 or later.

   The PETSc matrix is a 2D 5-point Laplace operator stored in AIJ format.
   This matrix is wrapped as an Epetra_PETScAIJMatrix, and an ML AMG
   preconditioner is created for it. The associated linear system is solved twice,
   the first time using AztecOO's preconditioned CG, the second time using PETSc's.

   To invoke this example, use something like:

       mpirun -np 5 ./TrilinosCouplings_petsc.exe -m 150 -n 150 -petsc_smoother -ksp_monitor_true_residual
*/
/* Usage text printed by PetscInitialize when the program is run with -help.
   Each source line ends in "\n\" so that adjacent lines do not run together
   in the rendered message. */
static char help[] = "Demonstrates how to solve a PETSc linear system with KSP\n\
and a Trilinos AMG preconditioner. In particular, it shows how to wrap a PETSc\n\
AIJ matrix as an Epetra matrix, create the AMG preconditioner, and wrap it as a\n\
shell preconditioner for a PETSc Krylov method.\n\
Input parameters include:\n\
  -random_exact_sol : use a random exact solution vector\n\
  -view_exact_sol   : write exact solution vector to stdout\n\
  -m <mesh_x>       : number of mesh points in x-direction\n\
  -n <mesh_n>       : number of mesh points in y-direction\n\n";
41
extern PetscErrorCode ShellApplyML(void*,Vec,Vec);
44
#define __FUNCT__ "main"
45
int main(int argc,char **args)
47
Vec x,b,u; /* approx solution, RHS, exact solution */
48
Mat A; /* linear system matrix */
49
KSP ksp; /* linear solver context */
50
KSP kspSmoother=0; /* solver context for PETSc fine grid smoother */
52
PetscRandom rctx; /* random number generator context */
53
PetscReal norm; /* norm of solution error */
54
PetscInt i,j,Ii,J,Istart,Iend,its;
55
PetscInt m = 50,n = 50; /* #mesh points in x & y directions, resp. */
58
PetscScalar v,one = 1.0,neg_one = -1.0;
62
PetscInitialize(&argc,&args,(char *)0,help);
63
ierr = PetscOptionsGetInt(PETSC_NULL,"-m",&m,PETSC_NULL);CHKERRQ(ierr);
64
ierr = PetscOptionsGetInt(PETSC_NULL,"-n",&n,PETSC_NULL);CHKERRQ(ierr);
66
ierr = MatCreate(PETSC_COMM_WORLD,&A);CHKERRQ(ierr);
67
ierr = MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,m*n,m*n);CHKERRQ(ierr);
68
ierr = MatSetType(A, MATAIJ);CHKERRQ(ierr);
69
ierr = MatSetFromOptions(A);CHKERRQ(ierr);
70
ierr = MatMPIAIJSetPreallocation(A,5,PETSC_NULL,5,PETSC_NULL);CHKERRQ(ierr);
71
PetscObjectGetComm( (PetscObject)A, &comm);
72
ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
73
if (!rank) printf("Matrix has %d (%dx%d) rows\n",m*n,m,n);
75
ierr = MatGetOwnershipRange(A,&Istart,&Iend);CHKERRQ(ierr);
77
for (Ii=Istart; Ii<Iend; Ii++) {
78
v = -1.0; i = Ii/n; j = Ii - i*n;
79
if (i>0) {J = Ii - n; ierr = MatSetValues(A,1,&Ii,1,&J,&v,INSERT_VALUES);CHKERRQ(ierr);}
80
if (i<m-1) {J = Ii + n; ierr = MatSetValues(A,1,&Ii,1,&J,&v,INSERT_VALUES);CHKERRQ(ierr);}
81
if (j>0) {J = Ii - 1; ierr = MatSetValues(A,1,&Ii,1,&J,&v,INSERT_VALUES);CHKERRQ(ierr);}
82
if (j<n-1) {J = Ii + 1; ierr = MatSetValues(A,1,&Ii,1,&J,&v,INSERT_VALUES);CHKERRQ(ierr);}
83
v = 4.0; ierr = MatSetValues(A,1,&Ii,1,&Ii,&v,INSERT_VALUES);CHKERRQ(ierr);
86
ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
87
ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
89
/* Wrap the PETSc matrix as an Epetra_PETScAIJMatrix. This is lightweight,
90
i.e., no deep data copies. */
91
Epetra_PETScAIJMatrix epA(A);
93
ierr = VecCreate(PETSC_COMM_WORLD,&u);CHKERRQ(ierr);
94
ierr = VecSetSizes(u,PETSC_DECIDE,m*n);CHKERRQ(ierr);
95
ierr = VecSetFromOptions(u);CHKERRQ(ierr);
96
ierr = VecDuplicate(u,&b);CHKERRQ(ierr);
98
ierr = VecDuplicate(b,&x);CHKERRQ(ierr);
100
ierr = PetscOptionsHasName(PETSC_NULL,"-random_exact_sol",&flg);CHKERRQ(ierr);
102
ierr = PetscRandomCreate(PETSC_COMM_WORLD,&rctx);CHKERRQ(ierr);
103
ierr = PetscRandomSetFromOptions(rctx);CHKERRQ(ierr);
104
ierr = VecSetRandom(u,rctx);CHKERRQ(ierr);
105
ierr = PetscRandomDestroy(rctx);CHKERRQ(ierr);
107
ierr = VecSet(u,one);CHKERRQ(ierr);
109
ierr = MatMult(A,u,b);CHKERRQ(ierr);
110
ierr = VecNorm(b,NORM_2,&norm);CHKERRQ(ierr);
111
if (rank==0) printf("||b|| = %f\n",norm);
113
/* Copy the PETSc vectors to Epetra vectors. */
114
Epetra_Vector epu(epA.Map()), epb(epA.Map());
116
ierr = VecGetArray(u,&vals);CHKERRQ(ierr);
118
ierr = VecGetLocalSize(u,&length);CHKERRQ(ierr);
119
for (int j=0; j<length; j++) epu[j] = vals[j];
120
ierr = VecRestoreArray(u,&vals);CHKERRQ(ierr);
121
epA.Multiply(false, epu, epb);
123
/* Check norms of the Epetra and PETSc vectors. */
125
if (rank == 0) printf("||epetra u||_2 = %f\n",norm);
126
ierr = VecNorm(u,NORM_2,&norm);CHKERRQ(ierr);
127
if (rank == 0) printf("||petsc u||_2 = %f\n",norm);
129
if (rank == 0) printf("||epetra b||_2 = %f\n",norm);
130
ierr = VecNorm(b,NORM_2,&norm);CHKERRQ(ierr);
131
if (rank == 0) printf("||petsc b||_2 = %f\n",norm);
133
/* Create the ML AMG preconditioner. */
135
/* Parameter list that holds options for AMG preconditioner. */
136
Teuchos::ParameterList mlList;
137
/* Set recommended defaults for Poisson-like problems. */
138
ML_Epetra::SetDefaults("SA",mlList);
139
/* Specify how much information ML prints to screen.
140
0 is the minimum (no output), 10 is the maximum. */
141
mlList.set("ML output",10);
142
/* Set the fine grid smoother. PETSc will be much faster for any
143
smoother requiring row access, e.g., SOR. For any smoother whose
144
kernel is a matvec, Trilinos/PETSc performance should be comparable,
145
as Trilinos simply calls the PETSc matvec.
147
To use a PETSc smoother, create a KSP object, set the KSP type to
148
KSPRICHARDSON, and set the desired smoother as the KSP preconditioner.
149
It is important that you call KSPSetInitialGuessNonzero. Otherwise, the
150
post-smoother phase will incorrectly ignore the current approximate
151
solution. The KSP pointer must be cast to void* and passed to ML via
154
You are responsible for freeing the KSP object.
156
ierr = PetscOptionsHasName(PETSC_NULL,"-petsc_smoother",&flg);CHKERRQ(ierr);
158
ierr = KSPCreate(comm,&kspSmoother);CHKERRQ(ierr);
159
ierr = KSPSetOperators(kspSmoother,A,A,SAME_NONZERO_PATTERN);CHKERRQ(ierr);
160
ierr = KSPSetType(kspSmoother,KSPRICHARDSON);CHKERRQ(ierr);
161
ierr = KSPSetTolerances(kspSmoother, 1e-12, 1e-50, 1e7,1);
162
ierr = KSPSetInitialGuessNonzero(kspSmoother,PETSC_TRUE);CHKERRQ(ierr);
163
ierr = KSPGetPC(kspSmoother,&pc);CHKERRQ(ierr);
164
ierr = PCSetType(pc, PCSOR);CHKERRQ(ierr);
165
ierr = PCSORSetSymmetric(pc,SOR_LOCAL_SYMMETRIC_SWEEP);CHKERRQ(ierr);
166
ierr = PCSetFromOptions(pc);CHKERRQ(ierr);
167
ierr = KSPSetUp(kspSmoother);CHKERRQ(ierr);
168
mlList.set("smoother: type (level 0)","petsc");
169
mlList.set("smoother: petsc ksp (level 0)",(void*)kspSmoother);
171
mlList.set("smoother: type (level 0)","symmetric Gauss-Seidel");
174
/* how many fine grid pre- or post-smoothing sweeps to do */
175
mlList.set("smoother: sweeps (level 0)",2);
177
ML_Epetra::MultiLevelPreconditioner *Prec = new ML_Epetra::MultiLevelPreconditioner(epA,mlList);
181
Epetra_LinearProblem Problem(&epA, &epu, &epb);
182
AztecOO solver(Problem);
183
solver.SetAztecOption(AZ_solver, AZ_cg);
184
solver.SetAztecOption(AZ_output, 1);
185
solver.SetAztecOption(AZ_conv, AZ_noscaled);
186
solver.SetPrecOperator(Prec);
187
solver.Iterate(30, 1e-12);
190
ierr = KSPCreate(PETSC_COMM_WORLD,&ksp);CHKERRQ(ierr);
191
ierr = KSPSetOperators(ksp,A,A,DIFFERENT_NONZERO_PATTERN);CHKERRQ(ierr);
192
ierr = KSPSetTolerances(ksp,1e-12,1.e-50,PETSC_DEFAULT,
193
PETSC_DEFAULT);CHKERRQ(ierr);
194
ierr = KSPSetType(ksp,KSPCG);CHKERRQ(ierr);
196
/* Wrap the ML preconditioner as a PETSc shell preconditioner. */
197
ierr = KSPGetPC(ksp,&pc);CHKERRQ(ierr);
198
ierr = PCSetType(pc,PCSHELL);CHKERRQ(ierr);
199
ierr = PCShellSetApply(pc,ShellApplyML);CHKERRQ(ierr);
200
ierr = PCShellSetContext(pc,(void*)Prec);CHKERRQ(ierr);
201
ierr = PCShellSetName(pc,"ML AMG");CHKERRQ(ierr);
203
ierr = KSPSetFromOptions(ksp);CHKERRQ(ierr);
205
ierr = KSPSolve(ksp,b,x);CHKERRQ(ierr);
207
ierr = KSPView(ksp,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
209
ierr = VecAXPY(x,neg_one,u);CHKERRQ(ierr);
210
ierr = VecNorm(x,NORM_2,&norm);CHKERRQ(ierr);
211
ierr = KSPGetIterationNumber(ksp,&its);CHKERRQ(ierr);
213
ierr = PetscPrintf(PETSC_COMM_WORLD,"Norm of error %A iterations %D\n",
214
norm,its);CHKERRQ(ierr);
216
ierr = KSPDestroy(ksp);CHKERRQ(ierr);
217
ierr = VecDestroy(u);CHKERRQ(ierr); ierr = VecDestroy(x);CHKERRQ(ierr);
218
ierr = VecDestroy(b);CHKERRQ(ierr); ierr = MatDestroy(A);CHKERRQ(ierr);
220
if (kspSmoother) {ierr = KSPDestroy(kspSmoother);CHKERRQ(ierr);}
221
if (Prec) delete Prec;
223
ierr = PetscFinalize();CHKERRQ(ierr);
227
/* ***************************************************************** */
229
PetscErrorCode ShellApplyML(void *ctx,Vec x,Vec y)
232
ML_Epetra::MultiLevelPreconditioner *mlp = (ML_Epetra::MultiLevelPreconditioner*)ctx;
234
/* Wrap x and y as Epetra_Vectors. */
235
PetscScalar *xvals,*yvals;
236
ierr = VecGetArray(x,&xvals);CHKERRQ(ierr);
237
Epetra_Vector epx(View,mlp->OperatorDomainMap(),xvals);
238
ierr = VecGetArray(y,&yvals);CHKERRQ(ierr);
239
Epetra_Vector epy(View,mlp->OperatorRangeMap(),yvals);
242
mlp->ApplyInverse(epx,epy);
244
/* Clean up and return. */
245
ierr = VecRestoreArray(x,&xvals);CHKERRQ(ierr);
246
ierr = VecRestoreArray(y,&yvals);CHKERRQ(ierr);
257
int main(int argc, char *argv[])
260
MPI_Init(&argc,&argv);
263
printf("Please configure EpetraExt with:");
264
#if !defined(HAVE_PETSC)
265
printf("--enable-petsc");
267
#if !defined(HAVE_ML_EPETRA)
268
printf("--enable-epetra");
270
#if !defined(HAVE_ML_TEUCHOS)
271
printf("--enable-teuchos");
273
#if !defined(HAVE_ML_AZTECOO)
274
printf("--enable-aztecoo");
281
return(EXIT_SUCCESS);