#include "Trilinos_Util.h"
#include "Ifpack_IlukGraph.h"
#include "Ifpack_CrsRiluk.h"

void BiCGSTAB(Epetra_CrsMatrix &A, Epetra_Vector &x, Epetra_Vector &b,
              Ifpack_CrsRiluk *M, int Maxiter, double Tolerance, bool verbose);
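// Overview (reconstructed from the excerpts below): main() reads a
// Harwell-Boeing matrix, redistributes it across processors, and calls an
// inverse-iteration driver; each iteration solves A*z = q with BiCGSTAB,
// preconditioned by an Ifpack RILU(k) factorization, to approximate the
// smallest eigenvalue of A.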
int main(int argc, char *argv[]) {
  MPI_Init(&argc, &argv);
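  // Communicator construction is elided in this excerpt; a minimal sketch,
  // assuming the standard Epetra pattern (not shown in the original lines):
#ifdef EPETRA_MPI
  Epetra_MpiComm Comm(MPI_COMM_WORLD);
#else
  Epetra_SerialComm Comm;
#endif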
  int MyPID = Comm.MyPID();
  bool verbose = false; // assumed declaration; only PE 0 prints
  if (MyPID==0) verbose = true;
  // Presumably guarded by an argument-count check in the elided lines:
  if (verbose) cerr << "Usage: " << argv[0] << " HB_data_file" << endl;
  Trilinos_Util_ReadHb2Epetra(argv[1], Comm, readMap, readA, readx, readb, readxexact);
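  // ReadHb2Epetra places the whole problem on PE 0. The distributed map,
  // exporter, and target matrix are built in lines elided from this excerpt;
  // a minimal sketch consistent with their later use (constructor arguments
  // here are assumptions):
  Epetra_Map map(readMap->NumGlobalElements(), 0, Comm);
  Epetra_Export exporter(*readMap, map);
  Epetra_CrsMatrix A(Copy, map, 0);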
#ifdef EPETRA_MPI // If running in parallel, we need to distribute matrix across all PEs.
  A.Export(*readA, exporter, Add);
  assert(A.FillComplete()==0);
#else // If not running in parallel, we do not need to distribute the matrix
  // (serial branch elided in this excerpt)
#endif

  A.SetFlopCounter(counter);
  double totalFlops = counter.Flops();
  double MFLOPS = totalFlops/elapsedTime/1000000.0;
  cout << "*************************************************" << endl
       << " Approximate smallest eigenvalue = " << lambda << endl
       << " Total Time = " << elapsedTime << endl
       << " Total FLOPS = " << totalFlops << endl
       << " Total MFLOPS = " << MFLOPS << endl
       << "*************************************************" << endl;
// Inside invIteration(A, lambda, verbose): the inverse-iteration driver.
  resid.SetFlopCounter(A);
  double normz, residual;
  double tolerance = 1.0E-6;
  for (int iter = 0; iter < niters; iter++) {

    if (verbose) cout << " ***** Performing step " << iter << " of inverse iteration ***** " << endl;
    q.Scale(1.0/normz, z);
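    // One inverse-iteration step: normalize (q = z/||z||), then the elided
    // lines solve A*z = q with the preconditioned BiCGSTAB below and update
    // the eigenvalue estimate, presumably lambda = q'z; since z approximates
    // inv(A)*q, 1.0/lambda (printed below) approximates the smallest
    // eigenvalue of A.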
    if (iter%10==0 || iter+1==niters) {
      resid.Update(1.0, z, -lambda, q, 0.0);
      resid.Norm2(&residual);
      if (verbose) cout << "***** Inverse Iteration Step " << iter+1 << endl
                        << " Lambda = " << 1.0/lambda << endl
                        << " Residual of A(inv)*q - lambda*q = " << residual << endl;

      if (residual < tolerance) {
        A.Multiply(false, q, z);
        resid.Update(1.0, z, -lambda, q, 0.0);
        resid.Norm2(&residual);
        cout << " Explicitly computed residual of A*q - lambda*q = " << residual << endl;
// Inside applyInverseSetup(A, M): build the RILU(k) preconditioner.
  Ifpack_IlukGraph * IlukGraph = new Ifpack_IlukGraph(A.Graph(), LevelFill, Overlap);
  assert(IlukGraph->ConstructFilledGraph()==0);
  M = new Ifpack_CrsRiluk(A, *IlukGraph);
  M->SetFlopCounter(A);
  assert(M->InitValues()==0);
  assert(M->Factor()==0);
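  // The factorization happens in two phases: Ifpack_IlukGraph computes the
  // symbolic level-k fill pattern (widened by Overlap rows across processor
  // boundaries), then Ifpack_CrsRiluk copies values in (InitValues) and
  // computes the numeric factors (Factor). LevelFill and Overlap are set in
  // elided lines; zero for both would give a no-overlap ILU(0).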
// Inside applyInverse(A, x, b, M, verbose): one preconditioned solve.
  double Tolerance = 1.0E-16;
  BiCGSTAB(A, x, b, M, Maxiter, Tolerance, verbose);
// Inside applyInverseDestroy(M): recover the graph so it can be deleted with M.
  Ifpack_IlukGraph & IlukGraph = (Ifpack_IlukGraph &) M->Graph();
void BiCGSTAB(Epetra_CrsMatrix &A, Epetra_Vector &x, Epetra_Vector &b,
              Ifpack_CrsRiluk *M, int Maxiter, double Tolerance, bool verbose) {
  // r = A*x, then r = b - r: the true residual of the initial guess.
  A.Multiply(false, x, r);
  r.Update(1.0, b, -1.0);
  double r_norm, b_norm, scaled_r_norm, rhon, rhonm1 = 1.0;
  double alpha = 1.0, omega = 1.0, sigma;
  double omega_num, omega_den;

  r.Norm2(&r_norm); // assumed fill for elided lines: norms feeding the scaled residual
  b.Norm2(&b_norm);
  scaled_r_norm = r_norm/b_norm;
  if (verbose) cout << "Initial residual = " << r_norm
                    << " Scaled residual = " << scaled_r_norm << endl;
  for (int i=0; i<Maxiter; i++) {

    rtilde.Dot(r, &rhon); // assumed fill for an elided line: rho_n = (rtilde, r)
    double beta = (rhon/rhonm1) * (alpha/omega);

    double dtemp = - beta*omega;
    p.Update(1.0, r, dtemp, v, beta);
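    // With Update(ScalarA, A, ScalarB, B, ScalarThis) computing
    // this = ScalarThis*this + ScalarA*A + ScalarB*B, the call above is the
    // standard BiCGSTAB search-direction update
    //   p = r + beta*(p - omega*v),
    // since beta*p + 1.0*r + (-beta*omega)*v groups to exactly that.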
    M->Solve(false, p, phat);
    A.Multiply(false, phat, v);
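    // Right-preconditioned matrix-vector product: phat = inv(M)*p via the
    // RILU(k) triangular solves, then v = A*phat. The 'false' arguments
    // request the non-transposed solve/multiply.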
    rtilde.Dot(v, &sigma);
    alpha = rhon/sigma; // assumed fill for an elided line: alpha = rho_n/(rtilde, v)
    s.Update(-alpha, v, 1.0, r, 0.0);
    M->Solve(false, s, shat);
    A.Multiply(false, shat, r);
    r.Dot(s, &omega_num);
    r.Dot(r, &omega_den);
    omega = omega_num / omega_den;
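    // At this point r temporarily holds t = A*inv(M)*s, so
    // omega = (t,s)/(t,t) is the usual BiCGSTAB stabilization parameter,
    // the least-squares minimizer of ||s - omega*t||.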
    x.Update(alpha, phat, omega, shat, 1.0); // x += alpha*phat + omega*shat
    r.Update(1.0, s, -omega);                // r = s - omega*t (t is in r)
    rhonm1 = rhon;    // assumed fill for an elided line: carry rho to the next pass
    r.Norm2(&r_norm); // assumed fill: recompute the residual norm
    scaled_r_norm = r_norm/b_norm;
    if (verbose) cout << "Iter "<< i << " residual = " << r_norm
                      << " Scaled residual = " << scaled_r_norm << endl;
    if (scaled_r_norm < Tolerance)
      break;
  }
}
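// Example invocation (executable name assumed; the program expects one
// Harwell-Boeing data file, per the usage message in main):
//   mpirun -np 4 ./ifpack_hb_example my_matrix.hb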