#include <lifev/core/LifeV.hpp>

#ifdef LIFEV_HAS_HDF5

#include "Epetra_config.h"

#ifdef HAVE_MPI

#include <Epetra_MpiComm.h>

#include <lifev/core/array/MatrixEpetra.hpp>
#include <lifev/core/array/VectorEpetra.hpp>
#include <lifev/core/fem/FESpace.hpp>
#include <lifev/core/filter/GetPot.hpp>
#include <lifev/core/filter/PartitionIO.hpp>
#include <lifev/core/mesh/RegionMesh.hpp>
#include <lifev/core/solver/ADRAssembler.hpp>

#endif /* HAVE_MPI */
#endif /* LIFEV_HAS_HDF5 */

#include <cmath>
#include <cstdlib>
#include <iostream>
#include <memory>
#include <string>

using namespace LifeV;
63 typedef RegionMesh<LinearTetra> mesh_Type;
64 typedef MatrixEpetra<Real> matrix_Type;
65 typedef VectorEpetra vector_Type;
//! Test driver: read an offline-partitioned cube mesh (HDF5), assemble a
//! diffusion (ADR) matrix on it, and check its Frobenius norm against a
//! stored reference value. Returns EXIT_SUCCESS on pass, EXIT_FAILURE when
//! MPI/HDF5 are unavailable or the matrix norm has drifted.
int
main ( int argc, char** argv )
{
#ifdef LIFEV_HAS_HDF5
#ifdef HAVE_MPI
    MPI_Init (&argc, &argv);
    std::shared_ptr<Epetra_MpiComm> comm (
        new Epetra_MpiComm (MPI_COMM_WORLD) );

    // Only the master rank prints progress messages.
    const bool verbose (comm->MyPID() == 0);

    if (verbose)
    {
        std::cout << " -- Reading the data ... " << std::flush;
    }
    // NOTE(review): the original likely read a GetPot data file here
    // (between the "Reading" and "done" messages) — confirm against the
    // upstream test before relying on this reconstruction.
    if (verbose)
    {
        std::cout << " done ! " << std::endl;
    }

    // Command-line options select which partitioner produced the mesh parts,
    // which in turn selects the HDF5 parts file to load ("cube_<type>").
    GetPot cl (argc, argv);
    const std::string partitionerType = cl.follow ("MeshPartitioner",
                                                   "--partitioner-type");
    std::string partsFile;
    partsFile.reserve (50);
    partsFile += "cube_";
    partsFile += partitionerType;

    // Load this rank's mesh part from the pre-partitioned HDF5 file.
    std::shared_ptr<mesh_Type> mesh;
    {
        PartitionIO<RegionMesh<LinearTetra> > partitionIO (partsFile, comm);
        partitionIO.read (mesh);
    }

    if (verbose)
    {
        std::cout << " -- Building FESpaces ... " << std::flush;
    }
    // Scalar P1 space for the unknown, vectorial (3 components) P1 space
    // for the advection field beta.
    std::string uOrder ("P1");
    std::string bOrder ("P1");
    std::shared_ptr<FESpace<mesh_Type, MapEpetra> >
        uFESpace ( new FESpace<mesh_Type, MapEpetra> (mesh, uOrder, 1, comm) );
    std::shared_ptr<FESpace<mesh_Type, MapEpetra> >
        betaFESpace ( new FESpace<mesh_Type, MapEpetra> (mesh, bOrder, 3, comm) );
    if (verbose)
    {
        std::cout << " done ! " << std::endl;
    }
    if (verbose)
    {
        std::cout << " ---> Dofs: " << uFESpace->dof().numTotalDof() << std::endl;
    }

    if (verbose)
    {
        std::cout << " -- Building assembler ... " << std::flush;
    }
    ADRAssembler<mesh_Type, matrix_Type, vector_Type> adrAssembler;
    if (verbose)
    {
        std::cout << " done! " << std::endl;
    }

    if (verbose)
    {
        std::cout << " -- Setting up assembler ... " << std::flush;
    }
    adrAssembler.setup (uFESpace, betaFESpace);
    if (verbose)
    {
        std::cout << " done! " << std::endl;
    }

    if (verbose)
    {
        std::cout << " -- Defining the matrix ... " << std::flush;
    }
    std::shared_ptr<matrix_Type>
        systemMatrix ( new matrix_Type (uFESpace->map() ) );
    *systemMatrix *= 0.0; // start from an explicitly zeroed matrix
    if (verbose)
    {
        std::cout << " done! " << std::endl;
    }

    if (verbose)
    {
        std::cout << " -- Adding the diffusion ... " << std::flush;
    }
    adrAssembler.addDiffusion (systemMatrix, 1);
    if (verbose)
    {
        std::cout << " done! " << std::endl;
    }
    if (verbose)
    {
        std::cout << " Time needed : "
                  << adrAssembler.diffusionAssemblyChrono().diffCumul()
                  << std::endl;
    }

    if (verbose)
    {
        std::cout << " -- Closing the matrix ... " << std::flush;
    }
    systemMatrix->globalAssemble();
    if (verbose)
    {
        std::cout << " done ! " << std::endl;
    }

    // Regression check: compare the assembled matrix norm with the stored
    // reference (35.908 for the cube mesh). Tolerance absorbs round-off.
    Real matrixNorm (systemMatrix->normFrobenius() );
    if (verbose)
    {
        std::cout << " ---> Norm 2 : " << matrixNorm << std::endl;
    }
    if (std::fabs (matrixNorm - 35.908) > 1e-3)
    {
        std::cout << " <!> Matrix has changed !!! <!> " << std::endl;
        // Fail the regression instead of reporting a false PASS.
        return (EXIT_FAILURE);
    }

    if (verbose)
    {
        std::cout << "End Result: TEST PASSED" << std::endl;
    }

    // Release all Epetra/MPI-backed objects BEFORE MPI_Finalize: destroying
    // an Epetra_MpiComm after finalize touches a dead MPI environment.
    comm.reset();
    MPI_Finalize();
#else
    std::cout << "This test needs MPI to run. Aborting." << std::endl;
    return (EXIT_FAILURE);
#endif /* HAVE_MPI */
#else
    std::cout << "This test needs HDF5 to run. Aborting." << std::endl;
    return (EXIT_FAILURE);
#endif /* LIFEV_HAS_HDF5 */

    return ( EXIT_SUCCESS );
}