Here it is.

First run is without renumbering, second one with dof renumbering.

In serial mode (mpirun -np 1) both tests complete (just the error from the 
Subscriptor class).
For mpirun -np 2 the first one finishes while the second one fails.

Now it returns an error message that did not occur previously, but I can't 
extract any useful information from it (Trilinos solver returns error 
code -1 in file "trilinos_solver.h" line 485).

-- 
The deal.II project is located at http://www.dealii.org/
For mailing list/forum options, see 
https://groups.google.com/d/forum/dealii?hl=en
--- 
You received this message because you are subscribed to the Google Groups 
"deal.II User Group" group.
To unsubscribe from this group and stop receiving emails from it, send an email 
to [email protected].
For more options, visit https://groups.google.com/d/optout.
#include <deal.II/base/quadrature.h>
#include <deal.II/base/quadrature_lib.h>
#include <deal.II/base/types.h>
#include <deal.II/base/utilities.h>
#include <deal.II/distributed/shared_tria.h>
#include <deal.II/distributed/tria.h>
#include <deal.II/dofs/dof_handler.h>
#include <deal.II/dofs/dof_tools.h>
#include <deal.II/fe/fe.h>
#include <deal.II/fe/fe_q.h>
#include <deal.II/fe/fe_values.h>
#include <deal.II/grid/grid_generator.h>
#include <deal.II/grid/tria.h>
#include <deal.II/lac/constraint_matrix.h>
#include <deal.II/lac/dynamic_sparsity_pattern.h>
#include <deal.II/lac/sparsity_tools.h>
#include <deal.II/lac/trilinos_solver.h>
#include <deal.II/lac/trilinos_sparse_matrix.h>
#include <deal.II/lac/trilinos_vector.h>
#include <deal.II/numerics/matrix_tools.h>
#include <deal.II/numerics/vector_tools.h>

#include <mpi.h>

#include <iostream>
#include <vector>

using std::vector;
using std::cout;
using std::endl;

using namespace dealii;

// Minimal reproduction case: builds a 2D mass-matrix system on a
// parallel::shared::Triangulation, optionally after reversing the global
// DoF numbering, and solves it with the Trilinos direct solver. The whole
// pipeline (mesh -> DoFs -> sparsity -> assembly -> solve) runs in the
// constructor so that constructing a Test object performs one full run.
class Test
{
   public:
      unsigned int rank;     // MPI rank of this process
      unsigned int n_ranks;  // total number of MPI processes

      parallel::shared::Triangulation<2> triangulation;

      DoFHandler<2> dof_handler;

      FE_Q<2> fe;
      QGauss<2> quadrature;
      FEValues<2> fe_values;

      ConstraintMatrix constraints;

      IndexSet locally_owned_dofs;
      IndexSet locally_relevant_dofs;

      TrilinosWrappers::SparseMatrix system_matrix;
      TrilinosWrappers::MPI::Vector system_rhs, solution;

      // Runs the complete test. If do_renumber is true the global DoF
      // numbering is reversed before the linear algebra objects are set up.
      Test(const bool do_renumber) :
            rank(Utilities::MPI::this_mpi_process(MPI_COMM_WORLD)),  //
            n_ranks(Utilities::MPI::n_mpi_processes(MPI_COMM_WORLD)),

            triangulation(MPI_COMM_WORLD), dof_handler(triangulation), fe(1), quadrature(2),  //
            fe_values(fe, quadrature, update_gradients | update_values | update_JxW_values)
      {
         cout << "Start";

         if (do_renumber)
            cout << " with renumbering" << endl;
         else
            cout << " without renumbering" << endl;

         GridGenerator::hyper_cube(triangulation);
         triangulation.refine_global(4);

         dof_handler.distribute_dofs(fe);

         // No hanging nodes or boundary constraints in this test; the
         // constraint object is intentionally empty but must be closed
         // before it is handed to make_sparsity_pattern / assembly.
         constraints.clear();
         constraints.close();

         if (do_renumber) renumber();

         init_structures();

         assemble();

         solve();

         cout << "Finished";

         if (do_renumber)
            cout << " with renumbering" << endl;
         else
            cout << " without renumbering" << endl;
      }

      // Sets up index sets, parallel vectors, the distributed sparsity
      // pattern, and the system matrix. Must run after any renumbering,
      // since the owned/relevant index sets depend on the numbering.
      void init_structures()
      {
         locally_owned_dofs = dof_handler.locally_owned_dofs();
         DoFTools::extract_locally_relevant_dofs(dof_handler, locally_relevant_dofs);

         // Ghosted solution vector (readable on locally relevant DoFs).
         solution.reinit(locally_owned_dofs, locally_relevant_dofs, MPI_COMM_WORLD);

         // Non-ghosted rhs (written during assembly, then compressed).
         system_rhs.reinit(locally_owned_dofs, MPI_COMM_WORLD);

         // NOTE(review): the pattern is built over the full n_dofs range
         // rather than restricted to locally_relevant_dofs (see the
         // commented-out alternative); wasteful but functional.
         DynamicSparsityPattern dsp(dof_handler.n_dofs(), dof_handler.n_dofs());  //(locally_relevant_dofs);
         DoFTools::make_sparsity_pattern(dof_handler, dsp, constraints, false);

         SparsityTools::distribute_sparsity_pattern(dsp, dof_handler.n_locally_owned_dofs_per_processor(), MPI_COMM_WORLD, locally_relevant_dofs);

         system_matrix.reinit(locally_owned_dofs, locally_owned_dofs, dsp, MPI_COMM_WORLD);
      }

      // Reverses the global DoF numbering: dof i becomes n_dofs - i - 1.
      // Each process passes renumber_dofs() only the new indices of its
      // locally owned DoFs, in the order of locally_owned_dofs.
      void renumber()
      {
         locally_owned_dofs = dof_handler.locally_owned_dofs();
         DoFTools::extract_locally_relevant_dofs(dof_handler, locally_relevant_dofs);

         // Use types::global_dof_index, not unsigned int: DoF indices may
         // be 64 bit, and DoFHandler::renumber_dofs() expects this type.
         vector<types::global_dof_index> new_number(dof_handler.n_dofs());
         for (types::global_dof_index i = 0; i < dof_handler.n_dofs(); i++)
            new_number[i] = dof_handler.n_dofs() - i - 1;

         vector<types::global_dof_index> local_new_number;
         for (const types::global_dof_index dof : locally_owned_dofs)
            local_new_number.push_back(new_number[dof]);

         dof_handler.renumber_dofs(local_new_number);
      }

      // Assembles the mass matrix (phi_i * phi_j) and a unit right-hand
      // side (integral of phi_i) over the locally owned cells, then
      // compresses both objects to exchange off-processor contributions.
      void assemble()
      {
         for (auto cell = dof_handler.begin_active(); cell != dof_handler.end(); ++cell)
         {
            // Skip ghost/artificial cells; each cell is assembled exactly
            // once, by its owner.
            if ( !cell->is_locally_owned()) continue;

            fe_values.reinit(cell);

            Vector<double> local_rhs(fe.dofs_per_cell);
            local_rhs = 0;

            FullMatrix<double> local_matrix(fe.dofs_per_cell, fe.dofs_per_cell);
            local_matrix = 0;

            for (unsigned int q = 0; q < fe_values.n_quadrature_points; q++)
            {
               for (unsigned int i = 0; i < fe.dofs_per_cell; i++)
               {
                  for (unsigned int j = 0; j < fe.dofs_per_cell; j++)
                  {
                     // Mass matrix entry: phi_i(x_q) * phi_j(x_q) * w_q.
                     local_matrix(i, j) += fe_values.shape_value(i, q) * fe_values.shape_value(j, q) * fe_values.JxW(q);
                  }

                  // Rhs entry for f == 1: phi_i(x_q) * w_q.
                  local_rhs(i) += (fe_values.shape_value(i, q) * fe_values.JxW(q));
               }
            }

            // get_dof_indices() requires types::global_dof_index, the
            // 64-bit-safe DoF index type.
            vector<types::global_dof_index> local_dofs(fe.dofs_per_cell);
            cell->get_dof_indices(local_dofs);
            constraints.distribute_local_to_global(local_matrix, local_rhs, local_dofs, system_matrix, system_rhs);
         }

         system_matrix.compress(VectorOperation::add);
         system_rhs.compress(VectorOperation::add);
      }

      // Solves the assembled system with the Trilinos direct solver and
      // copies the (non-ghosted) result into the ghosted solution vector.
      void solve()
      {
         // The solver needs non-ghosted vectors, hence the temporaries.
         TrilinosWrappers::MPI::Vector tmp_sol, tmp_rhs;

         tmp_sol.reinit(locally_owned_dofs, MPI_COMM_WORLD);
         tmp_rhs.reinit(locally_owned_dofs, MPI_COMM_WORLD);

         tmp_sol = 0;
         tmp_rhs = system_rhs;

         // Diagnostic norms, printed by every rank.
         printf("mat = %e \n", system_matrix.l1_norm());
         printf("rhs = %e \n", tmp_rhs.l2_norm());

         typedef TrilinosWrappers::SolverDirect SOLVER;

         SolverControl sc;
         SOLVER::AdditionalData data;
         SOLVER solver(sc, data);

         solver.solve(system_matrix, tmp_sol, tmp_rhs);

         printf("sol = %e \n", tmp_sol.l2_norm());

         // Assigning a non-ghosted vector to a ghosted one also updates
         // the ghost entries.
         solution = tmp_sol;
      }
};

int main(int argc, char* argv[])
{
   Utilities::MPI::MPI_InitFinalize mpi(argc, argv);

   Test test1(false);

   MPI_Barrier(MPI_COMM_WORLD);

   Test test2(true);

   return 0;
}

Reply via email to