Hello deal.II community,

I wrote a small application that uses a fully distributed (simplicial) 
mesh and the PETSc wrappers, and handles a Lagrange multiplier on a 
boundary with the hp machinery. With the non-hp version of this solver, I 
can checkpoint the current and previous solutions and restart without any 
issue, as done in step-83, but the restart fails for the hp version.

As I understand it, I also have to serialize the active fe indices with 
dof_handler.prepare_for_serialization_of_active_fe_indices(), but this 
function is only implemented for parallel::distributed::Triangulation.
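For reference, the pattern documented for a 
parallel::distributed::Triangulation would be (sketch only, following the 
DoFHandler documentation; the exact placement relative to 
distribute_dofs() is described there):

  // At checkpoint time:
  dof_handler.prepare_for_serialization_of_active_fe_indices();
  solution_transfer.prepare_for_serialization(solution);
  triangulation.save("checkpoint");

  // At restart time:
  triangulation.load("checkpoint");
  dof_handler.deserialize_active_fe_indices();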

I have attached a minimal example that compares a checkpoint/restart in 
the non-hp and hp settings, for both quads and simplices. On my end, it 
succeeds on a single MPI rank, but on more ranks it either reads back 
mismatched vectors or segfaults without showing a stack trace in debug 
mode. I tried to reproduce the exact behavior of the solver, but the two 
differ slightly and I'm not sure why. In both cases, the failure happens 
in restart(): the attached example fails in 
SolutionTransfer::deserialize(...) after successfully loading the 
triangulation, inside interpolate(...) from solution_transfer.templates.h, 
whereas the actual solver already fails when loading the triangulation and 
throws an std::bad_alloc at line 1051 of grid/tria.cc on the current 
master (dest_data_variable.resize(size_on_proc);).

Assuming the issue is indeed that I need to serialize/deserialize the 
active fe indices, is there a workaround that would work for fully 
distributed triangulations? I saw that 
prepare_for_serialization_of_active_fe_indices() uses CellDataTransfer, 
which does not seem to exist for fully distributed meshes, so I'm assuming 
it is not trivial.
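For concreteness, the kind of manual workaround I have in mind would look 
like the following (untested sketch; it assumes that cell ids and cell 
ownership are preserved across save()/load(), and the per-rank file I/O, 
e.g. via CellId::to_string(), is elided):

  // At checkpoint time: record the active fe index of every owned cell.
  std::map<CellId, unsigned int> fe_indices;
  for (const auto &cell : dof_handler.active_cell_iterators())
    if (cell->is_locally_owned())
      fe_indices[cell->id()] = cell->active_fe_index();
  // ...write fe_indices to a per-rank file...

  // At restart time, after triangulation.load() and before
  // distribute_dofs(): read fe_indices back and restore the indices.
  for (const auto &cell : dof_handler.active_cell_iterators())
    if (cell->is_locally_owned())
      cell->set_active_fe_index(fe_indices.at(cell->id()));

Would something along these lines be expected to work?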

Thank you for your time,
Arthur

#include <deal.II/base/conditional_ostream.h>
#include <deal.II/distributed/cell_data_transfer.h>
#include <deal.II/distributed/fully_distributed_tria.h>
#include <deal.II/dofs/dof_handler.h>
#include <deal.II/dofs/dof_tools.h>
#include <deal.II/fe/component_mask.h>
#include <deal.II/fe/fe_nothing.h>
#include <deal.II/fe/fe_q.h>
#include <deal.II/fe/fe_simplex_p.h>
#include <deal.II/fe/fe_system.h>
#include <deal.II/fe/fe_values.h>
#include <deal.II/fe/fe_values_extractors.h>
#include <deal.II/fe/mapping_fe.h>
#include <deal.II/fe/mapping_q.h>
#include <deal.II/grid/grid_generator.h>
#include <deal.II/grid/grid_tools.h>
#include <deal.II/grid/reference_cell.h>
#include <deal.II/grid/tria_description.h>
#include <deal.II/hp/fe_collection.h>
#include <deal.II/lac/generic_linear_algebra.h>
#include <deal.II/lac/la_parallel_vector.h>
#include <deal.II/lac/sparsity_tools.h>
#include <deal.II/numerics/data_out.h>
#include <deal.II/numerics/solution_transfer.h>
#include <deal.II/numerics/vector_tools.h>
#include <deal.II/numerics/vector_tools_interpolate.h>

using namespace dealii;

template <int dim>
class MyFun : public Function<dim>
{
public:
  MyFun(const unsigned int n_components = 1)
    : Function<dim>(n_components)
  {}

  virtual double value(const Point<dim>  &p,
                       const unsigned int component = 0) const override
  {
    if (component == 0)
      // x + y
      return p[0] + p[1];
    else
      return 0.;
  }
};

template <int dim, typename VectorType, bool with_simplices>
class MWE
{
public:
  MWE(const bool with_hp_capabilities)
    : with_hp_capabilities(with_hp_capabilities)
    , mpi_communicator(MPI_COMM_WORLD)
    , mpi_rank(Utilities::MPI::this_mpi_process(mpi_communicator))
    , pcout(std::cout, (mpi_rank == 0))
    , triangulation(mpi_communicator)
    , dof_handler(triangulation)
  {
    FEValuesExtractors::Scalar field(0);

    if constexpr (with_simplices)
    {
      mapping = std::make_shared<MappingFE<dim>>(FE_SimplexP<dim>(1));
      fe0     = std::make_shared<FESystem<dim>>(FE_SimplexP<dim>(1));
      fe1     = std::make_shared<FESystem<dim>>(FE_SimplexP<dim>(1));
    }
    else
    {
      mapping = std::make_shared<MappingQ<dim>>(1);
      fe0     = std::make_shared<FESystem<dim>>(FE_Q<dim>(1));
      fe1     = std::make_shared<FESystem<dim>>(FE_Q<dim>(1));
    }

    fe_collection.push_back(*fe0);
    fe_collection.push_back(*fe1);

    if (with_hp_capabilities)
      mask = fe_collection.component_mask(field);
    else
      mask = fe0->component_mask(field);
  }

  void run(const bool with_restart)
  {
    if (with_restart)
    {
      restart();
    }
    else
    {
      make_grid();
      setup_dofs();

      // Set the field
      VectorTools::interpolate(
        *mapping, dof_handler, MyFun<dim>(), local_solution, mask);
      solution = local_solution;
    }

    output_results(with_restart);

    checkpoint();
  }

  void make_grid()
  {
    // Create serial mesh
    Triangulation<dim> serial_triangulation;
    GridGenerator::subdivided_hyper_cube(serial_triangulation, 2);
    if constexpr (with_simplices)
      GridGenerator::convert_hypercube_to_simplex_mesh(serial_triangulation,
                                                       serial_triangulation,
                                                       2);

    // Partition
    GridTools::partition_triangulation(
      Utilities::MPI::n_mpi_processes(mpi_communicator), serial_triangulation);

    // Create description
    const TriangulationDescription::Description<dim> description =
      TriangulationDescription::Utilities::
        create_description_from_triangulation(serial_triangulation,
                                              mpi_communicator);

    // Create a fully distributed triangulation
    triangulation.create_triangulation(description);
  }

  void setup_dofs()
  {
    if (with_hp_capabilities)
      // Set active fe index
      for (const auto &cell : dof_handler.active_cell_iterators())
      {
        if (cell->is_locally_owned())
        {
          if (cell->center().norm() < 0.5)
            cell->set_active_fe_index(1);
          else
            cell->set_active_fe_index(0);
        }
      }

    // Initialize dof handler
    if (with_hp_capabilities)
      dof_handler.distribute_dofs(fe_collection);
    else
      dof_handler.distribute_dofs(*fe0);

    pcout << "Number of degrees of freedom: " << dof_handler.n_dofs()
          << std::endl;

    locally_owned_dofs = dof_handler.locally_owned_dofs();
    locally_relevant_dofs =
      DoFTools::extract_locally_relevant_dofs(dof_handler);

    // Initialize parallel vectors
    solution.reinit(locally_owned_dofs,
                    locally_relevant_dofs,
                    mpi_communicator);
    local_solution.reinit(locally_owned_dofs, mpi_communicator);
  }

  void checkpoint()
  {
    pcout << "--- Writing checkpoint... ---" << std::endl << std::endl;

    SolutionTransfer<dim, VectorType> solution_transfer(dof_handler);

    // This is not implemented for parallel::fullydistributed::Triangulation:
    // dof_handler.prepare_for_serialization_of_active_fe_indices();

    // Give the ghosted vector to serialize
    solution_transfer.prepare_for_serialization(solution);

    triangulation.save("checkpoint");
  }

  void restart()
  {
    pcout << "--- Reading checkpoint... ---" << std::endl << std::endl;

    triangulation.load("checkpoint");

    // This is not implemented for parallel::fullydistributed::Triangulation:
    // dof_handler.deserialize_active_fe_indices();
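    // Note: setup_dofs() below re-derives the active fe indices from the
    // cell centers, so in this MWE they happen to match the checkpointed
    // ones even without deserializing them.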

    // Set up the DoFHandler and the parallel vectors here
    setup_dofs();

    // Create a non-ghosted vector to deserialize into
    VectorType                        tmp(locally_owned_dofs, mpi_communicator);
    SolutionTransfer<dim, VectorType> solution_transfer(dof_handler);
    solution_transfer.deserialize(tmp);

    // Copy the non-ghosted vector into the existing ghosted one
    solution = tmp;

    // Check the vector that was read
    VectorType local_error(locally_owned_dofs, mpi_communicator);

    VectorTools::interpolate(
      *mapping, dof_handler, MyFun<dim>(), local_error, mask);
    local_error.add(-1., solution);

    VectorType error(locally_owned_dofs,
                     locally_relevant_dofs,
                     mpi_communicator);
    error = local_error;

    pcout << "L2   norm of difference between expected and read solution: "
          << error.l2_norm() << std::endl;
    pcout << "Linf norm of difference between expected and read solution: "
          << error.linfty_norm() << std::endl;
  }

  void output_results(const bool with_restart)
  {
    DataOut<dim> data_out;
    data_out.attach_dof_handler(dof_handler);
    data_out.add_data_vector(solution, "solution");

    // Partition
    Vector<float> subdomain(triangulation.n_active_cells());
    for (unsigned int i = 0; i < subdomain.size(); ++i)
      subdomain(i) = triangulation.locally_owned_subdomain();
    data_out.add_data_vector(subdomain, "subdomain");

    data_out.build_patches(*mapping, 2);
    const std::string filename =
      with_restart ? "with_restart" : "without_restart";
    data_out.write_vtu_with_pvtu_record("./", filename, 0, mpi_communicator, 2);
  }

public:
  const bool with_hp_capabilities;

  MPI_Comm           mpi_communicator;
  unsigned int       mpi_rank;
  ConditionalOStream pcout;

  parallel::fullydistributed::Triangulation<dim> triangulation;
  DoFHandler<dim>                                dof_handler;
  std::shared_ptr<Mapping<dim>>                  mapping;
  std::shared_ptr<FESystem<dim>>                 fe0;
  std::shared_ptr<FESystem<dim>>                 fe1;
  hp::FECollection<dim>                          fe_collection;
  ComponentMask                                  mask;

  IndexSet locally_owned_dofs;
  IndexSet locally_relevant_dofs;

  VectorType solution;
  VectorType local_solution;
};

int main(int argc, char *argv[])
{
  try
  {
    using namespace dealii;
    using VectorType = LinearAlgebraPETSc::MPI::Vector;

    Utilities::MPI::MPI_InitFinalize mpi_initialization(argc, argv, 1);
    ConditionalOStream               pcout(
      std::cout, (Utilities::MPI::this_mpi_process(MPI_COMM_WORLD) == 0));

    pcout << "Number of MPI processes: "
          << Utilities::MPI::n_mpi_processes(MPI_COMM_WORLD) << std::endl;
    pcout << std::endl;

    // With quads
    pcout << "Testing with quads:" << std::endl;
    for (unsigned int i = 0; i < 2; ++i)
    {
      const bool with_hp_capabilities = i == 1;

      pcout << "With_hp_capabilities = " << with_hp_capabilities << std::endl;

      {
        MWE<2, VectorType, false> problem_save(with_hp_capabilities);
        problem_save.run(false);
      }

      {
        MWE<2, VectorType, false> problem_load(with_hp_capabilities);
        problem_load.run(true);
      }

      pcout << "Done" << std::endl << std::endl;
    }

    // With simplices
    pcout << "Testing with simplices:" << std::endl;
    for (unsigned int i = 0; i < 2; ++i)
    {
      const bool with_hp_capabilities = i == 1;

      pcout << "With_hp_capabilities = " << with_hp_capabilities << std::endl;

      {
        MWE<2, VectorType, true> problem_save(with_hp_capabilities);
        problem_save.run(false);
      }

      {
        MWE<2, VectorType, true> problem_load(with_hp_capabilities);
        problem_load.run(true);
      }

      pcout << "Done" << std::endl << std::endl;
    }
  }
  catch (const std::exception &exc)
  {
    std::cerr << std::endl
              << std::endl
              << "----------------------------------------------------"
              << std::endl;
    std::cerr << "Exception on processing: " << std::endl
              << exc.what() << std::endl
              << "Aborting!" << std::endl
              << "----------------------------------------------------"
              << std::endl;

    return 1;
  }
  catch (...)
  {
    std::cerr << std::endl
              << std::endl
              << "----------------------------------------------------"
              << std::endl;
    std::cerr << "Unknown exception!" << std::endl
              << "Aborting!" << std::endl
              << "----------------------------------------------------"
              << std::endl;
    return 1;
  }

  return 0;
}
