#include <deal.II/base/conditional_ostream.h>
#include <deal.II/base/index_set.h>
#include <deal.II/base/mpi.h>

#include <deal.II/lac/dynamic_sparsity_pattern.h>
#include <deal.II/lac/petsc_sparse_matrix.h>

using namespace dealii;

class OneBodyHamiltonianOperator
{
public:
  /**
   * Declare type for container size.
   */
  using size_type = dealii::types::global_dof_index;

  OneBodyHamiltonianOperator(const dealii::IndexSet &a_local_row_set,
                             const uint32_t          a_my_proc,
                             const uint32_t          a_num_procs);

  /// Destructor
  ~OneBodyHamiltonianOperator();

private:
  dealii::PETScWrappers::MPI::SparseMatrix m_H1;
  dealii::DynamicSparsityPattern           m_dynamic_sparsity_pattern;
};


OneBodyHamiltonianOperator::OneBodyHamiltonianOperator(
  const dealii::IndexSet &a_local_row_set,
  const uint32_t          a_my_proc,
  const uint32_t          a_num_procs)
{
  dealii::IndexSet local_owned(a_local_row_set.size());
  local_owned.add_range(*a_local_row_set.begin(),
                        *a_local_row_set.begin() +
                          a_local_row_set.n_elements());

  m_dynamic_sparsity_pattern.reinit(a_local_row_set.size(),
                                    a_local_row_set.size(),
                                    local_owned);

  int guess = 50;
  m_H1.reinit(MPI_COMM_WORLD,
              a_local_row_set.size(),
              a_local_row_set.size(),
              a_local_row_set.n_elements(),
              a_local_row_set.size(),
              guess);
}

OneBodyHamiltonianOperator::~OneBodyHamiltonianOperator()
{}

void
test(const int &n_proc, const int &my_proc, const ConditionalOStream &pcout)
{
  auto     nbas      = 64;
  auto     nchannels = 2;
  IndexSet local_row_set =
    Utilities::create_evenly_distributed_partitioning(my_proc,
                                                      n_proc,
                                                      nbas * nchannels);

  OneBodyHamiltonianOperator H1(local_row_set, my_proc, n_proc);
}

int
main(int argc, char **argv)
{
  Utilities::MPI::MPI_InitFinalize mpi_initialization(argc, argv, 1);

  MPI_Comm   comm(MPI_COMM_WORLD);
  const auto my_proc = Utilities::MPI::this_mpi_process(comm);
  const auto n_proc  = Utilities::MPI::n_mpi_processes(comm);

  ConditionalOStream pcout(std::cout, (my_proc == 0));

  test(n_proc, my_proc, pcout);

  return 0;
}
 
Let me know if this is okay.  This compiled, ran, and produced the same 
error on my end.

Thank you!

Zachary

On Tuesday, January 5, 2021 at 8:52:16 AM UTC-6 Wolfgang Bangerth wrote:

>
> > My project is in quantum scattering and I would like to have some 
> operators be 
> > distributed PETSc objects.  So inside my OneBodyHamiltonianOperator 
> class (for 
> > example), I would like to create a PETScWrappers::MPI::SparseMatrix and 
> then 
> > use SLEPC to solve for the ground state and excited states.
> > 
> > I tried to add in comments to everything to show my intent.
>
> Zachary -- can you put all of this into one single file and add all of the 
> necessary #includes at the top? Make it easy for us to have something we 
> can 
> compile and run :-)
>
> Cheers
> W.
>
> -- 
> ------------------------------------------------------------------------
> Wolfgang Bangerth email: [email protected]
> www: http://www.math.colostate.edu/~bangerth/
>
>

-- 
The deal.II project is located at http://www.dealii.org/
For mailing list/forum options, see 
https://groups.google.com/d/forum/dealii?hl=en
--- 
You received this message because you are subscribed to the Google Groups 
"deal.II User Group" group.
To unsubscribe from this group and stop receiving emails from it, send an email 
to [email protected].
To view this discussion on the web visit 
https://groups.google.com/d/msgid/dealii/1f9a1054-f889-47be-ab46-8d5dba9ce246n%40googlegroups.com.

Reply via email to