Matt, Barry

Thanks for your replies! I've added a call to MatNestSetSubMats(), but
something is still going wrong; see the full program and error output below.
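
For quick reference, this is the call I added (copied verbatim from the
listing below); it is only executed when the matrix type is "nest":

  if (type.eq."nest") then
     call MatNestSetSubMats(A,3,(/isg0,isg1,isg2/),3,(/isg0,isg1,isg2/),PETSC_NULL_OBJECT,ierr); CHKERRQ(ierr)
  end if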

Chris


$ cat mattry.F90
program mattry

  use petscksp
  implicit none
#include <petsc/finclude/petsckspdef.h>

  PetscInt :: n=4   ! setting 4 cells per process

  PetscErrorCode         :: ierr
  PetscInt               :: size,rank,i
  Mat                    :: A,A02
  MatType                :: type
  IS                     :: isg0,isg1,isg2
  IS                     :: isl0,isl1,isl2
  ISLocalToGlobalMapping :: map

  integer, allocatable, dimension(:) :: idx

  call PetscInitialize(PETSC_NULL_CHARACTER,ierr); CHKERRQ(ierr)
  call MPI_Comm_size(PETSC_COMM_WORLD,size,ierr); CHKERRQ(ierr)
  call MPI_Comm_rank(PETSC_COMM_WORLD,rank,ierr);CHKERRQ(ierr)

  ! local index sets for 3 fields
  allocate(idx(n))
  idx=(/ (i-1, i=1,n) /)
  call ISCreateGeneral(PETSC_COMM_WORLD,n,idx,PETSC_COPY_VALUES,isl0,ierr); CHKERRQ(ierr)
  call ISCreateGeneral(PETSC_COMM_WORLD,n,idx+n,PETSC_COPY_VALUES,isl1,ierr); CHKERRQ(ierr)
  call ISCreateGeneral(PETSC_COMM_WORLD,n,idx+2*n,PETSC_COPY_VALUES,isl2,ierr); CHKERRQ(ierr)
!  call ISView(isl2,PETSC_VIEWER_STDOUT_WORLD,ierr); CHKERRQ(ierr)
  deallocate(idx)

  ! global index sets for 3 fields
  allocate(idx(n))
  idx=(/ (i-1+rank*3*n, i=1,n) /)
  call ISCreateGeneral(PETSC_COMM_WORLD,n,idx,PETSC_COPY_VALUES,isg0,ierr); CHKERRQ(ierr)
  call ISCreateGeneral(PETSC_COMM_WORLD,n,idx+n,PETSC_COPY_VALUES,isg1,ierr); CHKERRQ(ierr)
  call ISCreateGeneral(PETSC_COMM_WORLD,n,idx+2*n,PETSC_COPY_VALUES,isg2,ierr); CHKERRQ(ierr)
!  call ISView(isg2,PETSC_VIEWER_STDOUT_WORLD,ierr); CHKERRQ(ierr)
  deallocate(idx)

  ! local-to-global mapping
  allocate(idx(3*n))
  idx=(/ (i-1+rank*3*n, i=1,3*n) /)
  call ISLocalToGlobalMappingCreate(PETSC_COMM_WORLD,1,3*n,idx,PETSC_COPY_VALUES,map,ierr); CHKERRQ(ierr)
!  call ISLocalToGlobalMappingView(map,PETSC_VIEWER_STDOUT_WORLD,ierr); CHKERRQ(ierr)
  deallocate(idx)

  ! create the 3-by-3 block matrix
  call MatCreate(PETSC_COMM_WORLD,A,ierr); CHKERRQ(ierr)
  call MatSetSizes(A,3*n,3*n,PETSC_DECIDE,PETSC_DECIDE,ierr); CHKERRQ(ierr)
  call MatSetUp(A,ierr); CHKERRQ(ierr)
  call MatSetOptionsPrefix(A,"A_",ierr); CHKERRQ(ierr)
  call MatSetLocalToGlobalMapping(A,map,map,ierr); CHKERRQ(ierr)
  call MatSetFromOptions(A,ierr); CHKERRQ(ierr)

  ! setup nest
  call MatGetType(A,type,ierr); CHKERRQ(ierr)
  if (type.eq."nest") then
     call MatNestSetSubMats(A,3,(/isg0,isg1,isg2/),3,(/isg0,isg1,isg2/),PETSC_NULL_OBJECT,ierr); CHKERRQ(ierr)
  end if

  ! set diagonal of block A02 to 0.65
  call MatGetLocalSubmatrix(A,isl0,isl2,A02,ierr); CHKERRQ(ierr)
  do i=1,n
     call MatSetValuesLocal(A02,1,i-1,1,i-1,0.65d0,INSERT_VALUES,ierr); CHKERRQ(ierr)
  end do
  call MatRestoreLocalSubMatrix(A,isl0,isl2,A02,ierr); CHKERRQ(ierr)
  call MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY,ierr); CHKERRQ(ierr)
  call MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY,ierr); CHKERRQ(ierr)

  ! verify
  call MatGetSubmatrix(A,isg0,isg2,MAT_INITIAL_MATRIX,A02,ierr); CHKERRQ(ierr)
  call MatView(A02,PETSC_VIEWER_STDOUT_WORLD,ierr);CHKERRQ(ierr)

  call PetscFinalize(ierr)

end program mattry


$ mpiexec -n 2 ./mattry -A_mat_type nest
[0]PETSC ERROR: --------------------- Error Message --------------------------------------------------------------
[0]PETSC ERROR: Corrupt argument: http://www.mcs.anl.gov/petsc/documentation/faq.html#valgrind
[0]PETSC ERROR: Invalid Pointer to Object: Parameter # 1
[0]PETSC ERROR: See http://www.mcs.anl.gov/petsc/documentation/faq.html for trouble shooting.
[0]PETSC ERROR: Petsc Release Version 3.7.3, Jul, 24, 2016
[0]PETSC ERROR: ./mattry on a linux_64bit_debug named lin0322.marin.local by cklaij Mon Aug  1 09:54:07 2016
[0]PETSC ERROR: Configure options --with-mpi-dir=/home/cklaij/ReFRESCO/Dev/trunk/Libs/install/openmpi/1.8.7 --with-clanguage=c++ --with-x=1 --with-debugging=1 --with-blas-lapack-dir=/opt/intel/composer_xe_2015.1.133/mkl --with-shared-libraries=0
[0]PETSC ERROR: #1 PetscObjectReference() line 534 in /home/cklaij/ReFRESCO/Dev/trunk/Libs/build/petsc/3.7.3-dbg/src/sys/objects/inherit.c
[0]PETSC ERROR: #2 MatNestSetSubMats_Nest() line 1042 in /home/cklaij/ReFRESCO/Dev/trunk/Libs/build/petsc/3.7.3-dbg/src/mat/impls/nest/matnest.c
[0]PETSC ERROR: #3 MatNestSetSubMats() line 1105 in /home/cklaij/ReFRESCO/Dev/trunk/Libs/build/petsc/3.7.3-dbg/src/mat/impls/nest/matnest.c
[1]PETSC ERROR: --------------------- Error Message --------------------------------------------------------------
[1]PETSC ERROR: Corrupt argument: http://www.mcs.anl.gov/petsc/documentation/faq.html#valgrind
[1]PETSC ERROR: Invalid Pointer to Object: Parameter # 1
[1]PETSC ERROR: See http://www.mcs.anl.gov/petsc/documentation/faq.html for trouble shooting.
[1]PETSC ERROR: Petsc Release Version 3.7.3, Jul, 24, 2016
[1]PETSC ERROR: ./mattry on a linux_64bit_debug named lin0322.marin.local by cklaij Mon Aug  1 09:54:07 2016
[1]PETSC ERROR: Configure options --with-mpi-dir=/home/cklaij/ReFRESCO/Dev/trunk/Libs/install/openmpi/1.8.7 --with-clanguage=c++ --with-x=1 --with-debugging=1 --with-blas-lapack-dir=/opt/intel/composer_xe_2015.1.133/mkl --with-shared-libraries=0
[1]PETSC ERROR: #1 PetscObjectReference() line 534 in /home/cklaij/ReFRESCO/Dev/trunk/Libs/build/petsc/3.7.3-dbg/src/sys/objects/inherit.c
[1]PETSC ERROR: #2 MatNestSetSubMats_Nest() line 1042 in /home/cklaij/ReFRESCO/Dev/trunk/Libs/build/petsc/3.7.3-dbg/src/mat/impls/nest/matnest.c
[1]PETSC ERROR: #3 MatNestSetSubMats() line 1105 in /home/cklaij/ReFRESCO/Dev/trunk/Libs/build/petsc/3.7.3-dbg/src/mat/impls/nest/matnest.c
--------------------------------------------------------------------------
MPI_ABORT was invoked on rank 1 in communicator MPI_COMM_WORLD
with errorcode 64.

NOTE: invoking MPI_ABORT causes Open MPI to kill all MPI processes.
You may or may not see output from other processes, depending on
exactly when Open MPI kills them.
--------------------------------------------------------------------------
[lin0322.marin.local:14326] 1 more process has sent help message help-mpi-api.txt / mpi-abort
[lin0322.marin.local:14326] Set MCA parameter "orte_base_help_aggregate" to 0 to see all help / error messages
$



dr. ir. Christiaan Klaij | CFD Researcher | Research & Development
MARIN | T +31 317 49 33 44 | [email protected] | www.marin.nl
