Hello all,
I'm trying to load my Laplacian matrix into a Fortran module. I have
implemented it, and it works for the first iteration of the Laplacian
solver, but when the second step starts the Laplacian matrix object becomes
corrupted and looks like it loses one of its dimensions.
Can you help me understand what's happening?
The modules are attached. The error I get is the following; I bolded (with
asterisks) the lines where I detected the corruption, and a small standalone
check I could use to narrow this down is sketched right after the log:
ucmsSeamount Entering MAIN loop.
RHS loaded, size: 213120 / 213120
*CSRMAt loaded, sizes: 213120 x 213120*
8.39198399 s
solveP pass: 1 !Iteration number
RHS loaded, size: 213120 / 213120
[0]PETSC ERROR: --------------------- Error Message
--------------------------------------------------------------
[0]PETSC ERROR: Invalid argument
[0]PETSC ERROR: Wrong type of object: Parameter # 1
[0]PETSC ERROR: See http://www.mcs.anl.gov/petsc/documentation/faq.html for
trouble shooting.
[0]PETSC ERROR: Petsc Release Version 3.7.3, unknown
[0]PETSC ERROR: ./ucmsSeamount on a arch-linux2-c-debug
named valera-HP-xw4600-Workstation by valera Fri Sep 23 10:27:21 2016
[0]PETSC ERROR: Configure options --with-cc=gcc --with-cxx=g++
--with-fc=gfortran --download-fblaslapack=1 --download-mpich=1
--download-ml=1
[0]PETSC ERROR: #1 MatGetSize() line 6295 in
/home/valera/v5PETSc/petsc/petsc/src/mat/interface/matrix.c
* CSRMAt loaded, sizes: 213120 x 0*
[0]PETSC ERROR: --------------------- Error Message
--------------------------------------------------------------
[0]PETSC ERROR: Invalid argument
[0]PETSC ERROR: Wrong type of object: Parameter # 2
[0]PETSC ERROR: See http://www.mcs.anl.gov/petsc/documentation/faq.html for
trouble shooting.
[0]PETSC ERROR: Petsc Release Version 3.7.3, unknown
[0]PETSC ERROR: ./ucmsSeamount on a arch-linux2-c-debug
named valera-HP-xw4600-Workstation by valera Fri Sep 23 10:27:21 2016
[0]PETSC ERROR: Configure options --with-cc=gcc --with-cxx=g++
--with-fc=gfortran --download-fblaslapack=1 --download-mpich=1
--download-ml=1
[0]PETSC ERROR: #2 KSPSetOperators() line 531 in
/home/valera/v5PETSc/petsc/petsc/src/ksp/ksp/interface/itcreate.c
[0]PETSC ERROR: --------------------- Error Message
--------------------------------------------------------------
[0]PETSC ERROR: Nonconforming object sizes
[0]PETSC ERROR: Preconditioner number of local rows -1 does not equal
resulting vector number of rows 213120
[0]PETSC ERROR: See http://www.mcs.anl.gov/petsc/documentation/faq.html for
trouble shooting.
[0]PETSC ERROR: Petsc Release Version 3.7.3, unknown
[0]PETSC ERROR: ./ucmsSeamount on a arch-linux2-c-debug
named valera-HP-xw4600-Workstation by valera Fri Sep 23 10:27:21 2016
[0]PETSC ERROR: Configure options --with-cc=gcc --with-cxx=g++
--with-fc=gfortran --download-fblaslapack=1 --download-mpich=1
--download-ml=1
[0]PETSC ERROR: #3 PCApply() line 474 in
/home/valera/v5PETSc/petsc/petsc/src/ksp/pc/interface/precon.c
[0]PETSC ERROR: --------------------- Error Message
--------------------------------------------------------------
[0]PETSC ERROR: Object is in wrong state
[0]PETSC ERROR: Mat object's type is not set: Argument # 1
[0]PETSC ERROR: See http://www.mcs.anl.gov/petsc/documentation/faq.html for
trouble shooting.
[0]PETSC ERROR: Petsc Release Version 3.7.3, unknown
[0]PETSC ERROR: ./ucmsSeamount on a arch-linux2-c-debug
named valera-HP-xw4600-Workstation by valera Fri Sep 23 10:27:21 2016
[0]PETSC ERROR: Configure options --with-cc=gcc --with-cxx=g++
--with-fc=gfortran --download-fblaslapack=1 --download-mpich=1
--download-ml=1
[0]PETSC ERROR: #4 MatGetFactorAvailable() line 4286 in
/home/valera/v5PETSc/petsc/petsc/src/mat/interface/matrix.c
[0]PETSC ERROR: #5 PCGetDefaultType_Private() line 28 in
/home/valera/v5PETSc/petsc/petsc/src/ksp/pc/interface/precon.c
[0]PETSC ERROR: #6 PCSetFromOptions() line 159 in
/home/valera/v5PETSc/petsc/petsc/src/ksp/pc/interface/pcset.c
[0]PETSC ERROR: #7 KSPSetFromOptions() line 400 in
/home/valera/v5PETSc/petsc/petsc/src/ksp/ksp/interface/itcl.c
application called MPI_Abort(MPI_COMM_WORLD, 73) - process 0
[unset]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, 73) - process 0
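For what it's worth, here is a minimal standalone check (a sketch, not part
of the attached modules; the name CheckAp is just for illustration) that I
can call between solver passes, e.g. call CheckAp("before pass 2"), to see
where Ap stops reporting its full size:

SUBROUTINE CheckAp(label)
! Hypothetical helper, not in the attached modules: prints the global size
! that Ap currently reports, to see at which point it loses a dimension.
USE LoadPetscMatrix, only : Ap, ierr
IMPLICIT NONE
#include <petsc/finclude/petscsys.h>
#include <petsc/finclude/petscmat.h>
CHARACTER(LEN=*), INTENT(IN) :: label
PetscInt mrows,ncols
call MatGetSize(Ap,mrows,ncols,ierr)
CHKERRQ(ierr)
print*, label, " Ap reports size: ", mrows, " x ", ncols
END SUBROUTINE CheckAp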
MODULE LoadPetscMatrix
! Modules:
USE ProbSize, only :IMax, JMax, Kmax
USE ModelParam
USE Grid
USE Metrics
USE Velocity
USE Scalars
USE SGSVars
USE MultigridVars, only : Acsr, Col_id, row_ptr, nnz, nbnodos, nx, ny, nz
IMPLICIT NONE
!PETSc Matrix Load Implementation, include files:
!Created by Manuel Valera 9/22/16:
#include <petsc/finclude/petscsys.h> !core PETSc
#include <petsc/finclude/petscvec.h> !vectors
#include <petsc/finclude/petscvec.h90> !special vectors f90 functions
#include <petsc/finclude/petscmat.h> !matrixes
#include <petsc/finclude/petscis.h> !index sets
#include <petsc/finclude/petscviewer.h> !viewers
#include <petsc/finclude/petscpc.h> !preconditioners
#include <petsc/finclude/petscksp.h> !Krylov subspaces (Solver)
! Local Variables
INTEGER :: ii,jj,kk
INTEGER :: ff
INTEGER :: l
real (kind(0d0)) :: tol,rms
!real (kind(0d0)), allocatable :: a(:)
!integer, allocatable :: ja(:)
!integer, allocatable :: ia(:)
integer ::iter,iprint
INTEGER :: ierr0
!CHARACTER(LEN=10000) :: FileName
! PETSc IMPLEMENTATION, Variables:
Mat Ap
MatNullSpace nullsp
PetscErrorCode ierr
PetscInt nbdp,nnzp,nel,its,spa
PetscInt, allocatable :: iapi(:),japi(:),ind(:)
PetscScalar, allocatable :: app(:)
PetscScalar rpi,rhsp
PetscMPIInt rankl, sizel
!TODO: IMPORTANT ALL USER-DEFINED ROUTINES SHOULD BE DECLARED AS EXTERNAL
CONTAINS
SUBROUTINE LoadCSRMatrix
call MPI_Comm_rank(PETSC_COMM_WORLD, rankl, ierr)
call MPI_Comm_size(PETSC_COMM_WORLD, sizel, ierr)
! Prepare integers:
nbdp = nbnodos !matrix dimension
nnzp = nnz !total number of nonzero entries in the matrix
allocate(iapi(nbdp+1), stat=ierr)
allocate(japi(nnzp), stat=ierr)
allocate(app(nnzp), stat=ierr)
iapi = row_ptr
japi = Col_id
app = Acsr
!Reordering the CSR Matrix to Sorted-CSR
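! PetscSortIntWithScalarArray sorts one row's column indices in place
! (ascending) and permutes the matching values in app the same way, so each
! row ends up in sorted-column order.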
do ii=1,nbdp,1
spa = iapi(ii+1)-iapi(ii) !spacing between rows
select case (ii)
case(1)
call PetscSortIntWithScalarArray(spa,japi(ii),app(ii),ierr); !1st row case
case default
call PetscSortIntWithScalarArray(spa,japi(iapi(ii)+1),app(iapi(ii)+1),ierr); !other rows case
end select
enddo
! 2.- SETUP MATRIXES:
! Create matrix from CSR arrays directly:
!Recommended Paradigm:
call MatCreate(PETSC_COMM_WORLD,Ap,ierr)
! call MatSetType(Ap,MATSEQAIJ,ierr) !in parallel, type changes. check.!
call MatSetType(Ap,MATMPIAIJ,ierr) !in parallel, type changes. check.
call MatSetSizes(Ap,PETSC_DECIDE,PETSC_DECIDE,nbdp,nbdp,ierr);
! call MatSeqAIJSetPreallocationCSR(Ap,iapi,japi,app,ierr)
call MatMPIAIJSetPreallocationCSR(Ap,iapi,japi,app,ierr)
call MatSetFromOptions(Ap,ierr)
! call MatCreateSeqAIJWithArrays(PETSC_COMM_WORLD,nbdp,nbdp,iapi,japi,app,Ap,ierr
call MatCreateMPIAIJWithArrays(PETSC_COMM_WORLD,floor(real(nbdp)/sizel),PETSC_DECIDE,nbdp,nbdp,iapi,japi,app,Ap,ierr)
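! Note: MatCreateMPIAIJWithArrays creates a brand-new Mat in Ap, so when it is
! active together with the MatCreate/MatMPIAIJSetPreallocationCSR path above,
! the first matrix handle is simply replaced; floor(real(nbdp)/sizel) local
! rows also only add up to nbdp when sizel divides nbdp evenly.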
! 2.1.- ASSEMBLE MATRIX:
! Setup Matrix:
! call MatSetBlockSize(Ap,3,ierr) ! NEEDED ? 3 == degrees of freedom *** for pcgamg
call MatSetUp(Ap,ierr) !THIS STEP is about preallocation and can be further optimized
call MatAssemblyBegin(Ap,MAT_FINAL_ASSEMBLY,ierr)
call MatAssemblyEnd(Ap,MAT_FINAL_ASSEMBLY,ierr)
call MatNullSpaceCreate(PETSC_COMM_WORLD,PETSC_TRUE,0,0,nullsp,ierr); !NO IDEA ABOUT THIS. TODO
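! PETSC_TRUE with no extra vectors makes this the constant null space (the
! null space of a pure-Neumann Laplacian); GAMG can use it through
! MatSetNearNullSpace below.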
call MatSetNearNullSpace(Ap,nullsp,ierr) ! NEEDED ? ***for pcgamg
call MatNullSpaceDestroy(nullsp,ierr);
CHKERRQ(ierr)
END SUBROUTINE LoadCSRMatrix
!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
SUBROUTINE DeallocCSRMat
call MatDestroy(Ap,ierr)
deallocate(iapi,stat=ierr0)
IF (ierr0 /= 0) STOP "*** iapi ***"
deallocate(japi,stat=ierr0)
IF (ierr0 /= 0) STOP "*** japi ***"
deallocate(app,stat=ierr0)
IF (ierr0 /= 0) STOP "*** app ***"
CHKERRQ(ierr)
END SUBROUTINE DeallocCSRMat
END MODULE LoadPetscMatrix
SUBROUTINE SolvePetscLinear(rhs,sol)
! Modules:
USE ProbSize, only : IMax, JMax, Kmax
USE ModelParam
USE Grid
USE Metrics
USE Velocity
USE Scalars
USE SGSVars
USE MultigridVars, only : nnz, nbnodos, nx, ny, nz
USE LoadPetscMatrix, only : Ap
!USE MultigridVars, only : nbnodos, nz, nx, ny, nx_x_ny
IMPLICIT NONE
!PETSc Implementation, include files:
!Created by Manuel Valera 6/10/16:
#include <petsc/finclude/petscsys.h> !core PETSc
#include <petsc/finclude/petscvec.h> !vectors
#include <petsc/finclude/petscvec.h90> !special vectors f90 functions
#include <petsc/finclude/petscmat.h> !matrixes
#include <petsc/finclude/petscis.h> !index sets
#include <petsc/finclude/petscviewer.h> !viewers
#include <petsc/finclude/petscpc.h> !preconditioners
#include <petsc/finclude/petscksp.h> !Krylov subspaces (Solver)
! Local Variables
DOUBLE PRECISION :: ui,uj,uk
DOUBLE PRECISION :: vi,vj,vk
DOUBLE PRECISION :: wi,wj,wk
DOUBLE PRECISION :: const,maxdiff
DOUBLE PRECISION, DIMENSION(nbnodos), INTENT(IN) :: Rhs
DOUBLE PRECISION, DIMENSION(nbnodos), INTENT(OUT) :: sol
INTEGER :: i,j,k
INTEGER :: ff
INTEGER :: l
real (kind(0d0)) :: tol,rms
integer ::iter,iprint
INTEGER :: ierr0
!CHARACTER(LEN=10000) :: FileName
! PETSc IMPLEMENTATION, Variables:
PC pc,mg
KSP ksp
Vec xp,bp,work
Mat pmat
PetscErrorCode ierr
PetscInt nbdp,nnzp,ii,nel,its,spa
PetscScalar, pointer :: soln(:)
PetscScalar, pointer :: rpia(:)
PetscInt, allocatable :: ind(:)
PetscScalar rpi,rhsp
PetscViewer viewer
PetscMPIInt rank, sizex
KSPConvergedReason reason
!TODO: IMPORTANT ALL USER-DEFINED ROUTINES SHOULD BE DECLARED AS EXTERNAL
! real (kind(0d0)) :: a(nnz)
! integer :: ja(nnz),ia(nbnodos+1)
!PETSc IMPLEMENTATION, MAT-VEC Population:
!by Manuel Valera, June 2016.
! TODO: .- Create own module for this.
! .- Figure out IF matrix Ap (Acsr) is reformed every step or if it stays
! the same (probably the same).
!Adapted from Chris Paolini and PETSc official examples:
! 1.- SETUP VECTORS-
call MPI_Comm_rank(PETSC_COMM_WORLD, rank, ierr)
call MPI_Comm_size(PETSC_COMM_WORLD, sizex, ierr)
! Prepare integers:
nbdp = nbnodos !matrix dimension
nnzp = nnz !total number of nonzero entries in the matrix
allocate(ind(nnzp), stat=ierr)
allocate(soln(nbdp), stat=ierr)
call VecCreate(PETSC_COMM_WORLD,xp,ierr)
call VecSetSizes(xp,PETSC_DECIDE,nbdp,ierr) ! Size of xp - nbnodos
!VecSetSizes(Vec v, PetscInt n, PetscInt N)
call VecSetFromOptions(xp,ierr)
call VecDuplicate(xp,bp,ierr) ! duplicate xp (soln vector) into bp(RHS),both have same length
call VecDuplicate(xp,work,ierr)
CHKERRQ(ierr)
! 1.2.- SET VECTORS VALUES:
call VecSet(xp,0.0D0,ierr) !initializing SOLN to zeros
call VecAssemblyBegin(xp,ierr) ; call VecAssemblyEnd(xp,ierr) !Soln
call VecAssemblyBegin(work,ierr) ; call VecAssemblyEnd(work,ierr)
! call VecGetSize(xp,nel,ierr)
! print*, "TrivSoln loaded, size: ", nel, "/",nbdp
do ii=0,nbdp-1,1
ind(ii+1) = ii
enddo
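! ind holds 0-based global row indices; VecSetValues expects 0-based global
! indexing even when called from Fortran.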
call VecSetValues(bp,nbdp,ind,Rhs,INSERT_VALUES,ierr) !feeding RHS in one call. Instabilities
call VecAssemblyBegin(bp,ierr) ; call VecAssemblyEnd(bp,ierr) !rhs
call VecGetSize(bp,nel,ierr)
! call VecGetArrayF90(bp,soln,ierr) !Checking Rhs is being created correctly. IT IS
! sol = soln
! print*, "rhs maxval:", MAXVAL(sol), "/", MAXVAL(Rhs)
! print*, "rhs minval:", MINVAL(sol), "/", MINVAL(Rhs)
! call VecRestoreArrayF90(bp,soln,ierr)
print*, "RHS loaded, size: ", nel, "/",nbdp
!!!!!! Matrix was loaded here, now comes from LoadPetscMatrix module !!!!!!
call MatGetSize(Ap,nel,its,ierr)
print*, "CSRMAt loaded, sizes:", nel,"x",its
call KSPCreate(PETSC_COMM_WORLD,ksp,ierr) ! Solver (linear, for nonlinear must use SNES)
call KSPSetOperators(ksp,Ap,Ap,ierr)
call KSPGetPC(ksp,pc,ierr)
tol = 1.e-5
call KSPSetTolerances(ksp,tol,PETSC_DEFAULT_REAL,PETSC_DEFAULT_REAL,PETSC_DEFAULT_INTEGER,ierr)
!!Calling PCs as example42KSP:
!! Jacobi Preconditioner:
call PCGetOperators(pc,PETSC_NULL_OBJECT,pmat,ierr)
call PCCreate(PETSC_COMM_WORLD,mg,ierr)
call PCSetType(mg,PCJACOBI,ierr)
call PCSetOperators(mg,pmat,pmat,ierr)
call PCSetUp(mg,ierr)
call PCApply(mg,xp,work,ierr)
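! Note: mg is a standalone PC, separate from the pc obtained from the KSP
! above; applying it to xp here does not change the preconditioner the KSP
! itself will use (that is set up via KSPSetFromOptions below).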
!! Multigrid Preconditioner:
! call PCGetOperators(pc,Ap,pmat,ierr)
! call PCCreate(PETSC_COMM_WORLD,mg,ierr)
! call PCSetType(mg,PCGAMG,ierr)
! call PCSetOperators(mg,pmat,pmat,ierr)
! call PCGAMGSetNLevels(mg,3,PETSC_NULL_OBJECT,ierr)
! call PCGAMGSetNSmooths(mg,1,ierr)
! call PCGAMGSetThreshold(mg,0.0d0,ierr)
! call PCSetUp(mg,ierr)
! call PCApply(mg,xp,work,ierr)
call KSPSetType(ksp,KSPGCR,ierr) !agmg uses this. Modded GMRES.
!! These iterations have to be checked against the tolerance. TODO
call KSPSetCheckNormIteration(ksp,100,ierr) !iterations before checking residual.
CHKERRQ(ierr)
call KSPSetFromOptions(ksp,ierr)
CHKERRQ(ierr)
call KSPSetUp(ksp,ierr);
! call KSPSetUpOnBlocks(ksp,ierr);
CHKERRQ(ierr)
!/* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
! Solve the linear system
! - - - - - - - - - - - - - - - - - - - - - - - */
call KSPSolve(ksp,bp,xp,ierr)
! call KSPGetConvergedReason(ksp,reason,ierr)
! rms = reason
! print*, "reason:",rms
!CHKERRQ(ierr)
call VecGetArrayF90(xp,soln,ierr)
sol = soln !Copying new solution to regular scalar array
!tried a loop here and does nothing.
! print*, "soln maxval:", MAXVAL(sol)
! print*, "soln minval:", MINVAL(sol)
call VecRestoreArrayF90(xp,soln,ierr)
! call KSPGetIterationNumber(ksp,its,ierr);
! iter = its
! print *, "Its:", iter
! 5.- CLEAN UP OBJECTS:
call MatDestroy(pmat,ierr)
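! pmat is the handle returned by PCGetOperators above, i.e. it refers to the
! same matrix the KSP was given as operator.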
call VecDestroy(bp,ierr)
call VecDestroy(xp,ierr)
call KSPDestroy(ksp,ierr)
call PCDestroy(mg,ierr)
! call PCDestroy(pc,ierr)
call VecDestroy(work,ierr)
CHKERRQ(ierr)
!./././././././// END OF PETSC IMPLEMENTATION | Finished on 9/7/16 by Manuel Valera.
END SUBROUTINE SolvePetscLinear