Hi there,

I have some parallel MPI output code that works fine without PETSc but crashes when compiled with PETSc. To narrow the problem down, I tested the following example, which shows the same behavior. It is modified from http://www.mcs.anl.gov/research/projects/mpi/usingmpi2/examples/starting/io3f_f90.htm. It works without PETSc, but if I comment out "use mpi" and add the PETSc includes instead, it crashes at MPI_FILE_OPEN with an access violation.

Shall I rewrite all of the parallel MPI output using PetscBinaryOpen <http://www.mcs.anl.gov/petsc/petsc-current/docs/manualpages/Sys/PetscBinaryOpen.html> or PetscViewerBinaryOpen <http://www.mcs.anl.gov/petsc/petsc-current/docs/manualpages/Viewer/PetscViewerBinaryOpen.html> and related functions? Considering parallel I/O efficiency, which one is preferable?
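
In case it helps to show what I have in mind, here is a rough, untested sketch of the PetscViewer route (the program name, the file name 'testfile.bin', and the Vec-based layout are just my guesses; note that a Vec stores PetscScalar, so the integer buffer would become real values):

PROGRAM viewer_version
    implicit none
#include <finclude/petscsys.h>
#include <finclude/petscvec.h>
#include <finclude/petscviewer.h>
    PetscErrorCode ierr
    PetscMPIInt myrank
    PetscInt i, istart, iend, bufsize
    PetscInt ix(1)
    PetscScalar val(1)
    Vec v
    PetscViewer viewer

    call PetscInitialize(PETSC_NULL_CHARACTER, ierr)
    call MPI_COMM_RANK(PETSC_COMM_WORLD, myrank, ierr)

    !each rank owns a contiguous block of bufsize entries
    bufsize = 10
    call VecCreateMPI(PETSC_COMM_WORLD, bufsize, PETSC_DECIDE, v, ierr)
    call VecGetOwnershipRange(v, istart, iend, ierr)
    do i = istart, iend - 1
        ix(1) = i
        val(1) = i + 1    !same values as buf() in the example below
        call VecSetValues(v, 1, ix, val, INSERT_VALUES, ierr)
    enddo
    call VecAssemblyBegin(v, ierr)
    call VecAssemblyEnd(v, ierr)

    !collective write of the whole vector in PETSc binary format
    call PetscViewerBinaryOpen(PETSC_COMM_WORLD, 'testfile.bin', &
                               FILE_MODE_WRITE, viewer, ierr)
    call VecView(v, viewer, ierr)
    call PetscViewerDestroy(viewer, ierr)
    call VecDestroy(v, ierr)
    call PetscFinalize(ierr)

END PROGRAM viewer_version

My understanding is that PetscBinaryOpen/PetscBinaryWrite work on a plain per-process file descriptor rather than collectively, so the viewer route looks like the parallel one, but please correct me if that is wrong.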

Thanks and regards,

Danyang

PROGRAM main
    ! Fortran 90 users can (and should) use
    !use mpi
    ! instead of include 'mpif.h' if their MPI implementation provides
    ! an mpi module.
    !include 'mpif.h'

    !For PETSc, use the following includes instead:
#include <finclude/petscsys.h>
#include <finclude/petscviewer.h>
#include <finclude/petscviewer.h90>
    integer ierr, i, myrank, BUFSIZE, thefile
    parameter (BUFSIZE=10)
    integer buf(BUFSIZE)
    integer(kind=MPI_OFFSET_KIND) disp

    call MPI_INIT(ierr)
    call MPI_COMM_RANK(MPI_COMM_WORLD, myrank, ierr)

    do i = 1, BUFSIZE
        buf(i) = myrank * BUFSIZE + i
    enddo

    write(*,'(a,1x,i6,1x,a,1x,10(i6,1x))') "myrank", myrank, "buf",buf

    call MPI_FILE_OPEN(MPI_COMM_WORLD, 'testfile.txt', &
                       MPI_MODE_CREATE + MPI_MODE_WRONLY, &
                       MPI_INFO_NULL, thefile, ierr)
    ! byte displacement for this rank, assuming 4-byte integers
    disp = myrank * BUFSIZE * 4

    !Either use the following two calls:
    !call MPI_FILE_SET_VIEW(thefile, disp, MPI_INTEGER, &
    !                       MPI_INTEGER, 'native', &
    !                       MPI_INFO_NULL, ierr)
    !call MPI_FILE_WRITE(thefile, buf, BUFSIZE, MPI_INTEGER, &
    !                    MPI_STATUS_IGNORE, ierr)

    !... or use this single call:
    call MPI_FILE_WRITE_AT(thefile, disp, buf, BUFSIZE, MPI_INTEGER, &
                           MPI_STATUS_IGNORE, ierr)

    call MPI_FILE_CLOSE(thefile, ierr)
    call MPI_FINALIZE(ierr)

END PROGRAM main
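
For completeness, I run the example with, e.g., "mpiexec -n 4 ./main" (the rank count is arbitrary); without the PETSc includes each rank prints its buffer and testfile.txt is written as expected, while with them the crash occurs at MPI_FILE_OPEN.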
