Hello,

I had a question about PETSc installations. On my local computer I
configured PETSc (v 3.4.2) using the options:

./configure --with-cc=mpicc --with-cxx=mpic++ --download-f-blas-lapack
--download-mpich --download-hypre

I wrote a test program that defines a vector using DMDAs, computes a dot
product, exchanges halo elements, and computes a low-order FD derivative of
the vector. Under my installation of PETSc everything works fine. For some
reason, when my colleagues run the program, they get segmentation faults.
If they also change the y and z boundary types to GHOSTED, the program runs
to the end (though it still seg faults there), but the dot product they get
is only the local, per-process value rather than the global one. I've
attached the main.cpp file for this program.

When they installed their versions of PETSc they did not use the
--download-mpich option; instead they configured with either:
./configure --download-f-blas-lapack --with-scalar-type=complex
or with the option --with-mpi-dir=/home/kim/anaconda/pkgs/mpich2-1.3-py27_0

Could this be causing a problem with the parallelization under PETSc?
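
In case it is useful, here is a small standalone check they could compile
against their own installs to confirm how each PETSc was configured. I am
only guessing that the scalar type and the MPI installation are the relevant
differences; the program just prints what the build actually uses:

#include "petsc.h"

int main(int argc, char** argv) {

	PetscInitialize(&argc, &argv, 0, 0);

	int nSize, nRank;
	MPI_Comm_size(PETSC_COMM_WORLD, &nSize);
	MPI_Comm_rank(PETSC_COMM_WORLD, &nRank);

	// Report the PETSc version and scalar type this executable was built with
	PetscPrintf(PETSC_COMM_WORLD, "PETSc %d.%d.%d, sizeof(PetscScalar) = %d\n",
			PETSC_VERSION_MAJOR, PETSC_VERSION_MINOR, PETSC_VERSION_SUBMINOR,
			(int) sizeof(PetscScalar));
#if defined(PETSC_USE_COMPLEX)
	PetscPrintf(PETSC_COMM_WORLD, "PetscScalar is complex\n");
#else
	PetscPrintf(PETSC_COMM_WORLD, "PetscScalar is real\n");
#endif

	// Each rank reports itself; if every rank prints "rank 0 of 1", the mpiexec
	// being used does not match the MPI that PETSc was linked against
	PetscSynchronizedPrintf(PETSC_COMM_WORLD, "rank %d of %d\n", nRank, nSize);
	PetscSynchronizedFlush(PETSC_COMM_WORLD);

	PetscFinalize();
	return 0;
}

If their output differs from mine (complex scalars, or every rank reporting
itself as 0 of 1), that would at least narrow things down.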

Thanks for the help and sorry for the long question.

Best regards,
John
///////////////////////////////////////////////////////////////////////////////
///
///	\file    main.cpp
///	\author  John Yawney
///	\version July 17, 2014
///
///	<summary>
///		Test case to demonstrate PETSc local and global vector definitions
///		under the DMDA data structures. Uses global vectors to compute the
///		dot product of two vectors: one defined as all 1s, the other as all 2s.
///		As a second test, we exchange halo elements of the first vector and 
///		define a new derivative vector that stores the FD derivatives of
///		this first vector.
///		Test outputs displayed: dot product, derivative vector min & max
///	</summary>

#include "petsc.h"
#include <mpi.h>

///////////////////////////////////////////////////////////////////////////////

int main(int argc, char** argv) {

	// Initialize Petsc
	PetscInitialize(&argc, &argv, 0, 0);

	// Push an abort error handler
	PetscPushErrorHandler(&PetscAbortErrorHandler, PETSC_NULL);

	//////////////////////////////////////////////////////////////////////////
	// DMDA Environment
	//////////////////////////////////////////////////////////////////////////
	// Boundary Conditions
	DMDABoundaryType bx = DMDA_BOUNDARY_GHOSTED;
	DMDABoundaryType by = DMDA_BOUNDARY_NONE; // no boundary points since nGy = 1
	DMDABoundaryType bz = DMDA_BOUNDARY_NONE; // no boundary points since nGz = 1

	//////////////////////////////////////////////////////////////////////////
	// Stencil type
	DMDAStencilType st = DMDA_STENCIL_BOX;
	//DMDAStencilType st = DMDA_STENCIL_STAR;

	//////////////////////////////////////////////////////////////////////////
	// Number of global elements
	int nGx = 256;
	int nGy = 1;
	int nGz = 1;

	//////////////////////////////////////////////////////////////////////////
	// Number of processors in each direction	
	int nSize;
	MPI_Comm_size(MPI_COMM_WORLD, &nSize);

	int nPx = nSize;
	int nPy = 1;
	int nPz = 1;

	if (nGx % nPx != 0) {
		PetscPrintf(MPI_COMM_WORLD, "\nERROR: Number of processors in x must divide number of elements in x.\n\n");
		PetscFinalize();
		return -1;
	}

	//////////////////////////////////////////////////////////////////////////
	// Number of halo elements	
	int nh = 3;

	//////////////////////////////////////////////////////////////////////////
	// Number of degrees of freedom/layers
	int dof = 1;

	//////////////////////////////////////////////////////////////////////////
	// Define 3D distributed array environment
	DM m_3da;
	DMDACreate3d(MPI_COMM_WORLD, bx, by, bz, st, 
				nGx, nGy, nGz, nPx, nPy, nPz, 
				dof, nh, PETSC_NULL, PETSC_NULL, PETSC_NULL, &m_3da);
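
	// NOTE: 	Every PETSc call used here returns a PetscErrorCode that this
	// 		test ignores. Checking it, for example
	// 			PetscErrorCode ierr;
	// 			ierr = DMCreateGlobalVector(m_3da, &m_gTempA); CHKERRQ(ierr);
	// 		turns a silent failure into an error message with a stack trace,
	// 		which may make segmentation faults easier to localize.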

	//////////////////////////////////////////////////////////////////////////
	// Initialize global temp vectors for exchange
	Vec m_gTempA, m_gTempB;
	DMCreateGlobalVector(m_3da, &m_gTempA);
	DMCreateGlobalVector(m_3da, &m_gTempB);

	//////////////////////////////////////////////////////////////////////////
	// Initialize local vectors (with halo elements accessible)
	Vec m_vecA, m_vecB;
	DMCreateLocalVector(m_3da, &m_vecA);
	VecZeroEntries(m_vecA);

	VecDuplicate(m_vecA, &m_vecB);

	//////////////////////////////////////////////////////////////////////////
	// Store some values in the interior of each processor's data
	int i, si, ei;
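	// si = global starting index in x on this process; ei = number of locally
	// owned elements in x, so this process owns the index range [si, si+ei)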
	DMDAGetCorners(m_3da, &si, 0, 0, &ei, 0, 0);

	// Access the elements of the local arrays as C++ multi-dim. array structures
	double ***pvecA, ***pvecB;
	DMDAVecGetArray(m_3da, m_vecA, &pvecA);
	DMDAVecGetArray(m_3da, m_vecB, &pvecB);
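
	// NOTE: 	DMDAVecGetArray() gives access to PetscScalar entries. With a
	// 		real-scalar PETSc build PetscScalar is double, so the double***
	// 		declarations here work; under --with-scalar-type=complex they would
	// 		index the data with the wrong stride. PetscScalar*** is the
	// 		portable declaration (the same applies to pDx below).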

	// Hold k and j constant at 0 since nGy = nGz = 1
	int k, j;
	k = j = 0;
	for (i = si; i < si+ei; i++) {

		//////////////////////////////////////////////////////////////////////////
		// NOTE: 	Filling in some sample values (all 1s in A, all 2s in B).
		// 		In a real application these would be set by the problem of interest.
		pvecA[k][j][i] = 1.0;
		pvecB[k][j][i] = 2.0;

	}

	// Release pointers (and memory)
	DMDAVecRestoreArray(m_3da, m_vecA, &pvecA);
	DMDAVecRestoreArray(m_3da, m_vecB, &pvecB);

	// Moves data from local vectors to global vectors
	DMLocalToGlobalBegin(m_3da, m_vecA, INSERT_VALUES, m_gTempA);
	DMLocalToGlobalEnd(m_3da, m_vecA, INSERT_VALUES, m_gTempA);

	DMLocalToGlobalBegin(m_3da, m_vecB, INSERT_VALUES, m_gTempB);
	DMLocalToGlobalEnd(m_3da, m_vecB, INSERT_VALUES, m_gTempB);
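
	// NOTE: 	After DMLocalToGlobalEnd() the global vectors are already
	// 		assembled, so the VecAssemblyBegin/End calls below are believed to
	// 		be redundant here (they are needed after VecSetValues()); they are
	// 		left in since they are harmless.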

	// Assembly for use in PETSc functions
	VecAssemblyBegin(m_gTempA);
	VecAssemblyEnd(m_gTempA);

	VecAssemblyBegin(m_gTempB);
	VecAssemblyEnd(m_gTempB);

	// Take dot product
	double dDotProduct;
	VecDot(m_gTempA, m_gTempB, &dDotProduct);
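
	// NOTE: 	VecDot() writes a PetscScalar result; declaring dDotProduct as
	// 		double is only safe when PETSc was built with real scalars. Under a
	// 		complex build it should be a PetscScalar (printed with
	// 		PetscRealPart(), for example).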

	// Output dot product to check
	PetscPrintf(MPI_COMM_WORLD, "Dot Product Check... \n\n");
	PetscPrintf(MPI_COMM_WORLD, "\tDot Product (should be 2*%d) = %f\n\n", nGx, dDotProduct);
	PetscPrintf(MPI_COMM_WORLD, "Done.\n\n");

	//////////////////////////////////////////////////////////////////////////
	// Computing centered derivative / first-order at boundaries
	//////////////////////////////////////////////////////////////////////////
	//	In order to compute derivatives we need to move the data from the 
	//	global vectors to the local. This will ensure that the ghost cells
	//	are updated correctly.

	// Moves data from global vectors to local vectors
	DMGlobalToLocalBegin(m_3da, m_gTempA, INSERT_VALUES, m_vecA);
	DMGlobalToLocalEnd(m_3da, m_gTempA, INSERT_VALUES, m_vecA);
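
	// NOTE: 	The centered differences below rely on the halo entries filled by
	// 		this exchange; if the exchange does not fill them, the halo keeps
	// 		the zeros from VecZeroEntries() and the min/max check at the end
	// 		reports nonzero derivatives at the process boundaries.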

	// Define grid spacing for derivatives
	double dLx = 1.0;
	double dDeltaX = dLx / static_cast<double>(nGx);	

	// Define a new global vector to store the derivatives.
	// Alternatively, we could define a local vector if we care about
	// accessing the ghost cells of the derivative vector.
	Vec m_vecDx;
	VecDuplicate(m_gTempA, &m_vecDx);
	// Alternative (local vector): VecDuplicate(m_vecA, &m_vecDx);

	// Access the data in the vectors by using pointers
	double ***pDx;
	DMDAVecGetArray(m_3da, m_vecA, &pvecA);
	DMDAVecGetArray(m_3da, m_vecDx, &pDx);

	//////////////////////////////////////////////////////////////////////////
	// NOTE: 	It is easy to check boundaries in PETSc DMDAs since the local i
	// 		values refer to global positioning. Therefore, we only need to
	//		check if i == 0 or i == nGx-1 in order to determine if we are
	//		at a boundary.
	for (i = si; i < si+ei; i++) {

		if (i == 0) {

			pDx[k][j][i] = (pvecA[k][j][i+1] - pvecA[k][j][i]) / dDeltaX;

		} else if (i == nGx-1) {

			pDx[k][j][i] = (pvecA[k][j][i] - pvecA[k][j][i-1]) / dDeltaX;

		} else {

			pDx[k][j][i] = 0.5 * (pvecA[k][j][i+1] - pvecA[k][j][i-1]) / dDeltaX;

		}

	}

	// Release pointers (and memory)
	DMDAVecRestoreArray(m_3da, m_vecA, &pvecA);
	DMDAVecRestoreArray(m_3da, m_vecDx, &pDx);

	// Compute min and max of derivative vector (should be 0 if halo defined correctly)
	double dMin, dMax;
	VecMin(m_vecDx, PETSC_NULL, &dMin);
	VecMax(m_vecDx, PETSC_NULL, &dMax);

	// Derivative check
	PetscPrintf(MPI_COMM_WORLD, "Derivative Check... \n\n");
	PetscPrintf(MPI_COMM_WORLD, "\tMin (should be 0; if halos not exchanged properly, would be -0.5/dDeltaX) = %f\n", dMin);
	PetscPrintf(MPI_COMM_WORLD, "\tMax (should be 0; if halos not exchanged properly, would be +0.5/dDeltaX) = %f\n\n", dMax);
	PetscPrintf(MPI_COMM_WORLD, "Done.\n");

	// Destroy temporary PETSc structures
	// Destroy DMDA Environment variable
	DMDestroy(&m_3da);

	// Destroy vectors
	VecDestroy(&m_vecA);
	VecDestroy(&m_vecB);
	VecDestroy(&m_gTempA);
	VecDestroy(&m_gTempB);
	VecDestroy(&m_vecDx);

	// Finalize Petsc
	PetscFinalize();

	return 0;
}
