Hi,
As I am relatively new to PETSc, I may have misunderstood how to use the
MatCreateSubMatricesMPI function. The attached code is tuned for three
processes and extracts one matrix for each colour of a subcommunicator
created with MPI_Comm_split from an MPIAIJ matrix. The following error
message appears when the code is run in its default configuration (i.e.
when a rectangular matrix with more rows than columns is extracted for
colour 0):
[0]PETSC ERROR: --------------------- Error Message
--------------------------------------------------------------
[0]PETSC ERROR: Argument out of range
[0]PETSC ERROR: Column too large: col 4 max 3
[0]PETSC ERROR: See https://petsc.org/release/faq/ for trouble shooting.
[0]PETSC ERROR: Petsc Release Version 3.22.2, unknown
... petsc git hash 2a89477b25f, compiled on a Dell i9 machine with GCC
14.3, MKL 2025.2, .....
[0]PETSC ERROR: #1 MatSetValues_SeqAIJ() at
...petsc/src/mat/impls/aij/seq/aij.c:426
[0]PETSC ERROR: #2 MatSetValues() at
...petsc/src/mat/interface/matrix.c:1543
[0]PETSC ERROR: #3 MatSetSeqMats_MPIAIJ() at
.../petsc/src/mat/impls/aij/mpi/mpiov.c:2965
[0]PETSC ERROR: #4 MatCreateSubMatricesMPI_MPIXAIJ() at
.../petsc/src/mat/impls/aij/mpi/mpiov.c:3163
[0]PETSC ERROR: #5 MatCreateSubMatricesMPI_MPIAIJ() at
.../petsc/src/mat/impls/aij/mpi/mpiov.c:3196
[0]PETSC ERROR: #6 MatCreateSubMatricesMPI() at
.../petsc/src/mat/interface/matrix.c:7293
[0]PETSC ERROR: #7 main() at sub.c:169
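For reference, the failing call boils down to the following condensed
excerpt of the attached reproducer (colour 0, rank 0, default rectangular
case; rank 1 uses block rows {2,3} and block column {3}):

// two block rows but only one block column selected on this rank
PetscInt selr[2] = {0, 1}, selc[1] = {0};
IS idxr, idxc;
Mat *submat;
PetscCall(ISCreateBlock(subcom, bs, 2, selr, PETSC_USE_POINTER, &idxr));
PetscCall(ISCreateBlock(subcom, bs, 1, selc, PETSC_USE_POINTER, &idxc));
PetscCall(MatCreateSubMatricesMPI(A, 1, &idxr, &idxc, MAT_INITIAL_MATRIX, &submat));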
When the '-ok' option is selected, the code extracts a square matrix for
colour 0, and in that case it runs smoothly. Selecting the '-trans'
option swaps the row and column selection indices and extracts the
transposed submatrix, also without any problem. For colour 1, which uses
only one process and is therefore sequential, rectangular extraction
works regardless of the shape.
Is this dependency on the shape expected? Have I missed an important
tuning step somewhere?
Thank you in advance for any clarification.
Regards
A.S.
P.S.: I'm sorry, but as I'm leaving my office this evening for the next
few weeks, I won't be very responsive during this period.
static char help[] = "Test MatCreateSubMatricesMPI strange behavior \n Needs 3 processes to work.";
#include <petscmat.h>
int main(int argc, char **args) {
Mat A;
PetscMPIInt rank, size;
PetscInt mbloc_per_proc = 2, nbloc_per_proc = 2;
const PetscInt bs = 2, bs2 = 4;
PetscInt rstart, rend;
PetscBool ok = PETSC_FALSE, trans = PETSC_FALSE, flg;
PetscFunctionBeginUser;
PetscCall(PetscInitialize(&argc, &args, NULL, help));
MPI_Comm_rank(PETSC_COMM_WORLD, &rank);
MPI_Comm_size(PETSC_COMM_WORLD, &size);
if (size!=3) MPI_Abort(PETSC_COMM_WORLD,-1);
PetscOptionsBegin(PETSC_COMM_WORLD, NULL, "sub", "Mat");
PetscCall(PetscOptionsBool("-ok", "switch to working version", NULL, ok, &ok, &flg));
PetscCall(PetscOptionsBool("-trans", "Try transpose selection", NULL, trans, &trans, &flg));
PetscOptionsEnd();
// dispatch output to files
char no[256];
sprintf(no, "proc_%d_output.txt", rank);
freopen(no, "w", stdout);
PetscInt mbloc = size * mbloc_per_proc, nbloc = size * nbloc_per_proc;
PetscInt M = mbloc * bs, N = nbloc * bs;
// Create matrix
PetscCall(MatCreate(PETSC_COMM_WORLD, &A));
PetscCall(MatSetSizes(A, PETSC_DECIDE, PETSC_DECIDE, M, N));
PetscCall(MatSetFromOptions(A));
PetscCall(MatSetBlockSizes(A,bs,bs));
PetscCall(MatSetUp(A));
// Get ownership range for rows
PetscCall(MatGetOwnershipRange(A, &rstart, &rend));
printf("rstart %d rend %d\n",rstart,rend);
// Insert values so that they lead to a sparse distribution and are easily identified
PetscScalar vals[bs2] = {0.};
for (PetscInt i = rstart/bs; i < rend/bs; ++i)
{
for (PetscInt j = 0; j < nbloc; ++j)
{
if (!((i + j) % 3))
{
for (PetscInt b = 0; b < bs; ++b)
{
vals[b * (bs + 1)] = 1.0 * (10 * i * bs + b + j * bs + b + 1);
}
PetscCall(MatSetValuesBlocked(A, 1, &i, 1, &j, vals, INSERT_VALUES));
}
}
}
// Final assembly
PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
// Comes from the original application, to check whether this has any influence
// answer: no
//PetscCall(MatSetOption(A, MAT_SYMMETRIC, PETSC_TRUE));
//PetscCall(MatSetOption(A, MAT_NEW_NONZERO_LOCATION_ERR, PETSC_TRUE));
// View for verification
PetscCall(MatView(A, PETSC_VIEWER_STDOUT_WORLD));
// set a sub communicator
// tuned for at least 3 processes
// one color for procs 0 and 1
// one color for proc 2 and above
int color = (rank > 1) ? 1 : 0;
MPI_Comm subcom;
MPI_Comm_split(PETSC_COMM_WORLD, color, 0, &subcom);
// set index sets on that sub communicator
// tuned for 3 processes only
IS idxc,idxr;
PetscInt size_r = 0, size_c = 0;
PetscInt selr[6];
PetscInt selc[6];
switch (rank)
{
case 0:
{
if (ok == PETSC_TRUE)
{
// square selection
selr[0] = 0;
size_r = 1;
selc[0] = 0;
size_c = 1;
}
else
{
// more rows than columns
selr[0] = 0;
selr[1] = 1;
size_r = 2;
selc[0] = 0;
size_c = 1;
}
break;
}
case 1:
{
if (ok == PETSC_TRUE)
{
// square selection
selr[0] = 3;
size_r = 1;
selc[0] = 3;
size_c = 1;
}
else
{
// more rows than columns
selr[0] = 2;
selr[1] = 3;
size_r = 2;
selc[0] = 3;
size_c = 1;
}
break;
}
case 2:
{
// select all block rows of 2 block columns
// without checking whether nonzero terms exist or not
// works with -ok and -trans as well
// shows that in sequential, more rows than columns is OK
selr[0]=0;
selr[1]=1;
selr[2]=2;
selr[3]=3;
selr[4]=4;
selr[5]=5;
size_r=6;
selc[0]=0;
selc[1]=4;
size_c=2;
break;
}
}
if (trans == PETSC_TRUE)
{
PetscCall(ISCreateBlock(subcom, bs, size_r, selr, PETSC_USE_POINTER, &idxc));
PetscCall(ISCreateBlock(subcom, bs, size_c, selc, PETSC_USE_POINTER, &idxr));
}
else
{
PetscCall(ISCreateBlock(subcom, bs, size_r, selr, PETSC_USE_POINTER, &idxr));
PetscCall(ISCreateBlock(subcom, bs, size_c, selc, PETSC_USE_POINTER, &idxc));
}
printf("idxr proc\n");
PetscCall(ISView(idxr, PETSC_VIEWER_STDOUT_(subcom)));
printf("idxc proc\n");
PetscCall(ISView(idxc, PETSC_VIEWER_STDOUT_(subcom)));
// extract sub matrices : 1 per color => 2 in total
Mat *submat;
PetscCall(MatCreateSubMatricesMPI(A, 1, &idxr, &idxc, MAT_INITIAL_MATRIX, &submat));
Mat B = submat[0];
// View per color
PetscCall(MatView(B, PETSC_VIEWER_STDOUT_(subcom)));
// Get ownership range for rows of sub matrices
PetscCall(MatGetOwnershipRange(B, &rstart, &rend));
printf("rstart %d rend %d\n",rstart,rend);
// inspect local row of sub matrix
PetscInt ncols;
const PetscInt *icols;
const PetscScalar *rvals;
for (PetscInt r = rstart; r < rend; ++r)
{
PetscCall(MatGetRow(B, r, &ncols, &icols, &rvals));
printf("local row %d:", r);
for (PetscInt i = 0; i < ncols; ++i)
{
printf(" ( %d , %e)", icols[i], rvals[i]);
}
printf("\n");
PetscCall(MatRestoreRow(B, r, &ncols, &icols, &rvals));
}
// Cleanup
PetscCall(MatDestroySubMatrices(1, &submat));
PetscCall(ISDestroy(&idxr));
PetscCall(ISDestroy(&idxc));
PetscCall(MatDestroy(&A));
PetscCall(PetscFinalize());
return 0;
}