Dear PETSc team,
I have a parallel matrix that, in some cases, I want to turn into a
sequential matrix on rank 0. I have been successfully using
"MatCreateSubMatrices" for this purpose, along the following lines (a
small but complete reproducer is attached):
--
if (MPIrank==0){
  nSubMatToExtract=1;
  ierr = ISCreateStride(PETSC_COMM_WORLD,globalSizeOfTheMatrix,0,1,&IS_MPI); CHKERRQ(ierr);
} else {
  nSubMatToExtract=0;
  ierr = ISCreateStride(PETSC_COMM_WORLD,0,0,1,&IS_MPI); CHKERRQ(ierr);
}
ierr = MatCreateSubMatrices(A,nSubMatToExtract,&IS_MPI,&IS_MPI,MAT_INITIAL_MATRIX,Aseq);
--
Unfortunately, this no longer seems to work with PETSc 3.13.3 (while it
worked at least up to 3.12.5, the latest version I tried before 3.13.3).
The error message mainly says:
--
PETSC ERROR: Petsc has generated inconsistent data
PETSC ERROR: MPI_Allreduce() called in different locations (code lines)
on different processors
--
So was I just lucky that it worked before? And/or is there another
(better) way to reach this goal?
For your information, I had implemented this to obtain and then solve a
coarse-level domain decomposition matrix, before discovering the
possibility of using the "Telescope" option to agglomerate the unknowns
spread over the subdomains. I managed to use Telescope (roughly as sketched
below), but I would like to know whether I can keep using my previous
approach in case I want to gather all the coarse-level unknowns on a
single rank.
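For context, by "the Telescope option" I mean a setup roughly like the
following (a minimal sketch, not my actual code; "coarse_ksp" stands for
whatever KSP solves the coarse-level problem, and "commsize" for the size
of its communicator):
--
PC coarse_pc;
ierr = KSPGetPC(coarse_ksp, &coarse_pc); CHKERRQ(ierr);
ierr = PCSetType(coarse_pc, PCTELESCOPE); CHKERRQ(ierr);
/* a reduction factor equal to the communicator size leaves a single active rank */
ierr = PCTelescopeSetReductionFactor(coarse_pc, commsize); CHKERRQ(ierr);
ierr = KSPSetFromOptions(coarse_ksp); CHKERRQ(ierr);
/* the solve on that rank is then configured with the "telescope_" options
   prefix, e.g. -telescope_pc_type lu */
--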
Thanks a lot,
Serge
#include <petscmat.h>
int main(int argc, char **argv)
{
  PetscErrorCode ierr;
  const PetscInt n = 10;
  PetscInt i, istart, iend;
  Mat A;
  const PetscScalar one = 1.0;
  IS IS_MPI;
  PetscInt nSubMatToExtract, size1, size2;
  PetscMPIInt MPIsize, MPIrank;
  Mat *Aseq[1];
  ierr = PetscInitialize(&argc, &argv, NULL, NULL);
  if (ierr) {
    printf("Error: Could not initialize PETSc, aborting...\n");
    return ierr;
  }
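  /* assemble a simple diagonal MPIAIJ matrix with n rows per rank */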
  ierr = MatCreate(PETSC_COMM_WORLD, &A); CHKERRQ(ierr);
  ierr = MatSetType(A, MATMPIAIJ); CHKERRQ(ierr);
  ierr = MatSetSizes(A, n, n, PETSC_DETERMINE, PETSC_DETERMINE); CHKERRQ(ierr);
  ierr = MatMPIAIJSetPreallocation(A, 1, NULL, 0, NULL); CHKERRQ(ierr);
  ierr = MatGetOwnershipRange(A, &istart, &iend); CHKERRQ(ierr);
  for (i = istart; i < iend; i++) {
    ierr = MatSetValues(A, 1, &i, 1, &i, &one, INSERT_VALUES); CHKERRQ(ierr);
  }
  ierr = MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
  ierr = MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
  ierr = MatView(A, PETSC_VIEWER_STDOUT_WORLD); CHKERRQ(ierr);
  ierr = MPI_Comm_size(PETSC_COMM_WORLD,&MPIsize);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(PETSC_COMM_WORLD,&MPIrank);CHKERRQ(ierr);
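  /* rank 0 requests the whole matrix as one sequential submatrix; the other ranks request none */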
  if (MPIrank==0){
    nSubMatToExtract=1;
    ierr = ISCreateStride(PETSC_COMM_WORLD,MPIsize*n,0,1,&IS_MPI); CHKERRQ(ierr);
  } else {
    nSubMatToExtract=0;
    ierr = ISCreateStride(PETSC_COMM_WORLD,0,0,1,&IS_MPI); CHKERRQ(ierr);
  }
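  /* with PETSc 3.13.3 this call aborts with the MPI_Allreduce inconsistency error quoted above */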
  ierr = MatCreateSubMatrices(A,nSubMatToExtract,&IS_MPI,&IS_MPI,MAT_INITIAL_MATRIX,Aseq); CHKERRQ(ierr);
  if (MPIrank==0){
    /* only rank 0 owns a submatrix, so query and view it there only */
    ierr = MatGetSize(**Aseq, &size1, &size2); CHKERRQ(ierr);
    ierr = PetscPrintf(PETSC_COMM_SELF, " size 1 2 %D %D \n", size1, size2); CHKERRQ(ierr);
    //ierr = MatView(**Aseq, PETSC_VIEWER_STDOUT_SELF); CHKERRQ(ierr);
  }
  ierr = ISDestroy(&IS_MPI); CHKERRQ(ierr);
  ierr = MatDestroy(&A); CHKERRQ(ierr);
  ierr = MatDestroySubMatrices(nSubMatToExtract,Aseq); CHKERRQ(ierr);
  ierr = PetscFinalize();
  return ierr;
}