Dear Matt,
I guess you're right: thresholding by rank==0 and rank==1 in
ParaView reveals that it is indeed the overlap cells that appear
twice in the output.
The attached file is not exactly minimal, but hopefully short enough. If
I run it in serial, all is fine, but with
mpirun -np 2 ./saveDemo
it creates a 10x10 grid and I get an "output.vtu" with a total of 120
cells instead of the expected 100. The pointSF of the DMPlex seems correct, however.
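(For reference, this is roughly how I inspected it; a minimal sketch on the distributed DM, along the lines of the commented-out code in the attached file:
  PetscSF sfPoint;
  PetscCall( DMGetPointSF(dmMesh, &sfPoint) );
  PetscCall( PetscViewerPushFormat(PETSC_VIEWER_STDOUT_WORLD, PETSC_VIEWER_ASCII_INFO_DETAIL) );
  PetscCall( PetscSFView(sfPoint, PETSC_VIEWER_STDOUT_WORLD) );
  PetscCall( PetscViewerPopFormat(PETSC_VIEWER_STDOUT_WORLD) );
and the leaf/root pairs it prints look consistent with the partition.)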
Thanks
Matteo
On 21/10/24 19:15, Matthew Knepley wrote:
On Mon, Oct 21, 2024 at 12:22 PM Matteo Semplice via petsc-users
<petsc-users@mcs.anl.gov> wrote:
Dear petsc-users,
I am having issues with output of parallel data attached to a
DMPlex (or maybe more fundamental ones about DMPlex...).
So I currently
1. create a DMPlex (DMPlexCreateGmshFromFile or DMPlexCreateBoxMesh)
2. partition it
3. create a section for my data layout with
DMPlexCreateSection(ctx.dmMesh, NULL, numComp, numDof, numBC,
NULL, NULL, NULL, NULL, &sUavg)
4. DMSetLocalSection(ctx.dmMesh, sUavg)
5. create solLoc and solGlob vectors with DMCreateGlobalVector
and DMCreateLocalVector
6. solve ....
7. VecView(ctx.solGlob, vtkViewer) on a .vtu file
but when I load the data in ParaView I get more cells than expected,
as if the cells in the halo were written twice to the output. (I
could create an MWE if the above is not clear)
I think we need an MWE here, because from the explanation above, it
should work.
However, I can try to guess the problem. When you partition the mesh,
I am guessing that you have cells in the overlap. These cells
must be in the point SF in order for the global section to give them a
unique owner. Perhaps something has gone wrong here.
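If it helps, here is one way to check that (a sketch, assuming the distributed DMPlex is dm, the local section is already set, and ilocal is non-NULL, as is typical for plex point SFs): every cell that appears as a leaf of the point SF should get a negative offset from the global section, which is how PETSc marks points owned by another rank:

  PetscSection    gs;
  PetscSF         sf;
  PetscInt        nLeaves, cEnd;
  const PetscInt *ilocal;
  PetscCall(DMGetGlobalSection(dm, &gs));
  PetscCall(DMGetPointSF(dm, &sf));
  PetscCall(DMPlexGetHeightStratum(dm, 0, NULL, &cEnd));
  PetscCall(PetscSFGetGraph(sf, NULL, &nLeaves, &ilocal, NULL));
  for (PetscInt i = 0; i < nLeaves; ++i) {
    PetscInt off;
    if (ilocal[i] >= cEnd) continue; /* only check cells, which are numbered first */
    PetscCall(PetscSectionGetOffset(gs, ilocal[i], &off));
    /* off >= 0 would mean this overlap cell is also claimed as locally owned */
    if (off >= 0) PetscCall(PetscPrintf(PETSC_COMM_SELF, "cell %" PetscInt_FMT " not marked remote\n", ilocal[i]));
  }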
Thanks,
Matt
I guess that the culprit is point (4), but if I replace it with
DMSetGlobalSection then I cannot create the local vector at point (5).
How should I handle this properly? In my code I need both local and
global vectors, since I must at least perform GlobalToLocal transfers
and save the global data.
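(Concretely, what I need to keep working is something like this sketch, with sUavg being the section from point (3):
  PetscCall( DMSetLocalSection(ctx.dmMesh, sUavg) );                  /* point (4) */
  PetscCall( DMCreateGlobalVector(ctx.dmMesh, &ctx.solGlob) );        /* point (5) */
  PetscCall( DMCreateLocalVector(ctx.dmMesh, &ctx.solLoc) );
  PetscCall( DMGlobalToLocalBegin(ctx.dmMesh, ctx.solGlob, INSERT_VALUES, ctx.solLoc) );
  PetscCall( DMGlobalToLocalEnd(ctx.dmMesh, ctx.solGlob, INSERT_VALUES, ctx.solLoc) );
)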
(On a side note, I also tried HDF5, but then it complains about the
DM not having a DS...; really, any working solution that allows the
data to be explored with ParaView is fine)
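(If attaching a DS is the way to go, I imagine something along these lines would be needed; this is my untested guess, collapsing the three fields into one 4-component discretization, with a made-up "avg_" options prefix, so piecewise constants would be requested with -avg_petscspace_degree 0:
  PetscFE fe;
  PetscCall( PetscFECreateDefault(PETSC_COMM_WORLD, 2, 4, PETSC_FALSE, "avg_", PETSC_DETERMINE, &fe) );
  PetscCall( DMSetField(ctx.dmMesh, 0, NULL, (PetscObject)fe) );
  PetscCall( PetscFEDestroy(&fe) );
  PetscCall( DMCreateDS(ctx.dmMesh) );
)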
Thanks for any advice!
Matteo Semplice
--
What most experimenters take for granted before they begin their
experiments is infinitely more interesting than any results to which
their experiments lead.
-- Norbert Wiener
https://www.cse.buffalo.edu/~knepley/
--
Associate Professor of Numerical Analysis
Dipartimento di Scienza e Alta Tecnologia
Università degli Studi dell'Insubria
Via Valleggio, 11 - Como
#include <petscdmplex.h>
#include <petscsf.h>
#include <cmath> // needed for std::ceil below (the file is compiled as C++)
int main(int argc, char **argv)
{
PetscFunctionBeginUser;
PetscCall( PetscInitialize(&argc, &argv, NULL,"") );
PetscMPIInt rank;
PetscCallMPI( MPI_Comm_rank(PETSC_COMM_WORLD,&rank) );
PetscReal lower[2]={0.,0.}; /* box bounds are coordinates: PetscReal, not PetscScalar */
PetscReal upper[2]={1.,1.};
DM dmMesh;
PetscReal dx=0.1;
PetscInt faces[2];
DMBoundaryType periodicity[2]={DM_BOUNDARY_NONE,DM_BOUNDARY_NONE};
PetscBool simplex=PETSC_FALSE;
PetscCall( PetscOptionsGetBool(NULL,NULL,"-simplex",&simplex,NULL) ); /* value pointer comes before the set flag */
const PetscBool interpolate=PETSC_TRUE;
PetscCall( PetscOptionsGetReal(NULL,NULL,"-dx",&dx,NULL) );
faces[0] = (PetscInt)std::ceil( (upper[0] - lower[0]) / dx );
faces[1] = (PetscInt)std::ceil( (upper[1] - lower[1]) / dx );
PetscCall( PetscPrintf( PETSC_COMM_WORLD, "Creating mesh with (%" PetscInt_FMT ",%" PetscInt_FMT ") faces\n", faces[0], faces[1] ) );
PetscCall( DMPlexCreateBoxMesh(PETSC_COMM_WORLD, 2, simplex, faces, lower, upper, periodicity, interpolate, &dmMesh) );
PetscCall( PetscObjectSetName((PetscObject) dmMesh, "Mesh") );
PetscCall( DMSetFromOptions(dmMesh) );
//PARTITION THE MESH
PetscPartitioner partitioner;
PetscCall( DMPlexGetPartitioner(dmMesh, &partitioner) );
//PetscCall( PetscPartitionerSetType(partitioner, PETSCPARTITIONERPARMETIS) );
PetscCall( PetscPartitionerSetFromOptions(partitioner) );
DM dmDist=NULL;
PetscCall( DMSetBasicAdjacency(dmMesh, PETSC_TRUE, PETSC_TRUE) );
PetscCall( DMPlexDistribute(dmMesh, 1, NULL, &dmDist) ); /* overlap = 1 layer of ghost cells */
if (dmDist) { PetscCall( DMDestroy(&dmMesh) ); dmMesh = dmDist; }
PetscInt cStart, cEnd;
PetscCall( DMPlexGetHeightStratum(dmMesh, 0, &cStart, &cEnd) );
PetscSF sfPoint;
PetscCall( DMGetPointSF(dmMesh, &sfPoint) );
//PetscViewerPushFormat(PETSC_VIEWER_STDOUT_WORLD,PETSC_VIEWER_ASCII_INFO_DETAIL);
//PetscSFView(sfPoint,PETSC_VIEWER_STDOUT_WORLD);
//PetscViewerPopFormat(PETSC_VIEWER_STDOUT_WORLD);
PetscInt numLeaves;
const PetscInt *localPoints;
PetscCall( PetscSFGetGraph(sfPoint, NULL, &numLeaves, &localPoints, NULL) );
DMLabel inghostLabel;
PetscCall( DMCreateLabel(dmMesh, "inghosts") );
PetscCall( DMGetLabel(dmMesh, "inghosts", &inghostLabel) );
for (PetscInt i=0; i<numLeaves; i++){
//PetscPrintf(PETSC_COMM_SELF,"[%d] %" PetscInt_FMT " is inner ghost\n",rank,localPoints[i]);
PetscCall( DMLabelSetValue(inghostLabel, localPoints[i], 1) );
}
PetscInt numRoots;
const PetscSFNode *remotePoints;
PetscCall( PetscSFGetGraph(sfPoint, &numRoots, &numLeaves, &localPoints, &remotePoints) );
for (PetscInt i=0; i<numLeaves; i++)
if (localPoints[i]<cEnd)
PetscCall( PetscPrintf(PETSC_COMM_SELF,"[on rank %d] %" PetscInt_FMT " <- (%" PetscInt_FMT ",%" PetscInt_FMT ")\n",rank,localPoints[i],remotePoints[i].rank,remotePoints[i].index) );
/* Setup the section for cell averages*/
PetscSection sUavg;
{
const PetscInt numFields = 3;
const PetscInt numComp[numFields] = {1,2,1};                     /* rho (1), M (2), E (1) */
const PetscInt numDof[numFields*3] = { 0,0,1 , 0,0,2 , 0,0,1 };  /* per field: dofs on vertices, edges, cells */
const PetscInt numBC = 0;
PetscCall(DMSetNumFields(dmMesh, numFields));
PetscCall(DMPlexCreateSection(dmMesh, NULL, numComp, numDof, numBC, NULL, NULL, NULL, NULL, &sUavg));
/* Name the Field variables */
PetscCall(PetscSectionSetFieldName(sUavg, 0, "rho"));
PetscCall(PetscSectionSetFieldName(sUavg, 1, "M"));
PetscCall(PetscSectionSetFieldName(sUavg, 2, "E"));
//PetscCall(PetscSectionView(section, PETSC_VIEWER_STDOUT_WORLD));
}
PetscCall( DMSetLocalSection(dmMesh, sUavg) );
PetscCall( DMGetSectionSF(dmMesh, &sfPoint) ); /* built from the local section just set; note this reuses the sfPoint variable */
//maybe we need this? Alas, it does not make a difference...
PetscCall( PetscSectionDestroy(&sUavg) ); /* safe: the DM keeps its own reference */
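/* Optional ownership check (diagnostic sketch): the global section should
   give a negative offset to every cell owned by another rank, so the
   owned-cell counts printed below should sum to 100 on the default grid. */
{
  PetscSection gsec;
  PetscInt     nOwned = 0;
  PetscCall( DMGetGlobalSection(dmMesh, &gsec) );
  for (PetscInt c=cStart; c<cEnd; ++c){
    PetscInt off;
    PetscCall( PetscSectionGetOffset(gsec, c, &off) );
    if (off >= 0) nOwned++; /* off < 0 marks a cell owned elsewhere */
  }
  PetscCall( PetscSynchronizedPrintf(PETSC_COMM_WORLD, "[rank %d] owns %" PetscInt_FMT " of %" PetscInt_FMT " local cells\n", rank, nOwned, cEnd-cStart) );
  PetscCall( PetscSynchronizedFlush(PETSC_COMM_WORLD, PETSC_STDOUT) );
}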
Vec solGlob, solLoc;
PetscCall( DMCreateGlobalVector(dmMesh, &solGlob) );
PetscCall( PetscObjectSetName((PetscObject) solGlob , "Uavg") );
PetscCall( DMCreateLocalVector(dmMesh, &solLoc) );
//Fill in local values and then loc2glob
PetscScalar *values, *cellVal;
PetscCall( VecGetArray(solLoc, &values) );
for (PetscInt c=cStart; c<cEnd; ++c){
PetscCall( DMPlexPointLocalRef(dmMesh, c, values, &cellVal) );
for (int m=0; m<4; ++m) cellVal[m] = rank + m/10.; /* 4 dofs per cell (rho,Mx,My,E); the value encodes the owning rank */
}
PetscCall( VecRestoreArray(solLoc, &values) );
PetscCall( DMLocalToGlobal(dmMesh, solLoc, INSERT_VALUES, solGlob) );
//output the global solution
PetscViewer vtkViewer;
PetscCall(PetscViewerCreate(PETSC_COMM_WORLD, &vtkViewer));
PetscCall(PetscViewerSetType(vtkViewer, PETSCVIEWERVTK));
PetscCall(PetscViewerFileSetName(vtkViewer, "output.vtu"));
PetscCall(VecView(solGlob, vtkViewer));
PetscCall(PetscViewerDestroy(&vtkViewer));
PetscCall( VecDestroy( &solGlob) );
PetscCall( VecDestroy( &solLoc) );
PetscCall( DMDestroy( &dmMesh) );
PetscCall( PetscFinalize() );
}