Configure PETSc with --with-strict-petscerrorcode (#3962)
And necessary fixes to tinyASM
connorjward authored Jan 10, 2025
1 parent a08fc1e commit 5c6e320
Showing 6 changed files with 47 additions and 39 deletions.
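
For context: --with-strict-petscerrorcode builds PETSc so that PetscErrorCode is treated as a value that must not be ignored; roughly, a PETSc call whose result is discarded, or a PetscErrorCode function that ends with a bare "return 0;", now triggers compiler diagnostics. The configure changes below enable the option in each build path, and the tinyASM changes adopt the corresponding calling convention. A minimal sketch of that convention, using a hypothetical helper that is not part of this commit:

#include <petscsys.h>

/* Illustrative helper only: wrap every PETSc call in PetscCall() and end
   PetscErrorCode functions with PetscFunctionReturn(PETSC_SUCCESS) rather
   than a bare "return 0;". */
static PetscErrorCode PrintBlockCount(MPI_Comm comm, PetscInt nblocks)
{
  PetscFunctionBegin;
  PetscCall(PetscPrintf(comm, "number of blocks: %" PetscInt_FMT "\n", nblocks));
  PetscFunctionReturn(PETSC_SUCCESS);
}

int main(int argc, char **argv)
{
  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, NULL, NULL));
  PetscCall(PrintBlockCount(PETSC_COMM_WORLD, 4));
  PetscCall(PetscFinalize());
  return 0;
}
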
1 change: 1 addition & 0 deletions .github/workflows/pip-mac.yml
@@ -53,6 +53,7 @@ jobs:
--with-shared-libraries=1 \
--with-mpi-dir=/opt/homebrew \
--with-zlib \
+--with-strict-petscerrorcode \
--download-bison \
--download-hdf5 \
--download-hwloc \
1 change: 1 addition & 0 deletions .github/workflows/pyop2.yml
@@ -51,6 +51,7 @@ jobs:
--with-debugging=1 \
--with-shared-libraries=1 \
--with-c2html=0 \
+--with-strict-petscerrorcode \
--with-fortran-bindings=0
make
3 changes: 3 additions & 0 deletions docker/Dockerfile.env
@@ -66,6 +66,7 @@ RUN bash -c 'cd petsc; \
--download-scalapack \
--download-suitesparse \
--download-superlu_dist \
+--with-strict-petscerrorcode \
PETSC_ARCH=packages; \
mv packages/include/petscconf.h packages/include/old_petscconf.nope; \
rm -rf /home/firedrake/petsc/**/externalpackages; \
@@ -105,6 +106,7 @@ RUN bash -c 'export PACKAGES=/home/firedrake/petsc/packages; \
--with-scalapack-dir=$PACKAGES \
--with-suitesparse-dir=$PACKAGES \
--with-superlu_dist-dir=$PACKAGES \
+--with-strict-petscerrorcode \
PETSC_ARCH=default; \
make PETSC_DIR=/home/firedrake/petsc PETSC_ARCH=default all;'

@@ -144,6 +146,7 @@ RUN bash -c 'export PACKAGES=/home/firedrake/petsc/packages; \
--with-scalapack-dir=$PACKAGES \
--with-suitesparse-dir=$PACKAGES \
--with-superlu_dist-dir=$PACKAGES \
+--with-strict-petscerrorcode \
PETSC_ARCH=complex; \
make PETSC_DIR=/home/firedrake/petsc PETSC_ARCH=complex all;'

1 change: 1 addition & 0 deletions scripts/firedrake-install
@@ -726,6 +726,7 @@ def get_petsc_options(minimal=False):
"--with-debugging=0",
"--with-shared-libraries=1",
"--with-c2html=0",
"--with-strict-petscerrorcode",
# Parser generator
"--download-bison"}
for pkg in get_minimal_petsc_packages():
2 changes: 1 addition & 1 deletion tinyasm/matinvert.cpp
@@ -6,6 +6,6 @@ PetscErrorCode mymatinvert(PetscBLASInt* n, PetscScalar* mat, PetscBLASInt* piv,
PetscCheck(!(*info), PETSC_COMM_SELF, PETSC_ERR_LIB, "TinyASM error calling ?getrf in mymatinvert");
PetscCallBLAS("LAPACKgetri", LAPACKgetri_(n, mat, n, piv, work, n, info));
PetscCheck(!(*info), PETSC_COMM_SELF, PETSC_ERR_LIB, "TinyASM error calling ?getri in mymatinvert");
-return PETSC_SUCCESS;
+PetscFunctionReturn(PETSC_SUCCESS);
}

78 changes: 40 additions & 38 deletions tinyasm/tinyasm.cpp
@@ -63,31 +63,31 @@ class BlockJacobi {
fwork = vector<PetscScalar>(biggestBlock, 0.);
localmats_aij = NULL;
dofis = vector<IS>(numBlocks);
-PetscMalloc1(numBlocks, &localmats);
+PetscCallVoid(PetscMalloc1(numBlocks, &localmats));
for(int p=0; p<numBlocks; p++) {
localmats[p] = NULL;
-ISCreateGeneral(MPI_COMM_SELF, globalDofsPerBlock[p].size(), globalDofsPerBlock[p].data(), PETSC_USE_POINTER, dofis.data() + p);
+PetscCallVoid(ISCreateGeneral(MPI_COMM_SELF, globalDofsPerBlock[p].size(), globalDofsPerBlock[p].data(), PETSC_USE_POINTER, dofis.data() + p));
}
}

~BlockJacobi() {
int numBlocks = dofsPerBlock.size();
for(int p=0; p<numBlocks; p++) {
-ISDestroy(&dofis[p]);
+PetscCallVoid(ISDestroy(&dofis[p]));
}
if(localmats_aij) {
-MatDestroySubMatrices(numBlocks, &localmats_aij);
+PetscCallVoid(MatDestroySubMatrices(numBlocks, &localmats_aij));
}
if (localmats) {
for (int p=0; p<numBlocks; p++) {
-MatDestroy(&localmats[p]);
+PetscCallVoid(MatDestroy(&localmats[p]));
}
-PetscFree(localmats);
+PetscCallVoid(PetscFree(localmats));
}
-PetscSFDestroy(&sf);
+PetscCallVoid(PetscSFDestroy(&sf));
}

-PetscInt updateValuesPerBlock(Mat P) {
+PetscErrorCode updateValuesPerBlock(Mat P) {
PetscBLASInt dof, info;
int numBlocks = dofsPerBlock.size();
PetscCall(MatCreateSubMatrices(P, numBlocks, dofis.data(), dofis.data(), localmats_aij ? MAT_REUSE_MATRIX : MAT_INITIAL_MATRIX, &localmats_aij));
@@ -96,14 +96,14 @@ class BlockJacobi {
PetscCall(MatConvert(localmats_aij[p], MATDENSE, localmats[p] ? MAT_REUSE_MATRIX : MAT_INITIAL_MATRIX,&localmats[p]));
PetscCall(PetscBLASIntCast(dofsPerBlock[p].size(), &dof));
PetscCall(MatDenseGetArrayWrite(localmats[p],&vv));
-if (dof) mymatinvert(&dof, vv, piv.data(), &info, fwork.data());
+if (dof) PetscCall(mymatinvert(&dof, vv, piv.data(), &info, fwork.data()));
PetscCall(MatDenseRestoreArrayWrite(localmats[p],&vv));
}
-return 0;
+PetscFunctionReturn(PETSC_SUCCESS);
}


-PetscInt solve(const PetscScalar* __restrict b, PetscScalar* __restrict x) {
+PetscErrorCode solve(const PetscScalar* __restrict b, PetscScalar* __restrict x) {
PetscScalar dOne = 1.0;
PetscBLASInt dof, one = 1;
PetscScalar dZero = 0.0;
@@ -126,13 +126,12 @@ class BlockJacobi {
}
PetscCall(MatDenseRestoreArrayRead(localmats[p],&matvalues));
}
-return 0;
+PetscFunctionReturn(PETSC_SUCCESS);
}
};

PetscErrorCode CreateCombinedSF(PC pc, const std::vector<PetscSF>& sf, const std::vector<PetscInt>& bs, PetscSF *newsf)
{
-PetscInt i;
auto n = sf.size();

PetscFunctionBegin;
@@ -159,7 +158,7 @@ PetscErrorCode CreateCombinedSF(PC pc, const std::vector<PetscSF>& sf, const std
* allRoots: number of owned global dofs;
* allLeaves: number of visible dofs (global + ghosted).
*/
-for (i = 0; i < n; ++i) {
+for (size_t i = 0; i < n; ++i) {
PetscInt nroots, nleaves;

PetscCall(PetscSFGetGraph(sf[i], &nroots, &nleaves, NULL, NULL));
@@ -170,7 +169,7 @@ PetscErrorCode CreateCombinedSF(PC pc, const std::vector<PetscSF>& sf, const std
PetscCall(PetscMalloc1(allLeaves, &iremote));
// Now build an SF that just contains process connectivity.
PetscCall(PetscHSetICreate(&ranksUniq));
-for (i = 0; i < n; ++i) {
+for (size_t i = 0; i < n; ++i) {
const PetscMPIInt *ranks = NULL;
PetscMPIInt nranks, j;

@@ -187,7 +186,7 @@ PetscErrorCode CreateCombinedSF(PC pc, const std::vector<PetscSF>& sf, const std
PetscCall(PetscHSetIGetElems(ranksUniq, &index, ranks));

PetscCall(PetscHMapICreate(&rankToIndex));
-for (i = 0; i < numRanks; ++i) {
+for (PetscInt i = 0; i < numRanks; ++i) {
remote[i].rank = ranks[i];
remote[i].index = 0;
PetscCall(PetscHMapISet(rankToIndex, ranks[i], i));
@@ -203,16 +202,16 @@ PetscErrorCode CreateCombinedSF(PC pc, const std::vector<PetscSF>& sf, const std
PetscCall(PetscMalloc1(n*numRanks, &remoteOffsets));

offsets[0] = 0;
-for (i = 1; i < n; ++i) {
+for (size_t i = 1; i < n; ++i) {
PetscInt nroots;

PetscCall(PetscSFGetGraph(sf[i-1], &nroots, NULL, NULL, NULL));
offsets[i] = offsets[i-1] + nroots*bs[i-1];
}
/* Offsets are the offsets on the current process of the
* global dof numbering for the subspaces. */
-PetscCall(MPI_Type_contiguous(n, MPIU_INT, &contig));
-PetscCall(MPI_Type_commit(&contig));
+PetscCallMPI(MPI_Type_contiguous(n, MPIU_INT, &contig));
+PetscCallMPI(MPI_Type_commit(&contig));

#if MY_PETSC_VERSION_LT(3, 14, 4)
PetscCall(PetscSFBcastBegin(rankSF, contig, offsets, remoteOffsets));
@@ -221,14 +220,14 @@ PetscErrorCode CreateCombinedSF(PC pc, const std::vector<PetscSF>& sf, const std
PetscCall(PetscSFBcastBegin(rankSF, contig, offsets, remoteOffsets, MPI_REPLACE));
PetscCall(PetscSFBcastEnd(rankSF, contig, offsets, remoteOffsets, MPI_REPLACE));
#endif
-PetscCall(MPI_Type_free(&contig));
+PetscCallMPI(MPI_Type_free(&contig));
PetscCall(PetscFree(offsets));
PetscCall(PetscSFDestroy(&rankSF));
/* Now remoteOffsets contains the offsets on the remote
* processes who communicate with me. So now we can
* concatenate the list of SFs into a single one. */
index = 0;
-for (i = 0; i < n; ++i) {
+for (size_t i = 0; i < n; ++i) {
const PetscSFNode *remote = NULL;
const PetscInt *local = NULL;
PetscInt nroots, nleaves, j;
@@ -256,7 +255,7 @@ PetscErrorCode CreateCombinedSF(PC pc, const std::vector<PetscSF>& sf, const std
PetscCall(PetscSFCreate(PetscObjectComm((PetscObject)pc), newsf));
PetscCall(PetscSFSetGraph(*newsf, allRoots, allLeaves, ilocal, PETSC_OWN_POINTER, iremote, PETSC_OWN_POINTER));
}
-PetscFunctionReturn(0);
+PetscFunctionReturn(PETSC_SUCCESS);
}


@@ -266,7 +265,7 @@ PetscErrorCode PCSetup_TinyASM(PC pc) {
auto blockjacobi = (BlockJacobi *)pc->data;
blockjacobi -> updateValuesPerBlock(P);
PetscCall(PetscLogEventEnd(PC_tinyasm_setup, pc, 0, 0, 0));
-return 0;
+PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode PCApply_TinyASM(PC pc, Vec b, Vec x) {
@@ -295,13 +294,13 @@ PetscErrorCode PCApply_TinyASM(PC pc, Vec b, Vec x) {
PetscCall(PetscSFReduceEnd(blockjacobi->sf, MPIU_SCALAR, &(blockjacobi->localx[0]), globalx, MPI_SUM));
PetscCall(VecRestoreArray(x, &globalx));
PetscCall(PetscLogEventEnd(PC_tinyasm_apply, pc, 0, 0, 0));
-return 0;
+PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode PCDestroy_TinyASM(PC pc) {
if(pc->data)
delete (BlockJacobi *)pc->data;
-return 0;
+PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode PCView_TinyASM(PC pc, PetscViewer viewer) {
@@ -320,7 +319,7 @@ PetscErrorCode PCView_TinyASM(PC pc, PetscViewer viewer) {
PetscCall(PetscViewerASCIIPrintf(viewer, "Largest block size %" PetscInt_FMT " \n", biggestblock));
PetscCall(PetscViewerASCIIPopTab(viewer));
}
-return 0;
+PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode PCCreate_TinyASM(PC pc) {
@@ -329,7 +328,7 @@ PetscErrorCode PCCreate_TinyASM(PC pc) {
pc->ops->setup = PCSetup_TinyASM;
pc->ops->destroy = PCDestroy_TinyASM;
pc->ops->view = PCView_TinyASM;
-return 0;
+PetscFunctionReturn(PETSC_SUCCESS);
}
// pybind11 casters for PETSc/petsc4py objects, copied from dolfinx repo
// Import petsc4py on demand
@@ -385,40 +384,43 @@ PYBIND11_MODULE(_tinyasm, m) {
PetscLogEventRegister("PCTinyASMApply", PC_CLASSID, &PC_tinyasm_apply);
m.def("SetASMLocalSubdomains",
[](PC pc, std::vector<IS> ises, std::vector<PetscSF> sfs, std::vector<PetscInt> blocksizes, int localsize) {
-PetscInt i, p, numDofs;
-PetscCall(PetscLogEventBegin(PC_tinyasm_SetASMLocalSubdomains, pc, 0, 0, 0));
+PetscInt p, numDofs;
+
+MPI_Comm comm = PetscObjectComm((PetscObject) pc);
+
+PetscCallAbort(comm, PetscLogEventBegin(PC_tinyasm_SetASMLocalSubdomains, pc, 0, 0, 0));
auto P = pc->pmat;
ISLocalToGlobalMapping lgr;
ISLocalToGlobalMapping lgc;
-MatGetLocalToGlobalMapping(P, &lgr, &lgc);
+PetscCallAbort(comm, MatGetLocalToGlobalMapping(P, &lgr, &lgc));

int numBlocks = ises.size();
vector<vector<PetscInt>> dofsPerBlock(numBlocks);
vector<vector<PetscInt>> globalDofsPerBlock(numBlocks);
const PetscInt* isarray;

for (p = 0; p < numBlocks; p++) {
-PetscCall(ISGetSize(ises[p], &numDofs));
-PetscCall(ISGetIndices(ises[p], &isarray));
+PetscCallAbort(comm, ISGetSize(ises[p], &numDofs));
+PetscCallAbort(comm, ISGetIndices(ises[p], &isarray));

dofsPerBlock[p] = vector<PetscInt>();
dofsPerBlock[p].reserve(numDofs);
globalDofsPerBlock[p] = vector<PetscInt>(numDofs, 0);

-for (i = 0; i < numDofs; i++) {
+for (PetscInt i = 0; i < numDofs; i++) {
dofsPerBlock[p].push_back(isarray[i]);
}
-PetscCall(ISRestoreIndices(ises[p], &isarray));
-ISLocalToGlobalMappingApply(lgr, numDofs, &dofsPerBlock[p][0], &globalDofsPerBlock[p][0]);
+PetscCallAbort(comm, ISRestoreIndices(ises[p], &isarray));
+PetscCallAbort(comm, ISLocalToGlobalMappingApply(lgr, numDofs, &dofsPerBlock[p][0], &globalDofsPerBlock[p][0]));
}
DM dm;
-PetscCall(PCGetDM(pc, &dm));
+PetscCallAbort(comm, PCGetDM(pc, &dm));

PetscSF newsf;
-PetscCall(CreateCombinedSF(pc, sfs, blocksizes, &newsf));
+PetscCallAbort(comm, CreateCombinedSF(pc, sfs, blocksizes, &newsf));
auto blockjacobi = new BlockJacobi(dofsPerBlock, globalDofsPerBlock, localsize, newsf);
pc->data = (void*)blockjacobi;
-PetscCall(PetscLogEventEnd(PC_tinyasm_SetASMLocalSubdomains, pc, 0, 0, 0));
+PetscCallAbort(comm, PetscLogEventEnd(PC_tinyasm_SetASMLocalSubdomains, pc, 0, 0, 0));
return 0;
});
}
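
The tinyASM changes above consistently pick the error-checking macro that matches the calling context: PetscCall inside functions returning PetscErrorCode, PetscCallVoid inside the BlockJacobi constructor and destructor (which return nothing), PetscCallMPI around raw MPI calls, PetscCallBLAS around LAPACK routines (with PetscCheck validating the info result in matinvert.cpp), and PetscCallAbort where the surrounding code does not propagate a PetscErrorCode, as in the pybind11 lambda. A short sketch of the same conventions with invented names, not code taken from the commit:

#include <petscis.h>
#include <petscsys.h>

struct IndexHolder {
  IS is = nullptr;
  // Destructors cannot return PetscErrorCode, so errors are checked with
  // PetscCallVoid, as in ~BlockJacobi above.
  ~IndexHolder() { PetscCallVoid(ISDestroy(&is)); }
};

// Ordinary helpers return PetscErrorCode and propagate failures with PetscCall;
// raw MPI calls are checked with PetscCallMPI instead.
static PetscErrorCode MakeRange(MPI_Comm comm, PetscInt n, IndexHolder &h)
{
  PetscFunctionBegin;
  PetscCall(ISCreateStride(comm, n, 0, 1, &h.is));
  PetscCallMPI(MPI_Barrier(comm));
  PetscFunctionReturn(PETSC_SUCCESS);
}

// A callback that returns plain int (like the SetASMLocalSubdomains lambda)
// does not hand the error code back, so it aborts on the communicator instead.
static int MakeRangeOrAbort(MPI_Comm comm, PetscInt n, IndexHolder &h)
{
  PetscCallAbort(comm, MakeRange(comm, n, h));
  return 0;
}
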
