diff --git a/.github/workflows/pip-mac.yml b/.github/workflows/pip-mac.yml
index 54e2ada7cc..cf89b10724 100644
--- a/.github/workflows/pip-mac.yml
+++ b/.github/workflows/pip-mac.yml
@@ -53,6 +53,7 @@ jobs:
           --with-shared-libraries=1 \
           --with-mpi-dir=/opt/homebrew \
           --with-zlib \
+          --with-strict-petscerrorcode \
           --download-bison \
           --download-hdf5 \
           --download-hwloc \
diff --git a/.github/workflows/pyop2.yml b/.github/workflows/pyop2.yml
index 5104284d05..d6e5a4acfb 100644
--- a/.github/workflows/pyop2.yml
+++ b/.github/workflows/pyop2.yml
@@ -51,6 +51,7 @@ jobs:
             --with-debugging=1 \
             --with-shared-libraries=1 \
             --with-c2html=0 \
+            --with-strict-petscerrorcode \
             --with-fortran-bindings=0
           make
diff --git a/docker/Dockerfile.env b/docker/Dockerfile.env
index d40a613f85..5a1803a10e 100644
--- a/docker/Dockerfile.env
+++ b/docker/Dockerfile.env
@@ -66,6 +66,7 @@ RUN bash -c 'cd petsc; \
             --download-scalapack \
             --download-suitesparse \
             --download-superlu_dist \
+            --with-strict-petscerrorcode \
             PETSC_ARCH=packages; \
     mv packages/include/petscconf.h packages/include/old_petscconf.nope; \
     rm -rf /home/firedrake/petsc/**/externalpackages; \
@@ -105,6 +106,7 @@ RUN bash -c 'export PACKAGES=/home/firedrake/petsc/packages; \
             --with-scalapack-dir=$PACKAGES \
             --with-suitesparse-dir=$PACKAGES \
             --with-superlu_dist-dir=$PACKAGES \
+            --with-strict-petscerrorcode \
             PETSC_ARCH=default; \
     make PETSC_DIR=/home/firedrake/petsc PETSC_ARCH=default all;'
@@ -144,6 +146,7 @@ RUN bash -c 'export PACKAGES=/home/firedrake/petsc/packages; \
             --with-scalapack-dir=$PACKAGES \
             --with-suitesparse-dir=$PACKAGES \
             --with-superlu_dist-dir=$PACKAGES \
+            --with-strict-petscerrorcode \
             PETSC_ARCH=complex; \
     make PETSC_DIR=/home/firedrake/petsc PETSC_ARCH=complex all;'
diff --git a/scripts/firedrake-install b/scripts/firedrake-install
index 17a8d7fdbc..8994328f7b 100755
--- a/scripts/firedrake-install
+++ b/scripts/firedrake-install
@@ -726,6 +726,7 @@ def get_petsc_options(minimal=False):
               "--with-debugging=0",
               "--with-shared-libraries=1",
               "--with-c2html=0",
+              "--with-strict-petscerrorcode",
               # Parser generator
               "--download-bison"}
     for pkg in get_minimal_petsc_packages():
diff --git a/tinyasm/matinvert.cpp b/tinyasm/matinvert.cpp
index 0c1e7025c2..525dcefbd0 100644
--- a/tinyasm/matinvert.cpp
+++ b/tinyasm/matinvert.cpp
@@ -6,6 +6,6 @@ PetscErrorCode mymatinvert(PetscBLASInt* n, PetscScalar* mat, PetscBLASInt* piv,
     PetscCheck(!(*info), PETSC_COMM_SELF, PETSC_ERR_LIB, "TinyASM error calling ?getrf in mymatinvert");
     PetscCallBLAS("LAPACKgetri", LAPACKgetri_(n, mat, n, piv, work, n, info));
     PetscCheck(!(*info), PETSC_COMM_SELF, PETSC_ERR_LIB, "TinyASM error calling ?getri in mymatinvert");
-    return PETSC_SUCCESS;
+    PetscFunctionReturn(PETSC_SUCCESS);
 }
diff --git a/tinyasm/tinyasm.cpp b/tinyasm/tinyasm.cpp
index cf583a6fc8..78261b7ac2 100644
--- a/tinyasm/tinyasm.cpp
+++ b/tinyasm/tinyasm.cpp
@@ -63,31 +63,31 @@ class BlockJacobi {
         fwork = vector(biggestBlock, 0.);
         localmats_aij = NULL;
         dofis = vector(numBlocks);
-        PetscMalloc1(numBlocks, &localmats);
+        PetscCallVoid(PetscMalloc1(numBlocks, &localmats));
         for(int p=0; p& sf, const std::vector& bs, PetscSF *newsf) {
-    PetscInt i;
     auto n = sf.size();
     PetscFunctionBegin;
@@ -159,7 +158,7 @@ PetscErrorCode CreateCombinedSF(PC pc, const std::vector& sf, const std
     * allRoots: number of owned global dofs;
     * allLeaves: number of visible dofs (global + ghosted).
     */
-    for (i = 0; i < n; ++i) {
+    for (size_t i = 0; i < n; ++i) {
         PetscInt nroots, nleaves;
         PetscCall(PetscSFGetGraph(sf[i], &nroots, &nleaves, NULL, NULL));
@@ -170,7 +169,7 @@ PetscErrorCode CreateCombinedSF(PC pc, const std::vector& sf, const std
     PetscCall(PetscMalloc1(allLeaves, &iremote));
     // Now build an SF that just contains process connectivity.
     PetscCall(PetscHSetICreate(&ranksUniq));
-    for (i = 0; i < n; ++i) {
+    for (size_t i = 0; i < n; ++i) {
         const PetscMPIInt *ranks = NULL;
         PetscMPIInt nranks, j;
@@ -187,7 +186,7 @@ PetscErrorCode CreateCombinedSF(PC pc, const std::vector& sf, const std
     PetscCall(PetscHSetIGetElems(ranksUniq, &index, ranks));
     PetscCall(PetscHMapICreate(&rankToIndex));
-    for (i = 0; i < numRanks; ++i) {
+    for (PetscInt i = 0; i < numRanks; ++i) {
        remote[i].rank = ranks[i];
        remote[i].index = 0;
        PetscCall(PetscHMapISet(rankToIndex, ranks[i], i));
@@ -203,7 +202,7 @@ PetscErrorCode CreateCombinedSF(PC pc, const std::vector& sf, const std
     PetscCall(PetscMalloc1(n*numRanks, &remoteOffsets));
     offsets[0] = 0;
-    for (i = 1; i < n; ++i) {
+    for (size_t i = 1; i < n; ++i) {
         PetscInt nroots;
         PetscCall(PetscSFGetGraph(sf[i-1], &nroots, NULL, NULL, NULL));
@@ -211,8 +210,8 @@ PetscErrorCode CreateCombinedSF(PC pc, const std::vector& sf, const std
     }
     /* Offsets are the offsets on the current process of the
      * global dof numbering for the subspaces. */
-    PetscCall(MPI_Type_contiguous(n, MPIU_INT, &contig));
-    PetscCall(MPI_Type_commit(&contig));
+    PetscCallMPI(MPI_Type_contiguous(n, MPIU_INT, &contig));
+    PetscCallMPI(MPI_Type_commit(&contig));
 #if MY_PETSC_VERSION_LT(3, 14, 4)
     PetscCall(PetscSFBcastBegin(rankSF, contig, offsets, remoteOffsets));
@@ -221,14 +220,14 @@ PetscErrorCode CreateCombinedSF(PC pc, const std::vector& sf, const std
     PetscCall(PetscSFBcastBegin(rankSF, contig, offsets, remoteOffsets, MPI_REPLACE));
     PetscCall(PetscSFBcastEnd(rankSF, contig, offsets, remoteOffsets, MPI_REPLACE));
 #endif
-    PetscCall(MPI_Type_free(&contig));
+    PetscCallMPI(MPI_Type_free(&contig));
     PetscCall(PetscFree(offsets));
     PetscCall(PetscSFDestroy(&rankSF));
     /* Now remoteOffsets contains the offsets on the remote
      * processes who communicate with me. So now we can
      * concatenate the list of SFs into a single one.
      */
     index = 0;
-    for (i = 0; i < n; ++i) {
+    for (size_t i = 0; i < n; ++i) {
         const PetscSFNode *remote = NULL;
         const PetscInt *local = NULL;
         PetscInt nroots, nleaves, j;
@@ -256,7 +255,7 @@ PetscErrorCode CreateCombinedSF(PC pc, const std::vector& sf, const std
         PetscCall(PetscSFCreate(PetscObjectComm((PetscObject)pc), newsf));
         PetscCall(PetscSFSetGraph(*newsf, allRoots, allLeaves, ilocal, PETSC_OWN_POINTER, iremote, PETSC_OWN_POINTER));
     }
-    PetscFunctionReturn(0);
+    PetscFunctionReturn(PETSC_SUCCESS);
 }
@@ -266,7 +265,7 @@ PetscErrorCode PCSetup_TinyASM(PC pc) {
     auto blockjacobi = (BlockJacobi *)pc->data;
     blockjacobi -> updateValuesPerBlock(P);
     PetscCall(PetscLogEventEnd(PC_tinyasm_setup, pc, 0, 0, 0));
-    return 0;
+    PetscFunctionReturn(PETSC_SUCCESS);
 }
 
 PetscErrorCode PCApply_TinyASM(PC pc, Vec b, Vec x) {
@@ -295,13 +294,13 @@ PetscErrorCode PCApply_TinyASM(PC pc, Vec b, Vec x) {
     PetscCall(PetscSFReduceEnd(blockjacobi->sf, MPIU_SCALAR, &(blockjacobi->localx[0]), globalx, MPI_SUM));
     PetscCall(VecRestoreArray(x, &globalx));
     PetscCall(PetscLogEventEnd(PC_tinyasm_apply, pc, 0, 0, 0));
-    return 0;
+    PetscFunctionReturn(PETSC_SUCCESS);
 }
 
 PetscErrorCode PCDestroy_TinyASM(PC pc) {
     if(pc->data)
         delete (BlockJacobi *)pc->data;
-    return 0;
+    PetscFunctionReturn(PETSC_SUCCESS);
 }
 
 PetscErrorCode PCView_TinyASM(PC pc, PetscViewer viewer) {
@@ -320,7 +319,7 @@ PetscErrorCode PCView_TinyASM(PC pc, PetscViewer viewer) {
         PetscCall(PetscViewerASCIIPrintf(viewer, "Largest block size %" PetscInt_FMT " \n", biggestblock));
         PetscCall(PetscViewerASCIIPopTab(viewer));
     }
-    return 0;
+    PetscFunctionReturn(PETSC_SUCCESS);
 }
 
 PetscErrorCode PCCreate_TinyASM(PC pc) {
@@ -329,7 +328,7 @@ PetscErrorCode PCCreate_TinyASM(PC pc) {
     pc->ops->setup = PCSetup_TinyASM;
     pc->ops->destroy = PCDestroy_TinyASM;
     pc->ops->view = PCView_TinyASM;
-    return 0;
+    PetscFunctionReturn(PETSC_SUCCESS);
 }
 // pybind11 casters for PETSc/petsc4py objects, copied from dolfinx repo
 // Import petsc4py on demand
@@ -385,12 +384,15 @@ PYBIND11_MODULE(_tinyasm, m) {
     PetscLogEventRegister("PCTinyASMApply", PC_CLASSID, &PC_tinyasm_apply);
     m.def("SetASMLocalSubdomains", [](PC pc, std::vector ises, std::vector sfs, std::vector blocksizes, int localsize) {
-        PetscInt i, p, numDofs;
-        PetscCall(PetscLogEventBegin(PC_tinyasm_SetASMLocalSubdomains, pc, 0, 0, 0));
+        PetscInt p, numDofs;
+
+        MPI_Comm comm = PetscObjectComm((PetscObject) pc);
+
+        PetscCallAbort(comm, PetscLogEventBegin(PC_tinyasm_SetASMLocalSubdomains, pc, 0, 0, 0));
         auto P = pc->pmat;
         ISLocalToGlobalMapping lgr;
         ISLocalToGlobalMapping lgc;
-        MatGetLocalToGlobalMapping(P, &lgr, &lgc);
+        PetscCallAbort(comm, MatGetLocalToGlobalMapping(P, &lgr, &lgc));
         int numBlocks = ises.size();
         vector> dofsPerBlock(numBlocks);
@@ -398,27 +400,27 @@ PYBIND11_MODULE(_tinyasm, m) {
         const PetscInt* isarray;
         for (p = 0; p < numBlocks; p++) {
-            PetscCall(ISGetSize(ises[p], &numDofs));
-            PetscCall(ISGetIndices(ises[p], &isarray));
+            PetscCallAbort(comm, ISGetSize(ises[p], &numDofs));
+            PetscCallAbort(comm, ISGetIndices(ises[p], &isarray));
             dofsPerBlock[p] = vector();
             dofsPerBlock[p].reserve(numDofs);
             globalDofsPerBlock[p] = vector(numDofs, 0);
-            for (i = 0; i < numDofs; i++) {
+            for (PetscInt i = 0; i < numDofs; i++) {
                 dofsPerBlock[p].push_back(isarray[i]);
             }
-            PetscCall(ISRestoreIndices(ises[p], &isarray));
-            ISLocalToGlobalMappingApply(lgr, numDofs, &dofsPerBlock[p][0], &globalDofsPerBlock[p][0]);
+            PetscCallAbort(comm, ISRestoreIndices(ises[p], &isarray));
+            PetscCallAbort(comm, ISLocalToGlobalMappingApply(lgr, numDofs, &dofsPerBlock[p][0], &globalDofsPerBlock[p][0]));
         }
         DM dm;
-        PetscCall(PCGetDM(pc, &dm));
+        PetscCallAbort(comm, PCGetDM(pc, &dm));
         PetscSF newsf;
-        PetscCall(CreateCombinedSF(pc, sfs, blocksizes, &newsf));
+        PetscCallAbort(comm, CreateCombinedSF(pc, sfs, blocksizes, &newsf));
         auto blockjacobi = new BlockJacobi(dofsPerBlock, globalDofsPerBlock, localsize, newsf);
         pc->data = (void*)blockjacobi;
-        PetscCall(PetscLogEventEnd(PC_tinyasm_SetASMLocalSubdomains, pc, 0, 0, 0));
+        PetscCallAbort(comm, PetscLogEventEnd(PC_tinyasm_SetASMLocalSubdomains, pc, 0, 0, 0));
         return 0;
     });
 }
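
Not part of the diff: a minimal sketch of the error-handling conventions the changes above adopt once PETSc is configured with --with-strict-petscerrorcode. The helper names MySetup and MyVoidCaller are hypothetical; the macros (PetscCall, PetscCallMPI, PetscCallVoid, PetscCallAbort, PetscFunctionReturn) are the standard PETSc ones used in tinyasm.cpp and matinvert.cpp.

#include <petscsys.h>

/* Hypothetical helper returning PetscErrorCode: PetscCall()/PetscCallMPI()
 * propagate failures, and the function ends with
 * PetscFunctionReturn(PETSC_SUCCESS) rather than a bare "return 0;". */
static PetscErrorCode MySetup(MPI_Comm comm, PetscInt n)
{
  PetscInt *work;

  PetscFunctionBegin;
  PetscCall(PetscMalloc1(n, &work));
  PetscCallMPI(MPI_Barrier(comm));   /* MPI calls go through PetscCallMPI() */
  PetscCall(PetscFree(work));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Hypothetical caller that cannot return a PetscErrorCode (analogous to the
 * pybind11 lambda or the BlockJacobi constructor above): PetscCallAbort()
 * aborts on the given communicator instead of propagating the error, while
 * PetscCallVoid() is the variant used inside void functions. */
static void MyVoidCaller(MPI_Comm comm)
{
  PetscFunctionBeginUser;
  PetscCallAbort(comm, MySetup(comm, 10));
  PetscFunctionReturnVoid();
}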