diff --git a/Project.toml b/Project.toml
index 28462da..e9b4a2c 100644
--- a/Project.toml
+++ b/Project.toml
@@ -1,7 +1,7 @@
 name = "GridapPETSc"
 uuid = "bcdc36c2-0c3e-11ea-095a-c9dadae499f1"
 authors = ["Francesc Verdugo ", "Víctor Sande ", "Alberto F. Martin "]
-version = "0.5.2"
+version = "0.5.3"
 
 [deps]
 Gridap = "56d4f2e9-7ea1-5844-9cf6-b9c51ca7ce8e"
diff --git a/README.md b/README.md
index f167e8d..56a3873 100644
--- a/README.md
+++ b/README.md
@@ -17,10 +17,10 @@ It can also be used in the serial case, as shown in this [test](https://github.c
 
 `GridapPETSc` julia package requires the `PETSC` library ([Portable, Extensible Toolkit for Scientific Computation](https://www.mcs.anl.gov/petsc/)) and `MPI` to work correctly. You have two main options to install these dependencies.
 
-- **Do nothing [recommended in most cases].** Use the default precompiled `MPI` installation provided by [`MPI.jl`](https://github.com/JuliaParallel/MPI.jl) and the pre-compiled `PETSc` library provided by [`PETSc_jll`](https://github.com/JuliaBinaryWrappers/PETSc_jll.jl). This will happen under the hood when you install `GridapPETSc`. You can also force the installation of these default dependencies by setting the environment variables `JULIA_MPI_BINARY` and `JULIA_PETSC_LIBRARY` to empty values.
+- **Do nothing [recommended in most cases].** Use the default precompiled `MPI` installation provided by [`MPI.jl`](https://github.com/JuliaParallel/MPI.jl) and the pre-compiled `PETSc` library provided by [`PETSc_jll`](https://github.com/JuliaBinaryWrappers/PETSc_jll.jl). This will happen under the hood when you install `GridapPETSc`. In the case of `GridapPETSc`, you can also force the installation of these default dependencies by setting the environment variable `JULIA_PETSC_LIBRARY` to an empty value.
 
 - **Choose a specific installation of `MPI` and `PETSc` available in the system [recommended in HPC clusters]**.
 
-  - First, choose a `MPI` installation. See the documentation of [`MPI.jl`](https://github.com/JuliaParallel/MPI.jl) for further details. An easy way to achieve this is to create the environment variable `JULIA_MPI_BINARY` containing the path to the `MPI` binary.
+  - First, choose a `MPI` installation. See the documentation of [`MPI.jl`](https://github.com/JuliaParallel/MPI.jl) for further details.
 
   - Second, choose a `PETSc` installation. To this end, create an environment variable `JULIA_PETSC_LIBRARY` containing the path to the dynamic library object of the `PETSC` installation (i.e., the `.so` file in linux systems). **Very important: The chosen `PETSc` library needs to be configured with the `MPI` installation considered in the previous step**.
diff --git a/src/GridapPETSc.jl b/src/GridapPETSc.jl
index defef13..a3a409e 100644
--- a/src/GridapPETSc.jl
+++ b/src/GridapPETSc.jl
@@ -69,10 +69,10 @@ end
 include("PETSC.jl")
 
 using GridapPETSc.PETSC: @check_error_code
-using GridapPETSc.PETSC: PetscBool, PetscInt, PetscScalar, Vec, Mat, KSP, PC, SNES
+using GridapPETSc.PETSC: PetscBool, PetscInt, PetscReal, PetscScalar, Vec, Mat, KSP, PC, SNES
 #export PETSC
 export @check_error_code
-export PetscBool, PetscInt, PetscScalar, Vec, Mat, KSP, PC
+export PetscBool, PetscInt, PetscReal, PetscScalar, Vec, Mat, KSP, PC, SNES
 
 include("Environment.jl")
diff --git a/src/PETSC.jl b/src/PETSC.jl
index 7c2ca30..a08a577 100644
--- a/src/PETSC.jl
+++ b/src/PETSC.jl
@@ -689,6 +689,32 @@ const SNESASPIN = "aspin"
 const SNESCOMPOSITE = "composite"
 const SNESPATCH = "patch"
 
+"""
+Julia alias to `SNESConvergedReason` C enum.
+
+See [PETSc manual](https://petsc.org/release/manualpages/SNES/SNESConvergedReason/).
+"""
+@enum SNESConvergedReason begin
+  SNES_CONVERGED_FNORM_ABS = 2        # ||F|| < atol
+  SNES_CONVERGED_FNORM_RELATIVE = 3   # ||F|| < rtol*||F_initial||
+  SNES_CONVERGED_SNORM_RELATIVE = 4   # Newton computed step size small; || delta x || < stol || x ||
+  SNES_CONVERGED_ITS = 5              # maximum iterations reached
+  SNES_BREAKOUT_INNER_ITER = 6        # Flag to break out of inner loop after checking custom convergence.
+                                      # It is used in multi-phase flow when state changes diverged
+  SNES_DIVERGED_FUNCTION_DOMAIN = -1  # the new x location passed to the function is not in the domain of F
+  SNES_DIVERGED_FUNCTION_COUNT = -2
+  SNES_DIVERGED_LINEAR_SOLVE = -3     # the linear solve failed
+  SNES_DIVERGED_FNORM_NAN = -4
+  SNES_DIVERGED_MAX_IT = -5
+  SNES_DIVERGED_LINE_SEARCH = -6      # the line search failed
+  SNES_DIVERGED_INNER = -7            # inner solve failed
+  SNES_DIVERGED_LOCAL_MIN = -8        # || J^T b || is small, implies converged to local minimum of F()
+  SNES_DIVERGED_DTOL = -9             # || F || > divtol*||F_initial||
+  SNES_DIVERGED_JACOBIAN_DOMAIN = -10 # Jacobian calculation does not make sense
+  SNES_DIVERGED_TR_DELTA = -11
+
+  SNES_CONVERGED_ITERATING = 0
+end
 
 @wrapper(:SNESCreate,PetscErrorCode,(MPI.Comm,Ptr{SNES}),(comm,snes),"https://petsc.org/release/docs/manualpages/SNES/SNESCreate.html")
 @wrapper(:SNESSetFunction,PetscErrorCode,(SNES,Vec,Ptr{Cvoid},Ptr{Cvoid}),(snes,vec,fptr,ctx),"https://petsc.org/release/docs/manualpages/SNES/SNESSetFunction.html")
@@ -704,6 +730,9 @@ const SNESPATCH = "patch"
 @wrapper(:SNESSetCountersReset,PetscErrorCode,(SNES,PetscBool),(snes,reset),"https://petsc.org/release/docs/manualpages/SNES/SNESSetCountersReset.html")
 @wrapper(:SNESGetNumberFunctionEvals,PetscErrorCode,(SNES,Ptr{PetscInt}),(snes,nfuncs),"https://petsc.org/release/docs/manualpages/SNES/SNESGetNumberFunctionEvals.html")
 @wrapper(:SNESGetLinearSolveFailures,PetscErrorCode,(SNES,Ptr{PetscInt}),(snes,nfails),"https://petsc.org/release/docs/manualpages/SNES/SNESGetLinearSolveFailures.html")
+@wrapper(:SNESSetConvergenceTest,PetscErrorCode,(SNES,Ptr{Cvoid},Ptr{Cvoid},Ptr{Cvoid}),(snes,convtest,cctx,destroy),"https://petsc.org/release/manualpages/SNES/SNESSetConvergenceTest/")
+@wrapper(:SNESConvergedDefault,PetscErrorCode,(SNES,PetscInt,PetscReal,PetscReal,PetscReal,Ptr{SNESConvergedReason},Ptr{Cvoid}),(snes,it,xnorm,gnorm,f,reason,user),"https://petsc.org/release/manualpages/SNES/SNESConvergedDefault/")
+
 
 # Garbage collection of PETSc objects
 @wrapper(:PetscObjectRegisterDestroy,PetscErrorCode,(Ptr{Cvoid},),(obj,),"https://petsc.org/release/docs/manualpages/Sys/PetscObjectRegisterDestroy.html")
diff --git a/test/PLaplacianTests.jl b/test/PLaplacianTests.jl
index bcc84cb..48ba984 100644
--- a/test/PLaplacianTests.jl
+++ b/test/PLaplacianTests.jl
@@ -8,11 +8,27 @@ using Test
 
 using SparseMatricesCSR
 
+function snes_convergence_test(snes::SNES,
+                               it::PetscInt,
+                               xnorm::PetscReal,
+                               gnorm::PetscReal,
+                               f::PetscReal,
+                               reason::Ptr{PETSC.SNESConvergedReason},
+                               user::Ptr{Cvoid})::PetscInt
+  PETSC.SNESConvergedDefault(snes, it, xnorm, gnorm, f, reason, user)
+end
+
 function mysnessetup(snes)
   ksp = Ref{GridapPETSc.PETSC.KSP}()
   pc = Ref{GridapPETSc.PETSC.PC}()
   mumpsmat = Ref{GridapPETSc.PETSC.Mat}()
+
+  fconvptr = @cfunction($snes_convergence_test,
+                        PetscInt,
+                        (SNES, PetscInt, PetscReal, PetscReal, PetscReal, Ptr{PETSC.SNESConvergedReason}, Ptr{Cvoid}))
+
   @check_error_code GridapPETSc.PETSC.SNESSetFromOptions(snes[])
+  @check_error_code GridapPETSc.PETSC.SNESSetConvergenceTest(snes[],fconvptr,C_NULL,C_NULL)
   @check_error_code GridapPETSc.PETSC.SNESGetKSP(snes[],ksp)
   #@check_error_code GridapPETSc.PETSC.KSPView(ksp[],C_NULL)
   @check_error_code GridapPETSc.PETSC.KSPSetType(ksp[],GridapPETSc.PETSC.KSPPREONLY)