Commit
Fix a recently introduced bug when combining p-multigrid + Hypre.
raback committed Nov 20, 2024
1 parent 5a6d591 commit 90edb60
Showing 3 changed files with 19 additions and 8 deletions.
14 changes: 13 additions & 1 deletion fem/src/Multigrid.F90
@@ -918,6 +918,8 @@ END SUBROUTINE BlockSolveExt
!------------------------------------------------------------------------------
tt = CPUTime()

CALL Info('PMGSolve','Solving multigrid Level '//I2S(Level),Level=20)

!
! Initialize:
! -----------
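
The Info call added above is emitted at verbosity level 20, so it stays silent in normal runs. A minimal SIF sketch to surface it, assuming Elmer's usual convention that a message prints only when its Level does not exceed Max Output Level:

    Simulation
      ! Raise verbosity so the Level=20 per-level PMG message is printed
      Max Output Level = 20
    End

This matches the test change further below, where Max Output Level drops from 25 back to 5 to keep the test log quiet.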
@@ -936,7 +938,6 @@ END SUBROUTINE BlockSolveExt
! If at lowest level, solve directly:
! -----------------------------------
IF ( Level <= 1 ) THEN

CALL ListPushNamespace('mglowest:')

CALL ListAddLogical( Params,'mglowest: Linear System Free Factorization', .FALSE. )
@@ -955,10 +956,21 @@ END SUBROUTINE BlockSolveExt
IF ( LIter ) LowestSolver='iterative'
END IF

! This fixes an issue that comes from having a different calling convention for
! serial Hypre.
IF(.NOT. Parallel) THEN
IF(ListGetLogical(Params, 'Linear System Use Hypre', Found) &
.AND. LowestSolver == 'iterative') LowestSolver = 'hypre'
END IF

CALL Info('PMGSolve','Starting lowest linear solver using: '//TRIM(LowestSolver),Level=10 )

SELECT CASE(LowestSolver)

! This one only for serial!
CASE('hypre')
CALL SolveHypre( Matrix1, Solution, ForceVector, Solver )

CASE('block')
CALL BlockSolveExt( Matrix1, Solution, ForceVector, Solver )

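For context, the check added above means a serial run reroutes an 'iterative' lowest-level solve to SolveHypre whenever 'Linear System Use Hypre' is set. A sketch of the corresponding lowest-level keywords, assembled from keywords visible in this commit; the mglowest: prefix assumes the keyword is read inside the namespace pushed earlier, and the surrounding solver section is abbreviated:

    Solver 1
      ! Lowest p-multigrid level delegated to serial Hypre (sketch)
      mglowest: Linear System Use Hypre = Logical True
      mglowest: Linear System Convergence Tolerance = 1.0E-03
      mglowest: Linear System Preconditioning = "none"
    End
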
10 changes: 4 additions & 6 deletions fem/src/SolverUtils.F90
@@ -14844,27 +14844,25 @@ END SUBROUTINE BlockSolveExt
Method = 'rocalution'

IF ( .NOT. Parallel ) THEN
CALL Info(Caller,'Serial linear System Solver: '//TRIM(Method),Level=8)

IF(ListGetLogical(Params, 'Linear System Use Hypre', Found)) Method = 'hypre'


CALL Info(Caller,'Serial linear System Solver: '//TRIM(Method),Level=8)

SELECT CASE(Method)
CASE('multigrid')
CALL MultiGridSolve( A, x, b, &
DOFs, Solver, Solver % MultiGridLevel )
CASE('iterative')
CALL IterSolver( A, x, b, Solver )
CASE('feti')
CALL Fatal(Caller, &
'Feti solver available only in parallel.')
CALL Fatal(Caller,'Feti solver available only in parallel.')
CASE('block')
CALL BlockSolveExt( A, x, b, Solver )
CASE('amgx')
CALL AMGXSolver( A, x, b, Solver )
CASE('rocalution')
CALL ROCSolver( A, x, b, Solver )
CASE('hypre')
! CALL HypreSolver( A, x, b, Solver )
CALL SolveHypre( A, x, b, Solver )
CASE('direct')
CALL DirectSolver( A, x, b, Solver )
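The same serial override appears in this generic dispatch: with 'Linear System Use Hypre' set, a nominally iterative serial solve now reaches the 'hypre' branch instead of IterSolver. A minimal sketch of the keyword pair (other solver keywords omitted; the section layout is hypothetical):

    Solver 1
      Linear System Solver = "Iterative"
      ! In serial this now selects SolveHypre via the branch above (sketch)
      Linear System Use Hypre = Logical True
    End
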
3 changes: 2 additions & 1 deletion fem/tests/ElastPelem2dPmultgBoomer/case.sif
@@ -16,7 +16,7 @@ Header
End

Simulation
Max Output Level = 25
Max Output Level = 5
Coordinate System = Cartesian
Coordinate Mapping(3) = 1 2 3
Simulation Type = Steady state
@@ -65,6 +65,7 @@ Solver 1
mglowest: Linear System Convergence Tolerance = 1.0E-03
mglowest: Linear System Abort Not Converged = False
mglowest: Linear System Residual Output = 20
mglowest: Linear System Preconditioning = "none"

! BoomerAMG specific keywords
BoomerAMG Relax Type = Integer 3
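With the added line, the lowest-level settings of this test read roughly as follows (assembled from the lines shown above; remaining keywords omitted). Setting the Elmer-side preconditioner to "none" presumably leaves preconditioning to BoomerAMG inside Hypre, though that reading is an assumption:

    mglowest: Linear System Convergence Tolerance = 1.0E-03
    mglowest: Linear System Abort Not Converged = False
    mglowest: Linear System Residual Output = 20
    mglowest: Linear System Preconditioning = "none"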
