Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
41 commits
Select commit Hold shift + click to select a range
f4545ae
Change variable name
amrueda Nov 12, 2025
5782fbb
GetSFCElementsPartition does not need mesh anymore
amrueda Nov 12, 2025
1135496
Saving some storage in ConstructMeshPartition_FromHDF5File_ if partit…
amrueda Nov 17, 2025
d9b2380
Test if METIS works
amrueda Nov 17, 2025
342618a
Test both METIS and SFC
amrueda Nov 17, 2025
9464308
Try old strategy for sides
amrueda Nov 17, 2025
9a4e586
Some modifications for parallel read-in of HDF5 files
amrueda Nov 18, 2025
a3d508b
Minor adjustment
amrueda Nov 18, 2025
93ee083
Individual file_IDs for individual openings of hdf5 file
amrueda Nov 18, 2025
0bca70c
Single h5open_f and h5close_f (only for NS)
amrueda Nov 18, 2025
f230a5e
Enabled serial AND parallel HDF5 read-ins
amrueda Nov 18, 2025
9e024e5
Fixed offset problem
amrueda Nov 18, 2025
9540294
Changed order of BCs for HDF5 files (now working in parallel)
amrueda Nov 18, 2025
cf1a158
Add h5open_f and h5close_f calls for all other solvers
amrueda Nov 19, 2025
0dd8f30
Update CI to use parallel HDF5 with OpenMPI
loganoz Nov 19, 2025
4dfabbe
What if we install OpenMPI first and then HDF5?
amrueda Nov 19, 2025
fbb5f25
Try using hdf5 compilers
amrueda Nov 19, 2025
352aeb6
Format
amrueda Nov 19, 2025
ccac9f0
Merge branch 'develop' into parallel_readin_hopr
amrueda Nov 19, 2025
3fa9750
Revert "Try using hdf5 compilers"
amrueda Nov 19, 2025
145bb40
Try compiling hdf5 for CI actions
amrueda Nov 20, 2025
4517fe9
Fixed typo
amrueda Nov 20, 2025
f78637d
Fixes for HDF5 compilation
amrueda Nov 20, 2025
cd7e9df
More fixes
amrueda Nov 20, 2025
78eec2d
CI actions: Updating LD_LIBRARY_PATH after installing HDF5
amrueda Nov 20, 2025
464b671
CI actions: attempt 2 of setting LD_LIBRARY_PATH after installing HDF5
amrueda Nov 20, 2025
fc719d8
CI actions: attempt 3 to link the compiled HDF5
amrueda Nov 20, 2025
31a64a9
Fixed typo
amrueda Nov 20, 2025
d4ee9e5
Compiling with MODE=DEBUG temporarily to debug CI fails
amrueda Nov 20, 2025
2d06390
Revert "Compiling with MODE=DEBUG temporarily to debug CI fails"
amrueda Nov 21, 2025
165e981
Merge branch 'develop' into parallel_readin_hopr
amrueda Dec 17, 2025
170197d
Merge branch 'develop' into parallel_readin_hopr
amrueda Jan 20, 2026
6bb8d52
CI: Explicitly request OpenMPI-GNU compilers for parallel hdf5 build
amrueda Jan 20, 2026
35a1057
Test if CI is using wrong MPI library
amrueda Jan 20, 2026
a1157a3
Print compiler info for debugging
amrueda Jan 20, 2026
08fd267
More debugging lines and a possible fix
amrueda Jan 20, 2026
a4cc418
CI: Disable Intel oneAPI for parallel NS tests
amrueda Jan 20, 2026
67e1105
CI: Use oversubscribe to run with 3 processes even if github actions …
amrueda Jan 20, 2026
f579660
Updated reference for CI
amrueda Jan 20, 2026
5cead57
Merge branch 'develop' into parallel_readin_hopr
amrueda Jan 27, 2026
94870c4
Merge branch 'develop' into parallel_readin_hopr
amrueda Jan 27, 2026
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
412 changes: 225 additions & 187 deletions .github/workflows/CI_parallel_NS.yml

Large diffs are not rendered by default.

11 changes: 11 additions & 0 deletions Solver/src/AcousticSolver/main.f90
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,9 @@ PROGRAM HORSES3DMainCAA
#ifdef _HAS_MPI_
use mpi
#endif
#ifdef HAS_HDF5
use HDF5
#endif

IMPLICIT NONE

Expand Down Expand Up @@ -53,7 +56,12 @@ PROGRAM HORSES3DMainCAA
! Initializations
! ---------------
!
! Initialize MPI
call MPI_Process % Init
! Initialize HDF5 predefined datatypes
#ifdef HAS_HDF5
call h5open_f(ierr)
#endif
call CheckIfTheVersionIsRequested
!
! ----------------------------------------------------------------------------------
Expand Down Expand Up @@ -169,6 +177,9 @@ PROGRAM HORSES3DMainCAA

CALL UserDefinedTermination

#ifdef HAS_HDF5
call h5close_f(ierr)
#endif
call MPI_Process % Close

END PROGRAM HORSES3DMainCAA
Expand Down
11 changes: 11 additions & 0 deletions Solver/src/CahnHilliardSolver/main.f90
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,9 @@ PROGRAM HORSES3DMainCH
#ifdef _HAS_MPI_
use mpi
#endif
#ifdef HAS_HDF5
use HDF5
#endif

IMPLICIT NONE
TYPE( FTValueDictionary) :: controlVariables
Expand Down Expand Up @@ -53,7 +56,12 @@ PROGRAM HORSES3DMainCH
! Initializations
! ---------------
!
! Initialize MPI
call MPI_Process % Init
! Initialize HDF5 predefined datatypes
#ifdef HAS_HDF5
call h5open_f(ierr)
#endif
call CheckIfTheVersionIsRequested
!
! ----------------------------------------------------------------------------------
Expand Down Expand Up @@ -175,6 +183,9 @@ PROGRAM HORSES3DMainCH

CALL UserDefinedTermination

#ifdef HAS_HDF5
call h5close_f(ierr)
#endif
call MPI_Process % Close

END PROGRAM HORSES3DMainCH
Expand Down
12 changes: 12 additions & 0 deletions Solver/src/IncompressibleNSSolver/main.f90
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,10 @@ PROGRAM HORSES3DMainiNS
#ifdef _HAS_MPI_
use mpi
#endif
#ifdef HAS_HDF5
use HDF5
#endif

#ifdef _OPENACC
use cudafor
use openacc
Expand Down Expand Up @@ -55,7 +59,12 @@ PROGRAM HORSES3DMainiNS
! Initializations
! ---------------
!
! Initialize MPI
call MPI_Process % Init
! Initialize HDF5 predefined datatypes
#ifdef HAS_HDF5
call h5open_f(ierr)
#endif
call CheckIfTheVersionIsRequested
!
! ----------------------------------------------------------------------------------
Expand Down Expand Up @@ -198,6 +207,9 @@ PROGRAM HORSES3DMainiNS

CALL UserDefinedTermination

#ifdef HAS_HDF5
call h5close_f(ierr)
#endif
call MPI_Process % Close

END PROGRAM HORSES3DMainiNS
Expand Down
11 changes: 11 additions & 0 deletions Solver/src/MultiphaseSolver/main.f90
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,9 @@ PROGRAM HORSES3DMainMU
#ifdef _HAS_MPI_
use mpi
#endif
#ifdef HAS_HDF5
use HDF5
#endif
#ifdef _OPENACC
use cudafor
use openacc
Expand Down Expand Up @@ -56,7 +59,12 @@ PROGRAM HORSES3DMainMU
! Initializations
! ---------------
!
! Initialize MPI
call MPI_Process % Init
! Initialize HDF5 predefined datatypes
#ifdef HAS_HDF5
call h5open_f(ierr)
#endif
call CheckIfTheVersionIsRequested
!
! ----------------------------------------------------------------------------------
Expand Down Expand Up @@ -187,6 +195,9 @@ PROGRAM HORSES3DMainMU

CALL UserDefinedTermination

#ifdef HAS_HDF5
call h5close_f(ierr)
#endif
call MPI_Process % Close

END PROGRAM HORSES3DMainMU
Expand Down
12 changes: 12 additions & 0 deletions Solver/src/NavierStokesSolver/main.f90
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,9 @@ PROGRAM HORSES3DMainNS
#ifdef _HAS_MPI_
use mpi
#endif
#ifdef HAS_HDF5
use HDF5
#endif
#ifdef _OPENACC
use cudafor
use openacc
Expand Down Expand Up @@ -58,7 +61,13 @@ PROGRAM HORSES3DMainNS
! Initializations
! ---------------
!
! Initialize MPI
call MPI_Process % Init
! Initialize HDF5 predefined datatypes
#ifdef HAS_HDF5
call h5open_f(ierr)
#endif

call CheckIfTheVersionIsRequested
!
! ----------------------------------------------------------------------------------
Expand Down Expand Up @@ -214,6 +223,9 @@ PROGRAM HORSES3DMainNS

CALL UserDefinedTermination

#ifdef HAS_HDF5
call h5close_f(ierr)
#endif
call MPI_Process % Close

END PROGRAM HORSES3DMainNS
Expand Down
11 changes: 11 additions & 0 deletions Solver/src/NavierStokesSolverRANS/main.f90
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,9 @@ PROGRAM HORSES3DMainNSSA
#ifdef _HAS_MPI_
use mpi
#endif
#ifdef HAS_HDF5
use HDF5
#endif

IMPLICIT NONE

Expand Down Expand Up @@ -54,7 +57,12 @@ PROGRAM HORSES3DMainNSSA
! Initializations
! ---------------
!
! Initialize MPI
call MPI_Process % Init
! Initialize HDF5 predefined datatypes
#ifdef HAS_HDF5
call h5open_f(ierr)
#endif
call CheckIfTheVersionIsRequested
!
! ----------------------------------------------------------------------------------
Expand Down Expand Up @@ -190,6 +198,9 @@ PROGRAM HORSES3DMainNSSA

CALL UserDefinedTermination

#ifdef HAS_HDF5
call h5close_f(ierr)
#endif
call MPI_Process % Close

END PROGRAM HORSES3DMainNSSA
Expand Down
2 changes: 1 addition & 1 deletion Solver/src/libs/discretization/DGSEMClass.f90
Original file line number Diff line number Diff line change
Expand Up @@ -265,7 +265,7 @@ SUBROUTINE ConstructDGSem( self, meshFileName_, controlVariables, &
!
! Perform the partitioning
! ------------------------
call PerformMeshPartitioning (self % mesh, MPI_Process % nProcs, mpi_allPartitions, useWeightsPartition)
call PerformMeshPartitioning (self % mesh, nTotalElem, MPI_Process % nProcs, mpi_allPartitions, useWeightsPartition, Nx, Ny, Nz)
!
! Send the partitions
! -------------------
Expand Down
34 changes: 19 additions & 15 deletions Solver/src/libs/mesh/MeshPartitioning.f90
Original file line number Diff line number Diff line change
Expand Up @@ -10,19 +10,21 @@ module MeshPartitioning
public PerformMeshPartitioning

contains
subroutine PerformMeshPartitioning(mesh, no_of_domains, partitions, useWeights)
subroutine PerformMeshPartitioning(mesh, no_of_allElements, no_of_domains, partitions, useWeights, Nx, Ny, Nz)
implicit none
type(HexMesh), intent(in) :: mesh
integer, intent(in) :: no_of_allElements
integer, intent(in) :: no_of_domains
type(PartitionedMesh_t) :: partitions(no_of_domains)
logical, intent(in) :: useWeights
integer, intent(in) :: Nx(no_of_allElements), Ny(no_of_allElements), Nz(no_of_allElements)
!
! ---------------
! Local variables
! ---------------
!
integer :: fID, domain
integer :: elementsDomain(mesh % no_of_elements)
integer :: elementsDomain(no_of_allElements)
!
! Initialize partitions
! ---------------------
Expand All @@ -32,7 +34,7 @@ subroutine PerformMeshPartitioning(mesh, no_of_domains, partitions, useWeights)
!
! Get each domain elements and nodes
! ----------------------------------
call GetElementsDomain(mesh, no_of_domains, elementsDomain, partitions, useWeights)
call GetElementsDomain(mesh, no_of_allElements, no_of_domains, elementsDomain, partitions, useWeights, Nx, Ny, Nz)
!
! Get the partition boundary faces
! --------------------------------
Expand All @@ -44,15 +46,17 @@ subroutine PerformMeshPartitioning(mesh, no_of_domains, partitions, useWeights)
call WritePartitionsFile(mesh, elementsDomain)
end subroutine PerformMeshPartitioning

subroutine GetElementsDomain(mesh, no_of_domains, elementsDomain, partitions, useWeights)
subroutine GetElementsDomain(mesh, no_of_allElements, no_of_domains, elementsDomain, partitions, useWeights, Nx, Ny, Nz)
use IntegerDataLinkedList
use MPI_Process_Info
implicit none
type(HexMesh), intent(in) :: mesh
integer, intent(in) :: no_of_allElements
integer, intent(in) :: no_of_domains
integer, intent(out) :: elementsDomain(mesh % no_of_elements)
type(PartitionedMesh_t), intent(inout) :: partitions(no_of_domains)
logical, intent(in) :: useWeights
integer, intent(in) :: Nx(no_of_allElements), Ny(no_of_allElements), Nz(no_of_allElements)
!
! ---------------
! Local variables
Expand All @@ -72,7 +76,7 @@ subroutine GetElementsDomain(mesh, no_of_domains, elementsDomain, partitions, us
! Space-filling curve partitioning
! --------------------------------
case (SFC_PARTITIONING)
call GetSFCElementsPartition(mesh, no_of_domains, mesh % no_of_elements, elementsDomain, useWeights=useWeights)
call GetSFCElementsPartition(no_of_domains, no_of_allElements, elementsDomain, useWeights, Nx, Ny, Nz)
!
! METIS partitioning
! ------------------
Expand Down Expand Up @@ -305,14 +309,14 @@ end subroutine GetPartitionBoundaryFaces
! --------------------------------
! Space-filling curve partitioning
! --------------------------------
subroutine GetSFCElementsPartition(mesh, no_of_domains, nelem, elementsDomain, useWeights)
subroutine GetSFCElementsPartition(no_of_domains, no_of_allElements, elementsDomain, useWeights, Nx, Ny, Nz)
implicit none
!-arguments--------------------------------------------------
type(HexMesh), intent(in) :: mesh
integer, intent(in) :: no_of_domains
integer, intent(in) :: nelem
integer, intent(inout) :: elementsDomain(nelem)
integer, intent(in) :: no_of_allElements
integer, intent(inout) :: elementsDomain(no_of_allElements)
logical, intent(in) :: useWeights
integer, intent(in) :: Nx(no_of_allElements), Ny(no_of_allElements), Nz(no_of_allElements)
!-local-variables--------------------------------------------
integer :: elems_per_domain(no_of_domains)
integer :: biggerdomains
Expand All @@ -324,9 +328,9 @@ subroutine GetSFCElementsPartition(mesh, no_of_domains, nelem, elementsDomain, u
!------------------------------------------------------------

if (useWeights) then
allocate(weights(nelem))
do ielem=1,nelem
weights(ielem) = product(mesh % elements(ielem) % Nxyz + 1)
allocate(weights(no_of_allElements))
do ielem=1,no_of_allElements
weights(ielem) = (Nx(ielem) + 1) * (Ny(ielem) + 1) * (Nz(ielem) + 1)
end do
if (maxval(weights) .eq. minval(weights)) then
neddWeights = .false.
Expand All @@ -337,8 +341,8 @@ subroutine GetSFCElementsPartition(mesh, no_of_domains, nelem, elementsDomain, u
endif
end if

elems_per_domain = nelem / no_of_domains
biggerdomains = mod(nelem,no_of_domains)
elems_per_domain = no_of_allElements / no_of_domains
biggerdomains = mod(no_of_allElements,no_of_domains)
elems_per_domain(1:biggerdomains) = elems_per_domain(1:biggerdomains) + 1

first = 1
Expand All @@ -359,7 +363,7 @@ subroutine GetSFCElementsPartition(mesh, no_of_domains, nelem, elementsDomain, u
do domain = 1, no_of_domains-1
if (start_index(domain) .ge. start_index(domain+1)) start_index(domain+1) = start_index(domain) + 1
dof_in_domain = sum(weights(start_index(domain):start_index(domain+1)))
do ielem=1,nelem
do ielem=1,no_of_allElements
if (dof_in_domain .lt. max_dof) then
start_index(domain+1) = start_index(domain+1) + 1
dof_in_domain = sum(weights(start_index(domain):start_index(domain+1)))
Expand Down
Loading