WIP: updated settings for PETSc / MPI for Bouss codes #631

Open · wants to merge 2 commits into base: master
32 changes: 14 additions & 18 deletions examples/bouss/petscMPIoptions
@@ -10,33 +10,29 @@
# https://petsc.org/release/manualpages/PC/PCSetFromOptions/
Comment from the Member Author:
@BarrySmith: Is there also a PETSc webpage we should mention that discusses the mpi_* options? The mpi_ksp_* options are not discussed on the page https://petsc.org/release/manualpages/KSP/KSPSetFromOptions, for example.


# set min numbers of matrix rows per MPI rank (default is 10000)
-pc_mpi_minimum_count_per_rank 5000
-mpi_linear_solver_server_minimum_count_per_rank 5000
-mpi_linear_solver_server
-mpi_linear_solver_server_view
-mpi_linear_solver_server_use_shared_memory false

# convergence criterion for linear solver (larger than PETSc default):
-mpi_ksp_rtol 1.e-9
# Krylov linear solver:
-ksp_type gmres
-ksp_max_it 200
-ksp_reuse_preconditioner

# linear solver:
-mpi_linear_solver_server
-ksp_type preonly
-mpi_ksp_type gmres
-mpi_ksp_max_it 200
-mpi_ksp_reuse_preconditioner
# convergence criterion for linear solver (larger than PETSc default):
-ksp_rtol 1.e-9

# preconditioner:
-pc_type mpi
-mpi_pc_type gamg
-mpi_pc_gamg_symmetrize_graph true
-mpi_pc_gamg_sym_graph true
-mpi_linear_solver_server_view
-pc_type gamg


# debug options:
#-fp_trap off
#-log_view
#-start_in_debugger -debugger_rank 0
#-mpi_ksp_view_mat binary
#-mpi_ksp_view_rhs binary
#-mpi_ksp_monitor_true_residual
#-mpi_ksp_monitor
#-ksp_monitor
#-ksp_view
#-info

# test if any options are not used:
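For context, these solver options reach the GeoClaw executable through the PETSC_OPTIONS environment variable, which setenv.sh (further down in this PR) points at this file via -options_file. A minimal sketch of selecting a different options file for one shell session, assuming only the standard PETSc -options_file mechanism (the path is a placeholder, not part of this PR):

    # point PETSc at an alternative options file for this shell session
    export PETSC_OPTIONS="-options_file /path/to/my/petscMPIoptions"
    # individual options can also be appended, e.g. to watch convergence:
    export PETSC_OPTIONS="$PETSC_OPTIONS -ksp_monitor"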
31 changes: 22 additions & 9 deletions examples/bouss/radial_flat/Makefile
@@ -36,30 +36,36 @@ ifndef PETSC_DIR
$(error PETSC_DIR not set)
endif

ifndef PETSC_ARCH
$(error PETSC_ARCH not set)
endif

ifndef PETSC_OPTIONS
PETSC_OPTIONS=MISSING
$(error PETSC_OPTIONS must be declared as environment variable)
endif

ifndef CLAW_MPIEXEC
CLAW_MPIEXEC=MISSING
$(error CLAW_MPIEXEC must be declared as environment variable)
endif

ifndef CLAW_MPIFC
CLAW_MPIFC=MISSING
$(error Fortran compiler CLAW_MPIFC must be declared as environment variable)
endif

# Environment variable FC should be set to fortran compiler, e.g. gfortran
FC = ${CLAW_MPIFC}

# How many MPI processes to use:
BOUSS_MPI_PROCS ?= 6

EXE = $(PWD)/xgeoclaw
RUNEXE="${PETSC_DIR}/${PETSC_ARCH}/bin/mpiexec -n ${BOUSS_MPI_PROCS}"
#RUNEXE="${PETSC_DIR}/${PETSC_ARCH}/bin/mpiexec -n ${BOUSS_MPI_PROCS}"
RUNEXE="${CLAW_MPIEXEC} -n ${BOUSS_MPI_PROCS}"
SETRUN_FILE = setrun.py # File containing function to make data
OUTDIR = _output # Directory for output
SETPLOT_FILE = setplot.py # File containing function to set plots
PLOTDIR = _plots # Directory for plots


# Environment variable FC should be set to fortran compiler, e.g. gfortran

FC = gfortran

# Some compiler flags below are needed for PETSc
PETSC_INCLUDE = $(PETSC_DIR)/include $(PETSC_DIR)/$(PETSC_ARCH)/include
INCLUDE += $(PETSC_INCLUDE)
@@ -121,6 +127,13 @@ check:
@env | grep PETSC_OPTIONS
@echo PETSC_DIR = $(PETSC_DIR)
@echo PETSC_ARCH = $(PETSC_ARCH)
@echo CLAW_MPIEXEC = $(CLAW_MPIEXEC)
@echo RUNEXE = $(RUNEXE)
@echo EXE = $(EXE)
@echo CLAW_MPIFC = $(CLAW_MPIFC)
@echo FC = $(FC)
@echo FFLAGS = $(FFLAGS)
@echo LFLAGS = $(LFLAGS)
@echo OUTDIR = $(OUTDIR)
@echo PLOTDIR = $(PLOTDIR)
@echo ===================
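As a usage sketch for this Makefile (assuming the standard Clawpack make targets and that setenv.sh has been edited for the local system; none of these commands are part of this PR):

    # load the environment variables the Makefile now checks for
    source $CLAW/geoclaw/examples/bouss/setenv.sh
    cd $CLAW/geoclaw/examples/bouss/radial_flat

    make check                        # echoes PETSC_DIR, CLAW_MPIEXEC, RUNEXE, FC, FFLAGS, ...
    make .output                      # builds xgeoclaw and runs it via $(RUNEXE)
    BOUSS_MPI_PROCS=12 make .output   # override the default of 6 MPI ranks

The last override works because the Makefile assigns BOUSS_MPI_PROCS with ?=, so an exported value (as set in setenv.sh) or a value supplied on the command line takes precedence over the default of 6.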
33 changes: 28 additions & 5 deletions examples/bouss/setenv.sh
@@ -3,15 +3,38 @@
# to run the Bouss version of GeoClaw with MPI and OpenMP.
# Adjust as needed for your system...

# You also need to set CLAW, FC, and perhaps PYTHONPATH

# For more information, see
# https://www.clawpack.org/bouss2d.html
# https://www.clawpack.org/setenv.html

export PETSC_DIR=/full/path/to/petsc
export PETSC_ARCH=arch-darwin-c-opt
# You also need to set CLAW and perhaps PYTHONPATH, see:
# https://www.clawpack.org/setenv.html

echo CLAW is set to $CLAW

# path to PETSc installation:
export PETSC_DIR=/full/path/to/petsc # NEED TO FIX!

# PETSC_ARCH is only needed if PETSc is installed inside the PETSc directory.
# For PETSc installs by conda or package managers, it should not be set.
#export PETSC_ARCH=
export PETSC_ARCH=arch-darwin-c-opt # NEED TO FIX!

# You may want to use a different version of petscMPIoptions
# This setting uses the version in this directory:
export PETSC_OPTIONS="-options_file $CLAW/geoclaw/examples/bouss/petscMPIoptions"

export OMP_NUM_THREADS=6
export BOUSS_MPI_PROCS=6 # only used in Clawpack Boussinesq example
export BOUSS_MPI_PROCS=6

# CLAW_MPIEXEC should be set to the command used to execute MPI code:
export CLAW_MPIEXEC=mpiexec
# set CLAW_MPIEXEC to mpiexec only if this command is defined in your shell,
# e.g. to use a version of MPI that was installed outside of PETSc.
# Or set to the full path to this command, e.g. for the PETSc version:
#export CLAW_MPIEXEC=$PETSC_DIR/$PETSC_ARCH/bin/mpiexec # requires PETSC_ARCH

# set CLAW_MPIFC to the proper Fortran compiler to use for MPI code
# e.g. mpif90 if that is defined in your shell, or gfortran *might* work.
# This will override any FC environment variable.
export CLAW_MPIFC=mpif90
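As a quick sanity check (a sketch only, not part of this PR), the script can be sourced and the resulting environment inspected before building any of the examples:

    # after replacing the paths marked "NEED TO FIX!" above:
    source setenv.sh
    # confirm the variables the Makefiles rely on are exported:
    env | grep -E 'PETSC|CLAW_MPI|BOUSS_MPI_PROCS|OMP_NUM_THREADS'
    # check that the MPI wrappers resolve to actual commands:
    which $CLAW_MPIEXEC $CLAW_MPIFC

If mpiexec or mpif90 is not found, the commented-out PETSc-provided paths above (which require PETSC_ARCH) are the fallback.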