From 1aa8ab8742ce563bef08b8463fbd5d79e2a42f4c Mon Sep 17 00:00:00 2001
From: mathias77515
Date: Tue, 10 Sep 2024 22:32:26 +0200
Subject: [PATCH] remove __pycache__ + test code

---
 src/run.py                            | 38 ++++++++++++++++++++++
 src/running_scrips/run_cmm.py         | 16 ++++++++++
 src/running_scrips/run_cmm.sh         | 20 ++++++++++++
 src/running_scrips/run_fit_fmm.py     | 45 +++++++++++++++++++++++++++
 src/running_scrips/run_fmm.py         | 21 +++++++++++++
 src/running_scrips/run_fmm.sh         | 20 ++++++++++++
 src/running_scrips/run_spectra_cmm.py |  1 +
 7 files changed, 161 insertions(+)
 create mode 100644 src/run.py
 create mode 100644 src/running_scrips/run_cmm.py
 create mode 100644 src/running_scrips/run_cmm.sh
 create mode 100644 src/running_scrips/run_fit_fmm.py
 create mode 100644 src/running_scrips/run_fmm.py
 create mode 100644 src/running_scrips/run_fmm.sh
 create mode 100644 src/running_scrips/run_spectra_cmm.py

diff --git a/src/run.py b/src/run.py
new file mode 100644
index 0000000..a3b3ede
--- /dev/null
+++ b/src/run.py
@@ -0,0 +1,38 @@
+import sys
+from pyoperators import *
+from FMM.pipeline import PipelineEnd2End
+from CMM.pipeline import Pipeline
+
+
+simu = 'CMM'
+
+if __name__ == "__main__":
+
+    ### Common MPI arguments
+    comm = MPI.COMM_WORLD
+
+
+    if simu == 'FMM':
+
+        try:
+            file = str(sys.argv[1])
+        except IndexError:
+            file = None
+
+        ### Initialization
+        pipeline = PipelineEnd2End(comm)
+
+        ### Execution
+        pipeline.main(specific_file=file)
+
+    elif simu == 'CMM':
+
+        seed_noise = int(sys.argv[1])
+
+        ### Initialization
+        pipeline = Pipeline(comm, 1, seed_noise)
+
+        ### Execution
+        pipeline.main()
+
+    
\ No newline at end of file
diff --git a/src/running_scrips/run_cmm.py b/src/running_scrips/run_cmm.py
new file mode 100644
index 0000000..4c1415e
--- /dev/null
+++ b/src/running_scrips/run_cmm.py
@@ -0,0 +1,16 @@
+import sys
+
+from pyoperators import *
+
+from CMM.pipeline import Pipeline
+
+seed_noise = int(sys.argv[1])
+
+### MPI common arguments
+comm = MPI.COMM_WORLD
+
+if __name__ == "__main__":
+
+    pipeline = Pipeline(comm, 1, seed_noise)
+
+    pipeline.main()
diff --git a/src/running_scrips/run_cmm.sh b/src/running_scrips/run_cmm.sh
new file mode 100644
index 0000000..74c4016
--- /dev/null
+++ b/src/running_scrips/run_cmm.sh
@@ -0,0 +1,20 @@
+#!/bin/bash
+
+#SBATCH --job-name=CMM
+
+# we ask for n MPI tasks with N cores each on c nodes
+
+#SBATCH --partition=htc
+#SBATCH --nodes=1              # c
+#SBATCH --ntasks-per-node=1    # n
+#SBATCH --cpus-per-task=4      # N
+#SBATCH --mem=60G
+#SBATCH --time=3-00:00:00
+#SBATCH --output=multiple_jobs_%j.log
+#SBATCH --array=1-500
+
+export OMP_NUM_THREADS=${SLURM_CPUS_PER_TASK}
+
+module load mpich
+
+mpirun -np $SLURM_NTASKS python run_cmm.py $1
diff --git a/src/running_scrips/run_fit_fmm.py b/src/running_scrips/run_fit_fmm.py
new file mode 100644
index 0000000..8fc1e57
--- /dev/null
+++ b/src/running_scrips/run_fit_fmm.py
@@ -0,0 +1,45 @@
+import os
+
+import matplotlib.pyplot as plt
+import numpy as np
+from multiprocess import Pool
+from pyoperators import MPI
+from schwimmbad import MPIPool
+
+from lib.Qfit import FitEllSpace
+from lib.Qfoldertools import MergeAllFiles
+from lib.Qspectra_component import SkySpectra
+
+comm = MPI.COMM_WORLD
+
+### Concatenate all realizations
+files = MergeAllFiles(
+    "/Users/mregnier/Desktop/git/Pipeline/src/FMM/CMBDUST_nrec2_new_code/spectrum/"
+)
+
+nus_index = np.array([True, True, False, False, False, False, False, False, True])
+NBINS = 16
+
+ell = files._reads_one_file(0, "ell")[:NBINS]
+nus = files._reads_one_file(0, "nus")[nus_index]
+ 
+BBsignal = np.mean(files._reads_all_files("Dls"), axis=0)[:, nus_index, :NBINS][
+    nus_index, :, :NBINS
+]
+BBnoise = files._reads_all_files("Nl")[:, :, nus_index, :NBINS][:, nus_index, :, :NBINS]
+BBsignal -= np.mean(BBnoise, axis=0)
+
+sky = SkySpectra(ell, nus)
+fit = FitEllSpace(ell, BBsignal, BBnoise, model=sky.model)
+
+samples, samples_flat = fit.run(300, 10, discard=200, comm=comm)
+
+plt.figure()
+plt.plot(samples[..., 0], "-k", alpha=0.1)
+plt.axhline(0)
+plt.show()
+
+print()
+print(f"Average : {np.mean(samples_flat, axis=0)}")
+print(f"Error : {np.std(samples_flat, axis=0)}")
+print()
diff --git a/src/running_scrips/run_fmm.py b/src/running_scrips/run_fmm.py
new file mode 100644
index 0000000..1d53aed
--- /dev/null
+++ b/src/running_scrips/run_fmm.py
@@ -0,0 +1,21 @@
+import sys
+from pyoperators import *
+
+from FMM.pipeline import PipelineEnd2End
+
+
+try:
+    file = str(sys.argv[1])
+except IndexError:
+    file = None
+
+if __name__ == "__main__":
+
+    ### Common MPI arguments
+    comm = MPI.COMM_WORLD
+
+    ### Initialization
+    pipeline = PipelineEnd2End(comm)
+
+    ### Execution
+    pipeline.main(specific_file=file)
diff --git a/src/running_scrips/run_fmm.sh b/src/running_scrips/run_fmm.sh
new file mode 100644
index 0000000..fd59745
--- /dev/null
+++ b/src/running_scrips/run_fmm.sh
@@ -0,0 +1,20 @@
+#!/bin/bash
+
+#SBATCH --job-name=FMM
+
+# we ask for n MPI tasks with N cores each on c nodes
+
+#SBATCH --partition=htc
+#SBATCH --nodes=1              # c
+#SBATCH --ntasks-per-node=1    # n
+#SBATCH --cpus-per-task=4      # N
+#SBATCH --mem=60G
+#SBATCH --time=3-00:00:00
+#SBATCH --output=multiple_jobs_%j.log
+#SBATCH --array=1-500
+
+export OMP_NUM_THREADS=${SLURM_CPUS_PER_TASK}
+
+module load mpich
+
+mpirun -np $SLURM_NTASKS python run_fmm.py
\ No newline at end of file
diff --git a/src/running_scrips/run_spectra_cmm.py b/src/running_scrips/run_spectra_cmm.py
new file mode 100644
index 0000000..7bcad1c
--- /dev/null
+++ b/src/running_scrips/run_spectra_cmm.py
@@ -0,0 +1 @@
+from CMM.spectrum.get_spectra import Spectra
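
Usage sketch (not taken from the patch itself): a minimal example of how the two batch scripts above would be submitted on a SLURM cluster, assuming submission from src/running_scrips/; the seed value 42 is purely illustrative.

    # CMM end-to-end run: job array of 500 tasks, the noise seed is forwarded to run_cmm.py as $1
    sbatch run_cmm.sh 42

    # FMM end-to-end run: the parameter-file argument of run_fmm.py is optional and omitted here
    sbatch run_fmm.sh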