From 07ba4ead639981ed614acfffb242d446eac07e16 Mon Sep 17 00:00:00 2001
From: Rahul Mahajan
Date: Sun, 2 Jul 2023 23:31:11 -0400
Subject: [PATCH 01/19] remove pygw and replace with git submodule of wxflow

---
 .gitmodules | 4 +
 ci/scripts/create_experiment.py | 6 +-
 ci/scripts/pygw | 1 -
 ci/scripts/wxflow | 1 +
 jobs/rocoto/aeroanlfinal.sh | 4 +-
 jobs/rocoto/aeroanlinit.sh | 4 +-
 jobs/rocoto/aeroanlrun.sh | 4 +-
 jobs/rocoto/atmanlfinal.sh | 4 +-
 jobs/rocoto/atmanlinit.sh | 4 +-
 jobs/rocoto/atmanlrun.sh | 4 +-
 jobs/rocoto/atmensanlfinal.sh | 4 +-
 jobs/rocoto/atmensanlinit.sh | 4 +-
 jobs/rocoto/atmensanlrun.sh | 4 +-
 jobs/rocoto/fcst.sh | 4 +-
 jobs/rocoto/landanl.sh | 4 +-
 jobs/rocoto/preplandobs.sh | 4 +-
 jobs/rocoto/upp.sh | 4 +-
 scripts/exglobal_aero_analysis_finalize.py | 4 +-
 scripts/exglobal_aero_analysis_initialize.py | 4 +-
 scripts/exglobal_aero_analysis_run.py | 4 +-
 scripts/exglobal_atm_analysis_finalize.py | 4 +-
 scripts/exglobal_atm_analysis_initialize.py | 4 +-
 scripts/exglobal_atm_analysis_run.py | 4 +-
 scripts/exglobal_atmens_analysis_finalize.py | 4 +-
 .../exglobal_atmens_analysis_initialize.py | 4 +-
 scripts/exglobal_atmens_analysis_run.py | 4 +-
 scripts/exglobal_atmos_upp.py | 6 +-
 scripts/exglobal_forecast.py | 6 +-
 scripts/exglobal_land_analysis.py | 4 +-
 scripts/exglobal_prep_land_obs.py | 4 +-
 ush/python/pygfs/task/aero_analysis.py | 18 +-
 ush/python/pygfs/task/analysis.py | 14 +-
 ush/python/pygfs/task/atm_analysis.py | 16 +-
 ush/python/pygfs/task/atmens_analysis.py | 18 +-
 ush/python/pygfs/task/gfs_forecast.py | 4 +-
 ush/python/pygfs/task/land_analysis.py | 18 +-
 ush/python/pygfs/task/upp.py | 18 +-
 ush/python/pygfs/ufswm/gfs.py | 2 +-
 ush/python/pygfs/ufswm/ufs.py | 4 +-
 ush/python/pygw/.gitignore | 139 ---
 ush/python/pygw/README.md | 36 -
 ush/python/pygw/setup.cfg | 62 --
 ush/python/pygw/setup.py | 4 -
 ush/python/pygw/src/pygw/__init__.py | 8 -
 ush/python/pygw/src/pygw/attrdict.py | 171 ----
 ush/python/pygw/src/pygw/configuration.py | 179 ----
 ush/python/pygw/src/pygw/exceptions.py | 87 --
 ush/python/pygw/src/pygw/executable.py | 357 ------
 ush/python/pygw/src/pygw/factory.py | 134 ---
 ush/python/pygw/src/pygw/file_utils.py | 77 --
 ush/python/pygw/src/pygw/fsutils.py | 87 --
 ush/python/pygw/src/pygw/jinja.py | 253 -----
 ush/python/pygw/src/pygw/logger.py | 275 ------
 ush/python/pygw/src/pygw/schema.py | 887 ------------------
 ush/python/pygw/src/pygw/task.py | 93 --
 ush/python/pygw/src/pygw/template.py | 191 ----
 ush/python/pygw/src/pygw/timetools.py | 316 -------
 ush/python/pygw/src/pygw/yaml_file.py | 208 ----
 ush/python/pygw/src/tests/__init__.py | 0
 .../src/tests/test-files/test_schema.yaml | 21 -
 .../pygw/src/tests/test_configuration.py | 172 ----
 ush/python/pygw/src/tests/test_exceptions.py | 35 -
 ush/python/pygw/src/tests/test_executable.py | 60 --
 ush/python/pygw/src/tests/test_factory.py | 74 --
 ush/python/pygw/src/tests/test_file_utils.py | 66 --
 ush/python/pygw/src/tests/test_jinja.py | 37 -
 ush/python/pygw/src/tests/test_logger.py | 67 --
 ush/python/pygw/src/tests/test_schema.py | 82 --
 ush/python/pygw/src/tests/test_template.py | 147 ---
 ush/python/pygw/src/tests/test_timetools.py | 80 --
 ush/python/pygw/src/tests/test_yaml_file.py | 97 --
 ush/python/wxflow | 1 +
 workflow/applications/application_factory.py | 2 +-
 workflow/applications/applications.py | 2 +-
 workflow/applications/gefs.py | 2 +-
 workflow/applications/gfs_cycled.py | 2 +-
 workflow/applications/gfs_forecast_only.py | 2 +-
 workflow/hosts.py | 2 +-
 workflow/pygw | 1 -
 workflow/rocoto/gefs_xml.py | 2 +-
 workflow/rocoto/gfs_cycled_xml.py | 2 +-
 workflow/rocoto/gfs_forecast_only_xml.py | 2 +-
 workflow/rocoto/rocoto_xml_factory.py | 2 +-
 workflow/rocoto/tasks.py | 2 +-
 workflow/rocoto/tasks_factory.py | 2 +-
 workflow/setup_expt.py | 6 +-
 workflow/setup_xml.py | 2 +-
 workflow/test_configuration.py | 2 +-
 workflow/wxflow | 1 +
 89 files changed, 137 insertions(+), 4634 deletions(-)
 create mode 100644 .gitmodules
 delete mode 120000 ci/scripts/pygw
 create mode 120000 ci/scripts/wxflow
 delete mode 100644 ush/python/pygw/.gitignore
 delete mode 100644 ush/python/pygw/README.md
 delete mode 100644 ush/python/pygw/setup.cfg
 delete mode 100644 ush/python/pygw/setup.py
 delete mode 100644 ush/python/pygw/src/pygw/__init__.py
 delete mode 100644 ush/python/pygw/src/pygw/attrdict.py
 delete mode 100644 ush/python/pygw/src/pygw/configuration.py
 delete mode 100644 ush/python/pygw/src/pygw/exceptions.py
 delete mode 100644 ush/python/pygw/src/pygw/executable.py
 delete mode 100644 ush/python/pygw/src/pygw/factory.py
 delete mode 100644 ush/python/pygw/src/pygw/file_utils.py
 delete mode 100644 ush/python/pygw/src/pygw/fsutils.py
 delete mode 100644 ush/python/pygw/src/pygw/jinja.py
 delete mode 100644 ush/python/pygw/src/pygw/logger.py
 delete mode 100644 ush/python/pygw/src/pygw/schema.py
 delete mode 100644 ush/python/pygw/src/pygw/task.py
 delete mode 100644 ush/python/pygw/src/pygw/template.py
 delete mode 100644 ush/python/pygw/src/pygw/timetools.py
 delete mode 100644 ush/python/pygw/src/pygw/yaml_file.py
 delete mode 100644 ush/python/pygw/src/tests/__init__.py
 delete mode 100644 ush/python/pygw/src/tests/test-files/test_schema.yaml
 delete mode 100644 ush/python/pygw/src/tests/test_configuration.py
 delete mode 100644 ush/python/pygw/src/tests/test_exceptions.py
 delete mode 100644 ush/python/pygw/src/tests/test_executable.py
 delete mode 100644 ush/python/pygw/src/tests/test_factory.py
 delete mode 100644 ush/python/pygw/src/tests/test_file_utils.py
 delete mode 100644 ush/python/pygw/src/tests/test_jinja.py
 delete mode 100644 ush/python/pygw/src/tests/test_logger.py
 delete mode 100644 ush/python/pygw/src/tests/test_schema.py
 delete mode 100644 ush/python/pygw/src/tests/test_template.py
 delete mode 100644 ush/python/pygw/src/tests/test_timetools.py
 delete mode 100644 ush/python/pygw/src/tests/test_yaml_file.py
 create mode 160000 ush/python/wxflow
 delete mode 120000 workflow/pygw
 create mode 120000 workflow/wxflow

diff --git a/.gitmodules b/.gitmodules
new file mode 100644
index 0000000000..5d860873ed
--- /dev/null
+++ b/.gitmodules
@@ -0,0 +1,4 @@
+[submodule "ush/python/wxflow"]
+ path = ush/python/wxflow
+ url = https://github.com/noaa-emc/wxflow
+ branch = feature/wxflow
diff --git a/ci/scripts/create_experiment.py b/ci/scripts/create_experiment.py
index ee95d8795e..4ca0ee7fa8 100755
--- a/ci/scripts/create_experiment.py
+++ b/ci/scripts/create_experiment.py
@@ -22,9 +22,9 @@ from pathlib import Path
-from pygw.yaml_file import YAMLFile
-from pygw.logger import Logger
-from pygw.executable import Executable
+from wxflow.yaml_file import YAMLFile
+from wxflow.logger import Logger
+from wxflow.executable import Executable
 from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter
diff --git a/ci/scripts/pygw b/ci/scripts/pygw
deleted file mode 120000
index 77d784f6ca..0000000000
--- a/ci/scripts/pygw
+++ /dev/null
@@ -1 +0,0 @@
-../../ush/python/pygw/src/pygw
\ No newline at end of file
diff --git a/ci/scripts/wxflow b/ci/scripts/wxflow
new file mode 120000
index
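With pygw gone from the tree, the wxflow sources now arrive through the submodule declared in the .gitmodules hunk above, so a fresh clone needs one extra step. A minimal sketch, using standard git commands, the submodule path from the hunk above, and the clone URL from the pygw README later in this patch:

```sh
$> git clone https://github.com/noaa-emc/global-workflow
$> cd global-workflow
$> git submodule update --init ush/python/wxflow  # check out the commit recorded for the submodule
```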
0000000000..36aeb9617b --- /dev/null +++ b/ci/scripts/wxflow @@ -0,0 +1 @@ +../../ush/python/wxflow/src/wxflow \ No newline at end of file diff --git a/jobs/rocoto/aeroanlfinal.sh b/jobs/rocoto/aeroanlfinal.sh index 8f5a445de4..16bb6887fd 100755 --- a/jobs/rocoto/aeroanlfinal.sh +++ b/jobs/rocoto/aeroanlfinal.sh @@ -13,8 +13,8 @@ export jobid="${job}.$$" ############################################################### # setup python path for workflow utilities and tasks -pygwPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/pygw/src" -PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${pygwPATH}" +wxflowPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/wxflow/src" +PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${wxflowPATH}" export PYTHONPATH ############################################################### # Execute the JJOB diff --git a/jobs/rocoto/aeroanlinit.sh b/jobs/rocoto/aeroanlinit.sh index 4e3d32ff9f..9aaf255782 100755 --- a/jobs/rocoto/aeroanlinit.sh +++ b/jobs/rocoto/aeroanlinit.sh @@ -13,8 +13,8 @@ export jobid="${job}.$$" ############################################################### # setup python path for workflow utilities and tasks -pygwPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/pygw/src" -PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${pygwPATH}" +wxflowPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/wxflow/src" +PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${wxflowPATH}" export PYTHONPATH ############################################################### diff --git a/jobs/rocoto/aeroanlrun.sh b/jobs/rocoto/aeroanlrun.sh index 0ec2fb8437..bcd86e3fbf 100755 --- a/jobs/rocoto/aeroanlrun.sh +++ b/jobs/rocoto/aeroanlrun.sh @@ -13,8 +13,8 @@ export jobid="${job}.$$" ############################################################### # setup python path for workflow utilities and tasks -pygwPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/pygw/src" -PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${pygwPATH}" +wxflowPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/wxflow/src" +PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${wxflowPATH}" export PYTHONPATH ############################################################### diff --git a/jobs/rocoto/atmanlfinal.sh b/jobs/rocoto/atmanlfinal.sh index 3c75c52cb0..3d3c3ba9e6 100755 --- a/jobs/rocoto/atmanlfinal.sh +++ b/jobs/rocoto/atmanlfinal.sh @@ -13,8 +13,8 @@ export jobid="${job}.$$" ############################################################### # setup python path for workflow utilities and tasks -pygwPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/pygw/src" -PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${pygwPATH}" +wxflowPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/wxflow/src" +PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${wxflowPATH}" export PYTHONPATH ############################################################### # Execute the JJOB diff --git a/jobs/rocoto/atmanlinit.sh b/jobs/rocoto/atmanlinit.sh index 7bb2587f0b..13c7d8710b 100755 --- a/jobs/rocoto/atmanlinit.sh +++ b/jobs/rocoto/atmanlinit.sh @@ -13,8 +13,8 @@ export jobid="${job}.$$" ############################################################### # setup python path for workflow utilities and tasks -pygwPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/pygw/src" -PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${pygwPATH}" +wxflowPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/wxflow/src" +PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${wxflowPATH}" export PYTHONPATH ############################################################### diff --git a/jobs/rocoto/atmanlrun.sh b/jobs/rocoto/atmanlrun.sh index 
aad80e0b06..1b87cb4074 100755 --- a/jobs/rocoto/atmanlrun.sh +++ b/jobs/rocoto/atmanlrun.sh @@ -13,8 +13,8 @@ export jobid="${job}.$$" ############################################################### # setup python path for workflow utilities and tasks -pygwPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/pygw/src" -PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${pygwPATH}" +wxflowPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/wxflow/src" +PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${wxflowPATH}" export PYTHONPATH ############################################################### diff --git a/jobs/rocoto/atmensanlfinal.sh b/jobs/rocoto/atmensanlfinal.sh index 838e9712f8..5ffaa92754 100755 --- a/jobs/rocoto/atmensanlfinal.sh +++ b/jobs/rocoto/atmensanlfinal.sh @@ -13,8 +13,8 @@ export jobid="${job}.$$" ############################################################### # setup python path for workflow utilities and tasks -pygwPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/pygw/src" -PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${pygwPATH}" +wxflowPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/wxflow/src" +PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${wxflowPATH}" export PYTHONPATH ############################################################### # Execute the JJOB diff --git a/jobs/rocoto/atmensanlinit.sh b/jobs/rocoto/atmensanlinit.sh index 0ab78a1083..2c2204548a 100755 --- a/jobs/rocoto/atmensanlinit.sh +++ b/jobs/rocoto/atmensanlinit.sh @@ -13,8 +13,8 @@ export jobid="${job}.$$" ############################################################### # setup python path for workflow utilities and tasks -pygwPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/pygw/src" -PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${pygwPATH}" +wxflowPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/wxflow/src" +PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${wxflowPATH}" export PYTHONPATH ############################################################### diff --git a/jobs/rocoto/atmensanlrun.sh b/jobs/rocoto/atmensanlrun.sh index 91efdb3768..d991e3eb82 100755 --- a/jobs/rocoto/atmensanlrun.sh +++ b/jobs/rocoto/atmensanlrun.sh @@ -13,8 +13,8 @@ export jobid="${job}.$$" ############################################################### # setup python path for workflow utilities and tasks -pygwPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/pygw/src" -PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${pygwPATH}" +wxflowPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/wxflow/src" +PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${wxflowPATH}" export PYTHONPATH ############################################################### diff --git a/jobs/rocoto/fcst.sh b/jobs/rocoto/fcst.sh index 4c957cb205..9d59f70bd8 100755 --- a/jobs/rocoto/fcst.sh +++ b/jobs/rocoto/fcst.sh @@ -59,8 +59,8 @@ set_trace ############################################################### # exglobal_forecast.py requires the following in PYTHONPATH # This will be moved to a module load when ready -pygwPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/pygw/src:${HOMEgfs}/ush/python/pygfs" -PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${pygwPATH}" +wxflowPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/wxflow/src:${HOMEgfs}/ush/python/pygfs" +PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${wxflowPATH}" export PYTHONPATH export job="fcst" diff --git a/jobs/rocoto/landanl.sh b/jobs/rocoto/landanl.sh index f7bcea30cf..f49b6f9f8b 100755 --- a/jobs/rocoto/landanl.sh +++ b/jobs/rocoto/landanl.sh @@ -13,8 +13,8 @@ export jobid="${job}.$$" ############################################################### # setup 
python path for workflow utilities and tasks -pygwPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/pygw/src" -PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${pygwPATH}" +wxflowPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/wxflow/src" +PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${wxflowPATH}" export PYTHONPATH ############################################################### diff --git a/jobs/rocoto/preplandobs.sh b/jobs/rocoto/preplandobs.sh index 6fcd659eae..399b26048c 100755 --- a/jobs/rocoto/preplandobs.sh +++ b/jobs/rocoto/preplandobs.sh @@ -13,9 +13,9 @@ export jobid="${job}.$$" ############################################################### # setup python path for workflow utilities and tasks -pygwPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/pygw/src" +wxflowPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/wxflow/src" gdasappPATH="${HOMEgfs}/sorc/gdas.cd/iodaconv/src:${HOMEgfs}/sorc/gdas.cd/build/lib/python3.7/pyioda" -PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${pygwPATH}:${gdasappPATH}" +PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${wxflowPATH}:${gdasappPATH}" export PYTHONPATH ############################################################### diff --git a/jobs/rocoto/upp.sh b/jobs/rocoto/upp.sh index fc4c9c566c..23432a5b14 100755 --- a/jobs/rocoto/upp.sh +++ b/jobs/rocoto/upp.sh @@ -15,8 +15,8 @@ status=$? ############################################################### # setup python path for workflow utilities and tasks -pygwPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/pygw/src" -PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${pygwPATH}" +wxflowPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/wxflow/src" +PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${wxflowPATH}" export PYTHONPATH export job="upp" diff --git a/scripts/exglobal_aero_analysis_finalize.py b/scripts/exglobal_aero_analysis_finalize.py index 7342bf8357..595f069a76 100755 --- a/scripts/exglobal_aero_analysis_finalize.py +++ b/scripts/exglobal_aero_analysis_finalize.py @@ -6,8 +6,8 @@ # for a global aerosol variational analysis import os -from pygw.logger import Logger -from pygw.configuration import cast_strdict_as_dtypedict +from wxflow.logger import Logger +from wxflow.configuration import cast_strdict_as_dtypedict from pygfs.task.aero_analysis import AerosolAnalysis diff --git a/scripts/exglobal_aero_analysis_initialize.py b/scripts/exglobal_aero_analysis_initialize.py index 6c4135fc2d..3b2024265b 100755 --- a/scripts/exglobal_aero_analysis_initialize.py +++ b/scripts/exglobal_aero_analysis_initialize.py @@ -7,8 +7,8 @@ # for a global aerosol variational analysis import os -from pygw.logger import Logger -from pygw.configuration import cast_strdict_as_dtypedict +from wxflow.logger import Logger +from wxflow.configuration import cast_strdict_as_dtypedict from pygfs.task.aero_analysis import AerosolAnalysis # Initialize root logger diff --git a/scripts/exglobal_aero_analysis_run.py b/scripts/exglobal_aero_analysis_run.py index 887700f476..d67f75e969 100755 --- a/scripts/exglobal_aero_analysis_run.py +++ b/scripts/exglobal_aero_analysis_run.py @@ -5,8 +5,8 @@ # which executes the global aerosol variational analysis import os -from pygw.logger import Logger -from pygw.configuration import cast_strdict_as_dtypedict +from wxflow.logger import Logger +from wxflow.configuration import cast_strdict_as_dtypedict from pygfs.task.aero_analysis import AerosolAnalysis # Initialize root logger diff --git a/scripts/exglobal_atm_analysis_finalize.py b/scripts/exglobal_atm_analysis_finalize.py index e51bf082b5..0e0d0f5258 100755 --- 
a/scripts/exglobal_atm_analysis_finalize.py +++ b/scripts/exglobal_atm_analysis_finalize.py @@ -6,8 +6,8 @@ # for a global atm variational analysis import os -from pygw.logger import Logger -from pygw.configuration import cast_strdict_as_dtypedict +from wxflow.logger import Logger +from wxflow.configuration import cast_strdict_as_dtypedict from pygfs.task.atm_analysis import AtmAnalysis diff --git a/scripts/exglobal_atm_analysis_initialize.py b/scripts/exglobal_atm_analysis_initialize.py index e0077f3323..b93123f70d 100755 --- a/scripts/exglobal_atm_analysis_initialize.py +++ b/scripts/exglobal_atm_analysis_initialize.py @@ -7,8 +7,8 @@ # for a global atm variational analysis import os -from pygw.logger import Logger -from pygw.configuration import cast_strdict_as_dtypedict +from wxflow.logger import Logger +from wxflow.configuration import cast_strdict_as_dtypedict from pygfs.task.atm_analysis import AtmAnalysis # Initialize root logger diff --git a/scripts/exglobal_atm_analysis_run.py b/scripts/exglobal_atm_analysis_run.py index 6b29a56976..946a705543 100755 --- a/scripts/exglobal_atm_analysis_run.py +++ b/scripts/exglobal_atm_analysis_run.py @@ -5,8 +5,8 @@ # which executes the global atm variational analysis import os -from pygw.logger import Logger -from pygw.configuration import cast_strdict_as_dtypedict +from wxflow.logger import Logger +from wxflow.configuration import cast_strdict_as_dtypedict from pygfs.task.atm_analysis import AtmAnalysis # Initialize root logger diff --git a/scripts/exglobal_atmens_analysis_finalize.py b/scripts/exglobal_atmens_analysis_finalize.py index 7bac671aee..c857e0763a 100755 --- a/scripts/exglobal_atmens_analysis_finalize.py +++ b/scripts/exglobal_atmens_analysis_finalize.py @@ -6,8 +6,8 @@ # for a global atm local ensemble analysis import os -from pygw.logger import Logger -from pygw.configuration import cast_strdict_as_dtypedict +from wxflow.logger import Logger +from wxflow.configuration import cast_strdict_as_dtypedict from pygfs.task.atmens_analysis import AtmEnsAnalysis diff --git a/scripts/exglobal_atmens_analysis_initialize.py b/scripts/exglobal_atmens_analysis_initialize.py index 1461e0b441..d08956bdb8 100755 --- a/scripts/exglobal_atmens_analysis_initialize.py +++ b/scripts/exglobal_atmens_analysis_initialize.py @@ -7,8 +7,8 @@ # for a global atm local ensemble analysis import os -from pygw.logger import Logger -from pygw.configuration import cast_strdict_as_dtypedict +from wxflow.logger import Logger +from wxflow.configuration import cast_strdict_as_dtypedict from pygfs.task.atmens_analysis import AtmEnsAnalysis # Initialize root logger diff --git a/scripts/exglobal_atmens_analysis_run.py b/scripts/exglobal_atmens_analysis_run.py index dda4f7a11d..384627de02 100755 --- a/scripts/exglobal_atmens_analysis_run.py +++ b/scripts/exglobal_atmens_analysis_run.py @@ -5,8 +5,8 @@ # which executes the global atm local ensemble analysis import os -from pygw.logger import Logger -from pygw.configuration import cast_strdict_as_dtypedict +from wxflow.logger import Logger +from wxflow.configuration import cast_strdict_as_dtypedict from pygfs.task.atmens_analysis import AtmEnsAnalysis # Initialize root logger diff --git a/scripts/exglobal_atmos_upp.py b/scripts/exglobal_atmos_upp.py index 1a8dac7eb3..eb405d0058 100755 --- a/scripts/exglobal_atmos_upp.py +++ b/scripts/exglobal_atmos_upp.py @@ -2,9 +2,9 @@ import os -from pygw.attrdict import AttrDict -from pygw.logger import Logger, logit -from pygw.configuration import cast_strdict_as_dtypedict +from 
wxflow.attrdict import AttrDict +from wxflow.logger import Logger, logit +from wxflow.configuration import cast_strdict_as_dtypedict from pygfs.task.upp import UPP # initialize root logger diff --git a/scripts/exglobal_forecast.py b/scripts/exglobal_forecast.py index 2b21934bfa..7d081bc2cb 100755 --- a/scripts/exglobal_forecast.py +++ b/scripts/exglobal_forecast.py @@ -2,9 +2,9 @@ import os -from pygw.logger import Logger, logit -from pygw.yaml_file import save_as_yaml -from pygw.configuration import cast_strdict_as_dtypedict +from wxflow.logger import Logger, logit +from wxflow.yaml_file import save_as_yaml +from wxflow.configuration import cast_strdict_as_dtypedict from pygfs.task.gfs_forecast import GFSForecast # initialize root logger diff --git a/scripts/exglobal_land_analysis.py b/scripts/exglobal_land_analysis.py index c45a75aed6..c7bf9f2de2 100755 --- a/scripts/exglobal_land_analysis.py +++ b/scripts/exglobal_land_analysis.py @@ -5,8 +5,8 @@ # for a global Land Snow Depth analysis import os -from pygw.logger import Logger -from pygw.configuration import cast_strdict_as_dtypedict +from wxflow.logger import Logger +from wxflow.configuration import cast_strdict_as_dtypedict from pygfs.task.land_analysis import LandAnalysis # Initialize root logger diff --git a/scripts/exglobal_prep_land_obs.py b/scripts/exglobal_prep_land_obs.py index 69d15b95c8..19b939c193 100755 --- a/scripts/exglobal_prep_land_obs.py +++ b/scripts/exglobal_prep_land_obs.py @@ -5,8 +5,8 @@ # which perform the pre-processing for IMS data import os -from pygw.logger import Logger -from pygw.configuration import cast_strdict_as_dtypedict +from wxflow.logger import Logger +from wxflow.configuration import cast_strdict_as_dtypedict from pygfs.task.land_analysis import LandAnalysis diff --git a/ush/python/pygfs/task/aero_analysis.py b/ush/python/pygfs/task/aero_analysis.py index e3c9ad50a2..5dc94240b8 100644 --- a/ush/python/pygfs/task/aero_analysis.py +++ b/ush/python/pygfs/task/aero_analysis.py @@ -7,15 +7,15 @@ from logging import getLogger from typing import Dict, List, Any -from pygw.attrdict import AttrDict -from pygw.file_utils import FileHandler -from pygw.timetools import add_to_datetime, to_fv3time, to_timedelta -from pygw.fsutils import rm_p, chdir -from pygw.timetools import to_fv3time -from pygw.yaml_file import YAMLFile, parse_yamltmpl, parse_j2yaml, save_as_yaml -from pygw.logger import logit -from pygw.executable import Executable -from pygw.exceptions import WorkflowException +from wxflow.attrdict import AttrDict +from wxflow.file_utils import FileHandler +from wxflow.timetools import add_to_datetime, to_fv3time, to_timedelta +from wxflow.fsutils import rm_p, chdir +from wxflow.timetools import to_fv3time +from wxflow.yaml_file import YAMLFile, parse_yamltmpl, parse_j2yaml, save_as_yaml +from wxflow.logger import logit +from wxflow.executable import Executable +from wxflow.exceptions import WorkflowException from pygfs.task.analysis import Analysis logger = getLogger(__name__.split('.')[-1]) diff --git a/ush/python/pygfs/task/analysis.py b/ush/python/pygfs/task/analysis.py index 40b2035882..8bd7da8ac9 100644 --- a/ush/python/pygfs/task/analysis.py +++ b/ush/python/pygfs/task/analysis.py @@ -7,13 +7,13 @@ from netCDF4 import Dataset from typing import List, Dict, Any, Union -from pygw.yaml_file import parse_j2yaml -from pygw.file_utils import FileHandler -from pygw.fsutils import rm_p -from pygw.logger import logit -from pygw.task import Task -from pygw.executable import Executable -from pygw.exceptions 
import WorkflowException +from wxflow.yaml_file import parse_j2yaml +from wxflow.file_utils import FileHandler +from wxflow.fsutils import rm_p +from wxflow.logger import logit +from wxflow.task import Task +from wxflow.executable import Executable +from wxflow.exceptions import WorkflowException logger = getLogger(__name__.split('.')[-1]) diff --git a/ush/python/pygfs/task/atm_analysis.py b/ush/python/pygfs/task/atm_analysis.py index 3ab0ae3240..477f5032cc 100644 --- a/ush/python/pygfs/task/atm_analysis.py +++ b/ush/python/pygfs/task/atm_analysis.py @@ -7,14 +7,14 @@ from logging import getLogger from typing import Dict, List, Any -from pygw.attrdict import AttrDict -from pygw.file_utils import FileHandler -from pygw.timetools import add_to_datetime, to_fv3time, to_timedelta, to_YMDH -from pygw.fsutils import rm_p, chdir -from pygw.yaml_file import parse_yamltmpl, parse_j2yaml, save_as_yaml -from pygw.logger import logit -from pygw.executable import Executable -from pygw.exceptions import WorkflowException +from wxflow.attrdict import AttrDict +from wxflow.file_utils import FileHandler +from wxflow.timetools import add_to_datetime, to_fv3time, to_timedelta, to_YMDH +from wxflow.fsutils import rm_p, chdir +from wxflow.yaml_file import parse_yamltmpl, parse_j2yaml, save_as_yaml +from wxflow.logger import logit +from wxflow.executable import Executable +from wxflow.exceptions import WorkflowException from pygfs.task.analysis import Analysis logger = getLogger(__name__.split('.')[-1]) diff --git a/ush/python/pygfs/task/atmens_analysis.py b/ush/python/pygfs/task/atmens_analysis.py index c5c7e5b145..510baa5ab7 100644 --- a/ush/python/pygfs/task/atmens_analysis.py +++ b/ush/python/pygfs/task/atmens_analysis.py @@ -7,15 +7,15 @@ from logging import getLogger from typing import Dict, List, Any -from pygw.attrdict import AttrDict -from pygw.file_utils import FileHandler -from pygw.timetools import add_to_datetime, to_fv3time, to_timedelta, to_YMDH, to_YMD -from pygw.fsutils import rm_p, chdir -from pygw.yaml_file import parse_yamltmpl, parse_j2yaml, save_as_yaml -from pygw.logger import logit -from pygw.executable import Executable -from pygw.exceptions import WorkflowException -from pygw.template import Template, TemplateConstants +from wxflow.attrdict import AttrDict +from wxflow.file_utils import FileHandler +from wxflow.timetools import add_to_datetime, to_fv3time, to_timedelta, to_YMDH, to_YMD +from wxflow.fsutils import rm_p, chdir +from wxflow.yaml_file import parse_yamltmpl, parse_j2yaml, save_as_yaml +from wxflow.logger import logit +from wxflow.executable import Executable +from wxflow.exceptions import WorkflowException +from wxflow.template import Template, TemplateConstants from pygfs.task.analysis import Analysis logger = getLogger(__name__.split('.')[-1]) diff --git a/ush/python/pygfs/task/gfs_forecast.py b/ush/python/pygfs/task/gfs_forecast.py index 3527c623e0..fc3876176f 100644 --- a/ush/python/pygfs/task/gfs_forecast.py +++ b/ush/python/pygfs/task/gfs_forecast.py @@ -2,8 +2,8 @@ import logging from typing import Dict, Any -from pygw.logger import logit -from pygw.task import Task +from wxflow.logger import logit +from wxflow.task import Task from pygfs.ufswm.gfs import GFS logger = logging.getLogger(__name__.split('.')[-1]) diff --git a/ush/python/pygfs/task/land_analysis.py b/ush/python/pygfs/task/land_analysis.py index 181fa87866..60f2e90f10 100644 --- a/ush/python/pygfs/task/land_analysis.py +++ b/ush/python/pygfs/task/land_analysis.py @@ -7,15 +7,15 @@ import numpy as np from 
netCDF4 import Dataset -from pygw.attrdict import AttrDict -from pygw.file_utils import FileHandler -from pygw.timetools import to_fv3time, to_YMD, to_YMDH, to_timedelta, add_to_datetime -from pygw.fsutils import rm_p -from pygw.yaml_file import parse_j2yaml, parse_yamltmpl, save_as_yaml -from pygw.jinja import Jinja -from pygw.logger import logit -from pygw.executable import Executable -from pygw.exceptions import WorkflowException +from wxflow.attrdict import AttrDict +from wxflow.file_utils import FileHandler +from wxflow.timetools import to_fv3time, to_YMD, to_YMDH, to_timedelta, add_to_datetime +from wxflow.fsutils import rm_p +from wxflow.yaml_file import parse_j2yaml, parse_yamltmpl, save_as_yaml +from wxflow.jinja import Jinja +from wxflow.logger import logit +from wxflow.executable import Executable +from wxflow.exceptions import WorkflowException from pygfs.task.analysis import Analysis logger = getLogger(__name__.split('.')[-1]) diff --git a/ush/python/pygfs/task/upp.py b/ush/python/pygfs/task/upp.py index 29f4c6fe2b..faa1ccaf0a 100644 --- a/ush/python/pygfs/task/upp.py +++ b/ush/python/pygfs/task/upp.py @@ -5,15 +5,15 @@ from typing import Dict, Any, Union from pprint import pformat -from pygw.attrdict import AttrDict -from pygw.yaml_file import parse_j2yaml -from pygw.file_utils import FileHandler -from pygw.jinja import Jinja -from pygw.logger import logit -from pygw.task import Task -from pygw.timetools import add_to_datetime, to_timedelta -from pygw.exceptions import WorkflowException -from pygw.executable import Executable, which +from wxflow.attrdict import AttrDict +from wxflow.yaml_file import parse_j2yaml +from wxflow.file_utils import FileHandler +from wxflow.jinja import Jinja +from wxflow.logger import logit +from wxflow.task import Task +from wxflow.timetools import add_to_datetime, to_timedelta +from wxflow.exceptions import WorkflowException +from wxflow.executable import Executable, which logger = getLogger(__name__.split('.')[-1]) diff --git a/ush/python/pygfs/ufswm/gfs.py b/ush/python/pygfs/ufswm/gfs.py index f86164d706..fa1e271d92 100644 --- a/ush/python/pygfs/ufswm/gfs.py +++ b/ush/python/pygfs/ufswm/gfs.py @@ -1,7 +1,7 @@ import copy import logging -from pygw.logger import logit +from wxflow.logger import logit from pygfs.ufswm.ufs import UFS logger = logging.getLogger(__name__.split('.')[-1]) diff --git a/ush/python/pygfs/ufswm/ufs.py b/ush/python/pygfs/ufswm/ufs.py index a9118801b9..7ffbcd1a05 100644 --- a/ush/python/pygfs/ufswm/ufs.py +++ b/ush/python/pygfs/ufswm/ufs.py @@ -3,8 +3,8 @@ import logging from typing import Dict, Any -from pygw.template import Template, TemplateConstants -from pygw.logger import logit +from wxflow.template import Template, TemplateConstants +from wxflow.logger import logit logger = logging.getLogger(__name__.split('.')[-1]) diff --git a/ush/python/pygw/.gitignore b/ush/python/pygw/.gitignore deleted file mode 100644 index 13a1a9f851..0000000000 --- a/ush/python/pygw/.gitignore +++ /dev/null @@ -1,139 +0,0 @@ -# Byte-compiled / optimized / DLL files -__pycache__/ -*.py[cod] -*$py.class - -# C extensions -*.so - -# Distribution / packaging -.Python -build/ -develop-eggs/ -dist/ -downloads/ -eggs/ -.eggs/ -lib/ -lib64/ -parts/ -sdist/ -var/ -wheels/ -pip-wheel-metadata/ -share/python-wheels/ -*.egg-info/ -.installed.cfg -*.egg -MANIFEST - -# PyInstaller -# Usually these files are written by a python script from a template -# before PyInstaller builds the exe, so as to inject date/other infos into it. 
-*.manifest -*.spec - -# Installer logs -pip-log.txt -pip-delete-this-directory.txt - -# Unit test / coverage reports -htmlcov/ -.tox/ -.nox/ -.coverage -.coverage.* -.cache -nosetests.xml -coverage.xml -*.cover -*.py,cover -.hypothesis/ -.pytest_cache/ - -# Translations -*.mo -*.pot - -# Django stuff: -*.log -local_settings.py -db.sqlite3 -db.sqlite3-journal - -# Flask stuff: -instance/ -.webassets-cache - -# Scrapy stuff: -.scrapy - -# Sphinx documentation -docs/_build/ - -# PyBuilder -target/ - -# Jupyter Notebook -.ipynb_checkpoints - -# IPython -profile_default/ -ipython_config.py - -# pyenv -.python-version - -# pipenv -# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. -# However, in case of collaboration, if having platform-specific dependencies or dependencies -# having no cross-platform support, pipenv may install dependencies that don't work, or not -# install all needed dependencies. -#Pipfile.lock - -# PEP 582; used by e.g. github.com/David-OConnor/pyflow -__pypackages__/ - -# Celery stuff -celerybeat-schedule -celerybeat.pid - -# SageMath parsed files -*.sage.py - -# Environments -.env -.venv -env/ -venv/ -ENV/ -env.bak/ -venv.bak/ - -# Spyder project settings -.spyderproject -.spyproject - -# Rope project settings -.ropeproject - -# mkdocs documentation -/site - -# mypy -.mypy_cache/ -.dmypy.json -dmypy.json - -# Pyre type checker -.pyre/ - -# Sphinx documentation -docs/_build/ - -# Editor backup files (Emacs, vim) -*~ -*.sw[a-p] - -# Pycharm IDE files -.idea/ diff --git a/ush/python/pygw/README.md b/ush/python/pygw/README.md deleted file mode 100644 index 13db34471c..0000000000 --- a/ush/python/pygw/README.md +++ /dev/null @@ -1,36 +0,0 @@ -# global workflow specific tools - -Python tools specifically for global applications - -## Installation -Simple installation instructions -```sh -$> git clone https://github.com/noaa-emc/global-workflow -$> cd global-workflow/ush/python/pygw -$> pip install . -``` - -It is not required to install this package. Instead, -```sh -$> cd global-workflow/ush/python/pygw -$> export PYTHONPATH=$PWD/src/pygw -``` -would put this package in the `PYTHONPATH` - -### Note: -These instructions will be updated and the tools are under development. 
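One caveat worth making explicit: the directory placed on `PYTHONPATH` must be the one *containing* the `pygw` package, i.e. `src`, not `src/pygw` as written above; the rocoto job scripts earlier in this patch do exactly that (`${HOMEgfs}/ush/python/pygw/src`). A quick sanity check, using the `pygw_directory` attribute defined in `src/pygw/__init__.py` below:

```sh
$> cd global-workflow/ush/python/pygw
$> export PYTHONPATH=$PWD/src:$PYTHONPATH
$> python3 -c "import pygw; print(pygw.pygw_directory)"
```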
- -### Running python tests: -Simple instructions to enable executing pytests manually -```sh -# Create a python virtual environment and step into it -$> cd global-workflow/ush/python/pygw -$> python3 -m venv venv -$> source venv/bin/activate - -# Install pygw with the developer requirements -(venv) $> pip install .[dev] - -# Run pytests -(venv) $> pytest -v -``` diff --git a/ush/python/pygw/setup.cfg b/ush/python/pygw/setup.cfg deleted file mode 100644 index 1d45df0d76..0000000000 --- a/ush/python/pygw/setup.cfg +++ /dev/null @@ -1,62 +0,0 @@ -[metadata] -name = pygw -version = 0.0.1 -description = Global applications specific workflow related tools -long_description = file: README.md -long_description_content_type = text/markdown -author = "NOAA/NWS/NCEP/EMC" -#author_email = first.last@domain.tld -keywords = NOAA, NWS, NCEP, EMC, GFS, GEFS -home_page = https://github.com/noaa-emc/global-workflow -license = GNU Lesser General Public License -classifiers = - Development Status :: 1 - Beta - Intended Audience :: Developers - Intended Audience :: Science/Research - License :: OSI Approved :: GNU Lesser General Public License - Natural Language :: English - Operating System :: OS Independent - Programming Language :: Python - Programming Language :: Python :: 3 - Programming Language :: Python :: 3.6 - Programming Language :: Python :: 3.7 - Programming Language :: Python :: 3.8 - Programming Language :: Python :: 3.9 - Topic :: Software Development :: Libraries :: Python Modules - Operating System :: OS Independent - Typing :: Typed -project_urls = - Bug Tracker = https://github.com/noaa-emc/global-workflow/issues - CI = https://github.com/noaa-emc/global-workflow/actions - -[options] -zip_safe = False -include_package_data = True -package_dir = - =src -packages = find_namespace: -python_requires = >= 3.6 -setup_requires = - setuptools -install_requires = - numpy==1.21.6 - PyYAML==6.0 - Jinja2==3.1.2 -tests_require = - pytest - -[options.packages.find] -where=src - -[options.package_data] -* = *.txt, *.md - -[options.extras_require] -dev = pytest>=7; pytest-cov>=3 - -[green] -file-pattern = test_*.py -verbose = 2 -no-skip-report = true -quiet-stdout = true -run-coverage = true diff --git a/ush/python/pygw/setup.py b/ush/python/pygw/setup.py deleted file mode 100644 index e748ce0b71..0000000000 --- a/ush/python/pygw/setup.py +++ /dev/null @@ -1,4 +0,0 @@ -''' Standard file for building the package with Distutils. ''' - -import setuptools -setuptools.setup() diff --git a/ush/python/pygw/src/pygw/__init__.py b/ush/python/pygw/src/pygw/__init__.py deleted file mode 100644 index d44158004c..0000000000 --- a/ush/python/pygw/src/pygw/__init__.py +++ /dev/null @@ -1,8 +0,0 @@ -""" -Commonly used toolset for the global applications and beyond. -""" -__docformat__ = "restructuredtext" - -import os - -pygw_directory = os.path.dirname(__file__) diff --git a/ush/python/pygw/src/pygw/attrdict.py b/ush/python/pygw/src/pygw/attrdict.py deleted file mode 100644 index f2add20a19..0000000000 --- a/ush/python/pygw/src/pygw/attrdict.py +++ /dev/null @@ -1,171 +0,0 @@ -# attrdict is a Python module that gives you dictionaries whose values are both -# gettable and settable using attributes, in addition to standard item-syntax. 
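The header comment above is the whole contract; a tiny usage sketch of the class deleted below (the keys and values are made up):

```python
from pygw.attrdict import AttrDict  # becomes wxflow.attrdict after this patch

cfg = AttrDict({'a': {'b': 1}})  # nested dicts are converted recursively
assert cfg.a.b == 1              # attribute-style get
cfg.a.c = 2                      # attribute-style set
assert cfg['a']['c'] == 2        # standard item syntax still works
```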
-# https://github.com/mewwts/addict -# addict/addict.py -> attrdict.py -# hash: 7e8d23d -# License: MIT -# class Dict -> class AttrDict to prevent name collisions w/ typing.Dict - -import copy - -__all__ = ['AttrDict'] - - -class AttrDict(dict): - - def __init__(__self, *args, **kwargs): - object.__setattr__(__self, '__parent', kwargs.pop('__parent', None)) - object.__setattr__(__self, '__key', kwargs.pop('__key', None)) - object.__setattr__(__self, '__frozen', False) - for arg in args: - if not arg: - continue - elif isinstance(arg, dict): - for key, val in arg.items(): - __self[key] = __self._hook(val) - elif isinstance(arg, tuple) and (not isinstance(arg[0], tuple)): - __self[arg[0]] = __self._hook(arg[1]) - else: - for key, val in iter(arg): - __self[key] = __self._hook(val) - - for key, val in kwargs.items(): - __self[key] = __self._hook(val) - - def __setattr__(self, name, value): - if hasattr(self.__class__, name): - raise AttributeError("'AttrDict' object attribute " - "'{0}' is read-only".format(name)) - else: - self[name] = value - - def __setitem__(self, name, value): - isFrozen = (hasattr(self, '__frozen') and - object.__getattribute__(self, '__frozen')) - if isFrozen and name not in super(AttrDict, self).keys(): - raise KeyError(name) - if isinstance(value, dict): - value = AttrDict(value) - super(AttrDict, self).__setitem__(name, value) - try: - p = object.__getattribute__(self, '__parent') - key = object.__getattribute__(self, '__key') - except AttributeError: - p = None - key = None - if p is not None: - p[key] = self - object.__delattr__(self, '__parent') - object.__delattr__(self, '__key') - - def __add__(self, other): - if not self.keys(): - return other - else: - self_type = type(self).__name__ - other_type = type(other).__name__ - msg = "unsupported operand type(s) for +: '{}' and '{}'" - raise TypeError(msg.format(self_type, other_type)) - - @classmethod - def _hook(cls, item): - if isinstance(item, dict): - return cls(item) - elif isinstance(item, (list, tuple)): - return type(item)(cls._hook(elem) for elem in item) - return item - - def __getattr__(self, item): - return self.__getitem__(item) - - def __missing__(self, name): - if object.__getattribute__(self, '__frozen'): - raise KeyError(name) - return self.__class__(__parent=self, __key=name) - - def __delattr__(self, name): - del self[name] - - def to_dict(self): - base = {} - for key, value in self.items(): - if isinstance(value, type(self)): - base[key] = value.to_dict() - elif isinstance(value, (list, tuple)): - base[key] = type(value)( - item.to_dict() if isinstance(item, type(self)) else - item for item in value) - else: - base[key] = value - return base - - def copy(self): - return copy.copy(self) - - def deepcopy(self): - return copy.deepcopy(self) - - def __deepcopy__(self, memo): - other = self.__class__() - memo[id(self)] = other - for key, value in self.items(): - other[copy.deepcopy(key, memo)] = copy.deepcopy(value, memo) - return other - - def update(self, *args, **kwargs): - other = {} - if args: - if len(args) > 1: - raise TypeError() - other.update(args[0]) - other.update(kwargs) - for k, v in other.items(): - if ((k not in self) or - (not isinstance(self[k], dict)) or - (not isinstance(v, dict))): - self[k] = v - else: - self[k].update(v) - - def __getnewargs__(self): - return tuple(self.items()) - - def __getstate__(self): - return self - - def __setstate__(self, state): - self.update(state) - - def __or__(self, other): - if not isinstance(other, (AttrDict, dict)): - return NotImplemented - new 
= AttrDict(self) - new.update(other) - return new - - def __ror__(self, other): - if not isinstance(other, (AttrDict, dict)): - return NotImplemented - new = AttrDict(other) - new.update(self) - return new - - def __ior__(self, other): - self.update(other) - return self - - def setdefault(self, key, default=None): - if key in self: - return self[key] - else: - self[key] = default - return default - - def freeze(self, shouldFreeze=True): - object.__setattr__(self, '__frozen', shouldFreeze) - for key, val in self.items(): - if isinstance(val, AttrDict): - val.freeze(shouldFreeze) - - def unfreeze(self): - self.freeze(False) diff --git a/ush/python/pygw/src/pygw/configuration.py b/ush/python/pygw/src/pygw/configuration.py deleted file mode 100644 index da39a21748..0000000000 --- a/ush/python/pygw/src/pygw/configuration.py +++ /dev/null @@ -1,179 +0,0 @@ -import glob -import os -import random -import subprocess -from pathlib import Path -from pprint import pprint -from typing import Union, List, Dict, Any - -from pygw.attrdict import AttrDict -from pygw.timetools import to_datetime - -__all__ = ['Configuration', 'cast_as_dtype', 'cast_strdict_as_dtypedict'] - - -class ShellScriptException(Exception): - def __init__(self, scripts, errors): - self.scripts = scripts - self.errors = errors - super(ShellScriptException, self).__init__( - str(errors) + - ': error processing' + - (' '.join(scripts))) - - -class UnknownConfigError(Exception): - pass - - -class Configuration: - """ - Configuration parser for the global-workflow - (or generally for sourcing a shell script into a python dictionary) - """ - - def __init__(self, config_dir: Union[str, Path]): - """ - Given a directory containing config files (config.XYZ), - return a list of config_files minus the ones ending with ".default" - """ - - self.config_dir = config_dir - self.config_files = self._get_configs - - @property - def _get_configs(self) -> List[str]: - """ - Given a directory containing config files (config.XYZ), - return a list of config_files minus the ones ending with ".default" - """ - result = list() - for config in glob.glob(f'{self.config_dir}/config.*'): - if not config.endswith('.default'): - result.append(config) - - return result - - def find_config(self, config_name: str) -> str: - """ - Given a config file name, find the full path of the config file - """ - - for config in self.config_files: - if config_name == os.path.basename(config): - return config - - raise UnknownConfigError( - f'{config_name} does not exist (known: {repr(config_name)}), ABORT!') - - def parse_config(self, files: Union[str, bytes, list]) -> Dict[str, Any]: - """ - Given the name of config file(s), key-value pair of all variables in the config file(s) - are returned as a dictionary - :param files: config file or list of config files - :type files: list or str or unicode - :return: Key value pairs representing the environment variables defined - in the script. 
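In practice the class is driven with a directory of `config.*` shell scripts; a sketch of typical use (the directory name and the variable printed are hypothetical):

```python
from pygw.configuration import Configuration  # becomes wxflow.configuration after this patch

cfg = Configuration('parm/config')      # hypothetical directory holding config.* scripts
base = cfg.parse_config('config.base')  # source the script, capture its environment
print(base.HOMEgfs)                     # values come back typed, inside an AttrDict
```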
- :rtype: dict - """ - if isinstance(files, (str, bytes)): - files = [files] - files = [self.find_config(file) for file in files] - return cast_strdict_as_dtypedict(self._get_script_env(files)) - - def print_config(self, files: Union[str, bytes, list]) -> None: - """ - Given the name of config file(s), key-value pair of all variables in the config file(s) are printed - Same signature as parse_config - :param files: config file or list of config files - :type files: list or str or unicode - :return: None - """ - config = self.parse_config(files) - pprint(config, width=4) - - @classmethod - def _get_script_env(cls, scripts: List) -> Dict[str, Any]: - default_env = cls._get_shell_env([]) - and_script_env = cls._get_shell_env(scripts) - vars_just_in_script = set(and_script_env) - set(default_env) - union_env = dict(default_env) - union_env.update(and_script_env) - return dict([(v, union_env[v]) for v in vars_just_in_script]) - - @staticmethod - def _get_shell_env(scripts: List) -> Dict[str, Any]: - varbls = dict() - runme = ''.join([f'source {s} ; ' for s in scripts]) - magic = f'--- ENVIRONMENT BEGIN {random.randint(0,64**5)} ---' - runme += f'/bin/echo -n "{magic}" ; /usr/bin/env -0' - with open('/dev/null', 'w') as null: - env = subprocess.Popen(runme, shell=True, stdin=null.fileno(), - stdout=subprocess.PIPE) - (out, err) = env.communicate() - out = out.decode() - begin = out.find(magic) - if begin < 0: - raise ShellScriptException(scripts, 'Cannot find magic string; ' - 'at least one script failed: ' + repr(out)) - for entry in out[begin + len(magic):].split('\x00'): - iequal = entry.find('=') - varbls[entry[0:iequal]] = entry[iequal + 1:] - return varbls - - -def cast_strdict_as_dtypedict(ctx: Dict[str, str]) -> Dict[str, Any]: - """ - Environment variables are typically stored as str - This method attempts to translate those into datatypes - Parameters - ---------- - ctx : dict - dictionary with values as str - Returns - ------- - varbles : dict - dictionary with values as datatypes - """ - varbles = AttrDict() - for key, value in ctx.items(): - varbles[key] = cast_as_dtype(value) - return varbles - - -def cast_as_dtype(string: str) -> Union[str, int, float, bool, Any]: - """ - Cast a value into known datatype - Parameters - ---------- - string: str - Returns - ------- - value : str or int or float or datetime - default: str - """ - TRUTHS = ['y', 'yes', 't', 'true', '.t.', '.true.'] - BOOLS = ['n', 'no', 'f', 'false', '.f.', '.false.'] + TRUTHS - BOOLS = [x.upper() for x in BOOLS] + BOOLS + ['Yes', 'No', 'True', 'False'] - - def _cast_or_not(type: Any, string: str): - try: - return type(string) - except ValueError: - return string - - def _true_or_not(string: str): - try: - return string.lower() in TRUTHS - except AttributeError: - return string - - try: - return to_datetime(string) # Try as a datetime - except Exception as exc: - if string in BOOLS: # Likely a boolean, convert to True/False - return _true_or_not(string) - elif '.' 
in string: # Likely a number and that too a float - return _cast_or_not(float, string) - else: # Still could be a number, may be an integer - return _cast_or_not(int, string) diff --git a/ush/python/pygw/src/pygw/exceptions.py b/ush/python/pygw/src/pygw/exceptions.py deleted file mode 100644 index a97cba6406..0000000000 --- a/ush/python/pygw/src/pygw/exceptions.py +++ /dev/null @@ -1,87 +0,0 @@ -# pylint: disable=unused-argument - -# ---- - -from collections.abc import Callable - -from pygw.logger import Logger, logit - -logger = Logger(level="error", colored_log=True) - -__all__ = ["WorkflowException", "msg_except_handle"] - - -class WorkflowException(Exception): - """ - Description - ----------- - - This is the base-class for all exceptions; it is a sub-class of - Exceptions. - - Parameters - ---------- - - msg: str - - A Python string containing a message to accompany the - exception. - - """ - - @logit(logger) - def __init__(self: Exception, msg: str): - """ - Description - ----------- - - Creates a new WorkflowException object. - - """ - - # Define the base-class attributes. - logger.error(msg=msg) - super().__init__() - - -# ---- - - -def msg_except_handle(err_cls: object) -> Callable: - """ - Description - ----------- - - This function provides a decorator to be used to raise specified - exceptions. - - Parameters - ---------- - - err_cls: object - - A Python object containing the WorkflowException subclass to - be used for exception raises. - - Parameters - ---------- - - decorator: Callable - - A Python decorator. - - """ - - # Define the decorator function. - def decorator(func: Callable): - - # Execute the caller function; proceed accordingly. - def call_function(msg: str) -> None: - - # If an exception is encountered, raise the respective - # exception. - raise err_cls(msg=msg) - - return call_function - - return decorator diff --git a/ush/python/pygw/src/pygw/executable.py b/ush/python/pygw/src/pygw/executable.py deleted file mode 100644 index e9868b0214..0000000000 --- a/ush/python/pygw/src/pygw/executable.py +++ /dev/null @@ -1,357 +0,0 @@ -import os -import shlex -import subprocess -import sys -from typing import Any, Optional, Union, List - -__all__ = ["Executable", "which", "CommandNotFoundError"] - - -class Executable: - """ - Class representing a program that can be run on the command line. - - Example: - -------- - - >>> from pygw.executable import Executable - >>> cmd = Executable('srun') # Lets say we need to run command e.g. "srun" - >>> cmd.add_default_arg('my_exec.x') # Lets say we need to run the executable "my_exec.x" - >>> cmd.add_default_arg('my_arg.yaml') # Lets say we need to pass an argument to this executable e.g. "my_arg.yaml" - >>> cmd.add_default_env('OMP_NUM_THREADS', 4) # Lets say we want to run w/ 4 threads in the environment - >>> cmd(output='stdout', error='stderr') # Run the command and capture the stdout and stderr in files named similarly. - - `cmd` line above will translate to: - - $ export OMP_NUM_THREADS=4 - $ srun my_exec.x my_arg.yaml 1>&stdout 2>&stderr - - References - ---------- - .. [1] "spack.util.executable.py", https://github.com/spack/spack/blob/develop/lib/spack/spack/util/executable.py - """ - - def __init__(self, name: str): - """ - Construct an executable object. 
- - Parameters - ---------- - name : str - name of the executable to run - """ - self.exe = shlex.split(str(name)) - self.default_env = {} - self.returncode = None - - if not self.exe: - raise ProcessError(f"Cannot construct executable for '{name}'") - - def add_default_arg(self, arg: Union[str, List]) -> None: - """ - Add a default argument to the command. - Parameters - ---------- - arg : str - argument to the executable - """ - if isinstance(arg, list): - self.exe.extend(arg) - else: - self.exe.append(arg) - - def add_default_env(self, key: str, value: Any) -> None: - """ - Set an environment variable when the command is run. - - Parameters: - ---------- - key : str - The environment variable to set - value : Any - The value to set it to - """ - self.default_env[key] = str(value) - - @property - def command(self) -> str: - """ - The command-line string. - - Returns: - -------- - str : The executable and default arguments - """ - return " ".join(self.exe) - - @property - def name(self) -> str: - """ - The executable name. - - Returns: - -------- - str : The basename of the executable - """ - return os.path.basename(self.path) - - @property - def path(self) -> str: - """ - The path to the executable. - - Returns: - -------- - str : The path to the executable - """ - return self.exe[0] - - def __call__(self, *args, **kwargs): - """ - Run this executable in a subprocess. - - Parameters: - ----------- - *args (str): Command-line arguments to the executable to run - - Keyword Arguments: - ------------------ - _dump_env : Dict - Dict to be set to the environment actually - used (envisaged for testing purposes only) - env : Dict - The environment with which to run the executable - fail_on_error : bool - Raise an exception if the subprocess returns - an error. Default is True. The return code is available as - ``exe.returncode`` - ignore_errors : int or List - A list of error codes to ignore. - If these codes are returned, this process will not raise - an exception even if ``fail_on_error`` is set to ``True`` - input : - Where to read stdin from - output : - Where to send stdout - error : - Where to send stderr - - Accepted values for input, output, and error: - - * python streams, e.g. open Python file objects, or ``os.devnull`` - * filenames, which will be automatically opened for writing - * ``str``, as in the Python string type. If you set these to ``str``, - output and error will be written to pipes and returned as a string. - If both ``output`` and ``error`` are set to ``str``, then one string - is returned containing output concatenated with error. Not valid - for ``input`` - * ``str.split``, as in the ``split`` method of the Python string type. - Behaves the same as ``str``, except that value is also written to - ``stdout`` or ``stderr``. - - By default, the subprocess inherits the parent's file descriptors. - - """ - # Environment - env_arg = kwargs.get("env", None) - - # Setup default environment - env = os.environ.copy() if env_arg is None else {} - env.update(self.default_env) - - # Apply env argument - if env_arg: - env.update(env_arg) - - if "_dump_env" in kwargs: - kwargs["_dump_env"].clear() - kwargs["_dump_env"].update(env) - - fail_on_error = kwargs.pop("fail_on_error", True) - ignore_errors = kwargs.pop("ignore_errors", ()) - - # If they just want to ignore one error code, make it a tuple. 
- if isinstance(ignore_errors, int): - ignore_errors = (ignore_errors,) - - output = kwargs.pop("output", None) - error = kwargs.pop("error", None) - input = kwargs.pop("input", None) - - if input is str: - raise ValueError("Cannot use `str` as input stream.") - - def streamify(arg, mode): - if isinstance(arg, str): - return open(arg, mode), True - elif arg in (str, str.split): - return subprocess.PIPE, False - else: - return arg, False - - istream, close_istream = streamify(input, "r") - ostream, close_ostream = streamify(output, "w") - estream, close_estream = streamify(error, "w") - - cmd = self.exe + list(args) - - escaped_cmd = ["'%s'" % arg.replace("'", "'\"'\"'") for arg in cmd] - cmd_line_string = " ".join(escaped_cmd) - - proc = None # initialize to avoid lint warning - try: - proc = subprocess.Popen(cmd, stdin=istream, stderr=estream, stdout=ostream, env=env, close_fds=False) - out, err = proc.communicate() - - result = None - if output in (str, str.split) or error in (str, str.split): - result = "" - if output in (str, str.split): - outstr = str(out.decode("utf-8")) - result += outstr - if output is str.split: - sys.stdout.write(outstr) - if error in (str, str.split): - errstr = str(err.decode("utf-8")) - result += errstr - if error is str.split: - sys.stderr.write(errstr) - - rc = self.returncode = proc.returncode - if fail_on_error and rc != 0 and (rc not in ignore_errors): - long_msg = cmd_line_string - if result: - # If the output is not captured in the result, it will have - # been stored either in the specified files (e.g. if - # 'output' specifies a file) or written to the parent's - # stdout/stderr (e.g. if 'output' is not specified) - long_msg += "\n" + result - - raise ProcessError(f"Command exited with status {proc.returncode}:", long_msg) - - return result - - except OSError as e: - raise ProcessError(f"{self.exe[0]}: {e.strerror}", f"Command: {cmd_line_string}") - - except subprocess.CalledProcessError as e: - if fail_on_error: - raise ProcessError( - str(e), - f"\nExit status {proc.returncode} when invoking command: {cmd_line_string}", - ) - - finally: - if close_ostream: - ostream.close() - if close_estream: - estream.close() - if close_istream: - istream.close() - - def __eq__(self, other): - return hasattr(other, "exe") and self.exe == other.exe - - def __neq__(self, other): - return not (self == other) - - def __hash__(self): - return hash((type(self),) + tuple(self.exe)) - - def __repr__(self): - return f"" - - def __str__(self): - return " ".join(self.exe) - - -def which_string(*args, **kwargs) -> str: - """ - Like ``which()``, but return a string instead of an ``Executable``. - - If given multiple executables, returns the string of the first one that is found. - If no executables are found, returns None. - - Parameters: - ----------- - *args : str - One or more executables to search for - - Keyword Arguments: - ------------------ - path : str or List - The path to search. 
Defaults to ``PATH`` - required : bool - If set to True, raise an error if executable not found - - Returns: - -------- - str : - The first executable that is found in the path - """ - path = kwargs.get("path", os.environ.get("PATH", "")) - required = kwargs.get("required", False) - - if isinstance(path, str): - path = path.split(os.pathsep) - - for name in args: - for candidate_name in [name]: - if os.path.sep in candidate_name: - exe = os.path.abspath(candidate_name) - if os.path.isfile(exe) and os.access(exe, os.X_OK): - return exe - else: - for directory in path: - exe = os.path.join(directory, candidate_name) - if os.path.isfile(exe) and os.access(exe, os.X_OK): - return exe - - if required: - raise CommandNotFoundError(f"'{args[0]}' is required. Make sure it is in your PATH.") - - return None - - -def which(*args, **kwargs) -> Optional[Executable]: - """ - Finds an executable in the PATH like command-line which. - - If given multiple executables, returns the first one that is found. - If no executables are found, returns None. - - Parameters: - ----------- - *args : str - One or more executables to search for - - Keyword Arguments: - ------------------ - path : str or List - The path to search. Defaults to ``PATH`` - required : bool - If set to True, raise an error if executable not found - - Returns: - -------- - Executable: The first executable that is found in the path - """ - exe = which_string(*args, **kwargs) - return Executable(shlex.quote(exe)) if exe else None - - -class ProcessError(Exception): - """ - ProcessErrors are raised when Executables exit with an error code. - """ - def __init__(self, short_msg, long_msg=None): - self.short_msg = short_msg - self.long_msg = long_msg - message = short_msg + '\n' + long_msg if long_msg else short_msg - super().__init__(message) - - -class CommandNotFoundError(OSError): - """ - Raised when ``which()`` cannot find a required executable. - """ diff --git a/ush/python/pygw/src/pygw/factory.py b/ush/python/pygw/src/pygw/factory.py deleted file mode 100644 index 58b93928cc..0000000000 --- a/ush/python/pygw/src/pygw/factory.py +++ /dev/null @@ -1,134 +0,0 @@ -import sys - - -__all__ = ['Factory'] - - -class Factory: - """ - General Purpose Object Factory (Factory) to create all kinds of objects. - It provides methods to register a Builder and create concrete object - instances based on key value. - It also provides methods to check if a Builder is registered as well as - all the registered builders in the Factory. 
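A minimal sketch of the registry pattern this docstring describes (the `Circle` builder is invented for illustration):

```python
from pygw.factory import Factory  # becomes wxflow.factory after this patch


class Circle:
    """Hypothetical builder class."""
    def __init__(self, radius):
        self.radius = radius


shape_factory = Factory('Shape')  # also importable as pygw.factory.ShapeFactory
shape_factory.register('circle', Circle)
circle = shape_factory.create('circle', radius=2.0)
assert shape_factory.is_registered('circle') and circle.radius == 2.0
```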
- """ - - def __init__(self, name: str): - """ - Initialize an empty {name}Factory with no Builders - - - Parameters - ---------- - name : Name of factory - """ - self._name = f'{name}Factory' - self._builders = {} - - # Register {name}Factory as importable from pygw.factory - me = sys.modules[__name__] - if not hasattr(me, self._name): - setattr(me, self._name, self) - else: - raise AttributeError(f"{self._name} is already an importable object from {me}") - - def register(self, key: str, builder: object): - """ - Register a new builder in the Factory - - Parameters - ---------- - key: str - Name of the builder - - Returns - ------- - object: The class that will serve as the builder for this key - """ - if self.is_registered(key): - print(f'{key} is already a registered Builder in {self._name}') - return - self._builders[key] = builder - - def create(self, key: str, *args, **kwargs): - """ - Instantiate a registered Builder - - Parameters - ---------- - key: str - Name of the builder to use - args : tuple - Arguments to pass to the builder - kwargs : dict - Keyword arguments to pass to the builder - """ - if not self.is_registered(key): - raise KeyError( - f"{key} is not a registered builder in {self._name}.\n" + - "Available builders are:\n" + - f"{', '.join(self._builders.keys())}") - - return self._builders[key](*args, **kwargs) - - def destroy(self, key: str): - """ - Retire a registered builder from the Factory - Note: This will not delete the instance if it was created, just that - this Builder will no longer be able to work in the Factory - - Parameters - ---------- - key : str - Name of builder to unregister - """ - try: - del self._builders[key] - except KeyError: - print(f'{key} is not a registered Builder in {self._name}') - - @property - def registered(self): - """ - Return a set of all registered builders in the Factory - - Returns - ------- - set : All registered builders - - """ - return set(self._builders.keys()) - - def is_registered(self, key: str): - """ - Return True/False if a builder is registered in the Factory - - Parameters - ---------- - key : Name of builder to check - - Returns - ------- - bool : if builder is registered in the Factory - """ - return key in self._builders.keys() - - @classmethod - def get_factory(cls, name: str): - """ - Retrieve the named factory - - Parameters - ---------- - name : Name of factory - - Returns - ------- - Factory : Factory designated by the specified name - - """ - me = sys.modules[__name__] - try: - return getattr(me, name) - except AttributeError: - raise AttributeError(f"{name} is not a Factory in {me}") diff --git a/ush/python/pygw/src/pygw/file_utils.py b/ush/python/pygw/src/pygw/file_utils.py deleted file mode 100644 index a8220fcce1..0000000000 --- a/ush/python/pygw/src/pygw/file_utils.py +++ /dev/null @@ -1,77 +0,0 @@ -from logging import getLogger - -from .fsutils import cp, mkdir - -__all__ = ['FileHandler'] - -logger = getLogger(__name__.split('.')[-1]) - - -class FileHandler: - """Class to manipulate files in bulk for a given configuration - - Parameters - ---------- - config : dict - A dictionary containing the "action" and the "act" in the form of a list - - NOTE - ---- - "action" can be one of mkdir", "copy", etc. 
- Corresponding "act" would be ['dir1', 'dir2'], [['src1', 'dest1'], ['src2', 'dest2']] - - Attributes - ---------- - config : dict - Dictionary of files to manipulate - """ - - def __init__(self, config): - - self.config = config - - def sync(self): - """ - Method to execute bulk actions on files described in the configuration - """ - sync_factory = { - 'copy': self._copy_files, - 'mkdir': self._make_dirs, - } - # loop through the configuration keys - for action, files in self.config.items(): - sync_factory[action](files) - - @staticmethod - def _copy_files(filelist): - """Function to copy all files specified in the list - - `filelist` should be in the form: - - [src, dest] - - Parameters - ---------- - filelist : list - List of lists of [src, dest] - """ - for sublist in filelist: - if len(sublist) != 2: - raise Exception( - f"List must be of the form ['src', 'dest'], not {sublist}") - src = sublist[0] - dest = sublist[1] - cp(src, dest) - logger.info(f'Copied {src} to {dest}') - - @staticmethod - def _make_dirs(dirlist): - """Function to make all directories specified in the list - - Parameters - ---------- - dirlist : list - List of directories to create - """ - for dd in dirlist: - mkdir(dd) - logger.info(f'Created {dd}') diff --git a/ush/python/pygw/src/pygw/fsutils.py b/ush/python/pygw/src/pygw/fsutils.py deleted file mode 100644 index 23331a9bbd..0000000000 --- a/ush/python/pygw/src/pygw/fsutils.py +++ /dev/null @@ -1,87 +0,0 @@ -import os -import errno -import shutil -import contextlib - -__all__ = ['mkdir', 'mkdir_p', 'rmdir', 'chdir', 'rm_p', 'cp'] - - -def mkdir_p(path): - try: - os.makedirs(path) - except OSError as exc: - if exc.errno == errno.EEXIST and os.path.isdir(path): - pass - else: - raise OSError(f"unable to create directory at {path}") - - -mkdir = mkdir_p - - -def rmdir(dir_path): - try: - shutil.rmtree(dir_path) - except OSError as exc: - raise OSError(f"unable to remove {dir_path}") - - -@contextlib.contextmanager -def chdir(path): - """Change current working directory and yield. - Upon completion, the working directory is switched back to the directory at the time of call. 
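# A minimal sketch of FileHandler above; the /tmp paths are illustrative.
# Dict insertion order drives the order of actions, so list 'mkdir' before
# 'copy' when the copies target the new directories:

from pygw.file_utils import FileHandler

config = {
    'mkdir': ['/tmp/demo/dir1', '/tmp/demo/dir2'],
    'copy': [['/tmp/demo/src.txt', '/tmp/demo/dir1/src.txt']],
}
FileHandler(config).sync()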
- - Parameters - ---------- - path : str | os.PathLike - Directory to change to for operations - - Example - ------- - with chdir(path_to_cd_and_do_stuff): - do_thing_1 - do_thing_2 - """ - cwd = os.getcwd() - try: - os.chdir(path) - yield - finally: - print(f"WARNING: Unable to chdir({path})") # TODO: use logging - os.chdir(cwd) - - -def rm_p(path): - try: - os.unlink(path) - except OSError as exc: - if exc.errno == errno.ENOENT: - pass - else: - raise OSError(f"unable to remove {path}") - - -def cp(source: str, target: str) -> None: - """ - copy `source` file to `target` using `shutil.copyfile` - If `target` is a directory, then the filename from `source` is retained into the `target` - Parameters - ---------- - source : str - Source filename - target : str - Destination filename or directory - Returns - ------- - None - """ - - if os.path.isdir(target): - target = os.path.join(target, os.path.basename(source)) - - try: - shutil.copy2(source, target) - except OSError: - raise OSError(f"unable to copy {source} to {target}") - except Exception as exc: - raise Exception(exc) diff --git a/ush/python/pygw/src/pygw/jinja.py b/ush/python/pygw/src/pygw/jinja.py deleted file mode 100644 index 2f30367b75..0000000000 --- a/ush/python/pygw/src/pygw/jinja.py +++ /dev/null @@ -1,253 +0,0 @@ -import io -import os -import sys -import jinja2 -from markupsafe import Markup -from pathlib import Path -from typing import Dict - -from .timetools import strftime, to_YMDH, to_YMD, to_fv3time, to_isotime, to_julian - -__all__ = ['Jinja'] - - -@jinja2.pass_eval_context -class SilentUndefined(jinja2.Undefined): - """ - Description - ----------- - A Jinja2 undefined that does not raise an error when it is used in a - template. Instead, it returns the template back when the variable is not found - This class is not to be used outside of this file - Its purpose is to return the template instead of an empty string - Presently, it also does not return the filter applied to the variable. - This will be added later when a use case for it presents itself. - """ - def __str__(self): - return "{{ " + self._undefined_name + " }}" - - def __add__(self, other): - return str(self) + other - - def __radd__(self, other): - return other + str(self) - - def __mod__(self, other): - return str(self) % other - - def __call__(self, *args, **kwargs): - return Markup("{{ " + self._undefined_name + " }}") - - -class Jinja: - """ - Description - ----------- - A wrapper around jinja2 to render templates - """ - - def __init__(self, template_path_or_string: str, data: Dict, allow_missing: bool = True): - """ - Description - ----------- - Given a path to a (jinja2) template and a data object, substitute the - template file with data. - Allow for retaining missing or undefined variables. 
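# The fsutils helpers above in one illustrative sequence (paths are hypothetical):

import pathlib
from pygw.fsutils import chdir, cp, mkdir_p, rm_p

mkdir_p('/tmp/demo/out')                      # succeeds even if the directory already exists
pathlib.Path('/tmp/demo/src.txt').touch()
cp('/tmp/demo/src.txt', '/tmp/demo/out')      # directory target keeps the source filename
with chdir('/tmp/demo/out'):                  # cwd is restored when the block exits
    print(pathlib.Path.cwd())
rm_p('/tmp/demo/src.txt')                     # silent if the file is already gone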
- Parameters - ---------- - template_path_or_string : str - Path to the template file or a templated string - data : dict - Data to be substituted into the template - allow_missing : bool - If True, allow for missing or undefined variables - """ - - self.data = data - self.undefined = SilentUndefined if allow_missing else jinja2.StrictUndefined - - if os.path.isfile(template_path_or_string): - self.template_type = 'file' - self.template_path = Path(template_path_or_string) - else: - self.template_type = 'stream' - self.template_stream = template_path_or_string - - @property - def render(self, data: Dict = None) -> str: - """ - Description - ----------- - Render the Jinja2 template with the data - Parameters - ---------- - data: dict (optional) - Additional data to be used in the template - Not implemented yet. Placed here for future use - Returns - ------- - rendered: str - Rendered template into text - """ - - render_map = {'stream': self._render_stream, - 'file': self._render_file} - return render_map[self.template_type]() - - def get_set_env(self, loader: jinja2.BaseLoader, filters: Dict[str, callable] = None) -> jinja2.Environment: - """ - Description - ----------- - Define the environment for the jinja2 template - Any number of filters can be added here. - Optionally, a dictionary of external filters can be passed in - - Currently, the following filters are defined: - strftime: convert a datetime object to a string with a user defined format - to_isotime: convert a datetime object to an ISO 8601 string - to_fv3time: convert a datetime object to a FV3 time string - to_YMDH: convert a datetime object to a YYYYMMDDHH string - to_YMD: convert a datetime object to a YYYYMMDD string - to_julian: convert a datetime object to a julian day - to_f90bool: convert a boolean to a fortran boolean - getenv: read variable from enviornment if defined, else UNDEFINED - - Parameters - ---------- - loader: jinja2.BaseLoader - An instance of class jinja2.BaseLoader - filters: Dict[str, callable] (optional) - A dictionary of filters to be added to the environment - - Returns - ------- - env: jinja2.Environment - """ - - env = jinja2.Environment(loader=loader, undefined=self.undefined) - env.filters["strftime"] = lambda dt, fmt: strftime(dt, fmt) - env.filters["to_isotime"] = lambda dt: to_isotime(dt) if not isinstance(dt, SilentUndefined) else dt - env.filters["to_fv3time"] = lambda dt: to_fv3time(dt) if not isinstance(dt, SilentUndefined) else dt - env.filters["to_YMDH"] = lambda dt: to_YMDH(dt) if not isinstance(dt, SilentUndefined) else dt - env.filters["to_YMD"] = lambda dt: to_YMD(dt) if not isinstance(dt, SilentUndefined) else dt - env.filters["to_julian"] = lambda dt: to_julian(dt) if not isinstance(dt, SilentUndefined) else dt - env.filters["to_f90bool"] = lambda bool: ".true." if bool else ".false." - env.filters['getenv'] = lambda name, default='UNDEFINED': os.environ.get(name, default) - - # Add any additional filters - if filters is not None: - for filter_name, filter_func in filters.items(): - env.filters[filter_name] = filter_func - - return env - - @staticmethod - def add_filter_env(env: jinja2.Environment, filter_name: str, filter_func: callable): - """ - Description - ----------- - Add a custom filter to the jinja2 environment - Not implemented yet. 
Placed here for future use - Parameters - ---------- - env: jinja2.Environment - Active jinja2 environment - filter_name: str - name of the filter - filter_func: callable - function that will be called - Returns - ------- - env: jinja2.Environment - Active jinja2 environment with the new filter added - """ - - env.filters[filter_name] = filter_func - - return env - - def _render_stream(self, filters: Dict[str, callable] = None): - loader = jinja2.BaseLoader() - env = self.get_set_env(loader, filters) - template = env.from_string(self.template_stream) - return self._render_template(template) - - def _render_file(self, data: Dict = None, filters: Dict[str, callable] = None): - template_dir = self.template_path.parent - template_file = self.template_path.relative_to(template_dir) - - loader = jinja2.FileSystemLoader(template_dir) - env = self.get_set_env(loader, filters) - template = env.get_template(str(template_file)) - return self._render_template(template) - - def _render_template(self, template: jinja2.Template): - """ - Description - ----------- - Render a jinja2 template object - Parameters - ---------- - template: jinja2.Template - - Returns - ------- - rendered: str - """ - try: - rendered = template.render(**self.data) - except jinja2.UndefinedError as ee: - raise Exception(f"Undefined variable in Jinja2 template\n{ee}") - - return rendered - - def _render(self, template_name: str, loader: jinja2.BaseLoader) -> str: - """ - Description - ----------- - Internal method to render a jinja2 template - Parameters - ---------- - template_name: str - loader: jinja2.BaseLoader - Returns - ------- - rendered: str - rendered template - """ - env = jinja2.Environment(loader=loader, undefined=self.undefined) - template = env.get_template(template_name) - try: - rendered = template.render(**self.data) - except jinja2.UndefinedError as ee: - raise Exception(f"Undefined variable in Jinja2 template\n{ee}") - - return rendered - - def save(self, output_file: str) -> None: - """ - Description - ----------- - Render and save the output to a file - Parameters - ---------- - output_file: str - Path to the output file - Returns - ------- - None - """ - with open(output_file, 'wb') as fh: - fh.write(self.render.encode("utf-8")) - - def dump(self) -> None: - """ - Description - ----------- - Render and dump the output to stdout - Returns - ------- - None - """ - io.TextIOWrapper(sys.stdout.buffer, - encoding="utf-8").write(self.render) diff --git a/ush/python/pygw/src/pygw/logger.py b/ush/python/pygw/src/pygw/logger.py deleted file mode 100644 index 1bf2ed2985..0000000000 --- a/ush/python/pygw/src/pygw/logger.py +++ /dev/null @@ -1,275 +0,0 @@ -""" -Logger -""" - -import os -import sys -from functools import wraps -from pathlib import Path -from typing import Union, List -import logging - - -class ColoredFormatter(logging.Formatter): - """ - Logging colored formatter - adapted from https://stackoverflow.com/a/56944256/3638629 - """ - - grey = '\x1b[38;21m' - blue = '\x1b[38;5;39m' - yellow = '\x1b[38;5;226m' - red = '\x1b[38;5;196m' - bold_red = '\x1b[31;1m' - reset = '\x1b[0m' - - def __init__(self, fmt): - super().__init__() - self.fmt = fmt - self.formats = { - logging.DEBUG: self.blue + self.fmt + self.reset, - logging.INFO: self.grey + self.fmt + self.reset, - logging.WARNING: self.yellow + self.fmt + self.reset, - logging.ERROR: self.red + self.fmt + self.reset, - logging.CRITICAL: self.bold_red + self.fmt + self.reset - } - - def format(self, record): - log_fmt = self.formats.get(record.levelno) - 
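# A small sketch of the Jinja wrapper defined in jinja.py above. Note that
# `render` is a property (read without parentheses) and that, with the default
# allow_missing=True, undefined variables are echoed back into the output:

from datetime import datetime
from pygw.jinja import Jinja

data = {'name': 'gfs', 'cycle': datetime(2023, 7, 2, 0)}
text = "run {{ name }} at {{ cycle | to_YMDH }} on grid {{ res }}"
print(Jinja(text, data).render)
# -> run gfs at 2023070200 on grid {{ res }}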
formatter = logging.Formatter(log_fmt) - return formatter.format(record) - - -class Logger: - """ - Improved logging - """ - LOG_LEVELS = ['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'] - DEFAULT_LEVEL = 'INFO' - DEFAULT_FORMAT = '%(asctime)s - %(levelname)-8s - %(name)-12s: %(message)s' - - def __init__(self, name: str = None, - level: str = os.environ.get("LOGGING_LEVEL"), - _format: str = DEFAULT_FORMAT, - colored_log: bool = False, - logfile_path: Union[str, Path] = None): - """ - Initialize Logger - - Parameters - ---------- - name : str - Name of the Logger object - default : None - level : str - Desired Logging level - default : 'INFO' - _format : str - Desired Logging Format - default : '%(asctime)s - %(levelname)-8s - %(name)-12s: %(message)s' - colored_log : bool - Use colored logging for stdout - default: False - logfile_path : str or Path - Path for logging to a file - default : None - """ - - self.name = name - self.level = level.upper() if level else Logger.DEFAULT_LEVEL - self.format = _format - self.colored_log = colored_log - - if self.level not in Logger.LOG_LEVELS: - raise LookupError(f"{self.level} is unknown logging level\n" + - f"Currently supported log levels are:\n" + - f"{' | '.join(Logger.LOG_LEVELS)}") - - # Initialize the root logger if no name is present - self._logger = logging.getLogger(name) if name else logging.getLogger() - - self._logger.setLevel(self.level) - - _handlers = [] - # Add console handler for logger - _handler = Logger.add_stream_handler( - level=self.level, - _format=self.format, - colored_log=self.colored_log, - ) - _handlers.append(_handler) - self._logger.addHandler(_handler) - - # Add file handler for logger - if logfile_path is not None: - _handler = Logger.add_file_handler( - logfile_path, level=self.level, _format=self.format) - self._logger.addHandler(_handler) - _handlers.append(_handler) - - def __getattr__(self, attribute): - """ - Allows calling logging module methods directly - - Parameters - ---------- - attribute : str - attribute name of a logging object - - Returns - ------- - attribute : logging attribute - """ - return getattr(self._logger, attribute) - - def get_logger(self): - """ - Return the logging object - - Returns - ------- - logger : Logger object - """ - return self._logger - - @classmethod - def add_handlers(cls, logger: logging.Logger, handlers: List[logging.Handler]): - """ - Add a list of handlers to a logger - - Parameters - ---------- - logger : logging.Logger - Logger object to add a new handler to - handlers: list - A list of handlers to be added to the logger object - - Returns - ------- - logger : Logger object - """ - for handler in handlers: - logger.addHandler(handler) - - return logger - - @classmethod - def add_stream_handler(cls, level: str = DEFAULT_LEVEL, - _format: str = DEFAULT_FORMAT, - colored_log: bool = False): - """ - Create stream handler - This classmethod will allow setting a custom stream handler on children - - Parameters - ---------- - level : str - logging level - default : 'INFO' - _format : str - logging format - default : '%(asctime)s - %(levelname)-8s - %(name)-12s: %(message)s' - colored_log : bool - enable colored output for stdout - default : False - - Returns - ------- - handler : logging.Handler - stream handler of a logging object - """ - - handler = logging.StreamHandler(sys.stdout) - handler.setLevel(level) - _format = ColoredFormatter( - _format) if colored_log else logging.Formatter(_format) - handler.setFormatter(_format) - - return handler - - @classmethod - def 
add_file_handler(cls, logfile_path: Union[str, Path], - level: str = DEFAULT_LEVEL, - _format: str = DEFAULT_FORMAT): - """ - Create file handler. - This classmethod will allow setting custom file handler on children - Create stream handler - This classmethod will allow setting a custom stream handler on children - - Parameters - ---------- - logfile_path: str or Path - Path for writing out logfiles from logging - default : False - level : str - logging level - default : 'INFO' - _format : str - logging format - default : '%(asctime)s - %(levelname)-8s - %(name)-12s: %(message)s' - - Returns - ------- - handler : logging.Handler - file handler of a logging object - """ - - logfile_path = Path(logfile_path) - - # Create the directory containing the logfile_path - if not logfile_path.parent.is_dir(): - logfile_path.mkdir(parents=True, exist_ok=True) - - handler = logging.FileHandler(str(logfile_path)) - handler.setLevel(level) - handler.setFormatter(logging.Formatter(_format)) - - return handler - - -def logit(logger, name=None, message=None): - """ - Logger decorator to add logging to a function. - Simply add: - @logit(logger) before any function - Parameters - ---------- - logger : Logger - Logger object - name : str - Name of the module to be logged - default: __module__ - message : str - Name of the function to be logged - default: __name__ - """ - - def decorate(func): - - log_name = name if name else func.__module__ - log_msg = message if message else log_name + "." + func.__name__ - - @wraps(func) - def wrapper(*args, **kwargs): - - passed_args = [repr(aa) for aa in args] - passed_kwargs = [f"{kk}={repr(vv)}" for kk, vv in list(kwargs.items())] - - call_msg = 'BEGIN: ' + log_msg - logger.info(call_msg) - logger.debug(f"( {', '.join(passed_args + passed_kwargs)} )") - - # Call the function - retval = func(*args, **kwargs) - - # Close the logging with printing the return val - ret_msg = ' END: ' + log_msg - logger.info(ret_msg) - logger.debug(f" returning: {retval}") - - return retval - - return wrapper - - return decorate diff --git a/ush/python/pygw/src/pygw/schema.py b/ush/python/pygw/src/pygw/schema.py deleted file mode 100644 index 2a46c62f01..0000000000 --- a/ush/python/pygw/src/pygw/schema.py +++ /dev/null @@ -1,887 +0,0 @@ -"""schema is a library for validating Python data structures, such as those -obtained from config-files, forms, external services or command-line -parsing, converted from JSON/YAML (or something else) to Python data-types.""" - -import inspect -import re - -from typing import Dict -from pydoc import locate - -try: - from contextlib import ExitStack -except ImportError: - from contextlib2 import ExitStack - - -__version__ = "0.7.5" -__all__ = [ - "Schema", - "And", - "Or", - "Regex", - "Optional", - "Use", - "Forbidden", - "Const", - "Literal", - "SchemaError", - "SchemaWrongKeyError", - "SchemaMissingKeyError", - "SchemaForbiddenKeyError", - "SchemaUnexpectedTypeError", - "SchemaOnlyOneAllowedError", -] - - -class SchemaError(Exception): - """Error during Schema validation.""" - - def __init__(self, autos, errors=None): - self.autos = autos if type(autos) is list else [autos] - self.errors = errors if type(errors) is list else [errors] - Exception.__init__(self, self.code) - - @property - def code(self): - """ - Removes duplicates values in auto and error list. - parameters. - """ - - def uniq(seq): - """ - Utility function that removes duplicate. - """ - seen = set() - seen_add = seen.add - # This way removes duplicates while preserving the order. 
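# The Logger class and logit decorator from logger.py above, in a minimal sketch:

from pygw.logger import Logger, logit

logger = Logger('demo', level='DEBUG', colored_log=True)

@logit(logger)
def add(x, y):
    return x + y

add(2, 3)   # INFO lines "BEGIN: ..." / " END: ..."; arguments and return value at DEBUG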
- return [x for x in seq if x not in seen and not seen_add(x)] - - data_set = uniq(i for i in self.autos if i is not None) - error_list = uniq(i for i in self.errors if i is not None) - if error_list: - return "\n".join(error_list) - return "\n".join(data_set) - - -class SchemaWrongKeyError(SchemaError): - """Error Should be raised when an unexpected key is detected within the - data set being.""" - - pass - - -class SchemaMissingKeyError(SchemaError): - """Error should be raised when a mandatory key is not found within the - data set being validated""" - - pass - - -class SchemaOnlyOneAllowedError(SchemaError): - """Error should be raised when an only_one Or key has multiple matching candidates""" - - pass - - -class SchemaForbiddenKeyError(SchemaError): - """Error should be raised when a forbidden key is found within the - data set being validated, and its value matches the value that was specified""" - - pass - - -class SchemaUnexpectedTypeError(SchemaError): - """Error should be raised when a type mismatch is detected within the - data set being validated.""" - - pass - - -class And(object): - """ - Utility function to combine validation directives in AND Boolean fashion. - """ - - def __init__(self, *args, **kw): - self._args = args - if not set(kw).issubset({"error", "schema", "ignore_extra_keys"}): - diff = {"error", "schema", "ignore_extra_keys"}.difference(kw) - raise TypeError("Unknown keyword arguments %r" % list(diff)) - self._error = kw.get("error") - self._ignore_extra_keys = kw.get("ignore_extra_keys", False) - # You can pass your inherited Schema class. - self._schema = kw.get("schema", Schema) - - def __repr__(self): - return "%s(%s)" % (self.__class__.__name__, ", ".join(repr(a) for a in self._args)) - - @property - def args(self): - """The provided parameters""" - return self._args - - def validate(self, data, **kwargs): - """ - Validate data using defined sub schema/expressions ensuring all - values are valid. - :param data: to be validated with sub defined schemas. - :return: returns validated data - """ - for s in [self._schema(s, error=self._error, ignore_extra_keys=self._ignore_extra_keys) for s in self._args]: - data = s.validate(data, **kwargs) - return data - - -class Or(And): - """Utility function to combine validation directives in a OR Boolean - fashion.""" - - def __init__(self, *args, **kwargs): - self.only_one = kwargs.pop("only_one", False) - self.match_count = 0 - super(Or, self).__init__(*args, **kwargs) - - def reset(self): - failed = self.match_count > 1 and self.only_one - self.match_count = 0 - if failed: - raise SchemaOnlyOneAllowedError(["There are multiple keys present " + "from the %r condition" % self]) - - def validate(self, data, **kwargs): - """ - Validate data using sub defined schema/expressions ensuring at least - one value is valid. - :param data: data to be validated by provided schema. - :return: return validated data if not validation - """ - autos, errors = [], [] - for s in [self._schema(s, error=self._error, ignore_extra_keys=self._ignore_extra_keys) for s in self._args]: - try: - validation = s.validate(data, **kwargs) - self.match_count += 1 - if self.match_count > 1 and self.only_one: - break - return validation - except SchemaError as _x: - autos += _x.autos - errors += _x.errors - raise SchemaError( - ["%r did not validate %r" % (self, data)] + autos, - [self._error.format(data) if self._error else None] + errors, - ) - - -class Regex(object): - """ - Enables schema.py to validate string using regular expressions. 
- """ - - # Map all flags bits to a more readable description - NAMES = [ - "re.ASCII", - "re.DEBUG", - "re.VERBOSE", - "re.UNICODE", - "re.DOTALL", - "re.MULTILINE", - "re.LOCALE", - "re.IGNORECASE", - "re.TEMPLATE", - ] - - def __init__(self, pattern_str, flags=0, error=None): - self._pattern_str = pattern_str - flags_list = [ - Regex.NAMES[i] for i, f in enumerate("{0:09b}".format(int(flags))) if f != "0" - ] # Name for each bit - - if flags_list: - self._flags_names = ", flags=" + "|".join(flags_list) - else: - self._flags_names = "" - - self._pattern = re.compile(pattern_str, flags=flags) - self._error = error - - def __repr__(self): - return "%s(%r%s)" % (self.__class__.__name__, self._pattern_str, self._flags_names) - - @property - def pattern_str(self): - """The pattern for the represented regular expression""" - return self._pattern_str - - def validate(self, data, **kwargs): - """ - Validated data using defined regex. - :param data: data to be validated - :return: return validated data. - """ - e = self._error - - try: - if self._pattern.search(data): - return data - else: - raise SchemaError("%r does not match %r" % (self, data), e.format(data) if e else None) - except TypeError: - raise SchemaError("%r is not string nor buffer" % data, e) - - -class Use(object): - """ - For more general use cases, you can use the Use class to transform - the data while it is being validate. - """ - - def __init__(self, callable_, error=None): - if not callable(callable_): - raise TypeError("Expected a callable, not %r" % callable_) - self._callable = callable_ - self._error = error - - def __repr__(self): - return "%s(%r)" % (self.__class__.__name__, self._callable) - - def validate(self, data, **kwargs): - try: - return self._callable(data) - except SchemaError as x: - raise SchemaError([None] + x.autos, [self._error.format(data) if self._error else None] + x.errors) - except BaseException as x: - f = _callable_str(self._callable) - raise SchemaError("%s(%r) raised %r" % (f, data, x), self._error.format(data) if self._error else None) - - -COMPARABLE, CALLABLE, VALIDATOR, TYPE, DICT, ITERABLE = range(6) - - -def _priority(s): - """Return priority for a given object.""" - if type(s) in (list, tuple, set, frozenset): - return ITERABLE - if type(s) is dict: - return DICT - if issubclass(type(s), type): - return TYPE - if isinstance(s, Literal): - return COMPARABLE - if hasattr(s, "validate"): - return VALIDATOR - if callable(s): - return CALLABLE - else: - return COMPARABLE - - -def _invoke_with_optional_kwargs(f, **kwargs): - s = inspect.signature(f) - if len(s.parameters) == 0: - return f() - return f(**kwargs) - - -class Schema(object): - """ - Entry point of the library, use this class to instantiate validation - schema for the data that will be validated. 
- """ - - def __init__(self, schema, error=None, ignore_extra_keys=False, name=None, description=None, as_reference=False): - self._schema = schema - self._error = error - self._ignore_extra_keys = ignore_extra_keys - self._name = name - self._description = description - # Ask json_schema to create a definition for this schema and use it as part of another - self.as_reference = as_reference - if as_reference and name is None: - raise ValueError("Schema used as reference should have a name") - - def __repr__(self): - return "%s(%r)" % (self.__class__.__name__, self._schema) - - @property - def schema(self): - return self._schema - - @property - def description(self): - return self._description - - @property - def name(self): - return self._name - - @property - def ignore_extra_keys(self): - return self._ignore_extra_keys - - @staticmethod - def _dict_key_priority(s): - """Return priority for a given key object.""" - if isinstance(s, Hook): - return _priority(s._schema) - 0.5 - if isinstance(s, Optional): - return _priority(s._schema) + 0.5 - return _priority(s) - - @staticmethod - def _is_optional_type(s): - """Return True if the given key is optional (does not have to be found)""" - return any(isinstance(s, optional_type) for optional_type in [Optional, Hook]) - - def is_valid(self, data, **kwargs): - """Return whether the given data has passed all the validations - that were specified in the given schema. - """ - try: - self.validate(data, **kwargs) - except SchemaError: - return False - else: - return True - - def _prepend_schema_name(self, message): - """ - If a custom schema name has been defined, prepends it to the error - message that gets raised when a schema error occurs. - """ - if self._name: - message = "{0!r} {1!s}".format(self._name, message) - return message - - def validate(self, data, **kwargs): - Schema = self.__class__ - s = self._schema - e = self._error - i = self._ignore_extra_keys - - if isinstance(s, Literal): - s = s.schema - - flavor = _priority(s) - if flavor == ITERABLE: - data = Schema(type(s), error=e).validate(data, **kwargs) - o = Or(*s, error=e, schema=Schema, ignore_extra_keys=i) - return type(data)(o.validate(d, **kwargs) for d in data) - if flavor == DICT: - exitstack = ExitStack() - data = Schema(dict, error=e).validate(data, **kwargs) - new = type(data)() # new - is a dict of the validated values - coverage = set() # matched schema keys - # for each key and value find a schema entry matching them, if any - sorted_skeys = sorted(s, key=self._dict_key_priority) - for skey in sorted_skeys: - if hasattr(skey, "reset"): - exitstack.callback(skey.reset) - - with exitstack: - # Evaluate dictionaries last - data_items = sorted(data.items(), key=lambda value: isinstance(value[1], dict)) - for key, value in data_items: - for skey in sorted_skeys: - svalue = s[skey] - try: - nkey = Schema(skey, error=e).validate(key, **kwargs) - except SchemaError: - pass - else: - if isinstance(skey, Hook): - # As the content of the value makes little sense for - # keys with a hook, we reverse its meaning: - # we will only call the handler if the value does match - # In the case of the forbidden key hook, - # we will raise the SchemaErrorForbiddenKey exception - # on match, allowing for excluding a key only if its - # value has a certain type, and allowing Forbidden to - # work well in combination with Optional. 
- try: - nvalue = Schema(svalue, error=e).validate(value, **kwargs) - except SchemaError: - continue - skey.handler(nkey, data, e) - else: - try: - nvalue = Schema(svalue, error=e, ignore_extra_keys=i).validate(value, **kwargs) - except SchemaError as x: - k = "Key '%s' error:" % nkey - message = self._prepend_schema_name(k) - raise SchemaError([message] + x.autos, [e.format(data) if e else None] + x.errors) - else: - new[nkey] = nvalue - coverage.add(skey) - break - required = set(k for k in s if not self._is_optional_type(k)) - if not required.issubset(coverage): - missing_keys = required - coverage - s_missing_keys = ", ".join(repr(k) for k in sorted(missing_keys, key=repr)) - message = "Missing key%s: %s" % (_plural_s(missing_keys), s_missing_keys) - message = self._prepend_schema_name(message) - raise SchemaMissingKeyError(message, e.format(data) if e else None) - if not self._ignore_extra_keys and (len(new) != len(data)): - wrong_keys = set(data.keys()) - set(new.keys()) - s_wrong_keys = ", ".join(repr(k) for k in sorted(wrong_keys, key=repr)) - message = "Wrong key%s %s in %r" % (_plural_s(wrong_keys), s_wrong_keys, data) - message = self._prepend_schema_name(message) - raise SchemaWrongKeyError(message, e.format(data) if e else None) - - # Apply default-having optionals that haven't been used: - defaults = set(k for k in s if isinstance(k, Optional) and hasattr(k, "default")) - coverage - for default in defaults: - new[default.key] = _invoke_with_optional_kwargs(default.default, **kwargs) if callable(default.default) else default.default - - return new - if flavor == TYPE: - if isinstance(data, s) and not (isinstance(data, bool) and s == int): - return data - else: - message = "%r should be instance of %r" % (data, s.__name__) - message = self._prepend_schema_name(message) - raise SchemaUnexpectedTypeError(message, e.format(data) if e else None) - if flavor == VALIDATOR: - try: - return s.validate(data, **kwargs) - except SchemaError as x: - raise SchemaError([None] + x.autos, [e.format(data) if e else None] + x.errors) - except BaseException as x: - message = "%r.validate(%r) raised %r" % (s, data, x) - message = self._prepend_schema_name(message) - raise SchemaError(message, e.format(data) if e else None) - if flavor == CALLABLE: - f = _callable_str(s) - try: - if s(data): - return data - except SchemaError as x: - raise SchemaError([None] + x.autos, [e.format(data) if e else None] + x.errors) - except BaseException as x: - message = "%s(%r) raised %r" % (f, data, x) - message = self._prepend_schema_name(message) - raise SchemaError(message, e.format(data) if e else None) - message = "%s(%r) should evaluate to True" % (f, data) - message = self._prepend_schema_name(message) - raise SchemaError(message, e.format(data) if e else None) - if s == data: - return data - else: - message = "%r does not match %r" % (s, data) - message = self._prepend_schema_name(message) - raise SchemaError(message, e.format(data) if e else None) - - def json_schema(self, schema_id, use_refs=False, **kwargs): - """Generate a draft-07 JSON schema dict representing the Schema. - This method must be called with a schema_id. - - :param schema_id: The value of the $id on the main schema - :param use_refs: Enable reusing object references in the resulting JSON schema. 
- Schemas with references are harder to read by humans, but are a lot smaller when there - is a lot of reuse - """ - - seen = dict() # For use_refs - definitions_by_name = {} - - def _json_schema(schema, is_main_schema=True, description=None, allow_reference=True): - Schema = self.__class__ - - def _create_or_use_ref(return_dict): - """If not already seen, return the provided part of the schema unchanged. - If already seen, give an id to the already seen dict and return a reference to the previous part - of the schema instead. - """ - if not use_refs or is_main_schema: - return return_schema - - hashed = hash(repr(sorted(return_dict.items()))) - - if hashed not in seen: - seen[hashed] = return_dict - return return_dict - else: - id_str = "#" + str(hashed) - seen[hashed]["$id"] = id_str - return {"$ref": id_str} - - def _get_type_name(python_type): - """Return the JSON schema name for a Python type""" - if python_type == str: - return "string" - elif python_type == int: - return "integer" - elif python_type == float: - return "number" - elif python_type == bool: - return "boolean" - elif python_type == list: - return "array" - elif python_type == dict: - return "object" - return "string" - - def _to_json_type(value): - """Attempt to convert a constant value (for "const" and "default") to a JSON serializable value""" - if value is None or type(value) in (str, int, float, bool, list, dict): - return value - - if type(value) in (tuple, set, frozenset): - return list(value) - - if isinstance(value, Literal): - return value.schema - - return str(value) - - def _to_schema(s, ignore_extra_keys): - if not isinstance(s, Schema): - return Schema(s, ignore_extra_keys=ignore_extra_keys) - - return s - - s = schema.schema - i = schema.ignore_extra_keys - flavor = _priority(s) - - return_schema = {} - - return_description = description or schema.description - if return_description: - return_schema["description"] = return_description - - # Check if we have to create a common definition and use as reference - if allow_reference and schema.as_reference: - # Generate sub schema if not already done - if schema.name not in definitions_by_name: - definitions_by_name[schema.name] = {} # Avoid infinite loop - definitions_by_name[schema.name] = _json_schema(schema, is_main_schema=False, allow_reference=False) - - return_schema["$ref"] = "#/definitions/" + schema.name - else: - if flavor == TYPE: - # Handle type - return_schema["type"] = _get_type_name(s) - elif flavor == ITERABLE: - # Handle arrays or dict schema - - return_schema["type"] = "array" - if len(s) == 1: - return_schema["items"] = _json_schema(_to_schema(s[0], i), is_main_schema=False) - elif len(s) > 1: - return_schema["items"] = _json_schema(Schema(Or(*s)), is_main_schema=False) - elif isinstance(s, Or): - # Handle Or values - - # Check if we can use an enum - if all(priority == COMPARABLE for priority in [_priority(value) for value in s.args]): - or_values = [str(s) if isinstance(s, Literal) else s for s in s.args] - # All values are simple, can use enum or const - if len(or_values) == 1: - return_schema["const"] = _to_json_type(or_values[0]) - return return_schema - return_schema["enum"] = or_values - else: - # No enum, let's go with recursive calls - any_of_values = [] - for or_key in s.args: - new_value = _json_schema(_to_schema(or_key, i), is_main_schema=False) - if new_value != {} and new_value not in any_of_values: - any_of_values.append(new_value) - if len(any_of_values) == 1: - # Only one representable condition remains, do not put under 
anyOf - return_schema.update(any_of_values[0]) - else: - return_schema["anyOf"] = any_of_values - elif isinstance(s, And): - # Handle And values - all_of_values = [] - for and_key in s.args: - new_value = _json_schema(_to_schema(and_key, i), is_main_schema=False) - if new_value != {} and new_value not in all_of_values: - all_of_values.append(new_value) - if len(all_of_values) == 1: - # Only one representable condition remains, do not put under allOf - return_schema.update(all_of_values[0]) - else: - return_schema["allOf"] = all_of_values - elif flavor == COMPARABLE: - return_schema["const"] = _to_json_type(s) - elif flavor == VALIDATOR and type(s) == Regex: - return_schema["type"] = "string" - return_schema["pattern"] = s.pattern_str - else: - if flavor != DICT: - # If not handled, do not check - return return_schema - - # Schema is a dict - - required_keys = [] - expanded_schema = {} - additional_properties = i - for key in s: - if isinstance(key, Hook): - continue - - def _key_allows_additional_properties(key): - """Check if a key is broad enough to allow additional properties""" - if isinstance(key, Optional): - return _key_allows_additional_properties(key.schema) - - return key == str or key == object - - def _get_key_description(key): - """Get the description associated to a key (as specified in a Literal object). Return None if not a Literal""" - if isinstance(key, Optional): - return _get_key_description(key.schema) - - if isinstance(key, Literal): - return key.description - - return None - - def _get_key_name(key): - """Get the name of a key (as specified in a Literal object). Return the key unchanged if not a Literal""" - if isinstance(key, Optional): - return _get_key_name(key.schema) - - if isinstance(key, Literal): - return key.schema - - return key - - additional_properties = additional_properties or _key_allows_additional_properties(key) - sub_schema = _to_schema(s[key], ignore_extra_keys=i) - key_name = _get_key_name(key) - - if isinstance(key_name, str): - if not isinstance(key, Optional): - required_keys.append(key_name) - expanded_schema[key_name] = _json_schema( - sub_schema, is_main_schema=False, description=_get_key_description(key) - ) - if isinstance(key, Optional) and hasattr(key, "default"): - expanded_schema[key_name]["default"] = _to_json_type(_invoke_with_optional_kwargs(key.default, **kwargs) if callable(key.default) else key.default) # nopep8 - elif isinstance(key_name, Or): - # JSON schema does not support having a key named one name or another, so we just add both options - # This is less strict because we cannot enforce that one or the other is required - - for or_key in key_name.args: - expanded_schema[_get_key_name(or_key)] = _json_schema( - sub_schema, is_main_schema=False, description=_get_key_description(or_key) - ) - - return_schema.update( - { - "type": "object", - "properties": expanded_schema, - "required": required_keys, - "additionalProperties": additional_properties, - } - ) - - if is_main_schema: - return_schema.update({"$id": schema_id, "$schema": "http://json-schema.org/draft-07/schema#"}) - if self._name: - return_schema["title"] = self._name - - if definitions_by_name: - return_schema["definitions"] = {} - for definition_name, definition in definitions_by_name.items(): - return_schema["definitions"][definition_name] = definition - - return _create_or_use_ref(return_schema) - - return _json_schema(self, True) - - -class Optional(Schema): - """Marker for an optional part of the validation Schema.""" - - _MARKER = object() - - def 
__init__(self, *args, **kwargs): - default = kwargs.pop("default", self._MARKER) - super(Optional, self).__init__(*args, **kwargs) - if default is not self._MARKER: - # See if I can come up with a static key to use for myself: - if _priority(self._schema) != COMPARABLE: - raise TypeError( - "Optional keys with defaults must have simple, " - "predictable values, like literal strings or ints. " - '"%r" is too complex.' % (self._schema,) - ) - self.default = default - self.key = str(self._schema) - - def __hash__(self): - return hash(self._schema) - - def __eq__(self, other): - return ( - self.__class__ is other.__class__ and - getattr(self, "default", self._MARKER) == getattr(other, "default", self._MARKER) and - self._schema == other._schema - ) - - def reset(self): - if hasattr(self._schema, "reset"): - self._schema.reset() - - -class Hook(Schema): - def __init__(self, *args, **kwargs): - self.handler = kwargs.pop("handler", lambda *args: None) - super(Hook, self).__init__(*args, **kwargs) - self.key = self._schema - - -class Forbidden(Hook): - def __init__(self, *args, **kwargs): - kwargs["handler"] = self._default_function - super(Forbidden, self).__init__(*args, **kwargs) - - @staticmethod - def _default_function(nkey, data, error): - raise SchemaForbiddenKeyError("Forbidden key encountered: %r in %r" % (nkey, data), error) - - -class Literal(object): - def __init__(self, value, description=None): - self._schema = value - self._description = description - - def __str__(self): - return self._schema - - def __repr__(self): - return 'Literal("' + self.schema + '", description="' + (self.description or "") + '")' - - @property - def description(self): - return self._description - - @property - def schema(self): - return self._schema - - -class Const(Schema): - def validate(self, data, **kwargs): - super(Const, self).validate(data, **kwargs) - return data - - -def _callable_str(callable_): - if hasattr(callable_, "__name__"): - return callable_.__name__ - return str(callable_) - - -def _plural_s(sized): - return "s" if len(sized) > 1 else "" - - -# The following functions are added to be able to translate an user-specified Dict into a SchemaDict. The Schema -# class module was obtained from: - -# https://github.com/keleshev/schema/blob/master/schema.py - - -def build_schema(data: Dict) -> Dict: - """ - Description - ----------- - - This function takes in a user-provided dictionary and defines the - respective schema. - - Parameters - ---------- - - data: Dict - - A Python dictionary containing the schema attributes. - - Returns - ------- - - schema_dict: Dict - - A Python dictionary containing the schema. - - """ - - # TODO: Find an alternative to pydoc.locate() to identify type. - schema_dict = {} - for datum in data: - data_dict = data[datum] - - # Check whether the variable is optional; proceed accordingly. - if "optional" not in data_dict: - data_dict['optional'] = False - schema_dict[datum] = locate(data_dict["type"]) - else: - if data_dict['optional']: - schema_dict[datum] = locate(data_dict["type"]) - - # Build the schema accordingly. 
- try: - if data_dict["optional"]: - schema_dict[Optional(datum, default=data_dict["default"]) - ] = locate(data_dict["type"]) - else: - schema_dict[datum] = locate(data_dict["type"]) - except AttributeError: - pass - - return schema_dict - - -def validate_schema(schema_dict: Dict, data: Dict) -> Dict: - """ - Description - ------------ - - This function validates the schema; if an optional key value has - not be specified, a the default value for the option is defined - within the returned Dict. - - Parameters - ---------- - - schema_dict: Dict - - A Python dictionary containing the schema. - - data: Dict - - A Python dictionary containing the configuration to be - validated. - - Returns - ------- - - data: Dict - - A Python dictionary containing the validated schema; if any - optional values have not been define within `data` (above), - they are updated with the schema default values. - - """ - - # Define the schema instance. - schema = Schema([schema_dict], ignore_extra_keys=True) - - # If any `Optional` keys are missing from the scheme to be - # validated (`data`), update them acccordingly. - for k, v in schema_dict.items(): - if isinstance(k, Optional): - if k.key not in data: - data[k.key] = k.default - - # Validate the schema and return the updated dictionary. - schema.validate([data]) - - return data diff --git a/ush/python/pygw/src/pygw/task.py b/ush/python/pygw/src/pygw/task.py deleted file mode 100644 index 22ce4626d8..0000000000 --- a/ush/python/pygw/src/pygw/task.py +++ /dev/null @@ -1,93 +0,0 @@ -import logging -from typing import Dict - -from pygw.attrdict import AttrDict -from pygw.timetools import add_to_datetime, to_timedelta - -logger = logging.getLogger(__name__.split('.')[-1]) - - -class Task: - """ - Base class for all tasks - """ - - def __init__(self, config: Dict, *args, **kwargs): - """ - Every task needs a config. - Additional arguments (or key-value arguments) can be provided. 
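# The build_schema/validate_schema pair above, sketched with a two-key spec;
# the "type" strings are resolved to Python types via pydoc.locate:

from pygw.schema import build_schema, validate_schema

spec = {
    'name': {'type': 'str'},
    'ntasks': {'type': 'int', 'optional': True, 'default': 4},
}
schema_dict = build_schema(spec)
config = validate_schema(schema_dict, {'name': 'fcst'})
print(config['ntasks'])   # -> 4, filled in from the Optional key's default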
- - Parameters - ---------- - config : Dict - dictionary object containing task configuration - - *args : tuple - Additional arguments to `Task` - - **kwargs : dict, optional - Extra keyword arguments to `Task` - """ - - # Store the config and arguments as attributes of the object - self.config = AttrDict(config) - - for arg in args: - setattr(self, str(arg), arg) - - for key, value in kwargs.items(): - setattr(self, key, value) - - # Pull out basic runtime keys values from config into its own runtime config - self.runtime_config = AttrDict() - runtime_keys = ['PDY', 'cyc', 'DATA', 'RUN', 'CDUMP'] # TODO: eliminate CDUMP and use RUN instead - for kk in runtime_keys: - try: - self.runtime_config[kk] = config[kk] - logger.debug(f'Deleting runtime_key {kk} from config') - del self.config[kk] - except KeyError: - raise KeyError(f"Encountered an unreferenced runtime_key {kk} in 'config'") - - # Any other composite runtime variables that may be needed for the duration of the task - # can be constructed here - - # Construct the current cycle datetime object - self.runtime_config['current_cycle'] = add_to_datetime(self.runtime_config['PDY'], to_timedelta(f"{self.runtime_config.cyc}H")) - logger.debug(f"current cycle: {self.runtime_config['current_cycle']}") - - # Construct the previous cycle datetime object - self.runtime_config['previous_cycle'] = add_to_datetime(self.runtime_config.current_cycle, -to_timedelta(f"{self.config['assim_freq']}H")) - logger.debug(f"previous cycle: {self.runtime_config['previous_cycle']}") - - pass - - def initialize(self): - """ - Initialize methods for a task - """ - pass - - def configure(self): - """ - Configuration methods for a task in preparation for execution - """ - pass - - def execute(self): - """ - Execute methods for a task - """ - pass - - def finalize(self): - """ - Methods for after the execution that produces output task - """ - pass - - def clean(self): - """ - Methods to clean after execution and finalization prior to closing out a task - """ - pass diff --git a/ush/python/pygw/src/pygw/template.py b/ush/python/pygw/src/pygw/template.py deleted file mode 100644 index 8532305783..0000000000 --- a/ush/python/pygw/src/pygw/template.py +++ /dev/null @@ -1,191 +0,0 @@ -import re -import os -import copy -from collections import namedtuple -from collections.abc import Sequence - -# Template imported with permission from jcsda/solo - -__all__ = ['Template', 'TemplateConstants'] - - -class TemplateConstants: - DOLLAR_CURLY_BRACE = '${}' - DOLLAR_PARENTHESES = '$()' - DOUBLE_CURLY_BRACES = '{{}}' - AT_SQUARE_BRACES = '@[]' - AT_ANGLE_BRACKETS = '@<>' - - SubPair = namedtuple('SubPair', ['regex', 'slice']) - - -class Template: - - """ - Utility for substituting variables in a template. The template can be the contents of a whole file - as a string (substitute_string) or in a complex dictionary (substitute_structure). - substitutions define different type of variables with a regex and a slice: - - the regex is supposed to find the whole variable, e.g, $(variable) - - the slice indicate how to slice the value returned by the regex to have the variable name, in the - case of $(variable), the slice is 2, -1 to remove $( and ). - You can easily add new type of variables following those rules. - - Please note that the regexes allow for at least one nested variable and the code is able to handle it. - It means that $($(variable)) will be processed correctly but the substitutions will need more than one - pass. 
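# Subclassing the Task base class above; all five runtime keys (PDY, cyc, DATA,
# RUN, CDUMP) plus assim_freq must be present in the config or __init__ raises
# KeyError. The values below are illustrative:

from datetime import datetime
from pygw.task import Task

config = {'PDY': datetime(2023, 7, 2), 'cyc': 6, 'DATA': '/tmp/run',
          'RUN': 'gfs', 'CDUMP': 'gfs', 'assim_freq': 6}

class Forecast(Task):
    def execute(self):
        print(f"{self.runtime_config.RUN} cycle: {self.runtime_config.current_cycle}")

Forecast(config).execute()   # -> gfs cycle: 2023-07-02 06:00:00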
- - If you have a file that is deeper than just a simple dictionary of has lists in it, you can use the method - build_index to create a dictionary that will have all the options from deeper levels (list, dicts). - You can then pass index.get as an argument to any method you use. - If you use substitute_with_dependencies, this is done automatically. - """ - - substitutions = { - TemplateConstants.DOLLAR_CURLY_BRACE: TemplateConstants.SubPair(re.compile(r'\${.*?}+'), slice(2, -1)), - TemplateConstants.DOLLAR_PARENTHESES: TemplateConstants.SubPair(re.compile(r'\$\(.*?\)+'), slice(2, -1)), - TemplateConstants.DOUBLE_CURLY_BRACES: TemplateConstants.SubPair(re.compile(r'{{.*?}}+'), slice(2, -2)), - TemplateConstants.AT_SQUARE_BRACES: TemplateConstants.SubPair(re.compile(r'@\[.*?\]+'), slice(2, -1)), - TemplateConstants.AT_ANGLE_BRACKETS: TemplateConstants.SubPair( - re.compile(r'@\<.*?\>+'), slice(2, -1)) - } - - @classmethod - def find_variables(cls, variable_to_substitute: str, var_type: str): - pair = cls.substitutions[var_type] - return [x[pair.slice] for x in re.findall(pair.regex, variable_to_substitute)] - - @classmethod - def substitute_string(cls, variable_to_substitute, var_type: str, get_value): - """ - Substitutes variables under the form var_type (e.g. DOLLAR_CURLY_BRACE), looks for a value returned - by function get_value and if found, substitutes the variable. Convert floats and int to string - before substitution. If the value in the dictionary is a complex type, just assign it instead - of substituting. - get_value is a function that returns the value to substitute: - signature: get_value(variable_name). - If substituting from a dictionary my_dict, pass my_dict.get - """ - pair = cls.substitutions[var_type] - if isinstance(variable_to_substitute, str): - variable_names = re.findall(pair.regex, variable_to_substitute) - for variable in variable_names: - var = variable[pair.slice] - v = get_value(var) - if v is not None: - if not is_single_type_or_string(v): - if len(variable_names) == 1: - # v could be a list or a dictionary (complex structure and not a string). - # If there is one variable that is the whole - # string, we can safely replace, otherwise do nothing. - if variable_to_substitute.replace(variable_names[0][pair.slice], '') == var_type: - variable_to_substitute = v - else: - if isinstance(v, float) or isinstance(v, int): - v = str(v) - if isinstance(v, str): - variable_to_substitute = variable_to_substitute.replace( - variable, v) - else: - variable_to_substitute = v - else: - more = re.search(pair.regex, var) - if more is not None: - new_value = cls.substitute_string( - var, var_type, get_value) - variable_to_substitute = variable_to_substitute.replace( - var, new_value) - return variable_to_substitute - - @classmethod - def substitute_structure(cls, structure_to_substitute, var_type: str, get_value): - """ - Traverses a dictionary and substitutes variables in fields, lists - and nested dictionaries. 
- """ - if isinstance(structure_to_substitute, dict): - for key, item in structure_to_substitute.items(): - structure_to_substitute[key] = cls.substitute_structure( - item, var_type, get_value) - elif is_sequence_and_not_string(structure_to_substitute): - for i, item in enumerate(structure_to_substitute): - structure_to_substitute[i] = cls.substitute_structure( - item, var_type, get_value) - else: - structure_to_substitute = cls.substitute_string(structure_to_substitute, var_type, - get_value) - return structure_to_substitute - - @classmethod - def substitute_structure_from_environment(cls, structure_to_substitute): - return cls.substitute_structure(structure_to_substitute, TemplateConstants.DOLLAR_CURLY_BRACE, os.environ.get) - - @classmethod - def substitute_with_dependencies(cls, dictionary, keys, var_type: str, shallow_precedence=True, excluded=()): - """ - Given a dictionary with a complex (deep) structure, we want to substitute variables, - using keys, another dictionary that may also have a deep structure (dictionary and keys - can be the same dictionary if you want to substitute in place). - We create an index based on keys (see build_index) and substitute values in dictionary - using index. If variables may refer to other variables, more than one pass of substitution - may be needed, so we substitute until there is no more change in dictionary (convergence). - """ - all_variables = cls.build_index(keys, excluded, shallow_precedence) - previous = {} - while dictionary != previous: - previous = copy.deepcopy(dictionary) - dictionary = cls.substitute_structure( - dictionary, var_type, all_variables.get) - return dictionary - - @classmethod - def build_index(cls, dictionary, excluded=None, shallow_precedence=True): - """ - Builds an index of all keys with their values, going deep into the dictionary. The index - if a flat structure (dictionary). - If the same key name is present more than once in the structure, we want to - either prioritise the values that are near the root of the tree (shallow_precedence=True) - or values that are near the leaves (shallow_precedence=False). We don't anticipate use - cases where the "nearest variable" should be used, but this could constitute a future - improvement. - """ - def build(structure, variables): - if isinstance(structure, dict): - for k, i in structure.items(): - if ((k not in variables) or (k in variables and not shallow_precedence)) and k not in excluded: - variables[k] = i - build(i, variables) - elif is_sequence_and_not_string(structure): - for v in structure: - build(v, variables) - var = {} - if excluded is None: - excluded = set() - build(dictionary, var) - return var - - -# These used to be in basic.py, and have been copied here till they are needed elsewhere. 
- - -def is_sequence_and_not_string(a): - return isinstance(a, Sequence) and not isinstance(a, str) - - -def is_single_type(s): - try: - len(s) - except TypeError: - return True - else: - return False - - -def is_single_type_or_string(s): - if isinstance(s, str): - return True - try: - len(s) - except TypeError: - return True - else: - return False diff --git a/ush/python/pygw/src/pygw/timetools.py b/ush/python/pygw/src/pygw/timetools.py deleted file mode 100644 index cd43b55bfa..0000000000 --- a/ush/python/pygw/src/pygw/timetools.py +++ /dev/null @@ -1,316 +0,0 @@ -import re -import datetime - - -__all__ = ["to_datetime", "to_timedelta", - "datetime_to_YMDH", "datetime_to_YMD", "datetime_to_JDAY", - "timedelta_to_HMS", - "strftime", "strptime", - "to_YMDH", "to_YMD", "to_JDAY", "to_julian", - "to_isotime", "to_fv3time", - "add_to_datetime", "add_to_timedelta"] - - -_DATETIME_RE = re.compile( - r"(?P\d{4})(-)?(?P\d{2})(-)?(?P\d{2})" - r"(T)?(?P\d{2})?(:)?(?P\d{2})?(:)?(?P\d{2})?(Z)?") - -_TIMEDELTA_HOURS_RE = re.compile( - r"(?P[+-])?" - r"((?P\d+)[d])?" - r"(T)?((?P\d+)[H])?((?P\d+)[M])?((?P\d+)[S])?(Z)?") -_TIMEDELTA_TIME_RE = re.compile( - r"(?P[+-])?" - r"((?P\d+)(\s)day(s)?,(\s)?)?" - r"(T)?(?P\d{1,2})?(:(?P\d{1,2}))?(:(?P\d{1,2}))?") - - -def to_datetime(dtstr: str) -> datetime.datetime: - """ - Description - ----------- - Translate a string into a datetime object in a generic way. - The string can also support ISO 8601 representation. - - Formats accepted (T, Z, -, :) are optional: - YYYY-mm-dd - YYYY-mm-ddTHHZ - YYYY-mm-ddTHH:MMZ - YYYY-mm-ddTHH:MM:SSZ - - Parameters - ---------- - dtstr : str - String to be translated into a datetime object - - Returns - ------- - datetime.datetime - Datetime object - """ - - mm = _DATETIME_RE.match(dtstr) - if mm: - return datetime.datetime(**{kk: int(vv) for kk, vv in mm.groupdict().items() if vv}) - else: - raise Exception(f"Bad datetime string: '{dtstr}'") - - -def to_timedelta(tdstr: str) -> datetime.timedelta: - """ - Description - ----------- - Translate a string into a timedelta object in a generic way - - Formats accepted (, T, Z) are optional: -
<sign><days>dT<hours>H<minutes>M<seconds>SZ - <sign><days> day(s), hh:mm:ss - - <sign> can be +/-, default is + - <days> can be any integer, default is 0 - <hours> can be any integer, default is 0 - <minutes> can be any integer, default is 0 - <seconds> can be any integer, default is 0 - - Parameters - ---------- - tdstr : str - String to be translated into a timedelta object - - Returns - ------- - datetime.timedelta - Timedelta object - """ - - time_dict = {'sign': '+', - 'days': 0, - 'hours': 0, - 'minutes': 0, - 'seconds': 0} - - if any(x in tdstr for x in ['day', 'days', ':']): - mm = _TIMEDELTA_TIME_RE.match(tdstr) # timedelta representation - else: - mm = _TIMEDELTA_HOURS_RE.match(tdstr) # ISO 8601 representation - - if mm: - nmm = {kk: vv if vv is not None else time_dict[kk] - for kk, vv in mm.groupdict().items()} - del nmm['sign'] - nmm = {kk: float(vv) for kk, vv in nmm.items()} - dt = datetime.timedelta(**nmm) - if mm.group('sign') is not None and mm.group('sign') == '-': - dt = -dt - return dt - else: - raise Exception(f"Bad timedelta string: '{tdstr}'") - - -def datetime_to_YMDH(dt: datetime.datetime) -> str: - """ - Description - ----------- - Translate a datetime object to 'YYYYmmddHH' format. - - Parameters - ---------- - dt : datetime.datetime - Datetime object to translate. - - Returns - ------- - str: str - Formatted string in 'YYYYmmddHH' format. - """ - try: - return dt.strftime('%Y%m%d%H') - except Exception: - raise Exception(f"Bad datetime: '{dt}'") - - -def datetime_to_YMD(dt: datetime.datetime) -> str: - """ - Description - ----------- - Translate a datetime object to 'YYYYmmdd' format. - - Parameters - ---------- - dt : datetime.datetime - Datetime object to translate. - - Returns - ------- - str: str - Formatted string in 'YYYYmmdd' format. - """ - try: - return dt.strftime('%Y%m%d') - except Exception: - raise Exception(f"Bad datetime: '{dt}'") - - -def datetime_to_JDAY(dt: datetime.datetime) -> str: - """ - Description - ----------- - Translate a datetime object to 'YYYYDOY' format. - - - Parameters - ---------- - dt : datetime.datetime - Datetime object to translate - - Returns - ------- - str: str - Formatted string in 'YYYYDOY' format. - """ - try: - return dt.strftime('%Y%j') - except Exception: - raise Exception(f"Bad datetime: '{dt}'") - - -def timedelta_to_HMS(td: datetime.timedelta) -> str: - """ - Description - ----------- - Translate a timedelta object to 'HH:MM:SS' format. - - Parameters - ---------- - td : datetime.timedelta - Timedelta object to translate. - - Returns - ------- - str: str - Formatted string in 'HH:MM:SS' format. - """ - try: - hours, remainder = divmod(int(td.total_seconds()), 3600) - minutes, seconds = divmod(remainder, 60) - return f"{hours:02d}:{minutes:02d}:{seconds:02d}" - except Exception: - raise Exception(f"Bad timedelta: '{td}'") - - -def strftime(dt: datetime.datetime, fmt: str) -> str: - """ - Return a formatted string from a datetime object. - """ - try: - return dt.strftime(fmt) - except Exception: - raise Exception(f"Bad datetime (format): '{dt} ({fmt})'") - - -def strptime(dtstr: str, fmt: str) -> datetime.datetime: - """ - Description - ----------- - Translate a formatted string into datetime object. - - Parameters - ---------- - dtstr : str - Datetime string to translate. - fmt : str - Datetime string format. - - Returns - ------- - datetime.datetime: datetime.datetime - Datetime object.
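Example
-------
A doctest-style sketch; the sample timestamp below is assumed for
illustration only:

>>> strptime('2022121518', '%Y%m%d%H')
datetime.datetime(2022, 12, 15, 18, 0)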
- """ - try: - return datetime.datetime.strptime(dtstr, fmt) - except Exception: - raise Exception(f"Bad datetime string (format): '{dtstr} ({fmt})'") - - -def to_isotime(dt: datetime.datetime) -> str: - """ - Description - ----------- - Return a ISO formatted '%Y-%m-%dT%H:%M:%SZ' string from a datetime object. - - Parameters - ---------- - dt : datetime.datetime - Datetime object to format. - - Returns - ------- - str: str - Formatted string in ISO format. - """ - return strftime(dt, '%Y-%m-%dT%H:%M:%SZ') - - -def to_fv3time(dt: datetime.datetime) -> str: - """ - Description - ----------- - Return a FV3 formatted string from a datetime object. - - Parameters - ---------- - dt : datetime.datetime - Datetime object to format. - - Returns - ------- - str: str - Formatted string in FV3 format. - """ - return strftime(dt, '%Y%m%d.%H%M%S') - - -def add_to_datetime(dt: datetime.datetime, td: datetime.timedelta) -> datetime.datetime: - """ - Description - ----------- - Adds a timedelta to a datetime object. - - Parameters - ---------- - dt : datetime.datetime - Datetime object to add to. - td : datetime.timedelta - Timedelta object to add. - - Returns - ------- - datetime.datetime - """ - return dt + td - - -def add_to_timedelta(td1, td2): - """ - Description - ----------- - Adds two timedelta objects. - - Parameters - ---------- - td1 : datetime.timedelta - First timedelta object to add. - td2 : datetime.timedelta - Second timedelta object to add. - - Returns - ------- - datetime.timedelta - """ - return td1 + td2 - - -to_YMDH = datetime_to_YMDH -to_YMD = datetime_to_YMD -to_JDAY = datetime_to_JDAY -to_julian = datetime_to_JDAY diff --git a/ush/python/pygw/src/pygw/yaml_file.py b/ush/python/pygw/src/pygw/yaml_file.py deleted file mode 100644 index 89cd1e2ec0..0000000000 --- a/ush/python/pygw/src/pygw/yaml_file.py +++ /dev/null @@ -1,208 +0,0 @@ -import os -import re -import json -import yaml -import datetime -from typing import Any, Dict -from .attrdict import AttrDict -from .template import TemplateConstants, Template -from .jinja import Jinja - -__all__ = ['YAMLFile', 'parse_yaml', 'parse_yamltmpl', 'parse_j2yaml', - 'save_as_yaml', 'dump_as_yaml', 'vanilla_yaml'] - - -class YAMLFile(AttrDict): - """ - Reads a YAML file as an AttrDict and recursively converts - nested dictionaries into AttrDict. - This is the entry point for all YAML files. - """ - - def __init__(self, path=None, data=None): - super().__init__() - - if path and data: - print("Ignoring 'data' and using 'path' argument") - - config = None - if path is not None: - config = parse_yaml(path=path) - elif data is not None: - config = parse_yaml(data=data) - - if config is not None: - self.update(config) - - def save(self, target): - save_as_yaml(self, target) - - def dump(self): - return dump_as_yaml(self) - - def as_dict(self): - return vanilla_yaml(self) - - -def save_as_yaml(data, target): - # specifies a wide file so that long strings are on one line. - with open(target, 'w') as fh: - yaml.safe_dump(vanilla_yaml(data), fh, - width=100000, sort_keys=False) - - -def dump_as_yaml(data): - return yaml.dump(vanilla_yaml(data), - width=100000, sort_keys=False) - - -def parse_yaml(path=None, data=None, - encoding='utf-8', loader=yaml.SafeLoader): - """ - Load a yaml configuration file and resolve any environment variables - The environment variables must have !ENV before them and be in this format - to be parsed: ${VAR_NAME}. 
- E.g.: - database: - host: !ENV ${HOST} - port: !ENV ${PORT} - app: - log_path: !ENV '/var/${LOG_PATH}' - something_else: !ENV '${AWESOME_ENV_VAR}/var/${A_SECOND_AWESOME_VAR}' - :param str path: the path to the yaml file - :param str data: the yaml data itself as a stream - :param Type[yaml.loader] loader: Specify which loader to use. Defaults to yaml.SafeLoader - :param str encoding: the encoding of the data if a path is specified, defaults to utf-8 - :return: the dict configuration - :rtype: Dict[str, Any] - - Adopted from: - https://dev.to/mkaranasou/python-yaml-configuration-with-environment-variables-parsing-2ha6 - """ - # define tags - envtag = '!ENV' - inctag = '!INC' - # pattern for global vars: look for ${word} - pattern = re.compile(r'.*?\${(\w+)}.*?') - loader = loader or yaml.SafeLoader - - # the envtag will be used to mark where to start searching for the pattern - # e.g. somekey: !ENV somestring${MYENVVAR}blah blah blah - loader.add_implicit_resolver(envtag, pattern, None) - loader.add_implicit_resolver(inctag, pattern, None) - - def expand_env_variables(line): - match = pattern.findall(line) # to find all env variables in line - if match: - full_value = line - for g in match: - full_value = full_value.replace( - f'${{{g}}}', os.environ.get(g, f'${{{g}}}') - ) - return full_value - return line - - def constructor_env_variables(loader, node): - """ - Extracts the environment variable from the node's value - :param yaml.Loader loader: the yaml loader - :param node: the current node in the yaml - :return: the parsed string that contains the value of the environment - variable - """ - value = loader.construct_scalar(node) - return expand_env_variables(value) - - def constructor_include_variables(loader, node): - """ - Extracts the environment variable from the node's value - :param yaml.Loader loader: the yaml loader - :param node: the current node in the yaml - :return: the content of the file to be included - """ - value = loader.construct_scalar(node) - value = expand_env_variables(value) - expanded = parse_yaml(value) - return expanded - - loader.add_constructor(envtag, constructor_env_variables) - loader.add_constructor(inctag, constructor_include_variables) - - if path: - with open(path, 'r', encoding=encoding) as conf_data: - return yaml.load(conf_data, Loader=loader) - elif data: - return yaml.load(data, Loader=loader) - else: - raise ValueError( - "Either a path or data should be defined as input") - - -def vanilla_yaml(ctx): - """ - Transform an input object of complex type as a plain type - """ - if isinstance(ctx, AttrDict): - return {kk: vanilla_yaml(vv) for kk, vv in ctx.items()} - elif isinstance(ctx, list): - return [vanilla_yaml(vv) for vv in ctx] - elif isinstance(ctx, datetime.datetime): - return ctx.strftime("%Y-%m-%dT%H:%M:%SZ") - else: - return ctx - - -def parse_j2yaml(path: str, data: Dict) -> Dict[str, Any]: - """ - Description - ----------- - Load a compound jinja2-templated yaml file and resolve any templated variables. - The jinja2 templates are first resolved and then the rendered template is parsed as a yaml. - Finally, any remaining $( ... 
) templates are resolved - - Parameters - ---------- - path : str - the path to the yaml file - data : Dict[str, Any], optional - the context for jinja2 templating - Returns - ------- - Dict[str, Any] - the dict configuration - """ - jenv = Jinja(path, data) - yaml_file = jenv.render - yaml_dict = YAMLFile(data=yaml_file) - yaml_dict = Template.substitute_structure( - yaml_dict, TemplateConstants.DOLLAR_PARENTHESES, data.get) - - # If the input yaml file included other yamls with jinja2 templates, then we need to re-parse the jinja2 templates in them - jenv2 = Jinja(json.dumps(yaml_dict, indent=4), data) - yaml_file2 = jenv2.render - yaml_dict = YAMLFile(data=yaml_file2) - - return yaml_dict - - -def parse_yamltmpl(path: str, data: Dict = None) -> Dict[str, Any]: - """ - Description - ----------- - Load a simple templated yaml file and then resolve any templated variables defined as $( ... ) - Parameters - ---------- - path : str - the path to the yaml file - data : Dict[str, Any], optional - the context for pygw.Template templating - Returns - ------- - Dict[str, Any] - the dict configuration - """ - yaml_dict = YAMLFile(path=path) - if data is not None: - yaml_dict = Template.substitute_structure(yaml_dict, TemplateConstants.DOLLAR_PARENTHESES, data.get) - - return yaml_dict diff --git a/ush/python/pygw/src/tests/__init__.py b/ush/python/pygw/src/tests/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/ush/python/pygw/src/tests/test-files/test_schema.yaml b/ush/python/pygw/src/tests/test-files/test_schema.yaml deleted file mode 100644 index 741313118b..0000000000 --- a/ush/python/pygw/src/tests/test-files/test_schema.yaml +++ /dev/null @@ -1,21 +0,0 @@ -# A mandatory boolean valued variable. -variable1: - optional: False - type: bool - -# An optional complex valued variable. -variable2: - optional: True - type: int - default: 2 - -# A mandatory string variable. -variable3: - type: str - -# The default value should be ignored here as it is not optional; the -# default value is meaningless. -variable4: - type: float - optional: False - default: 10.0 diff --git a/ush/python/pygw/src/tests/test_configuration.py b/ush/python/pygw/src/tests/test_configuration.py deleted file mode 100644 index e83c2755b8..0000000000 --- a/ush/python/pygw/src/tests/test_configuration.py +++ /dev/null @@ -1,172 +0,0 @@ -import os -import pytest -from datetime import datetime - -from pygw.configuration import Configuration, cast_as_dtype - -file0 = """#!/bin/bash -export SOME_ENVVAR1="${USER}" -export SOME_LOCALVAR1="myvar1" -export SOME_LOCALVAR2="myvar2.0" -export SOME_LOCALVAR3="myvar3_file0" -export SOME_PATH1="/path/to/some/directory" -export SOME_PATH2="/path/to/some/file" -export SOME_DATE1="20221225" -export SOME_DATE2="2022122518" -export SOME_DATE3="202212251845" -export SOME_INT1=3 -export SOME_INT2=15 -export SOME_INT3=-999 -export SOME_FLOAT1=0.2 -export SOME_FLOAT2=3.5 -export SOME_FLOAT3=-9999. -export SOME_BOOL1=YES -export SOME_BOOL2=.true. -export SOME_BOOL3=.T. -export SOME_BOOL4=NO -export SOME_BOOL5=.false. -export SOME_BOOL6=.F. -""" - -file1 = """#!/bin/bash -export SOME_LOCALVAR3="myvar3_file1" -export SOME_LOCALVAR4="myvar4" -export SOME_BOOL7=.TRUE. 
-""" - -file0_dict = { - 'SOME_ENVVAR1': os.environ['USER'], - 'SOME_LOCALVAR1': "myvar1", - 'SOME_LOCALVAR2': "myvar2.0", - 'SOME_LOCALVAR3': "myvar3_file0", - 'SOME_PATH1': "/path/to/some/directory", - 'SOME_PATH2': "/path/to/some/file", - 'SOME_DATE1': datetime(2022, 12, 25, 0, 0, 0), - 'SOME_DATE2': datetime(2022, 12, 25, 18, 0, 0), - 'SOME_DATE3': datetime(2022, 12, 25, 18, 45, 0), - 'SOME_INT1': 3, - 'SOME_INT2': 15, - 'SOME_INT3': -999, - 'SOME_FLOAT1': 0.2, - 'SOME_FLOAT2': 3.5, - 'SOME_FLOAT3': -9999., - 'SOME_BOOL1': True, - 'SOME_BOOL2': True, - 'SOME_BOOL3': True, - 'SOME_BOOL4': False, - 'SOME_BOOL5': False, - 'SOME_BOOL6': False -} - -file1_dict = { - 'SOME_LOCALVAR3': "myvar3_file1", - 'SOME_LOCALVAR4': "myvar4", - 'SOME_BOOL7': True -} - -str_dtypes = [ - ('HOME', 'HOME'), -] - -int_dtypes = [ - ('1', 1), -] - -float_dtypes = [ - ('1.0', 1.0), -] - -bool_dtypes = [ - ('y', True), ('n', False), - ('Y', True), ('N', False), - ('yes', True), ('no', False), - ('Yes', True), ('No', False), - ('YES', True), ('NO', False), - ('t', True), ('f', False), - ('T', True), ('F', False), - ('true', True), ('false', False), - ('True', True), ('False', False), - ('TRUE', True), ('FALSE', False), - ('.t.', True), ('.f.', False), - ('.T.', True), ('.F.', False), -] - -datetime_dtypes = [ - ('20221215', datetime(2022, 12, 15, 0, 0, 0)), - ('2022121518', datetime(2022, 12, 15, 18, 0, 0)), - ('2022121518Z', datetime(2022, 12, 15, 18, 0, 0)), - ('20221215T1830', datetime(2022, 12, 15, 18, 30, 0)), - ('20221215T1830Z', datetime(2022, 12, 15, 18, 30, 0)), -] - - -def evaluate(dtypes): - for pair in dtypes: - print(f"Test: '{pair[0]}' ==> {pair[1]}") - assert pair[1] == cast_as_dtype(pair[0]) - - -def test_cast_as_dtype_str(): - evaluate(str_dtypes) - - -def test_cast_as_dtype_int(): - evaluate(int_dtypes) - - -def test_cast_as_dtype_float(): - evaluate(float_dtypes) - - -def test_cast_as_dtype_bool(): - evaluate(bool_dtypes) - - -def test_cast_as_dtype_datetimes(): - evaluate(datetime_dtypes) - - -@pytest.fixture -def create_configs(tmp_path): - - file_path = tmp_path / 'config.file0' - with open(file_path, 'w') as fh: - fh.write(file0) - - file_path = tmp_path / 'config.file1' - with open(file_path, 'w') as fh: - fh.write(file1) - - -def test_configuration_config_dir(tmp_path, create_configs): - cfg = Configuration(tmp_path) - assert cfg.config_dir == tmp_path - - -@pytest.mark.skip(reason="fails in GH runner, passes on localhost") -def test_configuration_config_files(tmp_path, create_configs): - cfg = Configuration(tmp_path) - config_files = [str(tmp_path / 'config.file0'), str(tmp_path / 'config.file1')] - assert config_files == cfg.config_files - - -def test_find_config(tmp_path, create_configs): - cfg = Configuration(tmp_path) - file0 = cfg.find_config('config.file0') - assert str(tmp_path / 'config.file0') == file0 - - -@pytest.mark.skip(reason="fails in GH runner, passes on localhost") -def test_parse_config1(tmp_path, create_configs): - cfg = Configuration(tmp_path) - f0 = cfg.parse_config('config.file0') - assert file0_dict == f0 - - -@pytest.mark.skip(reason="fails in GH runner, passes on localhost") -def test_parse_config2(tmp_path, create_configs): - cfg = Configuration(tmp_path) - ff = cfg.parse_config(['config.file0', 'config.file1']) - ff_dict = file0_dict.copy() - ff_dict.update(file1_dict) - assert ff_dict == ff diff --git a/ush/python/pygw/src/tests/test_exceptions.py b/ush/python/pygw/src/tests/test_exceptions.py deleted file mode 100644 index 79f3e4f1ec..0000000000 --- 
a/ush/python/pygw/src/tests/test_exceptions.py +++ /dev/null @@ -1,35 +0,0 @@ -import pytest - -from pygw.exceptions import WorkflowException - -# ---- - - -class TestError(WorkflowException): - """ - Description - ----------- - - This is the base-class for exceptions encountered within the - pygw/errors unit-tests module; it is a sub-class of Error. - - """ - -# ---- - - -def test_errors() -> None: - """ - Description - ----------- - - This function provides a unit test for the errors module. - - """ - - # Raise the base-class exception. - with pytest.raises(Exception): - msg = "Testing exception raise." - raise TestError(msg=msg) - - assert True diff --git a/ush/python/pygw/src/tests/test_executable.py b/ush/python/pygw/src/tests/test_executable.py deleted file mode 100644 index 4c0e584fab..0000000000 --- a/ush/python/pygw/src/tests/test_executable.py +++ /dev/null @@ -1,60 +0,0 @@ -import os -from pathlib import Path -import pytest -from pygw.executable import Executable, which, CommandNotFoundError - - -script = """#!/bin/bash -echo ${USER} -""" - - -def test_executable(tmp_path): - """ - Tests the class `Executable` - Parameters: - ----------- - tmp_path : Path - temporary path created by pytest - """ - whoami = os.environ['USER'] - - test_file = tmp_path / 'whoami.x' - Path(test_file).touch(mode=0o755) - with open(test_file, 'w') as fh: - fh.write(script) - - cmd = Executable(str(test_file)) - assert cmd.exe == [str(test_file)] - - stdout_file = tmp_path / 'stdout' - stderr_file = tmp_path / 'stderr' - cmd(output=str(stdout_file), error=str(stderr_file)) - with open(str(stdout_file)) as fh: - assert fh.read() == whoami + '\n' - - -def test_which(tmpdir): - """ - Tests the `which()` function. - `which` should return `None` if the executable is not found - Parameters - ---------- - tmpdir : Path - path to a temporary directory created by pytest - """ - os.environ["PATH"] = str(tmpdir) - assert which('test.x') is None - - with pytest.raises(CommandNotFoundError): - which('test.x', required=True) - - path = str(tmpdir.join("test.x")) - - # create a test.x executable in the tmpdir - with tmpdir.as_cwd(): - Path('test.x').touch(mode=0o755) - - exe = which("test.x") - assert exe is not None - assert exe.path == path diff --git a/ush/python/pygw/src/tests/test_factory.py b/ush/python/pygw/src/tests/test_factory.py deleted file mode 100644 index 2165eca044..0000000000 --- a/ush/python/pygw/src/tests/test_factory.py +++ /dev/null @@ -1,74 +0,0 @@ -from pygw.factory import Factory - - -class Class1: - def __init__(self): - self.my_name = 'Class1' - - -class Class2: - def __init__(self): - self.my_name = 'Class2' - - -def test_factory(): - _ = Factory('Test0') - try: - from pygw.factory import Test0Factory - # linter will likely throw an error here since 'Test0Factory' is not a valid module until runtime - except ModuleNotFoundError: - raise AssertionError("'Test0Factory' was not found in the 'pygw.factory' module") - - -def test_register(): - factory = Factory('Test1') - try: - factory.register('Class1', Class1) - except Exception: - raise AssertionError("Unable to register 'Class1' in 'Test1Factory'") - - -def test_create(): - factory = Factory('Test2') - factory.register('Class1', Class1) - factory.register('Class2', Class2) - - c1 = factory.create('Class1') - assert c1.my_name == 'Class1', "Error in creating builder 'Class1' in Factory 'Test2Factory'" - c2 = factory.create('Class2') - assert c2.my_name == 'Class2', "Error in creating builder 'Class2' in Factory 'Test2Factory'" - - -def 
test_destroy(): - factory = Factory('Test3') - factory.register('Class1', Class1) - factory.register('Class2', Class2) - - _ = factory.create('Class1') - _ = factory.create('Class2') - factory.destroy('Class2') - assert not (factory.is_registered('Class2')) - - -def test_registered(): - factory = Factory('Test4') - factory.register('Class1', Class1) - factory.register('Class2', Class2) - - _ = factory.create('Class1') - _ = factory.create('Class2') - assert factory.registered == {'Class1', 'Class2'}, "Unregistered builders in 'Test4Factory'" - - -def test_is_registered(): - factory = Factory('Test5') - factory.register('Class1', Class1) - - _ = factory.create('Class1') - assert factory.is_registered('Class1'), "Unregistered builder in 'Test5Factory'" - assert not (factory.is_registered('Class3')), "Unregistered builder in 'Test5Factory'" - - -def test_get_factory(): - factory = Factory('Test6') - assert Factory.get_factory('Test6Factory') == factory diff --git a/ush/python/pygw/src/tests/test_file_utils.py b/ush/python/pygw/src/tests/test_file_utils.py deleted file mode 100644 index 684c76b650..0000000000 --- a/ush/python/pygw/src/tests/test_file_utils.py +++ /dev/null @@ -1,66 +0,0 @@ -import os -from pygw.file_utils import FileHandler - - -def test_mkdir(tmp_path): - """ - Test for creating directories: - Parameters - ---------- - tmp_path - pytest fixture - """ - - dir_path = tmp_path / 'my_test_dir' - d1 = f'{dir_path}1' - d2 = f'{dir_path}2' - d3 = f'{dir_path}3' - - # Create config object for FileHandler - config = {'mkdir': [d1, d2, d3]} - - # Create d1, d2, d3 - FileHandler(config).sync() - - # Check if d1, d2, d3 were indeed created - for dd in config['mkdir']: - assert os.path.exists(dd) - - -def test_copy(tmp_path): - """ - Test for copying files: - Parameters - ---------- - tmp_path - pytest fixture - """ - - input_dir_path = tmp_path / 'my_input_dir' - - # Create the input directory - config = {'mkdir': [input_dir_path]} - FileHandler(config).sync() - - # Put empty files in input_dir_path - src_files = [input_dir_path / 'a.txt', input_dir_path / 'b.txt'] - for ff in src_files: - ff.touch() - - # Create output_dir_path and expected file names - output_dir_path = tmp_path / 'my_output_dir' - config = {'mkdir': [output_dir_path]} - FileHandler(config).sync() - dest_files = [output_dir_path / 'a.txt', output_dir_path / 'bb.txt'] - - copy_list = [] - for src, dest in zip(src_files, dest_files): - copy_list.append([src, dest]) - - # Create config object for FileHandler - config = {'copy': copy_list} - - # Copy input files to output files - FileHandler(config).sync() - - # Check if files were indeed copied - for ff in dest_files: - assert os.path.isfile(ff) diff --git a/ush/python/pygw/src/tests/test_jinja.py b/ush/python/pygw/src/tests/test_jinja.py deleted file mode 100644 index 10749515ab..0000000000 --- a/ush/python/pygw/src/tests/test_jinja.py +++ /dev/null @@ -1,37 +0,0 @@ -import pytest - -from datetime import datetime -from pygw.jinja import Jinja -from pygw.timetools import to_isotime - -current_date = datetime.now() -j2tmpl = """Hello {{ name }}! {{ greeting }} It is: {{ current_date | to_isotime }}""" - - -@pytest.fixture -def create_template(tmp_path): - file_path = tmp_path / 'template.j2' - with open(file_path, 'w') as fh: - fh.write(j2tmpl) - - -def test_render_stream(): - data = {"name": "John"} - j = Jinja(j2tmpl, data, allow_missing=True) - assert j.render == "Hello John! 
{{ greeting }} It is: {{ current_date }}" - - data = {"name": "Jane", "greeting": "How are you?", "current_date": current_date} - j = Jinja(j2tmpl, data, allow_missing=False) - assert j.render == f"Hello Jane! How are you? It is: {to_isotime(current_date)}" - - -def test_render_file(tmp_path, create_template): - - file_path = tmp_path / 'template.j2' - data = {"name": "John"} - j = Jinja(str(file_path), data, allow_missing=True) - assert j.render == "Hello John! {{ greeting }} It is: {{ current_date }}" - - data = {"name": "Jane", "greeting": "How are you?", "current_date": current_date} - j = Jinja(str(file_path), data, allow_missing=False) - assert j.render == f"Hello Jane! How are you? It is: {to_isotime(current_date)}" diff --git a/ush/python/pygw/src/tests/test_logger.py b/ush/python/pygw/src/tests/test_logger.py deleted file mode 100644 index a9b4504d57..0000000000 --- a/ush/python/pygw/src/tests/test_logger.py +++ /dev/null @@ -1,67 +0,0 @@ -from pygw.logger import Logger -from pygw.logger import logit - -level = 'debug' -number_of_log_msgs = 5 -reference = {'debug': "Logging test has started", - 'info': "Logging to 'logger.log' in the script dir", - 'warning': "This is my last warning, take heed", - 'error': "This is an error", - 'critical': "He's dead, She's dead. They are all dead!"} - - -def test_logger(tmp_path): - """Test log file""" - - logfile = tmp_path / "logger.log" - - try: - log = Logger('test_logger', level=level, logfile_path=logfile, colored_log=True) - log.debug(reference['debug']) - log.info(reference['info']) - log.warning(reference['warning']) - log.error(reference['error']) - log.critical(reference['critical']) - except Exception as e: - raise AssertionError(f'logging failed as {e}') - - # Make sure log to file created messages - try: - with open(logfile, 'r') as fh: - log_msgs = fh.readlines() - except Exception as e: - raise AssertionError(f'failed reading log file as {e}') - - # Ensure number of messages are same - log_msgs_in_logfile = len(log_msgs) - assert log_msgs_in_logfile == number_of_log_msgs - - # Ensure messages themselves are same - for count, line in enumerate(log_msgs): - lev = line.split('-')[3].strip().lower() - message = line.split(':')[-1].strip() - assert reference[lev] == message - - -def test_logit(tmp_path): - - logger = Logger('test_logit', level=level, colored_log=True) - - @logit(logger) - def add(x, y): - return x + y - - @logit(logger) - def usedict(n, j=0, k=1): - return n + j + k - - @logit(logger, 'example') - def spam(): - print('Spam!') - - add(2, 3) - usedict(2, 3) - usedict(2, k=3) - spam() - - assert True diff --git a/ush/python/pygw/src/tests/test_schema.py b/ush/python/pygw/src/tests/test_schema.py deleted file mode 100644 index 220b9866a9..0000000000 --- a/ush/python/pygw/src/tests/test_schema.py +++ /dev/null @@ -1,82 +0,0 @@ -""" -Description ------------ - -Unit-tests for `pygw.schema`. -""" - -import os -import pytest -from pygw import schema -from pygw.yaml_file import parse_yaml -from pygw.schema import SchemaError -from pygw.configuration import cast_strdict_as_dtypedict - - -# Define the path to the YAML-formatted file containing the schema -# attributes. -# yaml_path = os.path.join(os.getcwd(), "tests", -# "test-files", "test_schema.yaml") -# data = parse_yaml(path=yaml_path) -@pytest.mark.skip(reason="disable till the developer fixes the test") -def test_build_schema(): - """ - Description - ----------- - - This function tests the `pygw.schema.build_schema` function. 
- - """ - - # Test that the schema can be defined. - assert schema.build_schema(data=data) - - -@pytest.mark.skip(reason="disable till the developer fixes the test") -def test_validate_schema(): - """ - Description - ----------- - - This function tests various application configurations (i.e., - `data_in`) for various schema validation applications. - - """ - - # Define the schema. - schema_dict = schema.build_schema(data=data) - - # Test that the schema validates and returns a the dictionary - # passed; this unit-test should pass. - data_in = { - "variable1": False, - "variable2": 1, - "variable3": "hello world", - "variable4": 10.0, - } - data_out = schema.validate_schema(schema_dict=schema_dict, data=data_in) - assert True - assert data_in == data_out - - # Test that optional values are updated with defaults. - del data_in["variable2"] - data_out = schema.validate_schema(schema_dict=schema_dict, data=data_in) - assert True - - # This unit-test should raise a `SchemaError` exception in order - # to pass. - data_in["variable2"] = "I **should** fail." - try: - data_out = schema.validate_schema( - schema_dict=schema_dict, data=data_in) - except SchemaError: - assert True - - # This unit-test passes the full environment, including `data_in`, - # to be validated; this tests the `ignore_extra_keys` attribute; - # this unit-test should pass. - del data_in["variable2"] - data_in = {**cast_strdict_as_dtypedict(os.environ), **data_in} - data_out = schema.validate_schema(schema_dict=schema_dict, data=data_in) - assert True - assert data_in == data_out diff --git a/ush/python/pygw/src/tests/test_template.py b/ush/python/pygw/src/tests/test_template.py deleted file mode 100644 index f6d594b2d9..0000000000 --- a/ush/python/pygw/src/tests/test_template.py +++ /dev/null @@ -1,147 +0,0 @@ -import os -from pygw.template import TemplateConstants, Template - - -def test_substitute_string_from_dict(): - """ - Substitute with ${v} - """ - template = '${greeting} to ${the_world}' - dictionary = { - 'greeting': 'Hello', - 'the_world': 'the world' - } - final = 'Hello to the world' - assert Template.substitute_structure(template, - TemplateConstants.DOLLAR_CURLY_BRACE, dictionary.get) == final - - -def test_substitute_string_from_dict_paren(): - """ - Substitute with $(v) - """ - template = '$(greeting) to $(the_world)' - dictionary = { - 'greeting': 'Hello', - 'the_world': 'the world' - } - final = 'Hello to the world' - assert Template.substitute_structure(template, - TemplateConstants.DOLLAR_PARENTHESES, dictionary.get) == final - - -def test_assign_string_from_dict_paren(): - """ - Substitute with $(v) should replace with the actual object - """ - template = '$(greeting)' - dictionary = { - 'greeting': { - 'a': 1, - 'b': 2 - } - } - assert Template.substitute_structure(template, - TemplateConstants.DOLLAR_PARENTHESES, - dictionary.get) == dictionary['greeting'] - - -def test_substitute_string_from_dict_double_curly(): - """ - Substitute with {{v}} - """ - template = '{{greeting}} to {{the_world}}' - dictionary = { - 'greeting': 'Hello', - 'the_world': 'the world' - } - final = 'Hello to the world' - assert Template.substitute_structure(template, - TemplateConstants.DOUBLE_CURLY_BRACES, - dictionary.get) == final - - -def test_substitute_string_from_dict_at_square(): - """ - Substitute with @[v] - """ - template = '@[greeting] to @[the_world]' - dictionary = { - 'greeting': 'Hello', - 'the_world': 'the world' - } - final = 'Hello to the world' - assert Template.substitute_structure(template, - 
TemplateConstants.AT_SQUARE_BRACES, - dictionary.get) == final - - -def test_substitute_string_from_dict_at_carrots(): - """ - Substitute with @<v> - """ - template = '@<greeting> to @<the_world>' - dictionary = { - 'greeting': 'Hello', - 'the_world': 'the world' - } - final = 'Hello to the world' - assert Template.substitute_structure(template, - TemplateConstants.AT_ANGLE_BRACKETS, - dictionary.get) == final - - -def test_substitute_string_from_environment(): - """ - Substitute from environment - """ - template = '${GREETING} to ${THE_WORLD}' - os.environ['GREETING'] = 'Hello' - os.environ['THE_WORLD'] = 'the world' - final = 'Hello to the world' - assert Template.substitute_structure_from_environment(template) == final - - -def test_substitute_with_dependencies(): - input = { - 'root': '/home/user', - 'config_file': 'config.yaml', - 'config': '$(root)/config/$(config_file)', - 'greeting': 'hello $(world)', - 'world': 'world', - 'complex': '$(dictionary)', - 'dictionary': { - 'a': 1, - 'b': 2 - }, - 'dd': {'2': 'a', '1': 'b'}, - 'ee': {'3': 'a', '1': 'b'}, - 'ff': {'4': 'a', '1': 'b $(greeting)'}, - 'host': { - 'name': 'xenon', - 'config': '$(root)/hosts', - 'config_file': '$(config)/$(name).config.yaml', - 'proxy2': { - 'config': '$(root)/$(name).$(greeting).yaml', - 'list': [['$(root)/$(name)', 'toto.$(name).$(greeting)'], '$(config_file)'] - } - } - } - output = {'complex': {'a': 1, 'b': 2}, - 'config': '/home/user/config/config.yaml', - 'config_file': 'config.yaml', - 'dd': {'1': 'b', '2': 'a'}, - 'dictionary': {'a': 1, 'b': 2}, - 'ee': {'1': 'b', '3': 'a'}, - 'ff': {'1': 'b hello world', '4': 'a'}, - 'greeting': 'hello world', - 'host': {'config': '/home/user/hosts', - 'config_file': '/home/user/config/config.yaml/xenon.config.yaml', - 'name': 'xenon', - 'proxy2': {'config': '/home/user/xenon.hello world.yaml', - 'list': [['/home/user/xenon', 'toto.xenon.hello world'], - 'config.yaml']}}, - 'root': '/home/user', - 'world': 'world'} - - assert Template.substitute_with_dependencies(input, input, TemplateConstants.DOLLAR_PARENTHESES) == output diff --git a/ush/python/pygw/src/tests/test_timetools.py b/ush/python/pygw/src/tests/test_timetools.py deleted file mode 100644 index f7e2cfd2ce..0000000000 --- a/ush/python/pygw/src/tests/test_timetools.py +++ /dev/null @@ -1,80 +0,0 @@ -from datetime import datetime, timedelta -from pygw.timetools import * - -current_date = datetime.now() - - -def test_to_datetime(): - - assert to_datetime('20220314') == datetime(2022, 3, 14) - assert to_datetime('2022031412') == datetime(2022, 3, 14, 12) - assert to_datetime('202203141230') == datetime(2022, 3, 14, 12, 30) - assert to_datetime('2022-03-14') == datetime(2022, 3, 14) - assert to_datetime('2022-03-14T12Z') == datetime(2022, 3, 14, 12) - assert to_datetime('2022-03-14T12:30Z') == datetime(2022, 3, 14, 12, 30) - assert to_datetime('2022-03-14T12:30:45') == datetime(2022, 3, 14, 12, 30, 45) - assert to_datetime('2022-03-14T12:30:45Z') == datetime(2022, 3, 14, 12, 30, 45) - - -def test_to_timedelta(): - assert to_timedelta('2d3H4M5S') == timedelta(days=2, hours=3, minutes=4, seconds=5) - assert to_timedelta('-3H15M') == timedelta(hours=-3, minutes=-15) - assert to_timedelta('1:30:45') == timedelta(hours=1, minutes=30, seconds=45) - assert to_timedelta('5 days, 12:30:15') == timedelta(days=5, hours=12, minutes=30, seconds=15) - - -def test_datetime_to_ymdh(): - assert datetime_to_YMDH(current_date) == current_date.strftime('%Y%m%d%H') - - -def test_datetime_to_ymd(): - assert datetime_to_YMD(current_date) ==
current_date.strftime('%Y%m%d') - - -def test_timedelta_to_hms(): - td = timedelta(hours=5, minutes=39, seconds=56) - assert timedelta_to_HMS(td) == '05:39:56' - td = timedelta(days=4, hours=5, minutes=39, seconds=56) - assert timedelta_to_HMS(td) == '101:39:56' - - -def test_strftime(): - assert strftime(current_date, '%Y%m%d') == current_date.strftime('%Y%m%d') - assert strftime(current_date, '%Y%m%d %H') == current_date.strftime('%Y%m%d %H') - - -def test_strptime(): - assert strptime(current_date.strftime('%Y%m%d'), '%Y%m%d') == \ - datetime.strptime(current_date.strftime('%Y%m%d'), '%Y%m%d') - - -def test_to_isotime(): - assert to_isotime(current_date) == current_date.strftime('%Y-%m-%dT%H:%M:%SZ') - - -def test_to_fv3time(): - assert to_fv3time(current_date) == current_date.strftime('%Y%m%d.%H%M%S') - - -def test_to_julian(): - assert to_julian(current_date) == current_date.strftime('%Y%j') - - -def test_add_to_timedelta(): - assert add_to_timedelta(timedelta(days=1), timedelta(hours=3)) == \ - timedelta(days=1, hours=3) - assert add_to_timedelta(timedelta(hours=5, minutes=30), timedelta(minutes=15)) == \ - timedelta(hours=5, minutes=45) - assert add_to_timedelta(timedelta(seconds=45), timedelta(milliseconds=500)) == \ - timedelta(seconds=45, milliseconds=500) - - -def test_add_to_datetime(): - dt = datetime(2023, 3, 14, 12, 0, 0) - td = timedelta(days=1, hours=6) - negative_td = timedelta(days=-1, hours=-6) - zero_td = timedelta() - - assert add_to_datetime(dt, td) == datetime(2023, 3, 15, 18, 0, 0) - assert add_to_datetime(dt, negative_td) == datetime(2023, 3, 13, 6, 0, 0) - assert add_to_datetime(dt, zero_td) == datetime(2023, 3, 14, 12, 0, 0) diff --git a/ush/python/pygw/src/tests/test_yaml_file.py b/ush/python/pygw/src/tests/test_yaml_file.py deleted file mode 100644 index d01eb154b2..0000000000 --- a/ush/python/pygw/src/tests/test_yaml_file.py +++ /dev/null @@ -1,97 +0,0 @@ -import os -import pytest -from datetime import datetime -from pygw.yaml_file import YAMLFile, parse_yamltmpl, parse_j2yaml, save_as_yaml, dump_as_yaml - -host_yaml = """ -host: - hostname: test_host - host_user: !ENV ${USER} -""" - -conf_yaml = """ -config: - config_file: !ENV ${TMP_PATH}/config.yaml - user: !ENV ${USER} - host_file: !INC ${TMP_PATH}/host.yaml -""" - -tmpl_yaml = """ -config: - config_file: !ENV ${TMP_PATH}/config.yaml - user: !ENV ${USER} - host_file: !INC ${TMP_PATH}/host.yaml -tmpl: - cdate: '{{PDY}}{{cyc}}' - homedir: /home/$(user) -""" -# Note the quotes ' ' around {{ }}. These quotes are necessary for yaml otherwise yaml will fail parsing - -j2tmpl_yaml = """ -config: - config_file: !ENV ${TMP_PATH}/config.yaml - user: !ENV ${USER} - host_file: !INC ${TMP_PATH}/host.yaml -tmpl: - cdate: '{{ current_cycle | to_YMD }}{{ current_cycle | strftime('%H') }}' - homedir: /home/$(user) -""" - - -@pytest.fixture -def create_template(tmpdir): - """Create temporary templates for testing""" - tmpdir.join('host.yaml').write(host_yaml) - tmpdir.join('config.yaml').write(conf_yaml) - tmpdir.join('tmpl.yaml').write(tmpl_yaml) - tmpdir.join('j2tmpl.yaml').write(j2tmpl_yaml) - - -def test_yaml_file(tmp_path, create_template): - - # Set env. 
variable - os.environ['TMP_PATH'] = str(tmp_path) - conf = YAMLFile(path=str(tmp_path / 'config.yaml')) - - # Write out yaml file - yaml_out = tmp_path / 'config_output.yaml' - conf.save(yaml_out) - - # Read in the yaml file and compare w/ conf - yaml_in = YAMLFile(path=str(yaml_out)) - - assert yaml_in == conf - - -def test_yaml_file_with_templates(tmp_path, create_template): - - # Set env. variable - os.environ['TMP_PATH'] = str(tmp_path) - data = {'user': os.environ['USER']} - conf = parse_yamltmpl(path=str(tmp_path / 'tmpl.yaml'), data=data) - - # Write out yaml file - yaml_out = tmp_path / 'tmpl_output.yaml' - save_as_yaml(conf, yaml_out) - - # Read in the yaml file and compare w/ conf - yaml_in = YAMLFile(path=yaml_out) - - assert yaml_in == conf - - -def test_yaml_file_with_j2templates(tmp_path, create_template): - - # Set env. variable - os.environ['TMP_PATH'] = str(tmp_path) - data = {'user': os.environ['USER'], 'current_cycle': datetime.now()} - conf = parse_j2yaml(path=str(tmp_path / 'j2tmpl.yaml'), data=data) - - # Write out yaml file - yaml_out = tmp_path / 'j2tmpl_output.yaml' - save_as_yaml(conf, yaml_out) - - # Read in the yaml file and compare w/ conf - yaml_in = YAMLFile(path=yaml_out) - - assert yaml_in == conf diff --git a/ush/python/wxflow b/ush/python/wxflow new file mode 160000 index 0000000000..c8cfb83187 --- /dev/null +++ b/ush/python/wxflow @@ -0,0 +1 @@ +Subproject commit c8cfb8318751bb7d631bc556a712bbf3c4a58706 diff --git a/workflow/applications/application_factory.py b/workflow/applications/application_factory.py index 57b21394f8..93d2f36551 100644 --- a/workflow/applications/application_factory.py +++ b/workflow/applications/application_factory.py @@ -1,4 +1,4 @@ -from pygw.factory import Factory +from wxflow.factory import Factory from applications.gfs_cycled import GFSCycledAppConfig from applications.gfs_forecast_only import GFSForecastOnlyAppConfig from applications.gefs import GEFSAppConfig diff --git a/workflow/applications/applications.py b/workflow/applications/applications.py index c66cf3aaca..f2258aa4eb 100644 --- a/workflow/applications/applications.py +++ b/workflow/applications/applications.py @@ -2,7 +2,7 @@ from typing import Dict, List, Any from hosts import Host -from pygw.configuration import Configuration +from wxflow.configuration import Configuration from abc import ABC, ABCMeta, abstractmethod __all__ = ['AppConfig'] diff --git a/workflow/applications/gefs.py b/workflow/applications/gefs.py index f618767ee2..18fb4f957c 100644 --- a/workflow/applications/gefs.py +++ b/workflow/applications/gefs.py @@ -1,5 +1,5 @@ from applications.applications import AppConfig -from pygw.configuration import Configuration +from wxflow.configuration import Configuration class GEFSAppConfig(AppConfig): diff --git a/workflow/applications/gfs_cycled.py b/workflow/applications/gfs_cycled.py index 432ec87d17..9978868df3 100644 --- a/workflow/applications/gfs_cycled.py +++ b/workflow/applications/gfs_cycled.py @@ -1,6 +1,6 @@ from typing import Dict, Any from applications.applications import AppConfig -from pygw.configuration import Configuration +from wxflow.configuration import Configuration from datetime import timedelta diff --git a/workflow/applications/gfs_forecast_only.py b/workflow/applications/gfs_forecast_only.py index 6415b11bf2..ba14b8cb80 100644 --- a/workflow/applications/gfs_forecast_only.py +++ b/workflow/applications/gfs_forecast_only.py @@ -1,5 +1,5 @@ from applications.applications import AppConfig -from pygw.configuration import Configuration 
+from wxflow.configuration import Configuration class GFSForecastOnlyAppConfig(AppConfig): diff --git a/workflow/hosts.py b/workflow/hosts.py index bb0128cb01..eeaa75d43e 100644 --- a/workflow/hosts.py +++ b/workflow/hosts.py @@ -3,7 +3,7 @@ import os from pathlib import Path -from pygw.yaml_file import YAMLFile +from wxflow.yaml_file import YAMLFile __all__ = ['Host'] diff --git a/workflow/pygw b/workflow/pygw deleted file mode 120000 index dfa1d9a164..0000000000 --- a/workflow/pygw +++ /dev/null @@ -1 +0,0 @@ -../ush/python/pygw/src/pygw \ No newline at end of file diff --git a/workflow/rocoto/gefs_xml.py b/workflow/rocoto/gefs_xml.py index f25544a9cc..5f792d9dee 100644 --- a/workflow/rocoto/gefs_xml.py +++ b/workflow/rocoto/gefs_xml.py @@ -2,7 +2,7 @@ from rocoto.workflow_xml import RocotoXML from applications.applications import AppConfig -from pygw.timetools import to_timedelta +from wxflow.timetools import to_timedelta from typing import Dict diff --git a/workflow/rocoto/gfs_cycled_xml.py b/workflow/rocoto/gfs_cycled_xml.py index 3d2e37c18e..c572de25d9 100644 --- a/workflow/rocoto/gfs_cycled_xml.py +++ b/workflow/rocoto/gfs_cycled_xml.py @@ -2,7 +2,7 @@ from rocoto.workflow_xml import RocotoXML from applications.applications import AppConfig -from pygw.timetools import to_timedelta +from wxflow.timetools import to_timedelta from typing import Dict diff --git a/workflow/rocoto/gfs_forecast_only_xml.py b/workflow/rocoto/gfs_forecast_only_xml.py index 7aba6bfbbd..d26d9ad117 100644 --- a/workflow/rocoto/gfs_forecast_only_xml.py +++ b/workflow/rocoto/gfs_forecast_only_xml.py @@ -2,7 +2,7 @@ from rocoto.workflow_xml import RocotoXML from applications.applications import AppConfig -from pygw.timetools import to_timedelta +from wxflow.timetools import to_timedelta from typing import Dict diff --git a/workflow/rocoto/rocoto_xml_factory.py b/workflow/rocoto/rocoto_xml_factory.py index 94453ccd42..9fc7b26d1b 100644 --- a/workflow/rocoto/rocoto_xml_factory.py +++ b/workflow/rocoto/rocoto_xml_factory.py @@ -1,4 +1,4 @@ -from pygw.factory import Factory +from wxflow.factory import Factory from rocoto.gfs_cycled_xml import GFSCycledRocotoXML from rocoto.gfs_forecast_only_xml import GFSForecastOnlyRocotoXML from rocoto.gefs_xml import GEFSRocotoXML diff --git a/workflow/rocoto/tasks.py b/workflow/rocoto/tasks.py index 06c1bcffe2..b07e4c9e4d 100644 --- a/workflow/rocoto/tasks.py +++ b/workflow/rocoto/tasks.py @@ -3,7 +3,7 @@ import numpy as np from applications.applications import AppConfig import rocoto.rocoto as rocoto -from pygw.template import Template, TemplateConstants +from wxflow.template import Template, TemplateConstants __all__ = ['Tasks', 'create_wf_task'] diff --git a/workflow/rocoto/tasks_factory.py b/workflow/rocoto/tasks_factory.py index 4d3bbb2262..d580dc3b95 100644 --- a/workflow/rocoto/tasks_factory.py +++ b/workflow/rocoto/tasks_factory.py @@ -1,4 +1,4 @@ -from pygw.factory import Factory +from wxflow.factory import Factory from rocoto.gfs_tasks import GFSTasks from rocoto.gefs_tasks import GEFSTasks diff --git a/workflow/setup_expt.py b/workflow/setup_expt.py index b127367220..1ea132d14a 100755 --- a/workflow/setup_expt.py +++ b/workflow/setup_expt.py @@ -12,9 +12,9 @@ from hosts import Host -from pygw.yaml_file import parse_j2yaml -from pygw.attrdict import AttrDict -from pygw.timetools import to_datetime, to_timedelta, datetime_to_YMDH +from wxflow.yaml_file import parse_j2yaml +from wxflow.attrdict import AttrDict +from wxflow.timetools import to_datetime, to_timedelta, 
datetime_to_YMDH _here = os.path.dirname(__file__) diff --git a/workflow/setup_xml.py b/workflow/setup_xml.py index 1bad9edba5..0d06608d21 100755 --- a/workflow/setup_xml.py +++ b/workflow/setup_xml.py @@ -8,7 +8,7 @@ from applications.application_factory import app_config_factory from rocoto.rocoto_xml_factory import rocoto_xml_factory -from pygw.configuration import Configuration +from wxflow.configuration import Configuration def input_args(): diff --git a/workflow/test_configuration.py b/workflow/test_configuration.py index 5c59fd35bf..888ec5dd43 100644 --- a/workflow/test_configuration.py +++ b/workflow/test_configuration.py @@ -1,5 +1,5 @@ import sys -from pygw.configuration import Configuration +from wxflow.configuration import Configuration expdir = sys.argv[1] diff --git a/workflow/wxflow b/workflow/wxflow new file mode 120000 index 0000000000..556e7ab91a --- /dev/null +++ b/workflow/wxflow @@ -0,0 +1 @@ +../ush/python/wxflow/src/wxflow \ No newline at end of file From 8d5beb1f0b5a28c4127c05cea84c07aa87fdeb97 Mon Sep 17 00:00:00 2001 From: Rahul Mahajan Date: Sun, 2 Jul 2023 23:44:17 -0400 Subject: [PATCH 02/19] update documentation to reflect checkout with submodules --- docs/source/clone.rst | 44 ++++++++++++++++++------------------------- 1 file changed, 18 insertions(+), 26 deletions(-) diff --git a/docs/source/clone.rst b/docs/source/clone.rst index 977c9b9895..2461ccbfb4 100644 --- a/docs/source/clone.rst +++ b/docs/source/clone.rst @@ -11,35 +11,38 @@ Quick clone/build/link instructions (more detailed instructions below). .. note:: Here we are making the assumption that you are using the workflow to run an experiment and so are working from the authoritative repository. If you are using a development branch then follow the instructions in :doc:`development.rst`. Once you do that you can follow the instructions here with the only difference being the repository/fork you are cloning from. -For forecast-only (coupled or uncoupled): +Clone the `global-workflow` (note the use of `--recursive`) and `cd` into the `sorc` directory: :: - git clone https://github.com/NOAA-EMC/global-workflow.git + git clone --recursive https://github.com/NOAA-EMC/global-workflow cd global-workflow/sorc + +For forecast-only (coupled or uncoupled) checkout the components: + +:: + ./checkout.sh - ./build_all.sh - ./link_workflow.sh -For cycled (w/ data assimilation): +For cycled (w/ data assimilation) use the `-g` option during checkout: :: - git clone https://github.com/NOAA-EMC/global-workflow.git - cd global-workflow/sorc ./checkout.sh -g - ./build_all.sh - ./link_workflow.sh -For coupled cycling (include new UFSDA): +For coupled cycling (include new UFSDA) use the `-gu` options during checkout: [Currently only available on Hera and Orion] :: - git clone https://github.com/NOAA-EMC/global-workflow.git - cd global-workflow/sorc ./checkout.sh -gu + + +Build workflow components and link workflow artifacts such as executables, etc. + +:: + ./build_all.sh ./link_workflow.sh @@ -52,24 +55,13 @@ Clone workflow and component repositories Workflow ******** -There are several ways to clone repositories from GitHub. Below we describe how to clone the global-workflow using either the ssh or https methods. **The ssh method is highly preferred and recommended.** - -ssh method (using a password protected SSH key): - -:: - - git clone git@github.com:NOAA-EMC/global-workflow.git - -.. 
note:: - When using ssh methods you need to make sure that your GitHub account is configured for the computer from which you are accessing the repository (See `this link `_) - -https method: +There are several ways to clone repositories from GitHub. Below we describe how to clone the global-workflow using the `https` methods. :: - git clone https://github.com/NOAA-EMC/global-workflow.git + git clone --recursive https://github.com/NOAA-EMC/global-workflow -Check what you just cloned (by default you will have only the develop branch): +Check what you just cloned (by default you will have only the `develop` branch): :: From 218bdc1a6e73fbbb6b51cd46ffb24ecc7c217891 Mon Sep 17 00:00:00 2001 From: Rahul Mahajan Date: Sun, 2 Jul 2023 23:49:44 -0400 Subject: [PATCH 03/19] remove pytests, update README and ci script --- .github/workflows/pytests.yaml | 36 ---------------------------------- README.md | 1 - ci/scripts/clone-build_ci.sh | 2 +- 3 files changed, 1 insertion(+), 38 deletions(-) delete mode 100644 .github/workflows/pytests.yaml diff --git a/.github/workflows/pytests.yaml b/.github/workflows/pytests.yaml deleted file mode 100644 index f15a776c0f..0000000000 --- a/.github/workflows/pytests.yaml +++ /dev/null @@ -1,36 +0,0 @@ -name: pytests -on: [push, pull_request] - -jobs: - run_pytests: - runs-on: ubuntu-latest - name: Install pygw and run tests with pytests - strategy: - max-parallel: 1 - matrix: - python: ["3.7", "3.8", "3.9", "3.10"] - - steps: - - name: Setup Python - uses: actions/setup-python@v4 - with: - python-version: ${{ matrix.python }} - - - name: Install (upgrade) python dependencies - run: | - pip install --upgrade pip - - - name: Checkout - uses: actions/checkout@v3 - with: - path: global-workflow - - - name: Install pygw - run: | - cd $GITHUB_WORKSPACE/global-workflow/ush/python/pygw - pip install .[dev] - - - name: Run pytests - run: | - cd $GITHUB_WORKSPACE/global-workflow/ush/python/pygw - pytest -v src/tests diff --git a/README.md b/README.md index 66d54e3d20..6b8fbf589f 100644 --- a/README.md +++ b/README.md @@ -1,7 +1,6 @@ [![Read The Docs Status](https://readthedocs.org/projects/global-workflow/badge/?badge=latest)](http://global-workflow.readthedocs.io/) [![shellnorms](https://github.com/NOAA-EMC/global-workflow/actions/workflows/linters.yaml/badge.svg)](https://github.com/NOAA-EMC/global-workflow/actions/workflows/linters.yaml) [![pynorms](https://github.com/NOAA-EMC/global-workflow/actions/workflows/pynorms.yaml/badge.svg)](https://github.com/NOAA-EMC/global-workflow/actions/workflows/pynorms.yaml) -[![pytests](https://github.com/NOAA-EMC/global-workflow/actions/workflows/pytests.yaml/badge.svg)](https://github.com/NOAA-EMC/global-workflow/actions/workflows/pytests.yaml) ![Custom badge](https://img.shields.io/endpoint?url=https://gist.githubusercontent.com/emcbot/e35aa2904a54deae6bbb1fdc2d960c71/raw/hera.json) ![Custom badge](https://img.shields.io/endpoint?url=https://gist.githubusercontent.com/emcbot/e35aa2904a54deae6bbb1fdc2d960c71/raw/orion.json) diff --git a/ci/scripts/clone-build_ci.sh b/ci/scripts/clone-build_ci.sh index eb2539753b..2771c442a2 100755 --- a/ci/scripts/clone-build_ci.sh +++ b/ci/scripts/clone-build_ci.sh @@ -46,7 +46,7 @@ if [[ -d global-workflow ]]; then rm -Rf global-workflow fi -git clone "${REPO_URL}" +git clone --recursive "${REPO_URL}" cd global-workflow || exit 1 pr_state=$("${GH}" pr view "${PR}" --json state --jq '.state') From 71741973345697589141bd0184e6c34188cc4ea5 Mon Sep 17 00:00:00 2001 From: Rahul Mahajan Date: Mon, 3 Jul 
2023 09:27:32 -0400 Subject: [PATCH 04/19] update submodules when checkout --- ci/scripts/clone-build_ci.sh | 17 +++++++++-------- docs/source/clone.rst | 2 +- 2 files changed, 10 insertions(+), 9 deletions(-) diff --git a/ci/scripts/clone-build_ci.sh b/ci/scripts/clone-build_ci.sh index 2771c442a2..1faed6c956 100755 --- a/ci/scripts/clone-build_ci.sh +++ b/ci/scripts/clone-build_ci.sh @@ -10,7 +10,7 @@ usage() { echo "Usage: $0 -p -d -o -h" echo echo " -p PR nunber to clone and build" - echo " -d Full path of of were to clone and build PR" + echo " -d Full path of of were to clone and build PR" echo " -o Full path to output message file detailing results of CI tests" echo " -h display this message and quit" echo @@ -35,8 +35,7 @@ while getopts "p:d:o:h" opt; do *) echo "Unrecognized option" usage - exit - ;; + ;; esac done @@ -46,18 +45,20 @@ if [[ -d global-workflow ]]; then rm -Rf global-workflow fi -git clone --recursive "${REPO_URL}" +git clone "${REPO_URL}" cd global-workflow || exit 1 +git submodule update --init --recursive pr_state=$("${GH}" pr view "${PR}" --json state --jq '.state') if [[ "${pr_state}" != "OPEN" ]]; then title=$("${GH}" pr view "${PR}" --json title --jq '.title') echo "PR ${title} is no longer open, state is ${pr_state} ... quitting" exit 1 -fi - +fi + # checkout pull request "${GH}" pr checkout "${PR}" --repo "${REPO_URL}" +git submodule update --init --recursive HOMEgfs="${PWD}" source "${HOMEgfs}/ush/detect_machine.sh" @@ -79,7 +80,7 @@ echo "${commit}" > "../commit" # run checkout script cd sorc || exit 1 set +e -./checkout.sh -c -g -u &>> log.checkout +./checkout.sh -c -g -u >> log.checkout checkout_status=$? if [[ ${checkout_status} != 0 ]]; then { @@ -99,7 +100,7 @@ fi source "${HOMEgfs}/ush/module-setup.sh" export BUILD_JOBS=8 rm -rf log.build -./build_all.sh &>> log.build +./build_all.sh >> log.build build_status=$? if [[ ${build_status} != 0 ]]; then diff --git a/docs/source/clone.rst b/docs/source/clone.rst index 2461ccbfb4..a4ef73189b 100644 --- a/docs/source/clone.rst +++ b/docs/source/clone.rst @@ -55,7 +55,7 @@ Clone workflow and component repositories Workflow ******** -There are several ways to clone repositories from GitHub. Below we describe how to clone the global-workflow using the `https` methods. +There are several ways to clone repositories from GitHub. Below we describe how to clone the global-workflow using the `https` methods. 
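# Note: a bare '>>' appends only stdout; any stderr from the build would
# bypass log.build. Capturing both streams would take a variant like the
# following (hypothetical at this point in the series):
#   ./build_all.sh >> log.build 2>&1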
(Note the use of `--recursive` to clone submodules of `global-workflow`) :: From f5cdab4471cac3001b8c633d52fe896036003145 Mon Sep 17 00:00:00 2001 From: Rahul Mahajan Date: Mon, 3 Jul 2023 10:12:54 -0400 Subject: [PATCH 05/19] add .editorconfig to project --- .editorconfig | 29 +++++++++++++++++++++++++++++ ci/scripts/clone-build_ci.sh | 1 - 2 files changed, 29 insertions(+), 1 deletion(-) create mode 100644 .editorconfig diff --git a/.editorconfig b/.editorconfig new file mode 100644 index 0000000000..02b0658f3d --- /dev/null +++ b/.editorconfig @@ -0,0 +1,29 @@ +# EditorConfig helps developers define and maintain consistent +# coding styles between different editors and IDEs +# editorconfig.org + +root = true + + +[*] +end_of_line = lf +charset = utf-8 +trim_trailing_whitespace = true +insert_final_newline = true +indent_style = space +indent_size = 2 + +[*.py] +indent_size = 4 +max_line_length = 88 + +[*.txt] +indent_style = tab +indent_size = 4 +max_line_length = 79 + +[*.{diff}] +trim_trailing_whitespace = false + +[Makefile] +indent_style = tab diff --git a/ci/scripts/clone-build_ci.sh b/ci/scripts/clone-build_ci.sh index 1faed6c956..76be6219d2 100755 --- a/ci/scripts/clone-build_ci.sh +++ b/ci/scripts/clone-build_ci.sh @@ -66,7 +66,6 @@ source "${HOMEgfs}/ush/detect_machine.sh" # start output file { echo "Automated global-workflow Testing Results:" - echo '```' echo "Machine: ${MACHINE_ID^}" echo "Start: $(date) on $(hostname)" || true echo "---------------------------------------------------" From 138dd7aec61a6c2387ec7ff3521717c708f4c7cb Mon Sep 17 00:00:00 2001 From: Rahul Mahajan Date: Mon, 3 Jul 2023 11:22:05 -0400 Subject: [PATCH 06/19] update submodule pointer to wxflow --- .gitmodules | 2 +- ush/python/wxflow | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.gitmodules b/.gitmodules index 5d860873ed..51f381b0ca 100644 --- a/.gitmodules +++ b/.gitmodules @@ -1,4 +1,4 @@ [submodule "ush/python/wxflow"] path = ush/python/wxflow url = https://github.com/noaa-emc/wxflow - branch = feature/wxflow + branch = develop diff --git a/ush/python/wxflow b/ush/python/wxflow index c8cfb83187..1db322ce19 160000 --- a/ush/python/wxflow +++ b/ush/python/wxflow @@ -1 +1 @@ -Subproject commit c8cfb8318751bb7d631bc556a712bbf3c4a58706 +Subproject commit 1db322ce197f004545956389d475d5654d3e0b55 From eacdd83e24337408c2f3b0aac540a4c84e0dd7e3 Mon Sep 17 00:00:00 2001 From: Rahul Mahajan Date: Mon, 3 Jul 2023 15:54:26 -0400 Subject: [PATCH 07/19] update ci script after code review --- ci/scripts/clone-build_ci.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/ci/scripts/clone-build_ci.sh b/ci/scripts/clone-build_ci.sh index 76be6219d2..32fa5dcd1d 100755 --- a/ci/scripts/clone-build_ci.sh +++ b/ci/scripts/clone-build_ci.sh @@ -79,7 +79,7 @@ echo "${commit}" > "../commit" # run checkout script cd sorc || exit 1 set +e -./checkout.sh -c -g -u >> log.checkout +./checkout.sh -c -g -u >> log.checkout 2>&1 checkout_status=$? if [[ ${checkout_status} != 0 ]]; then { @@ -99,7 +99,7 @@ fi source "${HOMEgfs}/ush/module-setup.sh" export BUILD_JOBS=8 rm -rf log.build -./build_all.sh >> log.build +./build_all.sh >> log.build 2>&1 build_status=$? 
From 1c0dfa0efa9db7265c4468f5ccf6f8ec5362c9a5 Mon Sep 17 00:00:00 2001
From: Rahul Mahajan
Date: Fri, 7 Jul 2023 13:11:12 -0400
Subject: [PATCH 08/19] remove wxflow git submodule

---
 .gitmodules       | 4 ----
 ush/python/wxflow | 1 -
 2 files changed, 5 deletions(-)
 delete mode 160000 ush/python/wxflow

diff --git a/.gitmodules b/.gitmodules
index 51f381b0ca..e69de29bb2 100644
--- a/.gitmodules
+++ b/.gitmodules
@@ -1,4 +0,0 @@
-[submodule "ush/python/wxflow"]
-  path = ush/python/wxflow
-  url = https://github.com/noaa-emc/wxflow
-  branch = develop

diff --git a/ush/python/wxflow b/ush/python/wxflow
deleted file mode 160000
index 1db322ce19..0000000000
--- a/ush/python/wxflow
+++ /dev/null
@@ -1 +0,0 @@
-Subproject commit 1db322ce197f004545956389d475d5654d3e0b55

From 125ccb40e2ba6f06b9ce88001a59cee83e31a665 Mon Sep 17 00:00:00 2001
From: Rahul Mahajan
Date: Fri, 7 Jul 2023 13:11:52 -0400
Subject: [PATCH 09/19] remove .gitmodules

---
 .gitmodules | 0
 1 file changed, 0 insertions(+), 0 deletions(-)
 delete mode 100644 .gitmodules

diff --git a/.gitmodules b/.gitmodules
deleted file mode 100644
index e69de29bb2..0000000000
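With the submodule gone, the next patch pins wxflow to a specific commit through Externals.cfg and checkout.sh instead. A sketch of roughly what that managed checkout amounts to (not the actual checkout() helper):

::

    git clone https://github.com/NOAA-EMC/wxflow.git sorc/wxflow
    git -C sorc/wxflow checkout 528f5ab   # detach at the pinned tag/hash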
+cd "${top_dir}/ci/scripts" || exit 1 +${LINK} "${top_dir}/sorc/wxflow/src/wxflow" . + # Link post +cd "${top_dir}/sorc" || exit 1 [[ -d upp.fd ]] && rm -rf upp.fd ${LINK} ufs_model.fd/FV3/upp upp.fd From e176bf76187b1fba96db02922c54c40c102de280 Mon Sep 17 00:00:00 2001 From: Rahul Mahajan Date: Fri, 7 Jul 2023 14:23:51 -0400 Subject: [PATCH 11/19] remove wxflow links --- ci/scripts/wxflow | 1 - sorc/link_workflow.sh | 44 +++++++++++++++++++++---------------------- workflow/wxflow | 1 - 3 files changed, 21 insertions(+), 25 deletions(-) delete mode 120000 ci/scripts/wxflow delete mode 120000 workflow/wxflow diff --git a/ci/scripts/wxflow b/ci/scripts/wxflow deleted file mode 120000 index 36aeb9617b..0000000000 --- a/ci/scripts/wxflow +++ /dev/null @@ -1 +0,0 @@ -../../ush/python/wxflow/src/wxflow \ No newline at end of file diff --git a/sorc/link_workflow.sh b/sorc/link_workflow.sh index 3954954ff8..cc83b63718 100755 --- a/sorc/link_workflow.sh +++ b/sorc/link_workflow.sh @@ -77,7 +77,6 @@ esac source "${top_dir}/versions/fix.ver" LINK="ln -fs" -SLINK="ln -fs" if [[ "${RUN_ENVIR}" == "nco" ]]; then LINK="cp -rp" fi @@ -85,10 +84,13 @@ fi # Link wxflow in ush/python, workflow and ci/scripts # TODO: This will be unnecessary when wxflow is part of the virtualenv cd "${top_dir}/ush/python" || exit 1 +[[ -s "wxflow" ]] && rm -f wxflow ${LINK} "${top_dir}/sorc/wxflow/src/wxflow" . cd "${top_dir}/workflow" || exit 1 +[[ -s "wxflow" ]] && rm -f wxflow ${LINK} "${top_dir}/sorc/wxflow/src/wxflow" . cd "${top_dir}/ci/scripts" || exit 1 +[[ -s "wxflow" ]] && rm -f wxflow ${LINK} "${top_dir}/sorc/wxflow/src/wxflow" . # Link post @@ -301,70 +303,66 @@ cd "${script_dir}" || exit 8 if [[ -d gsi_enkf.fd ]]; then [[ -d gsi.fd ]] && rm -rf gsi.fd - ${SLINK} gsi_enkf.fd/src/gsi gsi.fd + ${LINK} gsi_enkf.fd/src/gsi gsi.fd [[ -d enkf.fd ]] && rm -rf enkf.fd - ${SLINK} gsi_enkf.fd/src/enkf enkf.fd + ${LINK} gsi_enkf.fd/src/enkf enkf.fd fi if [[ -d gsi_utils.fd ]]; then [[ -d calc_analysis.fd ]] && rm -rf calc_analysis.fd - ${SLINK} gsi_utils.fd/src/netcdf_io/calc_analysis.fd calc_analysis.fd + ${LINK} gsi_utils.fd/src/netcdf_io/calc_analysis.fd calc_analysis.fd [[ -d calc_increment_ens.fd ]] && rm -rf calc_increment_ens.fd - ${SLINK} gsi_utils.fd/src/EnKF/gfs/src/calc_increment_ens.fd calc_increment_ens.fd + ${LINK} gsi_utils.fd/src/EnKF/gfs/src/calc_increment_ens.fd calc_increment_ens.fd [[ -d calc_increment_ens_ncio.fd ]] && rm -rf calc_increment_ens_ncio.fd - ${SLINK} gsi_utils.fd/src/EnKF/gfs/src/calc_increment_ens_ncio.fd calc_increment_ens_ncio.fd + ${LINK} gsi_utils.fd/src/EnKF/gfs/src/calc_increment_ens_ncio.fd calc_increment_ens_ncio.fd [[ -d getsfcensmeanp.fd ]] && rm -rf getsfcensmeanp.fd - ${SLINK} gsi_utils.fd/src/EnKF/gfs/src/getsfcensmeanp.fd getsfcensmeanp.fd + ${LINK} gsi_utils.fd/src/EnKF/gfs/src/getsfcensmeanp.fd getsfcensmeanp.fd [[ -d getsigensmeanp_smooth.fd ]] && rm -rf getsigensmeanp_smooth.fd - ${SLINK} gsi_utils.fd/src/EnKF/gfs/src/getsigensmeanp_smooth.fd getsigensmeanp_smooth.fd + ${LINK} gsi_utils.fd/src/EnKF/gfs/src/getsigensmeanp_smooth.fd getsigensmeanp_smooth.fd [[ -d getsigensstatp.fd ]] && rm -rf getsigensstatp.fd - ${SLINK} gsi_utils.fd/src/EnKF/gfs/src/getsigensstatp.fd getsigensstatp.fd + ${LINK} gsi_utils.fd/src/EnKF/gfs/src/getsigensstatp.fd getsigensstatp.fd [[ -d recentersigp.fd ]] && rm -rf recentersigp.fd - ${SLINK} gsi_utils.fd/src/EnKF/gfs/src/recentersigp.fd recentersigp.fd + ${LINK} gsi_utils.fd/src/EnKF/gfs/src/recentersigp.fd recentersigp.fd [[ -d interp_inc.fd 
   [[ -d interp_inc.fd ]] && rm -rf interp_inc.fd
-  ${SLINK} gsi_utils.fd/src/netcdf_io/interp_inc.fd interp_inc.fd
+  ${LINK} gsi_utils.fd/src/netcdf_io/interp_inc.fd interp_inc.fd
 fi

 if [[ -d gsi_monitor.fd ]] ; then
   [[ -d oznmon_horiz.fd ]] && rm -rf oznmon_horiz.fd
-  ${SLINK} gsi_monitor.fd/src/Ozone_Monitor/nwprod/oznmon_shared/sorc/oznmon_horiz.fd oznmon_horiz.fd
+  ${LINK} gsi_monitor.fd/src/Ozone_Monitor/nwprod/oznmon_shared/sorc/oznmon_horiz.fd oznmon_horiz.fd

   [[ -d oznmon_time.fd ]] && rm -rf oznmon_time.fd
-  ${SLINK} gsi_monitor.fd/src/Ozone_Monitor/nwprod/oznmon_shared/sorc/oznmon_time.fd oznmon_time.fd
+  ${LINK} gsi_monitor.fd/src/Ozone_Monitor/nwprod/oznmon_shared/sorc/oznmon_time.fd oznmon_time.fd

   [[ -d radmon_angle.fd ]] && rm -rf radmon_angle.fd
-  ${SLINK} gsi_monitor.fd/src/Radiance_Monitor/nwprod/radmon_shared/sorc/verf_radang.fd radmon_angle.fd
+  ${LINK} gsi_monitor.fd/src/Radiance_Monitor/nwprod/radmon_shared/sorc/verf_radang.fd radmon_angle.fd

   [[ -d radmon_bcoef.fd ]] && rm -rf radmon_bcoef.fd
-  ${SLINK} gsi_monitor.fd/src/Radiance_Monitor/nwprod/radmon_shared/sorc/verf_radbcoef.fd radmon_bcoef.fd
+  ${LINK} gsi_monitor.fd/src/Radiance_Monitor/nwprod/radmon_shared/sorc/verf_radbcoef.fd radmon_bcoef.fd

   [[ -d radmon_bcor.fd ]] && rm -rf radmon_bcor.fd
-  ${SLINK} gsi_monitor.fd/src/Radiance_Monitor/nwprod/radmon_shared/sorc/verf_radbcor.fd radmon_bcor.fd
+  ${LINK} gsi_monitor.fd/src/Radiance_Monitor/nwprod/radmon_shared/sorc/verf_radbcor.fd radmon_bcor.fd

   [[ -d radmon_time.fd ]] && rm -rf radmon_time.fd
-  ${SLINK} gsi_monitor.fd/src/Radiance_Monitor/nwprod/radmon_shared/sorc/verf_radtime.fd radmon_time.fd
+  ${LINK} gsi_monitor.fd/src/Radiance_Monitor/nwprod/radmon_shared/sorc/verf_radtime.fd radmon_time.fd
 fi

 [[ -d gfs_ncep_post.fd ]] && rm -rf gfs_ncep_post.fd
-${SLINK} upp.fd/sorc/ncep_post.fd gfs_ncep_post.fd
+${LINK} upp.fd/sorc/ncep_post.fd gfs_ncep_post.fd

-for prog in fregrid make_hgrid make_solo_mosaic ; do
-  [[ -d "${prog}.fd" ]] && rm -rf "${prog}.fd"
-  ${SLINK} "ufs_utils.fd/sorc/fre-nctools.fd/tools/${prog}" "${prog}.fd"
-done
 for prog in global_cycle.fd \
   emcsfc_ice_blend.fd \
   emcsfc_snow2mdl.fd ;do
   [[ -d "${prog}" ]] && rm -rf "${prog}"
-  ${SLINK} "ufs_utils.fd/sorc/${prog}" "${prog}"
+  ${LINK} "ufs_utils.fd/sorc/${prog}" "${prog}"
 done

 for prog in enkf_chgres_recenter_nc.fd \

diff --git a/workflow/wxflow b/workflow/wxflow
deleted file mode 120000
index 556e7ab91a..0000000000
--- a/workflow/wxflow
+++ /dev/null
@@ -1 +0,0 @@
-../ush/python/wxflow/src/wxflow
\ No newline at end of file
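Collapsing `SLINK` into `LINK` above is safe because both were defined as `ln -fs`: force-create a symbolic link, replacing any existing one. A minimal sketch of those semantics, with hypothetical paths:

::

    ln -fs /path/to/source linkname   # creates linkname, replacing it if present
    ln -fs /path/to/other  linkname   # re-points the same link without error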
From b19af2753f20f83c63cbde13a3fcfc4f36c36983 Mon Sep 17 00:00:00 2001
From: Rahul Mahajan
Date: Fri, 7 Jul 2023 15:21:42 -0400
Subject: [PATCH 12/19] import directly from wxflow

---
 workflow/applications/application_factory.py | 2 +-
 workflow/applications/applications.py        | 2 +-
 workflow/applications/gefs.py                | 2 +-
 workflow/applications/gfs_cycled.py          | 2 +-
 workflow/applications/gfs_forecast_only.py   | 2 +-
 workflow/hosts.py                            | 2 +-
 workflow/rocoto/gefs_xml.py                  | 2 +-
 workflow/rocoto/gfs_cycled_xml.py            | 2 +-
 workflow/rocoto/gfs_forecast_only_xml.py     | 2 +-
 workflow/rocoto/rocoto_xml_factory.py        | 2 +-
 workflow/rocoto/tasks.py                     | 2 +-
 workflow/rocoto/tasks_factory.py             | 2 +-
 workflow/setup_expt.py                       | 6 +++---
 workflow/setup_xml.py                        | 2 +-
 workflow/test_configuration.py               | 2 +-
 15 files changed, 17 insertions(+), 17 deletions(-)

diff --git a/workflow/applications/application_factory.py b/workflow/applications/application_factory.py
index 93d2f36551..ff6b6992f4 100644
--- a/workflow/applications/application_factory.py
+++ b/workflow/applications/application_factory.py
@@ -1,4 +1,4 @@
-from wxflow.factory import Factory
+from wxflow import Factory
 from applications.gfs_cycled import GFSCycledAppConfig
 from applications.gfs_forecast_only import GFSForecastOnlyAppConfig
 from applications.gefs import GEFSAppConfig

diff --git a/workflow/applications/applications.py b/workflow/applications/applications.py
index f2258aa4eb..19c2082dd3 100644
--- a/workflow/applications/applications.py
+++ b/workflow/applications/applications.py
@@ -2,7 +2,7 @@
 from typing import Dict, List, Any
 from hosts import Host
-from wxflow.configuration import Configuration
+from wxflow import Configuration
 from abc import ABC, ABCMeta, abstractmethod

 __all__ = ['AppConfig']

diff --git a/workflow/applications/gefs.py b/workflow/applications/gefs.py
index 18fb4f957c..a46451bd3e 100644
--- a/workflow/applications/gefs.py
+++ b/workflow/applications/gefs.py
@@ -1,5 +1,5 @@
 from applications.applications import AppConfig
-from wxflow.configuration import Configuration
+from wxflow import Configuration


 class GEFSAppConfig(AppConfig):

diff --git a/workflow/applications/gfs_cycled.py b/workflow/applications/gfs_cycled.py
index 9978868df3..3478d563a3 100644
--- a/workflow/applications/gfs_cycled.py
+++ b/workflow/applications/gfs_cycled.py
@@ -1,6 +1,6 @@
 from typing import Dict, Any
 from applications.applications import AppConfig
-from wxflow.configuration import Configuration
+from wxflow import Configuration
 from datetime import timedelta

diff --git a/workflow/applications/gfs_forecast_only.py b/workflow/applications/gfs_forecast_only.py
index ba14b8cb80..5477e13cc6 100644
--- a/workflow/applications/gfs_forecast_only.py
+++ b/workflow/applications/gfs_forecast_only.py
@@ -1,5 +1,5 @@
 from applications.applications import AppConfig
-from wxflow.configuration import Configuration
+from wxflow import Configuration


 class GFSForecastOnlyAppConfig(AppConfig):

diff --git a/workflow/hosts.py b/workflow/hosts.py
index eeaa75d43e..0b199ba382 100644
--- a/workflow/hosts.py
+++ b/workflow/hosts.py
@@ -3,7 +3,7 @@
 import os
 from pathlib import Path

-from wxflow.yaml_file import YAMLFile
+from wxflow import YAMLFile

 __all__ = ['Host']

diff --git a/workflow/rocoto/gefs_xml.py b/workflow/rocoto/gefs_xml.py
index 5f792d9dee..7e8df32991 100644
--- a/workflow/rocoto/gefs_xml.py
+++ b/workflow/rocoto/gefs_xml.py
@@ -2,7 +2,7 @@
 from rocoto.workflow_xml import RocotoXML
 from applications.applications import AppConfig
-from wxflow.timetools import to_timedelta
+from wxflow import to_timedelta
 from typing import Dict

diff --git a/workflow/rocoto/gfs_cycled_xml.py b/workflow/rocoto/gfs_cycled_xml.py
index c572de25d9..40777bcb8c 100644
--- a/workflow/rocoto/gfs_cycled_xml.py
+++ b/workflow/rocoto/gfs_cycled_xml.py
@@ -2,7 +2,7 @@
 from rocoto.workflow_xml import RocotoXML
 from applications.applications import AppConfig
-from wxflow.timetools import to_timedelta
+from wxflow import to_timedelta
 from typing import Dict

diff --git a/workflow/rocoto/gfs_forecast_only_xml.py b/workflow/rocoto/gfs_forecast_only_xml.py
index d26d9ad117..7ae4eb34c5 100644
--- a/workflow/rocoto/gfs_forecast_only_xml.py
+++ b/workflow/rocoto/gfs_forecast_only_xml.py
@@ -2,7 +2,7 @@
 from rocoto.workflow_xml import RocotoXML
 from applications.applications import AppConfig
-from wxflow.timetools import to_timedelta
+from wxflow import to_timedelta
 from typing import Dict
diff --git a/workflow/rocoto/rocoto_xml_factory.py b/workflow/rocoto/rocoto_xml_factory.py
index 9fc7b26d1b..cb2d4c276c 100644
--- a/workflow/rocoto/rocoto_xml_factory.py
+++ b/workflow/rocoto/rocoto_xml_factory.py
@@ -1,4 +1,4 @@
-from wxflow.factory import Factory
+from wxflow import Factory
 from rocoto.gfs_cycled_xml import GFSCycledRocotoXML
 from rocoto.gfs_forecast_only_xml import GFSForecastOnlyRocotoXML
 from rocoto.gefs_xml import GEFSRocotoXML

diff --git a/workflow/rocoto/tasks.py b/workflow/rocoto/tasks.py
index b07e4c9e4d..d6af01f4f2 100644
--- a/workflow/rocoto/tasks.py
+++ b/workflow/rocoto/tasks.py
@@ -3,7 +3,7 @@
 import numpy as np
 from applications.applications import AppConfig
 import rocoto.rocoto as rocoto
-from wxflow.template import Template, TemplateConstants
+from wxflow import Template, TemplateConstants

 __all__ = ['Tasks', 'create_wf_task']

diff --git a/workflow/rocoto/tasks_factory.py b/workflow/rocoto/tasks_factory.py
index d580dc3b95..38cf0d0bd1 100644
--- a/workflow/rocoto/tasks_factory.py
+++ b/workflow/rocoto/tasks_factory.py
@@ -1,4 +1,4 @@
-from wxflow.factory import Factory
+from wxflow import Factory
 from rocoto.gfs_tasks import GFSTasks
 from rocoto.gefs_tasks import GEFSTasks

diff --git a/workflow/setup_expt.py b/workflow/setup_expt.py
index 1ea132d14a..0557b39f43 100755
--- a/workflow/setup_expt.py
+++ b/workflow/setup_expt.py
@@ -12,9 +12,9 @@
 from hosts import Host

-from wxflow.yaml_file import parse_j2yaml
-from wxflow.attrdict import AttrDict
-from wxflow.timetools import to_datetime, to_timedelta, datetime_to_YMDH
+from wxflow import parse_j2yaml
+from wxflow import AttrDict
+from wxflow import to_datetime, to_timedelta, datetime_to_YMDH

 _here = os.path.dirname(__file__)

diff --git a/workflow/setup_xml.py b/workflow/setup_xml.py
index 0d06608d21..56a1464bae 100755
--- a/workflow/setup_xml.py
+++ b/workflow/setup_xml.py
@@ -8,7 +8,7 @@
 from applications.application_factory import app_config_factory
 from rocoto.rocoto_xml_factory import rocoto_xml_factory
-from wxflow.configuration import Configuration
+from wxflow import Configuration


 def input_args():

diff --git a/workflow/test_configuration.py b/workflow/test_configuration.py
index 888ec5dd43..32e40c67af 100644
--- a/workflow/test_configuration.py
+++ b/workflow/test_configuration.py
@@ -1,5 +1,5 @@
 import sys
-from wxflow.configuration import Configuration
+from wxflow import Configuration

 expdir = sys.argv[1]

From 066e14a01d3a679ec760dcda454d4cc03ad250c7 Mon Sep 17 00:00:00 2001
From: Rahul Mahajan
Date: Fri, 7 Jul 2023 15:23:47 -0400
Subject: [PATCH 13/19] ci: import directly from wxflow

---
 ci/scripts/create_experiment.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/ci/scripts/create_experiment.py b/ci/scripts/create_experiment.py
index 4ca0ee7fa8..d2e6be075e 100755
--- a/ci/scripts/create_experiment.py
+++ b/ci/scripts/create_experiment.py
@@ -22,9 +22,9 @@
 from pathlib import Path

-from wxflow.yaml_file import YAMLFile
-from wxflow.logger import Logger
-from wxflow.executable import Executable
+from wxflow import YAMLFile
+from wxflow import Logger
+from wxflow import Executable

 from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter
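The flat imports above appear to work because wxflow re-exports its public API from the package's top-level `__init__.py`, so `from wxflow import Configuration` and `from wxflow.configuration import Configuration` should resolve to the same class. A quick sketch of how to confirm that equivalence, assuming wxflow is importable:

::

    python3 -c "from wxflow import Configuration as A; \
                from wxflow.configuration import Configuration as B; \
                print(A is B)"   # expected: True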
From 52b9b3a81b85a87c21585d22e2c6658f136b00bd Mon Sep 17 00:00:00 2001
From: Rahul Mahajan
Date: Fri, 7 Jul 2023 15:44:37 -0400
Subject: [PATCH 14/19] scripts/ush/python/pygfs: import directly from wxflow

---
 scripts/exglobal_aero_analysis_finalize.py   |  3 +--
 scripts/exglobal_aero_analysis_initialize.py |  3 +--
 scripts/exglobal_aero_analysis_run.py        |  3 +--
 scripts/exglobal_atm_analysis_finalize.py    |  3 +--
 scripts/exglobal_atm_analysis_initialize.py  |  3 +--
 scripts/exglobal_atm_analysis_run.py         |  3 +--
 scripts/exglobal_atmens_analysis_finalize.py |  3 +--
 .../exglobal_atmens_analysis_initialize.py   |  3 +--
 scripts/exglobal_atmens_analysis_run.py      |  3 +--
 scripts/exglobal_atmos_upp.py                |  4 +---
 scripts/exglobal_forecast.py                 |  4 +---
 scripts/exglobal_land_analysis.py            |  3 +--
 scripts/exglobal_prep_land_obs.py            |  3 +--
 ush/python/pygfs/task/aero_analysis.py       | 18 +++++++--------
 ush/python/pygfs/task/analysis.py            |  9 ++------
 ush/python/pygfs/task/atm_analysis.py        | 16 +++++++-------
 ush/python/pygfs/task/atmens_analysis.py     | 22 +++++++++----------
 ush/python/pygfs/task/gfs_forecast.py        |  3 +--
 ush/python/pygfs/task/land_analysis.py       | 18 +++++++--------
 ush/python/pygfs/task/upp.py                 | 18 +++++++--------
 ush/python/pygfs/ufswm/gfs.py                |  2 +-
 ush/python/pygfs/ufswm/ufs.py                |  3 +--
 22 files changed, 64 insertions(+), 86 deletions(-)

diff --git a/scripts/exglobal_aero_analysis_finalize.py b/scripts/exglobal_aero_analysis_finalize.py
index 595f069a76..e9464b47e5 100755
--- a/scripts/exglobal_aero_analysis_finalize.py
+++ b/scripts/exglobal_aero_analysis_finalize.py
@@ -6,8 +6,7 @@
 # for a global aerosol variational analysis
 import os

-from wxflow.logger import Logger
-from wxflow.configuration import cast_strdict_as_dtypedict
+from wxflow import Logger, cast_strdict_as_dtypedict
 from pygfs.task.aero_analysis import AerosolAnalysis

diff --git a/scripts/exglobal_aero_analysis_initialize.py b/scripts/exglobal_aero_analysis_initialize.py
index 3b2024265b..3a57dc8401 100755
--- a/scripts/exglobal_aero_analysis_initialize.py
+++ b/scripts/exglobal_aero_analysis_initialize.py
@@ -7,8 +7,7 @@
 # for a global aerosol variational analysis
 import os

-from wxflow.logger import Logger
-from wxflow.configuration import cast_strdict_as_dtypedict
+from wxflow import Logger, cast_strdict_as_dtypedict
 from pygfs.task.aero_analysis import AerosolAnalysis

 # Initialize root logger

diff --git a/scripts/exglobal_aero_analysis_run.py b/scripts/exglobal_aero_analysis_run.py
index d67f75e969..85f4b963a4 100755
--- a/scripts/exglobal_aero_analysis_run.py
+++ b/scripts/exglobal_aero_analysis_run.py
@@ -5,8 +5,7 @@
 # which executes the global aerosol variational analysis
 import os

-from wxflow.logger import Logger
-from wxflow.configuration import cast_strdict_as_dtypedict
+from wxflow import Logger, cast_strdict_as_dtypedict
 from pygfs.task.aero_analysis import AerosolAnalysis

 # Initialize root logger

diff --git a/scripts/exglobal_atm_analysis_finalize.py b/scripts/exglobal_atm_analysis_finalize.py
index 0e0d0f5258..3f4313631c 100755
--- a/scripts/exglobal_atm_analysis_finalize.py
+++ b/scripts/exglobal_atm_analysis_finalize.py
@@ -6,8 +6,7 @@
 # for a global atm variational analysis
 import os

-from wxflow.logger import Logger
-from wxflow.configuration import cast_strdict_as_dtypedict
+from wxflow import Logger, cast_strdict_as_dtypedict
 from pygfs.task.atm_analysis import AtmAnalysis

diff --git a/scripts/exglobal_atm_analysis_initialize.py b/scripts/exglobal_atm_analysis_initialize.py
index b93123f70d..1793b24b0b 100755
--- a/scripts/exglobal_atm_analysis_initialize.py
+++ b/scripts/exglobal_atm_analysis_initialize.py
@@ -7,8 +7,7 @@
 # for a global atm variational analysis
 import os

-from wxflow.logger import Logger
-from wxflow.configuration import cast_strdict_as_dtypedict
+from wxflow import Logger, cast_strdict_as_dtypedict
 from pygfs.task.atm_analysis import AtmAnalysis
 # Initialize root logger

diff --git a/scripts/exglobal_atm_analysis_run.py b/scripts/exglobal_atm_analysis_run.py
index 946a705543..8adbe4a267 100755
--- a/scripts/exglobal_atm_analysis_run.py
+++ b/scripts/exglobal_atm_analysis_run.py
@@ -5,8 +5,7 @@
 # which executes the global atm variational analysis
 import os

-from wxflow.logger import Logger
-from wxflow.configuration import cast_strdict_as_dtypedict
+from wxflow import Logger, cast_strdict_as_dtypedict
 from pygfs.task.atm_analysis import AtmAnalysis

 # Initialize root logger

diff --git a/scripts/exglobal_atmens_analysis_finalize.py b/scripts/exglobal_atmens_analysis_finalize.py
index c857e0763a..b49cb3c413 100755
--- a/scripts/exglobal_atmens_analysis_finalize.py
+++ b/scripts/exglobal_atmens_analysis_finalize.py
@@ -6,8 +6,7 @@
 # for a global atm local ensemble analysis
 import os

-from wxflow.logger import Logger
-from wxflow.configuration import cast_strdict_as_dtypedict
+from wxflow import Logger, cast_strdict_as_dtypedict
 from pygfs.task.atmens_analysis import AtmEnsAnalysis

diff --git a/scripts/exglobal_atmens_analysis_initialize.py b/scripts/exglobal_atmens_analysis_initialize.py
index d08956bdb8..1d578b44f2 100755
--- a/scripts/exglobal_atmens_analysis_initialize.py
+++ b/scripts/exglobal_atmens_analysis_initialize.py
@@ -7,8 +7,7 @@
 # for a global atm local ensemble analysis
 import os

-from wxflow.logger import Logger
-from wxflow.configuration import cast_strdict_as_dtypedict
+from wxflow import Logger, cast_strdict_as_dtypedict
 from pygfs.task.atmens_analysis import AtmEnsAnalysis

 # Initialize root logger

diff --git a/scripts/exglobal_atmens_analysis_run.py b/scripts/exglobal_atmens_analysis_run.py
index 384627de02..b2eb9fb2e4 100755
--- a/scripts/exglobal_atmens_analysis_run.py
+++ b/scripts/exglobal_atmens_analysis_run.py
@@ -5,8 +5,7 @@
 # which executes the global atm local ensemble analysis
 import os

-from wxflow.logger import Logger
-from wxflow.configuration import cast_strdict_as_dtypedict
+from wxflow import Logger, cast_strdict_as_dtypedict
 from pygfs.task.atmens_analysis import AtmEnsAnalysis

 # Initialize root logger

diff --git a/scripts/exglobal_atmos_upp.py b/scripts/exglobal_atmos_upp.py
index eb405d0058..6cdbc1bc51 100755
--- a/scripts/exglobal_atmos_upp.py
+++ b/scripts/exglobal_atmos_upp.py
@@ -2,9 +2,7 @@
 import os

-from wxflow.attrdict import AttrDict
-from wxflow.logger import Logger, logit
-from wxflow.configuration import cast_strdict_as_dtypedict
+from wxflow import AttrDict, Logger, logit, cast_strdict_as_dtypedict
 from pygfs.task.upp import UPP

 # initialize root logger

diff --git a/scripts/exglobal_forecast.py b/scripts/exglobal_forecast.py
index 7d081bc2cb..c2ea9495fc 100755
--- a/scripts/exglobal_forecast.py
+++ b/scripts/exglobal_forecast.py
@@ -2,9 +2,7 @@
 import os

-from wxflow.logger import Logger, logit
-from wxflow.yaml_file import save_as_yaml
-from wxflow.configuration import cast_strdict_as_dtypedict
+from wxflow import Logger, logit, save_as_yaml, cast_strdict_as_dtypedict
 from pygfs.task.gfs_forecast import GFSForecast

 # initialize root logger

diff --git a/scripts/exglobal_land_analysis.py b/scripts/exglobal_land_analysis.py
index c7bf9f2de2..70141475b0 100755
--- a/scripts/exglobal_land_analysis.py
+++ b/scripts/exglobal_land_analysis.py
@@ -5,8 +5,7 @@
 # for a global Land Snow Depth analysis
 import os

-from wxflow.logger import Logger
-from wxflow.configuration import cast_strdict_as_dtypedict
+from wxflow import Logger, cast_strdict_as_dtypedict
 from pygfs.task.land_analysis import LandAnalysis
 # Initialize root logger

diff --git a/scripts/exglobal_prep_land_obs.py b/scripts/exglobal_prep_land_obs.py
index 19b939c193..829d75c4b1 100755
--- a/scripts/exglobal_prep_land_obs.py
+++ b/scripts/exglobal_prep_land_obs.py
@@ -5,8 +5,7 @@
 # which perform the pre-processing for IMS data
 import os

-from wxflow.logger import Logger
-from wxflow.configuration import cast_strdict_as_dtypedict
+from wxflow import Logger, cast_strdict_as_dtypedict
 from pygfs.task.land_analysis import LandAnalysis

diff --git a/ush/python/pygfs/task/aero_analysis.py b/ush/python/pygfs/task/aero_analysis.py
index 5dc94240b8..2955baa069 100644
--- a/ush/python/pygfs/task/aero_analysis.py
+++ b/ush/python/pygfs/task/aero_analysis.py
@@ -7,15 +7,15 @@
 from logging import getLogger
 from typing import Dict, List, Any

-from wxflow.attrdict import AttrDict
-from wxflow.file_utils import FileHandler
-from wxflow.timetools import add_to_datetime, to_fv3time, to_timedelta
-from wxflow.fsutils import rm_p, chdir
-from wxflow.timetools import to_fv3time
-from wxflow.yaml_file import YAMLFile, parse_yamltmpl, parse_j2yaml, save_as_yaml
-from wxflow.logger import logit
-from wxflow.executable import Executable
-from wxflow.exceptions import WorkflowException
+from wxflow import (AttrDict,
+                    FileHandler,
+                    add_to_datetime, to_fv3time, to_timedelta,
+                    chdir,
+                    to_fv3time,
+                    YAMLFile, parse_yamltmpl, parse_j2yaml, save_as_yaml,
+                    logit,
+                    Executable,
+                    WorkflowException)
 from pygfs.task.analysis import Analysis

 logger = getLogger(__name__.split('.')[-1])

diff --git a/ush/python/pygfs/task/analysis.py b/ush/python/pygfs/task/analysis.py
index 8bd7da8ac9..94c1413283 100644
--- a/ush/python/pygfs/task/analysis.py
+++ b/ush/python/pygfs/task/analysis.py
@@ -7,13 +7,8 @@
 from netCDF4 import Dataset
 from typing import List, Dict, Any, Union

-from wxflow.yaml_file import parse_j2yaml
-from wxflow.file_utils import FileHandler
-from wxflow.fsutils import rm_p
-from wxflow.logger import logit
-from wxflow.task import Task
-from wxflow.executable import Executable
-from wxflow.exceptions import WorkflowException
+from wxflow import (parse_j2yaml, FileHandler, rm_p, logit,
+                    Task, Executable, WorkflowException)

 logger = getLogger(__name__.split('.')[-1])

diff --git a/ush/python/pygfs/task/atm_analysis.py b/ush/python/pygfs/task/atm_analysis.py
index 477f5032cc..6a24a5c0a9 100644
--- a/ush/python/pygfs/task/atm_analysis.py
+++ b/ush/python/pygfs/task/atm_analysis.py
@@ -7,14 +7,14 @@
 from logging import getLogger
 from typing import Dict, List, Any

-from wxflow.attrdict import AttrDict
-from wxflow.file_utils import FileHandler
-from wxflow.timetools import add_to_datetime, to_fv3time, to_timedelta, to_YMDH
-from wxflow.fsutils import rm_p, chdir
-from wxflow.yaml_file import parse_yamltmpl, parse_j2yaml, save_as_yaml
-from wxflow.logger import logit
-from wxflow.executable import Executable
-from wxflow.exceptions import WorkflowException
+from wxflow import (AttrDict,
+                    FileHandler,
+                    add_to_datetime, to_fv3time, to_timedelta, to_YMDH,
+                    chdir,
+                    parse_yamltmpl, parse_j2yaml, save_as_yaml,
+                    logit,
+                    Executable,
+                    WorkflowException)
 from pygfs.task.analysis import Analysis

 logger = getLogger(__name__.split('.')[-1])

diff --git a/ush/python/pygfs/task/atmens_analysis.py b/ush/python/pygfs/task/atmens_analysis.py
index 510baa5ab7..bfddf30d47 100644
--- a/ush/python/pygfs/task/atmens_analysis.py
+++ b/ush/python/pygfs/task/atmens_analysis.py
@@ -5,17 +5,17 @@
 import gzip
 import tarfile
 from logging import getLogger
-from typing import Dict, List, Any
-
-from wxflow.attrdict import AttrDict
-from wxflow.file_utils import FileHandler
-from wxflow.timetools import add_to_datetime, to_fv3time, to_timedelta, to_YMDH, to_YMD
-from wxflow.fsutils import rm_p, chdir
-from wxflow.yaml_file import parse_yamltmpl, parse_j2yaml, save_as_yaml
-from wxflow.logger import logit
-from wxflow.executable import Executable
-from wxflow.exceptions import WorkflowException
-from wxflow.template import Template, TemplateConstants
+from typing import Dict, List
+
+from wxflow import (AttrDict,
+                    FileHandler,
+                    add_to_datetime, to_fv3time, to_timedelta, to_YMDH, to_YMD,
+                    chdir,
+                    parse_yamltmpl, parse_j2yaml, save_as_yaml,
+                    logit,
+                    Executable,
+                    WorkflowException,
+                    Template, TemplateConstants)
 from pygfs.task.analysis import Analysis

 logger = getLogger(__name__.split('.')[-1])

diff --git a/ush/python/pygfs/task/gfs_forecast.py b/ush/python/pygfs/task/gfs_forecast.py
index fc3876176f..1aa550fd62 100644
--- a/ush/python/pygfs/task/gfs_forecast.py
+++ b/ush/python/pygfs/task/gfs_forecast.py
@@ -2,8 +2,7 @@
 import logging
 from typing import Dict, Any

-from wxflow.logger import logit
-from wxflow.task import Task
+from wxflow import logit, Task
 from pygfs.ufswm.gfs import GFS

 logger = logging.getLogger(__name__.split('.')[-1])

diff --git a/ush/python/pygfs/task/land_analysis.py b/ush/python/pygfs/task/land_analysis.py
index 60f2e90f10..5439679db6 100644
--- a/ush/python/pygfs/task/land_analysis.py
+++ b/ush/python/pygfs/task/land_analysis.py
@@ -7,15 +7,15 @@
 import numpy as np
 from netCDF4 import Dataset

-from wxflow.attrdict import AttrDict
-from wxflow.file_utils import FileHandler
-from wxflow.timetools import to_fv3time, to_YMD, to_YMDH, to_timedelta, add_to_datetime
-from wxflow.fsutils import rm_p
-from wxflow.yaml_file import parse_j2yaml, parse_yamltmpl, save_as_yaml
-from wxflow.jinja import Jinja
-from wxflow.logger import logit
-from wxflow.executable import Executable
-from wxflow.exceptions import WorkflowException
+from wxflow import (AttrDict,
+                    FileHandler,
+                    to_fv3time, to_YMD, to_YMDH, to_timedelta, add_to_datetime,
+                    rm_p,
+                    parse_j2yaml, parse_yamltmpl, save_as_yaml,
+                    Jinja,
+                    logit,
+                    Executable,
+                    WorkflowException)
 from pygfs.task.analysis import Analysis

 logger = getLogger(__name__.split('.')[-1])

diff --git a/ush/python/pygfs/task/upp.py b/ush/python/pygfs/task/upp.py
index faa1ccaf0a..b7127483e7 100644
--- a/ush/python/pygfs/task/upp.py
+++ b/ush/python/pygfs/task/upp.py
@@ -5,15 +5,15 @@
 from typing import Dict, Any, Union
 from pprint import pformat

-from wxflow.attrdict import AttrDict
-from wxflow.yaml_file import parse_j2yaml
-from wxflow.file_utils import FileHandler
-from wxflow.jinja import Jinja
-from wxflow.logger import logit
-from wxflow.task import Task
-from wxflow.timetools import add_to_datetime, to_timedelta
-from wxflow.exceptions import WorkflowException
-from wxflow.executable import Executable, which
+from wxflow import (AttrDict,
+                    parse_j2yaml,
+                    FileHandler,
+                    Jinja,
+                    logit,
+                    Task,
+                    add_to_datetime, to_timedelta,
+                    WorkflowException,
+                    Executable, which)

 logger = getLogger(__name__.split('.')[-1])

diff --git a/ush/python/pygfs/ufswm/gfs.py b/ush/python/pygfs/ufswm/gfs.py
index fa1e271d92..2ed6cd0c08 100644
--- a/ush/python/pygfs/ufswm/gfs.py
+++ b/ush/python/pygfs/ufswm/gfs.py
@@ -1,7 +1,7 @@
 import copy
 import logging

-from wxflow.logger import logit
+from wxflow import logit
 from pygfs.ufswm.ufs import UFS

 logger = logging.getLogger(__name__.split('.')[-1])
diff --git a/ush/python/pygfs/ufswm/ufs.py b/ush/python/pygfs/ufswm/ufs.py
index 7ffbcd1a05..e9836e0b75 100644
--- a/ush/python/pygfs/ufswm/ufs.py
+++ b/ush/python/pygfs/ufswm/ufs.py
@@ -3,8 +3,7 @@
 import logging
 from typing import Dict, Any

-from wxflow.template import Template, TemplateConstants
-from wxflow.logger import logit
+from wxflow import logit, Template, TemplateConstants

 logger = logging.getLogger(__name__.split('.')[-1])

From 232efaa92cde8b508d88491f7fc5ebd009c0989b Mon Sep 17 00:00:00 2001
From: Rahul Mahajan
Date: Fri, 7 Jul 2023 16:38:01 -0400
Subject: [PATCH 15/19] remove submodule from ci

---
 ci/scripts/clone-build_ci.sh | 2 --
 1 file changed, 2 deletions(-)

diff --git a/ci/scripts/clone-build_ci.sh b/ci/scripts/clone-build_ci.sh
index 32fa5dcd1d..a76131a609 100755
--- a/ci/scripts/clone-build_ci.sh
+++ b/ci/scripts/clone-build_ci.sh
@@ -47,7 +47,6 @@ fi
 git clone "${REPO_URL}"
 cd global-workflow || exit 1
-git submodule update --init --recursive

 pr_state=$("${GH}" pr view "${PR}" --json state --jq '.state')
 if [[ "${pr_state}" != "OPEN" ]]; then
@@ -58,7 +57,6 @@ fi
 # checkout pull request
 "${GH}" pr checkout "${PR}" --repo "${REPO_URL}"
-git submodule update --init --recursive
 HOMEgfs="${PWD}"

 source "${HOMEgfs}/ush/detect_machine.sh"

From d431f6ae730271e6db1e22a055d3a3279067a6c4 Mon Sep 17 00:00:00 2001
From: Rahul Mahajan
Date: Fri, 7 Jul 2023 16:41:59 -0400
Subject: [PATCH 16/19] update docs to remove references to submodule

---
 docs/source/clone.rst | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/docs/source/clone.rst b/docs/source/clone.rst
index a4ef73189b..3b2d555520 100644
--- a/docs/source/clone.rst
+++ b/docs/source/clone.rst
@@ -11,11 +11,11 @@ Quick clone/build/link instructions (more detailed instructions below).

 .. note::
    Here we are making the assumption that you are using the workflow to run an experiment and so are working from the authoritative repository. If you are using a development branch then follow the instructions in :doc:`development.rst`. Once you do that you can follow the instructions here with the only difference being the repository/fork you are cloning from.

-Clone the `global-workflow` (note the use of `--recursive`) and `cd` into the `sorc` directory:
+Clone the `global-workflow` and `cd` into the `sorc` directory:

 ::

-   git clone --recursive https://github.com/NOAA-EMC/global-workflow
+   git clone https://github.com/NOAA-EMC/global-workflow
    cd global-workflow/sorc

 For forecast-only (coupled or uncoupled) checkout the components:
@@ -55,11 +55,11 @@ Clone workflow and component repositories

 Workflow
 ********

-There are several ways to clone repositories from GitHub. Below we describe how to clone the global-workflow using the `https` methods. (Note the use of `--recursive` to clone submodules of `global-workflow`)
+There are several ways to clone repositories from GitHub. Below we describe how to clone the global-workflow using the `https` method.
 ::

-   git clone --recursive https://github.com/NOAA-EMC/global-workflow
+   git clone https://github.com/NOAA-EMC/global-workflow

 Check what you just cloned (by default you will have only the `develop` branch):

From 3981747dfa7919efb71d6915e5fd546e43ab67ab Mon Sep 17 00:00:00 2001
From: Rahul Mahajan
Date: Fri, 7 Jul 2023 16:59:17 -0400
Subject: [PATCH 17/19] fix pycodestyle errors

---
 .github/workflows/pynorms.yaml  | 2 +-
 .pycodestyle                    | 2 +-
 ci/scripts/create_experiment.py | 8 +++-----
 ush/gsi_utils.py                | 1 +
 4 files changed, 6 insertions(+), 7 deletions(-)

diff --git a/.github/workflows/pynorms.yaml b/.github/workflows/pynorms.yaml
index 7f823f8318..6ea99b59ed 100644
--- a/.github/workflows/pynorms.yaml
+++ b/.github/workflows/pynorms.yaml
@@ -21,4 +21,4 @@ jobs:
       - name: Run pycodestyle
         run: |
           cd $GITHUB_WORKSPACE/global-workflow
-          pycodestyle -v --config ./.pycodestyle --exclude='.git,.github' ./
+          pycodestyle -v --config ./.pycodestyle ./

diff --git a/.pycodestyle b/.pycodestyle
index 8bd18fa9d7..48b90f6208 100644
--- a/.pycodestyle
+++ b/.pycodestyle
@@ -3,4 +3,4 @@ count = False
 ignore = E402,W504
 max-line-length = 160
 statistics = True
-exclude = Experimental
+exclude = Experimental,.git,.vscode,*.fd,*.cd

diff --git a/ci/scripts/create_experiment.py b/ci/scripts/create_experiment.py
index d2e6be075e..c8b95aaf0e 100755
--- a/ci/scripts/create_experiment.py
+++ b/ci/scripts/create_experiment.py
@@ -20,18 +20,16 @@
 with an error code of 0 upon success.
 """

+from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter
 from pathlib import Path

-from wxflow import YAMLFile
-from wxflow import Logger
-from wxflow import Executable
-
-from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter
+from wxflow import YAMLFile, Logger, logit, Executable

 logger = Logger(level='DEBUG', colored_log=True)


+@logit(logger)
 def input_args():
     """
     Method to collect user arguments for `create_experiment.py`

diff --git a/ush/gsi_utils.py b/ush/gsi_utils.py
index 97d66e8ace..94a2ae1348 100644
--- a/ush/gsi_utils.py
+++ b/ush/gsi_utils.py
@@ -2,6 +2,7 @@
 # a collection of functions, classes, etc.
 # used for the GSI global analysis

+
 def isTrue(str_in):
     """ isTrue(str_in) - function to translate shell variables to python logical variables
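Moving the exclusions out of the CI invocation and into `.pycodestyle` keeps the GitHub Action and local runs consistent; the `*.fd` and `*.cd` patterns skip the linked component source trees. The same check can then be run locally from the repository root with no extra flags:

::

    pycodestyle -v --config ./.pycodestyle ./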
-d "${top_dir}/fix" ]]; then mkdir "${top_dir}/fix" || exit 1; fi fi @@ -117,13 +118,13 @@ for dir in aer \ ugwd \ verif \ wave - do - if [[ -d "${dir}" ]]; then - [[ "${RUN_ENVIR}" == "nco" ]] && chmod -R 755 "${dir}" - rm -rf "${dir}" - fi - fix_ver="${dir}_ver" - ${LINK} "${FIX_DIR}/${dir}/${!fix_ver}" "${dir}" +do + if [[ -d "${dir}" ]]; then + [[ "${RUN_ENVIR}" == "nco" ]] && chmod -R 755 "${dir}" + rm -rf "${dir}" + fi + fix_ver="${dir}_ver" + ${LINK_OR_COPY} "${FIX_DIR}/${dir}/${!fix_ver}" "${dir}" done @@ -137,41 +138,42 @@ fi #--add files from external repositories #--------------------------------------- cd "${top_dir}/parm/post" || exit 1 - for file in postxconfig-NT-GEFS-ANL.txt postxconfig-NT-GEFS-F00.txt postxconfig-NT-GEFS.txt postxconfig-NT-GFS-ANL.txt \ - postxconfig-NT-GFS-F00-TWO.txt postxconfig-NT-GFS-F00.txt postxconfig-NT-GFS-FLUX-F00.txt postxconfig-NT-GFS-FLUX.txt \ - postxconfig-NT-GFS-GOES.txt postxconfig-NT-GFS-TWO.txt \ - postxconfig-NT-GFS.txt postxconfig-NT-gefs-aerosol.txt postxconfig-NT-gefs-chem.txt params_grib2_tbl_new \ - post_tag_gfs128 post_tag_gfs65 nam_micro_lookup.dat \ - AEROSOL_LUTS.dat optics_luts_DUST.dat optics_luts_SALT.dat optics_luts_SOOT.dat optics_luts_SUSO.dat optics_luts_WASO.dat \ - ; do - ${LINK} "${script_dir}/upp.fd/parm/${file}" . - done +for file in postxconfig-NT-GEFS-ANL.txt postxconfig-NT-GEFS-F00.txt postxconfig-NT-GEFS.txt postxconfig-NT-GFS-ANL.txt \ + postxconfig-NT-GFS-F00-TWO.txt postxconfig-NT-GFS-F00.txt postxconfig-NT-GFS-FLUX-F00.txt postxconfig-NT-GFS-FLUX.txt \ + postxconfig-NT-GFS-GOES.txt postxconfig-NT-GFS-TWO.txt \ + postxconfig-NT-GFS.txt postxconfig-NT-gefs-aerosol.txt postxconfig-NT-gefs-chem.txt params_grib2_tbl_new \ + post_tag_gfs128 post_tag_gfs65 nam_micro_lookup.dat \ + AEROSOL_LUTS.dat optics_luts_DUST.dat optics_luts_SALT.dat optics_luts_SOOT.dat optics_luts_SUSO.dat optics_luts_WASO.dat +do + ${LINK_OR_COPY} "${script_dir}/upp.fd/parm/${file}" . +done cd "${top_dir}/scripts" || exit 8 - ${LINK} "${script_dir}/ufs_utils.fd/scripts/exemcsfc_global_sfc_prep.sh" . +${LINK_OR_COPY} "${script_dir}/ufs_utils.fd/scripts/exemcsfc_global_sfc_prep.sh" . cd "${top_dir}/ush" || exit 8 - for file in emcsfc_ice_blend.sh fv3gfs_driver_grid.sh fv3gfs_make_orog.sh global_cycle_driver.sh \ - emcsfc_snow.sh fv3gfs_filter_topo.sh global_cycle.sh fv3gfs_make_grid.sh ; do - ${LINK} "${script_dir}/ufs_utils.fd/ush/${file}" . - done - for file in finddate.sh make_ntc_bull.pl make_NTC_file.pl make_tif.sh month_name.sh ; do - ${LINK} "${script_dir}/gfs_utils.fd/ush/${file}" . - done +for file in emcsfc_ice_blend.sh fv3gfs_driver_grid.sh fv3gfs_make_orog.sh global_cycle_driver.sh \ + emcsfc_snow.sh fv3gfs_filter_topo.sh global_cycle.sh fv3gfs_make_grid.sh +do + ${LINK_OR_COPY} "${script_dir}/ufs_utils.fd/ush/${file}" . +done +for file in finddate.sh make_ntc_bull.pl make_NTC_file.pl make_tif.sh month_name.sh ; do + ${LINK_OR_COPY} "${script_dir}/gfs_utils.fd/ush/${file}" . +done #------------------------------ #--add GDASApp fix directory #------------------------------ if [[ -d "${script_dir}/gdas.cd" ]]; then cd "${top_dir}/fix" || exit 1 - [[ ! -d gdas ]] && mkdir -p gdas - cd gdas || exit 1 - for gdas_sub in crtm fv3jedi gsibec; do - if [[ -d "${gdas_sub}" ]]; then - rm -rf "${gdas_sub}" - fi - fix_ver="gdas_${gdas_sub}_ver" - ${LINK} "${FIX_DIR}/gdas/${gdas_sub}/${!fix_ver}" "${gdas_sub}" - done + [[ ! 
+  [[ ! -d gdas ]] && mkdir -p gdas
+  cd gdas || exit 1
+  for gdas_sub in crtm fv3jedi gsibec; do
+    if [[ -d "${gdas_sub}" ]]; then
+      rm -rf "${gdas_sub}"
+    fi
+    fix_ver="gdas_${gdas_sub}_ver"
+    ${LINK_OR_COPY} "${FIX_DIR}/gdas/${gdas_sub}/${!fix_ver}" "${gdas_sub}"
+  done
 fi

 #------------------------------
@@ -179,9 +181,9 @@ if [[ -d "${script_dir}/gdas.cd" ]]; then
   cd "${top_dir}/ush" || exit 1
-  ${LINK} "${script_dir}/gdas.cd/ush/ufsda" .
-  ${LINK} "${script_dir}/gdas.cd/ush/jediinc2fv3.py" .
-  ${LINK} "${script_dir}/gdas.cd/build/bin/imsfv3_scf2ioda.py" .
+  ${LINK_OR_COPY} "${script_dir}/gdas.cd/ush/ufsda" .
+  ${LINK_OR_COPY} "${script_dir}/gdas.cd/ush/jediinc2fv3.py" .
+  ${LINK_OR_COPY} "${script_dir}/gdas.cd/build/bin/imsfv3_scf2ioda.py" .
 fi

@@ -191,24 +193,24 @@ fi
 if [[ -d "${script_dir}/gsi_monitor.fd" ]]; then

   cd "${top_dir}/fix" || exit 1
-  [[ ! -d gdas ]] && ( mkdir -p gdas || exit 1 )
-  cd gdas || exit 1
-  ${LINK} "${script_dir}/gsi_monitor.fd/src/Minimization_Monitor/nwprod/gdas/fix/gdas_minmon_cost.txt" .
-  ${LINK} "${script_dir}/gsi_monitor.fd/src/Minimization_Monitor/nwprod/gdas/fix/gdas_minmon_gnorm.txt" .
-  ${LINK} "${script_dir}/gsi_monitor.fd/src/Ozone_Monitor/nwprod/gdas_oznmon/fix/gdas_oznmon_base.tar" .
-  ${LINK} "${script_dir}/gsi_monitor.fd/src/Ozone_Monitor/nwprod/gdas_oznmon/fix/gdas_oznmon_satype.txt" .
-  ${LINK} "${script_dir}/gsi_monitor.fd/src/Radiance_Monitor/nwprod/gdas_radmon/fix/gdas_radmon_base.tar" .
-  ${LINK} "${script_dir}/gsi_monitor.fd/src/Radiance_Monitor/nwprod/gdas_radmon/fix/gdas_radmon_satype.txt" .
-  ${LINK} "${script_dir}/gsi_monitor.fd/src/Radiance_Monitor/nwprod/gdas_radmon/fix/gdas_radmon_scaninfo.txt" .
+  [[ ! -d gdas ]] && ( mkdir -p gdas || exit 1 )
+  cd gdas || exit 1
+  ${LINK_OR_COPY} "${script_dir}/gsi_monitor.fd/src/Minimization_Monitor/nwprod/gdas/fix/gdas_minmon_cost.txt" .
+  ${LINK_OR_COPY} "${script_dir}/gsi_monitor.fd/src/Minimization_Monitor/nwprod/gdas/fix/gdas_minmon_gnorm.txt" .
+  ${LINK_OR_COPY} "${script_dir}/gsi_monitor.fd/src/Ozone_Monitor/nwprod/gdas_oznmon/fix/gdas_oznmon_base.tar" .
+  ${LINK_OR_COPY} "${script_dir}/gsi_monitor.fd/src/Ozone_Monitor/nwprod/gdas_oznmon/fix/gdas_oznmon_satype.txt" .
+  ${LINK_OR_COPY} "${script_dir}/gsi_monitor.fd/src/Radiance_Monitor/nwprod/gdas_radmon/fix/gdas_radmon_base.tar" .
+  ${LINK_OR_COPY} "${script_dir}/gsi_monitor.fd/src/Radiance_Monitor/nwprod/gdas_radmon/fix/gdas_radmon_satype.txt" .
+  ${LINK_OR_COPY} "${script_dir}/gsi_monitor.fd/src/Radiance_Monitor/nwprod/gdas_radmon/fix/gdas_radmon_scaninfo.txt" .

   cd "${top_dir}/parm" || exit 1
-  [[ -d mon ]] && rm -rf mon
-  mkdir -p mon
-  cd mon || exit 1
-  ${LINK} "${script_dir}/gsi_monitor.fd/src/Radiance_Monitor/nwprod/gdas_radmon/parm/gdas_radmon.parm" da_mon.parm
-  # ${LINK} "${script_dir}/gsi_monitor.fd/src/Minimization_Monitor/nwprod/gdas/parm/gdas_minmon.parm" .
-  # ${LINK} "${script_dir}/gsi_monitor.fd/src/Minimization_Monitor/nwprod/gfs/parm/gfs_minmon.parm" .
-  ${LINK} "${script_dir}/gsi_monitor.fd/src/Ozone_Monitor/nwprod/gdas_oznmon/parm/gdas_oznmon.parm" .
-  # ${LINK} "${script_dir}/gsi_monitor.fd/src/Radiance_Monitor/nwprod/gdas_radmon/parm/gdas_radmon.parm" .
+  [[ -d mon ]] && rm -rf mon
+  mkdir -p mon
+  cd mon || exit 1
+  ${LINK_OR_COPY} "${script_dir}/gsi_monitor.fd/src/Radiance_Monitor/nwprod/gdas_radmon/parm/gdas_radmon.parm" da_mon.parm
+  # ${LINK_OR_COPY} "${script_dir}/gsi_monitor.fd/src/Minimization_Monitor/nwprod/gdas/parm/gdas_minmon.parm" .
+  # ${LINK_OR_COPY} "${script_dir}/gsi_monitor.fd/src/Minimization_Monitor/nwprod/gfs/parm/gfs_minmon.parm" .
+  ${LINK_OR_COPY} "${script_dir}/gsi_monitor.fd/src/Ozone_Monitor/nwprod/gdas_oznmon/parm/gdas_oznmon.parm" .
+  # ${LINK_OR_COPY} "${script_dir}/gsi_monitor.fd/src/Radiance_Monitor/nwprod/gdas_radmon/parm/gdas_radmon.parm" .
 fi

 #------------------------------
@@ -220,28 +222,28 @@ cd "${top_dir}/exec" || exit 1

 for utilexe in fbwndgfs.x gaussian_sfcanl.x gfs_bufr.x supvit.x syndat_getjtbul.x \
   syndat_maksynrc.x syndat_qctropcy.x tocsbufr.x overgridid.x \
-  mkgfsawps.x enkf_chgres_recenter_nc.x tave.x vint.x reg2grb2.x ; do
-  [[ -s "${utilexe}" ]] && rm -f "${utilexe}"
-  ${LINK} "${script_dir}/gfs_utils.fd/install/bin/${utilexe}" .
+  mkgfsawps.x enkf_chgres_recenter_nc.x tave.x vint.x reg2grb2.x
+do
+  [[ -s "${utilexe}" ]] && rm -f "${utilexe}"
+  ${LINK_OR_COPY} "${script_dir}/gfs_utils.fd/install/bin/${utilexe}" .
 done

 [[ -s "ufs_model.x" ]] && rm -f ufs_model.x
-${LINK} "${script_dir}/ufs_model.fd/tests/ufs_model.x" .
+${LINK_OR_COPY} "${script_dir}/ufs_model.fd/tests/ufs_model.x" .
 [[ -s "upp.x" ]] && rm -f upp.x
-${LINK} "${script_dir}/upp.fd/exec/upp.x" .
+${LINK_OR_COPY} "${script_dir}/upp.fd/exec/upp.x" .

-for ufs_utilsexe in \
-  emcsfc_ice_blend emcsfc_snow2mdl global_cycle ; do
+for ufs_utilsexe in emcsfc_ice_blend emcsfc_snow2mdl global_cycle; do
   [[ -s "${ufs_utilsexe}" ]] && rm -f "${ufs_utilsexe}"
-  ${LINK} "${script_dir}/ufs_utils.fd/exec/${ufs_utilsexe}" .
+  ${LINK_OR_COPY} "${script_dir}/ufs_utils.fd/exec/${ufs_utilsexe}" .
 done

 # GSI
 if [[ -d "${script_dir}/gsi_enkf.fd" ]]; then
   for gsiexe in enkf.x gsi.x; do
     [[ -s "${gsiexe}" ]] && rm -f "${gsiexe}"
-    ${LINK} "${script_dir}/gsi_enkf.fd/install/bin/${gsiexe}" .
+    ${LINK_OR_COPY} "${script_dir}/gsi_enkf.fd/install/bin/${gsiexe}" .
   done
 fi

@@ -249,18 +251,20 @@ fi
 if [[ -d "${script_dir}/gsi_utils.fd" ]]; then
   for exe in calc_analysis.x calc_increment_ens_ncio.x calc_increment_ens.x \
     getsfcensmeanp.x getsigensmeanp_smooth.x getsigensstatp.x \
-    interp_inc.x recentersigp.x;do
+    interp_inc.x recentersigp.x
+  do
     [[ -s "${exe}" ]] && rm -f "${exe}"
-    ${LINK} "${script_dir}/gsi_utils.fd/install/bin/${exe}" .
+    ${LINK_OR_COPY} "${script_dir}/gsi_utils.fd/install/bin/${exe}" .
   done
 fi

 # GSI Monitor
 if [[ -d "${script_dir}/gsi_monitor.fd" ]]; then
   for exe in oznmon_horiz.x oznmon_time.x radmon_angle.x \
-    radmon_bcoef.x radmon_bcor.x radmon_time.x; do
+    radmon_bcoef.x radmon_bcor.x radmon_time.x
+  do
     [[ -s "${exe}" ]] && rm -f "${exe}"
-    ${LINK} "${script_dir}/gsi_monitor.fd/install/bin/${exe}" .
+    ${LINK_OR_COPY} "${script_dir}/gsi_monitor.fd/install/bin/${exe}" .
   done
 fi

@@ -292,7 +296,7 @@ if [[ -d "${script_dir}/gdas.cd" ]]; then
   "apply_incr.exe" )
   for gdasexe in "${JEDI_EXE[@]}"; do
     [[ -s "${gdasexe}" ]] && rm -f "${gdasexe}"
-    ${LINK} "${script_dir}/gdas.cd/build/bin/${gdasexe}" .
+    ${LINK_OR_COPY} "${script_dir}/gdas.cd/build/bin/${gdasexe}" .
   done
 fi

@@ -300,91 +304,90 @@ fi
 #--link source code directories
 #------------------------------
 cd "${script_dir}" || exit 8
+if [[ -d ufs_model.fd ]]; then
+  [[ -d upp.fd ]] && rm -rf upp.fd
+  ${LINK} ufs_model.fd/FV3/upp upp.fd
+fi

-  if [[ -d gsi_enkf.fd ]]; then
-    [[ -d gsi.fd ]] && rm -rf gsi.fd
-    ${LINK} gsi_enkf.fd/src/gsi gsi.fd
+if [[ -d gsi_enkf.fd ]]; then
+  [[ -d gsi.fd ]] && rm -rf gsi.fd
+  ${LINK} gsi_enkf.fd/src/gsi gsi.fd

-    [[ -d enkf.fd ]] && rm -rf enkf.fd
-    ${LINK} gsi_enkf.fd/src/enkf enkf.fd
-  fi
+  [[ -d enkf.fd ]] && rm -rf enkf.fd
+  ${LINK} gsi_enkf.fd/src/enkf enkf.fd
+fi

-  if [[ -d gsi_utils.fd ]]; then
-    [[ -d calc_analysis.fd ]] && rm -rf calc_analysis.fd
-    ${LINK} gsi_utils.fd/src/netcdf_io/calc_analysis.fd calc_analysis.fd
+if [[ -d gsi_utils.fd ]]; then
+  [[ -d calc_analysis.fd ]] && rm -rf calc_analysis.fd
+  ${LINK} gsi_utils.fd/src/netcdf_io/calc_analysis.fd .

-    [[ -d calc_increment_ens.fd ]] && rm -rf calc_increment_ens.fd
-    ${LINK} gsi_utils.fd/src/EnKF/gfs/src/calc_increment_ens.fd calc_increment_ens.fd
+  [[ -d calc_increment_ens.fd ]] && rm -rf calc_increment_ens.fd
+  ${LINK} gsi_utils.fd/src/EnKF/gfs/src/calc_increment_ens.fd .

-    [[ -d calc_increment_ens_ncio.fd ]] && rm -rf calc_increment_ens_ncio.fd
-    ${LINK} gsi_utils.fd/src/EnKF/gfs/src/calc_increment_ens_ncio.fd calc_increment_ens_ncio.fd
+  [[ -d calc_increment_ens_ncio.fd ]] && rm -rf calc_increment_ens_ncio.fd
+  ${LINK} gsi_utils.fd/src/EnKF/gfs/src/calc_increment_ens_ncio.fd .

-    [[ -d getsfcensmeanp.fd ]] && rm -rf getsfcensmeanp.fd
-    ${LINK} gsi_utils.fd/src/EnKF/gfs/src/getsfcensmeanp.fd getsfcensmeanp.fd
+  [[ -d getsfcensmeanp.fd ]] && rm -rf getsfcensmeanp.fd
+  ${LINK} gsi_utils.fd/src/EnKF/gfs/src/getsfcensmeanp.fd .

-    [[ -d getsigensmeanp_smooth.fd ]] && rm -rf getsigensmeanp_smooth.fd
-    ${LINK} gsi_utils.fd/src/EnKF/gfs/src/getsigensmeanp_smooth.fd getsigensmeanp_smooth.fd
+  [[ -d getsigensmeanp_smooth.fd ]] && rm -rf getsigensmeanp_smooth.fd
+  ${LINK} gsi_utils.fd/src/EnKF/gfs/src/getsigensmeanp_smooth.fd .

-    [[ -d getsigensstatp.fd ]] && rm -rf getsigensstatp.fd
-    ${LINK} gsi_utils.fd/src/EnKF/gfs/src/getsigensstatp.fd getsigensstatp.fd
+  [[ -d getsigensstatp.fd ]] && rm -rf getsigensstatp.fd
+  ${LINK} gsi_utils.fd/src/EnKF/gfs/src/getsigensstatp.fd .

-    [[ -d recentersigp.fd ]] && rm -rf recentersigp.fd
-    ${LINK} gsi_utils.fd/src/EnKF/gfs/src/recentersigp.fd recentersigp.fd
+  [[ -d recentersigp.fd ]] && rm -rf recentersigp.fd
+  ${LINK} gsi_utils.fd/src/EnKF/gfs/src/recentersigp.fd .

-    [[ -d interp_inc.fd ]] && rm -rf interp_inc.fd
-    ${LINK} gsi_utils.fd/src/netcdf_io/interp_inc.fd interp_inc.fd
-  fi
+  [[ -d interp_inc.fd ]] && rm -rf interp_inc.fd
+  ${LINK} gsi_utils.fd/src/netcdf_io/interp_inc.fd .
+fi

-  if [[ -d gsi_monitor.fd ]] ; then
-    [[ -d oznmon_horiz.fd ]] && rm -rf oznmon_horiz.fd
-    ${LINK} gsi_monitor.fd/src/Ozone_Monitor/nwprod/oznmon_shared/sorc/oznmon_horiz.fd oznmon_horiz.fd
+if [[ -d gsi_monitor.fd ]] ; then
+  [[ -d oznmon_horiz.fd ]] && rm -rf oznmon_horiz.fd
+  ${LINK} gsi_monitor.fd/src/Ozone_Monitor/nwprod/oznmon_shared/sorc/oznmon_horiz.fd .

-    [[ -d oznmon_time.fd ]] && rm -rf oznmon_time.fd
-    ${LINK} gsi_monitor.fd/src/Ozone_Monitor/nwprod/oznmon_shared/sorc/oznmon_time.fd oznmon_time.fd
+  [[ -d oznmon_time.fd ]] && rm -rf oznmon_time.fd
+  ${LINK} gsi_monitor.fd/src/Ozone_Monitor/nwprod/oznmon_shared/sorc/oznmon_time.fd .

-    [[ -d radmon_angle.fd ]] && rm -rf radmon_angle.fd
-    ${LINK} gsi_monitor.fd/src/Radiance_Monitor/nwprod/radmon_shared/sorc/verf_radang.fd radmon_angle.fd
+  [[ -d radmon_angle.fd ]] && rm -rf radmon_angle.fd
+  ${LINK} gsi_monitor.fd/src/Radiance_Monitor/nwprod/radmon_shared/sorc/verf_radang.fd radmon_angle.fd

-    [[ -d radmon_bcoef.fd ]] && rm -rf radmon_bcoef.fd
-    ${LINK} gsi_monitor.fd/src/Radiance_Monitor/nwprod/radmon_shared/sorc/verf_radbcoef.fd radmon_bcoef.fd
+  [[ -d radmon_bcoef.fd ]] && rm -rf radmon_bcoef.fd
+  ${LINK} gsi_monitor.fd/src/Radiance_Monitor/nwprod/radmon_shared/sorc/verf_radbcoef.fd radmon_bcoef.fd

-    [[ -d radmon_bcor.fd ]] && rm -rf radmon_bcor.fd
-    ${LINK} gsi_monitor.fd/src/Radiance_Monitor/nwprod/radmon_shared/sorc/verf_radbcor.fd radmon_bcor.fd
+  [[ -d radmon_bcor.fd ]] && rm -rf radmon_bcor.fd
+  ${LINK} gsi_monitor.fd/src/Radiance_Monitor/nwprod/radmon_shared/sorc/verf_radbcor.fd radmon_bcor.fd

-    [[ -d radmon_time.fd ]] && rm -rf radmon_time.fd
-    ${LINK} gsi_monitor.fd/src/Radiance_Monitor/nwprod/radmon_shared/sorc/verf_radtime.fd radmon_time.fd
-  fi
+  [[ -d radmon_time.fd ]] && rm -rf radmon_time.fd
+  ${LINK} gsi_monitor.fd/src/Radiance_Monitor/nwprod/radmon_shared/sorc/verf_radtime.fd radmon_time.fd
+fi

-  [[ -d gfs_ncep_post.fd ]] && rm -rf gfs_ncep_post.fd
-  ${LINK} upp.fd/sorc/ncep_post.fd gfs_ncep_post.fd
+for prog in global_cycle.fd emcsfc_ice_blend.fd emcsfc_snow2mdl.fd ;do
+  [[ -d "${prog}" ]] && rm -rf "${prog}"
+  ${LINK} "ufs_utils.fd/sorc/${prog}" "${prog}"
+done

-  for prog in global_cycle.fd \
-    emcsfc_ice_blend.fd \
-    emcsfc_snow2mdl.fd ;do
-    [[ -d "${prog}" ]] && rm -rf "${prog}"
-    ${LINK} "ufs_utils.fd/sorc/${prog}" "${prog}"
-  done

-  for prog in enkf_chgres_recenter_nc.fd \
-    fbwndgfs.fd \
-    gaussian_sfcanl.fd \
-    gfs_bufr.fd \
-    mkgfsawps.fd \
-    overgridid.fd \
-    rdbfmsua.fd \
-    reg2grb2.fd \
-    supvit.fd \
-    syndat_getjtbul.fd \
-    syndat_maksynrc.fd \
-    syndat_qctropcy.fd \
-    tave.fd \
-    tocsbufr.fd \
-    vint.fd \
-    webtitle.fd
-  do
-    if [[ -d "${prog}" ]]; then rm -rf "${prog}"; fi
-    ${LINK} "gfs_utils.fd/src/${prog}" .
-  done
+for prog in enkf_chgres_recenter_nc.fd \
+  fbwndgfs.fd \
+  gaussian_sfcanl.fd \
+  gfs_bufr.fd \
+  mkgfsawps.fd \
+  overgridid.fd \
+  rdbfmsua.fd \
+  reg2grb2.fd \
+  supvit.fd \
+  syndat_getjtbul.fd \
+  syndat_maksynrc.fd \
+  syndat_qctropcy.fd \
+  tave.fd \
+  tocsbufr.fd \
+  vint.fd \
+  webtitle.fd
- [[ -d radmon_angle.fd ]] && rm -rf radmon_angle.fd - ${LINK} gsi_monitor.fd/src/Radiance_Monitor/nwprod/radmon_shared/sorc/verf_radang.fd radmon_angle.fd + [[ -d radmon_angle.fd ]] && rm -rf radmon_angle.fd + ${LINK} gsi_monitor.fd/src/Radiance_Monitor/nwprod/radmon_shared/sorc/verf_radang.fd radmon_angle.fd - [[ -d radmon_bcoef.fd ]] && rm -rf radmon_bcoef.fd - ${LINK} gsi_monitor.fd/src/Radiance_Monitor/nwprod/radmon_shared/sorc/verf_radbcoef.fd radmon_bcoef.fd + [[ -d radmon_bcoef.fd ]] && rm -rf radmon_bcoef.fd + ${LINK} gsi_monitor.fd/src/Radiance_Monitor/nwprod/radmon_shared/sorc/verf_radbcoef.fd radmon_bcoef.fd - [[ -d radmon_bcor.fd ]] && rm -rf radmon_bcor.fd - ${LINK} gsi_monitor.fd/src/Radiance_Monitor/nwprod/radmon_shared/sorc/verf_radbcor.fd radmon_bcor.fd + [[ -d radmon_bcor.fd ]] && rm -rf radmon_bcor.fd + ${LINK} gsi_monitor.fd/src/Radiance_Monitor/nwprod/radmon_shared/sorc/verf_radbcor.fd radmon_bcor.fd - [[ -d radmon_time.fd ]] && rm -rf radmon_time.fd - ${LINK} gsi_monitor.fd/src/Radiance_Monitor/nwprod/radmon_shared/sorc/verf_radtime.fd radmon_time.fd - fi + [[ -d radmon_time.fd ]] && rm -rf radmon_time.fd + ${LINK} gsi_monitor.fd/src/Radiance_Monitor/nwprod/radmon_shared/sorc/verf_radtime.fd radmon_time.fd +fi + +for prog in global_cycle.fd emcsfc_ice_blend.fd emcsfc_snow2mdl.fd ;do + [[ -d "${prog}" ]] && rm -rf "${prog}" + ${LINK} "ufs_utils.fd/sorc/${prog}" "${prog}" +done - [[ -d gfs_ncep_post.fd ]] && rm -rf gfs_ncep_post.fd - ${LINK} upp.fd/sorc/ncep_post.fd gfs_ncep_post.fd - - for prog in global_cycle.fd \ - emcsfc_ice_blend.fd \ - emcsfc_snow2mdl.fd ;do - [[ -d "${prog}" ]] && rm -rf "${prog}" - ${LINK} "ufs_utils.fd/sorc/${prog}" "${prog}" - done - - for prog in enkf_chgres_recenter_nc.fd \ - fbwndgfs.fd \ - gaussian_sfcanl.fd \ - gfs_bufr.fd \ - mkgfsawps.fd \ - overgridid.fd \ - rdbfmsua.fd \ - reg2grb2.fd \ - supvit.fd \ - syndat_getjtbul.fd \ - syndat_maksynrc.fd \ - syndat_qctropcy.fd \ - tave.fd \ - tocsbufr.fd \ - vint.fd \ - webtitle.fd - do - if [[ -d "${prog}" ]]; then rm -rf "${prog}"; fi - ${LINK} "gfs_utils.fd/src/${prog}" . - done +for prog in enkf_chgres_recenter_nc.fd \ + fbwndgfs.fd \ + gaussian_sfcanl.fd \ + gfs_bufr.fd \ + mkgfsawps.fd \ + overgridid.fd \ + rdbfmsua.fd \ + reg2grb2.fd \ + supvit.fd \ + syndat_getjtbul.fd \ + syndat_maksynrc.fd \ + syndat_qctropcy.fd \ + tave.fd \ + tocsbufr.fd \ + vint.fd \ + webtitle.fd +do + if [[ -d "${prog}" ]]; then rm -rf "${prog}"; fi + ${LINK_OR_COPY} "gfs_utils.fd/src/${prog}" . +done echo "${BASH_SOURCE[0]} completed successfully" From 3fe6f4ebc211b31bd643101cda20a53f1566b23c Mon Sep 17 00:00:00 2001 From: Rahul Mahajan Date: Fri, 7 Jul 2023 17:50:05 -0400 Subject: [PATCH 19/19] put back tic tic tic in ci script --- ci/scripts/clone-build_ci.sh | 1 + 1 file changed, 1 insertion(+) diff --git a/ci/scripts/clone-build_ci.sh b/ci/scripts/clone-build_ci.sh index a76131a609..2adab196e0 100755 --- a/ci/scripts/clone-build_ci.sh +++ b/ci/scripts/clone-build_ci.sh @@ -64,6 +64,7 @@ source "${HOMEgfs}/ush/detect_machine.sh" # start output file { echo "Automated global-workflow Testing Results:" + echo '```' echo "Machine: ${MACHINE_ID^}" echo "Start: $(date) on $(hostname)" || true echo "---------------------------------------------------"