From f093a6ab2e0b101d01f8e2fdb51b94432fa69c68 Mon Sep 17 00:00:00 2001
From: James Edwards
Date: Tue, 2 Jan 2024 10:57:43 -0700
Subject: [PATCH 001/161] start with basics

---
 git_setup           | 91 +++++++++++++++++++++++++++++++++++++++++++++
 git_sparse_checkout | 69 ++++++++++++++++++++++++++++++++++
 show-tags           | 20 ++++++++++
 3 files changed, 180 insertions(+)
 create mode 100755 git_setup
 create mode 100755 git_sparse_checkout
 create mode 100755 show-tags

diff --git a/git_setup b/git_setup
new file mode 100755
index 0000000000..52901897d5
--- /dev/null
+++ b/git_setup
@@ -0,0 +1,91 @@
+#!/bin/bash
+# Script to fetch submodules for CAM
+# if argument -internal-only is supplied then only checkout CAM internal submodules, otherwise checkout everything needed for
+# cam as a top level repository
+#
+set -e
+script="install"
+function usage {
+    echo -e "\nusage: $script [-i/--internal-only]\n"
+}
+# Set default arguments
+internal_only=0
+
+# Process arguments
+while [ "$1" != "" ];
+do
+    case $1 in
+
+        # Only checkout CISM internal submodules
+        -i | -internal-only | --internal-only)
+            internal_only=1
+            ;;
+
+        *)
+            echo "$script: illegal option $1"
+            usage
+            exit 1
+            ;;
+    esac
+    shift
+done
+
+########################
+
+# Start with sparse checkouts
+if ! test -f bash-scripts/bin/git_sparse_checkout; then
+    echo "Getting git_sparse_checkout script"
+    git clone -n https://github.com/frgomes/bash-scripts --depth 1
+    pushd bash-scripts 1>/dev/null
+    git checkout HEAD -- bin/git_sparse_checkout
+    popd 1>/dev/null
+fi
+
+
+
+
+
+
+./bash-scripts/bin/git_sparse_checkout https://github.com/larson-group/clubb_release clubb_4ncar_20221129_59cb19f_branch src/physics/clubb -- src/CLUBB_core/ src/SILHS/
+git restore src/physics/clubb
+
+./bash-scripts/bin/git_sparse_checkout https://github.com/CFMIP/COSPv2.0 master src/physics/cosp2/src -- src/
+git restore src/physics/cosp2/src
+
+./bash-scripts/bin/git_sparse_checkout https://github.com/MPAS-Dev/MPAS-Model.git develop src/dynamics/mpas/dycore -- src/external/ src/operators/ src/tools/ \
+    src/core_atmosphere/ src/framework/
+git restore src/dynamics/mpas/dycore
+
+submodules=('chem_proc' 'src/physics/carma/base' 'src/physics/pumas' 'src/physics/pumas-frozen' 'src/physics/ali_arms' 'src/atmos_phys' 'src/dynamics/fv3/atmos_cubed_sphere' 'src/hemco')
+for mod in "${submodules[@]}"
+do
+    echo "Initializing $mod"
+    git submodule update --init $mod
+done
+
+if [ ${internal_only} -eq 1 ]
+  then
+    exit 0
+fi
+
+
+submodules=('ccs_config' 'components/cice5' 'components/cice' 'components/cmeps' \
+            'components/cdeps' 'components/cpl7' 'share' 'libraries/mct' \
+            'libraries/parallelio' 'cime' 'libraries/FMS' 'components/mosart' \
+            'components/rtm')
+
+for mod in "${submodules[@]}"
+do
+    echo "Initializing $mod"
+    git submodule update --init --recursive $mod
+done
+
+sparse_submodules=('components/clm' 'components/cism')
+for mod in "${sparse_submodules[@]}"
+do
+    echo "Initializing $mod"
+    git submodule update --init $mod
+    pushd $mod 1>/dev/null
+    bash ./install -internal-only
+    popd 1>/dev/null
+done
diff --git a/git_sparse_checkout b/git_sparse_checkout
new file mode 100755
index 0000000000..ec80069c2a
--- /dev/null
+++ b/git_sparse_checkout
@@ -0,0 +1,69 @@
+#!/bin/bash -eu
+
+# see also: git_origin and git_clone
+
+function git_sparse_checkout {
+  local self=$(readlink -f "${BASH_SOURCE[0]}")
+  local app=$(basename $self)
+  local usage=\
+"USAGE: ${app} repository-URL [branch] [project-directory] [[--] [list-of-files-or-directories]]
+  where:
+    'repository-URL' is a valid URL pointing to a Git repository.
+    'branch' is a branch, atag or a commit id. Default: master.
+    'project-directory' is a folder to be created and populated. Default: the project name.
+    'list-of-files-or-directories' is a list of file names or directories separated by spaces.
+  Examples:
+    ${app} http://github.com/frgomes/bash-scripts -- README.md
+    ${app} http://github.com/frgomes/bash-scripts develop -- README.md bin/
+    ${app} http://github.com/frgomes/bash-scripts develop tmpdir -- README.md bin/ docs/"
+
+  # obtain repository-URL, e.g.: http://github.com/frgomes/bash-scripts
+  [[ $# != 0 ]] || (echo "${usage}" 1>&2 ; return 1)
+  local arg=${1}
+  [[ "${arg}" != "--" ]] || (echo "${usage}" 1>&2 ; return 1)
+  local url="${arg}"
+  [[ $# == 0 ]] || shift
+
+  # obtain branch, which the default is master for historical reasons
+  if [[ "${arg}" != "--" ]] ;then arg="${1:-master}" ;fi
+  if [[ "${arg}" == "--" ]] ;then
+    local tag=master
+  else
+    local tag="${arg}"
+    [[ $# == 0 ]] || shift
+  fi
+
+  # obtain the project directory, which defaults to the repository name
+  local prj=$(echo "$url" | sed 's:/:\n:g' | tail -1)
+
+  if [[ "${arg}" != "--" ]] ;then arg="${1:-.}" ;fi
+  if [[ "${arg}" == "--" || "${arg}" == "." ]] ;then
+    local dir=$(readlink -f "./${prj}")
+  else
+    local dir=$(readlink -f "${arg}")
+    [[ $# == 0 ]] || shift
+  fi
+
+  if [[ "${arg}" == "--" ]] ;then [[ $# == 0 ]] || shift; fi
+  if [[ "${1:-}" == "--" ]] ;then [[ $# == 0 ]] || shift; fi
+
+  # Note: any remaining arguments after these above are considered as a
+  # list of files or directories to be downloaded. Names of directories
+  # must be followed by a slash /.
+
+  local sparse=true
+  local opts='--depth=1'
+
+  # now perform the sparse checkout
+  mkdir -p "${dir}"
+  git -C "${dir}" init
+  git -C "${dir}" config core.sparseCheckout ${sparse}
+  for path in $* ;do
+    echo "${path}" >> ${dir}/.git/info/sparse-checkout
+  done
+  git -C "${dir}" remote add origin ${url}
+  git -C "${dir}" fetch ${opts} origin ${tag}
+  git -C "${dir}" checkout ${tag}
+}
+
+git_sparse_checkout $@
diff --git a/show-tags b/show-tags
new file mode 100755
index 0000000000..56c7d9c5e4
--- /dev/null
+++ b/show-tags
@@ -0,0 +1,20 @@
+#!/bin/bash
+
+this_dir=$(pwd)
+printf "\nSubmodule status\n"
+printf "(currently checked out commit for each submodule)\n"
+printf "(when the submodule is initialized and a tag exists, the commit is shown as: 'most recent tag-commits since tag-commit hash')\n"
+printf "(when the submodule is not initialized, only the checked out commit is shown)\n\n"
+grep path .gitmodules | sed 's/.*= //' | while read x
+do
+    cd "$this_dir"
+    printf "$x\n - current commit: "
+    if [ "$(ls -A $x)" ] ; then
+        cd "$x"
+        git describe --tags --always
+    else
+        git submodule status $x | sed 's/^-//' | awk '{ print $1 }'
+    fi
+done
+printf "\n"
+

From f530e88acee6e959e9345b35a7e00b71d81641bf Mon Sep 17 00:00:00 2001
From: James Edwards
Date: Wed, 3 Jan 2024 17:20:30 -0700
Subject: [PATCH 002/161] module based method, sparse working

---
 git_setup               |  37 ++---
 git_setup.py            | 104 +++++++++++++
 git_sparse_checkout     |  10 +-
 modules/__init__.py     |   0
 modules/lstripreader.py |  44 ++++++
 modules/utils.py        | 325 ++++++++++++++++++++++++++++++++++++++++
 6 files changed, 501 insertions(+), 19 deletions(-)
 create mode 100644 git_setup.py
 create mode 100644 modules/__init__.py
 create mode 100644 modules/lstripreader.py
 create mode 100644 modules/utils.py

diff --git a/git_setup b/git_setup
index 52901897d5..a5b27b13f6 100755
--- a/git_setup
+++ b/git_setup
@@
-1,10 +1,10 @@ #!/bin/bash -# Script to fetch submodules for CAM -# if argument -internal-only is supplied then only checkout CAM internal submodules, otherwise checkout everything needed for -# cam as a top level repository +# Script to fetch submodules for esm applications +# if argument -internal-only is supplied then only checkout internal submodules, otherwise checkout everything needed for +# a top level repository # set -e -script="install" +script=$0 function usage { echo -e "\nusage: $script [-i/--internal-only]\n" } @@ -31,28 +31,31 @@ do done ######################## +declare -A submods +submod_list=$(git config -f .gitmodules --list) + +while IFS= read -r line; +do + readarray -d. -t strarr <<< "$line" + if [[ "strarr[3]" == "path" ]]; then + echo "path is $strarr[4]" + fi +done <<< "$submod_list" +exit 0 # Start with sparse checkouts -if ! test -f bash-scripts/bin/git_sparse_checkout; then +if ! test -f bin/git_sparse_checkout; then echo "Getting git_sparse_checkout script" - git clone -n https://github.com/frgomes/bash-scripts --depth 1 - pushd bash-scripts 1>/dev/null - git checkout HEAD -- bin/git_sparse_checkout - popd 1>/dev/null + git clone https://github.com/jedwards4b/gitscripts --depth 1 bin fi - - - - - -./bash-scripts/bin/git_sparse_checkout https://github.com/larson-group/clubb_release clubb_4ncar_20221129_59cb19f_branch src/physics/clubb -- src/CLUBB_core/ src/SILHS/ +./bin/git_sparse_checkout https://github.com/larson-group/clubb_release clubb_4ncar_20221129_59cb19f_branch src/physics/clubb -- src/CLUBB_core/ src/SILHS/ git restore src/physics/clubb -./bash-scripts/bin/git_sparse_checkout https://github.com/CFMIP/COSPv2.0 master src/physics/cosp2/src -- src/ +./bin/git_sparse_checkout https://github.com/CFMIP/COSPv2.0 master src/physics/cosp2/src -- src/ git restore src/physics/cosp2/src -./bash-scripts/bin/git_sparse_checkout https://github.com/MPAS-Dev/MPAS-Model.git develop src/dynamics/mpas/dycore -- src/external/ src/operators/ src/tools/ \ +./bin/git_sparse_checkout https://github.com/MPAS-Dev/MPAS-Model.git develop src/dynamics/mpas/dycore -- src/external/ src/operators/ src/tools/ \ src/core_atmosphere/ src/framework/ git restore src/dynamics/mpas/dycore diff --git a/git_setup.py b/git_setup.py new file mode 100644 index 0000000000..5d70486d63 --- /dev/null +++ b/git_setup.py @@ -0,0 +1,104 @@ +#!/usr/bin/env python +import os +import shutil +import logging +from modules import utils +from configparser import ConfigParser +from modules.lstripreader import LstripReader + +def parse_submodules_desc_section(section, section_items): + """Create a dict for this submodule description""" + desc = {} + esmrequired_options = ("T:T", "I:T", "I:F", "T:F") + for item in section_items: + name = item[0].strip().lower() + desc[name] = item[1].strip() + if not "esmrequired" in desc: + desc["esmrequired"] = "I:T" + + if desc["esmrequired"] not in esmrequired_options: + val = desc["esmrequired"] + utils.fatal_error(f"esmrequired set to {val} which is not a supported option {esmrequired_options}") + return desc + + +def submodule_sparse_checkout(name, url, path, sparsefile, tag="master"): + # first create the module directory + if not os.path.isdir(path): + os.makedirs(path) + # Check first if the module is already defined + # and the sparse-checkout file exists + cmd = ("git", "rev-parse", "--show-toplevel") + topdir = utils.execute_subprocess(cmd, output_to_caller=True).rstrip() + topgit = os.path.join(topdir, ".git", "modules") + gitsparse = os.path.join(topgit, name, 
"info","sparse-checkout") + if os.path.isfile(gitsparse): + logging.warning(f"submodule {name} is already initialized") + return + + #initialize a new git repo and set the sparse checkout flag + cmd = ("git", "-C", path, "init") + status = utils.execute_subprocess(cmd, status_to_caller=True) + cmd = ("git", "-C", path, "config", "core.sparseCheckout","true") + status = utils.execute_subprocess(cmd, status_to_caller=True) + # set the repository remote + cmd = ("git", "-C", path, "remote", "add", "origin", url) + status = utils.execute_subprocess(cmd, status_to_caller=True) + + if not os.path.isdir(topgit): + os.makedirs(topgit) + topgit = os.path.join(topgit,name) + + shutil.move(os.path.join(path, ".git"), topgit) + + shutil.copy(os.path.join(path,sparsefile), gitsparse) + + with open(os.path.join(path, ".git"), "w") as f: + f.write("gitdir: " + os.path.relpath(topgit, path)) + + #Finally checkout the repo + cmd = ("git", "-C", path, "fetch", "--depth=1", "origin", "--tags") + status = utils.execute_subprocess(cmd, status_to_caller=True) + cmd = ("git", "-C", path, "checkout", tag) + status = utils.execute_subprocess(cmd, status_to_caller=True) + print(f"Successfully checked out {name}") + +def read_gitmodules_file(root_dir, esmrequired, file_name=".gitmodules"): + root_dir = os.path.abspath(root_dir) + + msg = 'In directory : {0}'.format(root_dir) + logging.info(msg) + + file_path = os.path.join(root_dir, file_name) + if not os.path.exists(file_name): + msg = ('ERROR: submodules description file, "{0}", does not ' + 'exist in dir:\n {1}'.format(file_name, root_dir)) + utils.fatal_error(msg) + config = ConfigParser() + config.read_file(LstripReader(file_path), source=file_name) + for section in config.sections(): + name = section[11:-1] + submodule_desc = parse_submodules_desc_section(section,config.items(section)) + if submodule_desc["esmrequired"] not in esmrequired: + if "T:F" in esmrequired or submodule_desc["esmrequired"].startswith("I:"): + print(f"Skipping optional component {section}") + # TODO change to logging + # logging.info(f"Skipping optional component {section}") + continue + if "esmsparse" in submodule_desc: + if "esmtag" in submodule_desc: + tag = submodule_desc["esmtag"] + else: + tag = "master" + submodule_sparse_checkout(name, submodule_desc["url"], submodule_desc["path"], + submodule_desc["esmsparse"], tag) + + + + + +esmrequired = ("I:T", "T:T") +root_dir = os.getcwd() +gitmodules = read_gitmodules_file(root_dir, esmrequired) + + diff --git a/git_sparse_checkout b/git_sparse_checkout index ec80069c2a..9b52bb29a7 100755 --- a/git_sparse_checkout +++ b/git_sparse_checkout @@ -55,13 +55,19 @@ function git_sparse_checkout { local opts='--depth=1' # now perform the sparse checkout + mkdir -p "${dir}" git -C "${dir}" init git -C "${dir}" config core.sparseCheckout ${sparse} + git -C "${dir}" remote add origin ${url} + super=$(git rev-parse --show-toplevel) + pathtodotgit=$(realpath -m --relative-to=${dir} ${super})/.git + relpathfromsuper=$(realpath -m --relative-to=${super} ${dir}) + mv ${dir}/.git ${super}/.git/modules/${relpathfromsuper} + echo "gitdir: ${pathtodotgit}/modules/${relpathfromsuper}" > ${dir}/.git for path in $* ;do - echo "${path}" >> ${dir}/.git/info/sparse-checkout + echo "${path}" >> ${super}/.git/modules/${relpathfromsuper}/info/sparse-checkout done - git -C "${dir}" remote add origin ${url} git -C "${dir}" fetch ${opts} origin ${tag} git -C "${dir}" checkout ${tag} } diff --git a/modules/__init__.py b/modules/__init__.py new file mode 100644 index 
0000000000..e69de29bb2 diff --git a/modules/lstripreader.py b/modules/lstripreader.py new file mode 100644 index 0000000000..530abd297e --- /dev/null +++ b/modules/lstripreader.py @@ -0,0 +1,44 @@ + +class LstripReader(object): + "LstripReader formats .gitmodules files to be acceptable for configparser" + def __init__(self, filename): + with open(filename, 'r') as infile: + lines = infile.readlines() + self._lines = list() + self._num_lines = len(lines) + self._index = 0 + for line in lines: + self._lines.append(line.lstrip()) + + def readlines(self): + """Return all the lines from this object's file""" + return self._lines + + def readline(self, size=-1): + """Format and return the next line or raise StopIteration""" + try: + line = self.next() + except StopIteration: + line = '' + + if (size > 0) and (len(line) < size): + return line[0:size] + + return line + + def __iter__(self): + """Begin an iteration""" + self._index = 0 + return self + + def next(self): + """Return the next line or raise StopIteration""" + if self._index >= self._num_lines: + raise StopIteration + + self._index = self._index + 1 + return self._lines[self._index - 1] + + def __next__(self): + return self.next() + diff --git a/modules/utils.py b/modules/utils.py new file mode 100644 index 0000000000..8271538797 --- /dev/null +++ b/modules/utils.py @@ -0,0 +1,325 @@ +#!/usr/bin/env python3 +""" +Common public utilities for manic package + +""" + +import logging +import os +import subprocess +import sys +from threading import Timer + +LOCAL_PATH_INDICATOR = '.' +# --------------------------------------------------------------------- +# +# screen and logging output and functions to massage text for output +# +# --------------------------------------------------------------------- + + +def log_process_output(output): + """Log each line of process output at debug level so it can be + filtered if necessary. By default, output is a single string, and + logging.debug(output) will only put log info heading on the first + line. This makes it hard to filter with grep. + + """ + output = output.split('\n') + for line in output: + logging.debug(line) + + +def printlog(msg, **kwargs): + """Wrapper script around print to ensure that everything printed to + the screen also gets logged. + + """ + logging.info(msg) + if kwargs: + print(msg, **kwargs) + else: + print(msg) + sys.stdout.flush() + + +def last_n_lines(the_string, n_lines, truncation_message=None): + """Returns the last n lines of the given string + + Args: + the_string: str + n_lines: int + truncation_message: str, optional + + Returns a string containing the last n lines of the_string + + If truncation_message is provided, the returned string begins with + the given message if and only if the string is greater than n lines + to begin with. + """ + + lines = the_string.splitlines(True) + if len(lines) <= n_lines: + return_val = the_string + else: + lines_subset = lines[-n_lines:] + str_truncated = ''.join(lines_subset) + if truncation_message: + str_truncated = truncation_message + '\n' + str_truncated + return_val = str_truncated + + return return_val + + +def indent_string(the_string, indent_level): + """Indents the given string by a given number of spaces + + Args: + the_string: str + indent_level: int + + Returns a new string that is the same as the_string, except that + each line is indented by 'indent_level' spaces. + + In python3, this can be done with textwrap.indent. 
+ """ + + lines = the_string.splitlines(True) + padding = ' ' * indent_level + lines_indented = [padding + line for line in lines] + return ''.join(lines_indented) + +# --------------------------------------------------------------------- +# +# error handling +# +# --------------------------------------------------------------------- + + +def fatal_error(message): + """ + Error output function + """ + logging.error(message) + raise RuntimeError("{0}ERROR: {1}".format(os.linesep, message)) + + +# --------------------------------------------------------------------- +# +# Data conversion / manipulation +# +# --------------------------------------------------------------------- +def str_to_bool(bool_str): + """Convert a sting representation of as boolean into a true boolean. + + Conversion should be case insensitive. + """ + value = None + str_lower = bool_str.lower() + if str_lower in ('true', 't'): + value = True + elif str_lower in ('false', 'f'): + value = False + if value is None: + msg = ('ERROR: invalid boolean string value "{0}". ' + 'Must be "true" or "false"'.format(bool_str)) + fatal_error(msg) + return value + + +REMOTE_PREFIXES = ['http://', 'https://', 'ssh://', 'git@'] + + +def is_remote_url(url): + """check if the user provided a local file path instead of a + remote. If so, it must be expanded to an absolute + path. + + """ + remote_url = False + for prefix in REMOTE_PREFIXES: + if url.startswith(prefix): + remote_url = True + return remote_url + + +def split_remote_url(url): + """check if the user provided a local file path or a + remote. If remote, try to strip off protocol info. + + """ + remote_url = is_remote_url(url) + if not remote_url: + return url + + for prefix in REMOTE_PREFIXES: + url = url.replace(prefix, '') + + if '@' in url: + url = url.split('@')[1] + + if ':' in url: + url = url.split(':')[1] + + return url + + +def expand_local_url(url, field): + """check if the user provided a local file path instead of a + remote. If so, it must be expanded to an absolute + path. + + Note: local paths of LOCAL_PATH_INDICATOR have special meaning and + represent local copy only, don't work with the remotes. + + """ + remote_url = is_remote_url(url) + if not remote_url: + if url.strip() == LOCAL_PATH_INDICATOR: + pass + else: + url = os.path.expandvars(url) + url = os.path.expanduser(url) + if not os.path.isabs(url): + msg = ('WARNING: Externals description for "{0}" contains a ' + 'url that is not remote and does not expand to an ' + 'absolute path. Version control operations may ' + 'fail.\n\nurl={1}'.format(field, url)) + printlog(msg) + else: + url = os.path.normpath(url) + return url + + +# --------------------------------------------------------------------- +# +# subprocess +# +# --------------------------------------------------------------------- + +# Give the user a helpful message if we detect that a command seems to +# be hanging. +_HANGING_SEC = 300 + + +def _hanging_msg(working_directory, command): + print(""" + +Command '{command}' +from directory {working_directory} +has taken {hanging_sec} seconds. It may be hanging. + +The command will continue to run, but you may want to abort +manage_externals with ^C and investigate. A possible cause of hangs is +when svn or git require authentication to access a private +repository. On some systems, svn and git requests for authentication +information will not be displayed to the user. In this case, the program +will appear to hang. 
Ensure you can run svn and git manually and access +all repositories without entering your authentication information. + +""".format(command=command, + working_directory=working_directory, + hanging_sec=_HANGING_SEC)) + + +def execute_subprocess(commands, status_to_caller=False, + output_to_caller=False): + """Wrapper around subprocess.check_output to handle common + exceptions. + + check_output runs a command with arguments and waits + for it to complete. + + check_output raises an exception on a nonzero return code. if + status_to_caller is true, execute_subprocess returns the subprocess + return code, otherwise execute_subprocess treats non-zero return + status as an error and raises an exception. + + """ + cwd = os.getcwd() + msg = 'In directory: {0}\nexecute_subprocess running command:'.format(cwd) + logging.info(msg) + commands_str = ' '.join(commands) + logging.info(commands_str) + return_to_caller = status_to_caller or output_to_caller + status = -1 + output = '' + hanging_timer = Timer(_HANGING_SEC, _hanging_msg, + kwargs={"working_directory": cwd, + "command": commands_str}) + hanging_timer.start() + try: + output = subprocess.check_output(commands, stderr=subprocess.STDOUT, + universal_newlines=True) + log_process_output(output) + status = 0 + except OSError as error: + msg = failed_command_msg( + 'Command execution failed. Does the executable exist?', + commands) + logging.error(error) + fatal_error(msg) + except ValueError as error: + msg = failed_command_msg( + 'DEV_ERROR: Invalid arguments trying to run subprocess', + commands) + logging.error(error) + fatal_error(msg) + except subprocess.CalledProcessError as error: + # Only report the error if we are NOT returning to the + # caller. If we are returning to the caller, then it may be a + # simple status check. If returning, it is the callers + # responsibility determine if an error occurred and handle it + # appropriately. + if not return_to_caller: + msg_context = ('Process did not run successfully; ' + 'returned status {0}'.format(error.returncode)) + msg = failed_command_msg(msg_context, commands, + output=error.output) + logging.error(error) + logging.error(msg) + log_process_output(error.output) + fatal_error(msg) + status = error.returncode + finally: + hanging_timer.cancel() + + if status_to_caller and output_to_caller: + ret_value = (status, output) + elif status_to_caller: + ret_value = status + elif output_to_caller: + ret_value = output + else: + ret_value = None + + return ret_value + + +def failed_command_msg(msg_context, command, output=None): + """Template for consistent error messages from subprocess calls. + + If 'output' is given, it should provide the output from the failed + command + """ + + if output: + output_truncated = last_n_lines(output, 20, + truncation_message='[... 
Output truncated for brevity ...]') + errmsg = ('Failed with output:\n' + + indent_string(output_truncated, 4) + + '\nERROR: ') + else: + errmsg = '' + + command_str = ' '.join(command) + errmsg += """In directory + {cwd} +{context}: + {command} +""".format(cwd=os.getcwd(), context=msg_context, command=command_str) + + if output: + errmsg += 'See above for output from failed command.\n' + + return errmsg From e0cf1ff87ad4ff09ad950d39d8777b2df377456e Mon Sep 17 00:00:00 2001 From: James Edwards Date: Thu, 4 Jan 2024 09:29:53 -0700 Subject: [PATCH 003/161] now works two ways --- git_setup.py | 24 ++++++++--------- modules/gitinterface.py | 58 +++++++++++++++++++++++++++++++++++++++++ 2 files changed, 70 insertions(+), 12 deletions(-) create mode 100644 modules/gitinterface.py diff --git a/git_setup.py b/git_setup.py index 5d70486d63..56e3d2e39c 100644 --- a/git_setup.py +++ b/git_setup.py @@ -5,6 +5,7 @@ from modules import utils from configparser import ConfigParser from modules.lstripreader import LstripReader +from modules.gitinterface import GitInterface def parse_submodules_desc_section(section, section_items): """Create a dict for this submodule description""" @@ -28,8 +29,10 @@ def submodule_sparse_checkout(name, url, path, sparsefile, tag="master"): os.makedirs(path) # Check first if the module is already defined # and the sparse-checkout file exists - cmd = ("git", "rev-parse", "--show-toplevel") - topdir = utils.execute_subprocess(cmd, output_to_caller=True).rstrip() + fullpath = os.path.join(os.getcwd(),path) + git = GitInterface(os.getcwd()) + topdir = git.git_operation("rev-parse", "--show-toplevel").rstrip() + topgit = os.path.join(topdir, ".git", "modules") gitsparse = os.path.join(topgit, name, "info","sparse-checkout") if os.path.isfile(gitsparse): @@ -37,13 +40,12 @@ def submodule_sparse_checkout(name, url, path, sparsefile, tag="master"): return #initialize a new git repo and set the sparse checkout flag - cmd = ("git", "-C", path, "init") - status = utils.execute_subprocess(cmd, status_to_caller=True) - cmd = ("git", "-C", path, "config", "core.sparseCheckout","true") - status = utils.execute_subprocess(cmd, status_to_caller=True) + sprepo_git = GitInterface(os.path.join(topdir,path)) + sprepo_git.config_set_value("core", "sparseCheckout", "true") + # set the repository remote - cmd = ("git", "-C", path, "remote", "add", "origin", url) - status = utils.execute_subprocess(cmd, status_to_caller=True) + cmd = ("remote", "add", "origin", url) + sprepo_git.git_operation("remote", "add", "origin", url) if not os.path.isdir(topgit): os.makedirs(topgit) @@ -57,10 +59,8 @@ def submodule_sparse_checkout(name, url, path, sparsefile, tag="master"): f.write("gitdir: " + os.path.relpath(topgit, path)) #Finally checkout the repo - cmd = ("git", "-C", path, "fetch", "--depth=1", "origin", "--tags") - status = utils.execute_subprocess(cmd, status_to_caller=True) - cmd = ("git", "-C", path, "checkout", tag) - status = utils.execute_subprocess(cmd, status_to_caller=True) + sprepo_git.git_operation( "fetch", "--depth=1", "origin", "--tags") + sprepo_git.git_operation( "checkout", tag) print(f"Successfully checked out {name}") def read_gitmodules_file(root_dir, esmrequired, file_name=".gitmodules"): diff --git a/modules/gitinterface.py b/modules/gitinterface.py new file mode 100644 index 0000000000..105acd3d2c --- /dev/null +++ b/modules/gitinterface.py @@ -0,0 +1,58 @@ +import os +from modules import utils + +class GitInterface: + def __init__(self, repo_path): + self.repo_path = repo_path + 
+ try: + import git + self._use_module = True + try: + self.repo = git.Repo(repo_path) # Initialize GitPython repo + except git.exc.InvalidGitRepositoryError: + self.git = git + self._init_git_repo() + except ImportError: + self._use_module = False + if not os.path.exists(os.path.join(repo_path,".git")): + self._init_git_repo() + + def _git_command(self, operation, *args): + if self._use_module: + return getattr(self.repo.git, operation)(*args) + else: + return ["git", "-C",self.repo_path, operation] + list(args) + + def _init_git_repo(self): + if self._use_module: + self.repo = self.git.Repo.init(self.repo_path) + else: + command = ("git", "-C", self.repo_path, "init") + utils.execute_subprocess(command) + + + def git_operation(self, operation, *args, **kwargs): + command = self._git_command(operation, *args) + if isinstance(command, list): + return utils.execute_subprocess(command, output_to_caller=True) + else: + return command + + def config_get_value(self, section, name): + if self._use_module: + config = self.repo.config_reader() + return config.get_value(section, name) + else: + cmd = ("git","-C",self.repo_path,"config", "--get", f"{section}.{name}") + output = utils.execute_subprocess(cmd, output_to_caller=True) + return output.strip() + + def config_set_value(self, section, name, value): + if self._use_module: + with self.repo.config_writer() as writer: + writer.set_value(section, name, value) + writer.release() # Ensure changes are saved + else: + cmd = ("git","-C",self.repo_path,"config", f"{section}.{name}", value) + utils.execute_subprocess(cmd, output_to_caller=True) From f36bc9a68a578667f83b59dbc339d96e6fdc6995 Mon Sep 17 00:00:00 2001 From: James Edwards Date: Thu, 4 Jan 2024 14:03:45 -0700 Subject: [PATCH 004/161] complete the tools --- git_setup.py | 22 ++++++++++++++++++++-- modules/gitinterface.py | 7 ++++++- 2 files changed, 26 insertions(+), 3 deletions(-) diff --git a/git_setup.py b/git_setup.py index 56e3d2e39c..319e68cdf2 100644 --- a/git_setup.py +++ b/git_setup.py @@ -14,6 +14,9 @@ def parse_submodules_desc_section(section, section_items): for item in section_items: name = item[0].strip().lower() desc[name] = item[1].strip() + # e3sm needs to have ssh protocol urls, we don't + if name == "url" and desc[name].startswith("git@github"): + desc[name] = desc[name].replace("git@github.com:","https://github.com/") if not "esmrequired" in desc: desc["esmrequired"] = "I:T" @@ -29,7 +32,6 @@ def submodule_sparse_checkout(name, url, path, sparsefile, tag="master"): os.makedirs(path) # Check first if the module is already defined # and the sparse-checkout file exists - fullpath = os.path.join(os.getcwd(),path) git = GitInterface(os.getcwd()) topdir = git.git_operation("rev-parse", "--show-toplevel").rstrip() @@ -62,6 +64,17 @@ def submodule_sparse_checkout(name, url, path, sparsefile, tag="master"): sprepo_git.git_operation( "fetch", "--depth=1", "origin", "--tags") sprepo_git.git_operation( "checkout", tag) print(f"Successfully checked out {name}") + +def submodule_checkout(name, url, path, tag, esmrequired): + git = GitInterface(os.cwd()) + topdir = git.git_operation("rev-parse", "--show-toplevel").rstrip() + repodir = os.path.join(topdir, path) + git.git_operation("submodule","update","--init") + # Look for a .gitmodules file in the newly checkedout repo + if os.path.exists(os.path.join(repodir,".gitmodules")): + # recursively handle this checkout + read_gitmodules_file(repodir, esmrequired) + return def read_gitmodules_file(root_dir, esmrequired, 
file_name=".gitmodules"): root_dir = os.path.abspath(root_dir) @@ -92,7 +105,12 @@ def read_gitmodules_file(root_dir, esmrequired, file_name=".gitmodules"): tag = "master" submodule_sparse_checkout(name, submodule_desc["url"], submodule_desc["path"], submodule_desc["esmsparse"], tag) - + continue + Iesmrequired = [] + for setting in esmrequired: + if setting.startswith("I:"): + Iesmrequired.append(setting) + submodule_checkout(name, submodule_desc["url"], submodule_desc["path"], tag, Iesmrequired) diff --git a/modules/gitinterface.py b/modules/gitinterface.py index 105acd3d2c..06203bb9da 100644 --- a/modules/gitinterface.py +++ b/modules/gitinterface.py @@ -1,4 +1,5 @@ import os +import logging from modules import utils class GitInterface: @@ -13,12 +14,16 @@ def __init__(self, repo_path): except git.exc.InvalidGitRepositoryError: self.git = git self._init_git_repo() + msg = "Using GitPython interface to git" except ImportError: self._use_module = False if not os.path.exists(os.path.join(repo_path,".git")): self._init_git_repo() - + msg = "Using shell interface to git" + logging.info(msg) + def _git_command(self, operation, *args): + logging.info(operation) if self._use_module: return getattr(self.repo.git, operation)(*args) else: From 2beceaa56b02c1d607696a3d4259a54338214e35 Mon Sep 17 00:00:00 2001 From: James Edwards Date: Thu, 4 Jan 2024 15:45:11 -0700 Subject: [PATCH 005/161] more fixes --- git_setup.py | 13 +++++++------ modules/gitinterface.py | 1 + 2 files changed, 8 insertions(+), 6 deletions(-) diff --git a/git_setup.py b/git_setup.py index 319e68cdf2..82c044cff2 100644 --- a/git_setup.py +++ b/git_setup.py @@ -66,14 +66,15 @@ def submodule_sparse_checkout(name, url, path, sparsefile, tag="master"): print(f"Successfully checked out {name}") def submodule_checkout(name, url, path, tag, esmrequired): - git = GitInterface(os.cwd()) + git = GitInterface(os.getcwd()) topdir = git.git_operation("rev-parse", "--show-toplevel").rstrip() repodir = os.path.join(topdir, path) - git.git_operation("submodule","update","--init") + git.git_operation("submodule","update","--init", name) # Look for a .gitmodules file in the newly checkedout repo if os.path.exists(os.path.join(repodir,".gitmodules")): # recursively handle this checkout read_gitmodules_file(repodir, esmrequired) + print(f"Successfully checked out {name}") return def read_gitmodules_file(root_dir, esmrequired, file_name=".gitmodules"): @@ -98,11 +99,11 @@ def read_gitmodules_file(root_dir, esmrequired, file_name=".gitmodules"): # TODO change to logging # logging.info(f"Skipping optional component {section}") continue + if "esmtag" in submodule_desc: + tag = submodule_desc["esmtag"] + else: + tag = "master" if "esmsparse" in submodule_desc: - if "esmtag" in submodule_desc: - tag = submodule_desc["esmtag"] - else: - tag = "master" submodule_sparse_checkout(name, submodule_desc["url"], submodule_desc["path"], submodule_desc["esmsparse"], tag) continue diff --git a/modules/gitinterface.py b/modules/gitinterface.py index 06203bb9da..baf98f35d8 100644 --- a/modules/gitinterface.py +++ b/modules/gitinterface.py @@ -40,6 +40,7 @@ def _init_git_repo(self): def git_operation(self, operation, *args, **kwargs): command = self._git_command(operation, *args) if isinstance(command, list): + print(command) return utils.execute_subprocess(command, output_to_caller=True) else: return command From a3fcaf20dad5a7bcbd5c4e3dc0ac916911172494 Mon Sep 17 00:00:00 2001 From: James Edwards Date: Thu, 4 Jan 2024 15:59:04 -0700 Subject: [PATCH 006/161] 
submodule needs path not name --- git_setup.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/git_setup.py b/git_setup.py index 82c044cff2..79ee8f5517 100644 --- a/git_setup.py +++ b/git_setup.py @@ -69,10 +69,11 @@ def submodule_checkout(name, url, path, tag, esmrequired): git = GitInterface(os.getcwd()) topdir = git.git_operation("rev-parse", "--show-toplevel").rstrip() repodir = os.path.join(topdir, path) - git.git_operation("submodule","update","--init", name) + git.git_operation("submodule","update","--init", path) # Look for a .gitmodules file in the newly checkedout repo if os.path.exists(os.path.join(repodir,".gitmodules")): # recursively handle this checkout + print( read_gitmodules_file(repodir, esmrequired) print(f"Successfully checked out {name}") return From eed05bd0e63cd08b9d8d6d388cdfc65764ba8204 Mon Sep 17 00:00:00 2001 From: James Edwards Date: Thu, 4 Jan 2024 16:06:35 -0700 Subject: [PATCH 007/161] improve error checking --- git_setup.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/git_setup.py b/git_setup.py index 79ee8f5517..fc3616c268 100644 --- a/git_setup.py +++ b/git_setup.py @@ -73,9 +73,11 @@ def submodule_checkout(name, url, path, tag, esmrequired): # Look for a .gitmodules file in the newly checkedout repo if os.path.exists(os.path.join(repodir,".gitmodules")): # recursively handle this checkout - print( read_gitmodules_file(repodir, esmrequired) - print(f"Successfully checked out {name}") + if os.path.exists(os.path.join(repodir,".git")): + print(f"Successfully checked out {name}") + else: + utils.fatal_error(f"Failed to checkout {name}") return def read_gitmodules_file(root_dir, esmrequired, file_name=".gitmodules"): From 20431710b064b0cf4876de54065a38d962facb23 Mon Sep 17 00:00:00 2001 From: James Edwards Date: Thu, 4 Jan 2024 16:14:05 -0700 Subject: [PATCH 008/161] add more logging --- git_setup.py | 1 + 1 file changed, 1 insertion(+) diff --git a/git_setup.py b/git_setup.py index fc3616c268..daf4181df7 100644 --- a/git_setup.py +++ b/git_setup.py @@ -73,6 +73,7 @@ def submodule_checkout(name, url, path, tag, esmrequired): # Look for a .gitmodules file in the newly checkedout repo if os.path.exists(os.path.join(repodir,".gitmodules")): # recursively handle this checkout + print(f"Recursively checking out submodules of {name}") read_gitmodules_file(repodir, esmrequired) if os.path.exists(os.path.join(repodir,".git")): print(f"Successfully checked out {name}") From 9c35b88bcf355d48a262e82d04daba36ed4071aa Mon Sep 17 00:00:00 2001 From: James Edwards Date: Thu, 4 Jan 2024 16:34:46 -0700 Subject: [PATCH 009/161] submodule always usees shell --- modules/gitinterface.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/gitinterface.py b/modules/gitinterface.py index baf98f35d8..2fc7a131bc 100644 --- a/modules/gitinterface.py +++ b/modules/gitinterface.py @@ -24,7 +24,7 @@ def __init__(self, repo_path): def _git_command(self, operation, *args): logging.info(operation) - if self._use_module: + if self._use_module and operation != "submodule": return getattr(self.repo.git, operation)(*args) else: return ["git", "-C",self.repo_path, operation] + list(args) From 8c5dfcdc0e7eba4cf369e15e40f19a68de42f6a5 Mon Sep 17 00:00:00 2001 From: James Edwards Date: Thu, 4 Jan 2024 16:42:31 -0700 Subject: [PATCH 010/161] fix toplevel --- git_setup.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/git_setup.py b/git_setup.py index daf4181df7..c22fa1355f 100644 --- a/git_setup.py +++ 
b/git_setup.py @@ -67,7 +67,8 @@ def submodule_sparse_checkout(name, url, path, sparsefile, tag="master"): def submodule_checkout(name, url, path, tag, esmrequired): git = GitInterface(os.getcwd()) - topdir = git.git_operation("rev-parse", "--show-toplevel").rstrip() + topdir = os.getcwd() +# topdir = git.git_operation("rev-parse", "--show-toplevel").rstrip() repodir = os.path.join(topdir, path) git.git_operation("submodule","update","--init", path) # Look for a .gitmodules file in the newly checkedout repo From d431e3b7cc23672a7cd081c795ee0607e17beb3b Mon Sep 17 00:00:00 2001 From: James Edwards Date: Thu, 4 Jan 2024 16:44:33 -0700 Subject: [PATCH 011/161] fix toplevel --- git_setup.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/git_setup.py b/git_setup.py index c22fa1355f..5355b21069 100644 --- a/git_setup.py +++ b/git_setup.py @@ -65,11 +65,10 @@ def submodule_sparse_checkout(name, url, path, sparsefile, tag="master"): sprepo_git.git_operation( "checkout", tag) print(f"Successfully checked out {name}") -def submodule_checkout(name, url, path, tag, esmrequired): +def submodule_checkout(root, name, url, path, tag, esmrequired): git = GitInterface(os.getcwd()) - topdir = os.getcwd() # topdir = git.git_operation("rev-parse", "--show-toplevel").rstrip() - repodir = os.path.join(topdir, path) + repodir = os.path.join(root, path) git.git_operation("submodule","update","--init", path) # Look for a .gitmodules file in the newly checkedout repo if os.path.exists(os.path.join(repodir,".gitmodules")): @@ -116,7 +115,7 @@ def read_gitmodules_file(root_dir, esmrequired, file_name=".gitmodules"): for setting in esmrequired: if setting.startswith("I:"): Iesmrequired.append(setting) - submodule_checkout(name, submodule_desc["url"], submodule_desc["path"], tag, Iesmrequired) + submodule_checkout(root_dir, name, submodule_desc["url"], submodule_desc["path"], tag, Iesmrequired) From b6ee263ce6ee009f40288275866f45b471b8b6c8 Mon Sep 17 00:00:00 2001 From: James Edwards Date: Thu, 4 Jan 2024 16:48:32 -0700 Subject: [PATCH 012/161] try again --- git_setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/git_setup.py b/git_setup.py index 5355b21069..3be8e3dfce 100644 --- a/git_setup.py +++ b/git_setup.py @@ -73,7 +73,7 @@ def submodule_checkout(root, name, url, path, tag, esmrequired): # Look for a .gitmodules file in the newly checkedout repo if os.path.exists(os.path.join(repodir,".gitmodules")): # recursively handle this checkout - print(f"Recursively checking out submodules of {name}") + print(f"Recursively checking out submodules of {name} {repodir}") read_gitmodules_file(repodir, esmrequired) if os.path.exists(os.path.join(repodir,".git")): print(f"Successfully checked out {name}") From 996dfb6323dfd25e93c3a55ed1efb18d1f0b148a Mon Sep 17 00:00:00 2001 From: James Edwards Date: Fri, 5 Jan 2024 09:04:48 -0700 Subject: [PATCH 013/161] add command line options and support for update --- git_setup.py | 190 ++++++++++++++++++++++++++++++++++++++++++--------- 1 file changed, 159 insertions(+), 31 deletions(-) diff --git a/git_setup.py b/git_setup.py index 3be8e3dfce..8361fe9a8c 100644 --- a/git_setup.py +++ b/git_setup.py @@ -1,12 +1,119 @@ #!/usr/bin/env python +import sys import os import shutil import logging +import argparse from modules import utils from configparser import ConfigParser from modules.lstripreader import LstripReader from modules.gitinterface import GitInterface +from contextlib import contextmanager + + +@contextmanager +def pushd(new_dir): + 
previous_dir = os.getcwd() + os.chdir(new_dir) + try: + yield + finally: + os.chdir(previous_dir) + +def commandline_arguments(args=None): + description = ''' + %(prog)s manages checking out groups of gitsubmodules with addtional support for Earth System Models + ''' + parser = argparse.ArgumentParser( + description=description, + formatter_class=argparse.RawDescriptionHelpFormatter) + + # + # user options + # + parser.add_argument("components", nargs="*", + help="Specific component(s) to checkout. By default, " + "all required submodules are checked out.") + + parser.add_argument('-C', '--path', default=os.getcwd(), + help='Toplevel repository directory. Defaults to current directory.') + + parser.add_argument('-x', '--exclude', nargs='*', + help='Component(s) listed in the gitmodules file which should be ignored.') + + parser.add_argument('-o', '--optional', action='store_true', default=False, + help='By default only the required submodules ' + 'are checked out. This flag will also checkout the ' + 'optional submodules relative to the toplevel directory.') + + parser.add_argument('-S', '--status', action='store_true', default=False, + help='Output the status of the repositories managed by ' + '%(prog)s. By default only summary information ' + 'is provided. Use the verbose option to see details.') + + parser.add_argument('-u', '--update', action='store_true', default=False, + help='Update submodules to the tags defined in .gitmodules.') + + parser.add_argument('-v', '--verbose', action='count', default=0, + help='Output additional information to ' + 'the screen and log file. This flag can be ' + 'used up to two times, increasing the ' + 'verbosity level each time.') + + parser.add_argument('-V', '--version', action='store_true', default=False, + help='Print manage_externals version and exit.') + + # + # developer options + # + parser.add_argument('--backtrace', action='store_true', + help='DEVELOPER: show exception backtraces as extra ' + 'debugging output') + + parser.add_argument('-d', '--debug', action='store_true', default=False, + help='DEVELOPER: output additional debugging ' + 'information to the screen and log file.') + + logging_group = parser.add_mutually_exclusive_group() + + logging_group.add_argument('--logging', dest='do_logging', + action='store_true', + help='DEVELOPER: enable logging.') + logging_group.add_argument('--no-logging', dest='do_logging', + action='store_false', default=False, + help='DEVELOPER: disable logging ' + '(this is the default)') + if args: + options = parser.parse_args(args) + else: + options = parser.parse_args() + + if options.optional: + esmrequired = 'T:' + else: + esmrequired = 'T:T' + + if options.status: + action = 'status' + elif options.update: + action = 'update' + else: + action = 'install' + + if options.version: + version_info = '' + version_file_path = os.path.join(os.path.dirname(__file__),'version.txt') + with open(version_file_path) as f: + version_info = f.readlines()[0].strip() + print(version_info) + sys.exit(0) + + + + return options.rootdir, esmrequired, options.components, options.exclude, options.verbose, action + + def parse_submodules_desc_section(section, section_items): """Create a dict for this submodule description""" desc = {} @@ -66,8 +173,7 @@ def submodule_sparse_checkout(name, url, path, sparsefile, tag="master"): print(f"Successfully checked out {name}") def submodule_checkout(root, name, url, path, tag, esmrequired): - git = GitInterface(os.getcwd()) -# topdir = git.git_operation("rev-parse", 
"--show-toplevel").rstrip() + git = GitInterface(root) repodir = os.path.join(root, path) git.git_operation("submodule","update","--init", path) # Look for a .gitmodules file in the newly checkedout repo @@ -80,15 +186,26 @@ def submodule_checkout(root, name, url, path, tag, esmrequired): else: utils.fatal_error(f"Failed to checkout {name}") return - -def read_gitmodules_file(root_dir, esmrequired, file_name=".gitmodules"): + +def submodule_update(root_dir, url, tag): + with pushd(root_dir): + git = GitInterface(root_dir) + # first make sure the url is correct + upstream = git.git_operation("ls-remote","--git-url") + if upstream != url: + # TODO - this needs to be a unique name + git.git_operation("remote","add","newbranch",url) + git.git_operation("checkout", tag) + + +def read_gitmodules_file(root_dir, esmrequired, file_name=".gitmodules", gitmodulelist=None, action='install'): root_dir = os.path.abspath(root_dir) msg = 'In directory : {0}'.format(root_dir) logging.info(msg) file_path = os.path.join(root_dir, file_name) - if not os.path.exists(file_name): + if not os.path.exists(file_path): msg = ('ERROR: submodules description file, "{0}", does not ' 'exist in dir:\n {1}'.format(file_name, root_dir)) utils.fatal_error(msg) @@ -97,31 +214,42 @@ def read_gitmodules_file(root_dir, esmrequired, file_name=".gitmodules"): for section in config.sections(): name = section[11:-1] submodule_desc = parse_submodules_desc_section(section,config.items(section)) - if submodule_desc["esmrequired"] not in esmrequired: - if "T:F" in esmrequired or submodule_desc["esmrequired"].startswith("I:"): - print(f"Skipping optional component {section}") - # TODO change to logging - # logging.info(f"Skipping optional component {section}") - continue - if "esmtag" in submodule_desc: - tag = submodule_desc["esmtag"] - else: - tag = "master" - if "esmsparse" in submodule_desc: - submodule_sparse_checkout(name, submodule_desc["url"], submodule_desc["path"], - submodule_desc["esmsparse"], tag) - continue - Iesmrequired = [] - for setting in esmrequired: - if setting.startswith("I:"): - Iesmrequired.append(setting) - submodule_checkout(root_dir, name, submodule_desc["url"], submodule_desc["path"], tag, Iesmrequired) - - - - -esmrequired = ("I:T", "T:T") -root_dir = os.getcwd() -gitmodules = read_gitmodules_file(root_dir, esmrequired) + if action == 'install': + # Recursively install submodules, honering esm tags in .gitmodules + if submodule_desc["esmrequired"] not in esmrequired: + if "T:F" in esmrequired or submodule_desc["esmrequired"].startswith("I:"): + print(f"Skipping optional component {section}") + # TODO change to logging + # logging.info(f"Skipping optional component {section}") + continue + if "esmtag" in submodule_desc: + tag = submodule_desc["esmtag"] + else: + tag = "master" + if "esmsparse" in submodule_desc: + submodule_sparse_checkout(name, submodule_desc["url"], submodule_desc["path"], + submodule_desc["esmsparse"], tag) + continue + Iesmrequired = [] + for setting in esmrequired: + if setting.startswith("I:"): + Iesmrequired.append(setting) + submodule_checkout(root_dir, name, submodule_desc["url"], submodule_desc["path"], tag, Iesmrequired) + + if action == 'update': + # update the submodules to the tags defined in .gitmodules + if "esmtag" in submodule_desc: + submod_dir = os.path.join(root_dir,submodule_desc['path']) + if os.path.exists(os.path.join(submod_dir,".git")): + submodule_update(submod_dir, submodule_desc['url'], submodule_desc["esmtag"]) + + + + +if __name__ == '__main__': + root_dir, 
esmrequired, includelist, excludelist, verbose, action = commandline_arguments() + esmrequired = ("I:T", "T:T") + root_dir = os.getcwd() + read_gitmodules_file(root_dir, esmrequired, gitmodulelist, action, verbose) From 647a8deea48c13ad9dbbb55fbcf294ff6eec6546 Mon Sep 17 00:00:00 2001 From: James Edwards Date: Fri, 5 Jan 2024 10:23:07 -0700 Subject: [PATCH 014/161] fix new issues --- git_setup.py | 24 +++++++++++++++--------- 1 file changed, 15 insertions(+), 9 deletions(-) diff --git a/git_setup.py b/git_setup.py index 8361fe9a8c..82100a09c5 100644 --- a/git_setup.py +++ b/git_setup.py @@ -39,6 +39,11 @@ def commandline_arguments(args=None): parser.add_argument('-C', '--path', default=os.getcwd(), help='Toplevel repository directory. Defaults to current directory.') + parser.add_argument('-g', '--gitmodules', nargs='?', + default='.gitmodules', + help='The submodule description filename. ' + 'Default: %(default)s.') + parser.add_argument('-x', '--exclude', nargs='*', help='Component(s) listed in the gitmodules file which should be ignored.') @@ -109,9 +114,7 @@ def commandline_arguments(args=None): print(version_info) sys.exit(0) - - - return options.rootdir, esmrequired, options.components, options.exclude, options.verbose, action + return options.path, options.gitmodules, esmrequired, options.components, options.exclude, options.verbose, action def parse_submodules_desc_section(section, section_items): @@ -191,14 +194,15 @@ def submodule_update(root_dir, url, tag): with pushd(root_dir): git = GitInterface(root_dir) # first make sure the url is correct - upstream = git.git_operation("ls-remote","--git-url") + upstream = git.git_operation("ls-remote","--get-url").rstrip() + print(f"Here {upstream} and {url}") if upstream != url: # TODO - this needs to be a unique name git.git_operation("remote","add","newbranch",url) git.git_operation("checkout", tag) -def read_gitmodules_file(root_dir, esmrequired, file_name=".gitmodules", gitmodulelist=None, action='install'): +def read_gitmodules_file(root_dir, esmrequired, file_name=".gitmodules", includelist=None, excludelist=None, action='install'): root_dir = os.path.abspath(root_dir) msg = 'In directory : {0}'.format(root_dir) @@ -213,6 +217,10 @@ def read_gitmodules_file(root_dir, esmrequired, file_name=".gitmodules", gitmodu config.read_file(LstripReader(file_path), source=file_name) for section in config.sections(): name = section[11:-1] + if includelist and name not in includelist: + continue + if excludelist and name in excludelist: + continue submodule_desc = parse_submodules_desc_section(section,config.items(section)) if action == 'install': @@ -248,8 +256,6 @@ def read_gitmodules_file(root_dir, esmrequired, file_name=".gitmodules", gitmodu if __name__ == '__main__': - root_dir, esmrequired, includelist, excludelist, verbose, action = commandline_arguments() - esmrequired = ("I:T", "T:T") - root_dir = os.getcwd() - read_gitmodules_file(root_dir, esmrequired, gitmodulelist, action, verbose) + root_dir, file_name, esmrequired, includelist, excludelist, verbose, action = commandline_arguments() + read_gitmodules_file(root_dir, file_name=file_name, esmrequired=esmrequired, includelist=includelist, excludelist=excludelist, action=action) From fbb8eaf70bd134f8ec5de662294f033c1d18b8ed Mon Sep 17 00:00:00 2001 From: James Edwards Date: Fri, 5 Jan 2024 10:44:48 -0700 Subject: [PATCH 015/161] update esmrequired variable --- git_setup.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/git_setup.py b/git_setup.py index 
82100a09c5..baacdb3d7b 100644 --- a/git_setup.py +++ b/git_setup.py @@ -95,9 +95,9 @@ def commandline_arguments(args=None): options = parser.parse_args() if options.optional: - esmrequired = 'T:' + esmrequired = ('T:T','T:F','I:T') else: - esmrequired = 'T:T' + esmrequired = ('T:T','I:T') if options.status: action = 'status' From e22d95b589470d1d4f93970951b555b1fdfa7462 Mon Sep 17 00:00:00 2001 From: James Edwards Date: Fri, 5 Jan 2024 11:34:38 -0700 Subject: [PATCH 016/161] fix recursive issue --- git_setup.py | 19 +++++++------------ modules/gitinterface.py | 1 - 2 files changed, 7 insertions(+), 13 deletions(-) diff --git a/git_setup.py b/git_setup.py index baacdb3d7b..d8b0275348 100644 --- a/git_setup.py +++ b/git_setup.py @@ -183,7 +183,7 @@ def submodule_checkout(root, name, url, path, tag, esmrequired): if os.path.exists(os.path.join(repodir,".gitmodules")): # recursively handle this checkout print(f"Recursively checking out submodules of {name} {repodir}") - read_gitmodules_file(repodir, esmrequired) + read_gitmodules_file(repodir, ("I:T")) if os.path.exists(os.path.join(repodir,".git")): print(f"Successfully checked out {name}") else: @@ -195,7 +195,6 @@ def submodule_update(root_dir, url, tag): git = GitInterface(root_dir) # first make sure the url is correct upstream = git.git_operation("ls-remote","--get-url").rstrip() - print(f"Here {upstream} and {url}") if upstream != url: # TODO - this needs to be a unique name git.git_operation("remote","add","newbranch",url) @@ -226,11 +225,10 @@ def read_gitmodules_file(root_dir, esmrequired, file_name=".gitmodules", include if action == 'install': # Recursively install submodules, honering esm tags in .gitmodules if submodule_desc["esmrequired"] not in esmrequired: - if "T:F" in esmrequired or submodule_desc["esmrequired"].startswith("I:"): - print(f"Skipping optional component {section}") - # TODO change to logging - # logging.info(f"Skipping optional component {section}") - continue + print(f"Skipping optional component {section}") + # TODO change to logging + # logging.info(f"Skipping optional component {section}") + continue if "esmtag" in submodule_desc: tag = submodule_desc["esmtag"] else: @@ -239,11 +237,8 @@ def read_gitmodules_file(root_dir, esmrequired, file_name=".gitmodules", include submodule_sparse_checkout(name, submodule_desc["url"], submodule_desc["path"], submodule_desc["esmsparse"], tag) continue - Iesmrequired = [] - for setting in esmrequired: - if setting.startswith("I:"): - Iesmrequired.append(setting) - submodule_checkout(root_dir, name, submodule_desc["url"], submodule_desc["path"], tag, Iesmrequired) + + submodule_checkout(root_dir, name, submodule_desc["url"], submodule_desc["path"], tag, esmrequired) if action == 'update': # update the submodules to the tags defined in .gitmodules diff --git a/modules/gitinterface.py b/modules/gitinterface.py index 2fc7a131bc..a4e2529256 100644 --- a/modules/gitinterface.py +++ b/modules/gitinterface.py @@ -40,7 +40,6 @@ def _init_git_repo(self): def git_operation(self, operation, *args, **kwargs): command = self._git_command(operation, *args) if isinstance(command, list): - print(command) return utils.execute_subprocess(command, output_to_caller=True) else: return command From 99d5439757f52b01fa0ae9f0924c472b255083eb Mon Sep 17 00:00:00 2001 From: James Edwards Date: Fri, 5 Jan 2024 13:52:33 -0700 Subject: [PATCH 017/161] add status flag support --- git_setup.py | 24 ++++++++++++++++++++++-- 1 file changed, 22 insertions(+), 2 deletions(-) diff --git a/git_setup.py 
b/git_setup.py index d8b0275348..f5334faef9 100644 --- a/git_setup.py +++ b/git_setup.py @@ -200,7 +200,17 @@ def submodule_update(root_dir, url, tag): git.git_operation("remote","add","newbranch",url) git.git_operation("checkout", tag) - +def submodule_status(root_dir, name, url, path, tag): + with pushd(path): + git = GitInterface(os.path.join(root_dir,path)) + atag = git.git_operation("describe","--tags","--always").rstrip() + if tag and atag != tag: + print(f"Submodule {name} {atag} is out of sync with .gitmodules {tag}") + elif tag: + print(f"Submodule {name} at tag {tag}") + else: + print(f"Submodule {name} has no tag defined in .gitmodules") + def read_gitmodules_file(root_dir, esmrequired, file_name=".gitmodules", includelist=None, excludelist=None, action='install'): root_dir = os.path.abspath(root_dir) @@ -222,10 +232,19 @@ def read_gitmodules_file(root_dir, esmrequired, file_name=".gitmodules", include continue submodule_desc = parse_submodules_desc_section(section,config.items(section)) + if action == 'status': + if "esmtag" in submodule_desc: + tag = submodule_desc["esmtag"] + else: + tag = None + submodule_status(root_dir, name, submodule_desc["url"], submodule_desc["path"], tag) + + if action == 'install': # Recursively install submodules, honering esm tags in .gitmodules if submodule_desc["esmrequired"] not in esmrequired: - print(f"Skipping optional component {section}") + if 'T:F' in submodule_desc["esmrequired"]: + print(f"Skipping optional component {section}") # TODO change to logging # logging.info(f"Skipping optional component {section}") continue @@ -252,5 +271,6 @@ def read_gitmodules_file(root_dir, esmrequired, file_name=".gitmodules", include if __name__ == '__main__': root_dir, file_name, esmrequired, includelist, excludelist, verbose, action = commandline_arguments() + print(f"action is {action}") read_gitmodules_file(root_dir, file_name=file_name, esmrequired=esmrequired, includelist=includelist, excludelist=excludelist, action=action) From 671bb1df07b611db05947f63572e38c2f6286a9a Mon Sep 17 00:00:00 2001 From: James Edwards Date: Sat, 6 Jan 2024 09:28:06 -0700 Subject: [PATCH 018/161] add a gitmodules class --- git-fleximod.py | 302 ++++++++++++++++++++++++++++++++++++++++++ git_setup | 94 ------------- git_setup.py | 276 -------------------------------------- git_sparse_checkout | 75 ----------- modules/gitmodules.py | 53 ++++++++ modules/utils.py | 137 +++++++++++-------- show-tags | 20 --- 7 files changed, 437 insertions(+), 520 deletions(-) create mode 100644 git-fleximod.py delete mode 100755 git_setup delete mode 100644 git_setup.py delete mode 100755 git_sparse_checkout create mode 100644 modules/gitmodules.py delete mode 100755 show-tags diff --git a/git-fleximod.py b/git-fleximod.py new file mode 100644 index 0000000000..ded13eb9db --- /dev/null +++ b/git-fleximod.py @@ -0,0 +1,302 @@ +#!/usr/bin/env python +import sys +import os +import shutil +import logging +import argparse +from modules import utils +from modules.gitinterface import GitInterface +from modules.gitmodules import GitModules + + +def commandline_arguments(args=None): + description = """ + %(prog)s manages checking out groups of gitsubmodules with addtional support for Earth System Models + """ + parser = argparse.ArgumentParser( + description=description, formatter_class=argparse.RawDescriptionHelpFormatter + ) + + # + # user options + # + parser.add_argument( + "components", + nargs="*", + help="Specific component(s) to checkout. 
By default, " + "all required submodules are checked out.", + ) + + parser.add_argument( + "-C", + "--path", + default=os.getcwd(), + help="Toplevel repository directory. Defaults to current directory.", + ) + + parser.add_argument( + "-g", + "--gitmodules", + nargs="?", + default=".gitmodules", + help="The submodule description filename. " "Default: %(default)s.", + ) + + parser.add_argument( + "-x", + "--exclude", + nargs="*", + help="Component(s) listed in the gitmodules file which should be ignored.", + ) + + parser.add_argument( + "-o", + "--optional", + action="store_true", + default=False, + help="By default only the required submodules " + "are checked out. This flag will also checkout the " + "optional submodules relative to the toplevel directory.", + ) + + parser.add_argument( + "-S", + "--status", + action="store_true", + default=False, + help="Output the status of the repositories managed by " + "%(prog)s. By default only summary information " + "is provided. Use the verbose option to see details.", + ) + + parser.add_argument( + "-u", + "--update", + action="store_true", + default=False, + help="Update submodules to the tags defined in .gitmodules.", + ) + + parser.add_argument( + "-v", + "--verbose", + action="count", + default=0, + help="Output additional information to " + "the screen and log file. This flag can be " + "used up to two times, increasing the " + "verbosity level each time.", + ) + + parser.add_argument( + "-V", + "--version", + action="store_true", + default=False, + help="Print manage_externals version and exit.", + ) + + # + # developer options + # + parser.add_argument( + "--backtrace", + action="store_true", + help="DEVELOPER: show exception backtraces as extra " "debugging output", + ) + + parser.add_argument( + "-d", + "--debug", + action="store_true", + default=False, + help="DEVELOPER: output additional debugging " + "information to the screen and log file.", + ) + + logging_group = parser.add_mutually_exclusive_group() + + logging_group.add_argument( + "--logging", + dest="do_logging", + action="store_true", + help="DEVELOPER: enable logging.", + ) + logging_group.add_argument( + "--no-logging", + dest="do_logging", + action="store_false", + default=False, + help="DEVELOPER: disable logging " "(this is the default)", + ) + if args: + options = parser.parse_args(args) + else: + options = parser.parse_args() + + if options.optional: + esmrequired = ("T:T", "T:F", "I:T") + else: + esmrequired = ("T:T", "I:T") + + if options.status: + action = "status" + elif options.update: + action = "update" + else: + action = "install" + + if options.version: + version_info = "" + version_file_path = os.path.join(os.path.dirname(__file__), "version.txt") + with open(version_file_path) as f: + version_info = f.readlines()[0].strip() + print(version_info) + sys.exit(0) + + return ( + options.path, + options.gitmodules, + esmrequired, + options.components, + options.exclude, + options.verbose, + action, + ) + + +def submodule_sparse_checkout(name, url, path, sparsefile, tag="master"): + # first create the module directory + if not os.path.isdir(path): + os.makedirs(path) + # Check first if the module is already defined + # and the sparse-checkout file exists + git = GitInterface(os.getcwd()) + topdir = git.git_operation("rev-parse", "--show-toplevel").rstrip() + + topgit = os.path.join(topdir, ".git", "modules") + gitsparse = os.path.join(topgit, name, "info", "sparse-checkout") + if os.path.isfile(gitsparse): + logging.warning("submodule {} is already 
initialized".format(name)) + return + + # initialize a new git repo and set the sparse checkout flag + sprepo_git = GitInterface(os.path.join(topdir, path)) + sprepo_git.config_set_value("core", "sparseCheckout", "true") + + # set the repository remote + sprepo_git.git_operation("remote", "add", "origin", url) + + if not os.path.isdir(topgit): + os.makedirs(topgit) + topgit = os.path.join(topgit, name) + + shutil.move(os.path.join(path, ".git"), topgit) + + shutil.copy(os.path.join(path, sparsefile), gitsparse) + + with open(os.path.join(path, ".git"), "w") as f: + f.write("gitdir: " + os.path.relpath(topgit, path)) + + # Finally checkout the repo + sprepo_git.git_operation("fetch", "--depth=1", "origin", "--tags") + sprepo_git.git_operation("checkout", tag) + print(f"Successfully checked out {name}") + + +def submodule_checkout(root, name, path): + git = GitInterface(root) + repodir = os.path.join(root, path) + git.git_operation("submodule", "update", "--init", path) + # Look for a .gitmodules file in the newly checkedout repo + if os.path.exists(os.path.join(repodir, ".gitmodules")): + # recursively handle this checkout + print(f"Recursively checking out submodules of {name} {repodir}") + gitmodules = GitModules(repodir) + submodules_install(gitmodules, repodir, ("I:T")) + if os.path.exists(os.path.join(repodir, ".git")): + print(f"Successfully checked out {name}") + else: + utils.fatal_error(f"Failed to checkout {name}") + return + + +def submodules_status(gitmodules, root_dir): + for name in gitmodules.sections(): + path = gitmodules.get(name, "path") + tag = gitmodules.git(name, "esmtag") + with utils.pushd(path): + git = GitInterface(os.path.join(root_dir, path)) + atag = git.git_operation("describe", "--tags", "--always").rstrip() + if tag and atag != tag: + print(f"Submodule {name} {atag} is out of sync with .gitmodules {tag}") + elif tag: + print(f"Submodule {name} at tag {tag}") + else: + print(f"Submodule {name} has no tag defined in .gitmodules") + + +def submodules_update(gitmodules, root_dir): + for name in gitmodules.sections(): + esmtag = gitmodules.get(name, "esmtag") + path = gitmodules.get(name, "path") + url = gitmodules.get(name, "url") + if os.path.exists(os.path.join(path, ".git")): + with utils.pushd(root_dir): + git = GitInterface(root_dir) + # first make sure the url is correct + upstream = git.git_operation("ls-remote", "--get-url").rstrip() + if upstream != url: + # TODO - this needs to be a unique name + git.git_operation("remote", "add", "newbranch", url) + git.git_operation("checkout", esmtag) + + +def submodules_install(gitmodules, root_dir, requiredlist): + for name in gitmodules.sections(): + esmrequired = gitmodules.get(name, "esmrequired") + esmsparse = gitmodules.get(name, "esmsparse") + esmtag = gitmodules.get(name, "esmtag") + path = gitmodules.get(name, "path") + url = gitmodules.get(name, "url") + if esmrequired not in requiredlist: + if "T:F" in esmrequired: + print("Skipping optional component {}".format(name)) + continue + if esmsparse: + submodule_sparse_checkout(name, url, path, esmsparse, tag=esmtag) + else: + submodule_checkout(root_dir, name, path) + + +def _main_func(): + ( + root_dir, + file_name, + esmrequired, + includelist, + excludelist, + verbose, + action, + ) = commandline_arguments() + if verbose: + print(f"action is {action}") + gitmodules = GitModules( + confpath=root_dir, + conffile=file_name, + includelist=includelist, + excludelist=excludelist, + ) + + if action == "update": + submodules_update(gitmodules, root_dir) + elif 
action == "install": + submodules_install(gitmodules, root_dir, esmrequired) + elif action == "status": + submodules_status(gitmodules, root_dir) + else: + utils.fatal_error(f"unrecognized action request {action}") + + +if __name__ == "__main__": + _main_func() diff --git a/git_setup b/git_setup deleted file mode 100755 index a5b27b13f6..0000000000 --- a/git_setup +++ /dev/null @@ -1,94 +0,0 @@ -#!/bin/bash -# Script to fetch submodules for esm applications -# if argument -internal-only is supplied then only checkout internal submodules, otherwise checkout everything needed for -# a top level repository -# -set -e -script=$0 -function usage { - echo -e "\nusage: $script [-i/--internal-only]\n" -} -# Set default arguments -internal_only=0 - -# Process arguments -while [ "$1" != "" ]; -do - case $1 in - - # Only checkout CISM internal submodules - -i | -internal-only | --internal-only) - internal_only=1 - ;; - - *) - echo "$script: illegal option $1" - usage - exit 1 - ;; - esac - shift -done - -######################## -declare -A submods -submod_list=$(git config -f .gitmodules --list) - -while IFS= read -r line; -do - readarray -d. -t strarr <<< "$line" - if [[ "strarr[3]" == "path" ]]; then - echo "path is $strarr[4]" - fi -done <<< "$submod_list" -exit 0 - -# Start with sparse checkouts -if ! test -f bin/git_sparse_checkout; then - echo "Getting git_sparse_checkout script" - git clone https://github.com/jedwards4b/gitscripts --depth 1 bin -fi - -./bin/git_sparse_checkout https://github.com/larson-group/clubb_release clubb_4ncar_20221129_59cb19f_branch src/physics/clubb -- src/CLUBB_core/ src/SILHS/ -git restore src/physics/clubb - -./bin/git_sparse_checkout https://github.com/CFMIP/COSPv2.0 master src/physics/cosp2/src -- src/ -git restore src/physics/cosp2/src - -./bin/git_sparse_checkout https://github.com/MPAS-Dev/MPAS-Model.git develop src/dynamics/mpas/dycore -- src/external/ src/operators/ src/tools/ \ - src/core_atmosphere/ src/framework/ -git restore src/dynamics/mpas/dycore - -submodules=('chem_proc' 'src/physics/carma/base' 'src/physics/pumas' 'src/physics/pumas-frozen' 'src/physics/ali_arms' 'src/atmos_phys' 'src/dynamics/fv3/atmos_cubed_sphere' 'src/hemco') -for mod in "${submodules[@]}" -do - echo "Initializing $mod" - git submodule update --init $mod -done - -if [ ${internal_only} -eq 1 ] - then - exit 0 -fi - - -submodules=('ccs_config' 'components/cice5' 'components/cice' 'components/cmeps' \ - 'components/cdeps' 'components/cpl7' 'share' 'libraries/mct' \ - 'libraries/parallelio' 'cime' 'libraries/FMS' 'components/mosart' \ - 'components/rtm') - -for mod in "${submodules[@]}" -do - echo "Initializing $mod" - git submodule update --init --recursive $mod -done - -sparse_submodules=('components/clm' 'components/cism') -for mod in "${sparse_submodules[@]}" -do - echo "Initializing $mod" - git submodule update --init $mod - pushd $mod 1>/dev/null - bash ./install -internal-only - popd 1>/dev/null -done diff --git a/git_setup.py b/git_setup.py deleted file mode 100644 index f5334faef9..0000000000 --- a/git_setup.py +++ /dev/null @@ -1,276 +0,0 @@ -#!/usr/bin/env python -import sys -import os -import shutil -import logging -import argparse -from modules import utils -from configparser import ConfigParser -from modules.lstripreader import LstripReader -from modules.gitinterface import GitInterface - -from contextlib import contextmanager - - -@contextmanager -def pushd(new_dir): - previous_dir = os.getcwd() - os.chdir(new_dir) - try: - yield - finally: - os.chdir(previous_dir) 
- -def commandline_arguments(args=None): - description = ''' - %(prog)s manages checking out groups of gitsubmodules with addtional support for Earth System Models - ''' - parser = argparse.ArgumentParser( - description=description, - formatter_class=argparse.RawDescriptionHelpFormatter) - - # - # user options - # - parser.add_argument("components", nargs="*", - help="Specific component(s) to checkout. By default, " - "all required submodules are checked out.") - - parser.add_argument('-C', '--path', default=os.getcwd(), - help='Toplevel repository directory. Defaults to current directory.') - - parser.add_argument('-g', '--gitmodules', nargs='?', - default='.gitmodules', - help='The submodule description filename. ' - 'Default: %(default)s.') - - parser.add_argument('-x', '--exclude', nargs='*', - help='Component(s) listed in the gitmodules file which should be ignored.') - - parser.add_argument('-o', '--optional', action='store_true', default=False, - help='By default only the required submodules ' - 'are checked out. This flag will also checkout the ' - 'optional submodules relative to the toplevel directory.') - - parser.add_argument('-S', '--status', action='store_true', default=False, - help='Output the status of the repositories managed by ' - '%(prog)s. By default only summary information ' - 'is provided. Use the verbose option to see details.') - - parser.add_argument('-u', '--update', action='store_true', default=False, - help='Update submodules to the tags defined in .gitmodules.') - - parser.add_argument('-v', '--verbose', action='count', default=0, - help='Output additional information to ' - 'the screen and log file. This flag can be ' - 'used up to two times, increasing the ' - 'verbosity level each time.') - - parser.add_argument('-V', '--version', action='store_true', default=False, - help='Print manage_externals version and exit.') - - # - # developer options - # - parser.add_argument('--backtrace', action='store_true', - help='DEVELOPER: show exception backtraces as extra ' - 'debugging output') - - parser.add_argument('-d', '--debug', action='store_true', default=False, - help='DEVELOPER: output additional debugging ' - 'information to the screen and log file.') - - logging_group = parser.add_mutually_exclusive_group() - - logging_group.add_argument('--logging', dest='do_logging', - action='store_true', - help='DEVELOPER: enable logging.') - logging_group.add_argument('--no-logging', dest='do_logging', - action='store_false', default=False, - help='DEVELOPER: disable logging ' - '(this is the default)') - if args: - options = parser.parse_args(args) - else: - options = parser.parse_args() - - if options.optional: - esmrequired = ('T:T','T:F','I:T') - else: - esmrequired = ('T:T','I:T') - - if options.status: - action = 'status' - elif options.update: - action = 'update' - else: - action = 'install' - - if options.version: - version_info = '' - version_file_path = os.path.join(os.path.dirname(__file__),'version.txt') - with open(version_file_path) as f: - version_info = f.readlines()[0].strip() - print(version_info) - sys.exit(0) - - return options.path, options.gitmodules, esmrequired, options.components, options.exclude, options.verbose, action - - -def parse_submodules_desc_section(section, section_items): - """Create a dict for this submodule description""" - desc = {} - esmrequired_options = ("T:T", "I:T", "I:F", "T:F") - for item in section_items: - name = item[0].strip().lower() - desc[name] = item[1].strip() - # e3sm needs to have ssh protocol urls, we don't - 
if name == "url" and desc[name].startswith("git@github"): - desc[name] = desc[name].replace("git@github.com:","https://github.com/") - if not "esmrequired" in desc: - desc["esmrequired"] = "I:T" - - if desc["esmrequired"] not in esmrequired_options: - val = desc["esmrequired"] - utils.fatal_error(f"esmrequired set to {val} which is not a supported option {esmrequired_options}") - return desc - - -def submodule_sparse_checkout(name, url, path, sparsefile, tag="master"): - # first create the module directory - if not os.path.isdir(path): - os.makedirs(path) - # Check first if the module is already defined - # and the sparse-checkout file exists - git = GitInterface(os.getcwd()) - topdir = git.git_operation("rev-parse", "--show-toplevel").rstrip() - - topgit = os.path.join(topdir, ".git", "modules") - gitsparse = os.path.join(topgit, name, "info","sparse-checkout") - if os.path.isfile(gitsparse): - logging.warning(f"submodule {name} is already initialized") - return - - #initialize a new git repo and set the sparse checkout flag - sprepo_git = GitInterface(os.path.join(topdir,path)) - sprepo_git.config_set_value("core", "sparseCheckout", "true") - - # set the repository remote - cmd = ("remote", "add", "origin", url) - sprepo_git.git_operation("remote", "add", "origin", url) - - if not os.path.isdir(topgit): - os.makedirs(topgit) - topgit = os.path.join(topgit,name) - - shutil.move(os.path.join(path, ".git"), topgit) - - shutil.copy(os.path.join(path,sparsefile), gitsparse) - - with open(os.path.join(path, ".git"), "w") as f: - f.write("gitdir: " + os.path.relpath(topgit, path)) - - #Finally checkout the repo - sprepo_git.git_operation( "fetch", "--depth=1", "origin", "--tags") - sprepo_git.git_operation( "checkout", tag) - print(f"Successfully checked out {name}") - -def submodule_checkout(root, name, url, path, tag, esmrequired): - git = GitInterface(root) - repodir = os.path.join(root, path) - git.git_operation("submodule","update","--init", path) - # Look for a .gitmodules file in the newly checkedout repo - if os.path.exists(os.path.join(repodir,".gitmodules")): - # recursively handle this checkout - print(f"Recursively checking out submodules of {name} {repodir}") - read_gitmodules_file(repodir, ("I:T")) - if os.path.exists(os.path.join(repodir,".git")): - print(f"Successfully checked out {name}") - else: - utils.fatal_error(f"Failed to checkout {name}") - return - -def submodule_update(root_dir, url, tag): - with pushd(root_dir): - git = GitInterface(root_dir) - # first make sure the url is correct - upstream = git.git_operation("ls-remote","--get-url").rstrip() - if upstream != url: - # TODO - this needs to be a unique name - git.git_operation("remote","add","newbranch",url) - git.git_operation("checkout", tag) - -def submodule_status(root_dir, name, url, path, tag): - with pushd(path): - git = GitInterface(os.path.join(root_dir,path)) - atag = git.git_operation("describe","--tags","--always").rstrip() - if tag and atag != tag: - print(f"Submodule {name} {atag} is out of sync with .gitmodules {tag}") - elif tag: - print(f"Submodule {name} at tag {tag}") - else: - print(f"Submodule {name} has no tag defined in .gitmodules") - -def read_gitmodules_file(root_dir, esmrequired, file_name=".gitmodules", includelist=None, excludelist=None, action='install'): - root_dir = os.path.abspath(root_dir) - - msg = 'In directory : {0}'.format(root_dir) - logging.info(msg) - - file_path = os.path.join(root_dir, file_name) - if not os.path.exists(file_path): - msg = ('ERROR: submodules description 
file, "{0}", does not ' - 'exist in dir:\n {1}'.format(file_name, root_dir)) - utils.fatal_error(msg) - config = ConfigParser() - config.read_file(LstripReader(file_path), source=file_name) - for section in config.sections(): - name = section[11:-1] - if includelist and name not in includelist: - continue - if excludelist and name in excludelist: - continue - submodule_desc = parse_submodules_desc_section(section,config.items(section)) - - if action == 'status': - if "esmtag" in submodule_desc: - tag = submodule_desc["esmtag"] - else: - tag = None - submodule_status(root_dir, name, submodule_desc["url"], submodule_desc["path"], tag) - - - if action == 'install': - # Recursively install submodules, honering esm tags in .gitmodules - if submodule_desc["esmrequired"] not in esmrequired: - if 'T:F' in submodule_desc["esmrequired"]: - print(f"Skipping optional component {section}") - # TODO change to logging - # logging.info(f"Skipping optional component {section}") - continue - if "esmtag" in submodule_desc: - tag = submodule_desc["esmtag"] - else: - tag = "master" - if "esmsparse" in submodule_desc: - submodule_sparse_checkout(name, submodule_desc["url"], submodule_desc["path"], - submodule_desc["esmsparse"], tag) - continue - - submodule_checkout(root_dir, name, submodule_desc["url"], submodule_desc["path"], tag, esmrequired) - - if action == 'update': - # update the submodules to the tags defined in .gitmodules - if "esmtag" in submodule_desc: - submod_dir = os.path.join(root_dir,submodule_desc['path']) - if os.path.exists(os.path.join(submod_dir,".git")): - submodule_update(submod_dir, submodule_desc['url'], submodule_desc["esmtag"]) - - - - -if __name__ == '__main__': - root_dir, file_name, esmrequired, includelist, excludelist, verbose, action = commandline_arguments() - print(f"action is {action}") - read_gitmodules_file(root_dir, file_name=file_name, esmrequired=esmrequired, includelist=includelist, excludelist=excludelist, action=action) - diff --git a/git_sparse_checkout b/git_sparse_checkout deleted file mode 100755 index 9b52bb29a7..0000000000 --- a/git_sparse_checkout +++ /dev/null @@ -1,75 +0,0 @@ -#!/bin/bash -eu - -# see also: git_origin and git_clone - -function git_sparse_checkout { - local self=$(readlink -f "${BASH_SOURCE[0]}") - local app=$(basename $self) - local usage=\ -"USAGE: ${app} repository-URL [branch] [project-directory] [[--] [list-of-files-or-directories]] - where: - 'repository-URL' is a valid URL pointing to a Git repository. - 'branch' is a branch, atag or a commit id. Default: master. - 'project-directory' is a folder to be created and populated. Default: the project name. - 'list-of-files-or-directories' is a list of file names or directories separated by spaces. 
- Examples: - ${app} http://github.com/frgomes/bash-scripts -- README.md - ${app} http://github.com/frgomes/bash-scripts develop -- README.md bin/ - ${app} http://github.com/frgomes/bash-scripts develop tmpdir -- README.md bin/ docs/" - - # obtain repository-URL, e.g.: http://github.com/frgomes/bash-scripts - [[ $# != 0 ]] || (echo "${usage}" 1>&2 ; return 1) - local arg=${1} - [[ "${arg}" != "--" ]] || (echo "${usage}" 1>&2 ; return 1) - local url="${arg}" - [[ $# == 0 ]] || shift - - # obtain branch, which the default is master for historical reasons - if [[ "${arg}" != "--" ]] ;then arg="${1:-master}" ;fi - if [[ "${arg}" == "--" ]] ;then - local tag=master - else - local tag="${arg}" - [[ $# == 0 ]] || shift - fi - - # obtain the project directory, which defaults to the repository name - local prj=$(echo "$url" | sed 's:/:\n:g' | tail -1) - - if [[ "${arg}" != "--" ]] ;then arg="${1:-.}" ;fi - if [[ "${arg}" == "--" || "${arg}" == "." ]] ;then - local dir=$(readlink -f "./${prj}") - else - local dir=$(readlink -f "${arg}") - [[ $# == 0 ]] || shift - fi - - if [[ "${arg}" == "--" ]] ;then [[ $# == 0 ]] || shift; fi - if [[ "${1:-}" == "--" ]] ;then [[ $# == 0 ]] || shift; fi - - # Note: any remaining arguments after these above are considered as a - # list of files or directories to be downloaded. Names of directories - # must be followed by a slash /. - - local sparse=true - local opts='--depth=1' - - # now perform the sparse checkout - - mkdir -p "${dir}" - git -C "${dir}" init - git -C "${dir}" config core.sparseCheckout ${sparse} - git -C "${dir}" remote add origin ${url} - super=$(git rev-parse --show-toplevel) - pathtodotgit=$(realpath -m --relative-to=${dir} ${super})/.git - relpathfromsuper=$(realpath -m --relative-to=${super} ${dir}) - mv ${dir}/.git ${super}/.git/modules/${relpathfromsuper} - echo "gitdir: ${pathtodotgit}/modules/${relpathfromsuper}" > ${dir}/.git - for path in $* ;do - echo "${path}" >> ${super}/.git/modules/${relpathfromsuper}/info/sparse-checkout - done - git -C "${dir}" fetch ${opts} origin ${tag} - git -C "${dir}" checkout ${tag} -} - -git_sparse_checkout $@ diff --git a/modules/gitmodules.py b/modules/gitmodules.py new file mode 100644 index 0000000000..3ef3ed87fd --- /dev/null +++ b/modules/gitmodules.py @@ -0,0 +1,53 @@ +import os +from configparser import ConfigParser + +from modules.lstripreader import LstripReader + + +class GitModules(ConfigParser.ConfigParser): + def __init__( + self, + confpath=os.getcwd(), + conffile=".gitmodules", + includelist=None, + excludelist=None, + ): + ConfigParser.ConfigParser.__init__(self) + self.read_file(LstripReader(confpath), source=conffile) + self.conf_file = os.path.join(confpath, conffile) + self.includelist = includelist + self.excludelist = excludelist + + def set(self, name, option, value): + section = f'submodule "{name}"' + if not self.has_section(section): + self.add_section(section) + ConfigParser.ConfigParser.set(self, section, option, str(value)) + + def get(self, name, option): + section = f'submodule "{name}"' + try: + return ConfigParser.ConfigParser.get(self, section, option) + except ConfigParser.NoOptionError: + return None + + def save(self): + self.write(open(self.conf_file, "w")) + + def __del__(self): + self.save() + + def sections(self): + names = [] + for section in ConfigParser.ConfigParser.sections(self): + name = section[11:-1] + if self.includelist and name not in self.includelist: + continue + if self.excludelist and name in self.excludelist: + continue + names.append(name) + return 
names + + def items(self, name): + section = f'submodule "{name}"' + return ConfigParser.ConfigParser.items(section) diff --git a/modules/utils.py b/modules/utils.py index 8271538797..41c6bb25c8 100644 --- a/modules/utils.py +++ b/modules/utils.py @@ -10,12 +10,24 @@ import sys from threading import Timer -LOCAL_PATH_INDICATOR = '.' +LOCAL_PATH_INDICATOR = "." # --------------------------------------------------------------------- # -# screen and logging output and functions to massage text for output +# functions to massage text for output and other useful utilities # # --------------------------------------------------------------------- +from contextlib import contextmanager + + +@contextmanager +def pushd(new_dir): + """context for chdir. usage: with pushd(new_dir)""" + previous_dir = os.getcwd() + os.chdir(new_dir) + try: + yield + finally: + os.chdir(previous_dir) def log_process_output(output): @@ -25,7 +37,7 @@ def log_process_output(output): line. This makes it hard to filter with grep. """ - output = output.split('\n') + output = output.split("\n") for line in output: logging.debug(line) @@ -63,9 +75,9 @@ def last_n_lines(the_string, n_lines, truncation_message=None): return_val = the_string else: lines_subset = lines[-n_lines:] - str_truncated = ''.join(lines_subset) + str_truncated = "".join(lines_subset) if truncation_message: - str_truncated = truncation_message + '\n' + str_truncated + str_truncated = truncation_message + "\n" + str_truncated return_val = str_truncated return return_val @@ -85,9 +97,10 @@ def indent_string(the_string, indent_level): """ lines = the_string.splitlines(True) - padding = ' ' * indent_level + padding = " " * indent_level lines_indented = [padding + line for line in lines] - return ''.join(lines_indented) + return "".join(lines_indented) + # --------------------------------------------------------------------- # @@ -116,24 +129,26 @@ def str_to_bool(bool_str): """ value = None str_lower = bool_str.lower() - if str_lower in ('true', 't'): + if str_lower in ("true", "t"): value = True - elif str_lower in ('false', 'f'): + elif str_lower in ("false", "f"): value = False if value is None: - msg = ('ERROR: invalid boolean string value "{0}". ' - 'Must be "true" or "false"'.format(bool_str)) + msg = ( + 'ERROR: invalid boolean string value "{0}". ' + 'Must be "true" or "false"'.format(bool_str) + ) fatal_error(msg) return value -REMOTE_PREFIXES = ['http://', 'https://', 'ssh://', 'git@'] +REMOTE_PREFIXES = ["http://", "https://", "ssh://", "git@"] def is_remote_url(url): """check if the user provided a local file path instead of a - remote. If so, it must be expanded to an absolute - path. + remote. If so, it must be expanded to an absolute + path. """ remote_url = False @@ -145,7 +160,7 @@ def is_remote_url(url): def split_remote_url(url): """check if the user provided a local file path or a - remote. If remote, try to strip off protocol info. + remote. If remote, try to strip off protocol info. 
""" remote_url = is_remote_url(url) @@ -153,13 +168,13 @@ def split_remote_url(url): return url for prefix in REMOTE_PREFIXES: - url = url.replace(prefix, '') + url = url.replace(prefix, "") - if '@' in url: - url = url.split('@')[1] + if "@" in url: + url = url.split("@")[1] - if ':' in url: - url = url.split(':')[1] + if ":" in url: + url = url.split(":")[1] return url @@ -181,10 +196,12 @@ def expand_local_url(url, field): url = os.path.expandvars(url) url = os.path.expanduser(url) if not os.path.isabs(url): - msg = ('WARNING: Externals description for "{0}" contains a ' - 'url that is not remote and does not expand to an ' - 'absolute path. Version control operations may ' - 'fail.\n\nurl={1}'.format(field, url)) + msg = ( + 'WARNING: Externals description for "{0}" contains a ' + "url that is not remote and does not expand to an " + "absolute path. Version control operations may " + "fail.\n\nurl={1}".format(field, url) + ) printlog(msg) else: url = os.path.normpath(url) @@ -203,7 +220,8 @@ def expand_local_url(url, field): def _hanging_msg(working_directory, command): - print(""" + print( + """ Command '{command}' from directory {working_directory} @@ -217,13 +235,15 @@ def _hanging_msg(working_directory, command): will appear to hang. Ensure you can run svn and git manually and access all repositories without entering your authentication information. -""".format(command=command, - working_directory=working_directory, - hanging_sec=_HANGING_SEC)) +""".format( + command=command, + working_directory=working_directory, + hanging_sec=_HANGING_SEC, + ) + ) -def execute_subprocess(commands, status_to_caller=False, - output_to_caller=False): +def execute_subprocess(commands, status_to_caller=False, output_to_caller=False): """Wrapper around subprocess.check_output to handle common exceptions. @@ -237,32 +257,35 @@ def execute_subprocess(commands, status_to_caller=False, """ cwd = os.getcwd() - msg = 'In directory: {0}\nexecute_subprocess running command:'.format(cwd) + msg = "In directory: {0}\nexecute_subprocess running command:".format(cwd) logging.info(msg) - commands_str = ' '.join(commands) + commands_str = " ".join(commands) logging.info(commands_str) return_to_caller = status_to_caller or output_to_caller status = -1 - output = '' - hanging_timer = Timer(_HANGING_SEC, _hanging_msg, - kwargs={"working_directory": cwd, - "command": commands_str}) + output = "" + hanging_timer = Timer( + _HANGING_SEC, + _hanging_msg, + kwargs={"working_directory": cwd, "command": commands_str}, + ) hanging_timer.start() try: - output = subprocess.check_output(commands, stderr=subprocess.STDOUT, - universal_newlines=True) + output = subprocess.check_output( + commands, stderr=subprocess.STDOUT, universal_newlines=True + ) log_process_output(output) status = 0 except OSError as error: msg = failed_command_msg( - 'Command execution failed. Does the executable exist?', - commands) + "Command execution failed. Does the executable exist?", commands + ) logging.error(error) fatal_error(msg) except ValueError as error: msg = failed_command_msg( - 'DEV_ERROR: Invalid arguments trying to run subprocess', - commands) + "DEV_ERROR: Invalid arguments trying to run subprocess", commands + ) logging.error(error) fatal_error(msg) except subprocess.CalledProcessError as error: @@ -272,10 +295,11 @@ def execute_subprocess(commands, status_to_caller=False, # responsibility determine if an error occurred and handle it # appropriately. 
if not return_to_caller: - msg_context = ('Process did not run successfully; ' - 'returned status {0}'.format(error.returncode)) - msg = failed_command_msg(msg_context, commands, - output=error.output) + msg_context = ( + "Process did not run successfully; " + "returned status {0}".format(error.returncode) + ) + msg = failed_command_msg(msg_context, commands, output=error.output) logging.error(error) logging.error(msg) log_process_output(error.output) @@ -304,22 +328,25 @@ def failed_command_msg(msg_context, command, output=None): """ if output: - output_truncated = last_n_lines(output, 20, - truncation_message='[... Output truncated for brevity ...]') - errmsg = ('Failed with output:\n' + - indent_string(output_truncated, 4) + - '\nERROR: ') + output_truncated = last_n_lines( + output, 20, truncation_message="[... Output truncated for brevity ...]" + ) + errmsg = ( + "Failed with output:\n" + indent_string(output_truncated, 4) + "\nERROR: " + ) else: - errmsg = '' + errmsg = "" - command_str = ' '.join(command) + command_str = " ".join(command) errmsg += """In directory {cwd} {context}: {command} -""".format(cwd=os.getcwd(), context=msg_context, command=command_str) +""".format( + cwd=os.getcwd(), context=msg_context, command=command_str + ) if output: - errmsg += 'See above for output from failed command.\n' + errmsg += "See above for output from failed command.\n" return errmsg diff --git a/show-tags b/show-tags deleted file mode 100755 index 56c7d9c5e4..0000000000 --- a/show-tags +++ /dev/null @@ -1,20 +0,0 @@ -#!/bin/bash - -this_dir=$(pwd) -printf "\nSubmodule status\n" -printf "(currently checked out commit for each submodule)\n" -printf "(when the submodule is initialized and a tag exists, the commit is shown as: 'most recent tag-commits since tag-commit hash')\n" -printf "(when the submodule is not initialized, only the checked out commit is shown)\n\n" -grep path .gitmodules | sed 's/.*= //' | while read x -do - cd "$this_dir" - printf "$x\n - current commit: " - if [ "$(ls -A $x)" ] ; then - cd "$x" - git describe --tags --always - else - git submodule status $x | sed 's/^-//' | awk '{ print $1 }' - fi -done -printf "\n" - From 815e6cd0d9f00fdf1cd06bd67ca912006c8d3a46 Mon Sep 17 00:00:00 2001 From: James Edwards Date: Sat, 6 Jan 2024 10:34:47 -0700 Subject: [PATCH 019/161] now renamed git-fleximod for git interface --- git-fleximod.py => git-fleximod | 24 ++++++++++++++++++------ modules/gitmodules.py | 21 ++++++++++++--------- 2 files changed, 30 insertions(+), 15 deletions(-) rename git-fleximod.py => git-fleximod (92%) mode change 100644 => 100755 diff --git a/git-fleximod.py b/git-fleximod old mode 100644 new mode 100755 similarity index 92% rename from git-fleximod.py rename to git-fleximod index ded13eb9db..00fc2446c3 --- a/git-fleximod.py +++ b/git-fleximod @@ -8,6 +8,9 @@ from modules.gitinterface import GitInterface from modules.gitmodules import GitModules +logger = logging.getLogger(__name__) +logger.propogate = False + def commandline_arguments(args=None): description = """ @@ -135,9 +138,9 @@ def commandline_arguments(args=None): options = parser.parse_args() if options.optional: - esmrequired = ("T:T", "T:F", "I:T") + esmrequired = ["T:T", "T:F", "I:T"] else: - esmrequired = ("T:T", "I:T") + esmrequired = ["T:T", "I:T"] if options.status: action = "status" @@ -177,7 +180,7 @@ def submodule_sparse_checkout(name, url, path, sparsefile, tag="master"): topgit = os.path.join(topdir, ".git", "modules") gitsparse = os.path.join(topgit, name, "info", "sparse-checkout") if 
os.path.isfile(gitsparse): - logging.warning("submodule {} is already initialized".format(name)) + logger.warning("submodule {} is already initialized".format(name)) return # initialize a new git repo and set the sparse checkout flag @@ -213,7 +216,7 @@ def submodule_checkout(root, name, path): # recursively handle this checkout print(f"Recursively checking out submodules of {name} {repodir}") gitmodules = GitModules(repodir) - submodules_install(gitmodules, repodir, ("I:T")) + submodules_install(gitmodules, repodir, ["I:T"]) if os.path.exists(os.path.join(repodir, ".git")): print(f"Successfully checked out {name}") else: @@ -253,19 +256,27 @@ def submodules_update(gitmodules, root_dir): def submodules_install(gitmodules, root_dir, requiredlist): + print(gitmodules.sections()) for name in gitmodules.sections(): esmrequired = gitmodules.get(name, "esmrequired") esmsparse = gitmodules.get(name, "esmsparse") esmtag = gitmodules.get(name, "esmtag") path = gitmodules.get(name, "path") url = gitmodules.get(name, "url") - if esmrequired not in requiredlist: - if "T:F" in esmrequired: + + if esmrequired and esmrequired not in requiredlist: + if "T:F" == esmrequired: print("Skipping optional component {}".format(name)) continue if esmsparse: + logger.debug( + f"Callng submodule_sparse_checkout({name} {url}, {path}, {esmsparse}, {esmtag}" + ) submodule_sparse_checkout(name, url, path, esmsparse, tag=esmtag) else: + logger.debug( + "Calling submodule_checkout({},{},{})".format(root_dir, name, path) + ) submodule_checkout(root_dir, name, path) @@ -281,6 +292,7 @@ def _main_func(): ) = commandline_arguments() if verbose: print(f"action is {action}") + gitmodules = GitModules( confpath=root_dir, conffile=file_name, diff --git a/modules/gitmodules.py b/modules/gitmodules.py index 3ef3ed87fd..2858629270 100644 --- a/modules/gitmodules.py +++ b/modules/gitmodules.py @@ -4,7 +4,7 @@ from modules.lstripreader import LstripReader -class GitModules(ConfigParser.ConfigParser): +class GitModules(ConfigParser): def __init__( self, confpath=os.getcwd(), @@ -12,9 +12,9 @@ def __init__( includelist=None, excludelist=None, ): - ConfigParser.ConfigParser.__init__(self) - self.read_file(LstripReader(confpath), source=conffile) + ConfigParser.__init__(self) self.conf_file = os.path.join(confpath, conffile) + self.read_file(LstripReader(self.conf_file), source=conffile) self.includelist = includelist self.excludelist = excludelist @@ -22,12 +22,15 @@ def set(self, name, option, value): section = f'submodule "{name}"' if not self.has_section(section): self.add_section(section) - ConfigParser.ConfigParser.set(self, section, option, str(value)) + ConfigParser.set(self, section, option, str(value)) - def get(self, name, option): + # pylint: disable=redefined-builtin, arguments-differ + def get(self, name, option, raw=False, vars=None, fallback=None): section = f'submodule "{name}"' try: - return ConfigParser.ConfigParser.get(self, section, option) + return ConfigParser.get( + self, section, option, raw=raw, vars=vars, fallback=fallback + ) except ConfigParser.NoOptionError: return None @@ -39,7 +42,7 @@ def __del__(self): def sections(self): names = [] - for section in ConfigParser.ConfigParser.sections(self): + for section in ConfigParser.sections(self): name = section[11:-1] if self.includelist and name not in self.includelist: continue @@ -48,6 +51,6 @@ def sections(self): names.append(name) return names - def items(self, name): + def items(self, name, raw=False, vars=None): section = f'submodule "{name}"' - return 
ConfigParser.ConfigParser.items(section) + return ConfigParser.items(section, raw=raw, vars=vars) From a6ce960bc9d102007608ab7dde8b9fd4ad590dec Mon Sep 17 00:00:00 2001 From: James Edwards Date: Sat, 6 Jan 2024 11:41:25 -0700 Subject: [PATCH 020/161] clean up and refactor, rename to git-fleximod --- git-fleximod | 48 ++++++++++++++++++++++++++++++++++-------------- modules/utils.py | 13 +++++++++++++ 2 files changed, 47 insertions(+), 14 deletions(-) diff --git a/git-fleximod b/git-fleximod index 00fc2446c3..44a86c50bc 100755 --- a/git-fleximod +++ b/git-fleximod @@ -168,16 +168,14 @@ def commandline_arguments(args=None): ) -def submodule_sparse_checkout(name, url, path, sparsefile, tag="master"): +def submodule_sparse_checkout(root_dir, name, url, path, sparsefile, tag="master"): # first create the module directory if not os.path.isdir(path): os.makedirs(path) # Check first if the module is already defined # and the sparse-checkout file exists - git = GitInterface(os.getcwd()) - topdir = git.git_operation("rev-parse", "--show-toplevel").rstrip() - - topgit = os.path.join(topdir, ".git", "modules") + git = GitInterface(root_dir) + topgit = os.path.join(root_dir, ".git", "modules") gitsparse = os.path.join(topgit, name, "info", "sparse-checkout") if os.path.isfile(gitsparse): logger.warning("submodule {} is already initialized".format(name)) @@ -227,7 +225,7 @@ def submodule_checkout(root, name, path): def submodules_status(gitmodules, root_dir): for name in gitmodules.sections(): path = gitmodules.get(name, "path") - tag = gitmodules.git(name, "esmtag") + tag = gitmodules.get(name, "esmtag") with utils.pushd(path): git = GitInterface(os.path.join(root_dir, path)) atag = git.git_operation("describe", "--tags", "--always").rstrip() @@ -236,7 +234,12 @@ def submodules_status(gitmodules, root_dir): elif tag: print(f"Submodule {name} at tag {tag}") else: - print(f"Submodule {name} has no tag defined in .gitmodules") + print( + f"Submodule {name} has no tag defined in .gitmodules, module at {atag}" + ) + status = git.git_operation("status") + if "nothing to commit" not in status: + print(status) def submodules_update(gitmodules, root_dir): @@ -245,18 +248,26 @@ def submodules_update(gitmodules, root_dir): path = gitmodules.get(name, "path") url = gitmodules.get(name, "url") if os.path.exists(os.path.join(path, ".git")): - with utils.pushd(root_dir): - git = GitInterface(root_dir) + submoddir = os.path.join(root_dir, path) + with utils.pushd(submoddir): + git = GitInterface(submoddir) # first make sure the url is correct upstream = git.git_operation("ls-remote", "--get-url").rstrip() + newremote = "origin" if upstream != url: # TODO - this needs to be a unique name - git.git_operation("remote", "add", "newbranch", url) - git.git_operation("checkout", esmtag) + remotes = git.git_operation("remote", "-v") + newremote = "newremote" + git.git_operation("remote", "add", newremote, url) + + tags = git.git_operation("tag", "-l") + if esmtag not in tags: + git.git_operation("fetch", newremote, "--tags") + + git.git_operation("checkout", esmtag) def submodules_install(gitmodules, root_dir, requiredlist): - print(gitmodules.sections()) for name in gitmodules.sections(): esmrequired = gitmodules.get(name, "esmrequired") esmsparse = gitmodules.get(name, "esmsparse") @@ -270,9 +281,9 @@ def submodules_install(gitmodules, root_dir, requiredlist): continue if esmsparse: logger.debug( - f"Callng submodule_sparse_checkout({name} {url}, {path}, {esmsparse}, {esmtag}" + f"Callng 
submodule_sparse_checkout({root_dir}, {name}, {url}, {path}, {esmsparse}, {esmtag}" ) - submodule_sparse_checkout(name, url, path, esmsparse, tag=esmtag) + submodule_sparse_checkout(root_dir, name, url, path, esmsparse, tag=esmtag) else: logger.debug( "Calling submodule_checkout({},{},{})".format(root_dir, name, path) @@ -293,6 +304,15 @@ def _main_func(): if verbose: print(f"action is {action}") + if not os.path.isfile(os.path.join(root_dir, file_name)): + file_path = utils.find_upwards(root_dir, file_name) + + if file_path is None: + fatal_error( + "No {} found in {} or any of it's parents".format(file_name, root_dir) + ) + root_dir = os.path.dirname(file_path) + print(f"root_dir is {root_dir}") gitmodules = GitModules( confpath=root_dir, conffile=file_name, diff --git a/modules/utils.py b/modules/utils.py index 41c6bb25c8..f0753367e5 100644 --- a/modules/utils.py +++ b/modules/utils.py @@ -9,6 +9,7 @@ import subprocess import sys from threading import Timer +from pathlib import Path LOCAL_PATH_INDICATOR = "." # --------------------------------------------------------------------- @@ -55,6 +56,18 @@ def printlog(msg, **kwargs): sys.stdout.flush() +def find_upwards(root_dir, filename): + """Find a file in root dir or any of it's parents""" + d = Path(root_dir) + root = Path(d.root) + while d != root: + attempt = d / filename + if attempt.exists(): + return attempt + d = d.parent + return None + + def last_n_lines(the_string, n_lines, truncation_message=None): """Returns the last n lines of the given string From 816158ceb02abca2b360811beba25a5c1b98c5e2 Mon Sep 17 00:00:00 2001 From: James Edwards Date: Sat, 6 Jan 2024 12:11:04 -0700 Subject: [PATCH 021/161] add version.txt and .pre-commit-config.yaml --- .pre-commit-config.yaml | 18 ++++++++++++++++++ version.txt | 1 + 2 files changed, 19 insertions(+) create mode 100644 .pre-commit-config.yaml create mode 100644 version.txt diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 0000000000..2f6089da72 --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,18 @@ +exclude: ^utils/.*$ + +repos: + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.0.1 + hooks: + - id: end-of-file-fixer + - id: trailing-whitespace + - repo: https://github.com/psf/black + rev: 22.3.0 + hooks: + - id: black + - repo: https://github.com/PyCQA/pylint + rev: v2.11.1 + hooks: + - id: pylint + args: + - --disable=I,C,R,logging-not-lazy,wildcard-import,unused-wildcard-import,fixme,broad-except,bare-except,eval-used,exec-used,global-statement,logging-format-interpolation,no-name-in-module,arguments-renamed,unspecified-encoding,protected-access,import-error,no-member diff --git a/version.txt b/version.txt new file mode 100644 index 0000000000..a3dce6cd3a --- /dev/null +++ b/version.txt @@ -0,0 +1 @@ +v0.0.2 From 3be6537e39f7d9209be9b66b36756da022455612 Mon Sep 17 00:00:00 2001 From: James Edwards Date: Sat, 6 Jan 2024 15:21:15 -0700 Subject: [PATCH 022/161] fix issue in update --- git-fleximod | 15 ++++++++++++--- 1 file changed, 12 insertions(+), 3 deletions(-) diff --git a/git-fleximod b/git-fleximod index 44a86c50bc..e5978cff6d 100755 --- a/git-fleximod +++ b/git-fleximod @@ -257,11 +257,20 @@ def submodules_update(gitmodules, root_dir): if upstream != url: # TODO - this needs to be a unique name remotes = git.git_operation("remote", "-v") - newremote = "newremote" - git.git_operation("remote", "add", newremote, url) + if url in remotes: + for line in remotes: + if url in line and "fetch" in line: + newremote = 
line.split()[0] + break + else: + i = 0 + while newremote in remotes: + i = i + 1 + newremote = f"newremote.{i:02d}" + git.git_operation("remote", "add", newremote, url) tags = git.git_operation("tag", "-l") - if esmtag not in tags: + if esmtag and esmtag not in tags: git.git_operation("fetch", newremote, "--tags") git.git_operation("checkout", esmtag) From d709c124c284c1ad007caed586db93914d78be47 Mon Sep 17 00:00:00 2001 From: James Edwards Date: Sat, 6 Jan 2024 15:47:38 -0700 Subject: [PATCH 023/161] make action a required subargument to look more like a git subcommand --- git-fleximod | 38 +++++++++++++------------------------- modules/gitmodules.py | 3 --- 2 files changed, 13 insertions(+), 28 deletions(-) diff --git a/git-fleximod b/git-fleximod index e5978cff6d..d13f1b9f08 100755 --- a/git-fleximod +++ b/git-fleximod @@ -23,6 +23,14 @@ def commandline_arguments(args=None): # # user options # + choices = ["update", "install", "status"] + parser.add_argument( + "action", + choices=choices, + default="install", + help=f"Subcommand of fleximod, choices are {choices}", + ) + parser.add_argument( "components", nargs="*", @@ -62,24 +70,6 @@ def commandline_arguments(args=None): "optional submodules relative to the toplevel directory.", ) - parser.add_argument( - "-S", - "--status", - action="store_true", - default=False, - help="Output the status of the repositories managed by " - "%(prog)s. By default only summary information " - "is provided. Use the verbose option to see details.", - ) - - parser.add_argument( - "-u", - "--update", - action="store_true", - default=False, - help="Update submodules to the tags defined in .gitmodules.", - ) - parser.add_argument( "-v", "--verbose", @@ -142,11 +132,8 @@ def commandline_arguments(args=None): else: esmrequired = ["T:T", "I:T"] - if options.status: - action = "status" - elif options.update: - action = "update" - else: + action = options.action + if not action: action = "install" if options.version: @@ -226,8 +213,9 @@ def submodules_status(gitmodules, root_dir): for name in gitmodules.sections(): path = gitmodules.get(name, "path") tag = gitmodules.get(name, "esmtag") - with utils.pushd(path): - git = GitInterface(os.path.join(root_dir, path)) + newpath = os.path.join(root_dir, path) + with utils.pushd(newpath): + git = GitInterface(newpath) atag = git.git_operation("describe", "--tags", "--always").rstrip() if tag and atag != tag: print(f"Submodule {name} {atag} is out of sync with .gitmodules {tag}") diff --git a/modules/gitmodules.py b/modules/gitmodules.py index 2858629270..5af10be406 100644 --- a/modules/gitmodules.py +++ b/modules/gitmodules.py @@ -37,9 +37,6 @@ def get(self, name, option, raw=False, vars=None, fallback=None): def save(self): self.write(open(self.conf_file, "w")) - def __del__(self): - self.save() - def sections(self): names = [] for section in ConfigParser.sections(self): From 83aea04a4366e246e5a107e88ef29aa347657e24 Mon Sep 17 00:00:00 2001 From: James Edwards Date: Sat, 6 Jan 2024 16:02:03 -0700 Subject: [PATCH 024/161] add documentation --- modules/gitmodules.py | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/modules/gitmodules.py b/modules/gitmodules.py index 5af10be406..9e38a5806c 100644 --- a/modules/gitmodules.py +++ b/modules/gitmodules.py @@ -12,6 +12,12 @@ def __init__( includelist=None, excludelist=None, ): + """ + confpath: Path to the directory containing the .gitmodules file (defaults to the current working directory). 
+ conffile: Name of the configuration file (defaults to .gitmodules). + includelist: Optional list of submodules to include. + excludelist: Optional list of submodules to exclude. + """ ConfigParser.__init__(self) self.conf_file = os.path.join(confpath, conffile) self.read_file(LstripReader(self.conf_file), source=conffile) @@ -19,6 +25,11 @@ def __init__( self.excludelist = excludelist def set(self, name, option, value): + """ + Sets a configuration value for a specific submodule: + Ensures the appropriate section exists for the submodule. + Calls the parent class's set method to store the value. + """ section = f'submodule "{name}"' if not self.has_section(section): self.add_section(section) @@ -26,6 +37,11 @@ def set(self, name, option, value): # pylint: disable=redefined-builtin, arguments-differ def get(self, name, option, raw=False, vars=None, fallback=None): + """ + Retrieves a configuration value for a specific submodule: + Uses the parent class's get method to access the value. + Handles potential errors if the section or option doesn't exist. + """ section = f'submodule "{name}"' try: return ConfigParser.get( @@ -38,6 +54,7 @@ def save(self): self.write(open(self.conf_file, "w")) def sections(self): + """Strip the submodule part out of section and just use the name""" names = [] for section in ConfigParser.sections(self): name = section[11:-1] From 905824aefcd187a25f4133826bd0a0a76edcfcdd Mon Sep 17 00:00:00 2001 From: James Edwards Date: Sun, 7 Jan 2024 09:09:23 -0700 Subject: [PATCH 025/161] add README and License --- License | 20 ++++++++++++++++++++ README.md | 53 +++++++++++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 73 insertions(+) create mode 100644 License create mode 100644 README.md diff --git a/License b/License new file mode 100644 index 0000000000..2c6fe768c2 --- /dev/null +++ b/License @@ -0,0 +1,20 @@ +Copyright 2024 National Center for Atmospheric Sciences (NCAR) + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +“Software”), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/README.md b/README.md new file mode 100644 index 0000000000..4ee56bc4a4 --- /dev/null +++ b/README.md @@ -0,0 +1,53 @@ +# git-fleximod + +Flexible Submodule Management for Git + +## Overview + +Git-fleximod is a Python-based tool that extends Git's submodule capabilities, offering additional features for managing submodules in a more flexible and efficient way.
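The GitModules wrapper documented in PATCH 024 above subclasses ConfigParser, so reading .gitmodules metadata reduces to a few calls. The snippet below is a minimal usage sketch, not part of any patch in this series; the repository path is hypothetical and it assumes the modules package from these patches is importable.

```python
# Minimal sketch of driving the GitModules class shown above.
# "/path/to/clone" is a hypothetical checkout containing a .gitmodules file.
from modules.gitmodules import GitModules

gitmodules = GitModules(confpath="/path/to/clone", conffile=".gitmodules")
for name in gitmodules.sections():  # names with the 'submodule "..."' wrapper stripped
    path = gitmodules.get(name, "path")
    url = gitmodules.get(name, "url")
    print(f"{name}: {path} <- {url}")
```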
+ +## Installation + + Install using pip: + pip install git-fleximod + +## Usage + + Basic Usage: + git fleximod [options] + Available Commands: + install: Install submodules according to configuration. + status: Display the status of submodules. + update: Update submodules to their latest commits. + Additional Options: + See git fleximod --help for more details. + +## Supported .gitmodules Variables + + fxtag: Specify a specific tag or branch to checkout for a submodule. + fxrequired: Mark a submodule's checkout behavior, with allowed values: + - T:T: Top-level and required (checked out only when this is the Toplevel module). + - T:F: Top-level and optional (checked out with --optional flag if this is the Toplevel module). + - I:T: Internal and required (always checked out). + - I:F: Internal and optional (checked out with --optional flag). + fxsparse: Enable sparse checkout for a submodule, pointing to a file containing sparse checkout paths. + +## Sparse Checkouts + + To enable sparse checkout for a submodule, set the fxsparse variable + in the .gitmodules file to the path of a file containing the desired + sparse checkout paths. Git-fleximod will automatically configure + sparse checkout based on this file when applicable commands are run. + +## Examples + + Installing submodules with optional ones: git fleximod install --optional + Checking out a specific tag for a submodule: git fleximod update --fxtag=v1.2.3 submodule-name + +## Contributing + +We welcome contributions! Please see the CONTRIBUTING.md file for guidelines. + +## License + +Git-fleximod is released under the MIT License. From 3c723a86b39db2274c223e4a8ffb1d83f5b01ec1 Mon Sep 17 00:00:00 2001 From: James Edwards Date: Sun, 7 Jan 2024 10:56:42 -0700 Subject: [PATCH 026/161] update README --- README.md | 34 +++++++++++++++++++++++++++++----- git-fleximod | 39 +++++++++++++++++++++------------------ 2 files changed, 50 insertions(+), 23 deletions(-) diff --git a/README.md b/README.md index 4ee56bc4a4..da725ab5f5 100644 --- a/README.md +++ b/README.md @@ -8,8 +8,9 @@ Git-fleximod is a Python-based tool that extends Git's submodule capabilities, o ## Installation - Install using pip: - pip install git-fleximod +#TODO Install using pip: +# pip install git-fleximod + If you choose to locate git-fleximod in your path you can access it via command: git fleximod ## Usage @@ -18,7 +19,7 @@ Git-fleximod is a Python-based tool that extends Git's submodule capabilities, o Available Commands: install: Install submodules according to configuration. status: Display the status of submodules. - update: Update submodules to their latest commits. + update: Update submodules to the tag indicated in .gitmodules variable fxtag. Additional Options: See git fleximod --help for more details. @@ -38,11 +39,34 @@ Git-fleximod is a Python-based tool that extends Git's submodule capabilities, o in the .gitmodules file to the path of a file containing the desired sparse checkout paths. Git-fleximod will automatically configure sparse checkout based on this file when applicable commands are run. + See [git-sparse-checkout](https://git-scm.com/docs/git-sparse-checkout#_internalsfull_pattern_set) for details on the format of this file. 
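For illustration, the file that fxsparse points to uses the gitignore-style patterns of the full pattern set linked above, one pattern per line, and only matching paths are populated in the submodule work tree. The file name and paths below are hypothetical, not taken from any patch in this series.

```Text
# Hypothetical paths file, referenced from .gitmodules as e.g. fxsparse = ../.sparse_checkout_paths
# One gitignore-style pattern per line; a leading ! excludes paths that an earlier pattern included.
src/
cmake/
!src/tests/
```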
## Examples - Installing submodules with optional ones: git fleximod install --optional - Checking out a specific tag for a submodule: git fleximod update --fxtag=v1.2.3 submodule-name + Installing submodules including optional ones: git fleximod install --optional + Update a specific submodule to the fxtag indicated in .gitmodules: git fleximod update submodule-name + Example .gitmodules entry: + [submodule "cosp2"] + path = src/physics/cosp2/src + url = https://github.com/CFMIP/COSPv2.0 + fxsparse = ../.cosp_sparse_checkout + fxtag = v2.1.4cesm + + This indicates that submodule named cosp2 at tag v2.1.4cesm should + be checked out into directory src/physics/cosp2/src relative to the + .gitmodules directory from the given url and that it should use the + sparse checkout as described in file ../.cosp_sparse_checkout relative + to the path directory. + + [submodule "cime"] + path = cime + url = https://github.com/jedwards4b/cime + fxrequired = T:T + fxtag = cime6.0.198_rme01 + + This indicates that submodule cime should be checked out into a directory cime + at tag cime6.0.198_rme01 from url https://github.com/jedwards4b/cime, this should + only be done if this .gitmodules file is at the TopLevel of the repository clone. ## Contributing diff --git a/git-fleximod b/git-fleximod index d13f1b9f08..f03984f83c 100755 --- a/git-fleximod +++ b/git-fleximod @@ -128,9 +128,9 @@ def commandline_arguments(args=None): options = parser.parse_args() if options.optional: - esmrequired = ["T:T", "T:F", "I:T"] + fxrequired = ["T:T", "T:F", "I:T"] else: - esmrequired = ["T:T", "I:T"] + fxrequired = ["T:T", "I:T"] action = options.action if not action: @@ -147,7 +147,7 @@ def commandline_arguments(args=None): return ( options.path, options.gitmodules, - esmrequired, + fxrequired, options.components, options.exclude, options.verbose, @@ -212,7 +212,7 @@ def submodule_checkout(root, name, path): def submodules_status(gitmodules, root_dir): for name in gitmodules.sections(): path = gitmodules.get(name, "path") - tag = gitmodules.get(name, "esmtag") + tag = gitmodules.get(name, "fxtag") newpath = os.path.join(root_dir, path) with utils.pushd(newpath): git = GitInterface(newpath) @@ -232,9 +232,10 @@ def submodules_status(gitmodules, root_dir): def submodules_update(gitmodules, root_dir): for name in gitmodules.sections(): - esmtag = gitmodules.get(name, "esmtag") + fxtag = gitmodules.get(name, "fxtag") path = gitmodules.get(name, "path") url = gitmodules.get(name, "url") + print(f"name={name} path={path} url={url} fxtag={fxtag}") if os.path.exists(os.path.join(path, ".git")): submoddir = os.path.join(root_dir, path) with utils.pushd(submoddir): @@ -258,29 +259,31 @@ def submodules_update(gitmodules, root_dir): git.git_operation("remote", "add", newremote, url) tags = git.git_operation("tag", "-l") - if esmtag and esmtag not in tags: + if fxtag and fxtag not in tags: git.git_operation("fetch", newremote, "--tags") - - git.git_operation("checkout", esmtag) + if fxtag: + git.git_operation("checkout", fxtag) + else: + print(f"No fxtag found for submodule {name}") def submodules_install(gitmodules, root_dir, requiredlist): for name in gitmodules.sections(): - esmrequired = gitmodules.get(name, "esmrequired") - esmsparse = gitmodules.get(name, "esmsparse") - esmtag = gitmodules.get(name, "esmtag") + fxrequired = gitmodules.get(name, "fxrequired") + fxsparse = gitmodules.get(name, "fxsparse") + fxtag = gitmodules.get(name, "fxtag") path = gitmodules.get(name, "path") url = gitmodules.get(name, "url") - if esmrequired 
and esmrequired not in requiredlist: - if "T:F" == esmrequired: + if fxrequired and fxrequired not in requiredlist: + if "T:F" == fxrequired: print("Skipping optional component {}".format(name)) continue - if esmsparse: + if fxsparse: logger.debug( - f"Callng submodule_sparse_checkout({root_dir}, {name}, {url}, {path}, {esmsparse}, {esmtag}" + f"Callng submodule_sparse_checkout({root_dir}, {name}, {url}, {path}, {fxsparse}, {fxtag}" ) - submodule_sparse_checkout(root_dir, name, url, path, esmsparse, tag=esmtag) + submodule_sparse_checkout(root_dir, name, url, path, fxsparse, tag=fxtag) else: logger.debug( "Calling submodule_checkout({},{},{})".format(root_dir, name, path) @@ -292,7 +295,7 @@ def _main_func(): ( root_dir, file_name, - esmrequired, + fxrequired, includelist, excludelist, verbose, @@ -320,7 +323,7 @@ def _main_func(): if action == "update": submodules_update(gitmodules, root_dir) elif action == "install": - submodules_install(gitmodules, root_dir, esmrequired) + submodules_install(gitmodules, root_dir, fxrequired) elif action == "status": submodules_status(gitmodules, root_dir) else: From 3903206397dfebfd0ca2f02a53761466b79cfc89 Mon Sep 17 00:00:00 2001 From: James Edwards Date: Sun, 7 Jan 2024 11:05:00 -0700 Subject: [PATCH 027/161] trying to improve markdown formatting --- README.md | 71 ++++++++++++++++++++++++++++++++++++------------------- 1 file changed, 47 insertions(+), 24 deletions(-) diff --git a/README.md b/README.md index da725ab5f5..c5a885bd7e 100644 --- a/README.md +++ b/README.md @@ -43,30 +43,53 @@ Git-fleximod is a Python-based tool that extends Git's submodule capabilities, o ## Examples - Installing submodules including optional ones: git fleximod install --optional - Update a specific submodule to the fxtag indicated in .gitmodules: git fleximod update submodule-name - Example .gitmodules entry: - [submodule "cosp2"] - path = src/physics/cosp2/src - url = https://github.com/CFMIP/COSPv2.0 - fxsparse = ../.cosp_sparse_checkout - fxtag = v2.1.4cesm - - This indicates that submodule named cosp2 at tag v2.1.4cesm should - be checked out into directory src/physics/cosp2/src relative to the - .gitmodules directory from the given url and that it should use the - sparse checkout as described in file ../.cosp_sparse_checkout relative - to the path directory. - - [submodule "cime"] - path = cime - url = https://github.com/jedwards4b/cime - fxrequired = T:T - fxtag = cime6.0.198_rme01 - - This indicates that submodule cime should be checked out into a directory cime - at tag cime6.0.198_rme01 from url https://github.com/jedwards4b/cime, this should - only be done if this .gitmodules file is at the TopLevel of the repository clone. +Here are some common usage examples: + + Installing submodules, including optional ones: + Bash + + git fleximod install --optional + + +Updating a specific submodule to the fxtag indicated in .gitmodules: + +Bash + +git fleximod update submodule-name + +Example .gitmodules entry: +Ini, TOML + +[submodule "cosp2"] + path = src/physics/cosp2/src + url = https://github.com/CFMIP/COSPv2.0 + fxsparse = ../.cosp_sparse_checkout + fxtag = v2.1.4cesm + +Explanation: + + This entry indicates that the submodule named cosp2 at tag + v2.1.4cesm should be checked out into the directory + src/physics/cosp2/src relative to the .gitmodules directory. It + should be checked out from the URL + https://github.com/CFMIP/COSPv2.0 and use sparse checkout as + described in the file ../.cosp_sparse_checkout relative to the + path directory. 
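   Because .gitmodules is an INI-style file, the extended fx* fields used above
   can be inspected with Python's standard configparser; the only wrinkle is that
   git indents the keys, so leading whitespace has to be stripped first. A minimal
   sketch (this is not the tool's own parser, just an illustration of where the
   values live):

```python
from configparser import ConfigParser

with open(".gitmodules") as fobj:
    # git indents the key = value lines, which configparser would treat as continuations
    text = "".join(line.lstrip() for line in fobj)

parser = ConfigParser()
parser.read_string(text)

for section in parser.sections():            # e.g. 'submodule "cosp2"'
    name = section.split('"')[1]
    print(name,
          parser.get(section, "path", fallback=None),
          parser.get(section, "fxtag", fallback=None),
          parser.get(section, "fxsparse", fallback=None),
          parser.get(section, "fxrequired", fallback=None))
```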
+ +Additional example: +Ini, TOML + +[submodule "cime"] + path = cime + url = https://github.com/jedwards4b/cime + fxrequired = T:T + fxtag = cime6.0.198_rme01 + + +Explanation: + + This entry indicates that the submodule cime should be checked out into a directory named cime at tag cime6.0.198_rme01 from the URL https://github.com/jedwards4b/cime. + This should only be done if the .gitmodules file is at the top level of the repository clone. ## Contributing From 8bd89cf9b7bdd710b0aa23def9a4b2f21c047b84 Mon Sep 17 00:00:00 2001 From: James Edwards Date: Sun, 7 Jan 2024 11:09:47 -0700 Subject: [PATCH 028/161] trying to improve markdown --- README.md | 66 +++++++++++++++++++++++++++---------------------------- 1 file changed, 32 insertions(+), 34 deletions(-) diff --git a/README.md b/README.md index c5a885bd7e..f3a0f889cb 100644 --- a/README.md +++ b/README.md @@ -46,27 +46,24 @@ Git-fleximod is a Python-based tool that extends Git's submodule capabilities, o Here are some common usage examples: Installing submodules, including optional ones: - Bash - + ```Bash git fleximod install --optional - - -Updating a specific submodule to the fxtag indicated in .gitmodules: - -Bash - -git fleximod update submodule-name - -Example .gitmodules entry: -Ini, TOML - -[submodule "cosp2"] - path = src/physics/cosp2/src - url = https://github.com/CFMIP/COSPv2.0 - fxsparse = ../.cosp_sparse_checkout - fxtag = v2.1.4cesm - -Explanation: + ``` + + Updating a specific submodule to the fxtag indicated in .gitmodules: + + ```Bash + git fleximod update submodule-name + ``` + Example .gitmodules entry: + ```Ini, TOML + [submodule "cosp2"] + path = src/physics/cosp2/src + url = https://github.com/CFMIP/COSPv2.0 + fxsparse = ../.cosp_sparse_checkout + fxtag = v2.1.4cesm + ``` + Explanation: This entry indicates that the submodule named cosp2 at tag v2.1.4cesm should be checked out into the directory @@ -76,20 +73,21 @@ Explanation: described in the file ../.cosp_sparse_checkout relative to the path directory. -Additional example: -Ini, TOML - -[submodule "cime"] - path = cime - url = https://github.com/jedwards4b/cime - fxrequired = T:T - fxtag = cime6.0.198_rme01 - - -Explanation: - - This entry indicates that the submodule cime should be checked out into a directory named cime at tag cime6.0.198_rme01 from the URL https://github.com/jedwards4b/cime. - This should only be done if the .gitmodules file is at the top level of the repository clone. + Additional example: + ```Ini, TOML + [submodule "cime"] + path = cime + url = https://github.com/jedwards4b/cime + fxrequired = T:T + fxtag = cime6.0.198_rme01 + ``` + + Explanation: + + This entry indicates that the submodule cime should be checked out + into a directory named cime at tag cime6.0.198_rme01 from the URL + https://github.com/jedwards4b/cime. This should only be done if + the .gitmodules file is at the top level of the repository clone. 
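The fxrequired codes in entries like the one above gate which submodules a given
invocation touches. Below is a small sketch of the documented meaning of the codes;
the helper names are illustrative only and are not part of the tool.

```python
def active_codes(toplevel, optional):
    """fxrequired values handled in this context, per the table of codes above."""
    if not toplevel:                     # recursing inside an already checked-out submodule
        return {"I:T"}
    return {"T:T", "T:F", "I:T"} if optional else {"T:T", "I:T"}

def should_checkout(fxrequired, toplevel=True, optional=False):
    # entries with no fxrequired at all are always handled
    return fxrequired is None or fxrequired in active_codes(toplevel, optional)

assert should_checkout("T:T")                           # required at the top level
assert not should_checkout("T:F")                       # optional: needs --optional
assert should_checkout("T:F", optional=True)
assert not should_checkout("T:T", toplevel=False)       # top-level only, skipped when recursing
assert should_checkout("I:T", toplevel=False)           # internal and required
```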
## Contributing From e83bc7a865540751093c393f9edd8f66a450cf40 Mon Sep 17 00:00:00 2001 From: James Edwards Date: Sun, 7 Jan 2024 11:12:13 -0700 Subject: [PATCH 029/161] trying to improve markdown --- README.md | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/README.md b/README.md index f3a0f889cb..199d2064c0 100644 --- a/README.md +++ b/README.md @@ -46,17 +46,17 @@ Git-fleximod is a Python-based tool that extends Git's submodule capabilities, o Here are some common usage examples: Installing submodules, including optional ones: - ```Bash - git fleximod install --optional - ``` +```bash + git fleximod install --optional +``` Updating a specific submodule to the fxtag indicated in .gitmodules: - ```Bash + ```bash git fleximod update submodule-name ``` Example .gitmodules entry: - ```Ini, TOML + ```ini, toml [submodule "cosp2"] path = src/physics/cosp2/src url = https://github.com/CFMIP/COSPv2.0 @@ -74,7 +74,7 @@ Here are some common usage examples: path directory. Additional example: - ```Ini, TOML + ```ini, toml [submodule "cime"] path = cime url = https://github.com/jedwards4b/cime From 967edf4ced928a444162079227637c794ed562e0 Mon Sep 17 00:00:00 2001 From: James Edwards Date: Sun, 7 Jan 2024 11:14:22 -0700 Subject: [PATCH 030/161] trying to improve markdown --- README.md | 49 ++++++++++++++++++++++++------------------------- 1 file changed, 24 insertions(+), 25 deletions(-) diff --git a/README.md b/README.md index 199d2064c0..910858e79a 100644 --- a/README.md +++ b/README.md @@ -45,49 +45,48 @@ Git-fleximod is a Python-based tool that extends Git's submodule capabilities, o Here are some common usage examples: - Installing submodules, including optional ones: +Installing submodules, including optional ones: ```bash git fleximod install --optional ``` - Updating a specific submodule to the fxtag indicated in .gitmodules: +Updating a specific submodule to the fxtag indicated in .gitmodules: - ```bash +```bash git fleximod update submodule-name - ``` - Example .gitmodules entry: - ```ini, toml +``` +Example .gitmodules entry: +```ini, toml [submodule "cosp2"] path = src/physics/cosp2/src url = https://github.com/CFMIP/COSPv2.0 fxsparse = ../.cosp_sparse_checkout fxtag = v2.1.4cesm - ``` - Explanation: - - This entry indicates that the submodule named cosp2 at tag - v2.1.4cesm should be checked out into the directory - src/physics/cosp2/src relative to the .gitmodules directory. It - should be checked out from the URL - https://github.com/CFMIP/COSPv2.0 and use sparse checkout as - described in the file ../.cosp_sparse_checkout relative to the - path directory. - - Additional example: - ```ini, toml +``` +Explanation: + +This entry indicates that the submodule named cosp2 at tag v2.1.4cesm +should be checked out into the directory src/physics/cosp2/src +relative to the .gitmodules directory. It should be checked out from +the URL https://github.com/CFMIP/COSPv2.0 and use sparse checkout as +described in the file ../.cosp_sparse_checkout relative to the path +directory. + +Additional example: +```ini, toml [submodule "cime"] path = cime url = https://github.com/jedwards4b/cime fxrequired = T:T fxtag = cime6.0.198_rme01 - ``` +``` - Explanation: +Explanation: - This entry indicates that the submodule cime should be checked out - into a directory named cime at tag cime6.0.198_rme01 from the URL - https://github.com/jedwards4b/cime. This should only be done if - the .gitmodules file is at the top level of the repository clone. 
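At its core, the update and status decision for an entry like the one above is a
comparison between the tag the working tree currently sits on and the fxtag recorded
in .gitmodules. A rough standalone sketch follows; the helper names are ours, not the
tool's API.

```python
import subprocess

def current_tag(repo_path):
    """Tag (or abbreviated hash) that the submodule working tree currently sits on."""
    out = subprocess.check_output(
        ["git", "-C", repo_path, "describe", "--tags", "--always"])
    return out.decode().strip()

def needs_update(repo_path, fxtag):
    """True when the checked-out state differs from the fxtag recorded in .gitmodules."""
    return bool(fxtag) and current_tag(repo_path) != fxtag

# Hypothetical usage, matching the cime entry above:
# if needs_update("cime", "cime6.0.198_rme01"):
#     subprocess.check_call(["git", "-C", "cime", "fetch", "origin", "--tags"])
#     subprocess.check_call(["git", "-C", "cime", "checkout", "cime6.0.198_rme01"])
```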
+This entry indicates that the submodule cime should be checked out +into a directory named cime at tag cime6.0.198_rme01 from the URL +https://github.com/jedwards4b/cime. This should only be done if +the .gitmodules file is at the top level of the repository clone. ## Contributing From 4926864512c75496530bc713ddadb26071f16ac9 Mon Sep 17 00:00:00 2001 From: James Edwards Date: Sun, 7 Jan 2024 13:17:00 -0700 Subject: [PATCH 031/161] add pip setup.py file --- setup.py | 26 ++++++++++++++++++++++++++ 1 file changed, 26 insertions(+) create mode 100644 setup.py diff --git a/setup.py b/setup.py new file mode 100644 index 0000000000..2d4cb3642b --- /dev/null +++ b/setup.py @@ -0,0 +1,26 @@ +import setuptools +import os + +with open("README.md", "r") as fh: + long_description = fh.read() +with open("version.txt", "r") as fh: + version = fh.read() + cwd = os.getcwd() +setuptools.setup( + name="git-fleximod", # This is the name of the package + version=version, # The initial release version + author="Jim Edwards", # Full name of the author + description="Extended support for git-submodule and git-sparse-checkout", + long_description=long_description, # Long description read from the the readme file + long_description_content_type="text/markdown", + packages=setuptools.find_packages(), # List of all python modules to be installed + classifiers=[ + "Programming Language :: Python :: 3", + "License :: OSI Approved :: MIT License", + "Operating System :: OS Independent", + ], # Information to filter the project on PyPi website + python_requires='>=3.6', # Minimum version requirement of the package + py_modules=['git-fleximod'], # Name of the python package + package_dir={'git-fleximod':'.'}, # Directory of the source code of the package + install_requires=[] # Install other dependencies if any +) From d2b3985e4c0d43eb5afbf497f50fd67adde2d0e3 Mon Sep 17 00:00:00 2001 From: James Edwards Date: Sun, 7 Jan 2024 13:22:48 -0700 Subject: [PATCH 032/161] bug fix --- git-fleximod | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/git-fleximod b/git-fleximod index f03984f83c..0f0fc36347 100755 --- a/git-fleximod +++ b/git-fleximod @@ -169,7 +169,7 @@ def submodule_sparse_checkout(root_dir, name, url, path, sparsefile, tag="master return # initialize a new git repo and set the sparse checkout flag - sprepo_git = GitInterface(os.path.join(topdir, path)) + sprepo_git = GitInterface(os.path.join(root_dir, path)) sprepo_git.config_set_value("core", "sparseCheckout", "true") # set the repository remote From 27375e88a3ee5d08ffc567f2d52f7ab6a7d1df7e Mon Sep 17 00:00:00 2001 From: James Edwards Date: Sun, 7 Jan 2024 14:23:55 -0700 Subject: [PATCH 033/161] improve messages --- git-fleximod | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/git-fleximod b/git-fleximod index 0f0fc36347..69491a5fde 100755 --- a/git-fleximod +++ b/git-fleximod @@ -235,7 +235,7 @@ def submodules_update(gitmodules, root_dir): fxtag = gitmodules.get(name, "fxtag") path = gitmodules.get(name, "path") url = gitmodules.get(name, "url") - print(f"name={name} path={path} url={url} fxtag={fxtag}") + logger.info(f"name={name} path={path} url={url} fxtag={fxtag}") if os.path.exists(os.path.join(path, ".git")): submoddir = os.path.join(root_dir, path) with utils.pushd(submoddir): @@ -261,10 +261,14 @@ def submodules_update(gitmodules, root_dir): tags = git.git_operation("tag", "-l") if fxtag and fxtag not in tags: git.git_operation("fetch", newremote, "--tags") - if fxtag: + atag = git.git_operation("describe", 
"--tags", "--always").rstrip() + if fxtag and fxtag != atag: + print(f"Updating {name} to {fxtag}") git.git_operation("checkout", fxtag) - else: + elif not fxtag: print(f"No fxtag found for submodule {name}") + else: + print(f"submodule {name} up to date.") def submodules_install(gitmodules, root_dir, requiredlist): From a17fe845273a99aa767558b88a020c1d82724f98 Mon Sep 17 00:00:00 2001 From: James Edwards Date: Mon, 8 Jan 2024 12:03:03 -0700 Subject: [PATCH 034/161] fix some issues in sparse checkout --- git-fleximod | 30 ++++++++++++++++++++---------- 1 file changed, 20 insertions(+), 10 deletions(-) diff --git a/git-fleximod b/git-fleximod index 69491a5fde..51d0372655 100755 --- a/git-fleximod +++ b/git-fleximod @@ -162,11 +162,6 @@ def submodule_sparse_checkout(root_dir, name, url, path, sparsefile, tag="master # Check first if the module is already defined # and the sparse-checkout file exists git = GitInterface(root_dir) - topgit = os.path.join(root_dir, ".git", "modules") - gitsparse = os.path.join(topgit, name, "info", "sparse-checkout") - if os.path.isfile(gitsparse): - logger.warning("submodule {} is already initialized".format(name)) - return # initialize a new git repo and set the sparse checkout flag sprepo_git = GitInterface(os.path.join(root_dir, path)) @@ -175,16 +170,31 @@ def submodule_sparse_checkout(root_dir, name, url, path, sparsefile, tag="master # set the repository remote sprepo_git.git_operation("remote", "add", "origin", url) + + if os.path.isfile(os.path.join(root_dir, ".git")): + with open(os.path.join(root_dir, ".git")) as f: + gitpath = os.path.abspath(os.path.join(root_dir,f.read().split()[1])) + print(f"gitpath is {gitpath}") + topgit = os.path.join(gitpath, "modules") + else: + topgit = os.path.join(root_dir, ".git", "modules") + print(f"topgit is {topgit}") if not os.path.isdir(topgit): os.makedirs(topgit) topgit = os.path.join(topgit, name) + print(f"topgit is {topgit}") + if os.path.isdir(os.path.join(root_dir,path,".git")): + shutil.move(os.path.join(root_dir,path, ".git"), topgit) + with open(os.path.join(root_dir,path, ".git"), "w") as f: + f.write("gitdir: " + os.path.relpath(topgit, path)) + + gitsparse = os.path.abspath(os.path.join(topgit, "info", "sparse-checkout")) + if os.path.isfile(gitsparse): + logger.warning("submodule {} is already initialized".format(name)) + return - shutil.move(os.path.join(path, ".git"), topgit) - - shutil.copy(os.path.join(path, sparsefile), gitsparse) - with open(os.path.join(path, ".git"), "w") as f: - f.write("gitdir: " + os.path.relpath(topgit, path)) + shutil.copy(os.path.join(root_dir,path, sparsefile), gitsparse) # Finally checkout the repo sprepo_git.git_operation("fetch", "--depth=1", "origin", "--tags") From 8c9c123bd687e857a0d01205ddd0a3d9849fe6ec Mon Sep 17 00:00:00 2001 From: James Edwards Date: Mon, 8 Jan 2024 14:35:57 -0700 Subject: [PATCH 035/161] need to replace ssh with https --- git-fleximod | 39 +++++++++++++++++++++++---------------- 1 file changed, 23 insertions(+), 16 deletions(-) diff --git a/git-fleximod b/git-fleximod index 51d0372655..fbd6099a22 100755 --- a/git-fleximod +++ b/git-fleximod @@ -162,7 +162,7 @@ def submodule_sparse_checkout(root_dir, name, url, path, sparsefile, tag="master # Check first if the module is already defined # and the sparse-checkout file exists git = GitInterface(root_dir) - + print(f"root_dir is {root_dir}") # initialize a new git repo and set the sparse checkout flag sprepo_git = GitInterface(os.path.join(root_dir, path)) sprepo_git.config_set_value("core", 
"sparseCheckout", "true") @@ -170,7 +170,6 @@ def submodule_sparse_checkout(root_dir, name, url, path, sparsefile, tag="master # set the repository remote sprepo_git.git_operation("remote", "add", "origin", url) - if os.path.isfile(os.path.join(root_dir, ".git")): with open(os.path.join(root_dir, ".git")) as f: gitpath = os.path.abspath(os.path.join(root_dir,f.read().split()[1])) @@ -224,20 +223,23 @@ def submodules_status(gitmodules, root_dir): path = gitmodules.get(name, "path") tag = gitmodules.get(name, "fxtag") newpath = os.path.join(root_dir, path) - with utils.pushd(newpath): - git = GitInterface(newpath) - atag = git.git_operation("describe", "--tags", "--always").rstrip() - if tag and atag != tag: - print(f"Submodule {name} {atag} is out of sync with .gitmodules {tag}") - elif tag: - print(f"Submodule {name} at tag {tag}") - else: - print( - f"Submodule {name} has no tag defined in .gitmodules, module at {atag}" - ) - status = git.git_operation("status") - if "nothing to commit" not in status: - print(status) + if not os.path.exists(os.path.join(newpath, ".git")): + print(f"Submodule {name} not checked out") + else: + with utils.pushd(newpath): + git = GitInterface(newpath) + atag = git.git_operation("describe", "--tags", "--always").rstrip() + if tag and atag != tag: + print(f"Submodule {name} {atag} is out of sync with .gitmodules {tag}") + elif tag: + print(f"Submodule {name} at tag {tag}") + else: + print( + f"Submodule {name} has no tag defined in .gitmodules, module at {atag}" + ) + status = git.git_operation("status") + if "nothing to commit" not in status: + print(status) def submodules_update(gitmodules, root_dir): @@ -293,6 +295,11 @@ def submodules_install(gitmodules, root_dir, requiredlist): if "T:F" == fxrequired: print("Skipping optional component {}".format(name)) continue + # ssh urls cause problems for those who dont have git accounts with ssh keys defined + # but cime has one since e3sm prefers ssh to https + if url.startswith("git@"): + url = url.replace("git@github.com:", "https://github.com") + if fxsparse: logger.debug( f"Callng submodule_sparse_checkout({root_dir}, {name}, {url}, {path}, {fxsparse}, {fxtag}" From 2752eccf37f28d3b7505d7d3c1faeb352c1a1bd0 Mon Sep 17 00:00:00 2001 From: Jim Edwards Date: Mon, 8 Jan 2024 14:45:48 -0700 Subject: [PATCH 036/161] change from ssh to https --- git-fleximod | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/git-fleximod b/git-fleximod index fbd6099a22..179bc2ca98 100755 --- a/git-fleximod +++ b/git-fleximod @@ -201,9 +201,12 @@ def submodule_sparse_checkout(root_dir, name, url, path, sparsefile, tag="master print(f"Successfully checked out {name}") -def submodule_checkout(root, name, path): +def submodule_checkout(root, name, path, url=None): git = GitInterface(root) repodir = os.path.join(root, path) + # if url is provided update to the new url + if url: + git.git_operation("submodule", "set-url", path, url) git.git_operation("submodule", "update", "--init", path) # Look for a .gitmodules file in the newly checkedout repo if os.path.exists(os.path.join(repodir, ".gitmodules")): @@ -309,7 +312,7 @@ def submodules_install(gitmodules, root_dir, requiredlist): logger.debug( "Calling submodule_checkout({},{},{})".format(root_dir, name, path) ) - submodule_checkout(root_dir, name, path) + submodule_checkout(root_dir, name, path, url=url) def _main_func(): From 2092f3ff672051f4c34de4986f7b59debbc30920 Mon Sep 17 00:00:00 2001 From: Jim Edwards Date: Mon, 8 Jan 2024 14:58:44 -0700 Subject: [PATCH 
037/161] fix ssh issue --- git-fleximod | 14 ++++++++++---- 1 file changed, 10 insertions(+), 4 deletions(-) diff --git a/git-fleximod b/git-fleximod index 179bc2ca98..0fcbc1feef 100755 --- a/git-fleximod +++ b/git-fleximod @@ -205,10 +205,19 @@ def submodule_checkout(root, name, path, url=None): git = GitInterface(root) repodir = os.path.join(root, path) # if url is provided update to the new url + tmpurl = None if url: + # ssh urls cause problems for those who dont have git accounts with ssh keys defined + # but cime has one since e3sm prefers ssh to https + if url.startswith("git@"): + tmpurl = url + url = url.replace("git@github.com:", "https://github.com") git.git_operation("submodule", "set-url", path, url) git.git_operation("submodule", "update", "--init", path) # Look for a .gitmodules file in the newly checkedout repo + if tmpurl: + git.git_operation("submodule", "set-url", path, tmpurl) + if os.path.exists(os.path.join(repodir, ".gitmodules")): # recursively handle this checkout print(f"Recursively checking out submodules of {name} {repodir}") @@ -298,10 +307,6 @@ def submodules_install(gitmodules, root_dir, requiredlist): if "T:F" == fxrequired: print("Skipping optional component {}".format(name)) continue - # ssh urls cause problems for those who dont have git accounts with ssh keys defined - # but cime has one since e3sm prefers ssh to https - if url.startswith("git@"): - url = url.replace("git@github.com:", "https://github.com") if fxsparse: logger.debug( @@ -312,6 +317,7 @@ def submodules_install(gitmodules, root_dir, requiredlist): logger.debug( "Calling submodule_checkout({},{},{})".format(root_dir, name, path) ) + submodule_checkout(root_dir, name, path, url=url) From 4fd30a41f604b2bb6fbb8533429b60b4f1d0adc5 Mon Sep 17 00:00:00 2001 From: James Edwards Date: Mon, 8 Jan 2024 15:07:31 -0700 Subject: [PATCH 038/161] remove debug print statments --- git-fleximod | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/git-fleximod b/git-fleximod index 0fcbc1feef..404b598f2b 100755 --- a/git-fleximod +++ b/git-fleximod @@ -162,7 +162,7 @@ def submodule_sparse_checkout(root_dir, name, url, path, sparsefile, tag="master # Check first if the module is already defined # and the sparse-checkout file exists git = GitInterface(root_dir) - print(f"root_dir is {root_dir}") + # initialize a new git repo and set the sparse checkout flag sprepo_git = GitInterface(os.path.join(root_dir, path)) sprepo_git.config_set_value("core", "sparseCheckout", "true") @@ -173,15 +173,14 @@ def submodule_sparse_checkout(root_dir, name, url, path, sparsefile, tag="master if os.path.isfile(os.path.join(root_dir, ".git")): with open(os.path.join(root_dir, ".git")) as f: gitpath = os.path.abspath(os.path.join(root_dir,f.read().split()[1])) - print(f"gitpath is {gitpath}") topgit = os.path.join(gitpath, "modules") else: topgit = os.path.join(root_dir, ".git", "modules") - print(f"topgit is {topgit}") + if not os.path.isdir(topgit): os.makedirs(topgit) topgit = os.path.join(topgit, name) - print(f"topgit is {topgit}") + if os.path.isdir(os.path.join(root_dir,path,".git")): shutil.move(os.path.join(root_dir,path, ".git"), topgit) with open(os.path.join(root_dir,path, ".git"), "w") as f: From 86a5a72abcbf9709a01ad01fa6c01229700a13f2 Mon Sep 17 00:00:00 2001 From: James Edwards Date: Mon, 8 Jan 2024 16:59:21 -0700 Subject: [PATCH 039/161] fix issue with sparse checkout --- git-fleximod | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/git-fleximod b/git-fleximod index 
404b598f2b..cb1c948d6c 100755 --- a/git-fleximod +++ b/git-fleximod @@ -170,21 +170,23 @@ def submodule_sparse_checkout(root_dir, name, url, path, sparsefile, tag="master # set the repository remote sprepo_git.git_operation("remote", "add", "origin", url) + superroot = git.git_operation("rev-parse", "--show-superproject-working-tree") if os.path.isfile(os.path.join(root_dir, ".git")): with open(os.path.join(root_dir, ".git")) as f: gitpath = os.path.abspath(os.path.join(root_dir,f.read().split()[1])) - topgit = os.path.join(gitpath, "modules") + topgit = os.path.abspath(os.path.join(gitpath, "modules")) else: - topgit = os.path.join(root_dir, ".git", "modules") + topgit = os.path.abspath(os.path.join(root_dir, ".git", "modules")) if not os.path.isdir(topgit): os.makedirs(topgit) topgit = os.path.join(topgit, name) + logger.debug(f"root_dir is {root_dir} topgit is {topgit} superroot is {superroot}") if os.path.isdir(os.path.join(root_dir,path,".git")): shutil.move(os.path.join(root_dir,path, ".git"), topgit) with open(os.path.join(root_dir,path, ".git"), "w") as f: - f.write("gitdir: " + os.path.relpath(topgit, path)) + f.write("gitdir: " + os.path.relpath(topgit, os.path.join(root_dir,path))) gitsparse = os.path.abspath(os.path.join(topgit, "info", "sparse-checkout")) if os.path.isfile(gitsparse): From a9130f296d068f441994084f86c2fd376c66d6be Mon Sep 17 00:00:00 2001 From: James Edwards Date: Tue, 9 Jan 2024 09:42:17 -0700 Subject: [PATCH 040/161] more error checking --- git-fleximod | 4 +++- modules/gitmodules.py | 3 ++- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/git-fleximod b/git-fleximod index cb1c948d6c..2a00d41066 100755 --- a/git-fleximod +++ b/git-fleximod @@ -221,7 +221,7 @@ def submodule_checkout(root, name, path, url=None): if os.path.exists(os.path.join(repodir, ".gitmodules")): # recursively handle this checkout - print(f"Recursively checking out submodules of {name} {repodir}") + print(f"Recursively checking out submodules of {name} ") gitmodules = GitModules(repodir) submodules_install(gitmodules, repodir, ["I:T"]) if os.path.exists(os.path.join(repodir, ".git")): @@ -235,6 +235,8 @@ def submodules_status(gitmodules, root_dir): for name in gitmodules.sections(): path = gitmodules.get(name, "path") tag = gitmodules.get(name, "fxtag") + if not path: + utils.fatal_error("No path found in .gitmodules for {}".format(name)) newpath = os.path.join(root_dir, path) if not os.path.exists(os.path.join(newpath, ".git")): print(f"Submodule {name} not checked out") diff --git a/modules/gitmodules.py b/modules/gitmodules.py index 9e38a5806c..64b539a38f 100644 --- a/modules/gitmodules.py +++ b/modules/gitmodules.py @@ -51,7 +51,8 @@ def get(self, name, option, raw=False, vars=None, fallback=None): return None def save(self): - self.write(open(self.conf_file, "w")) + print("Called gitmodules save, not expected") + # self.write(open(self.conf_file, "w")) def sections(self): """Strip the submodule part out of section and just use the name""" From 3ccf8a3adf15825ff440f9430c9cf3f56b99fe4b Mon Sep 17 00:00:00 2001 From: James Edwards Date: Tue, 9 Jan 2024 17:44:28 -0700 Subject: [PATCH 041/161] hack to prevent file corruption --- git-fleximod | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/git-fleximod b/git-fleximod index 2a00d41066..e8a719aae7 100755 --- a/git-fleximod +++ b/git-fleximod @@ -203,6 +203,7 @@ def submodule_sparse_checkout(root_dir, name, url, path, sparsefile, tag="master def submodule_checkout(root, name, path, url=None): + 
shutil.copy(os.path.join(root,".gitmodules"), os.path.join(root,".save.gitmodules")) git = GitInterface(root) repodir = os.path.join(root, path) # if url is provided update to the new url @@ -214,7 +215,9 @@ def submodule_checkout(root, name, path, url=None): tmpurl = url url = url.replace("git@github.com:", "https://github.com") git.git_operation("submodule", "set-url", path, url) - git.git_operation("submodule", "update", "--init", path) + git.git_operation("submodule", "update", "--init", "--", path) + shutil.copy(os.path.join(root,".save.gitmodules"), os.path.join(root,".gitmodules")) + # Look for a .gitmodules file in the newly checkedout repo if tmpurl: git.git_operation("submodule", "set-url", path, tmpurl) From 840499c5601b1c944ac509b38558da0e8323ed9b Mon Sep 17 00:00:00 2001 From: James Edwards Date: Tue, 9 Jan 2024 18:01:01 -0700 Subject: [PATCH 042/161] ignore untracked files in submodules --- git-fleximod | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/git-fleximod b/git-fleximod index e8a719aae7..04fcf20456 100755 --- a/git-fleximod +++ b/git-fleximod @@ -216,7 +216,8 @@ def submodule_checkout(root, name, path, url=None): url = url.replace("git@github.com:", "https://github.com") git.git_operation("submodule", "set-url", path, url) git.git_operation("submodule", "update", "--init", "--", path) - shutil.copy(os.path.join(root,".save.gitmodules"), os.path.join(root,".gitmodules")) + + shutil.move(os.path.join(root,".save.gitmodules"), os.path.join(root,".gitmodules")) # Look for a .gitmodules file in the newly checkedout repo if tmpurl: @@ -255,7 +256,7 @@ def submodules_status(gitmodules, root_dir): print( f"Submodule {name} has no tag defined in .gitmodules, module at {atag}" ) - status = git.git_operation("status") + status = git.git_operation("status","--ignore-submodules","untracked") if "nothing to commit" not in status: print(status) From a25b257efc706ebc03468be128ec9c566791b05e Mon Sep 17 00:00:00 2001 From: James Edwards Date: Wed, 10 Jan 2024 05:24:18 -0700 Subject: [PATCH 043/161] a better solution to the file coruption issue --- git-fleximod | 9 ++++----- modules/gitmodules.py | 6 ++++++ 2 files changed, 10 insertions(+), 5 deletions(-) diff --git a/git-fleximod b/git-fleximod index 04fcf20456..9726059103 100755 --- a/git-fleximod +++ b/git-fleximod @@ -203,22 +203,21 @@ def submodule_sparse_checkout(root_dir, name, url, path, sparsefile, tag="master def submodule_checkout(root, name, path, url=None): - shutil.copy(os.path.join(root,".gitmodules"), os.path.join(root,".save.gitmodules")) git = GitInterface(root) repodir = os.path.join(root, path) # if url is provided update to the new url tmpurl = None if url: # ssh urls cause problems for those who dont have git accounts with ssh keys defined - # but cime has one since e3sm prefers ssh to https + # but cime has one since e3sm prefers ssh to https, because the .gitmodules file was + # opened with a GitModules object we don't need to worry about restoring the file here + # it will be done by the GitModules class if url.startswith("git@"): tmpurl = url url = url.replace("git@github.com:", "https://github.com") git.git_operation("submodule", "set-url", path, url) git.git_operation("submodule", "update", "--init", "--", path) - shutil.move(os.path.join(root,".save.gitmodules"), os.path.join(root,".gitmodules")) - # Look for a .gitmodules file in the newly checkedout repo if tmpurl: git.git_operation("submodule", "set-url", path, tmpurl) @@ -345,7 +344,7 @@ def _main_func(): file_path = 
utils.find_upwards(root_dir, file_name) if file_path is None: - fatal_error( + utils.fatal_error( "No {} found in {} or any of it's parents".format(file_name, root_dir) ) root_dir = os.path.dirname(file_path) diff --git a/modules/gitmodules.py b/modules/gitmodules.py index 64b539a38f..fe05e3ca62 100644 --- a/modules/gitmodules.py +++ b/modules/gitmodules.py @@ -1,4 +1,5 @@ import os +import shutil from configparser import ConfigParser from modules.lstripreader import LstripReader @@ -20,6 +21,8 @@ def __init__( """ ConfigParser.__init__(self) self.conf_file = os.path.join(confpath, conffile) + # first create a backup of this file to be restored on deletion of the object + shutil.copy(self.conf_file, self.conf_file+".save") self.read_file(LstripReader(self.conf_file), source=conffile) self.includelist = includelist self.excludelist = excludelist @@ -54,6 +57,9 @@ def save(self): print("Called gitmodules save, not expected") # self.write(open(self.conf_file, "w")) + def __del__(self): + shutil.move(self.conf_file+".save", self.conf_file) + def sections(self): """Strip the submodule part out of section and just use the name""" names = [] From 657f110a761e2c49524e2f2dd64c139968b3ef2c Mon Sep 17 00:00:00 2001 From: James Edwards Date: Wed, 10 Jan 2024 10:48:30 -0700 Subject: [PATCH 044/161] awkward solution for ssh urls --- git-fleximod | 32 ++++++++++++++++++++++---------- 1 file changed, 22 insertions(+), 10 deletions(-) diff --git a/git-fleximod b/git-fleximod index 9726059103..206359e39b 100755 --- a/git-fleximod +++ b/git-fleximod @@ -202,11 +202,14 @@ def submodule_sparse_checkout(root_dir, name, url, path, sparsefile, tag="master print(f"Successfully checked out {name}") -def submodule_checkout(root, name, path, url=None): +def submodule_checkout(root, name, path, url=None, tag=None): git = GitInterface(root) repodir = os.path.join(root, path) + # if url is provided update to the new url tmpurl = None + + # Look for a .gitmodules file in the newly checkedout repo if url: # ssh urls cause problems for those who dont have git accounts with ssh keys defined # but cime has one since e3sm prefers ssh to https, because the .gitmodules file was @@ -215,22 +218,31 @@ def submodule_checkout(root, name, path, url=None): if url.startswith("git@"): tmpurl = url url = url.replace("git@github.com:", "https://github.com") - git.git_operation("submodule", "set-url", path, url) - git.git_operation("submodule", "update", "--init", "--", path) - - # Look for a .gitmodules file in the newly checkedout repo - if tmpurl: - git.git_operation("submodule", "set-url", path, tmpurl) - + + git.git_operation("clone", "-b", tag, url, path) + print(f"Using {url} for {name}") + # print(git.git_operation("submodule", "set-url", name, url)) + # if "cprnc" in repodir: + # with open(os.path.join(root,".gitmodules"),"r") as f: + # print(f.read()) + print(f"calling update for {name}") + if not tmpurl: + logger.debug(git.git_operation("submodule", "update", "--init", "--", name)) + if os.path.exists(os.path.join(repodir, ".gitmodules")): # recursively handle this checkout - print(f"Recursively checking out submodules of {name} ") + print(f"Recursively checking out submodules of {name} {repodir} {url}") gitmodules = GitModules(repodir) submodules_install(gitmodules, repodir, ["I:T"]) if os.path.exists(os.path.join(repodir, ".git")): print(f"Successfully checked out {name}") else: utils.fatal_error(f"Failed to checkout {name}") + + + if tmpurl: + print(git.git_operation("restore", ".gitmodules")) + print(f"Using {tmpurl} for 
{name}") return @@ -324,7 +336,7 @@ def submodules_install(gitmodules, root_dir, requiredlist): "Calling submodule_checkout({},{},{})".format(root_dir, name, path) ) - submodule_checkout(root_dir, name, path, url=url) + submodule_checkout(root_dir, name, path, url=url, tag=fxtag) def _main_func(): From 4879c7530523b8b7a16c11a9c113755d8b0e4a85 Mon Sep 17 00:00:00 2001 From: James Edwards Date: Wed, 10 Jan 2024 10:57:31 -0700 Subject: [PATCH 045/161] remove debug print statements --- git-fleximod | 13 +++---------- 1 file changed, 3 insertions(+), 10 deletions(-) diff --git a/git-fleximod b/git-fleximod index 206359e39b..f78dfa25e1 100755 --- a/git-fleximod +++ b/git-fleximod @@ -218,16 +218,10 @@ def submodule_checkout(root, name, path, url=None, tag=None): if url.startswith("git@"): tmpurl = url url = url.replace("git@github.com:", "https://github.com") - git.git_operation("clone", "-b", tag, url, path) - print(f"Using {url} for {name}") - # print(git.git_operation("submodule", "set-url", name, url)) - # if "cprnc" in repodir: - # with open(os.path.join(root,".gitmodules"),"r") as f: - # print(f.read()) - print(f"calling update for {name}") + if not tmpurl: - logger.debug(git.git_operation("submodule", "update", "--init", "--", name)) + logger.debug(git.git_operation("submodule", "update", "--init", "--", path)) if os.path.exists(os.path.join(repodir, ".gitmodules")): # recursively handle this checkout @@ -239,10 +233,9 @@ def submodule_checkout(root, name, path, url=None, tag=None): else: utils.fatal_error(f"Failed to checkout {name}") - if tmpurl: print(git.git_operation("restore", ".gitmodules")) - print(f"Using {tmpurl} for {name}") + return From 661ba5efc813fa910e7fc43172a513a177d2e013 Mon Sep 17 00:00:00 2001 From: James Edwards Date: Wed, 10 Jan 2024 17:29:53 -0700 Subject: [PATCH 046/161] update for distribution --- setup.py | 25 +++++---- {modules => src/fleximod}/__init__.py | 0 {modules => src/fleximod}/gitinterface.py | 13 +++-- {modules => src/fleximod}/gitmodules.py | 12 +++-- {modules => src/fleximod}/lstripreader.py | 0 {modules => src/fleximod}/utils.py | 0 src/fleximod/version.py | 1 + git-fleximod => src/git-fleximod | 62 +++++++++++++---------- version.txt | 1 - 9 files changed, 70 insertions(+), 44 deletions(-) rename {modules => src/fleximod}/__init__.py (100%) rename {modules => src/fleximod}/gitinterface.py (87%) rename {modules => src/fleximod}/gitmodules.py (83%) rename {modules => src/fleximod}/lstripreader.py (100%) rename {modules => src/fleximod}/utils.py (100%) create mode 100644 src/fleximod/version.py rename git-fleximod => src/git-fleximod (90%) delete mode 100644 version.txt diff --git a/setup.py b/setup.py index 2d4cb3642b..1abf839259 100644 --- a/setup.py +++ b/setup.py @@ -1,26 +1,33 @@ import setuptools import os +from setuptools import setup, find_packages +from distutils.util import convert_path with open("README.md", "r") as fh: long_description = fh.read() -with open("version.txt", "r") as fh: - version = fh.read() - cwd = os.getcwd() + +main_ns = {} +ver_path = convert_path('src/fleximod/version.py') +with open(ver_path) as ver_file: + exec(ver_file.read(), main_ns) + + setuptools.setup( - name="git-fleximod", # This is the name of the package - version=version, # The initial release version + scripts=["src/git-fleximod"], # This is the name of the package + version=main_ns['__version__'], # The initial release version author="Jim Edwards", # Full name of the author description="Extended support for git-submodule and git-sparse-checkout", 
long_description=long_description, # Long description read from the the readme file long_description_content_type="text/markdown", - packages=setuptools.find_packages(), # List of all python modules to be installed + packages=['fleximod'], # List of all python modules to be installed + package_dir={'fleximod': 'src/fleximod'}, + package_data={"":['version.txt']}, classifiers=[ "Programming Language :: Python :: 3", "License :: OSI Approved :: MIT License", "Operating System :: OS Independent", ], # Information to filter the project on PyPi website python_requires='>=3.6', # Minimum version requirement of the package - py_modules=['git-fleximod'], # Name of the python package - package_dir={'git-fleximod':'.'}, # Directory of the source code of the package - install_requires=[] # Install other dependencies if any +# py_modules=['git-fleximod'], # Name of the python package + install_requires=["PyGit"] # Install other dependencies if any ) diff --git a/modules/__init__.py b/src/fleximod/__init__.py similarity index 100% rename from modules/__init__.py rename to src/fleximod/__init__.py diff --git a/modules/gitinterface.py b/src/fleximod/gitinterface.py similarity index 87% rename from modules/gitinterface.py rename to src/fleximod/gitinterface.py index a4e2529256..c127163dc6 100644 --- a/modules/gitinterface.py +++ b/src/fleximod/gitinterface.py @@ -1,11 +1,12 @@ import os import logging -from modules import utils +from fleximod import utils class GitInterface: - def __init__(self, repo_path): + def __init__(self, repo_path, logger): + logger.debug("Initialize GitInterface for {}".format(repo_path)) self.repo_path = repo_path - + self.logger = logger try: import git self._use_module = True @@ -20,10 +21,10 @@ def __init__(self, repo_path): if not os.path.exists(os.path.join(repo_path,".git")): self._init_git_repo() msg = "Using shell interface to git" - logging.info(msg) + self.logger.info(msg) def _git_command(self, operation, *args): - logging.info(operation) + self.logger.info(operation) if self._use_module and operation != "submodule": return getattr(self.repo.git, operation)(*args) else: @@ -39,6 +40,7 @@ def _init_git_repo(self): def git_operation(self, operation, *args, **kwargs): command = self._git_command(operation, *args) + self.logger.info(command) if isinstance(command, list): return utils.execute_subprocess(command, output_to_caller=True) else: @@ -60,4 +62,5 @@ def config_set_value(self, section, name, value): writer.release() # Ensure changes are saved else: cmd = ("git","-C",self.repo_path,"config", f"{section}.{name}", value) + self.logger.info(cmd) utils.execute_subprocess(cmd, output_to_caller=True) diff --git a/modules/gitmodules.py b/src/fleximod/gitmodules.py similarity index 83% rename from modules/gitmodules.py rename to src/fleximod/gitmodules.py index fe05e3ca62..a6f7589319 100644 --- a/modules/gitmodules.py +++ b/src/fleximod/gitmodules.py @@ -1,13 +1,12 @@ import os import shutil from configparser import ConfigParser - -from modules.lstripreader import LstripReader - +from fleximod.lstripreader import LstripReader class GitModules(ConfigParser): def __init__( self, + logger, confpath=os.getcwd(), conffile=".gitmodules", includelist=None, @@ -19,6 +18,8 @@ def __init__( includelist: Optional list of submodules to include. excludelist: Optional list of submodules to exclude. 
""" + self.logger = logger + self.logger.debug("Creating a GitModules object {} {} {} {}".format(confpath,conffile,includelist,excludelist)) ConfigParser.__init__(self) self.conf_file = os.path.join(confpath, conffile) # first create a backup of this file to be restored on deletion of the object @@ -33,6 +34,7 @@ def set(self, name, option, value): Ensures the appropriate section exists for the submodule. Calls the parent class's set method to store the value. """ + self.logger.debug("set called {} {} {}".format(name,option,value)) section = f'submodule "{name}"' if not self.has_section(section): self.add_section(section) @@ -45,6 +47,7 @@ def get(self, name, option, raw=False, vars=None, fallback=None): Uses the parent class's get method to access the value. Handles potential errors if the section or option doesn't exist. """ + self.logger.debug("get called {} {}".format(name,option)) section = f'submodule "{name}"' try: return ConfigParser.get( @@ -58,10 +61,12 @@ def save(self): # self.write(open(self.conf_file, "w")) def __del__(self): + self.logger.debug("Destroying GitModules object") shutil.move(self.conf_file+".save", self.conf_file) def sections(self): """Strip the submodule part out of section and just use the name""" + self.logger.debug("calling GitModules sections iterator") names = [] for section in ConfigParser.sections(self): name = section[11:-1] @@ -73,5 +78,6 @@ def sections(self): return names def items(self, name, raw=False, vars=None): + self.logger.debug("calling GitModules items for {}".format(name)) section = f'submodule "{name}"' return ConfigParser.items(section, raw=raw, vars=vars) diff --git a/modules/lstripreader.py b/src/fleximod/lstripreader.py similarity index 100% rename from modules/lstripreader.py rename to src/fleximod/lstripreader.py diff --git a/modules/utils.py b/src/fleximod/utils.py similarity index 100% rename from modules/utils.py rename to src/fleximod/utils.py diff --git a/src/fleximod/version.py b/src/fleximod/version.py new file mode 100644 index 0000000000..8ce9b3623b --- /dev/null +++ b/src/fleximod/version.py @@ -0,0 +1 @@ +__version__ = '0.1.3' diff --git a/git-fleximod b/src/git-fleximod similarity index 90% rename from git-fleximod rename to src/git-fleximod index f78dfa25e1..dd2adb0f9d 100755 --- a/git-fleximod +++ b/src/git-fleximod @@ -4,13 +4,12 @@ import os import shutil import logging import argparse -from modules import utils -from modules.gitinterface import GitInterface -from modules.gitmodules import GitModules - -logger = logging.getLogger(__name__) -logger.propogate = False - +from fleximod import utils +from fleximod.gitinterface import GitInterface +from fleximod.gitmodules import GitModules +from fleximod.version import __version__ +# logger variable is global +logger = None def commandline_arguments(args=None): description = """ @@ -84,9 +83,9 @@ def commandline_arguments(args=None): parser.add_argument( "-V", "--version", - action="store_true", - default=False, - help="Print manage_externals version and exit.", + action="version", + version=f"%(prog)s {__version__}", + help="Print version and exit.", ) # @@ -136,14 +135,21 @@ def commandline_arguments(args=None): if not action: action = "install" + if options.debug: + level = logging.DEBUG + elif options.verbose: + level = logging.INFO + else: + level = logging.WARNING + # Configure the root logger + logging.basicConfig( + level=level, + format="%(name)s - %(levelname)s - %(message)s", + handlers=[logging.FileHandler("fleximod.log"), logging.StreamHandler()], + ) if 
options.version: - version_info = "" - version_file_path = os.path.join(os.path.dirname(__file__), "version.txt") - with open(version_file_path) as f: - version_info = f.readlines()[0].strip() - print(version_info) - sys.exit(0) - + exit() + return ( options.path, options.gitmodules, @@ -161,10 +167,10 @@ def submodule_sparse_checkout(root_dir, name, url, path, sparsefile, tag="master os.makedirs(path) # Check first if the module is already defined # and the sparse-checkout file exists - git = GitInterface(root_dir) + git = GitInterface(root_dir, logger) # initialize a new git repo and set the sparse checkout flag - sprepo_git = GitInterface(os.path.join(root_dir, path)) + sprepo_git = GitInterface(os.path.join(root_dir, path), logger) sprepo_git.config_set_value("core", "sparseCheckout", "true") # set the repository remote @@ -203,7 +209,7 @@ def submodule_sparse_checkout(root_dir, name, url, path, sparsefile, tag="master def submodule_checkout(root, name, path, url=None, tag=None): - git = GitInterface(root) + git = GitInterface(root, logger) repodir = os.path.join(root, path) # if url is provided update to the new url @@ -226,7 +232,7 @@ def submodule_checkout(root, name, path, url=None, tag=None): if os.path.exists(os.path.join(repodir, ".gitmodules")): # recursively handle this checkout print(f"Recursively checking out submodules of {name} {repodir} {url}") - gitmodules = GitModules(repodir) + gitmodules = GitModules(logger,confpath=repodir) submodules_install(gitmodules, repodir, ["I:T"]) if os.path.exists(os.path.join(repodir, ".git")): print(f"Successfully checked out {name}") @@ -250,7 +256,7 @@ def submodules_status(gitmodules, root_dir): print(f"Submodule {name} not checked out") else: with utils.pushd(newpath): - git = GitInterface(newpath) + git = GitInterface(newpath, logger) atag = git.git_operation("describe", "--tags", "--always").rstrip() if tag and atag != tag: print(f"Submodule {name} {atag} is out of sync with .gitmodules {tag}") @@ -274,7 +280,7 @@ def submodules_update(gitmodules, root_dir): if os.path.exists(os.path.join(path, ".git")): submoddir = os.path.join(root_dir, path) with utils.pushd(submoddir): - git = GitInterface(submoddir) + git = GitInterface(submoddir, logger) # first make sure the url is correct upstream = git.git_operation("ls-remote", "--get-url").rstrip() newremote = "origin" @@ -342,8 +348,11 @@ def _main_func(): verbose, action, ) = commandline_arguments() - if verbose: - print(f"action is {action}") + # Get a logger for the package + global logger + logger = logging.getLogger(__name__) + + logger.info(f"action is {action}") if not os.path.isfile(os.path.join(root_dir, file_name)): file_path = utils.find_upwards(root_dir, file_name) @@ -353,8 +362,9 @@ def _main_func(): "No {} found in {} or any of it's parents".format(file_name, root_dir) ) root_dir = os.path.dirname(file_path) - print(f"root_dir is {root_dir}") + logger.info(f"root_dir is {root_dir}") gitmodules = GitModules( + logger, confpath=root_dir, conffile=file_name, includelist=includelist, diff --git a/version.txt b/version.txt deleted file mode 100644 index a3dce6cd3a..0000000000 --- a/version.txt +++ /dev/null @@ -1 +0,0 @@ -v0.0.2 From b22bab6c5c359a9635493dd90cc8d17af03321b9 Mon Sep 17 00:00:00 2001 From: James Edwards Date: Thu, 11 Jan 2024 11:03:15 -0700 Subject: [PATCH 047/161] fix error with version flag --- src/git-fleximod | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/git-fleximod b/src/git-fleximod index dd2adb0f9d..d45acb14ed 100755 --- 
a/src/git-fleximod +++ b/src/git-fleximod @@ -147,7 +147,7 @@ def commandline_arguments(args=None): format="%(name)s - %(levelname)s - %(message)s", handlers=[logging.FileHandler("fleximod.log"), logging.StreamHandler()], ) - if options.version: + if hasattr(options, 'version'): exit() return ( From e81099a63708e4e5cace47b6a1fe39bafee694b5 Mon Sep 17 00:00:00 2001 From: James Edwards Date: Thu, 11 Jan 2024 11:04:10 -0700 Subject: [PATCH 048/161] update version to 0.1.4 --- src/fleximod/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/fleximod/version.py b/src/fleximod/version.py index 8ce9b3623b..7525d19930 100644 --- a/src/fleximod/version.py +++ b/src/fleximod/version.py @@ -1 +1 @@ -__version__ = '0.1.3' +__version__ = '0.1.4' From 1a80e973560b86ccee228274dbb2b38e17037abc Mon Sep 17 00:00:00 2001 From: Jim Edwards Date: Mon, 15 Jan 2024 07:52:01 -0700 Subject: [PATCH 049/161] fix issues with rerunning --- src/fleximod/version.py | 2 +- src/git-fleximod | 25 ++++++++++++++++++++++--- 2 files changed, 23 insertions(+), 4 deletions(-) diff --git a/src/fleximod/version.py b/src/fleximod/version.py index 7525d19930..2fb25139f1 100644 --- a/src/fleximod/version.py +++ b/src/fleximod/version.py @@ -1 +1 @@ -__version__ = '0.1.4' +__version__ = '0.1.6' diff --git a/src/git-fleximod b/src/git-fleximod index d45acb14ed..d7bc2c1202 100755 --- a/src/git-fleximod +++ b/src/git-fleximod @@ -8,6 +8,7 @@ from fleximod import utils from fleximod.gitinterface import GitInterface from fleximod.gitmodules import GitModules from fleximod.version import __version__ +from configparser import NoOptionError # logger variable is global logger = None @@ -170,7 +171,21 @@ def submodule_sparse_checkout(root_dir, name, url, path, sparsefile, tag="master git = GitInterface(root_dir, logger) # initialize a new git repo and set the sparse checkout flag - sprepo_git = GitInterface(os.path.join(root_dir, path), logger) + sprep_repo = os.path.join(root_dir, path) + sprepo_git = GitInterface(sprep_repo, logger) + if os.path.exists(os.path.join(sprep_repo,".git")): + try: + logger.info("Submodule {} found".format(name)) + chk = sprepo_git.config_get_value("core", "sparseCheckout") + if chk == "true": + logger.info("Sparse submodule {} already checked out".format(name)) + return + except NoOptionError: + logger.debug("Sparse submodule {} not present".format(name)) + except Exception as e: + utils.fatal_error("Unexpected error {} occured.".format(e)) + + sprepo_git.config_set_value("core", "sparseCheckout", "true") # set the repository remote @@ -211,7 +226,9 @@ def submodule_sparse_checkout(root_dir, name, url, path, sparsefile, tag="master def submodule_checkout(root, name, path, url=None, tag=None): git = GitInterface(root, logger) repodir = os.path.join(root, path) - + if os.path.exists(os.path.join(repodir, ".git")): + logger.info("Submodule {} already checked out".format(name)) + return # if url is provided update to the new url tmpurl = None @@ -225,7 +242,9 @@ def submodule_checkout(root, name, path, url=None, tag=None): tmpurl = url url = url.replace("git@github.com:", "https://github.com") git.git_operation("clone", "-b", tag, url, path) - + # Now need to move the .git dir to the submodule location + + if not tmpurl: logger.debug(git.git_operation("submodule", "update", "--init", "--", path)) From 13eb8b87bcfba34eb70c5bd3a39e570d2527a452 Mon Sep 17 00:00:00 2001 From: James Edwards Date: Mon, 15 Jan 2024 07:53:10 -0700 Subject: [PATCH 050/161] update setup.py --- setup.cfg | 2 ++ 
setup.py | 4 +++- 2 files changed, 5 insertions(+), 1 deletion(-) create mode 100644 setup.cfg diff --git a/setup.cfg b/setup.cfg new file mode 100644 index 0000000000..4f727fa0b1 --- /dev/null +++ b/setup.cfg @@ -0,0 +1,2 @@ +[metadata] +license_file = License \ No newline at end of file diff --git a/setup.py b/setup.py index 1abf839259..f891593865 100644 --- a/setup.py +++ b/setup.py @@ -16,6 +16,8 @@ scripts=["src/git-fleximod"], # This is the name of the package version=main_ns['__version__'], # The initial release version author="Jim Edwards", # Full name of the author + maintainer="jedwards4b", + license="MIT License", description="Extended support for git-submodule and git-sparse-checkout", long_description=long_description, # Long description read from the the readme file long_description_content_type="text/markdown", @@ -29,5 +31,5 @@ ], # Information to filter the project on PyPi website python_requires='>=3.6', # Minimum version requirement of the package # py_modules=['git-fleximod'], # Name of the python package - install_requires=["PyGit"] # Install other dependencies if any + install_requires=["GitPython"] # Install other dependencies if any ) From 5a590c541ca168c879d3726957c102f878af0f6d Mon Sep 17 00:00:00 2001 From: James Edwards Date: Mon, 15 Jan 2024 09:36:10 -0700 Subject: [PATCH 051/161] update version --- src/fleximod/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/fleximod/version.py b/src/fleximod/version.py index 2fb25139f1..124e46203b 100644 --- a/src/fleximod/version.py +++ b/src/fleximod/version.py @@ -1 +1 @@ -__version__ = '0.1.6' +__version__ = '0.1.7' From 8c92b14204c80b0b444a3155a1ce52e5dca8411b Mon Sep 17 00:00:00 2001 From: James Edwards Date: Wed, 17 Jan 2024 16:23:33 -0700 Subject: [PATCH 052/161] provide more info for status of uninstalled submodules --- src/fleximod/version.py | 2 +- src/git-fleximod | 20 ++++++++++++++++++-- 2 files changed, 19 insertions(+), 3 deletions(-) diff --git a/src/fleximod/version.py b/src/fleximod/version.py index 124e46203b..c3bb2961b6 100644 --- a/src/fleximod/version.py +++ b/src/fleximod/version.py @@ -1 +1 @@ -__version__ = '0.1.7' +__version__ = '0.1.8' diff --git a/src/git-fleximod b/src/git-fleximod index d7bc2c1202..3e743c3f5c 100755 --- a/src/git-fleximod +++ b/src/git-fleximod @@ -127,7 +127,8 @@ def commandline_arguments(args=None): else: options = parser.parse_args() - if options.optional: +# explicitly listing a component overrides the optional flag + if options.optional or options.components: fxrequired = ["T:T", "T:F", "I:T"] else: fxrequired = ["T:T", "I:T"] @@ -271,8 +272,23 @@ def submodules_status(gitmodules, root_dir): if not path: utils.fatal_error("No path found in .gitmodules for {}".format(name)) newpath = os.path.join(root_dir, path) + logger.debug("newpath is {}".format(newpath)) if not os.path.exists(os.path.join(newpath, ".git")): - print(f"Submodule {name} not checked out") + rootgit = GitInterface(root_dir, logger) + # submodule commands use path, not name + nhash = (rootgit.git_operation("submodule","status",path).split()[0])[1:] + url = gitmodules.get(name, "url") + tags = rootgit.git_operation("ls-remote","--tags",url) + atag = None + for htag in tags.split('\n'): + if tag in htag: + atag = (htag.split()[1])[10:] + break + if tag == atag: + print(f"Submodule {name} not checked out, aligned at tag {tag}") + else: + print(f"Submodule {name} not checked out, out of sync at tag {atag}, expected tag is {tag}") + else: with utils.pushd(newpath): git = 
GitInterface(newpath, logger) From 43225eac86892d074453cbdc622124b2eedf48cf Mon Sep 17 00:00:00 2001 From: James Edwards Date: Thu, 18 Jan 2024 16:49:52 -0700 Subject: [PATCH 053/161] add test action --- escomp_install | 25 +++++++++++++++++++ src/fleximod/version.py | 2 +- src/git-fleximod | 55 ++++++++++++++++++++++++----------------- 3 files changed, 59 insertions(+), 23 deletions(-) create mode 100644 escomp_install diff --git a/escomp_install b/escomp_install new file mode 100644 index 0000000000..ae782e72a4 --- /dev/null +++ b/escomp_install @@ -0,0 +1,25 @@ +#!/usr/bin/env python +# updates git-fleximod in an ESCOMP model +# this script should be run from the model root directory, it expects +# git-fleximod to already be installed with the script in bin +# and the classes in lib/python/site-packages +import sys +import shutil +import os + +from glob import iglob + +fleximod_root = sys.argv[1] +fleximod_path = os.path.join(fleximod_root,"src","git-fleximod") +if os.path.isfile(fleximod_path): + with open(fleximod_path,"r") as f: + fleximod = f.readlines() + with open(os.path.join(".","bin","git-fleximod"),"w") as f: + for line in fleximod: + f.write(line) + if "import argparse" in line: + f.write('\nsys.path.append(os.path.join(os.path.dirname(__file__),"..","lib","python","site-packages"))\n\n') + + for file in iglob(os.path.join(fleximod_root, "src", "fleximod", "*.py")): + shutil.copy(file, + os.path.join("lib","python","site-packages","fleximod",os.path.basename(file))) diff --git a/src/fleximod/version.py b/src/fleximod/version.py index c3bb2961b6..1c98a23a89 100644 --- a/src/fleximod/version.py +++ b/src/fleximod/version.py @@ -1 +1 @@ -__version__ = '0.1.8' +__version__ = '0.1.9' diff --git a/src/git-fleximod b/src/git-fleximod index 3e743c3f5c..fce79b8552 100755 --- a/src/git-fleximod +++ b/src/git-fleximod @@ -23,7 +23,7 @@ def commandline_arguments(args=None): # # user options # - choices = ["update", "install", "status"] + choices = ["update", "install", "status", "test"] parser.add_argument( "action", choices=choices, @@ -107,21 +107,6 @@ def commandline_arguments(args=None): "information to the screen and log file.", ) - logging_group = parser.add_mutually_exclusive_group() - - logging_group.add_argument( - "--logging", - dest="do_logging", - action="store_true", - help="DEVELOPER: enable logging.", - ) - logging_group.add_argument( - "--no-logging", - dest="do_logging", - action="store_false", - default=False, - help="DEVELOPER: disable logging " "(this is the default)", - ) if args: options = parser.parse_args(args) else: @@ -266,6 +251,7 @@ def submodule_checkout(root, name, path, url=None, tag=None): def submodules_status(gitmodules, root_dir): + testfails = 0 for name in gitmodules.sections(): path = gitmodules.get(name, "path") tag = gitmodules.get(name, "fxtag") @@ -288,23 +274,27 @@ def submodules_status(gitmodules, root_dir): print(f"Submodule {name} not checked out, aligned at tag {tag}") else: print(f"Submodule {name} not checked out, out of sync at tag {atag}, expected tag is {tag}") - + testfails += 1 else: with utils.pushd(newpath): git = GitInterface(newpath, logger) atag = git.git_operation("describe", "--tags", "--always").rstrip() if tag and atag != tag: print(f"Submodule {name} {atag} is out of sync with .gitmodules {tag}") + testfails += 1 elif tag: print(f"Submodule {name} at tag {tag}") else: print( f"Submodule {name} has no tag defined in .gitmodules, module at {atag}" ) + testfails += 1 + status = 
git.git_operation("status","--ignore-submodules","untracked") if "nothing to commit" not in status: print(status) - + + return testfails def submodules_update(gitmodules, root_dir): for name in gitmodules.sections(): @@ -372,7 +362,26 @@ def submodules_install(gitmodules, root_dir, requiredlist): submodule_checkout(root_dir, name, path, url=url, tag=fxtag) +def submodules_test(gitmodules, root_dir): + # First check that fxtags are present and in sync with submodule hashes + testfails = submodules_status(gitmodules, root_dir) + # Then make sure that urls are consistant with fxurls (not forks and not ssh) + # and that sparse checkout files exist + for name in gitmodules.sections(): + url = gitmodules.get(name, "url") + fxurl = gitmodules.get(name, "fxurl") + fxsparse = gitmodules.get(name, "fxsparse") + path = gitmodules.get(name, "path") + if not fxurl or url != fxurl: + print(f"submodule {name} url {url} not in sync with required {fxurl}") + testfails += 1 + if fxsparse and not os.path.isfile(os.path.join(root_dir, path, fxsparse)): + print(f"sparse submodule {name} sparse checkout file {fxsparse} not found") + testfails += 1 + return testfails + + def _main_func(): ( root_dir, @@ -405,16 +414,18 @@ def _main_func(): includelist=includelist, excludelist=excludelist, ) - + retval = 0 if action == "update": submodules_update(gitmodules, root_dir) elif action == "install": submodules_install(gitmodules, root_dir, fxrequired) elif action == "status": submodules_status(gitmodules, root_dir) + elif action == "test": + retval = submodules_test(gitmodules, root_dir) else: utils.fatal_error(f"unrecognized action request {action}") - - + return(retval) + if __name__ == "__main__": - _main_func() + sys.exit(_main_func()) From bbb1b1d98b587ddb5dd455a9ce889c64d53f7151 Mon Sep 17 00:00:00 2001 From: James Edwards Date: Tue, 30 Jan 2024 16:18:53 -0700 Subject: [PATCH 054/161] fix issue with status --- README.md | 12 +++++++++++- src/git-fleximod | 19 ++++++++++--------- 2 files changed, 21 insertions(+), 10 deletions(-) diff --git a/README.md b/README.md index 910858e79a..cec74c5a66 100644 --- a/README.md +++ b/README.md @@ -20,6 +20,9 @@ Git-fleximod is a Python-based tool that extends Git's submodule capabilities, o install: Install submodules according to configuration. status: Display the status of submodules. update: Update submodules to the tag indicated in .gitmodules variable fxtag. + test: Make sure that fxtags and submodule hashes are consistant, + make sure that official urls (as defined by fxurl) are set + make sure that fxtags are defined for all submodules Additional Options: See git fleximod --help for more details. @@ -32,6 +35,7 @@ Git-fleximod is a Python-based tool that extends Git's submodule capabilities, o - I:T: Internal and required (always checked out). - I:F: Internal and optional (checked out with --optional flag). fxsparse: Enable sparse checkout for a submodule, pointing to a file containing sparse checkout paths. + fcurl ## Sparse Checkouts @@ -39,7 +43,13 @@ Git-fleximod is a Python-based tool that extends Git's submodule capabilities, o in the .gitmodules file to the path of a file containing the desired sparse checkout paths. Git-fleximod will automatically configure sparse checkout based on this file when applicable commands are run. - See [git-sparse-checkout](https://git-scm.com/docs/git-sparse-checkout#_internalsfull_pattern_set) for details on the format of this file. 
+ See [git-sparse-checkout](https://git-scm.com/docs/git-sparse-checkout#_internalsfull_pattern_set) + for details on the format of this file. + +## Tests + + The git fleximod test action is designed to be used by, for example, github workflows + to assure that protected branches are consistant with respect to submodule hashes and fleximod fxtags ## Examples diff --git a/src/git-fleximod b/src/git-fleximod index fce79b8552..9e2620d7f7 100755 --- a/src/git-fleximod +++ b/src/git-fleximod @@ -4,6 +4,7 @@ import os import shutil import logging import argparse +import textwrap from fleximod import utils from fleximod.gitinterface import GitInterface from fleximod.gitmodules import GitModules @@ -23,11 +24,11 @@ def commandline_arguments(args=None): # # user options # - choices = ["update", "install", "status", "test"] + choices = ["update", "checkout", "status", "test"] parser.add_argument( "action", choices=choices, - default="install", + default="checkout", help=f"Subcommand of fleximod, choices are {choices}", ) @@ -120,7 +121,7 @@ def commandline_arguments(args=None): action = options.action if not action: - action = "install" + action = "checkout" if options.debug: level = logging.DEBUG @@ -238,7 +239,7 @@ def submodule_checkout(root, name, path, url=None, tag=None): # recursively handle this checkout print(f"Recursively checking out submodules of {name} {repodir} {url}") gitmodules = GitModules(logger,confpath=repodir) - submodules_install(gitmodules, repodir, ["I:T"]) + submodules_checkout(gitmodules, repodir, ["I:T"]) if os.path.exists(os.path.join(repodir, ".git")): print(f"Successfully checked out {name}") else: @@ -290,9 +291,9 @@ def submodules_status(gitmodules, root_dir): ) testfails += 1 - status = git.git_operation("status","--ignore-submodules","untracked") + status = git.git_operation("status","--ignore-submodules") if "nothing to commit" not in status: - print(status) + print(textwrap.indent(status,' ')) return testfails @@ -337,7 +338,7 @@ def submodules_update(gitmodules, root_dir): print(f"submodule {name} up to date.") -def submodules_install(gitmodules, root_dir, requiredlist): +def submodules_checkout(gitmodules, root_dir, requiredlist): for name in gitmodules.sections(): fxrequired = gitmodules.get(name, "fxrequired") fxsparse = gitmodules.get(name, "fxsparse") @@ -417,8 +418,8 @@ def _main_func(): retval = 0 if action == "update": submodules_update(gitmodules, root_dir) - elif action == "install": - submodules_install(gitmodules, root_dir, fxrequired) + elif action == "checkout": + submodules_checkout(gitmodules, root_dir, fxrequired) elif action == "status": submodules_status(gitmodules, root_dir) elif action == "test": From af78aa4de1632ad1bcb8e94bfa7b272294108132 Mon Sep 17 00:00:00 2001 From: James Edwards Date: Tue, 30 Jan 2024 16:38:50 -0700 Subject: [PATCH 055/161] update version --- src/fleximod/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/fleximod/version.py b/src/fleximod/version.py index 1c98a23a89..7fd229a32b 100644 --- a/src/fleximod/version.py +++ b/src/fleximod/version.py @@ -1 +1 @@ -__version__ = '0.1.9' +__version__ = '0.2.0' From 13a2f6b25dda439e74396073b52c3d748b838ea6 Mon Sep 17 00:00:00 2001 From: James Edwards Date: Thu, 1 Feb 2024 12:21:58 -0700 Subject: [PATCH 056/161] cleaner format for status output, fix for special handling of ssh submodules, fix for 0 length log --- setup.cfg | 5 ++++- setup.py | 10 +++++++++- src/git-fleximod | 23 +++++++++++++++-------- 3 files changed, 28 insertions(+), 10 
deletions(-) diff --git a/setup.cfg b/setup.cfg index 4f727fa0b1..9a13827a35 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,2 +1,5 @@ [metadata] -license_file = License \ No newline at end of file +license_file = License +[build_manpages] +manpages = + man/git-fleximod.1:object=parser:pyfile=bin/git-fleximod diff --git a/setup.py b/setup.py index f891593865..f68a4ea9b7 100644 --- a/setup.py +++ b/setup.py @@ -2,6 +2,7 @@ import os from setuptools import setup, find_packages from distutils.util import convert_path +from build_manpages import build_manpages, get_build_py_cmd, get_install_cmd with open("README.md", "r") as fh: long_description = fh.read() @@ -31,5 +32,12 @@ ], # Information to filter the project on PyPi website python_requires='>=3.6', # Minimum version requirement of the package # py_modules=['git-fleximod'], # Name of the python package - install_requires=["GitPython"] # Install other dependencies if any + install_requires=["GitPython"], # Install other dependencies if any + cmdclass={ + 'build_manpages': build_manpages, + # Re-define build_py and install commands so the manual pages + # are automatically re-generated and installed + 'build_py': get_build_py_cmd(), + 'install': get_install_cmd(), + } ) diff --git a/src/git-fleximod b/src/git-fleximod index 9e2620d7f7..f80d359c76 100755 --- a/src/git-fleximod +++ b/src/git-fleximod @@ -122,9 +122,11 @@ def commandline_arguments(args=None): action = options.action if not action: action = "checkout" + handlers=[logging.StreamHandler()] if options.debug: level = logging.DEBUG + handlers.append(logging.FileHandler("fleximod.log")) elif options.verbose: level = logging.INFO else: @@ -133,8 +135,9 @@ def commandline_arguments(args=None): logging.basicConfig( level=level, format="%(name)s - %(levelname)s - %(message)s", - handlers=[logging.FileHandler("fleximod.log"), logging.StreamHandler()], + handlers=handlers ) + if hasattr(options, 'version'): exit() @@ -228,7 +231,11 @@ def submodule_checkout(root, name, path, url=None, tag=None): if url.startswith("git@"): tmpurl = url url = url.replace("git@github.com:", "https://github.com") - git.git_operation("clone", "-b", tag, url, path) + git.git_operation("clone", url, path) + smgit = GitInterface(path, logger) + if not tag: + tag = smgit.git_operation("describe", "--tags", "--always").rstrip() + smgit.git_operation("checkout",tag) # Now need to move the .git dir to the submodule location @@ -272,28 +279,28 @@ def submodules_status(gitmodules, root_dir): atag = (htag.split()[1])[10:] break if tag == atag: - print(f"Submodule {name} not checked out, aligned at tag {tag}") + print(f"e {name:>20} not checked out, aligned at tag {tag}") else: - print(f"Submodule {name} not checked out, out of sync at tag {atag}, expected tag is {tag}") + print(f"e {name:>20} not checked out, out of sync at tag {atag}, expected tag is {tag}") testfails += 1 else: with utils.pushd(newpath): git = GitInterface(newpath, logger) atag = git.git_operation("describe", "--tags", "--always").rstrip() if tag and atag != tag: - print(f"Submodule {name} {atag} is out of sync with .gitmodules {tag}") + print(f"s {name:>20} {atag} is out of sync with .gitmodules {tag}") testfails += 1 elif tag: - print(f"Submodule {name} at tag {tag}") + print(f" {name:>20} at tag {tag}") else: print( - f"Submodule {name} has no tag defined in .gitmodules, module at {atag}" + f"e {name:>20} has no tag defined in .gitmodules, module at {atag}" ) testfails += 1 status = git.git_operation("status","--ignore-submodules") if "nothing to 
commit" not in status: - print(textwrap.indent(status,' ')) + print('M'+textwrap.indent(status,' ')) return testfails From c21372a088fa6a72c01397eb8c955b5e5cf411d8 Mon Sep 17 00:00:00 2001 From: James Edwards Date: Thu, 1 Feb 2024 13:04:22 -0700 Subject: [PATCH 057/161] check for permission in fleximod.log file --- src/git-fleximod | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/src/git-fleximod b/src/git-fleximod index f80d359c76..cbfa5f4f3d 100755 --- a/src/git-fleximod +++ b/src/git-fleximod @@ -125,6 +125,10 @@ def commandline_arguments(args=None): handlers=[logging.StreamHandler()] if options.debug: + try: + open("fleximod.log","w") + except PermissionError: + sys.exit("ABORT: Could not write file fleximod.log") level = logging.DEBUG handlers.append(logging.FileHandler("fleximod.log")) elif options.verbose: From 07eb5dd3edcde5bfd1f125c85d6935bd0fa3b3cc Mon Sep 17 00:00:00 2001 From: James Edwards Date: Thu, 1 Feb 2024 14:02:34 -0700 Subject: [PATCH 058/161] fix issue with ssh submodule checkout --- src/git-fleximod | 19 ++++++++++++++++++- 1 file changed, 18 insertions(+), 1 deletion(-) diff --git a/src/git-fleximod b/src/git-fleximod index cbfa5f4f3d..2ebb2e5378 100755 --- a/src/git-fleximod +++ b/src/git-fleximod @@ -236,11 +236,28 @@ def submodule_checkout(root, name, path, url=None, tag=None): tmpurl = url url = url.replace("git@github.com:", "https://github.com") git.git_operation("clone", url, path) - smgit = GitInterface(path, logger) + smgit = GitInterface(repodir, logger) if not tag: tag = smgit.git_operation("describe", "--tags", "--always").rstrip() smgit.git_operation("checkout",tag) # Now need to move the .git dir to the submodule location + rootdotgit = os.path.join(root,".git") + if os.path.isfile(rootdotgit): + with open(rootdotgit) as f: + line = f.readline() + if line.startswith("gitdir: "): + rootdotgit = line[8:].rstrip() + + newpath = os.path.abspath(os.path.join(root,rootdotgit,"modules",path)) + print(f"root is {root} rootdotgit is {rootdotgit} newpath is {newpath}") + if not os.path.isdir(os.path.join(newpath,os.pardir)): + os.makedirs(os.path.abspath(os.path.join(newpath,os.pardir))) + + shutil.move(os.path.join(repodir,".git"), newpath) + with open(os.path.join(repodir,".git"), "w") as f: + f.write("gitdir: "+newpath) + + if not tmpurl: From a12a5b051580b5f26048ab87ffe0521e4ba135ef Mon Sep 17 00:00:00 2001 From: James Edwards Date: Thu, 1 Feb 2024 14:22:41 -0700 Subject: [PATCH 059/161] add --force option and cowardly refusal --- src/git-fleximod | 41 +++++++++++++++++++++++++++++++---------- 1 file changed, 31 insertions(+), 10 deletions(-) diff --git a/src/git-fleximod b/src/git-fleximod index 2ebb2e5378..5489ae164f 100755 --- a/src/git-fleximod +++ b/src/git-fleximod @@ -60,6 +60,13 @@ def commandline_arguments(args=None): nargs="*", help="Component(s) listed in the gitmodules file which should be ignored.", ) + parser.add_argument( + "-f", + "--force", + action="store_true", + default=False, + help="Override cautions and update or checkout over locally modified repository." 
+ ) parser.add_argument( "-o", @@ -152,6 +159,7 @@ def commandline_arguments(args=None): options.components, options.exclude, options.verbose, + options.force, action, ) @@ -281,6 +289,7 @@ def submodule_checkout(root, name, path, url=None, tag=None): def submodules_status(gitmodules, root_dir): testfails = 0 + localmods = 0 for name in gitmodules.sections(): path = gitmodules.get(name, "path") tag = gitmodules.get(name, "fxtag") @@ -321,11 +330,17 @@ def submodules_status(gitmodules, root_dir): status = git.git_operation("status","--ignore-submodules") if "nothing to commit" not in status: + localmods = localmods+1 print('M'+textwrap.indent(status,' ')) - return testfails + return testfails, localmods -def submodules_update(gitmodules, root_dir): +def submodules_update(gitmodules, root_dir, force): + _,localmods = submodules_status(gitmodules, root_dir) + print("") + if localmods and not force: + print(f"Repository has local mods, cowardly refusing to continue, fix issues or use --force to override") + return for name in gitmodules.sections(): fxtag = gitmodules.get(name, "fxtag") path = gitmodules.get(name, "path") @@ -358,15 +373,20 @@ def submodules_update(gitmodules, root_dir): git.git_operation("fetch", newremote, "--tags") atag = git.git_operation("describe", "--tags", "--always").rstrip() if fxtag and fxtag != atag: - print(f"Updating {name} to {fxtag}") + print(f"{name:>20} updated to {fxtag}") git.git_operation("checkout", fxtag) elif not fxtag: - print(f"No fxtag found for submodule {name}") + print(f"No fxtag found for submodule {name:>20}") else: - print(f"submodule {name} up to date.") + print(f"{name:>20} up to date.") -def submodules_checkout(gitmodules, root_dir, requiredlist): +def submodules_checkout(gitmodules, root_dir, requiredlist, force): + _,localmods = submodules_status(gitmodules, root_dir) + print("") + if localmods and not force: + print(f"Repository has local mods, cowardly refusing to continue, fix issues or use --force to override") + return for name in gitmodules.sections(): fxrequired = gitmodules.get(name, "fxrequired") fxsparse = gitmodules.get(name, "fxsparse") @@ -393,7 +413,7 @@ def submodules_checkout(gitmodules, root_dir, requiredlist): def submodules_test(gitmodules, root_dir): # First check that fxtags are present and in sync with submodule hashes - testfails = submodules_status(gitmodules, root_dir) + testfails,localmods = submodules_status(gitmodules, root_dir) # Then make sure that urls are consistant with fxurls (not forks and not ssh) # and that sparse checkout files exist for name in gitmodules.sections(): @@ -407,7 +427,7 @@ def submodules_test(gitmodules, root_dir): if fxsparse and not os.path.isfile(os.path.join(root_dir, path, fxsparse)): print(f"sparse submodule {name} sparse checkout file {fxsparse} not found") testfails += 1 - return testfails + return testfails+localmods @@ -419,6 +439,7 @@ def _main_func(): includelist, excludelist, verbose, + force, action, ) = commandline_arguments() # Get a logger for the package @@ -445,9 +466,9 @@ def _main_func(): ) retval = 0 if action == "update": - submodules_update(gitmodules, root_dir) + submodules_update(gitmodules, root_dir, force) elif action == "checkout": - submodules_checkout(gitmodules, root_dir, fxrequired) + submodules_checkout(gitmodules, root_dir, fxrequired, force) elif action == "status": submodules_status(gitmodules, root_dir) elif action == "test": From ce22059f0f860c9e9f0961ee2637d21d967facfb Mon Sep 17 00:00:00 2001 From: James Edwards Date: Thu, 1 Feb 2024 14:37:08 -0700 
Subject: [PATCH 060/161] start at root git dir unless --path flag is used --- src/git-fleximod | 18 ++++++++++++++++-- 1 file changed, 16 insertions(+), 2 deletions(-) diff --git a/src/git-fleximod b/src/git-fleximod index 5489ae164f..d0e6149f28 100755 --- a/src/git-fleximod +++ b/src/git-fleximod @@ -5,6 +5,7 @@ import shutil import logging import argparse import textwrap +from pathlib import Path from fleximod import utils from fleximod.gitinterface import GitInterface from fleximod.gitmodules import GitModules @@ -13,6 +14,17 @@ from configparser import NoOptionError # logger variable is global logger = None +def find_root_dir(filename=".git"): + d = Path.cwd() + root = Path(d.root) + while d != root: + attempt = d / filename + if attempt.is_dir(): + return attempt + d = d.parent + return None + + def commandline_arguments(args=None): description = """ %(prog)s manages checking out groups of gitsubmodules with addtional support for Earth System Models @@ -42,8 +54,8 @@ def commandline_arguments(args=None): parser.add_argument( "-C", "--path", - default=os.getcwd(), - help="Toplevel repository directory. Defaults to current directory.", + default=find_root_dir(), + help="Toplevel repository directory. Defaults to top git directory relative to current.", ) parser.add_argument( @@ -430,6 +442,7 @@ def submodules_test(gitmodules, root_dir): return testfails+localmods + def _main_func(): ( @@ -455,6 +468,7 @@ def _main_func(): utils.fatal_error( "No {} found in {} or any of it's parents".format(file_name, root_dir) ) + root_dir = os.path.dirname(file_path) logger.info(f"root_dir is {root_dir}") gitmodules = GitModules( From 8b7267959971c16d66803d92faa833055a8ff213 Mon Sep 17 00:00:00 2001 From: James Edwards Date: Thu, 1 Feb 2024 14:43:10 -0700 Subject: [PATCH 061/161] remove test from visable options --- src/git-fleximod | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/git-fleximod b/src/git-fleximod index d0e6149f28..da8431a174 100755 --- a/src/git-fleximod +++ b/src/git-fleximod @@ -41,7 +41,7 @@ def commandline_arguments(args=None): "action", choices=choices, default="checkout", - help=f"Subcommand of fleximod, choices are {choices}", + help=f"Subcommand of fleximod, choices are {choices[:-1]}", ) parser.add_argument( From a990864cef359b6a2f6e9a75f420ec05f669f894 Mon Sep 17 00:00:00 2001 From: Negin Sobhani Date: Thu, 1 Feb 2024 17:34:21 -0700 Subject: [PATCH 062/161] update setup.cfg --- setup.cfg | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/setup.cfg b/setup.cfg index 9a13827a35..599646552c 100644 --- a/setup.cfg +++ b/setup.cfg @@ -2,4 +2,7 @@ license_file = License [build_manpages] manpages = - man/git-fleximod.1:object=parser:pyfile=bin/git-fleximod + # solution 1: --split up the commandline_arguments parser + #man/git-fleximod.1:function:commandline_arguments:pyfile=src/git-fleximod + # solution 2 -- make parser global + man/git-fleximod.2:pyfile=src/git-fleximod:object=parser From d2123b6022ad648ca78241a155fed93d13642ff4 Mon Sep 17 00:00:00 2001 From: Negin Sobhani Date: Thu, 1 Feb 2024 17:34:43 -0700 Subject: [PATCH 063/161] updates for setup.py --- setup.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/setup.py b/setup.py index f68a4ea9b7..53cc4e43ea 100644 --- a/setup.py +++ b/setup.py @@ -2,6 +2,8 @@ import os from setuptools import setup, find_packages from distutils.util import convert_path +from setuptools.command.build_py import build_py +from setuptools.command.install import install from 
build_manpages import build_manpages, get_build_py_cmd, get_install_cmd with open("README.md", "r") as fh: @@ -14,7 +16,8 @@ setuptools.setup( - scripts=["src/git-fleximod"], # This is the name of the package + name="git-fleximod", # package name + #scripts=["src/git-fleximod"], # This is the name of the package version=main_ns['__version__'], # The initial release version author="Jim Edwards", # Full name of the author maintainer="jedwards4b", @@ -38,6 +41,6 @@ # Re-define build_py and install commands so the manual pages # are automatically re-generated and installed 'build_py': get_build_py_cmd(), - 'install': get_install_cmd(), + 'install': get_install_cmd(install), } ) From 89dfbbe5e6a6d2ba532d4ec8a05ddd83ed3abb79 Mon Sep 17 00:00:00 2001 From: Negin Sobhani Date: Thu, 1 Feb 2024 17:39:19 -0700 Subject: [PATCH 064/161] setup.cfg --- setup.cfg | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.cfg b/setup.cfg index 599646552c..8df52e87a1 100644 --- a/setup.cfg +++ b/setup.cfg @@ -2,7 +2,7 @@ license_file = License [build_manpages] manpages = - # solution 1: --split up the commandline_arguments parser + # solution 1: --split up the commandline_arguments parser --better #man/git-fleximod.1:function:commandline_arguments:pyfile=src/git-fleximod # solution 2 -- make parser global man/git-fleximod.2:pyfile=src/git-fleximod:object=parser From 3fdff62ff545bf6fda5c7e475e7fc2c98f1e1f12 Mon Sep 17 00:00:00 2001 From: Negin Sobhani Date: Thu, 1 Feb 2024 17:47:53 -0700 Subject: [PATCH 065/161] pointing to the better solution --- setup.cfg | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/setup.cfg b/setup.cfg index 8df52e87a1..7bd0d34d12 100644 --- a/setup.cfg +++ b/setup.cfg @@ -2,7 +2,7 @@ license_file = License [build_manpages] manpages = - # solution 1: --split up the commandline_arguments parser --better - #man/git-fleximod.1:function:commandline_arguments:pyfile=src/git-fleximod + # solution 1: --split up the commandline_arguments parser + man/git-fleximod.1:function:commandline_arguments:pyfile=src/git-fleximod # solution 2 -- make parser global - man/git-fleximod.2:pyfile=src/git-fleximod:object=parser + #man/git-fleximod.2:pyfile=src/git-fleximod:object=parser From 49e8fadbf834e1c03effd933e7bd5698b7555cc0 Mon Sep 17 00:00:00 2001 From: Negin Sobhani Date: Thu, 1 Feb 2024 18:11:22 -0700 Subject: [PATCH 066/161] minimal changes required --- src/git-fleximod | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/src/git-fleximod b/src/git-fleximod index d0e6149f28..26c00f5512 100755 --- a/src/git-fleximod +++ b/src/git-fleximod @@ -24,8 +24,7 @@ def find_root_dir(filename=".git"): d = d.parent return None - -def commandline_arguments(args=None): +def get_parser(): description = """ %(prog)s manages checking out groups of gitsubmodules with addtional support for Earth System Models """ @@ -127,6 +126,11 @@ def commandline_arguments(args=None): "information to the screen and log file.", ) + return parser + +def commandline_arguments(args=None): + parser = get_parser() + if args: options = parser.parse_args(args) else: From 8c09b5b016629660613f0c0acb3889c5b8d5209b Mon Sep 17 00:00:00 2001 From: Negin Sobhani Date: Thu, 1 Feb 2024 18:22:59 -0700 Subject: [PATCH 067/161] updates to setup.cfg --- setup.cfg | 3 --- 1 file changed, 3 deletions(-) diff --git a/setup.cfg b/setup.cfg index 7bd0d34d12..f6b737006f 100644 --- a/setup.cfg +++ b/setup.cfg @@ -2,7 +2,4 @@ license_file = License [build_manpages] manpages = - # solution 1: --split 
up the commandline_arguments parser man/git-fleximod.1:function:commandline_arguments:pyfile=src/git-fleximod - # solution 2 -- make parser global - #man/git-fleximod.2:pyfile=src/git-fleximod:object=parser From a9bd103363ec9fe041daf4ee36bc915fe7209e15 Mon Sep 17 00:00:00 2001 From: Negin Sobhani Date: Thu, 1 Feb 2024 18:23:22 -0700 Subject: [PATCH 068/161] minor changes --- setup.cfg | 3 +++ 1 file changed, 3 insertions(+) diff --git a/setup.cfg b/setup.cfg index f6b737006f..7bd0d34d12 100644 --- a/setup.cfg +++ b/setup.cfg @@ -2,4 +2,7 @@ license_file = License [build_manpages] manpages = + # solution 1: --split up the commandline_arguments parser man/git-fleximod.1:function:commandline_arguments:pyfile=src/git-fleximod + # solution 2 -- make parser global + #man/git-fleximod.2:pyfile=src/git-fleximod:object=parser From 3b82247ef72c9c3f2f9e4d5697bbb735bb7181d2 Mon Sep 17 00:00:00 2001 From: Negin Sobhani Date: Thu, 1 Feb 2024 18:26:19 -0700 Subject: [PATCH 069/161] fix syntax and new function name --- setup.cfg | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/setup.cfg b/setup.cfg index 7bd0d34d12..66ff24ada6 100644 --- a/setup.cfg +++ b/setup.cfg @@ -2,7 +2,7 @@ license_file = License [build_manpages] manpages = - # solution 1: --split up the commandline_arguments parser - man/git-fleximod.1:function:commandline_arguments:pyfile=src/git-fleximod + # solution 1: -- use parser function + man/git-fleximod.1:function=get_parser:pyfile=src/git-fleximod # solution 2 -- make parser global #man/git-fleximod.2:pyfile=src/git-fleximod:object=parser From 23e0813eb3340dd410e989a48a7b753a69d0ae74 Mon Sep 17 00:00:00 2001 From: James Edwards Date: Fri, 2 Feb 2024 07:21:14 -0700 Subject: [PATCH 070/161] names were too similar, force needs to be optional --- src/git-fleximod | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/git-fleximod b/src/git-fleximod index 1a55e91689..984ae9e1de 100755 --- a/src/git-fleximod +++ b/src/git-fleximod @@ -241,7 +241,7 @@ def submodule_sparse_checkout(root_dir, name, url, path, sparsefile, tag="master print(f"Successfully checked out {name}") -def submodule_checkout(root, name, path, url=None, tag=None): +def single_submodule_checkout(root, name, path, url=None, tag=None, force=False): git = GitInterface(root, logger) repodir = os.path.join(root, path) if os.path.exists(os.path.join(repodir, ".git")): @@ -291,7 +291,7 @@ def submodule_checkout(root, name, path, url=None, tag=None): # recursively handle this checkout print(f"Recursively checking out submodules of {name} {repodir} {url}") gitmodules = GitModules(logger,confpath=repodir) - submodules_checkout(gitmodules, repodir, ["I:T"]) + submodules_checkout(gitmodules, repodir, ["I:T"], force=force) if os.path.exists(os.path.join(repodir, ".git")): print(f"Successfully checked out {name}") else: @@ -397,7 +397,7 @@ def submodules_update(gitmodules, root_dir, force): print(f"{name:>20} up to date.") -def submodules_checkout(gitmodules, root_dir, requiredlist, force): +def submodules_checkout(gitmodules, root_dir, requiredlist, force=False): _,localmods = submodules_status(gitmodules, root_dir) print("") if localmods and not force: @@ -425,7 +425,7 @@ def submodules_checkout(gitmodules, root_dir, requiredlist, force): "Calling submodule_checkout({},{},{})".format(root_dir, name, path) ) - submodule_checkout(root_dir, name, path, url=url, tag=fxtag) + single_submodule_checkout(root_dir, name, path, url=url, tag=fxtag, force=force) def submodules_test(gitmodules, root_dir): 
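As an aside, a sketch (not part of the patch) of how the test action is meant to be consumed: because _main_func() now returns the accumulated failure count and the script exits with it, a workflow protecting a branch can run the action and key off the exit status alone. This assumes git-fleximod is installed on PATH and is invoked from inside the model checkout.

```python
# Sketch only; "git fleximod test" exits non-zero when submodule hashes,
# fxtags, fxurls, or sparse-checkout files are found to be inconsistent.
import subprocess

result = subprocess.run(["git", "fleximod", "test"])
if result.returncode != 0:
    raise SystemExit("submodule consistency check failed")
```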
# First check that fxtags are present and in sync with submodule hashes From ae4c9c5962fe0ddbcc8043d9a647115dfb7e89e8 Mon Sep 17 00:00:00 2001 From: James Edwards Date: Fri, 2 Feb 2024 09:30:33 -0700 Subject: [PATCH 071/161] more cleanup --- setup.py | 12 ++++++++---- src/git-fleximod | 12 ++++++++---- 2 files changed, 16 insertions(+), 8 deletions(-) diff --git a/setup.py b/setup.py index 53cc4e43ea..259f9589cf 100644 --- a/setup.py +++ b/setup.py @@ -1,5 +1,8 @@ -import setuptools +import sys import os +sys.path.insert(0,os.path.join(os.getenv("CONDA_PREFIX"),"lib","python3.12","site-packages")) + +import setuptools from setuptools import setup, find_packages from distutils.util import convert_path from setuptools.command.build_py import build_py @@ -17,7 +20,7 @@ setuptools.setup( name="git-fleximod", # package name - #scripts=["src/git-fleximod"], # This is the name of the package + scripts=["src/git-fleximod"], # This is the name of the package version=main_ns['__version__'], # The initial release version author="Jim Edwards", # Full name of the author maintainer="jedwards4b", @@ -25,8 +28,9 @@ description="Extended support for git-submodule and git-sparse-checkout", long_description=long_description, # Long description read from the the readme file long_description_content_type="text/markdown", - packages=['fleximod'], # List of all python modules to be installed - package_dir={'fleximod': 'src/fleximod'}, + packages=find_packages(), # List of all python modules to be installed + package_dir={'git-fleximod': 'src', + 'fleximod': 'src/fleximod'}, package_data={"":['version.txt']}, classifiers=[ "Programming Language :: Python :: 3", diff --git a/src/git-fleximod b/src/git-fleximod index 984ae9e1de..0706a56236 100755 --- a/src/git-fleximod +++ b/src/git-fleximod @@ -273,7 +273,6 @@ def single_submodule_checkout(root, name, path, url=None, tag=None, force=False) rootdotgit = line[8:].rstrip() newpath = os.path.abspath(os.path.join(root,rootdotgit,"modules",path)) - print(f"root is {root} rootdotgit is {rootdotgit} newpath is {newpath}") if not os.path.isdir(os.path.join(newpath,os.pardir)): os.makedirs(os.path.abspath(os.path.join(newpath,os.pardir))) @@ -321,14 +320,17 @@ def submodules_status(gitmodules, root_dir): tags = rootgit.git_operation("ls-remote","--tags",url) atag = None for htag in tags.split('\n'): - if tag in htag: + if tag and tag in htag: atag = (htag.split()[1])[10:] break - if tag == atag: + if tag and tag == atag: print(f"e {name:>20} not checked out, aligned at tag {tag}") - else: + elif tag: print(f"e {name:>20} not checked out, out of sync at tag {atag}, expected tag is {tag}") testfails += 1 + else: + print(f"e {name:>20} has no fxtag defined in .gitmodules") + testfails +=1 else: with utils.pushd(newpath): git = GitInterface(newpath, logger) @@ -482,6 +484,8 @@ def _main_func(): includelist=includelist, excludelist=excludelist, ) + if not gitmodules.sections(): + sys.exit("No submodule components found") retval = 0 if action == "update": submodules_update(gitmodules, root_dir, force) From 140529afedf13f88ba3673526917d6dd27750c3e Mon Sep 17 00:00:00 2001 From: James Edwards Date: Fri, 2 Feb 2024 10:00:15 -0700 Subject: [PATCH 072/161] pre-commit cleanup --- setup.py | 51 ++++++------ src/fleximod/gitinterface.py | 15 ++-- src/fleximod/gitmodules.py | 17 ++-- src/fleximod/lstripreader.py | 7 +- src/fleximod/version.py | 2 +- src/git-fleximod | 150 +++++++++++++++++++---------------- 6 files changed, 129 insertions(+), 113 deletions(-) diff --git a/setup.py 
b/setup.py index 259f9589cf..8f8a49cbec 100644 --- a/setup.py +++ b/setup.py @@ -1,11 +1,13 @@ import sys import os -sys.path.insert(0,os.path.join(os.getenv("CONDA_PREFIX"),"lib","python3.12","site-packages")) + +sys.path.insert( + 0, os.path.join(os.getenv("CONDA_PREFIX"), "lib", "python3.12", "site-packages") +) import setuptools -from setuptools import setup, find_packages -from distutils.util import convert_path -from setuptools.command.build_py import build_py +from setuptools import find_packages +from setuptools.util import convert_path from setuptools.command.install import install from build_manpages import build_manpages, get_build_py_cmd, get_install_cmd @@ -13,38 +15,37 @@ long_description = fh.read() main_ns = {} -ver_path = convert_path('src/fleximod/version.py') +ver_path = convert_path("src/fleximod/version.py") with open(ver_path) as ver_file: exec(ver_file.read(), main_ns) - + setuptools.setup( - name="git-fleximod", # package name - scripts=["src/git-fleximod"], # This is the name of the package - version=main_ns['__version__'], # The initial release version - author="Jim Edwards", # Full name of the author + name="git-fleximod", # package name + scripts=["src/git-fleximod"], # This is the name of the package + version=main_ns["__version__"], # The initial release version + author="Jim Edwards", # Full name of the author maintainer="jedwards4b", license="MIT License", description="Extended support for git-submodule and git-sparse-checkout", - long_description=long_description, # Long description read from the the readme file + long_description=long_description, # Long description read from the the readme file long_description_content_type="text/markdown", - packages=find_packages(), # List of all python modules to be installed - package_dir={'git-fleximod': 'src', - 'fleximod': 'src/fleximod'}, - package_data={"":['version.txt']}, + packages=find_packages(), # List of all python modules to be installed + package_dir={"git-fleximod": "src", "fleximod": "src/fleximod"}, + package_data={"": ["version.txt"]}, classifiers=[ "Programming Language :: Python :: 3", "License :: OSI Approved :: MIT License", "Operating System :: OS Independent", - ], # Information to filter the project on PyPi website - python_requires='>=3.6', # Minimum version requirement of the package -# py_modules=['git-fleximod'], # Name of the python package - install_requires=["GitPython"], # Install other dependencies if any + ], # Information to filter the project on PyPi website + python_requires=">=3.6", # Minimum version requirement of the package + # py_modules=['git-fleximod'], # Name of the python package + install_requires=["GitPython"], # Install other dependencies if any cmdclass={ - 'build_manpages': build_manpages, - # Re-define build_py and install commands so the manual pages - # are automatically re-generated and installed - 'build_py': get_build_py_cmd(), - 'install': get_install_cmd(install), - } + "build_manpages": build_manpages, + # Re-define build_py and install commands so the manual pages + # are automatically re-generated and installed + "build_py": get_build_py_cmd(), + "install": get_install_cmd(install), + }, ) diff --git a/src/fleximod/gitinterface.py b/src/fleximod/gitinterface.py index c127163dc6..96bb9ac52b 100644 --- a/src/fleximod/gitinterface.py +++ b/src/fleximod/gitinterface.py @@ -1,7 +1,7 @@ import os -import logging from fleximod import utils + class GitInterface: def __init__(self, repo_path, logger): logger.debug("Initialize GitInterface for {}".format(repo_path)) 
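The hunks that follow only reformat GitInterface, but they make its central pattern easier to see: use GitPython when it can be imported, and fall back to shelling out to the git command line otherwise. A reduced sketch of that pattern (illustrative, not the class itself):

```python
import subprocess

def run_git(repo_path, operation, *args):
    """Run a git operation via GitPython if available, else via the git CLI."""
    try:
        import git  # GitPython, treated here as an optional dependency
        repo = git.Repo(repo_path)
        return getattr(repo.git, operation)(*args)
    except ImportError:
        cmd = ["git", "-C", repo_path, operation] + list(args)
        return subprocess.run(
            cmd, capture_output=True, text=True, check=True
        ).stdout

# e.g. run_git(".", "describe", "--tags", "--always")
```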
@@ -9,6 +9,7 @@ def __init__(self, repo_path, logger): self.logger = logger try: import git + self._use_module = True try: self.repo = git.Repo(repo_path) # Initialize GitPython repo @@ -18,17 +19,17 @@ def __init__(self, repo_path, logger): msg = "Using GitPython interface to git" except ImportError: self._use_module = False - if not os.path.exists(os.path.join(repo_path,".git")): + if not os.path.exists(os.path.join(repo_path, ".git")): self._init_git_repo() msg = "Using shell interface to git" self.logger.info(msg) - + def _git_command(self, operation, *args): self.logger.info(operation) if self._use_module and operation != "submodule": return getattr(self.repo.git, operation)(*args) else: - return ["git", "-C",self.repo_path, operation] + list(args) + return ["git", "-C", self.repo_path, operation] + list(args) def _init_git_repo(self): if self._use_module: @@ -37,7 +38,7 @@ def _init_git_repo(self): command = ("git", "-C", self.repo_path, "init") utils.execute_subprocess(command) - + # pylint: disable=unused-argument def git_operation(self, operation, *args, **kwargs): command = self._git_command(operation, *args) self.logger.info(command) @@ -51,7 +52,7 @@ def config_get_value(self, section, name): config = self.repo.config_reader() return config.get_value(section, name) else: - cmd = ("git","-C",self.repo_path,"config", "--get", f"{section}.{name}") + cmd = ("git", "-C", self.repo_path, "config", "--get", f"{section}.{name}") output = utils.execute_subprocess(cmd, output_to_caller=True) return output.strip() @@ -61,6 +62,6 @@ def config_set_value(self, section, name, value): writer.set_value(section, name, value) writer.release() # Ensure changes are saved else: - cmd = ("git","-C",self.repo_path,"config", f"{section}.{name}", value) + cmd = ("git", "-C", self.repo_path, "config", f"{section}.{name}", value) self.logger.info(cmd) utils.execute_subprocess(cmd, output_to_caller=True) diff --git a/src/fleximod/gitmodules.py b/src/fleximod/gitmodules.py index a6f7589319..11a2457247 100644 --- a/src/fleximod/gitmodules.py +++ b/src/fleximod/gitmodules.py @@ -3,6 +3,7 @@ from configparser import ConfigParser from fleximod.lstripreader import LstripReader + class GitModules(ConfigParser): def __init__( self, @@ -19,11 +20,15 @@ def __init__( excludelist: Optional list of submodules to exclude. """ self.logger = logger - self.logger.debug("Creating a GitModules object {} {} {} {}".format(confpath,conffile,includelist,excludelist)) + self.logger.debug( + "Creating a GitModules object {} {} {} {}".format( + confpath, conffile, includelist, excludelist + ) + ) ConfigParser.__init__(self) self.conf_file = os.path.join(confpath, conffile) # first create a backup of this file to be restored on deletion of the object - shutil.copy(self.conf_file, self.conf_file+".save") + shutil.copy(self.conf_file, self.conf_file + ".save") self.read_file(LstripReader(self.conf_file), source=conffile) self.includelist = includelist self.excludelist = excludelist @@ -34,7 +39,7 @@ def set(self, name, option, value): Ensures the appropriate section exists for the submodule. Calls the parent class's set method to store the value. """ - self.logger.debug("set called {} {} {}".format(name,option,value)) + self.logger.debug("set called {} {} {}".format(name, option, value)) section = f'submodule "{name}"' if not self.has_section(section): self.add_section(section) @@ -47,7 +52,7 @@ def get(self, name, option, raw=False, vars=None, fallback=None): Uses the parent class's get method to access the value. 
Handles potential errors if the section or option doesn't exist. """ - self.logger.debug("get called {} {}".format(name,option)) + self.logger.debug("get called {} {}".format(name, option)) section = f'submodule "{name}"' try: return ConfigParser.get( @@ -62,8 +67,8 @@ def save(self): def __del__(self): self.logger.debug("Destroying GitModules object") - shutil.move(self.conf_file+".save", self.conf_file) - + shutil.move(self.conf_file + ".save", self.conf_file) + def sections(self): """Strip the submodule part out of section and just use the name""" self.logger.debug("calling GitModules sections iterator") diff --git a/src/fleximod/lstripreader.py b/src/fleximod/lstripreader.py index 530abd297e..01d5580ee8 100644 --- a/src/fleximod/lstripreader.py +++ b/src/fleximod/lstripreader.py @@ -1,8 +1,8 @@ - class LstripReader(object): "LstripReader formats .gitmodules files to be acceptable for configparser" + def __init__(self, filename): - with open(filename, 'r') as infile: + with open(filename, "r") as infile: lines = infile.readlines() self._lines = list() self._num_lines = len(lines) @@ -19,7 +19,7 @@ def readline(self, size=-1): try: line = self.next() except StopIteration: - line = '' + line = "" if (size > 0) and (len(line) < size): return line[0:size] @@ -41,4 +41,3 @@ def next(self): def __next__(self): return self.next() - diff --git a/src/fleximod/version.py b/src/fleximod/version.py index 7fd229a32b..d3ec452c31 100644 --- a/src/fleximod/version.py +++ b/src/fleximod/version.py @@ -1 +1 @@ -__version__ = '0.2.0' +__version__ = "0.2.0" diff --git a/src/git-fleximod b/src/git-fleximod index 0706a56236..aa587ea3a9 100755 --- a/src/git-fleximod +++ b/src/git-fleximod @@ -11,9 +11,11 @@ from fleximod.gitinterface import GitInterface from fleximod.gitmodules import GitModules from fleximod.version import __version__ from configparser import NoOptionError + # logger variable is global logger = None + def find_root_dir(filename=".git"): d = Path.cwd() root = Path(d.root) @@ -23,7 +25,8 @@ def find_root_dir(filename=".git"): return attempt d = d.parent return None - + + def get_parser(): description = """ %(prog)s manages checking out groups of gitsubmodules with addtional support for Earth System Models @@ -76,7 +79,7 @@ def get_parser(): "--force", action="store_true", default=False, - help="Override cautions and update or checkout over locally modified repository." 
+ help="Override cautions and update or checkout over locally modified repository.", ) parser.add_argument( @@ -128,6 +131,7 @@ def get_parser(): return parser + def commandline_arguments(args=None): parser = get_parser() @@ -136,7 +140,7 @@ def commandline_arguments(args=None): else: options = parser.parse_args() -# explicitly listing a component overrides the optional flag + # explicitly listing a component overrides the optional flag if options.optional or options.components: fxrequired = ["T:T", "T:F", "I:T"] else: @@ -145,11 +149,11 @@ def commandline_arguments(args=None): action = options.action if not action: action = "checkout" - handlers=[logging.StreamHandler()] + handlers = [logging.StreamHandler()] if options.debug: try: - open("fleximod.log","w") + open("fleximod.log", "w") except PermissionError: sys.exit("ABORT: Could not write file fleximod.log") level = logging.DEBUG @@ -160,21 +164,18 @@ def commandline_arguments(args=None): level = logging.WARNING # Configure the root logger logging.basicConfig( - level=level, - format="%(name)s - %(levelname)s - %(message)s", - handlers=handlers + level=level, format="%(name)s - %(levelname)s - %(message)s", handlers=handlers ) - - if hasattr(options, 'version'): + + if hasattr(options, "version"): exit() - + return ( options.path, options.gitmodules, fxrequired, options.components, options.exclude, - options.verbose, options.force, action, ) @@ -191,7 +192,7 @@ def submodule_sparse_checkout(root_dir, name, url, path, sparsefile, tag="master # initialize a new git repo and set the sparse checkout flag sprep_repo = os.path.join(root_dir, path) sprepo_git = GitInterface(sprep_repo, logger) - if os.path.exists(os.path.join(sprep_repo,".git")): + if os.path.exists(os.path.join(sprep_repo, ".git")): try: logger.info("Submodule {} found".format(name)) chk = sprepo_git.config_get_value("core", "sparseCheckout") @@ -203,7 +204,6 @@ def submodule_sparse_checkout(root_dir, name, url, path, sparsefile, tag="master except Exception as e: utils.fatal_error("Unexpected error {} occured.".format(e)) - sprepo_git.config_set_value("core", "sparseCheckout", "true") # set the repository remote @@ -212,28 +212,31 @@ def submodule_sparse_checkout(root_dir, name, url, path, sparsefile, tag="master superroot = git.git_operation("rev-parse", "--show-superproject-working-tree") if os.path.isfile(os.path.join(root_dir, ".git")): with open(os.path.join(root_dir, ".git")) as f: - gitpath = os.path.abspath(os.path.join(root_dir,f.read().split()[1])) + gitpath = os.path.abspath(os.path.join(root_dir, f.read().split()[1])) topgit = os.path.abspath(os.path.join(gitpath, "modules")) - else: + else: topgit = os.path.abspath(os.path.join(root_dir, ".git", "modules")) if not os.path.isdir(topgit): os.makedirs(topgit) topgit = os.path.join(topgit, name) - logger.debug(f"root_dir is {root_dir} topgit is {topgit} superroot is {superroot}") + logger.debug( + "root_dir is {} topgit is {} superroot is {}".format( + root_dir, topgit, superroot + ) + ) + + if os.path.isdir(os.path.join(root_dir, path, ".git")): + shutil.move(os.path.join(root_dir, path, ".git"), topgit) + with open(os.path.join(root_dir, path, ".git"), "w") as f: + f.write("gitdir: " + os.path.relpath(topgit, os.path.join(root_dir, path))) - if os.path.isdir(os.path.join(root_dir,path,".git")): - shutil.move(os.path.join(root_dir,path, ".git"), topgit) - with open(os.path.join(root_dir,path, ".git"), "w") as f: - f.write("gitdir: " + os.path.relpath(topgit, os.path.join(root_dir,path))) - gitsparse = 
os.path.abspath(os.path.join(topgit, "info", "sparse-checkout")) if os.path.isfile(gitsparse): logger.warning("submodule {} is already initialized".format(name)) return - - shutil.copy(os.path.join(root_dir,path, sparsefile), gitsparse) + shutil.copy(os.path.join(root_dir, path, sparsefile), gitsparse) # Finally checkout the repo sprepo_git.git_operation("fetch", "--depth=1", "origin", "--tags") @@ -258,38 +261,35 @@ def single_submodule_checkout(root, name, path, url=None, tag=None, force=False) # it will be done by the GitModules class if url.startswith("git@"): tmpurl = url - url = url.replace("git@github.com:", "https://github.com") + url = url.replace("git@github.com:", "https://github.com") git.git_operation("clone", url, path) smgit = GitInterface(repodir, logger) if not tag: tag = smgit.git_operation("describe", "--tags", "--always").rstrip() - smgit.git_operation("checkout",tag) + smgit.git_operation("checkout", tag) # Now need to move the .git dir to the submodule location - rootdotgit = os.path.join(root,".git") + rootdotgit = os.path.join(root, ".git") if os.path.isfile(rootdotgit): with open(rootdotgit) as f: line = f.readline() if line.startswith("gitdir: "): rootdotgit = line[8:].rstrip() - - newpath = os.path.abspath(os.path.join(root,rootdotgit,"modules",path)) - if not os.path.isdir(os.path.join(newpath,os.pardir)): - os.makedirs(os.path.abspath(os.path.join(newpath,os.pardir))) - - shutil.move(os.path.join(repodir,".git"), newpath) - with open(os.path.join(repodir,".git"), "w") as f: - f.write("gitdir: "+newpath) - - - - + + newpath = os.path.abspath(os.path.join(root, rootdotgit, "modules", path)) + if not os.path.isdir(os.path.join(newpath, os.pardir)): + os.makedirs(os.path.abspath(os.path.join(newpath, os.pardir))) + + shutil.move(os.path.join(repodir, ".git"), newpath) + with open(os.path.join(repodir, ".git"), "w") as f: + f.write("gitdir: " + newpath) + if not tmpurl: logger.debug(git.git_operation("submodule", "update", "--init", "--", path)) if os.path.exists(os.path.join(repodir, ".gitmodules")): # recursively handle this checkout print(f"Recursively checking out submodules of {name} {repodir} {url}") - gitmodules = GitModules(logger,confpath=repodir) + gitmodules = GitModules(logger, confpath=repodir) submodules_checkout(gitmodules, repodir, ["I:T"], force=force) if os.path.exists(os.path.join(repodir, ".git")): print(f"Successfully checked out {name}") @@ -315,22 +315,23 @@ def submodules_status(gitmodules, root_dir): if not os.path.exists(os.path.join(newpath, ".git")): rootgit = GitInterface(root_dir, logger) # submodule commands use path, not name - nhash = (rootgit.git_operation("submodule","status",path).split()[0])[1:] url = gitmodules.get(name, "url") - tags = rootgit.git_operation("ls-remote","--tags",url) + tags = rootgit.git_operation("ls-remote", "--tags", url) atag = None - for htag in tags.split('\n'): + for htag in tags.split("\n"): if tag and tag in htag: atag = (htag.split()[1])[10:] break if tag and tag == atag: print(f"e {name:>20} not checked out, aligned at tag {tag}") elif tag: - print(f"e {name:>20} not checked out, out of sync at tag {atag}, expected tag is {tag}") + print( + f"e {name:>20} not checked out, out of sync at tag {atag}, expected tag is {tag}" + ) testfails += 1 else: print(f"e {name:>20} has no fxtag defined in .gitmodules") - testfails +=1 + testfails += 1 else: with utils.pushd(newpath): git = GitInterface(newpath, logger) @@ -346,24 +347,27 @@ def submodules_status(gitmodules, root_dir): ) testfails += 1 - status = 
git.git_operation("status","--ignore-submodules") + status = git.git_operation("status", "--ignore-submodules") if "nothing to commit" not in status: - localmods = localmods+1 - print('M'+textwrap.indent(status,' ')) - + localmods = localmods + 1 + print("M" + textwrap.indent(status, " ")) + return testfails, localmods + def submodules_update(gitmodules, root_dir, force): - _,localmods = submodules_status(gitmodules, root_dir) + _, localmods = submodules_status(gitmodules, root_dir) print("") if localmods and not force: - print(f"Repository has local mods, cowardly refusing to continue, fix issues or use --force to override") + print( + "Repository has local mods, cowardly refusing to continue, fix issues or use --force to override" + ) return for name in gitmodules.sections(): fxtag = gitmodules.get(name, "fxtag") path = gitmodules.get(name, "path") url = gitmodules.get(name, "url") - logger.info(f"name={name} path={path} url={url} fxtag={fxtag}") + logger.info("name={} path={} url={} fxtag={}".format(name, path, url, fxtag)) if os.path.exists(os.path.join(path, ".git")): submoddir = os.path.join(root_dir, path) with utils.pushd(submoddir): @@ -400,10 +404,12 @@ def submodules_update(gitmodules, root_dir, force): def submodules_checkout(gitmodules, root_dir, requiredlist, force=False): - _,localmods = submodules_status(gitmodules, root_dir) + _, localmods = submodules_status(gitmodules, root_dir) print("") if localmods and not force: - print(f"Repository has local mods, cowardly refusing to continue, fix issues or use --force to override") + print( + "Repository has local mods, cowardly refusing to continue, fix issues or use --force to override" + ) return for name in gitmodules.sections(): fxrequired = gitmodules.get(name, "fxrequired") @@ -416,22 +422,28 @@ def submodules_checkout(gitmodules, root_dir, requiredlist, force=False): if "T:F" == fxrequired: print("Skipping optional component {}".format(name)) continue - + if fxsparse: logger.debug( - f"Callng submodule_sparse_checkout({root_dir}, {name}, {url}, {path}, {fxsparse}, {fxtag}" + "Callng submodule_sparse_checkout({}, {}, {}, {}, {}, {}".format( + root_dir, name, url, path, fxsparse, fxtag + ) ) submodule_sparse_checkout(root_dir, name, url, path, fxsparse, tag=fxtag) else: logger.debug( "Calling submodule_checkout({},{},{})".format(root_dir, name, path) ) - - single_submodule_checkout(root_dir, name, path, url=url, tag=fxtag, force=force) + + single_submodule_checkout( + root_dir, name, path, url=url, tag=fxtag, force=force + ) + def submodules_test(gitmodules, root_dir): # First check that fxtags are present and in sync with submodule hashes - testfails,localmods = submodules_status(gitmodules, root_dir) + testfails, localmods = submodules_status(gitmodules, root_dir) + print("") # Then make sure that urls are consistant with fxurls (not forks and not ssh) # and that sparse checkout files exist for name in gitmodules.sections(): @@ -440,16 +452,14 @@ def submodules_test(gitmodules, root_dir): fxsparse = gitmodules.get(name, "fxsparse") path = gitmodules.get(name, "path") if not fxurl or url != fxurl: - print(f"submodule {name} url {url} not in sync with required {fxurl}") + print(f"{name:>20} url {url} not in sync with required {fxurl}") testfails += 1 if fxsparse and not os.path.isfile(os.path.join(root_dir, path, fxsparse)): - print(f"sparse submodule {name} sparse checkout file {fxsparse} not found") + print(f"{name:>20} sparse checkout file {fxsparse} not found") testfails += 1 - return testfails+localmods - + return 
testfails + localmods - def _main_func(): ( root_dir, @@ -457,15 +467,14 @@ def _main_func(): fxrequired, includelist, excludelist, - verbose, force, action, ) = commandline_arguments() # Get a logger for the package global logger logger = logging.getLogger(__name__) - - logger.info(f"action is {action}") + + logger.info("action is {}".format(action)) if not os.path.isfile(os.path.join(root_dir, file_name)): file_path = utils.find_upwards(root_dir, file_name) @@ -476,7 +485,7 @@ def _main_func(): ) root_dir = os.path.dirname(file_path) - logger.info(f"root_dir is {root_dir}") + logger.info("root_dir is {}".format(root_dir)) gitmodules = GitModules( logger, confpath=root_dir, @@ -497,7 +506,8 @@ def _main_func(): retval = submodules_test(gitmodules, root_dir) else: utils.fatal_error(f"unrecognized action request {action}") - return(retval) - + return retval + + if __name__ == "__main__": sys.exit(_main_func()) From 0a5cf50fb696f84ea5a71bcdb973a8972c14bb55 Mon Sep 17 00:00:00 2001 From: James Edwards Date: Fri, 2 Feb 2024 12:43:54 -0700 Subject: [PATCH 073/161] setup.py builds manpage but doesnt install --- .github/workflows/pre-commit | 13 ++++++ setup.py | 76 +++++++++++++++++++----------------- 2 files changed, 53 insertions(+), 36 deletions(-) create mode 100644 .github/workflows/pre-commit diff --git a/.github/workflows/pre-commit b/.github/workflows/pre-commit new file mode 100644 index 0000000000..1a6ad0082a --- /dev/null +++ b/.github/workflows/pre-commit @@ -0,0 +1,13 @@ +name: pre-commit +on: + pull_request: + push: + branches: [main] + +jobs: + pre-commit: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + - uses: actions/setup-python@v3 + - uses: pre-commit/action@v3.0.0 diff --git a/setup.py b/setup.py index 8f8a49cbec..048db75957 100644 --- a/setup.py +++ b/setup.py @@ -1,51 +1,55 @@ import sys import os +sys.path.insert(0, os.path.join(os.getenv("CONDA_PREFIX"), "lib", "python3.12")) sys.path.insert( 0, os.path.join(os.getenv("CONDA_PREFIX"), "lib", "python3.12", "site-packages") ) import setuptools -from setuptools import find_packages -from setuptools.util import convert_path -from setuptools.command.install import install +from setuptools import find_packages, convert_path from build_manpages import build_manpages, get_build_py_cmd, get_install_cmd -with open("README.md", "r") as fh: - long_description = fh.read() - main_ns = {} -ver_path = convert_path("src/fleximod/version.py") +ver_path = convert_path( + os.path.join(os.path.dirname(__file__), "src", "fleximod", "version.py") +) +print(f"ver_path is {ver_path}") with open(ver_path) as ver_file: exec(ver_file.read(), main_ns) +if __name__ == "__main__": + with open("README.md", "r") as fh: + long_description = fh.read() -setuptools.setup( - name="git-fleximod", # package name - scripts=["src/git-fleximod"], # This is the name of the package - version=main_ns["__version__"], # The initial release version - author="Jim Edwards", # Full name of the author - maintainer="jedwards4b", - license="MIT License", - description="Extended support for git-submodule and git-sparse-checkout", - long_description=long_description, # Long description read from the the readme file - long_description_content_type="text/markdown", - packages=find_packages(), # List of all python modules to be installed - package_dir={"git-fleximod": "src", "fleximod": "src/fleximod"}, - package_data={"": ["version.txt"]}, - classifiers=[ - "Programming Language :: Python :: 3", - "License :: OSI Approved :: MIT License", - "Operating System :: 
OS Independent", - ], # Information to filter the project on PyPi website - python_requires=">=3.6", # Minimum version requirement of the package - # py_modules=['git-fleximod'], # Name of the python package - install_requires=["GitPython"], # Install other dependencies if any - cmdclass={ - "build_manpages": build_manpages, - # Re-define build_py and install commands so the manual pages - # are automatically re-generated and installed - "build_py": get_build_py_cmd(), - "install": get_install_cmd(install), - }, -) + setuptools.setup( + name="git-fleximod", # package name + scripts=["src/git-fleximod"], # This is the name of the package + version=main_ns["__version__"], # The initial release version + author="Jim Edwards", # Full name of the author + maintainer="jedwards4b", + license="MIT License", + description="Extended support for git-submodule and git-sparse-checkout", + long_description=long_description, # Long description read from the the readme file + long_description_content_type="text/markdown", + packages=find_packages( + where="src" + ), # List of all python modules to be installed + package_dir={"": "src", "git-fleximod": "src", "fleximod": "src/fleximod"}, + package_data={"": ["src/fleximod/version.txt", "man/git-fleximod.1"]}, + classifiers=[ + "Programming Language :: Python :: 3", + "License :: OSI Approved :: MIT License", + "Operating System :: OS Independent", + ], # Information to filter the project on PyPi website + python_requires=">=3.6", # Minimum version requirement of the package + # py_modules=['git-fleximod'], # Name of the python package + install_requires=["GitPython"], # Install other dependencies if any + cmdclass={ + "build_manpages": build_manpages, + # Re-define build_py and install commands so the manual pages + # are automatically re-generated and installed + "build_py": get_build_py_cmd(), + "install": get_install_cmd(), + }, + ) From 64e26fc22df3c3faa7190eff289dce5accc88669 Mon Sep 17 00:00:00 2001 From: James Edwards Date: Fri, 2 Feb 2024 12:46:58 -0700 Subject: [PATCH 074/161] update version --- src/fleximod/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/fleximod/version.py b/src/fleximod/version.py index d3ec452c31..493f7415d7 100644 --- a/src/fleximod/version.py +++ b/src/fleximod/version.py @@ -1 +1 @@ -__version__ = "0.2.0" +__version__ = "0.3.0" From 2a2ab6503091b3336bf759a32c0874959a275dcd Mon Sep 17 00:00:00 2001 From: James Edwards Date: Fri, 2 Feb 2024 12:58:20 -0700 Subject: [PATCH 075/161] update README --- README.md | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/README.md b/README.md index cec74c5a66..7c968e70b0 100644 --- a/README.md +++ b/README.md @@ -17,7 +17,7 @@ Git-fleximod is a Python-based tool that extends Git's submodule capabilities, o Basic Usage: git fleximod [options] Available Commands: - install: Install submodules according to configuration. + checkout: Checkout submodules according to git submodule hash configuration. status: Display the status of submodules. update: Update submodules to the tag indicated in .gitmodules variable fxtag. test: Make sure that fxtags and submodule hashes are consistant, @@ -35,7 +35,7 @@ Git-fleximod is a Python-based tool that extends Git's submodule capabilities, o - I:T: Internal and required (always checked out). - I:F: Internal and optional (checked out with --optional flag). fxsparse: Enable sparse checkout for a submodule, pointing to a file containing sparse checkout paths. 
- fcurl + fxurl: This is the url used in the test subcommand to assure that protected branches do not point to forks ## Sparse Checkouts @@ -55,9 +55,9 @@ Git-fleximod is a Python-based tool that extends Git's submodule capabilities, o Here are some common usage examples: -Installing submodules, including optional ones: +Checkout submodules, including optional ones: ```bash - git fleximod install --optional + git fleximod checkout --optional ``` Updating a specific submodule to the fxtag indicated in .gitmodules: From ad0a976b38cd8de97fdce0d04050e412cf2e3905 Mon Sep 17 00:00:00 2001 From: James Edwards Date: Fri, 2 Feb 2024 17:15:26 -0700 Subject: [PATCH 076/161] add support for fxhash variable in .gitmodules --- src/git-fleximod | 72 +++++++++++++++++++++++++++++++++++++++--------- 1 file changed, 59 insertions(+), 13 deletions(-) diff --git a/src/git-fleximod b/src/git-fleximod index aa587ea3a9..5182bb256a 100755 --- a/src/git-fleximod +++ b/src/git-fleximod @@ -181,7 +181,9 @@ def commandline_arguments(args=None): ) -def submodule_sparse_checkout(root_dir, name, url, path, sparsefile, tag="master"): +def submodule_sparse_checkout( + root_dir, name, url, path, sparsefile, tag="master", fxhash=None +): # first create the module directory if not os.path.isdir(path): os.makedirs(path) @@ -239,12 +241,19 @@ def submodule_sparse_checkout(root_dir, name, url, path, sparsefile, tag="master shutil.copy(os.path.join(root_dir, path, sparsefile), gitsparse) # Finally checkout the repo - sprepo_git.git_operation("fetch", "--depth=1", "origin", "--tags") - sprepo_git.git_operation("checkout", tag) - print(f"Successfully checked out {name}") + if fxhash: + sprepo_git.git_operation("fetch", "origin", "--tags") + sprepo_git.git_operation("checkout", fxhash) + print(f"Successfully checked out {name:>20} at {fxhash}") + else: + sprepo_git.git_operation("fetch", "--depth=1", "origin", "--tags") + sprepo_git.git_operation("checkout", tag) + print(f"Successfully checked out {name:>20} at {tag}") -def single_submodule_checkout(root, name, path, url=None, tag=None, force=False): +def single_submodule_checkout( + root, name, path, url=None, tag=None, force=False, fxhash=None +): git = GitInterface(root, logger) repodir = os.path.join(root, path) if os.path.exists(os.path.join(repodir, ".git")): @@ -264,9 +273,12 @@ def single_submodule_checkout(root, name, path, url=None, tag=None, force=False) url = url.replace("git@github.com:", "https://github.com") git.git_operation("clone", url, path) smgit = GitInterface(repodir, logger) - if not tag: + if not tag and not fxhash: tag = smgit.git_operation("describe", "--tags", "--always").rstrip() - smgit.git_operation("checkout", tag) + if fxhash: + smgit.git_operation("checkout", fxhash) + else: + smgit.git_operation("checkout", tag) # Now need to move the .git dir to the submodule location rootdotgit = os.path.join(root, ".git") if os.path.isfile(rootdotgit): @@ -308,6 +320,9 @@ def submodules_status(gitmodules, root_dir): for name in gitmodules.sections(): path = gitmodules.get(name, "path") tag = gitmodules.get(name, "fxtag") + fxhash = gitmodules.get(name, "fxhash") + if tag and fxhash: + utils.fatal_error(f"{name:>20} cannot have both fxtag and fxhash") if not path: utils.fatal_error("No path found in .gitmodules for {}".format(name)) newpath = os.path.join(root_dir, path) @@ -329,21 +344,44 @@ def submodules_status(gitmodules, root_dir): f"e {name:>20} not checked out, out of sync at tag {atag}, expected tag is {tag}" ) testfails += 1 + elif fxhash: + n = 
len(fxhash) + smhash = rootgit.git_operation( + "ls-tree", "--object-only", f"--abbrev={n}", "HEAD", path + ) + if smhash == fxhash: + print(f" {name:>20} not checked out, aligned at hash {fxhash}") + else: + print( + f"s {name:>20} not checked out, out of sync at hash {smhash}, expected hash is {fxhash}" + ) + testfails += 1 else: - print(f"e {name:>20} has no fxtag defined in .gitmodules") + print(f"e {name:>20} has no fxtag nor fxhash defined in .gitmodules") testfails += 1 else: with utils.pushd(newpath): git = GitInterface(newpath, logger) atag = git.git_operation("describe", "--tags", "--always").rstrip() + ahash = git.git_operation("status").partition("\n")[0].split()[-1] if tag and atag != tag: print(f"s {name:>20} {atag} is out of sync with .gitmodules {tag}") testfails += 1 elif tag: print(f" {name:>20} at tag {tag}") + elif fxhash: + rootgit = GitInterface(root_dir, logger) + n = len(fxhash) + if ahash.startswith(fxhash): + print(f" {name:>20} at hash {fxhash}") + else: + print( + f"s {name:>20} {ahash} is out of sync with .gitmodules {fxhash}" + ) + testfails += 1 else: print( - f"e {name:>20} has no tag defined in .gitmodules, module at {atag}" + f"e {name:>20} has no fxtag nor fxhash defined in .gitmodules, module at {atag}" ) testfails += 1 @@ -365,6 +403,7 @@ def submodules_update(gitmodules, root_dir, force): return for name in gitmodules.sections(): fxtag = gitmodules.get(name, "fxtag") + fxhash = gitmodules.get(name, "fxhash") path = gitmodules.get(name, "path") url = gitmodules.get(name, "url") logger.info("name={} path={} url={} fxtag={}".format(name, path, url, fxtag)) @@ -394,11 +433,15 @@ def submodules_update(gitmodules, root_dir, force): if fxtag and fxtag not in tags: git.git_operation("fetch", newremote, "--tags") atag = git.git_operation("describe", "--tags", "--always").rstrip() + print(f"fxtag {fxtag} fxhash {fxhash}") if fxtag and fxtag != atag: print(f"{name:>20} updated to {fxtag}") git.git_operation("checkout", fxtag) - elif not fxtag: - print(f"No fxtag found for submodule {name:>20}") + elif fxhash: + print(f"{name:>20} updated to {fxhash}") + git.git_operation("checkout", fxhash) + elif not (fxtag or fxhash): + print(f"No fxtag nor fxhash found for submodule {name:>20}") else: print(f"{name:>20} up to date.") @@ -415,6 +458,7 @@ def submodules_checkout(gitmodules, root_dir, requiredlist, force=False): fxrequired = gitmodules.get(name, "fxrequired") fxsparse = gitmodules.get(name, "fxsparse") fxtag = gitmodules.get(name, "fxtag") + fxhash = gitmodules.get(name, "fxhash") path = gitmodules.get(name, "path") url = gitmodules.get(name, "url") @@ -429,14 +473,16 @@ def submodules_checkout(gitmodules, root_dir, requiredlist, force=False): root_dir, name, url, path, fxsparse, fxtag ) ) - submodule_sparse_checkout(root_dir, name, url, path, fxsparse, tag=fxtag) + submodule_sparse_checkout( + root_dir, name, url, path, fxsparse, tag=fxtag, fxhash=fxhash + ) else: logger.debug( "Calling submodule_checkout({},{},{})".format(root_dir, name, path) ) single_submodule_checkout( - root_dir, name, path, url=url, tag=fxtag, force=force + root_dir, name, path, url=url, tag=fxtag, force=force, fxhash=fxhash ) From bf56a97251d0f5e5e84910a56e4c8e3036d6b057 Mon Sep 17 00:00:00 2001 From: James Edwards Date: Sat, 3 Feb 2024 11:27:03 -0700 Subject: [PATCH 077/161] working on poetry --- pyproject.toml | 28 ++++++++++++ setup.cfg | 8 ---- setup.py | 55 ----------------------- src/{fleximod => }/__init__.py | 0 src/classes/__init__.py | 0 src/{fleximod => classes}/gitinterface.py 
| 2 +- src/{fleximod => classes}/gitmodules.py | 2 +- src/{fleximod => classes}/lstripreader.py | 0 src/{fleximod => classes}/utils.py | 0 src/{fleximod => classes}/version.py | 0 src/git-fleximod | 11 ++--- tbump.toml | 43 ++++++++++++++++++ tests/__init__.py | 3 ++ tests/test_import.py | 9 ++++ 14 files changed, 91 insertions(+), 70 deletions(-) create mode 100644 pyproject.toml delete mode 100644 setup.cfg delete mode 100644 setup.py rename src/{fleximod => }/__init__.py (100%) create mode 100644 src/classes/__init__.py rename src/{fleximod => classes}/gitinterface.py (98%) rename src/{fleximod => classes}/gitmodules.py (98%) rename src/{fleximod => classes}/lstripreader.py (100%) rename src/{fleximod => classes}/utils.py (100%) rename src/{fleximod => classes}/version.py (100%) create mode 100644 tbump.toml create mode 100644 tests/__init__.py create mode 100644 tests/test_import.py diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000000..28e0e35a09 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,28 @@ +[tool.poetry] +name = "git-fleximod" +version = "0.3.0" +description = "Extended support for git-submodule and git-sparse-checkout" +authors = ["Jim Edwards "] +maintainers = ["Jim Edwards "] +license = "MIT" +readme = "README.md" +homepage = "https://github.com/jedwards4b/git-fleximod" +keywords = ["git", "submodule", "sparse-checkout"] +packages = [ + { include = "src/git-fleximod" }, + { include = "src/classes/*.py" }, + { include = "License" }, + { include = "tests" }, + +] + +[tool.poetry.dependencies] +python = "^3.8" +GitPython = "^3.1.0" + +[tools.poetry.urls] +"Bug Tracker" = "https://github.com/jedwards4b/git-fleximod/issues" + +[build-system] +requires = ["poetry-core"] +build-backend = "poetry.core.masonry.api" diff --git a/setup.cfg b/setup.cfg deleted file mode 100644 index 66ff24ada6..0000000000 --- a/setup.cfg +++ /dev/null @@ -1,8 +0,0 @@ -[metadata] -license_file = License -[build_manpages] -manpages = - # solution 1: -- use parser function - man/git-fleximod.1:function=get_parser:pyfile=src/git-fleximod - # solution 2 -- make parser global - #man/git-fleximod.2:pyfile=src/git-fleximod:object=parser diff --git a/setup.py b/setup.py deleted file mode 100644 index 048db75957..0000000000 --- a/setup.py +++ /dev/null @@ -1,55 +0,0 @@ -import sys -import os - -sys.path.insert(0, os.path.join(os.getenv("CONDA_PREFIX"), "lib", "python3.12")) -sys.path.insert( - 0, os.path.join(os.getenv("CONDA_PREFIX"), "lib", "python3.12", "site-packages") -) - -import setuptools -from setuptools import find_packages, convert_path -from build_manpages import build_manpages, get_build_py_cmd, get_install_cmd - -main_ns = {} -ver_path = convert_path( - os.path.join(os.path.dirname(__file__), "src", "fleximod", "version.py") -) -print(f"ver_path is {ver_path}") -with open(ver_path) as ver_file: - exec(ver_file.read(), main_ns) - -if __name__ == "__main__": - with open("README.md", "r") as fh: - long_description = fh.read() - - setuptools.setup( - name="git-fleximod", # package name - scripts=["src/git-fleximod"], # This is the name of the package - version=main_ns["__version__"], # The initial release version - author="Jim Edwards", # Full name of the author - maintainer="jedwards4b", - license="MIT License", - description="Extended support for git-submodule and git-sparse-checkout", - long_description=long_description, # Long description read from the the readme file - long_description_content_type="text/markdown", - packages=find_packages( - where="src" - ), # List of 
all python modules to be installed - package_dir={"": "src", "git-fleximod": "src", "fleximod": "src/fleximod"}, - package_data={"": ["src/fleximod/version.txt", "man/git-fleximod.1"]}, - classifiers=[ - "Programming Language :: Python :: 3", - "License :: OSI Approved :: MIT License", - "Operating System :: OS Independent", - ], # Information to filter the project on PyPi website - python_requires=">=3.6", # Minimum version requirement of the package - # py_modules=['git-fleximod'], # Name of the python package - install_requires=["GitPython"], # Install other dependencies if any - cmdclass={ - "build_manpages": build_manpages, - # Re-define build_py and install commands so the manual pages - # are automatically re-generated and installed - "build_py": get_build_py_cmd(), - "install": get_install_cmd(), - }, - ) diff --git a/src/fleximod/__init__.py b/src/__init__.py similarity index 100% rename from src/fleximod/__init__.py rename to src/__init__.py diff --git a/src/classes/__init__.py b/src/classes/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/src/fleximod/gitinterface.py b/src/classes/gitinterface.py similarity index 98% rename from src/fleximod/gitinterface.py rename to src/classes/gitinterface.py index 96bb9ac52b..35c593a095 100644 --- a/src/fleximod/gitinterface.py +++ b/src/classes/gitinterface.py @@ -1,5 +1,5 @@ import os -from fleximod import utils +from classes import utils class GitInterface: diff --git a/src/fleximod/gitmodules.py b/src/classes/gitmodules.py similarity index 98% rename from src/fleximod/gitmodules.py rename to src/classes/gitmodules.py index 11a2457247..dd2e1fc948 100644 --- a/src/fleximod/gitmodules.py +++ b/src/classes/gitmodules.py @@ -1,7 +1,7 @@ import os import shutil from configparser import ConfigParser -from fleximod.lstripreader import LstripReader +from classes.lstripreader import LstripReader class GitModules(ConfigParser): diff --git a/src/fleximod/lstripreader.py b/src/classes/lstripreader.py similarity index 100% rename from src/fleximod/lstripreader.py rename to src/classes/lstripreader.py diff --git a/src/fleximod/utils.py b/src/classes/utils.py similarity index 100% rename from src/fleximod/utils.py rename to src/classes/utils.py diff --git a/src/fleximod/version.py b/src/classes/version.py similarity index 100% rename from src/fleximod/version.py rename to src/classes/version.py diff --git a/src/git-fleximod b/src/git-fleximod index aa587ea3a9..1c06d0d3d0 100755 --- a/src/git-fleximod +++ b/src/git-fleximod @@ -6,10 +6,10 @@ import logging import argparse import textwrap from pathlib import Path -from fleximod import utils -from fleximod.gitinterface import GitInterface -from fleximod.gitmodules import GitModules -from fleximod.version import __version__ +from classes import utils +from classes.gitinterface import GitInterface +from classes.gitmodules import GitModules +from classes.version import __version__ from configparser import NoOptionError # logger variable is global @@ -43,7 +43,7 @@ def get_parser(): "action", choices=choices, default="checkout", - help=f"Subcommand of fleximod, choices are {choices[:-1]}", + help=f"Subcommand of git-fleximod, choices are {choices[:-1]}", ) parser.add_argument( @@ -394,6 +394,7 @@ def submodules_update(gitmodules, root_dir, force): if fxtag and fxtag not in tags: git.git_operation("fetch", newremote, "--tags") atag = git.git_operation("describe", "--tags", "--always").rstrip() + if fxtag and fxtag != atag: print(f"{name:>20} updated to {fxtag}") 
git.git_operation("checkout", fxtag) diff --git a/tbump.toml b/tbump.toml new file mode 100644 index 0000000000..0e0a633036 --- /dev/null +++ b/tbump.toml @@ -0,0 +1,43 @@ +# Uncomment this if your project is hosted on GitHub: +github_url = "https://github.com/jedwards4b/git-fleximod/" + +[version] +current = "0.3.0" + +# Example of a semver regexp. +# Make sure this matches current_version before +# using tbump +regex = ''' + (?P\d+) + \. + (?P\d+) + \. + (?P\d+) + ''' + +[git] +message_template = "Bump to {new_version}" +tag_template = "v{new_version}" + +# For each file to patch, add a [[file]] config +# section containing the path of the file, relative to the +# tbump.toml location. +[[file]] +src = "src/classes/version.py" + +[[file]] +src = "pyproject.toml" + +# You can specify a list of commands to +# run after the files have been patched +# and before the git commit is made + +# [[before_commit]] +# name = "check changelog" +# cmd = "grep -q {new_version} Changelog.rst" + +# Or run some commands after the git tag and the branch +# have been pushed: +# [[after_push]] +# name = "publish" +# cmd = "./publish.sh" diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 0000000000..4d4c66c78e --- /dev/null +++ b/tests/__init__.py @@ -0,0 +1,3 @@ +import sys, os + +sys.path.append(os.path.join(os.path.dirname(__file__), os.path.pardir, "src")) diff --git a/tests/test_import.py b/tests/test_import.py new file mode 100644 index 0000000000..becf751441 --- /dev/null +++ b/tests/test_import.py @@ -0,0 +1,9 @@ +# pylint: disable=unused-import +from classes import utils +from classes.gitinterface import GitInterface +from classes.gitmodules import GitModules +from classes.version import __version__ + + +def test_import(): + print("here") From 4f3bb556e165f9f1087de841c9b408cedb62f4a3 Mon Sep 17 00:00:00 2001 From: James Edwards Date: Sat, 3 Feb 2024 13:59:24 -0700 Subject: [PATCH 078/161] more poetry and sphinx --- doc/Makefile | 20 +++++++ doc/conf.py | 26 +++++++++ doc/index.rst | 24 ++++++++ doc/make.bat | 35 ++++++++++++ pyproject.toml | 5 +- src/classes/cli.py | 120 ++++++++++++++++++++++++++++++++++++++++ src/classes/version.py | 1 - src/git-fleximod | 122 +---------------------------------------- tbump.toml | 2 +- 9 files changed, 232 insertions(+), 123 deletions(-) create mode 100644 doc/Makefile create mode 100644 doc/conf.py create mode 100644 doc/index.rst create mode 100644 doc/make.bat create mode 100644 src/classes/cli.py delete mode 100644 src/classes/version.py diff --git a/doc/Makefile b/doc/Makefile new file mode 100644 index 0000000000..d4bb2cbb9e --- /dev/null +++ b/doc/Makefile @@ -0,0 +1,20 @@ +# Minimal makefile for Sphinx documentation +# + +# You can set these variables from the command line, and also +# from the environment for the first two. +SPHINXOPTS ?= +SPHINXBUILD ?= sphinx-build +SOURCEDIR = . +BUILDDIR = _build + +# Put it first so that "make" without argument is like "make help". +help: + @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) + +.PHONY: help Makefile + +# Catch-all target: route all unknown targets to Sphinx using the new +# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). +%: Makefile + @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) diff --git a/doc/conf.py b/doc/conf.py new file mode 100644 index 0000000000..423099eec9 --- /dev/null +++ b/doc/conf.py @@ -0,0 +1,26 @@ +# Configuration file for the Sphinx documentation builder. 
+# +# For the full list of built-in configuration values, see the documentation: +# https://www.sphinx-doc.org/en/master/usage/configuration.html + +# -- Project information ----------------------------------------------------- +# https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information + +project = "git-fleximod" +author = "Jim Edwards " +release = "0.4.0" + +# -- General configuration --------------------------------------------------- +# https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration + +extensions = ["sphinx_argparse_cli"] + +templates_path = ["_templates"] +exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"] + + +# -- Options for HTML output ------------------------------------------------- +# https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-output + +html_theme = "alabaster" +html_static_path = ["_static"] diff --git a/doc/index.rst b/doc/index.rst new file mode 100644 index 0000000000..fa534d0d10 --- /dev/null +++ b/doc/index.rst @@ -0,0 +1,24 @@ +.. git-fleximod documentation master file, created by + sphinx-quickstart on Sat Feb 3 12:02:22 2024. + You can adapt this file completely to your liking, but it should at least + contain the root `toctree` directive. + +Welcome to git-fleximod's documentation! +======================================== + +.. toctree:: + :maxdepth: 2 + :caption: Contents: +.. module:: sphinxcontrib.autoprogram +.. sphinx_argparse_cli:: + :module: classes.cli + :func: get_parser + :prog: git-fleximod + + +Indices and tables +================== + +* :ref:`genindex` +* :ref:`modindex` +* :ref:`search` diff --git a/doc/make.bat b/doc/make.bat new file mode 100644 index 0000000000..32bb24529f --- /dev/null +++ b/doc/make.bat @@ -0,0 +1,35 @@ +@ECHO OFF + +pushd %~dp0 + +REM Command file for Sphinx documentation + +if "%SPHINXBUILD%" == "" ( + set SPHINXBUILD=sphinx-build +) +set SOURCEDIR=. +set BUILDDIR=_build + +%SPHINXBUILD% >NUL 2>NUL +if errorlevel 9009 ( + echo. + echo.The 'sphinx-build' command was not found. Make sure you have Sphinx + echo.installed, then set the SPHINXBUILD environment variable to point + echo.to the full path of the 'sphinx-build' executable. Alternatively you + echo.may add the Sphinx directory to PATH. + echo. 
+ echo.If you don't have Sphinx installed, grab it from + echo.https://www.sphinx-doc.org/ + exit /b 1 +) + +if "%1" == "" goto help + +%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% +goto end + +:help +%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% + +:end +popd diff --git a/pyproject.toml b/pyproject.toml index 28e0e35a09..d49cbae72b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -13,7 +13,10 @@ packages = [ { include = "src/classes/*.py" }, { include = "License" }, { include = "tests" }, - + { include = "doc/*.rst" }, + { include = "doc/Makefile" }, + { include = "doc/make.bat" }, + { include = "doc/conf.py" }, ] [tool.poetry.dependencies] diff --git a/src/classes/cli.py b/src/classes/cli.py new file mode 100644 index 0000000000..437d9beec4 --- /dev/null +++ b/src/classes/cli.py @@ -0,0 +1,120 @@ +from pathlib import Path +import argparse + +__version__ = "0.3.0" + + +def find_root_dir(filename=".git"): + d = Path.cwd() + root = Path(d.root) + while d != root: + attempt = d / filename + if attempt.is_dir(): + return attempt + d = d.parent + return None + + +def get_parser(): + description = """ + %(prog)s manages checking out groups of git submodules with additional support for Earth System Models + """ + parser = argparse.ArgumentParser( + description=description, formatter_class=argparse.RawDescriptionHelpFormatter + ) + + # + # user options + # + choices = ["update", "checkout", "status", "test"] + parser.add_argument( + "action", + choices=choices, + default="checkout", + help=f"Subcommand of git-fleximod, choices are {choices[:-1]}", + ) + + parser.add_argument( + "components", + nargs="*", + help="Specific component(s) to checkout. By default, " + "all required submodules are checked out.", + ) + + parser.add_argument( + "-C", + "--path", + default=find_root_dir(), + help="Toplevel repository directory. Defaults to top git directory relative to current.", + ) + + parser.add_argument( + "-g", + "--gitmodules", + nargs="?", + default=".gitmodules", + help="The submodule description filename. " "Default: %(default)s.", + ) + + parser.add_argument( + "-x", + "--exclude", + nargs="*", + help="Component(s) listed in the gitmodules file which should be ignored.", + ) + parser.add_argument( + "-f", + "--force", + action="store_true", + default=False, + help="Override cautions and update or checkout over locally modified repository.", + ) + + parser.add_argument( + "-o", + "--optional", + action="store_true", + default=False, + help="By default only the required submodules " + "are checked out. This flag will also checkout the " + "optional submodules relative to the toplevel directory.", + ) + + parser.add_argument( + "-v", + "--verbose", + action="count", + default=0, + help="Output additional information to " + "the screen and log file. 
This flag can be " + "used up to two times, increasing the " + "verbosity level each time.", + ) + + parser.add_argument( + "-V", + "--version", + action="version", + version=f"%(prog)s {__version__}", + help="Print version and exit.", + ) + + # + # developer options + # + parser.add_argument( + "--backtrace", + action="store_true", + help="DEVELOPER: show exception backtraces as extra " "debugging output", + ) + + parser.add_argument( + "-d", + "--debug", + action="store_true", + default=False, + help="DEVELOPER: output additional debugging " + "information to the screen and log file.", + ) + + return parser diff --git a/src/classes/version.py b/src/classes/version.py deleted file mode 100644 index 493f7415d7..0000000000 --- a/src/classes/version.py +++ /dev/null @@ -1 +0,0 @@ -__version__ = "0.3.0" diff --git a/src/git-fleximod b/src/git-fleximod index 1c06d0d3d0..385515819c 100755 --- a/src/git-fleximod +++ b/src/git-fleximod @@ -3,137 +3,19 @@ import sys import os import shutil import logging -import argparse import textwrap -from pathlib import Path from classes import utils +from classes import cli from classes.gitinterface import GitInterface from classes.gitmodules import GitModules -from classes.version import __version__ from configparser import NoOptionError # logger variable is global logger = None -def find_root_dir(filename=".git"): - d = Path.cwd() - root = Path(d.root) - while d != root: - attempt = d / filename - if attempt.is_dir(): - return attempt - d = d.parent - return None - - -def get_parser(): - description = """ - %(prog)s manages checking out groups of gitsubmodules with addtional support for Earth System Models - """ - parser = argparse.ArgumentParser( - description=description, formatter_class=argparse.RawDescriptionHelpFormatter - ) - - # - # user options - # - choices = ["update", "checkout", "status", "test"] - parser.add_argument( - "action", - choices=choices, - default="checkout", - help=f"Subcommand of git-fleximod, choices are {choices[:-1]}", - ) - - parser.add_argument( - "components", - nargs="*", - help="Specific component(s) to checkout. By default, " - "all required submodules are checked out.", - ) - - parser.add_argument( - "-C", - "--path", - default=find_root_dir(), - help="Toplevel repository directory. Defaults to top git directory relative to current.", - ) - - parser.add_argument( - "-g", - "--gitmodules", - nargs="?", - default=".gitmodules", - help="The submodule description filename. " "Default: %(default)s.", - ) - - parser.add_argument( - "-x", - "--exclude", - nargs="*", - help="Component(s) listed in the gitmodules file which should be ignored.", - ) - parser.add_argument( - "-f", - "--force", - action="store_true", - default=False, - help="Override cautions and update or checkout over locally modified repository.", - ) - - parser.add_argument( - "-o", - "--optional", - action="store_true", - default=False, - help="By default only the required submodules " - "are checked out. This flag will also checkout the " - "optional submodules relative to the toplevel directory.", - ) - - parser.add_argument( - "-v", - "--verbose", - action="count", - default=0, - help="Output additional information to " - "the screen and log file. 
This flag can be " - "used up to two times, increasing the " - "verbosity level each time.", - ) - - parser.add_argument( - "-V", - "--version", - action="version", - version=f"%(prog)s {__version__}", - help="Print version and exit.", - ) - - # - # developer options - # - parser.add_argument( - "--backtrace", - action="store_true", - help="DEVELOPER: show exception backtraces as extra " "debugging output", - ) - - parser.add_argument( - "-d", - "--debug", - action="store_true", - default=False, - help="DEVELOPER: output additional debugging " - "information to the screen and log file.", - ) - - return parser - - def commandline_arguments(args=None): - parser = get_parser() + parser = cli.get_parser() if args: options = parser.parse_args(args) diff --git a/tbump.toml b/tbump.toml index 0e0a633036..35521d04d1 100644 --- a/tbump.toml +++ b/tbump.toml @@ -23,7 +23,7 @@ tag_template = "v{new_version}" # section containing the path of the file, relative to the # tbump.toml location. [[file]] -src = "src/classes/version.py" +src = "src/classes/cli.py" [[file]] src = "pyproject.toml" From 461ee4be93bc63eb9c735e03a7ce12c04ff3fe84 Mon Sep 17 00:00:00 2001 From: James Edwards Date: Sat, 3 Feb 2024 14:02:07 -0700 Subject: [PATCH 079/161] Bump to 0.5.0 --- pyproject.toml | 2 +- src/classes/cli.py | 2 +- tbump.toml | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index d49cbae72b..8b4246e256 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "git-fleximod" -version = "0.3.0" +version = "0.5.0" description = "Extended support for git-submodule and git-sparse-checkout" authors = ["Jim Edwards "] maintainers = ["Jim Edwards "] diff --git a/src/classes/cli.py b/src/classes/cli.py index 437d9beec4..65f70cdd34 100644 --- a/src/classes/cli.py +++ b/src/classes/cli.py @@ -1,7 +1,7 @@ from pathlib import Path import argparse -__version__ = "0.3.0" +__version__ = "0.5.0" def find_root_dir(filename=".git"): diff --git a/tbump.toml b/tbump.toml index 35521d04d1..18e21a69f6 100644 --- a/tbump.toml +++ b/tbump.toml @@ -2,7 +2,7 @@ github_url = "https://github.com/jedwards4b/git-fleximod/" [version] -current = "0.3.0" +current = "0.5.0" # Example of a semver regexp. 
# Make sure this matches current_version before From f0ab8150ba149c42d4690aa8964754bd58225a11 Mon Sep 17 00:00:00 2001 From: James Edwards Date: Sun, 4 Feb 2024 09:15:55 -0700 Subject: [PATCH 080/161] reformed to work with poetry --- {src => git_fleximod}/__init__.py | 0 {src/classes => git_fleximod}/cli.py | 1 - src/git-fleximod => git_fleximod/git-fleximod.py | 12 ++++++------ {src/classes => git_fleximod}/gitinterface.py | 3 +-- {src/classes => git_fleximod}/gitmodules.py | 2 +- {src/classes => git_fleximod}/lstripreader.py | 0 {src/classes => git_fleximod}/utils.py | 0 pyproject.toml | 16 ++++++---------- src/classes/__init__.py | 0 9 files changed, 14 insertions(+), 20 deletions(-) rename {src => git_fleximod}/__init__.py (100%) rename {src/classes => git_fleximod}/cli.py (99%) rename src/git-fleximod => git_fleximod/git-fleximod.py (98%) rename {src/classes => git_fleximod}/gitinterface.py (98%) rename {src/classes => git_fleximod}/gitmodules.py (98%) rename {src/classes => git_fleximod}/lstripreader.py (100%) rename {src/classes => git_fleximod}/utils.py (100%) delete mode 100644 src/classes/__init__.py diff --git a/src/__init__.py b/git_fleximod/__init__.py similarity index 100% rename from src/__init__.py rename to git_fleximod/__init__.py diff --git a/src/classes/cli.py b/git_fleximod/cli.py similarity index 99% rename from src/classes/cli.py rename to git_fleximod/cli.py index 65f70cdd34..822b314387 100644 --- a/src/classes/cli.py +++ b/git_fleximod/cli.py @@ -3,7 +3,6 @@ __version__ = "0.5.0" - def find_root_dir(filename=".git"): d = Path.cwd() root = Path(d.root) diff --git a/src/git-fleximod b/git_fleximod/git-fleximod.py similarity index 98% rename from src/git-fleximod rename to git_fleximod/git-fleximod.py index 385515819c..9e4969d6c1 100755 --- a/src/git-fleximod +++ b/git_fleximod/git-fleximod.py @@ -4,10 +4,10 @@ import shutil import logging import textwrap -from classes import utils -from classes import cli -from classes.gitinterface import GitInterface -from classes.gitmodules import GitModules +from git_fleximod import utils +from git_fleximod import cli +from git_fleximod.gitinterface import GitInterface +from git_fleximod.gitmodules import GitModules from configparser import NoOptionError # logger variable is global @@ -343,7 +343,7 @@ def submodules_test(gitmodules, root_dir): return testfails + localmods -def _main_func(): +def main(): ( root_dir, file_name, @@ -393,4 +393,4 @@ def _main_func(): if __name__ == "__main__": - sys.exit(_main_func()) + sys.exit(main()) diff --git a/src/classes/gitinterface.py b/git_fleximod/gitinterface.py similarity index 98% rename from src/classes/gitinterface.py rename to git_fleximod/gitinterface.py index 35c593a095..eff155d54d 100644 --- a/src/classes/gitinterface.py +++ b/git_fleximod/gitinterface.py @@ -1,6 +1,5 @@ import os -from classes import utils - +from . 
import utils class GitInterface: def __init__(self, repo_path, logger): diff --git a/src/classes/gitmodules.py b/git_fleximod/gitmodules.py similarity index 98% rename from src/classes/gitmodules.py rename to git_fleximod/gitmodules.py index dd2e1fc948..ae0ebe121c 100644 --- a/src/classes/gitmodules.py +++ b/git_fleximod/gitmodules.py @@ -1,7 +1,7 @@ import os import shutil from configparser import ConfigParser -from classes.lstripreader import LstripReader +from .lstripreader import LstripReader class GitModules(ConfigParser): diff --git a/src/classes/lstripreader.py b/git_fleximod/lstripreader.py similarity index 100% rename from src/classes/lstripreader.py rename to git_fleximod/lstripreader.py diff --git a/src/classes/utils.py b/git_fleximod/utils.py similarity index 100% rename from src/classes/utils.py rename to git_fleximod/utils.py diff --git a/pyproject.toml b/pyproject.toml index 8b4246e256..6661789359 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -8,16 +8,12 @@ license = "MIT" readme = "README.md" homepage = "https://github.com/jedwards4b/git-fleximod" keywords = ["git", "submodule", "sparse-checkout"] -packages = [ - { include = "src/git-fleximod" }, - { include = "src/classes/*.py" }, - { include = "License" }, - { include = "tests" }, - { include = "doc/*.rst" }, - { include = "doc/Makefile" }, - { include = "doc/make.bat" }, - { include = "doc/conf.py" }, -] + +[[tool.poetry.packages]] +include = "git_fleximod" + +[tool.poetry.scripts] +git-fleximod = "git_fleximod.git-fleximod:main" [tool.poetry.dependencies] python = "^3.8" diff --git a/src/classes/__init__.py b/src/classes/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 From 7b2a86dcfdcc039034c2ad2e2b69103ac5bf319f Mon Sep 17 00:00:00 2001 From: James Edwards Date: Sun, 4 Feb 2024 09:22:09 -0700 Subject: [PATCH 081/161] add a simple import test --- tests/test_import.py | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/tests/test_import.py b/tests/test_import.py index becf751441..d5ca878de5 100644 --- a/tests/test_import.py +++ b/tests/test_import.py @@ -1,9 +1,8 @@ # pylint: disable=unused-import -from classes import utils -from classes.gitinterface import GitInterface -from classes.gitmodules import GitModules -from classes.version import __version__ - +from git_fleximod import cli +from git_fleximod import utils +from git_fleximod.gitinterface import GitInterface +from git_fleximod.gitmodules import GitModules def test_import(): print("here") From e7fdd39fd8df3b829196f276e21c7945fd71401e Mon Sep 17 00:00:00 2001 From: James Edwards Date: Sun, 4 Feb 2024 10:30:30 -0700 Subject: [PATCH 082/161] now includes man pages --- doc/index.rst | 2 +- pyproject.toml | 13 +++++++++---- 2 files changed, 10 insertions(+), 5 deletions(-) diff --git a/doc/index.rst b/doc/index.rst index fa534d0d10..0f9c1a7f7e 100644 --- a/doc/index.rst +++ b/doc/index.rst @@ -11,7 +11,7 @@ Welcome to git-fleximod's documentation! :caption: Contents: .. module:: sphinxcontrib.autoprogram .. 
sphinx_argparse_cli:: - :module: classes.cli + :module: git_fleximod.cli :func: get_parser :prog: git-fleximod diff --git a/pyproject.toml b/pyproject.toml index 6661789359..17843e0b83 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -8,18 +8,23 @@ license = "MIT" readme = "README.md" homepage = "https://github.com/jedwards4b/git-fleximod" keywords = ["git", "submodule", "sparse-checkout"] - -[[tool.poetry.packages]] -include = "git_fleximod" +packages = [ +{ include = "git_fleximod"}, +{ include = "doc"}, +] [tool.poetry.scripts] git-fleximod = "git_fleximod.git-fleximod:main" +fsspec = "fsspec.fuse:main" [tool.poetry.dependencies] python = "^3.8" GitPython = "^3.1.0" +sphinx = "^5.0.0" +fsspec = "^2023.12.2" +wheel = "^0.42.0" -[tools.poetry.urls] +[tool.poetry.urls] "Bug Tracker" = "https://github.com/jedwards4b/git-fleximod/issues" [build-system] From 26739ada8bf21eda4aefabdf8219ade9e09e473e Mon Sep 17 00:00:00 2001 From: James Edwards Date: Mon, 5 Feb 2024 08:09:44 -0700 Subject: [PATCH 083/161] update tbump path --- tbump.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tbump.toml b/tbump.toml index 18e21a69f6..c3720e25d7 100644 --- a/tbump.toml +++ b/tbump.toml @@ -23,7 +23,7 @@ tag_template = "v{new_version}" # section containing the path of the file, relative to the # tbump.toml location. [[file]] -src = "src/classes/cli.py" +src = "git_fleximod/cli.py" [[file]] src = "pyproject.toml" From 25bcc7ea7ac8cf9e02c5dfbcd81f257c6f50b21a Mon Sep 17 00:00:00 2001 From: James Edwards Date: Mon, 5 Feb 2024 08:09:53 -0700 Subject: [PATCH 084/161] Bump to 0.6.0 --- git_fleximod/cli.py | 2 +- pyproject.toml | 2 +- tbump.toml | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/git_fleximod/cli.py b/git_fleximod/cli.py index 822b314387..e1c6d2d5cd 100644 --- a/git_fleximod/cli.py +++ b/git_fleximod/cli.py @@ -1,7 +1,7 @@ from pathlib import Path import argparse -__version__ = "0.5.0" +__version__ = "0.6.0" def find_root_dir(filename=".git"): d = Path.cwd() diff --git a/pyproject.toml b/pyproject.toml index 17843e0b83..eabb7657e5 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "git-fleximod" -version = "0.5.0" +version = "0.6.0" description = "Extended support for git-submodule and git-sparse-checkout" authors = ["Jim Edwards "] maintainers = ["Jim Edwards "] diff --git a/tbump.toml b/tbump.toml index c3720e25d7..b41ec17e20 100644 --- a/tbump.toml +++ b/tbump.toml @@ -2,7 +2,7 @@ github_url = "https://github.com/jedwards4b/git-fleximod/" [version] -current = "0.5.0" +current = "0.6.0" # Example of a semver regexp. # Make sure this matches current_version before From ed01cb7a0f07e1c79136b50f6448463a45538acb Mon Sep 17 00:00:00 2001 From: Jim Edwards Date: Mon, 5 Feb 2024 09:06:54 -0700 Subject: [PATCH 085/161] fix issue with hyphen --- .../{git-fleximod.py => git_fleximod.py} | 0 poetry.lock | 682 ++++++++++++++++++ pyproject.toml | 1 + 3 files changed, 683 insertions(+) rename git_fleximod/{git-fleximod.py => git_fleximod.py} (100%) create mode 100644 poetry.lock diff --git a/git_fleximod/git-fleximod.py b/git_fleximod/git_fleximod.py similarity index 100% rename from git_fleximod/git-fleximod.py rename to git_fleximod/git_fleximod.py diff --git a/poetry.lock b/poetry.lock new file mode 100644 index 0000000000..6c31fac48a --- /dev/null +++ b/poetry.lock @@ -0,0 +1,682 @@ +# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. 
+ +[[package]] +name = "alabaster" +version = "0.7.13" +description = "A configurable sidebar-enabled Sphinx theme" +optional = false +python-versions = ">=3.6" +files = [ + {file = "alabaster-0.7.13-py3-none-any.whl", hash = "sha256:1ee19aca801bbabb5ba3f5f258e4422dfa86f82f3e9cefb0859b283cdd7f62a3"}, + {file = "alabaster-0.7.13.tar.gz", hash = "sha256:a27a4a084d5e690e16e01e03ad2b2e552c61a65469419b907243193de1a84ae2"}, +] + +[[package]] +name = "babel" +version = "2.14.0" +description = "Internationalization utilities" +optional = false +python-versions = ">=3.7" +files = [ + {file = "Babel-2.14.0-py3-none-any.whl", hash = "sha256:efb1a25b7118e67ce3a259bed20545c29cb68be8ad2c784c83689981b7a57287"}, + {file = "Babel-2.14.0.tar.gz", hash = "sha256:6919867db036398ba21eb5c7a0f6b28ab8cbc3ae7a73a44ebe34ae74a4e7d363"}, +] + +[package.dependencies] +pytz = {version = ">=2015.7", markers = "python_version < \"3.9\""} + +[package.extras] +dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"] + +[[package]] +name = "certifi" +version = "2024.2.2" +description = "Python package for providing Mozilla's CA Bundle." +optional = false +python-versions = ">=3.6" +files = [ + {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, + {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, +] + +[[package]] +name = "charset-normalizer" +version = "3.3.2" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, + {file = 
"charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, + 
{file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = 
"sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, + {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, +] + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." 
+optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "docutils" +version = "0.19" +description = "Docutils -- Python Documentation Utilities" +optional = false +python-versions = ">=3.7" +files = [ + {file = "docutils-0.19-py3-none-any.whl", hash = "sha256:5e1de4d849fee02c63b040a4a3fd567f4ab104defd8a5511fbbc24a8a017efbc"}, + {file = "docutils-0.19.tar.gz", hash = "sha256:33995a6753c30b7f577febfc2c50411fec6aac7f7ffeb7c4cfe5991072dcf9e6"}, +] + +[[package]] +name = "exceptiongroup" +version = "1.2.0" +description = "Backport of PEP 654 (exception groups)" +optional = false +python-versions = ">=3.7" +files = [ + {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"}, + {file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"}, +] + +[package.extras] +test = ["pytest (>=6)"] + +[[package]] +name = "fsspec" +version = "2023.12.2" +description = "File-system specification" +optional = false +python-versions = ">=3.8" +files = [ + {file = "fsspec-2023.12.2-py3-none-any.whl", hash = "sha256:d800d87f72189a745fa3d6b033b9dc4a34ad069f60ca60b943a63599f5501960"}, + {file = "fsspec-2023.12.2.tar.gz", hash = "sha256:8548d39e8810b59c38014934f6b31e57f40c1b20f911f4cc2b85389c7e9bf0cb"}, +] + +[package.extras] +abfs = ["adlfs"] +adl = ["adlfs"] +arrow = ["pyarrow (>=1)"] +dask = ["dask", "distributed"] +devel = ["pytest", "pytest-cov"] +dropbox = ["dropbox", "dropboxdrivefs", "requests"] +full = ["adlfs", "aiohttp (!=4.0.0a0,!=4.0.0a1)", "dask", "distributed", "dropbox", "dropboxdrivefs", "fusepy", "gcsfs", "libarchive-c", "ocifs", "panel", "paramiko", "pyarrow (>=1)", "pygit2", "requests", "s3fs", "smbprotocol", "tqdm"] +fuse = ["fusepy"] +gcs = ["gcsfs"] +git = ["pygit2"] +github = ["requests"] +gs = ["gcsfs"] +gui = ["panel"] +hdfs = ["pyarrow (>=1)"] +http = ["aiohttp (!=4.0.0a0,!=4.0.0a1)", "requests"] +libarchive = ["libarchive-c"] +oci = ["ocifs"] +s3 = ["s3fs"] +sftp = ["paramiko"] +smb = ["smbprotocol"] +ssh = ["paramiko"] +tqdm = ["tqdm"] + +[[package]] +name = "gitdb" +version = "4.0.11" +description = "Git Object Database" +optional = false +python-versions = ">=3.7" +files = [ + {file = "gitdb-4.0.11-py3-none-any.whl", hash = "sha256:81a3407ddd2ee8df444cbacea00e2d038e40150acfa3001696fe0dcf1d3adfa4"}, + {file = "gitdb-4.0.11.tar.gz", hash = "sha256:bf5421126136d6d0af55bc1e7c1af1c397a34f5b7bd79e776cd3e89785c2b04b"}, +] + +[package.dependencies] +smmap = ">=3.0.1,<6" + +[[package]] +name = "gitpython" +version = "3.1.41" +description = "GitPython is a Python library used to interact with Git repositories" +optional = false +python-versions = ">=3.7" +files = [ + {file = "GitPython-3.1.41-py3-none-any.whl", hash = "sha256:c36b6634d069b3f719610175020a9aed919421c87552185b085e04fbbdb10b7c"}, + {file = "GitPython-3.1.41.tar.gz", hash = "sha256:ed66e624884f76df22c8e16066d567aaa5a37d5b5fa19db2c6df6f7156db9048"}, +] + +[package.dependencies] +gitdb = ">=4.0.1,<5" + +[package.extras] +test = ["black", "coverage[toml]", "ddt (>=1.1.1,!=1.4.3)", "mock", "mypy", "pre-commit", "pytest (>=7.3.1)", "pytest-cov", "pytest-instafail", "pytest-mock", 
"pytest-sugar", "sumtypes"] + +[[package]] +name = "idna" +version = "3.6" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.5" +files = [ + {file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"}, + {file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"}, +] + +[[package]] +name = "imagesize" +version = "1.4.1" +description = "Getting image size from png/jpeg/jpeg2000/gif file" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "imagesize-1.4.1-py2.py3-none-any.whl", hash = "sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b"}, + {file = "imagesize-1.4.1.tar.gz", hash = "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a"}, +] + +[[package]] +name = "importlib-metadata" +version = "7.0.1" +description = "Read metadata from Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "importlib_metadata-7.0.1-py3-none-any.whl", hash = "sha256:4805911c3a4ec7c3966410053e9ec6a1fecd629117df5adee56dfc9432a1081e"}, + {file = "importlib_metadata-7.0.1.tar.gz", hash = "sha256:f238736bb06590ae52ac1fab06a3a9ef1d8dce2b7a35b5ab329371d6c8f5d2cc"}, +] + +[package.dependencies] +zipp = ">=0.5" + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] +perf = ["ipython"] +testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)", "pytest-ruff"] + +[[package]] +name = "iniconfig" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.7" +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] + +[[package]] +name = "jinja2" +version = "3.1.3" +description = "A very fast and expressive template engine." +optional = false +python-versions = ">=3.7" +files = [ + {file = "Jinja2-3.1.3-py3-none-any.whl", hash = "sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa"}, + {file = "Jinja2-3.1.3.tar.gz", hash = "sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90"}, +] + +[package.dependencies] +MarkupSafe = ">=2.0" + +[package.extras] +i18n = ["Babel (>=2.7)"] + +[[package]] +name = "markupsafe" +version = "2.1.5" +description = "Safely add untrusted strings to HTML/XML markup." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, + {file = 
"MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, + {file = 
"MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, + {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, +] + +[[package]] +name = "packaging" +version = "23.2" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.7" +files = [ + {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, + {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, +] + +[[package]] +name = "pluggy" +version = "1.4.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, + {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "pygments" +version = "2.17.2" +description = "Pygments is a syntax highlighting package written in Python." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "pygments-2.17.2-py3-none-any.whl", hash = "sha256:b27c2826c47d0f3219f29554824c30c5e8945175d888647acd804ddd04af846c"}, + {file = "pygments-2.17.2.tar.gz", hash = "sha256:da46cec9fd2de5be3a8a784f434e4c4ab670b4ff54d605c4c2717e9d49c4c367"}, +] + +[package.extras] +plugins = ["importlib-metadata"] +windows-terminal = ["colorama (>=0.4.6)"] + +[[package]] +name = "pytest" +version = "8.0.0" +description = "pytest: simple powerful testing with Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pytest-8.0.0-py3-none-any.whl", hash = "sha256:50fb9cbe836c3f20f0dfa99c565201fb75dc54c8d76373cd1bde06b06657bdb6"}, + {file = "pytest-8.0.0.tar.gz", hash = "sha256:249b1b0864530ba251b7438274c4d251c58d868edaaec8762893ad4a0d71c36c"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} +exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=1.3.0,<2.0" +tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} + +[package.extras] +testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] + +[[package]] +name = "pytz" +version = "2024.1" +description = "World timezone definitions, modern and historical" +optional = false +python-versions = "*" +files = [ + {file = "pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"}, + {file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"}, +] + +[[package]] +name = "requests" +version = "2.31.0" +description = "Python HTTP for Humans." +optional = false +python-versions = ">=3.7" +files = [ + {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, + {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "smmap" +version = "5.0.1" +description = "A pure Python implementation of a sliding window memory map manager" +optional = false +python-versions = ">=3.7" +files = [ + {file = "smmap-5.0.1-py3-none-any.whl", hash = "sha256:e6d8668fa5f93e706934a62d7b4db19c8d9eb8cf2adbb75ef1b675aa332b69da"}, + {file = "smmap-5.0.1.tar.gz", hash = "sha256:dceeb6c0028fdb6734471eb07c0cd2aae706ccaecab45965ee83f11c8d3b1f62"}, +] + +[[package]] +name = "snowballstemmer" +version = "2.2.0" +description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." 
+optional = false +python-versions = "*" +files = [ + {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"}, + {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, +] + +[[package]] +name = "sphinx" +version = "5.3.0" +description = "Python documentation generator" +optional = false +python-versions = ">=3.6" +files = [ + {file = "Sphinx-5.3.0.tar.gz", hash = "sha256:51026de0a9ff9fc13c05d74913ad66047e104f56a129ff73e174eb5c3ee794b5"}, + {file = "sphinx-5.3.0-py3-none-any.whl", hash = "sha256:060ca5c9f7ba57a08a1219e547b269fadf125ae25b06b9fa7f66768efb652d6d"}, +] + +[package.dependencies] +alabaster = ">=0.7,<0.8" +babel = ">=2.9" +colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} +docutils = ">=0.14,<0.20" +imagesize = ">=1.3" +importlib-metadata = {version = ">=4.8", markers = "python_version < \"3.10\""} +Jinja2 = ">=3.0" +packaging = ">=21.0" +Pygments = ">=2.12" +requests = ">=2.5.0" +snowballstemmer = ">=2.0" +sphinxcontrib-applehelp = "*" +sphinxcontrib-devhelp = "*" +sphinxcontrib-htmlhelp = ">=2.0.0" +sphinxcontrib-jsmath = "*" +sphinxcontrib-qthelp = "*" +sphinxcontrib-serializinghtml = ">=1.1.5" + +[package.extras] +docs = ["sphinxcontrib-websupport"] +lint = ["docutils-stubs", "flake8 (>=3.5.0)", "flake8-bugbear", "flake8-comprehensions", "flake8-simplify", "isort", "mypy (>=0.981)", "sphinx-lint", "types-requests", "types-typed-ast"] +test = ["cython", "html5lib", "pytest (>=4.6)", "typed_ast"] + +[[package]] +name = "sphinxcontrib-applehelp" +version = "1.0.4" +description = "sphinxcontrib-applehelp is a Sphinx extension which outputs Apple help books" +optional = false +python-versions = ">=3.8" +files = [ + {file = "sphinxcontrib-applehelp-1.0.4.tar.gz", hash = "sha256:828f867945bbe39817c210a1abfd1bc4895c8b73fcaade56d45357a348a07d7e"}, + {file = "sphinxcontrib_applehelp-1.0.4-py3-none-any.whl", hash = "sha256:29d341f67fb0f6f586b23ad80e072c8e6ad0b48417db2bde114a4c9746feb228"}, +] + +[package.extras] +lint = ["docutils-stubs", "flake8", "mypy"] +test = ["pytest"] + +[[package]] +name = "sphinxcontrib-devhelp" +version = "1.0.2" +description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp document." 
+optional = false +python-versions = ">=3.5" +files = [ + {file = "sphinxcontrib-devhelp-1.0.2.tar.gz", hash = "sha256:ff7f1afa7b9642e7060379360a67e9c41e8f3121f2ce9164266f61b9f4b338e4"}, + {file = "sphinxcontrib_devhelp-1.0.2-py2.py3-none-any.whl", hash = "sha256:8165223f9a335cc1af7ffe1ed31d2871f325254c0423bc0c4c7cd1c1e4734a2e"}, +] + +[package.extras] +lint = ["docutils-stubs", "flake8", "mypy"] +test = ["pytest"] + +[[package]] +name = "sphinxcontrib-htmlhelp" +version = "2.0.1" +description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files" +optional = false +python-versions = ">=3.8" +files = [ + {file = "sphinxcontrib-htmlhelp-2.0.1.tar.gz", hash = "sha256:0cbdd302815330058422b98a113195c9249825d681e18f11e8b1f78a2f11efff"}, + {file = "sphinxcontrib_htmlhelp-2.0.1-py3-none-any.whl", hash = "sha256:c38cb46dccf316c79de6e5515e1770414b797162b23cd3d06e67020e1d2a6903"}, +] + +[package.extras] +lint = ["docutils-stubs", "flake8", "mypy"] +test = ["html5lib", "pytest"] + +[[package]] +name = "sphinxcontrib-jsmath" +version = "1.0.1" +description = "A sphinx extension which renders display math in HTML via JavaScript" +optional = false +python-versions = ">=3.5" +files = [ + {file = "sphinxcontrib-jsmath-1.0.1.tar.gz", hash = "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8"}, + {file = "sphinxcontrib_jsmath-1.0.1-py2.py3-none-any.whl", hash = "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178"}, +] + +[package.extras] +test = ["flake8", "mypy", "pytest"] + +[[package]] +name = "sphinxcontrib-qthelp" +version = "1.0.3" +description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp document." +optional = false +python-versions = ">=3.5" +files = [ + {file = "sphinxcontrib-qthelp-1.0.3.tar.gz", hash = "sha256:4c33767ee058b70dba89a6fc5c1892c0d57a54be67ddd3e7875a18d14cba5a72"}, + {file = "sphinxcontrib_qthelp-1.0.3-py2.py3-none-any.whl", hash = "sha256:bd9fc24bcb748a8d51fd4ecaade681350aa63009a347a8c14e637895444dfab6"}, +] + +[package.extras] +lint = ["docutils-stubs", "flake8", "mypy"] +test = ["pytest"] + +[[package]] +name = "sphinxcontrib-serializinghtml" +version = "1.1.5" +description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)." +optional = false +python-versions = ">=3.5" +files = [ + {file = "sphinxcontrib-serializinghtml-1.1.5.tar.gz", hash = "sha256:aa5f6de5dfdf809ef505c4895e51ef5c9eac17d0f287933eb49ec495280b6952"}, + {file = "sphinxcontrib_serializinghtml-1.1.5-py2.py3-none-any.whl", hash = "sha256:352a9a00ae864471d3a7ead8d7d79f5fc0b57e8b3f95e9867eb9eb28999b92fd"}, +] + +[package.extras] +lint = ["docutils-stubs", "flake8", "mypy"] +test = ["pytest"] + +[[package]] +name = "tomli" +version = "2.0.1" +description = "A lil' TOML parser" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, + {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, +] + +[[package]] +name = "urllib3" +version = "2.2.0" +description = "HTTP library with thread-safe connection pooling, file post, and more." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "urllib3-2.2.0-py3-none-any.whl", hash = "sha256:ce3711610ddce217e6d113a2732fafad960a03fd0318c91faa79481e35c11224"}, + {file = "urllib3-2.2.0.tar.gz", hash = "sha256:051d961ad0c62a94e50ecf1af379c3aba230c66c710493493560c0c223c49f20"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +h2 = ["h2 (>=4,<5)"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] + +[[package]] +name = "wheel" +version = "0.42.0" +description = "A built-package format for Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "wheel-0.42.0-py3-none-any.whl", hash = "sha256:177f9c9b0d45c47873b619f5b650346d632cdc35fb5e4d25058e09c9e581433d"}, + {file = "wheel-0.42.0.tar.gz", hash = "sha256:c45be39f7882c9d34243236f2d63cbd58039e360f85d0913425fbd7ceea617a8"}, +] + +[package.extras] +test = ["pytest (>=6.0.0)", "setuptools (>=65)"] + +[[package]] +name = "zipp" +version = "3.17.0" +description = "Backport of pathlib-compatible object wrapper for zip files" +optional = false +python-versions = ">=3.8" +files = [ + {file = "zipp-3.17.0-py3-none-any.whl", hash = "sha256:0e923e726174922dce09c53c59ad483ff7bbb8e572e00c7f7c46b88556409f31"}, + {file = "zipp-3.17.0.tar.gz", hash = "sha256:84e64a1c28cf7e91ed2078bb8cc8c259cb19b76942096c8d7b84947690cabaf0"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy (>=0.9.1)", "pytest-ruff"] + +[metadata] +lock-version = "2.0" +python-versions = "^3.8" +content-hash = "f20e29bada880bbb0aad20d2a7ac282d09c3e89d4aac06072cb82a9157f4023a" diff --git a/pyproject.toml b/pyproject.toml index eabb7657e5..c6ccff2624 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -23,6 +23,7 @@ GitPython = "^3.1.0" sphinx = "^5.0.0" fsspec = "^2023.12.2" wheel = "^0.42.0" +pytest = "^8.0.0" [tool.poetry.urls] "Bug Tracker" = "https://github.com/jedwards4b/git-fleximod/issues" From 69a5dc5ae30514b2cefe8ee827b6d2a5e4322db9 Mon Sep 17 00:00:00 2001 From: Jim Edwards Date: Mon, 5 Feb 2024 09:07:10 -0700 Subject: [PATCH 086/161] Bump to 0.6.1 --- git_fleximod/cli.py | 2 +- pyproject.toml | 2 +- tbump.toml | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/git_fleximod/cli.py b/git_fleximod/cli.py index e1c6d2d5cd..39e83b97c0 100644 --- a/git_fleximod/cli.py +++ b/git_fleximod/cli.py @@ -1,7 +1,7 @@ from pathlib import Path import argparse -__version__ = "0.6.0" +__version__ = "0.6.1" def find_root_dir(filename=".git"): d = Path.cwd() diff --git a/pyproject.toml b/pyproject.toml index c6ccff2624..225f7b39b0 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "git-fleximod" -version = "0.6.0" +version = "0.6.1" description = "Extended support for git-submodule and git-sparse-checkout" authors = ["Jim Edwards "] maintainers = ["Jim Edwards "] diff --git a/tbump.toml b/tbump.toml index b41ec17e20..03b63f1540 100644 --- a/tbump.toml +++ b/tbump.toml @@ -2,7 +2,7 @@ github_url = "https://github.com/jedwards4b/git-fleximod/" [version] -current = "0.6.0" +current = "0.6.1" # Example of a semver regexp. 
# Make sure this matches current_version before From f456ca059ccd6bfacd15dce2b302eafb6c09ce2c Mon Sep 17 00:00:00 2001 From: James Edwards Date: Tue, 6 Feb 2024 11:45:42 -0700 Subject: [PATCH 087/161] fix issue with sparse checkout dir path --- git_fleximod/git_fleximod.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/git_fleximod/git_fleximod.py b/git_fleximod/git_fleximod.py index 9e4969d6c1..59656eeb08 100755 --- a/git_fleximod/git_fleximod.py +++ b/git_fleximod/git_fleximod.py @@ -65,8 +65,8 @@ def commandline_arguments(args=None): def submodule_sparse_checkout(root_dir, name, url, path, sparsefile, tag="master"): # first create the module directory - if not os.path.isdir(path): - os.makedirs(path) + if not os.path.isdir(os.path.join(root_dir,path)): + os.makedirs(os.path.join(root_dir,path)) # Check first if the module is already defined # and the sparse-checkout file exists git = GitInterface(root_dir, logger) From ac3e6ddc7f1fddc3066d81a61aa7a042683315cb Mon Sep 17 00:00:00 2001 From: Jim Edwards Date: Tue, 6 Feb 2024 13:12:28 -0700 Subject: [PATCH 088/161] Bump to 0.6.2 --- git_fleximod/cli.py | 2 +- pyproject.toml | 2 +- tbump.toml | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/git_fleximod/cli.py b/git_fleximod/cli.py index 39e83b97c0..719b156206 100644 --- a/git_fleximod/cli.py +++ b/git_fleximod/cli.py @@ -1,7 +1,7 @@ from pathlib import Path import argparse -__version__ = "0.6.1" +__version__ = "0.6.2" def find_root_dir(filename=".git"): d = Path.cwd() diff --git a/pyproject.toml b/pyproject.toml index 225f7b39b0..f5828a58e7 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "git-fleximod" -version = "0.6.1" +version = "0.6.2" description = "Extended support for git-submodule and git-sparse-checkout" authors = ["Jim Edwards "] maintainers = ["Jim Edwards "] diff --git a/tbump.toml b/tbump.toml index 03b63f1540..583cef45ae 100644 --- a/tbump.toml +++ b/tbump.toml @@ -2,7 +2,7 @@ github_url = "https://github.com/jedwards4b/git-fleximod/" [version] -current = "0.6.1" +current = "0.6.2" # Example of a semver regexp. # Make sure this matches current_version before From 96ac371975428ddebf7744bf9304f7496c66a959 Mon Sep 17 00:00:00 2001 From: James Edwards Date: Fri, 9 Feb 2024 14:41:10 -0700 Subject: [PATCH 089/161] add tests --- git_fleximod/gitinterface.py | 11 +++++-- poetry.lock | 13 +++++++- pyproject.toml | 3 +- tests/{test_import.py => test_a_import.py} | 0 tests/test_checkout.py | 32 ++++++++++++++++++ tests/test_sparse_checkout.py | 38 ++++++++++++++++++++++ 6 files changed, 93 insertions(+), 4 deletions(-) rename tests/{test_import.py => test_a_import.py} (100%) create mode 100644 tests/test_checkout.py create mode 100644 tests/test_sparse_checkout.py diff --git a/git_fleximod/gitinterface.py b/git_fleximod/gitinterface.py index eff155d54d..4d4c4f4b9e 100644 --- a/git_fleximod/gitinterface.py +++ b/git_fleximod/gitinterface.py @@ -1,4 +1,5 @@ import os +import sys from . 
import utils class GitInterface: @@ -26,7 +27,10 @@ def __init__(self, repo_path, logger): def _git_command(self, operation, *args): self.logger.info(operation) if self._use_module and operation != "submodule": - return getattr(self.repo.git, operation)(*args) + try: + return getattr(self.repo.git, operation)(*args) + except Exception as e: + sys.exit(e) else: return ["git", "-C", self.repo_path, operation] + list(args) @@ -42,7 +46,10 @@ def git_operation(self, operation, *args, **kwargs): command = self._git_command(operation, *args) self.logger.info(command) if isinstance(command, list): - return utils.execute_subprocess(command, output_to_caller=True) + try: + return utils.execute_subprocess(command, output_to_caller=True) + except Exception as e: + sys.exit(e) else: return command diff --git a/poetry.lock b/poetry.lock index 6c31fac48a..b59ed3942c 100644 --- a/poetry.lock +++ b/poetry.lock @@ -404,6 +404,17 @@ files = [ dev = ["pre-commit", "tox"] testing = ["pytest", "pytest-benchmark"] +[[package]] +name = "pyfakefs" +version = "5.3.5" +description = "pyfakefs implements a fake file system that mocks the Python file system modules." +optional = false +python-versions = ">=3.7" +files = [ + {file = "pyfakefs-5.3.5-py3-none-any.whl", hash = "sha256:751015c1de94e1390128c82b48cdedc3f088bbdbe4bc713c79d02a27f0f61e69"}, + {file = "pyfakefs-5.3.5.tar.gz", hash = "sha256:7cdc500b35a214cb7a614e1940543acc6650e69a94ac76e30f33c9373bd9cf90"}, +] + [[package]] name = "pygments" version = "2.17.2" @@ -679,4 +690,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" python-versions = "^3.8" -content-hash = "f20e29bada880bbb0aad20d2a7ac282d09c3e89d4aac06072cb82a9157f4023a" +content-hash = "25ee2ae1d74abedde3a6637a60d4a3095ea5cf9731960875741bbc2ba84a475d" diff --git a/pyproject.toml b/pyproject.toml index f5828a58e7..6d0fa42157 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -14,7 +14,7 @@ packages = [ ] [tool.poetry.scripts] -git-fleximod = "git_fleximod.git-fleximod:main" +git-fleximod = "git_fleximod.git_fleximod:main" fsspec = "fsspec.fuse:main" [tool.poetry.dependencies] @@ -24,6 +24,7 @@ sphinx = "^5.0.0" fsspec = "^2023.12.2" wheel = "^0.42.0" pytest = "^8.0.0" +pyfakefs = "^5.3.5" [tool.poetry.urls] "Bug Tracker" = "https://github.com/jedwards4b/git-fleximod/issues" diff --git a/tests/test_import.py b/tests/test_a_import.py similarity index 100% rename from tests/test_import.py rename to tests/test_a_import.py diff --git a/tests/test_checkout.py b/tests/test_checkout.py new file mode 100644 index 0000000000..791587628b --- /dev/null +++ b/tests/test_checkout.py @@ -0,0 +1,32 @@ +import pytest +from pathlib import Path + +def test_basic_checkout(git_fleximod, test_repo): + # Prepare a simple .gitmodules + + gitmodules_content = """ + [submodule "test_submodule"] + path = modules/test + url = https://github.com/ESMCI/mpi-serial.git + fxtag = MPIserial_2.4.0 + fxurl = https://github.com/ESMCI/mpi-serial.git + fxrequired = T:T + """ + (test_repo / ".gitmodules").write_text(gitmodules_content) + + # Run the command + result = git_fleximod("checkout") + + # Assertions + assert result.returncode == 0 + assert Path(test_repo / "modules/test").exists() # Did the submodule directory get created? 
+ + status = git_fleximod("status") + + assert "test_submodule d82ce7c is out of sync with .gitmodules MPIserial_2.4.0" in status.stdout + + result = git_fleximod("update") + assert result.returncode == 0 + + status = git_fleximod("status") + assert "test_submodule at tag MPIserial_2.4.0" in status.stdout diff --git a/tests/test_sparse_checkout.py b/tests/test_sparse_checkout.py new file mode 100644 index 0000000000..0633802cb0 --- /dev/null +++ b/tests/test_sparse_checkout.py @@ -0,0 +1,38 @@ +import pytest +from shutil import rmtree +from pathlib import Path +from git_fleximod.gitinterface import GitInterface + +def test_sparse_checkout(git_fleximod, test_repo_base): + # Prepare a simple .gitmodules + gitmodules_content = (test_repo_base / ".gitmodules").read_text() + """ + [submodule "test_sparse_submodule"] + path = modules/sparse_test + url = https://github.com/ESMCI/mpi-serial.git + fxtag = MPIserial_2.5.0 + fxurl = https://github.com/ESMCI/mpi-serial.git + fxsparse = ../.sparse_file_list + """ + (test_repo_base / ".gitmodules").write_text(gitmodules_content) + + # Add the sparse checkout file + sparse_content = """m4 +""" + (test_repo_base / "modules" / ".sparse_file_list").write_text(sparse_content) + + result = git_fleximod("checkout") + + # Assertions + assert result.returncode == 0 + assert Path(test_repo_base / "modules/sparse_test").exists() # Did the submodule directory get created? + assert Path(test_repo_base / "modules/sparse_test/m4").exists() # Did the submodule sparse directory get created? + assert not Path(test_repo_base / "modules/sparse_test/README").exists() # Did only the submodule sparse directory get created? + status = git_fleximod("status test_sparse_submodule") + + assert "test_sparse_submodule at tag MPIserial_2.5.0" in status.stdout + + result = git_fleximod("update") + assert result.returncode == 0 + + status = git_fleximod("status test_sparse_submodule") + assert "test_sparse_submodule at tag MPIserial_2.5.0" in status.stdout From 8e04947c985c02063c246d20ac5b1ef4ecd18fd4 Mon Sep 17 00:00:00 2001 From: James Edwards Date: Fri, 9 Feb 2024 15:51:58 -0700 Subject: [PATCH 090/161] add pytest to workflow --- .github/workflows/pytest.yaml | 73 +++++++++++++++++++++++++++++++++++ 1 file changed, 73 insertions(+) create mode 100644 .github/workflows/pytest.yaml diff --git a/.github/workflows/pytest.yaml b/.github/workflows/pytest.yaml new file mode 100644 index 0000000000..8d2c2cb6ef --- /dev/null +++ b/.github/workflows/pytest.yaml @@ -0,0 +1,73 @@ +# Run this job on pushes to `main`, and for pull requests. If you don't specify +# `branches: [main], then this actions runs _twice_ on pull requests, which is +# annoying. + +on: + push: + branches: [main] + pull_request: + +jobs: + test: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + + # If you wanted to use multiple Python versions, you'd have specify a matrix in the job and + # reference the matrixe python version here. + - uses: actions/setup-python@v5 + with: + python-version: '3.9' + + # Cache the installation of Poetry itself, e.g. the next step. This prevents the workflow + # from installing Poetry every time, which can be slow. Note the use of the Poetry version + # number in the cache key, and the "-0" suffix: this allows you to invalidate the cache + # manually if/when you want to upgrade Poetry, or if something goes wrong. This could be + # mildly cleaner by using an environment variable, but I don't really care. 
+ - name: cache poetry install + uses: actions/cache@v4 + with: + path: ~/.local + key: poetry-1.7.1 + + # Install Poetry. You could do this manually, or there are several actions that do this. + # `snok/install-poetry` seems to be minimal yet complete, and really just calls out to + # Poetry's default install script, which feels correct. I pin the Poetry version here + # because Poetry does occasionally change APIs between versions and I don't want my + # actions to break if it does. + # + # The key configuration value here is `virtualenvs-in-project: true`: this creates the + # venv as a `.venv` in your testing directory, which allows the next step to easily + # cache it. + - uses: snok/install-poetry@v1 + with: + version: 1.7.1 + virtualenvs-create: true + virtualenvs-in-project: true + + # Cache your dependencies (i.e. all the stuff in your `pyproject.toml`). Note the cache + # key: if you're using multiple Python versions, or multiple OSes, you'd need to include + # them in the cache key. I'm not, so it can be simple and just depend on the poetry.lock. + - name: cache deps + id: cache-deps + uses: actions/cache@v4 + with: + path: .venv + key: pydeps-${{ hashFiles('**/poetry.lock') }} + + # Install dependencies. `--no-root` means "install all dependencies but not the project + # itself", which is what you want to avoid caching _your_ code. The `if` statement + # ensures this only runs on a cache miss. + - run: poetry install --no-interaction --no-root + if: steps.cache-deps.outputs.cache-hit != 'true' + + # Now install _your_ project. This isn't necessary for many types of projects -- particularly + # things like Django apps don't need this. But it's a good idea since it fully-exercises the + # pyproject.toml and makes that if you add things like console-scripts at some point that + # they'll be installed and working. + - run: poetry install --no-interaction + + # And finally run tests. I'm using pytest and all my pytest config is in my `pyproject.toml` + # so this line is super-simple. But it could be as complex as you need. 
+ - run: poetry run pytest + From c1aef37f7b1e0b30399ed20d093a0756e658b25c Mon Sep 17 00:00:00 2001 From: James Edwards Date: Fri, 9 Feb 2024 15:52:42 -0700 Subject: [PATCH 091/161] add pytest to workflow --- .github/workflows/pytest.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/pytest.yaml b/.github/workflows/pytest.yaml index 8d2c2cb6ef..4809a2787f 100644 --- a/.github/workflows/pytest.yaml +++ b/.github/workflows/pytest.yaml @@ -6,6 +6,7 @@ on: push: branches: [main] pull_request: + branches: [main] jobs: test: From e8b6d3cc382785d787ee5fc482a974ef0c1be2b8 Mon Sep 17 00:00:00 2001 From: James Edwards Date: Fri, 9 Feb 2024 15:55:32 -0700 Subject: [PATCH 092/161] add pytest fixtures --- tests/conftest.py | 51 +++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 51 insertions(+) create mode 100644 tests/conftest.py diff --git a/tests/conftest.py b/tests/conftest.py new file mode 100644 index 0000000000..1cb059dac6 --- /dev/null +++ b/tests/conftest.py @@ -0,0 +1,51 @@ +import pytest +from git_fleximod.gitinterface import GitInterface +import os +import subprocess +import logging +from pathlib import Path + +@pytest.fixture(scope='session') +def logger(): + logging.basicConfig( + level=logging.INFO, format="%(name)s - %(levelname)s - %(message)s", handlers=[logging.StreamHandler()] + ) + logger = logging.getLogger(__name__) + return logger + +@pytest.fixture +def test_repo_base(tmp_path, logger): + test_dir = tmp_path / "test_repo" + test_dir.mkdir() + str_path = str(test_dir) + gitp = GitInterface(str_path, logger) + #subprocess.run(["git", "init"], cwd=test_dir) + assert test_dir.joinpath(".git").is_dir() + return test_dir + +@pytest.fixture(params=["modules/test"]) +def test_repo(request, test_repo_base, logger): + subrepo_path = request.param + gitp = GitInterface(str(test_repo_base), logger) + gitp.git_operation("submodule", "add", "--depth","1","--name","test_submodule", "https://github.com/ESMCI/mpi-serial.git", subrepo_path) + # subprocess.run( + assert test_repo_base.joinpath(".gitmodules").is_file() + return test_repo_base + +@pytest.fixture +def git_fleximod(test_repo): + def _run_fleximod(args, input=None): + cmd = ["git", "fleximod"] + args.split() + result = subprocess.run(cmd, cwd=test_repo, input=input, + stdout=subprocess.PIPE, stderr=subprocess.PIPE, + text=True) + return result + return _run_fleximod + + +@pytest.fixture +def deinit_submodule(test_repo, logger): + def _deinit(submodule_path): + gitp = GitInterface(str(test_repo), logger) + gitp.git_operation( "submodule", "deinit", "-f", submodule_path) + yield _deinit From c1edd2c894d6f28903c70caa2ff5c7a6e892da26 Mon Sep 17 00:00:00 2001 From: James Edwards Date: Sun, 11 Feb 2024 10:49:54 -0700 Subject: [PATCH 093/161] all working --- README.md | 18 +++++---- git_fleximod/git_fleximod.py | 7 ++-- pyproject.toml | 6 +++ tests/conftest.py | 73 +++++++++++++++++++++++++---------- tests/test_checkout.py | 54 ++++++++++++++------------ tests/test_sparse_checkout.py | 52 ++++++++++++------------- 6 files changed, 127 insertions(+), 83 deletions(-) diff --git a/README.md b/README.md index 7c968e70b0..259ba8b54f 100644 --- a/README.md +++ b/README.md @@ -1,10 +1,10 @@ # git-fleximod -Flexible Submodule Management for Git +Flexible, Enhanced Submodule Management for Git ## Overview -Git-fleximod is a Python-based tool that extends Git's submodule capabilities, offering additional features for managing submodules in a more flexible and efficient way. 
+Git-fleximod is a Python-based tool that extends Git's submodule and sparse checkout capabilities, offering additional features for managing submodules in a more flexible and efficient way. ## Installation @@ -30,10 +30,10 @@ Git-fleximod is a Python-based tool that extends Git's submodule capabilities, o fxtag: Specify a specific tag or branch to checkout for a submodule. fxrequired: Mark a submodule's checkout behavior, with allowed values: - - T:T: Top-level and required (checked out only when this is the Toplevel module). - - T:F: Top-level and optional (checked out with --optional flag if this is the Toplevel module). - - I:T: Internal and required (always checked out). - - I:F: Internal and optional (checked out with --optional flag). + - ToplevelOnlyRequired: Top-level and required (checked out only when this is the Toplevel module). + - ToplevelOnlyOptional: Top-level and optional (checked out with --optional flag if this is the Toplevel module). + - AlwaysRequired: Always required (always checked out). + - AlwaysOptional: Always optional (checked out with --optional flag). fxsparse: Enable sparse checkout for a submodule, pointing to a file containing sparse checkout paths. fxurl: This is the url used in the test subcommand to assure that protected branches do not point to forks @@ -71,6 +71,7 @@ Example .gitmodules entry: path = src/physics/cosp2/src url = https://github.com/CFMIP/COSPv2.0 fxsparse = ../.cosp_sparse_checkout + fxrequired = AlwaysRequired fxtag = v2.1.4cesm ``` Explanation: @@ -80,14 +81,15 @@ should be checked out into the directory src/physics/cosp2/src relative to the .gitmodules directory. It should be checked out from the URL https://github.com/CFMIP/COSPv2.0 and use sparse checkout as described in the file ../.cosp_sparse_checkout relative to the path -directory. +directory. It should be checked out anytime this .gitmodules entry is +read. 
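For illustration only, here is a minimal sketch of how these `fx*` keys could be read and how the `fxrequired` categories above translate into a default (no `--optional`) top-level checkout. It uses Python's standard `configparser` purely as an assumption for this example; it is not necessarily how git-fleximod itself parses `.gitmodules`:

```python
# Illustrative sketch only: read the fx* keys from .gitmodules and apply the
# fxrequired rule described above for a top-level run without --optional.
# configparser is an assumption here; the real git-fleximod parser may differ.
import configparser

KEEP = {"ToplevelOnlyRequired", "AlwaysRequired"}  # checked out by default

parser = configparser.ConfigParser()
parser.read(".gitmodules")

for section in parser.sections():
    # Section headers look like: [submodule "src/physics/cosp2/src"]
    name = section.split('"')[1] if '"' in section else section
    fxrequired = parser.get(section, "fxrequired", fallback=None)
    if fxrequired and fxrequired not in KEEP:
        print(f"skipping optional component {name}")
        continue
    print(name,
          parser.get(section, "path", fallback=None),
          parser.get(section, "fxtag", fallback=None),
          parser.get(section, "fxsparse", fallback=None))
```

Passing `--optional` would simply widen the accepted set to include the `*Optional` categories, which mirrors the behavior described for the flag above.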
Additional example: ```ini, toml [submodule "cime"] path = cime url = https://github.com/jedwards4b/cime - fxrequired = T:T + fxrequired = ToplevelOnlyRequired fxtag = cime6.0.198_rme01 ``` diff --git a/git_fleximod/git_fleximod.py b/git_fleximod/git_fleximod.py index 1b6c357b94..df76127b38 100755 --- a/git_fleximod/git_fleximod.py +++ b/git_fleximod/git_fleximod.py @@ -24,9 +24,9 @@ def commandline_arguments(args=None): # explicitly listing a component overrides the optional flag if options.optional or options.components: - fxrequired = ["T:T", "T:F", "I:T"] + fxrequired = ["ToplevelOnlyRequired", "ToplevelOnlyOptional", "AlwaysRequired", "AlwaysOptional"] else: - fxrequired = ["T:T", "I:T"] + fxrequired = ["ToplevelOnlyRequired", "AlwaysRequired"] action = options.action if not action: @@ -66,6 +66,7 @@ def commandline_arguments(args=None): def submodule_sparse_checkout( root_dir, name, url, path, sparsefile, tag="master", fxhash=None ): + logger.info("Called sparse_checkout for {}".format(name)) # first create the module directory if not os.path.isdir(os.path.join(root_dir,path)): os.makedirs(os.path.join(root_dir,path)) @@ -344,7 +345,7 @@ def submodules_checkout(gitmodules, root_dir, requiredlist, force=False): url = gitmodules.get(name, "url") if fxrequired and fxrequired not in requiredlist: - if "T:F" == fxrequired: + if "Optional" in fxrequired: print("Skipping optional component {}".format(name)) continue diff --git a/pyproject.toml b/pyproject.toml index 6d0fa42157..772b154216 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -29,6 +29,12 @@ pyfakefs = "^5.3.5" [tool.poetry.urls] "Bug Tracker" = "https://github.com/jedwards4b/git-fleximod/issues" +[tool.pytest.ini_options] +markers = [ + "skip_after_first: only run on first iteration" +] + [build-system] requires = ["poetry-core"] build-backend = "poetry.core.masonry.api" + diff --git a/tests/conftest.py b/tests/conftest.py index 1cb059dac6..d6fec44f3e 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -13,39 +13,72 @@ def logger(): logger = logging.getLogger(__name__) return logger +@pytest.fixture(params=[ + {"subrepo_path": "modules/test", "submodule_name": "test_submodule", "gitmodules_content" : """ + [submodule "test_submodule"] + path = modules/test + url = https://github.com/ESMCI/mpi-serial.git + fxtag = MPIserial_2.4.0 + fxurl = https://github.com/ESMCI/mpi-serial.git + fxrequired = ToplevelOnlyRequired +"""}, + {"subrepo_path": "modules/test_optional", "submodule_name": "test_optional", "gitmodules_content": """ + [submodule "test_optional"] + path = modules/test_optional + url = https://github.com/ESMCI/mpi-serial.git + fxtag = MPIserial_2.4.0 + fxurl = https://github.com/ESMCI/mpi-serial.git + fxrequired = ToplevelOnlyRequired +"""}, + {"subrepo_path": "modules/test_alwaysoptional", "submodule_name": "test_alwaysoptional", "gitmodules_content": """ + [submodule "test_alwaysoptional"] + path = modules/test_alwaysoptional + url = https://github.com/ESMCI/mpi-serial.git + fxtag = MPIserial_2.3.0 + fxurl = https://github.com/ESMCI/mpi-serial.git + fxrequired = AlwaysOptional +"""}, + {"subrepo_path": "modules/test_sparse", "submodule_name": "test_sparse", "gitmodules_content": """ + [submodule "test_sparse"] + path = modules/test_sparse + url = https://github.com/ESMCI/mpi-serial.git + fxtag = MPIserial_2.5.0 + fxurl = https://github.com/ESMCI/mpi-serial.git + fxrequired = AlwaysRequired + fxsparse = ../.sparse_file_list +"""}, +]) + +def shared_repos(request): + return request.param + +@pytest.fixture +def 
test_repo(shared_repos, test_repo_base, logger): + subrepo_path = shared_repos["subrepo_path"] + submodule_name = shared_repos["submodule_name"] + + gitp = GitInterface(str(test_repo_base), logger) + gitp.git_operation("submodule", "add", "--depth","1","--name", submodule_name, "https://github.com/ESMCI/mpi-serial.git", subrepo_path) + assert test_repo_base.joinpath(".gitmodules").is_file() + return test_repo_base + @pytest.fixture def test_repo_base(tmp_path, logger): - test_dir = tmp_path / "test_repo" + test_dir = tmp_path / "testrepo" test_dir.mkdir() str_path = str(test_dir) gitp = GitInterface(str_path, logger) - #subprocess.run(["git", "init"], cwd=test_dir) assert test_dir.joinpath(".git").is_dir() + (test_dir / "modules").mkdir() return test_dir -@pytest.fixture(params=["modules/test"]) -def test_repo(request, test_repo_base, logger): - subrepo_path = request.param - gitp = GitInterface(str(test_repo_base), logger) - gitp.git_operation("submodule", "add", "--depth","1","--name","test_submodule", "https://github.com/ESMCI/mpi-serial.git", subrepo_path) - # subprocess.run( - assert test_repo_base.joinpath(".gitmodules").is_file() - return test_repo_base - @pytest.fixture -def git_fleximod(test_repo): +def git_fleximod(test_repo_base): def _run_fleximod(args, input=None): cmd = ["git", "fleximod"] + args.split() - result = subprocess.run(cmd, cwd=test_repo, input=input, + result = subprocess.run(cmd, cwd=test_repo_base, input=input, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True) return result return _run_fleximod - -@pytest.fixture -def deinit_submodule(test_repo, logger): - def _deinit(submodule_path): - gitp = GitInterface(str(test_repo), logger) - gitp.git_operation( "submodule", "deinit", "-f", submodule_path) - yield _deinit diff --git a/tests/test_checkout.py b/tests/test_checkout.py index 791587628b..6d17ce18fd 100644 --- a/tests/test_checkout.py +++ b/tests/test_checkout.py @@ -1,32 +1,38 @@ import pytest from pathlib import Path -def test_basic_checkout(git_fleximod, test_repo): - # Prepare a simple .gitmodules - - gitmodules_content = """ - [submodule "test_submodule"] +@pytest.fixture(params=[ + {"subrepo_path": "modules/test", "submodule_name": "test_submodule", "gitmodules_content" : """ [submodule "test_submodule"] path = modules/test url = https://github.com/ESMCI/mpi-serial.git fxtag = MPIserial_2.4.0 fxurl = https://github.com/ESMCI/mpi-serial.git - fxrequired = T:T - """ - (test_repo / ".gitmodules").write_text(gitmodules_content) + fxrequired = ToplevelOnlyRequired +"""}, +]) +def test_config(request): + return request.param + +def test_basic_checkout(git_fleximod, test_repo, test_config): + # Prepare a simple .gitmodules + gm = test_config['gitmodules_content'] + file_path = (test_repo / ".gitmodules") + if not file_path.exists(): + file_path.write_text(gm) - # Run the command - result = git_fleximod("checkout") - - # Assertions - assert result.returncode == 0 - assert Path(test_repo / "modules/test").exists() # Did the submodule directory get created? 
- - status = git_fleximod("status") - - assert "test_submodule d82ce7c is out of sync with .gitmodules MPIserial_2.4.0" in status.stdout - - result = git_fleximod("update") - assert result.returncode == 0 - - status = git_fleximod("status") - assert "test_submodule at tag MPIserial_2.4.0" in status.stdout + # Run the command + result = git_fleximod("checkout test_submodule") + + # Assertions + assert result.returncode == 0 + assert Path(test_repo / "modules/test").exists() # Did the submodule directory get created? + + status = git_fleximod("status") + + assert "test_submodule d82ce7c is out of sync with .gitmodules MPIserial_2.4.0" in status.stdout + + result = git_fleximod("update") + assert result.returncode == 0 + + status = git_fleximod("status") + assert "test_submodule at tag MPIserial_2.4.0" in status.stdout diff --git a/tests/test_sparse_checkout.py b/tests/test_sparse_checkout.py index 0633802cb0..7276e18d35 100644 --- a/tests/test_sparse_checkout.py +++ b/tests/test_sparse_checkout.py @@ -3,36 +3,32 @@ from pathlib import Path from git_fleximod.gitinterface import GitInterface -def test_sparse_checkout(git_fleximod, test_repo_base): - # Prepare a simple .gitmodules - gitmodules_content = (test_repo_base / ".gitmodules").read_text() + """ - [submodule "test_sparse_submodule"] - path = modules/sparse_test - url = https://github.com/ESMCI/mpi-serial.git - fxtag = MPIserial_2.5.0 - fxurl = https://github.com/ESMCI/mpi-serial.git - fxsparse = ../.sparse_file_list - """ - (test_repo_base / ".gitmodules").write_text(gitmodules_content) +def test_sparse_checkout(shared_repos, git_fleximod, test_repo_base): + repo_name = shared_repos["submodule_name"] + if repo_name == "test_sparse": + gm = shared_repos["gitmodules_content"] + (test_repo_base / ".gitmodules").write_text(gm) - # Add the sparse checkout file - sparse_content = """m4 + # Add the sparse checkout file + sparse_content = """m4 """ - (test_repo_base / "modules" / ".sparse_file_list").write_text(sparse_content) - result = git_fleximod("checkout") - - # Assertions - assert result.returncode == 0 - assert Path(test_repo_base / "modules/sparse_test").exists() # Did the submodule directory get created? - assert Path(test_repo_base / "modules/sparse_test/m4").exists() # Did the submodule sparse directory get created? - assert not Path(test_repo_base / "modules/sparse_test/README").exists() # Did only the submodule sparse directory get created? - status = git_fleximod("status test_sparse_submodule") - - assert "test_sparse_submodule at tag MPIserial_2.5.0" in status.stdout + (test_repo_base / "modules" / ".sparse_file_list").write_text(sparse_content) - result = git_fleximod("update") - assert result.returncode == 0 + result = git_fleximod(f"checkout {repo_name}") + + # Assertions + assert result.returncode == 0 + assert Path(test_repo_base / f"modules/{repo_name}").exists() # Did the submodule directory get created? + assert Path(test_repo_base / f"modules/{repo_name}/m4").exists() # Did the submodule sparse directory get created? + assert not Path(test_repo_base / f"modules/{repo_name}/README").exists() # Did only the submodule sparse directory get created? 
+ status = git_fleximod(f"status {repo_name}") + + assert f"{repo_name} at tag MPIserial_2.5.0" in status.stdout + + result = git_fleximod(f"update {repo_name}") + assert result.returncode == 0 + + status = git_fleximod(f"status {repo_name}") + assert f"{repo_name} at tag MPIserial_2.5.0" in status.stdout - status = git_fleximod("status test_sparse_submodule") - assert "test_sparse_submodule at tag MPIserial_2.5.0" in status.stdout From 03156218c7ddf4387fd5a8b74b8550ddae7d08da Mon Sep 17 00:00:00 2001 From: James Edwards Date: Sun, 11 Feb 2024 11:47:20 -0700 Subject: [PATCH 094/161] clean up and combine tests --- tests/conftest.py | 50 +++++++++++++++++++++++----------- tests/test_checkout.py | 51 +++++++++++++++-------------------- tests/test_sparse_checkout.py | 34 ----------------------- 3 files changed, 57 insertions(+), 78 deletions(-) delete mode 100644 tests/test_sparse_checkout.py diff --git a/tests/conftest.py b/tests/conftest.py index d6fec44f3e..ffff68956d 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -14,7 +14,11 @@ def logger(): return logger @pytest.fixture(params=[ - {"subrepo_path": "modules/test", "submodule_name": "test_submodule", "gitmodules_content" : """ + {"subrepo_path": "modules/test", + "submodule_name": "test_submodule", + "status1" : "test_submodule d82ce7c is out of sync with .gitmodules MPIserial_2.4.0", + "status2" : "test_submodule at tag MPIserial_2.4.0", + "gitmodules_content" : """ [submodule "test_submodule"] path = modules/test url = https://github.com/ESMCI/mpi-serial.git @@ -22,7 +26,11 @@ def logger(): fxurl = https://github.com/ESMCI/mpi-serial.git fxrequired = ToplevelOnlyRequired """}, - {"subrepo_path": "modules/test_optional", "submodule_name": "test_optional", "gitmodules_content": """ + {"subrepo_path": "modules/test_optional", + "submodule_name": "test_optional", + "status1" : "test_optional d82ce7c is out of sync with .gitmodules MPIserial_2.4.0", + "status2" : "test_optional at tag MPIserial_2.4.0", + "gitmodules_content": """ [submodule "test_optional"] path = modules/test_optional url = https://github.com/ESMCI/mpi-serial.git @@ -30,7 +38,11 @@ def logger(): fxurl = https://github.com/ESMCI/mpi-serial.git fxrequired = ToplevelOnlyRequired """}, - {"subrepo_path": "modules/test_alwaysoptional", "submodule_name": "test_alwaysoptional", "gitmodules_content": """ + {"subrepo_path": "modules/test_alwaysoptional", + "submodule_name": "test_alwaysoptional", + "status1" : "test_alwaysoptional d82ce7c is out of sync with .gitmodules MPIserial_2.3.0", + "status2" : "test_alwaysoptional at tag MPIserial_2.3.0", + "gitmodules_content": """ [submodule "test_alwaysoptional"] path = modules/test_alwaysoptional url = https://github.com/ESMCI/mpi-serial.git @@ -38,7 +50,11 @@ def logger(): fxurl = https://github.com/ESMCI/mpi-serial.git fxrequired = AlwaysOptional """}, - {"subrepo_path": "modules/test_sparse", "submodule_name": "test_sparse", "gitmodules_content": """ + {"subrepo_path": "modules/test_sparse", + "submodule_name": "test_sparse", + "status1" : "test_sparse at tag MPIserial_2.5.0", + "status2" : "test_sparse at tag MPIserial_2.5.0", + "gitmodules_content": """ [submodule "test_sparse"] path = modules/test_sparse url = https://github.com/ESMCI/mpi-serial.git @@ -53,30 +69,34 @@ def shared_repos(request): return request.param @pytest.fixture -def test_repo(shared_repos, test_repo_base, logger): +def test_repo(shared_repos, tmp_path, logger): subrepo_path = shared_repos["subrepo_path"] submodule_name = shared_repos["submodule_name"] 
- - gitp = GitInterface(str(test_repo_base), logger) - gitp.git_operation("submodule", "add", "--depth","1","--name", submodule_name, "https://github.com/ESMCI/mpi-serial.git", subrepo_path) - assert test_repo_base.joinpath(".gitmodules").is_file() - return test_repo_base - -@pytest.fixture -def test_repo_base(tmp_path, logger): test_dir = tmp_path / "testrepo" test_dir.mkdir() str_path = str(test_dir) gitp = GitInterface(str_path, logger) assert test_dir.joinpath(".git").is_dir() (test_dir / "modules").mkdir() + if "sparse" in submodule_name: + (test_dir / subrepo_path).mkdir() + # Add the sparse checkout file + sparse_content = """m4 +""" + (test_dir / "modules" / ".sparse_file_list").write_text(sparse_content) + else: + gitp = GitInterface(str(test_dir), logger) + gitp.git_operation("submodule", "add", "--depth","1","--name", submodule_name, "https://github.com/ESMCI/mpi-serial.git", subrepo_path) + assert test_dir.joinpath(".gitmodules").is_file() + + return test_dir @pytest.fixture -def git_fleximod(test_repo_base): +def git_fleximod(test_repo): def _run_fleximod(args, input=None): cmd = ["git", "fleximod"] + args.split() - result = subprocess.run(cmd, cwd=test_repo_base, input=input, + result = subprocess.run(cmd, cwd=test_repo, input=input, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True) return result diff --git a/tests/test_checkout.py b/tests/test_checkout.py index 6d17ce18fd..38bc5f9b28 100644 --- a/tests/test_checkout.py +++ b/tests/test_checkout.py @@ -1,38 +1,31 @@ import pytest from pathlib import Path - -@pytest.fixture(params=[ - {"subrepo_path": "modules/test", "submodule_name": "test_submodule", "gitmodules_content" : """ [submodule "test_submodule"] - path = modules/test - url = https://github.com/ESMCI/mpi-serial.git - fxtag = MPIserial_2.4.0 - fxurl = https://github.com/ESMCI/mpi-serial.git - fxrequired = ToplevelOnlyRequired -"""}, -]) -def test_config(request): - return request.param -def test_basic_checkout(git_fleximod, test_repo, test_config): +def test_basic_checkout(git_fleximod, test_repo, shared_repos): # Prepare a simple .gitmodules - gm = test_config['gitmodules_content'] + gm = shared_repos['gitmodules_content'] file_path = (test_repo / ".gitmodules") - if not file_path.exists(): - file_path.write_text(gm) + repo_name = shared_repos["submodule_name"] + repo_path = shared_repos["subrepo_path"] + + file_path.write_text(gm) - # Run the command - result = git_fleximod("checkout test_submodule") - - # Assertions - assert result.returncode == 0 - assert Path(test_repo / "modules/test").exists() # Did the submodule directory get created? - - status = git_fleximod("status") + # Run the command + result = git_fleximod(f"checkout {repo_name}") + + # Assertions + assert result.returncode == 0 + assert Path(test_repo / repo_path).exists() # Did the submodule directory get created? + if "sparse" in repo_name: + assert Path(test_repo / f"{repo_path}/m4").exists() # Did the submodule sparse directory get created? + assert not Path(test_repo / f"{repo_path}/README").exists() # Did only the submodule sparse directory get created? 
+ + status = git_fleximod(f"status {repo_name}") - assert "test_submodule d82ce7c is out of sync with .gitmodules MPIserial_2.4.0" in status.stdout + assert shared_repos["status1"] in status.stdout - result = git_fleximod("update") - assert result.returncode == 0 + result = git_fleximod(f"update {repo_name}") + assert result.returncode == 0 - status = git_fleximod("status") - assert "test_submodule at tag MPIserial_2.4.0" in status.stdout + status = git_fleximod(f"status {repo_name}") + assert shared_repos["status2"] in status.stdout diff --git a/tests/test_sparse_checkout.py b/tests/test_sparse_checkout.py deleted file mode 100644 index 7276e18d35..0000000000 --- a/tests/test_sparse_checkout.py +++ /dev/null @@ -1,34 +0,0 @@ -import pytest -from shutil import rmtree -from pathlib import Path -from git_fleximod.gitinterface import GitInterface - -def test_sparse_checkout(shared_repos, git_fleximod, test_repo_base): - repo_name = shared_repos["submodule_name"] - if repo_name == "test_sparse": - gm = shared_repos["gitmodules_content"] - (test_repo_base / ".gitmodules").write_text(gm) - - # Add the sparse checkout file - sparse_content = """m4 -""" - - (test_repo_base / "modules" / ".sparse_file_list").write_text(sparse_content) - - result = git_fleximod(f"checkout {repo_name}") - - # Assertions - assert result.returncode == 0 - assert Path(test_repo_base / f"modules/{repo_name}").exists() # Did the submodule directory get created? - assert Path(test_repo_base / f"modules/{repo_name}/m4").exists() # Did the submodule sparse directory get created? - assert not Path(test_repo_base / f"modules/{repo_name}/README").exists() # Did only the submodule sparse directory get created? - status = git_fleximod(f"status {repo_name}") - - assert f"{repo_name} at tag MPIserial_2.5.0" in status.stdout - - result = git_fleximod(f"update {repo_name}") - assert result.returncode == 0 - - status = git_fleximod(f"status {repo_name}") - assert f"{repo_name} at tag MPIserial_2.5.0" in status.stdout - From 9602eb0b777c3e174027bc9ea121da907879bac6 Mon Sep 17 00:00:00 2001 From: James Edwards Date: Sun, 11 Feb 2024 14:43:38 -0700 Subject: [PATCH 095/161] all tests passing --- tests/conftest.py | 25 +++++++++++++++++++------ 1 file changed, 19 insertions(+), 6 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index ffff68956d..a6ba850893 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -16,8 +16,10 @@ def logger(): @pytest.fixture(params=[ {"subrepo_path": "modules/test", "submodule_name": "test_submodule", - "status1" : "test_submodule d82ce7c is out of sync with .gitmodules MPIserial_2.4.0", + "status1" : "test_submodule MPIserial_2.5.0-3-gd82ce7c is out of sync with .gitmodules MPIserial_2.4.0", "status2" : "test_submodule at tag MPIserial_2.4.0", + "status3" : "test_submodule MPIserial_2.5.0-3-gd82ce7c is out of sync with .gitmodules MPIserial_2.4.0", + "status4" : "test_submodule at tag MPIserial_2.4.0", "gitmodules_content" : """ [submodule "test_submodule"] path = modules/test @@ -28,20 +30,24 @@ def logger(): """}, {"subrepo_path": "modules/test_optional", "submodule_name": "test_optional", - "status1" : "test_optional d82ce7c is out of sync with .gitmodules MPIserial_2.4.0", + "status1" : "test_optional MPIserial_2.5.0-3-gd82ce7c is out of sync with .gitmodules MPIserial_2.4.0", "status2" : "test_optional at tag MPIserial_2.4.0", + "status3" : "test_optional not checked out, aligned at tag MPIserial_2.4.0", + "status4" : "test_optional at tag MPIserial_2.4.0", "gitmodules_content": """ 
[submodule "test_optional"] path = modules/test_optional url = https://github.com/ESMCI/mpi-serial.git fxtag = MPIserial_2.4.0 fxurl = https://github.com/ESMCI/mpi-serial.git - fxrequired = ToplevelOnlyRequired + fxrequired = ToplevelOnlyOptional """}, {"subrepo_path": "modules/test_alwaysoptional", "submodule_name": "test_alwaysoptional", - "status1" : "test_alwaysoptional d82ce7c is out of sync with .gitmodules MPIserial_2.3.0", + "status1" : "test_alwaysoptional MPIserial_2.5.0-3-gd82ce7c is out of sync with .gitmodules MPIserial_2.3.0", "status2" : "test_alwaysoptional at tag MPIserial_2.3.0", + "status3" : "test_alwaysoptional not checked out, aligned at tag MPIserial_2.3.0", + "status4" : "test_alwaysoptional at tag MPIserial_2.3.0", "gitmodules_content": """ [submodule "test_alwaysoptional"] path = modules/test_alwaysoptional @@ -54,6 +60,8 @@ def logger(): "submodule_name": "test_sparse", "status1" : "test_sparse at tag MPIserial_2.5.0", "status2" : "test_sparse at tag MPIserial_2.5.0", + "status3" : "test_sparse at tag MPIserial_2.5.0", + "status4" : "test_sparse at tag MPIserial_2.5.0", "gitmodules_content": """ [submodule "test_sparse"] path = modules/test_sparse @@ -84,13 +92,18 @@ def test_repo(shared_repos, tmp_path, logger): sparse_content = """m4 """ (test_dir / "modules" / ".sparse_file_list").write_text(sparse_content) + gitp.git_operation("add","modules/.sparse_file_list") else: gitp = GitInterface(str(test_dir), logger) gitp.git_operation("submodule", "add", "--depth","1","--name", submodule_name, "https://github.com/ESMCI/mpi-serial.git", subrepo_path) assert test_dir.joinpath(".gitmodules").is_file() + gitp.git_operation("add",subrepo_path) + gitp.git_operation("commit","-a","-m","\"add submod\"") + test_dir2 = tmp_path / "testrepo2" + gitp.git_operation("clone",test_dir,test_dir2) + return test_dir2 - - return test_dir + @pytest.fixture def git_fleximod(test_repo): From b5c4515d0e9d1a48de659f96da99e6a8f3c20ed5 Mon Sep 17 00:00:00 2001 From: James Edwards Date: Sun, 11 Feb 2024 15:52:38 -0700 Subject: [PATCH 096/161] thought it was working?? 
--- tests/test_required.py | 33 +++++++++++++++++++++++++++++++++ 1 file changed, 33 insertions(+) create mode 100644 tests/test_required.py diff --git a/tests/test_required.py b/tests/test_required.py new file mode 100644 index 0000000000..0997db69f7 --- /dev/null +++ b/tests/test_required.py @@ -0,0 +1,33 @@ +import pytest +from pathlib import Path + +def test_required(git_fleximod, test_repo, shared_repos): + file_path = (test_repo / ".gitmodules") + gm = shared_repos["gitmodules_content"] + repo_name = shared_repos["submodule_name"] + + status = git_fleximod(f"status {repo_name}") + print(f"stdout is {status.stdout}") + if file_path.exists(): + with file_path.open("r") as f: + gitmodules_content = f.read() + # add the entry if it does not exist + if repo_name not in gitmodules_content: + file_path.write_text(gitmodules_content+gm) + # or if it is incomplete + elif gm not in gitmodules_content: + file_path.write_text(gm) + else: + file_path.write_text(gm) + status = git_fleximod(f"status {repo_name}") + result = git_fleximod("checkout") + assert result.returncode == 0 + status = git_fleximod(f"status {repo_name}") + assert shared_repos["status3"] in status.stdout + if "not checked out" in status.stdout: + status = git_fleximod(f"checkout {repo_name}") + assert result.returncode == 0 + status = git_fleximod(f"update {repo_name}") + assert result.returncode == 0 + status = git_fleximod(f"status {repo_name}") + assert shared_repos["status4"] in status.stdout From a3ce6116065af2e8e86bd3d7d6de06bc215a6e58 Mon Sep 17 00:00:00 2001 From: James Edwards Date: Tue, 13 Feb 2024 15:50:55 -0700 Subject: [PATCH 097/161] test_complex is still a work in progress --- tests/conftest.py | 18 +++++++++++++----- tests/test_checkout.py | 2 +- tests/test_complex.py | 35 +++++++++++++++++++++++++++++++++++ tests/test_required.py | 11 ++++------- 4 files changed, 53 insertions(+), 13 deletions(-) create mode 100644 tests/test_complex.py diff --git a/tests/conftest.py b/tests/conftest.py index a6ba850893..9bd6a758b2 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -13,7 +13,7 @@ def logger(): logger = logging.getLogger(__name__) return logger -@pytest.fixture(params=[ +all_repos=[ {"subrepo_path": "modules/test", "submodule_name": "test_submodule", "status1" : "test_submodule MPIserial_2.5.0-3-gd82ce7c is out of sync with .gitmodules MPIserial_2.4.0", @@ -71,11 +71,21 @@ def logger(): fxrequired = AlwaysRequired fxsparse = ../.sparse_file_list """}, -]) +] +@pytest.fixture(params=all_repos) def shared_repos(request): return request.param +@pytest.fixture +def get_all_repos(): + return all_repos + +def write_sparse_checkout_file(fp): + sparse_content = """m4 +""" + fp.write_text(sparse_content) + @pytest.fixture def test_repo(shared_repos, tmp_path, logger): subrepo_path = shared_repos["subrepo_path"] @@ -89,9 +99,7 @@ def test_repo(shared_repos, tmp_path, logger): if "sparse" in submodule_name: (test_dir / subrepo_path).mkdir() # Add the sparse checkout file - sparse_content = """m4 -""" - (test_dir / "modules" / ".sparse_file_list").write_text(sparse_content) + write_sparse_checkout_file(test_dir / "modules" / ".sparse_file_list") gitp.git_operation("add","modules/.sparse_file_list") else: gitp = GitInterface(str(test_dir), logger) diff --git a/tests/test_checkout.py b/tests/test_checkout.py index 38bc5f9b28..fd8ffc1996 100644 --- a/tests/test_checkout.py +++ b/tests/test_checkout.py @@ -14,7 +14,7 @@ def test_basic_checkout(git_fleximod, test_repo, shared_repos): result = git_fleximod(f"checkout 
{repo_name}") # Assertions - assert result.returncode == 0 + assert result.returncode == 0 assert Path(test_repo / repo_path).exists() # Did the submodule directory get created? if "sparse" in repo_name: assert Path(test_repo / f"{repo_path}/m4").exists() # Did the submodule sparse directory get created? diff --git a/tests/test_complex.py b/tests/test_complex.py new file mode 100644 index 0000000000..628a92d837 --- /dev/null +++ b/tests/test_complex.py @@ -0,0 +1,35 @@ +import pytest +from pathlib import Path +from git_fleximod.gitinterface import GitInterface + +def test_complex_checkout(git_fleximod, get_all_repos, test_repo, request, logger): + gitp = None + for repo in get_all_repos: + repo_name = repo["submodule_name"] + gm = repo["gitmodules_content"] + if "shared_repos0" in request.node.name: + if not gitp: + gitp = GitInterface(str(test_repo), logger) + file_path = (test_repo / ".gitmodules") + if file_path.exists(): + with file_path.open("r") as f: + gitmodules_content = f.read() + print(f"content={gitmodules_content}") + print(f"gm={gm}") + # add the entry if it does not exist + if repo_name not in gitmodules_content: + file_path.write_text(gitmodules_content+gm) + # or if it is incomplete + elif gm not in gitmodules_content: + file_path.write_text(gm) + + else: + file_path.write_text(gm) + if "sparse" in repo_name: + print(f"writing sparse_file_list in {test_repo}") + write_sparse_checkout_file(test_repo / "modules" / ".sparse_file_list") + gitp.git_operation("add","modules/.sparse_file_list") + gitp.git_operation("commit","-a","-m","\"add submod\"") + + + assert(False) diff --git a/tests/test_required.py b/tests/test_required.py index 0997db69f7..b4d99c5642 100644 --- a/tests/test_required.py +++ b/tests/test_required.py @@ -5,17 +5,14 @@ def test_required(git_fleximod, test_repo, shared_repos): file_path = (test_repo / ".gitmodules") gm = shared_repos["gitmodules_content"] repo_name = shared_repos["submodule_name"] - - status = git_fleximod(f"status {repo_name}") - print(f"stdout is {status.stdout}") if file_path.exists(): with file_path.open("r") as f: gitmodules_content = f.read() # add the entry if it does not exist - if repo_name not in gitmodules_content: - file_path.write_text(gitmodules_content+gm) - # or if it is incomplete - elif gm not in gitmodules_content: + if repo_name not in gitmodules_content: + file_path.write_text(gitmodules_content+gm) + # or if it is incomplete + elif gm not in gitmodules_content: file_path.write_text(gm) else: file_path.write_text(gm) From 8af16188fe7c531f3e651000505c06751784e988 Mon Sep 17 00:00:00 2001 From: James Edwards Date: Tue, 13 Feb 2024 16:08:33 -0700 Subject: [PATCH 098/161] update should checkout if needed --- git_fleximod/git_fleximod.py | 34 +++++++++++++++++-- ...arse_checkout.py => test_sparse_update.py} | 7 +--- tests/{test_checkout.py => test_update.py} | 7 +--- 3 files changed, 33 insertions(+), 15 deletions(-) rename tests/{test_sparse_checkout.py => test_sparse_update.py} (86%) rename tests/{test_checkout.py => test_update.py} (82%) diff --git a/git_fleximod/git_fleximod.py b/git_fleximod/git_fleximod.py index 1b6c357b94..593a8b3b22 100755 --- a/git_fleximod/git_fleximod.py +++ b/git_fleximod/git_fleximod.py @@ -275,7 +275,7 @@ def submodules_status(gitmodules, root_dir): return testfails, localmods -def submodules_update(gitmodules, root_dir, force): +def submodules_update(gitmodules, root_dir, requiredlist, force): _, localmods = submodules_status(gitmodules, root_dir) print("") if localmods and not force: @@ -289,6 
+289,34 @@ def submodules_update(gitmodules, root_dir, force): path = gitmodules.get(name, "path") url = gitmodules.get(name, "url") logger.info("name={} path={} url={} fxtag={}".format(name, path, url, fxtag)) + if not os.path.exists(os.path.join(path, ".git")): + fxrequired = gitmodules.get(name, "fxrequired") + fxsparse = gitmodules.get(name, "fxsparse") + + if fxrequired and fxrequired not in requiredlist: + if "T:F" == fxrequired: + print("Skipping optional component {}".format(name)) + continue + + if fxsparse: + logger.debug( + "Callng submodule_sparse_checkout({}, {}, {}, {}, {}, {}".format( + root_dir, name, url, path, fxsparse, fxtag + ) + ) + submodule_sparse_checkout( + root_dir, name, url, path, fxsparse, tag=fxtag, fxhash=fxhash + ) + else: + logger.debug( + "Calling submodule_checkout({},{},{})".format(root_dir, name, path) + ) + + single_submodule_checkout( + root_dir, name, path, url=url, tag=fxtag, force=force, fxhash=fxhash + ) + + if os.path.exists(os.path.join(path, ".git")): submoddir = os.path.join(root_dir, path) with utils.pushd(submoddir): @@ -326,7 +354,7 @@ def submodules_update(gitmodules, root_dir, force): else: print(f"{name:>20} up to date.") - +# checkout is done by update if required so this function may be depricated def submodules_checkout(gitmodules, root_dir, requiredlist, force=False): _, localmods = submodules_status(gitmodules, root_dir) print("") @@ -424,7 +452,7 @@ def main(): sys.exit("No submodule components found") retval = 0 if action == "update": - submodules_update(gitmodules, root_dir, force) + submodules_update(gitmodules, root_dir, fxrequired, force) elif action == "checkout": submodules_checkout(gitmodules, root_dir, fxrequired, force) elif action == "status": diff --git a/tests/test_sparse_checkout.py b/tests/test_sparse_update.py similarity index 86% rename from tests/test_sparse_checkout.py rename to tests/test_sparse_update.py index 0633802cb0..1e6ee83ba1 100644 --- a/tests/test_sparse_checkout.py +++ b/tests/test_sparse_update.py @@ -20,7 +20,7 @@ def test_sparse_checkout(git_fleximod, test_repo_base): """ (test_repo_base / "modules" / ".sparse_file_list").write_text(sparse_content) - result = git_fleximod("checkout") + result = git_fleximod("update") # Assertions assert result.returncode == 0 @@ -31,8 +31,3 @@ def test_sparse_checkout(git_fleximod, test_repo_base): assert "test_sparse_submodule at tag MPIserial_2.5.0" in status.stdout - result = git_fleximod("update") - assert result.returncode == 0 - - status = git_fleximod("status test_sparse_submodule") - assert "test_sparse_submodule at tag MPIserial_2.5.0" in status.stdout diff --git a/tests/test_checkout.py b/tests/test_update.py similarity index 82% rename from tests/test_checkout.py rename to tests/test_update.py index 791587628b..baaea76035 100644 --- a/tests/test_checkout.py +++ b/tests/test_update.py @@ -15,7 +15,7 @@ def test_basic_checkout(git_fleximod, test_repo): (test_repo / ".gitmodules").write_text(gitmodules_content) # Run the command - result = git_fleximod("checkout") + result = git_fleximod("update") # Assertions assert result.returncode == 0 @@ -23,10 +23,5 @@ def test_basic_checkout(git_fleximod, test_repo): status = git_fleximod("status") - assert "test_submodule d82ce7c is out of sync with .gitmodules MPIserial_2.4.0" in status.stdout - - result = git_fleximod("update") - assert result.returncode == 0 - status = git_fleximod("status") assert "test_submodule at tag MPIserial_2.4.0" in status.stdout From 7c47c05ffbd4d329e94ada655c9a467e8f968585 Mon Sep 17 
00:00:00 2001 From: James Edwards Date: Wed, 14 Feb 2024 09:18:43 -0700 Subject: [PATCH 099/161] restructure of tests working --- tests/conftest.py | 20 +++++++++++++++----- tests/test_b_update.py | 4 ++-- tests/test_c_required.py | 12 ++++-------- tests/test_d_complex.py | 34 +++------------------------------- 4 files changed, 24 insertions(+), 46 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index 9bd6a758b2..fd3678f9ee 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -18,7 +18,7 @@ def logger(): "submodule_name": "test_submodule", "status1" : "test_submodule MPIserial_2.5.0-3-gd82ce7c is out of sync with .gitmodules MPIserial_2.4.0", "status2" : "test_submodule at tag MPIserial_2.4.0", - "status3" : "test_submodule MPIserial_2.5.0-3-gd82ce7c is out of sync with .gitmodules MPIserial_2.4.0", + "status3" : "test_submodule at tag MPIserial_2.4.0", "status4" : "test_submodule at tag MPIserial_2.4.0", "gitmodules_content" : """ [submodule "test_submodule"] @@ -84,6 +84,7 @@ def get_all_repos(): def write_sparse_checkout_file(fp): sparse_content = """m4 """ + print(f"writing sparse_file_list \n") fp.write_text(sparse_content) @pytest.fixture @@ -110,14 +111,23 @@ def test_repo(shared_repos, tmp_path, logger): test_dir2 = tmp_path / "testrepo2" gitp.git_operation("clone",test_dir,test_dir2) return test_dir2 - + +@pytest.fixture +def complex_repo(tmp_path, logger): + test_dir = tmp_path / "testcomplex" + test_dir.mkdir() + str_path = str(test_dir) + gitp = GitInterface(str_path, logger) + gitp.git_operation("remote", "add", "origin", "https://github.com/jedwards4b/fleximod-test") + gitp.git_operation("fetch", "origin", "main") + return test_dir @pytest.fixture -def git_fleximod(test_repo): - def _run_fleximod(args, input=None): +def git_fleximod(): + def _run_fleximod(path, args, input=None): cmd = ["git", "fleximod"] + args.split() - result = subprocess.run(cmd, cwd=test_repo, input=input, + result = subprocess.run(cmd, cwd=path, input=input, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True) return result diff --git a/tests/test_b_update.py b/tests/test_b_update.py index e03daa3974..159f1cfae0 100644 --- a/tests/test_b_update.py +++ b/tests/test_b_update.py @@ -11,7 +11,7 @@ def test_basic_checkout(git_fleximod, test_repo, shared_repos): file_path.write_text(gm) # Run the command - result = git_fleximod(f"update {repo_name}") + result = git_fleximod(test_repo, f"update {repo_name}") # Assertions assert result.returncode == 0 @@ -20,7 +20,7 @@ def test_basic_checkout(git_fleximod, test_repo, shared_repos): assert Path(test_repo / f"{repo_path}/m4").exists() # Did the submodule sparse directory get created? assert not Path(test_repo / f"{repo_path}/README").exists() # Did only the submodule sparse directory get created? 
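Passing the repository path on every call, instead of binding the fixture to a single repo, is the pytest factory-fixture pattern: the fixture returns a callable and each test chooses its own working directory (a simple per-submodule repo or the complex clone). A minimal sketch with hypothetical names:

    import subprocess
    import pytest

    @pytest.fixture
    def run_tool():
        def _run(path, args):
            # cwd comes from the caller, so one fixture serves every test repo
            return subprocess.run(["git", "fleximod"] + args.split(),
                                  cwd=path, capture_output=True, text=True)
        return _run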
- status = git_fleximod(f"status {repo_name}") + status = git_fleximod(test_repo, f"status {repo_name}") assert shared_repos["status2"] in status.stdout diff --git a/tests/test_c_required.py b/tests/test_c_required.py index b4d99c5642..d6cc040b8d 100644 --- a/tests/test_c_required.py +++ b/tests/test_c_required.py @@ -16,15 +16,11 @@ def test_required(git_fleximod, test_repo, shared_repos): file_path.write_text(gm) else: file_path.write_text(gm) - status = git_fleximod(f"status {repo_name}") - result = git_fleximod("checkout") + result = git_fleximod(test_repo, "update") assert result.returncode == 0 - status = git_fleximod(f"status {repo_name}") + status = git_fleximod(test_repo, f"status {repo_name}") assert shared_repos["status3"] in status.stdout - if "not checked out" in status.stdout: - status = git_fleximod(f"checkout {repo_name}") - assert result.returncode == 0 - status = git_fleximod(f"update {repo_name}") + status = git_fleximod(test_repo, f"update {repo_name}") assert result.returncode == 0 - status = git_fleximod(f"status {repo_name}") + status = git_fleximod(test_repo, f"status {repo_name}") assert shared_repos["status4"] in status.stdout diff --git a/tests/test_d_complex.py b/tests/test_d_complex.py index 628a92d837..2983b0193f 100644 --- a/tests/test_d_complex.py +++ b/tests/test_d_complex.py @@ -2,34 +2,6 @@ from pathlib import Path from git_fleximod.gitinterface import GitInterface -def test_complex_checkout(git_fleximod, get_all_repos, test_repo, request, logger): - gitp = None - for repo in get_all_repos: - repo_name = repo["submodule_name"] - gm = repo["gitmodules_content"] - if "shared_repos0" in request.node.name: - if not gitp: - gitp = GitInterface(str(test_repo), logger) - file_path = (test_repo / ".gitmodules") - if file_path.exists(): - with file_path.open("r") as f: - gitmodules_content = f.read() - print(f"content={gitmodules_content}") - print(f"gm={gm}") - # add the entry if it does not exist - if repo_name not in gitmodules_content: - file_path.write_text(gitmodules_content+gm) - # or if it is incomplete - elif gm not in gitmodules_content: - file_path.write_text(gm) - - else: - file_path.write_text(gm) - if "sparse" in repo_name: - print(f"writing sparse_file_list in {test_repo}") - write_sparse_checkout_file(test_repo / "modules" / ".sparse_file_list") - gitp.git_operation("add","modules/.sparse_file_list") - gitp.git_operation("commit","-a","-m","\"add submod\"") - - - assert(False) +def test_complex_checkout(git_fleximod, complex_repo, logger): + status = git_fleximod(complex_repo, "status") + print(status) From e6d4838ebeee5a7ce408aa098d9cfab0cd34d405 Mon Sep 17 00:00:00 2001 From: James Edwards Date: Fri, 16 Feb 2024 17:40:26 -0700 Subject: [PATCH 100/161] all tests are passing and complete --- README.md | 6 +- git_fleximod/git_fleximod.py | 167 +++++++++++++++++++---------------- tests/conftest.py | 13 +-- tests/test_c_required.py | 4 + tests/test_d_complex.py | 62 ++++++++++++- 5 files changed, 167 insertions(+), 85 deletions(-) diff --git a/README.md b/README.md index 259ba8b54f..94d85c4150 100644 --- a/README.md +++ b/README.md @@ -30,8 +30,8 @@ Git-fleximod is a Python-based tool that extends Git's submodule and sparse chec fxtag: Specify a specific tag or branch to checkout for a submodule. fxrequired: Mark a submodule's checkout behavior, with allowed values: - - ToplevelOnlyRequired: Top-level and required (checked out only when this is the Toplevel module). 
- - ToplevelOnlyOptional: Top-level and optional (checked out with --optional flag if this is the Toplevel module). + - ToplevelRequired: Top-level and required (checked out only when this is the Toplevel module). + - ToplevelOptional: Top-level and optional (checked out with --optional flag if this is the Toplevel module). - AlwaysRequired: Always required (always checked out). - AlwaysOptional: Always optional (checked out with --optional flag). fxsparse: Enable sparse checkout for a submodule, pointing to a file containing sparse checkout paths. @@ -89,7 +89,7 @@ Additional example: [submodule "cime"] path = cime url = https://github.com/jedwards4b/cime - fxrequired = ToplevelOnlyRequired + fxrequired = ToplevelRequired fxtag = cime6.0.198_rme01 ``` diff --git a/git_fleximod/git_fleximod.py b/git_fleximod/git_fleximod.py index f81e713814..08f31fa778 100755 --- a/git_fleximod/git_fleximod.py +++ b/git_fleximod/git_fleximod.py @@ -12,7 +12,8 @@ # logger variable is global logger = None - +def fxrequired_allowed_values(): + return ['ToplevelRequired', 'ToplevelOptional', 'AlwaysRequired', 'AlwaysOptional'] def commandline_arguments(args=None): parser = cli.get_parser() @@ -24,9 +25,9 @@ def commandline_arguments(args=None): # explicitly listing a component overrides the optional flag if options.optional or options.components: - fxrequired = ["ToplevelOnlyRequired", "ToplevelOnlyOptional", "AlwaysRequired", "AlwaysOptional"] + fxrequired = ["ToplevelRequired", "ToplevelOptional", "AlwaysRequired", "AlwaysOptional"] else: - fxrequired = ["ToplevelOnlyRequired", "AlwaysRequired"] + fxrequired = ["ToplevelRequired", "AlwaysRequired"] action = options.action if not action: @@ -67,12 +68,16 @@ def submodule_sparse_checkout( root_dir, name, url, path, sparsefile, tag="master", fxhash=None ): logger.info("Called sparse_checkout for {}".format(name)) + rgit = GitInterface(root_dir, logger) + superroot = rgit.git_operation("rev-parse", "--show-superproject-working-tree") + if superroot: + gitroot = superroot + else: + gitroot = root_dir + assert(os.path.isdir(os.path.join(gitroot,".git"))) # first create the module directory if not os.path.isdir(os.path.join(root_dir,path)): os.makedirs(os.path.join(root_dir,path)) - # Check first if the module is already defined - # and the sparse-checkout file exists - git = GitInterface(root_dir, logger) # initialize a new git repo and set the sparse checkout flag sprep_repo = os.path.join(root_dir, path) @@ -92,36 +97,39 @@ def submodule_sparse_checkout( sprepo_git.config_set_value("core", "sparseCheckout", "true") # set the repository remote - sprepo_git.git_operation("remote", "add", "origin", url) - superroot = git.git_operation("rev-parse", "--show-superproject-working-tree") - if os.path.isfile(os.path.join(root_dir, ".git")): + logger.info("Setting remote origin in {}/{}".format(root_dir,path)) + status = sprepo_git.git_operation("remote", "-v") + if url not in status: + sprepo_git.git_operation("remote", "add", "origin", url) + + topgit = os.path.join(gitroot,".git") + + if gitroot != root_dir and os.path.isfile(os.path.join(root_dir, ".git")): with open(os.path.join(root_dir, ".git")) as f: - gitpath = os.path.abspath(os.path.join(root_dir, f.read().split()[1])) - topgit = os.path.abspath(os.path.join(gitpath, "modules")) + gitpath = os.path.relpath(os.path.join(root_dir, f.read().split()[1]), start=os.path.join(root_dir,path)) + topgit = os.path.join(gitpath, "modules") else: - topgit = os.path.abspath(os.path.join(root_dir, ".git", "modules")) - - if 
not os.path.isdir(topgit): - os.makedirs(topgit) - topgit = os.path.join(topgit, name) - logger.debug( - "root_dir is {} topgit is {} superroot is {}".format( - root_dir, topgit, superroot - ) - ) - + topgit = os.path.relpath(os.path.join(root_dir, ".git", "modules"), start=os.path.join(root_dir,path)) + + with utils.pushd(sprep_repo): + if not os.path.isdir(topgit): + os.makedirs(topgit) + topgit += os.sep + name + if os.path.isdir(os.path.join(root_dir, path, ".git")): - shutil.move(os.path.join(root_dir, path, ".git"), topgit) - with open(os.path.join(root_dir, path, ".git"), "w") as f: - f.write("gitdir: " + os.path.relpath(topgit, os.path.join(root_dir, path))) - - gitsparse = os.path.abspath(os.path.join(topgit, "info", "sparse-checkout")) - if os.path.isfile(gitsparse): - logger.warning("submodule {} is already initialized".format(name)) - return - - shutil.copy(os.path.join(root_dir, path, sparsefile), gitsparse) + with utils.pushd(sprep_repo): + shutil.move(".git", topgit) + with open(".git", "w") as f: + f.write("gitdir: " + os.path.relpath(topgit)) + # assert(os.path.isdir(os.path.relpath(topgit, start=sprep_repo))) + gitsparse = os.path.abspath(os.path.join(topgit, "info", "sparse-checkout")) + if os.path.isfile(gitsparse): + logger.warning("submodule {} is already initialized {}".format(name, topgit)) + return + + with utils.pushd(sprep_repo): + shutil.copy(sparsefile, gitsparse) # Finally checkout the repo if fxhash: @@ -132,28 +140,31 @@ def submodule_sparse_checkout( sprepo_git.git_operation("fetch", "--depth=1", "origin", "--tags") sprepo_git.git_operation("checkout", tag) print(f"Successfully checked out {name:>20} at {tag}") - + rgit.config_set_value(f'submodule "{name}"',"active","true") + rgit.config_set_value(f'submodule "{name}"',"url",url) def single_submodule_checkout( - root, name, path, url=None, tag=None, force=False, fxhash=None + root, name, path, url=None, tag=None, force=False, fxhash=None, optional=False ): git = GitInterface(root, logger) repodir = os.path.join(root, path) - if os.path.exists(os.path.join(repodir, ".git")): - logger.info("Submodule {} already checked out".format(name)) - return + logger.info("Checkout {} into {}/{}".format(name,root,path)) # if url is provided update to the new url tmpurl = None + repo_exists = False + if os.path.exists(os.path.join(repodir, ".git")): + logger.info("Submodule {} already checked out".format(name)) + repo_exists = True # Look for a .gitmodules file in the newly checkedout repo - if url: + if not repo_exists and url: # ssh urls cause problems for those who dont have git accounts with ssh keys defined # but cime has one since e3sm prefers ssh to https, because the .gitmodules file was # opened with a GitModules object we don't need to worry about restoring the file here # it will be done by the GitModules class if url.startswith("git@"): tmpurl = url - url = url.replace("git@github.com:", "https://github.com") + url = url.replace("git@github.com:", "https://github.com/") git.git_operation("clone", url, path) smgit = GitInterface(repodir, logger) if not tag and not fxhash: @@ -170,26 +181,26 @@ def single_submodule_checkout( if line.startswith("gitdir: "): rootdotgit = line[8:].rstrip() - newpath = os.path.abspath(os.path.join(root, rootdotgit, "modules", path)) - if not os.path.isdir(os.path.join(newpath, os.pardir)): - os.makedirs(os.path.abspath(os.path.join(newpath, os.pardir))) - + newpath = os.path.abspath(os.path.join(root, rootdotgit, "modules", name)) shutil.move(os.path.join(repodir, ".git"), newpath) 
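This move-then-write sequence is the same trick `git submodule absorbgitdirs` performs: the submodule's real .git directory lives under the superproject's .git/modules/<name>, and the submodule work tree keeps only a one-line pointer file; the change just below switches that pointer to a relative path so the checkout stays relocatable. As an isolated sketch with made-up paths:

    import os, shutil

    def absorb_git_dir(worktree, modules_dir, name):
        # Move <worktree>/.git under the superproject's .git/modules/<name> ...
        target = os.path.join(modules_dir, name)
        shutil.move(os.path.join(worktree, ".git"), target)
        # ... and leave behind a pointer file; a relative path survives relocation.
        with open(os.path.join(worktree, ".git"), "w") as f:
            f.write("gitdir: " + os.path.relpath(target, start=worktree))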
with open(os.path.join(repodir, ".git"), "w") as f: - f.write("gitdir: " + newpath) + f.write("gitdir: " + os.path.relpath(newpath, start=repodir)) - if not tmpurl: + if not repo_exists or not tmpurl: logger.debug(git.git_operation("submodule", "update", "--init", "--", path)) if os.path.exists(os.path.join(repodir, ".gitmodules")): # recursively handle this checkout print(f"Recursively checking out submodules of {name} {repodir} {url}") gitmodules = GitModules(logger, confpath=repodir) - submodules_checkout(gitmodules, repodir, ["I:T"], force=force) + requiredlist = ["AlwaysRequired"] + if optional: + requiredlist.append("AlwaysOptional") + submodules_checkout(gitmodules, repodir, requiredlist, force=force) if os.path.exists(os.path.join(repodir, ".git")): - print(f"Successfully checked out {name}") + print(f"Successfully checked out {name} {repodir}") else: - utils.fatal_error(f"Failed to checkout {name}") + utils.fatal_error(f"Failed to checkout {name} {repo_exists} {tmpurl} {repodir} {path}") if tmpurl: print(git.git_operation("restore", ".gitmodules")) @@ -289,33 +300,37 @@ def submodules_update(gitmodules, root_dir, requiredlist, force): fxhash = gitmodules.get(name, "fxhash") path = gitmodules.get(name, "path") url = gitmodules.get(name, "url") - logger.info("name={} path={} url={} fxtag={}".format(name, path, url, fxtag)) - if not os.path.exists(os.path.join(path, ".git")): - fxrequired = gitmodules.get(name, "fxrequired") - fxsparse = gitmodules.get(name, "fxsparse") - - if fxrequired and fxrequired not in requiredlist: - if "T:F" == fxrequired: - print("Skipping optional component {}".format(name)) - continue - - if fxsparse: - logger.debug( - "Callng submodule_sparse_checkout({}, {}, {}, {}, {}, {}".format( - root_dir, name, url, path, fxsparse, fxtag - ) - ) - submodule_sparse_checkout( - root_dir, name, url, path, fxsparse, tag=fxtag, fxhash=fxhash - ) - else: - logger.debug( - "Calling submodule_checkout({},{},{})".format(root_dir, name, path) + logger.info("name={} path={} url={} fxtag={} requiredlist={}".format(name,os.path.join(root_dir, path), url, fxtag, requiredlist)) +# if not os.path.exists(os.path.join(root_dir,path, ".git")): + fxrequired = gitmodules.get(name, "fxrequired") + assert(fxrequired in fxrequired_allowed_values()) + rgit = GitInterface(root_dir, logger) + superroot = rgit.git_operation("rev-parse", "--show-superproject-working-tree") + + fxsparse = gitmodules.get(name, "fxsparse") + + if fxrequired and (superroot and "Toplevel" in fxrequired) or fxrequired not in requiredlist: + if "ToplevelOptional" == fxrequired: + print("Skipping optional component {}".format(name)) + continue + if fxsparse: + logger.debug( + "Callng submodule_sparse_checkout({}, {}, {}, {}, {}, {}".format( + root_dir, name, url, path, fxsparse, fxtag ) + ) + submodule_sparse_checkout( + root_dir, name, url, path, fxsparse, tag=fxtag, fxhash=fxhash + ) + else: + logger.info( + "Calling submodule_checkout({},{},{},{})".format(root_dir, name, path,url) + ) - single_submodule_checkout( - root_dir, name, path, url=url, tag=fxtag, force=force, fxhash=fxhash - ) + single_submodule_checkout( + root_dir, name, path, url=url, tag=fxtag, force=force, + fxhash=fxhash, optional=("AlwaysOptional" in requiredlist) + ) if os.path.exists(os.path.join(path, ".git")): @@ -371,7 +386,6 @@ def submodules_checkout(gitmodules, root_dir, requiredlist, force=False): fxhash = gitmodules.get(name, "fxhash") path = gitmodules.get(name, "path") url = gitmodules.get(name, "url") - if fxrequired and fxrequired not 
in requiredlist: if "Optional" in fxrequired: print("Skipping optional component {}".format(name)) @@ -392,7 +406,8 @@ def submodules_checkout(gitmodules, root_dir, requiredlist, force=False): ) single_submodule_checkout( - root_dir, name, path, url=url, tag=fxtag, force=force, fxhash=fxhash + root_dir, name, path, url=url, tag=fxtag, force=force, fxhash=fxhash, + optional = "AlwaysOptional" in requiredlist ) @@ -441,7 +456,7 @@ def main(): ) root_dir = os.path.dirname(file_path) - logger.info("root_dir is {}".format(root_dir)) + logger.info("root_dir is {} includelist={} excludelist={}".format(root_dir, includelist, excludelist)) gitmodules = GitModules( logger, confpath=root_dir, diff --git a/tests/conftest.py b/tests/conftest.py index fd3678f9ee..b88fa61d27 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -26,7 +26,7 @@ def logger(): url = https://github.com/ESMCI/mpi-serial.git fxtag = MPIserial_2.4.0 fxurl = https://github.com/ESMCI/mpi-serial.git - fxrequired = ToplevelOnlyRequired + fxrequired = ToplevelRequired """}, {"subrepo_path": "modules/test_optional", "submodule_name": "test_optional", @@ -35,12 +35,12 @@ def logger(): "status3" : "test_optional not checked out, aligned at tag MPIserial_2.4.0", "status4" : "test_optional at tag MPIserial_2.4.0", "gitmodules_content": """ - [submodule "test_optional"] + [submodule "test_optional"] path = modules/test_optional url = https://github.com/ESMCI/mpi-serial.git fxtag = MPIserial_2.4.0 fxurl = https://github.com/ESMCI/mpi-serial.git - fxrequired = ToplevelOnlyOptional + fxrequired = ToplevelOptional """}, {"subrepo_path": "modules/test_alwaysoptional", "submodule_name": "test_alwaysoptional", @@ -84,7 +84,6 @@ def get_all_repos(): def write_sparse_checkout_file(fp): sparse_content = """m4 """ - print(f"writing sparse_file_list \n") fp.write_text(sparse_content) @pytest.fixture @@ -119,8 +118,9 @@ def complex_repo(tmp_path, logger): test_dir.mkdir() str_path = str(test_dir) gitp = GitInterface(str_path, logger) - gitp.git_operation("remote", "add", "origin", "https://github.com/jedwards4b/fleximod-test") + gitp.git_operation("remote", "add", "origin", "https://github.com/jedwards4b/fleximod-test2") gitp.git_operation("fetch", "origin", "main") + gitp.git_operation("checkout", "main") return test_dir @pytest.fixture @@ -130,6 +130,9 @@ def _run_fleximod(path, args, input=None): result = subprocess.run(cmd, cwd=path, input=input, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True) + if result.returncode: + print(result.stdout) + print(result.stderr) return result return _run_fleximod diff --git a/tests/test_c_required.py b/tests/test_c_required.py index d6cc040b8d..89ab8d294d 100644 --- a/tests/test_c_required.py +++ b/tests/test_c_required.py @@ -20,6 +20,10 @@ def test_required(git_fleximod, test_repo, shared_repos): assert result.returncode == 0 status = git_fleximod(test_repo, f"status {repo_name}") assert shared_repos["status3"] in status.stdout + status = git_fleximod(test_repo, f"update --optional") + assert result.returncode == 0 + status = git_fleximod(test_repo, f"status {repo_name}") + assert shared_repos["status4"] in status.stdout status = git_fleximod(test_repo, f"update {repo_name}") assert result.returncode == 0 status = git_fleximod(test_repo, f"status {repo_name}") diff --git a/tests/test_d_complex.py b/tests/test_d_complex.py index 2983b0193f..fdce516274 100644 --- a/tests/test_d_complex.py +++ b/tests/test_d_complex.py @@ -4,4 +4,64 @@ def test_complex_checkout(git_fleximod, complex_repo, logger): 
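The update path now filters each submodule by its fxrequired value before checking it out: Toplevel entries are ignored when fleximod runs inside a superproject, and optional entries only act once --optional (or an explicit component) has widened the required list. A simplified sketch of that intent, not a line-for-line copy of the patch's conditional:

    FXREQUIRED = ["ToplevelRequired", "ToplevelOptional",
                  "AlwaysRequired", "AlwaysOptional"]

    def should_process(fxrequired, requiredlist, in_superproject):
        assert fxrequired in FXREQUIRED
        # "Toplevel*" entries only apply when this repo is itself the top level
        if in_superproject and "Toplevel" in fxrequired:
            return False
        # Optional entries are skipped unless the caller widened requiredlist
        return fxrequired in requiredlist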
status = git_fleximod(complex_repo, "status") - print(status) + assert("ToplevelOptional not checked out, aligned at tag v5.3.2" in status.stdout) + assert("ToplevelRequired not checked out, aligned at tag MPIserial_2.5.0" in status.stdout) + assert("AlwaysRequired not checked out, aligned at tag MPIserial_2.4.0" in status.stdout) + assert("Complex not checked out, aligned at tag testtag01" in status.stdout) + assert("AlwaysOptional not checked out, aligned at tag MPIserial_2.3.0" in status.stdout) + + # This should checkout and update test_submodule and complex_sub + result = git_fleximod(complex_repo, "update") + assert result.returncode == 0 + + status = git_fleximod(complex_repo, "status") + assert("ToplevelOptional not checked out, aligned at tag v5.3.2" in status.stdout) + assert("ToplevelRequired at tag MPIserial_2.5.0" in status.stdout) + assert("AlwaysRequired at tag MPIserial_2.4.0" in status.stdout) + assert("Complex at tag testtag01" in status.stdout) + + # now check the complex_sub + root = (complex_repo / "modules" / "complex") + assert(not (root / "libraries" / "gptl" / ".git").exists()) + assert(not (root / "libraries" / "mpi-serial" / ".git").exists()) + assert((root / "modules" / "mpi-serial" / ".git").exists()) + assert(not (root / "modules" / "mpi-serial2" / ".git").exists()) + assert((root / "modules" / "mpi-sparse" / ".git").exists()) + assert((root / "modules" / "mpi-sparse" / "m4").exists()) + assert(not (root / "modules" / "mpi-sparse" / "README").exists()) + + # update a single optional submodule + + result = git_fleximod(complex_repo, "update ToplevelOptional") + assert result.returncode == 0 + + status = git_fleximod(complex_repo, "status") + assert("ToplevelOptional at tag v5.3.2" in status.stdout) + assert("ToplevelRequired at tag MPIserial_2.5.0" in status.stdout) + assert("AlwaysRequired at tag MPIserial_2.4.0" in status.stdout) + assert("Complex at tag testtag01" in status.stdout) + assert("AlwaysOptional not checked out, aligned at tag MPIserial_2.3.0" in status.stdout) + + + # Finally update optional + result = git_fleximod(complex_repo, "update --optional") + assert result.returncode == 0 + + status = git_fleximod(complex_repo, "status") + assert("ToplevelOptional at tag v5.3.2" in status.stdout) + assert("ToplevelRequired at tag MPIserial_2.5.0" in status.stdout) + assert("AlwaysRequired at tag MPIserial_2.4.0" in status.stdout) + assert("Complex at tag testtag01" in status.stdout) + assert("AlwaysOptional at tag MPIserial_2.3.0" in status.stdout) + + # now check the complex_sub + root = (complex_repo / "modules" / "complex" ) + assert(not (root / "libraries" / "gptl" / ".git").exists()) + assert(not (root / "libraries" / "mpi-serial" / ".git").exists()) + assert((root / "modules" / "mpi-serial" / ".git").exists()) + assert((root / "modules" / "mpi-serial2" / ".git").exists()) + assert((root / "modules" / "mpi-sparse" / ".git").exists()) + assert((root / "modules" / "mpi-sparse" / "m4").exists()) + assert(not (root / "modules" / "mpi-sparse" / "README").exists()) + + From 194d845ad2131c7ce50e1f5a14fc5476799c87fb Mon Sep 17 00:00:00 2001 From: James Edwards Date: Sat, 17 Feb 2024 08:08:57 -0700 Subject: [PATCH 101/161] add git user info to tests --- .github/workflows/pytest.yaml | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/.github/workflows/pytest.yaml b/.github/workflows/pytest.yaml index 4809a2787f..0868dd9a33 100644 --- a/.github/workflows/pytest.yaml +++ b/.github/workflows/pytest.yaml @@ -70,5 +70,8 @@ jobs: # And finally 
run tests. I'm using pytest and all my pytest config is in my `pyproject.toml` # so this line is super-simple. But it could be as complex as you need. - - run: poetry run pytest + - run: | + git config --global user.name "${GITHUB_ACTOR}" + git config --global user.email "${GITHUB_ACTOR_ID}+${GITHUB_ACTOR}@users.noreply.github.com" + poetry run pytest From d64c5c6dd311364cc98ffc5ca39be425437e377c Mon Sep 17 00:00:00 2001 From: James Edwards Date: Sat, 17 Feb 2024 08:14:37 -0700 Subject: [PATCH 102/161] remove checkout user option --- git_fleximod/cli.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/git_fleximod/cli.py b/git_fleximod/cli.py index 719b156206..80dbd58d5a 100644 --- a/git_fleximod/cli.py +++ b/git_fleximod/cli.py @@ -25,11 +25,11 @@ def get_parser(): # # user options # - choices = ["update", "checkout", "status", "test"] + choices = ["update", "status", "test"] parser.add_argument( "action", choices=choices, - default="checkout", + default="update", help=f"Subcommand of git-fleximod, choices are {choices[:-1]}", ) From 6bac7946376f7ae85511b4ed02ad4881f9e0440d Mon Sep 17 00:00:00 2001 From: James Edwards Date: Sat, 17 Feb 2024 09:43:54 -0700 Subject: [PATCH 103/161] remove fxhash, putting hash in tag works fine --- git_fleximod/git_fleximod.py | 87 +++++++++++------------------------- tests/conftest.py | 10 ++--- 2 files changed, 32 insertions(+), 65 deletions(-) diff --git a/git_fleximod/git_fleximod.py b/git_fleximod/git_fleximod.py index 08f31fa778..7a4470e246 100755 --- a/git_fleximod/git_fleximod.py +++ b/git_fleximod/git_fleximod.py @@ -31,7 +31,7 @@ def commandline_arguments(args=None): action = options.action if not action: - action = "checkout" + action = "update" handlers = [logging.StreamHandler()] if options.debug: @@ -65,7 +65,7 @@ def commandline_arguments(args=None): def submodule_sparse_checkout( - root_dir, name, url, path, sparsefile, tag="master", fxhash=None + root_dir, name, url, path, sparsefile, tag="master" ): logger.info("Called sparse_checkout for {}".format(name)) rgit = GitInterface(root_dir, logger) @@ -132,19 +132,14 @@ def submodule_sparse_checkout( shutil.copy(sparsefile, gitsparse) # Finally checkout the repo - if fxhash: - sprepo_git.git_operation("fetch", "origin", "--tags") - sprepo_git.git_operation("checkout", fxhash) - print(f"Successfully checked out {name:>20} at {fxhash}") - else: - sprepo_git.git_operation("fetch", "--depth=1", "origin", "--tags") - sprepo_git.git_operation("checkout", tag) - print(f"Successfully checked out {name:>20} at {tag}") + sprepo_git.git_operation("fetch", "--depth=1", "origin", "--tags") + sprepo_git.git_operation("checkout", tag) + print(f"Successfully checked out {name:>20} at {tag}") rgit.config_set_value(f'submodule "{name}"',"active","true") rgit.config_set_value(f'submodule "{name}"',"url",url) def single_submodule_checkout( - root, name, path, url=None, tag=None, force=False, fxhash=None, optional=False + root, name, path, url=None, tag=None, force=False, optional=False ): git = GitInterface(root, logger) repodir = os.path.join(root, path) @@ -167,12 +162,9 @@ def single_submodule_checkout( url = url.replace("git@github.com:", "https://github.com/") git.git_operation("clone", url, path) smgit = GitInterface(repodir, logger) - if not tag and not fxhash: + if not tag: tag = smgit.git_operation("describe", "--tags", "--always").rstrip() - if fxhash: - smgit.git_operation("checkout", fxhash) - else: - smgit.git_operation("checkout", tag) + smgit.git_operation("checkout", tag) # Now 
need to move the .git dir to the submodule location rootdotgit = os.path.join(root, ".git") if os.path.isfile(rootdotgit): @@ -214,9 +206,6 @@ def submodules_status(gitmodules, root_dir): for name in gitmodules.sections(): path = gitmodules.get(name, "path") tag = gitmodules.get(name, "fxtag") - fxhash = gitmodules.get(name, "fxhash") - if tag and fxhash: - utils.fatal_error(f"{name:>20} cannot have both fxtag and fxhash") if not path: utils.fatal_error("No path found in .gitmodules for {}".format(name)) newpath = os.path.join(root_dir, path) @@ -234,48 +223,33 @@ def submodules_status(gitmodules, root_dir): if tag and tag == atag: print(f"e {name:>20} not checked out, aligned at tag {tag}") elif tag: - print( - f"e {name:>20} not checked out, out of sync at tag {atag}, expected tag is {tag}" - ) - testfails += 1 - elif fxhash: - n = len(fxhash) - smhash = rootgit.git_operation( - "ls-tree", "--object-only", f"--abbrev={n}", "HEAD", path - ) - if smhash == fxhash: - print(f" {name:>20} not checked out, aligned at hash {fxhash}") + ahash = rootgit.git_operation("submodule", "status", "{}".format(path)).rstrip() + ahash = ahash[1:len(tag)+1] + if tag == ahash: + print(f"e {name:>20} not checked out, aligned at hash {ahash}") else: print( - f"s {name:>20} not checked out, out of sync at hash {smhash}, expected hash is {fxhash}" + f"e {name:>20} not checked out, out of sync at tag {atag}, expected tag is {tag}" ) testfails += 1 else: - print(f"e {name:>20} has no fxtag nor fxhash defined in .gitmodules") + print(f"e {name:>20} has no fxtag defined in .gitmodules") testfails += 1 else: with utils.pushd(newpath): git = GitInterface(newpath, logger) atag = git.git_operation("describe", "--tags", "--always").rstrip() ahash = git.git_operation("status").partition("\n")[0].split()[-1] - if tag and atag != tag: - print(f"s {name:>20} {atag} is out of sync with .gitmodules {tag}") - testfails += 1 - elif tag: + if tag and atag == tag: print(f" {name:>20} at tag {tag}") - elif fxhash: - rootgit = GitInterface(root_dir, logger) - n = len(fxhash) - if ahash.startswith(fxhash): - print(f" {name:>20} at hash {fxhash}") - else: - print( - f"s {name:>20} {ahash} is out of sync with .gitmodules {fxhash}" - ) - testfails += 1 + elif tag and ahash == tag: + print(f" {name:>20} at hash {ahash}") + elif tag: + print(f"s {name:>20} {atag} {ahash} is out of sync with .gitmodules {tag}") + testfails += 1 else: print( - f"e {name:>20} has no fxtag nor fxhash defined in .gitmodules, module at {atag}" + f"e {name:>20} has no fxtag defined in .gitmodules, module at {atag}" ) testfails += 1 @@ -297,7 +271,6 @@ def submodules_update(gitmodules, root_dir, requiredlist, force): return for name in gitmodules.sections(): fxtag = gitmodules.get(name, "fxtag") - fxhash = gitmodules.get(name, "fxhash") path = gitmodules.get(name, "path") url = gitmodules.get(name, "url") logger.info("name={} path={} url={} fxtag={} requiredlist={}".format(name,os.path.join(root_dir, path), url, fxtag, requiredlist)) @@ -320,7 +293,7 @@ def submodules_update(gitmodules, root_dir, requiredlist, force): ) ) submodule_sparse_checkout( - root_dir, name, url, path, fxsparse, tag=fxtag, fxhash=fxhash + root_dir, name, url, path, fxsparse, tag=fxtag ) else: logger.info( @@ -329,7 +302,7 @@ def submodules_update(gitmodules, root_dir, requiredlist, force): single_submodule_checkout( root_dir, name, path, url=url, tag=fxtag, force=force, - fxhash=fxhash, optional=("AlwaysOptional" in requiredlist) + optional=("AlwaysOptional" in requiredlist) ) @@ -362,11 
+335,8 @@ def submodules_update(gitmodules, root_dir, requiredlist, force): if fxtag and fxtag != atag: print(f"{name:>20} updated to {fxtag}") git.git_operation("checkout", fxtag) - elif fxhash: - print(f"{name:>20} updated to {fxhash}") - git.git_operation("checkout", fxhash) - elif not (fxtag or fxhash): - print(f"No fxtag nor fxhash found for submodule {name:>20}") + elif not fxtag: + print(f"No fxtag found for submodule {name:>20}") else: print(f"{name:>20} up to date.") @@ -383,7 +353,6 @@ def submodules_checkout(gitmodules, root_dir, requiredlist, force=False): fxrequired = gitmodules.get(name, "fxrequired") fxsparse = gitmodules.get(name, "fxsparse") fxtag = gitmodules.get(name, "fxtag") - fxhash = gitmodules.get(name, "fxhash") path = gitmodules.get(name, "path") url = gitmodules.get(name, "url") if fxrequired and fxrequired not in requiredlist: @@ -398,7 +367,7 @@ def submodules_checkout(gitmodules, root_dir, requiredlist, force=False): ) ) submodule_sparse_checkout( - root_dir, name, url, path, fxsparse, tag=fxtag, fxhash=fxhash + root_dir, name, url, path, fxsparse, tag=fxtag ) else: logger.debug( @@ -406,7 +375,7 @@ def submodules_checkout(gitmodules, root_dir, requiredlist, force=False): ) single_submodule_checkout( - root_dir, name, path, url=url, tag=fxtag, force=force, fxhash=fxhash, + root_dir, name, path, url=url, tag=fxtag, force=force, optional = "AlwaysOptional" in requiredlist ) @@ -469,8 +438,6 @@ def main(): retval = 0 if action == "update": submodules_update(gitmodules, root_dir, fxrequired, force) - elif action == "checkout": - submodules_checkout(gitmodules, root_dir, fxrequired, force) elif action == "status": submodules_status(gitmodules, root_dir) elif action == "test": diff --git a/tests/conftest.py b/tests/conftest.py index b88fa61d27..5fed907c4f 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -44,15 +44,15 @@ def logger(): """}, {"subrepo_path": "modules/test_alwaysoptional", "submodule_name": "test_alwaysoptional", - "status1" : "test_alwaysoptional MPIserial_2.5.0-3-gd82ce7c is out of sync with .gitmodules MPIserial_2.3.0", - "status2" : "test_alwaysoptional at tag MPIserial_2.3.0", - "status3" : "test_alwaysoptional not checked out, aligned at tag MPIserial_2.3.0", - "status4" : "test_alwaysoptional at tag MPIserial_2.3.0", + "status1" : "test_alwaysoptional MPIserial_2.3.0 is out of sync with .gitmodules e5cf35c", + "status2" : "test_alwaysoptional at hash e5cf35c", + "status3" : "test_alwaysoptional not checked out, out of sync at tag MPIserial_2.3.0", + "status4" : "test_alwaysoptional at hash e5cf35c", "gitmodules_content": """ [submodule "test_alwaysoptional"] path = modules/test_alwaysoptional url = https://github.com/ESMCI/mpi-serial.git - fxtag = MPIserial_2.3.0 + fxtag = e5cf35c fxurl = https://github.com/ESMCI/mpi-serial.git fxrequired = AlwaysOptional """}, From 678bae800ca818a0dd458dbc39334007da8f973d Mon Sep 17 00:00:00 2001 From: James Edwards Date: Sun, 18 Feb 2024 10:09:05 -0700 Subject: [PATCH 104/161] update version --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 772b154216..e8c7bd58ba 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "git-fleximod" -version = "0.6.2" +version = "0.7.0" description = "Extended support for git-submodule and git-sparse-checkout" authors = ["Jim Edwards "] maintainers = ["Jim Edwards "] From 8d0ed18d3a7f30048aa4b302eb607e27bf1d45ff Mon Sep 17 00:00:00 2001 From: James Edwards Date: Thu, 
22 Feb 2024 09:25:59 -0700 Subject: [PATCH 105/161] add translater from manage_externals --- git_fleximod/gitinterface.py | 22 ++-- git_fleximod/gitmodules.py | 39 +++--- git_fleximod/metoflexi.py | 242 +++++++++++++++++++++++++++++++++++ pyproject.toml | 1 + 4 files changed, 281 insertions(+), 23 deletions(-) create mode 100755 git_fleximod/metoflexi.py diff --git a/git_fleximod/gitinterface.py b/git_fleximod/gitinterface.py index 4d4c4f4b9e..203c500304 100644 --- a/git_fleximod/gitinterface.py +++ b/git_fleximod/gitinterface.py @@ -1,25 +1,31 @@ import os import sys from . import utils +from pathlib import Path class GitInterface: def __init__(self, repo_path, logger): logger.debug("Initialize GitInterface for {}".format(repo_path)) - self.repo_path = repo_path + if isinstance(repo_path, str): + self.repo_path = Path(repo_path).resolve() + elif isinstance(repo_path, Path): + self.repo_path = repo_path.resolve() + else: + raise TypeError("repo_path must be a str or Path object") self.logger = logger try: import git self._use_module = True try: - self.repo = git.Repo(repo_path) # Initialize GitPython repo + self.repo = git.Repo(str(self.repo_path)) # Initialize GitPython repo except git.exc.InvalidGitRepositoryError: self.git = git self._init_git_repo() msg = "Using GitPython interface to git" except ImportError: self._use_module = False - if not os.path.exists(os.path.join(repo_path, ".git")): + if not (repo_path / ".git").exists(): self._init_git_repo() msg = "Using shell interface to git" self.logger.info(msg) @@ -32,13 +38,13 @@ def _git_command(self, operation, *args): except Exception as e: sys.exit(e) else: - return ["git", "-C", self.repo_path, operation] + list(args) + return ["git", "-C", str(self.repo_path), operation] + list(args) def _init_git_repo(self): if self._use_module: - self.repo = self.git.Repo.init(self.repo_path) + self.repo = self.git.Repo.init(str(self.repo_path)) else: - command = ("git", "-C", self.repo_path, "init") + command = ("git", "-C", str(self.repo_path), "init") utils.execute_subprocess(command) # pylint: disable=unused-argument @@ -58,7 +64,7 @@ def config_get_value(self, section, name): config = self.repo.config_reader() return config.get_value(section, name) else: - cmd = ("git", "-C", self.repo_path, "config", "--get", f"{section}.{name}") + cmd = ("git", "-C", str(self.repo_path), "config", "--get", f"{section}.{name}") output = utils.execute_subprocess(cmd, output_to_caller=True) return output.strip() @@ -68,6 +74,6 @@ def config_set_value(self, section, name, value): writer.set_value(section, name, value) writer.release() # Ensure changes are saved else: - cmd = ("git", "-C", self.repo_path, "config", f"{section}.{name}", value) + cmd = ("git", "-C", str(self.repo_path), "config", f"{section}.{name}", value) self.logger.info(cmd) utils.execute_subprocess(cmd, output_to_caller=True) diff --git a/git_fleximod/gitmodules.py b/git_fleximod/gitmodules.py index ae0ebe121c..68c82d066f 100644 --- a/git_fleximod/gitmodules.py +++ b/git_fleximod/gitmodules.py @@ -1,14 +1,14 @@ -import os import shutil -from configparser import ConfigParser +from pathlib import Path +from configparser import RawConfigParser, ConfigParser from .lstripreader import LstripReader -class GitModules(ConfigParser): +class GitModules(RawConfigParser): def __init__( self, logger, - confpath=os.getcwd(), + confpath=Path.cwd(), conffile=".gitmodules", includelist=None, excludelist=None, @@ -25,25 +25,32 @@ def __init__( confpath, conffile, includelist, excludelist ) ) - 
ConfigParser.__init__(self) - self.conf_file = os.path.join(confpath, conffile) - # first create a backup of this file to be restored on deletion of the object - shutil.copy(self.conf_file, self.conf_file + ".save") - self.read_file(LstripReader(self.conf_file), source=conffile) + super().__init__() + self.conf_file = (Path(confpath) / Path(conffile)) + if self.conf_file.exists(): + self.read_file(LstripReader(str(self.conf_file)), source=conffile) self.includelist = includelist self.excludelist = excludelist + self.isdirty = False + + def reload(self): + self.clear() + if self.conf_file.exists(): + self.read_file(LstripReader(str(self.conf_file)), source=self.conf_file) + def set(self, name, option, value): """ Sets a configuration value for a specific submodule: Ensures the appropriate section exists for the submodule. Calls the parent class's set method to store the value. """ + self.isdirty = True self.logger.debug("set called {} {} {}".format(name, option, value)) section = f'submodule "{name}"' if not self.has_section(section): self.add_section(section) - ConfigParser.set(self, section, option, str(value)) + super().set(section, option, str(value)) # pylint: disable=redefined-builtin, arguments-differ def get(self, name, option, raw=False, vars=None, fallback=None): @@ -62,12 +69,14 @@ def get(self, name, option, raw=False, vars=None, fallback=None): return None def save(self): - print("Called gitmodules save, not expected") - # self.write(open(self.conf_file, "w")) - + if self.isdirty: + self.logger.info("Writing {}".format(self.conf_file)) + with open(self.conf_file, "w") as fd: + self.write(fd) + self.isdirty = False + def __del__(self): - self.logger.debug("Destroying GitModules object") - shutil.move(self.conf_file + ".save", self.conf_file) + self.save() def sections(self): """Strip the submodule part out of section and just use the name""" diff --git a/git_fleximod/metoflexi.py b/git_fleximod/metoflexi.py new file mode 100755 index 0000000000..1d56bc9ad8 --- /dev/null +++ b/git_fleximod/metoflexi.py @@ -0,0 +1,242 @@ +#!/usr/bin/env python +from configparser import ConfigParser +import sys +import shutil +from pathlib import Path +import argparse +import logging +from git_fleximod.gitinterface import GitInterface +from git_fleximod.gitmodules import GitModules +from git_fleximod import utils + +logger = None + +def find_root_dir(filename=".git"): + d = Path.cwd() + root = Path(d.root) + while d != root: + attempt = d / filename + if attempt.is_dir(): + return attempt + d = d.parent + return None + + +def get_parser(): + description = """ + %(prog)s manages checking out groups of gitsubmodules with addtional support for Earth System Models + """ + parser = argparse.ArgumentParser( + description=description, formatter_class=argparse.RawDescriptionHelpFormatter + ) + + parser.add_argument('-e', '--externals', nargs='?', + default='Externals.cfg', + help='The externals description filename. ' + 'Default: %(default)s.') + + parser.add_argument( + "-C", + "--path", + default=find_root_dir(), + help="Toplevel repository directory. Defaults to top git directory relative to current.", + ) + + parser.add_argument( + "-g", + "--gitmodules", + nargs="?", + default=".gitmodules", + help="The submodule description filename. " "Default: %(default)s.", + ) + parser.add_argument( + "-v", + "--verbose", + action="count", + default=0, + help="Output additional information to " + "the screen and log file. 
This flag can be " + "used up to two times, increasing the " + "verbosity level each time.", + ) + parser.add_argument( + "-d", + "--debug", + action="store_true", + default=False, + help="DEVELOPER: output additional debugging " + "information to the screen and log file.", + ) + + return parser + +def commandline_arguments(args=None): + parser = get_parser() + + options = parser.parse_args(args) + handlers = [logging.StreamHandler()] + + if options.debug: + try: + open("fleximod.log", "w") + except PermissionError: + sys.exit("ABORT: Could not write file fleximod.log") + level = logging.DEBUG + handlers.append(logging.FileHandler("fleximod.log")) + elif options.verbose: + level = logging.INFO + else: + level = logging.WARNING + # Configure the root logger + logging.basicConfig( + level=level, format="%(name)s - %(levelname)s - %(message)s", handlers=handlers + ) + + return( + options.path, + options.gitmodules, + options.externals + ) + +class ExternalRepoTranslator: + """ + Translates external repositories configured in an INI-style externals file. + """ + + def __init__(self, rootpath, gitmodules, externals): + self.rootpath = rootpath + if gitmodules: + self.gitmodules = GitModules(logger, confpath=rootpath) + self.externals = (rootpath / Path(externals)).resolve() + print(f"Translating {self.externals}") + self.git = GitInterface(rootpath, logger) + +# def __del__(self): +# if (self.rootpath / "save.gitignore"): + + + def translate_single_repo(self, section, tag, url, path, efile, hash_, sparse, protocol): + """ + Translates a single repository based on configuration details. + + Args: + rootpath (str): Root path of the main repository. + gitmodules (str): Path to the .gitmodules file. + tag (str): The tag to use for the external repository. + url (str): The URL of the external repository. + path (str): The relative path within the main repository for the external repository. + efile (str): The external file or file containing submodules. + hash_ (str): The commit hash to checkout (if applicable). + sparse (str): Boolean indicating whether to use sparse checkout (if applicable). + protocol (str): The protocol to use (e.g., 'git', 'http'). 
+ """ + assert protocol != "svn", "SVN protocol is not currently supported" + print(f"Translating repository {section}") + if efile: + file_path = Path(path) / Path(efile) + newroot = (self.rootpath / file_path).parent.resolve() + if not newroot.exists(): + newroot.mkdir(parents=True) + logger.info("Newroot is {}".format(newroot)) + newt = ExternalRepoTranslator(newroot, ".gitmodules", efile) + newt.translate_repo() + if protocol == "externals_only": + if tag: + self.gitmodules.set(section, "fxtag", tag) + if hash_: + self.gitmodules.set(section, "fxtag", hash_) + + self.gitmodules.set(section, "fxurl", url) + if sparse: + self.gitmodules.set(section, "fxsparse", sparse) + self.gitmodules.set(section, "fxrequired", "ToplevelRequired") + + return + + newpath = (self.rootpath / Path(path)) + if newpath.exists(): + shutil.rmtree(newpath) + logger.info("Creating directory {}".format(newpath)) + newpath.mkdir(parents=True) + if tag: + logger.info("cloning {}".format(section)) + try: + self.git.git_operation("clone", "-b", tag, "--depth", "1", url, path) + except: + self.git.git_operation("clone", url, path) + with utils.pushd(newpath): + ngit = GitInterface(newpath, logger) + ngit.git_operation("checkout", tag) + + if (newpath / ".gitignore").exists(): + logger.info("Moving .gitignore file in {}".format(newpath)) + (newpath / ".gitignore").rename((newpath / "save.gitignore")) + + if hash_: + self.git.git_operation("clone", url, path) + git = GitInterface(newpath, logger) + git.git_operation("fetch", "origin") + git.git_operation("checkout", hash_) + if sparse: + print("setting as sparse submodule {}".format(section)) + sparsefile = (newpath / Path(sparse)) + newfile = (newpath / ".git" / "info" / "sparse-checkout") + print(f"sparsefile {sparsefile} newfile {newfile}") + shutil.copy(sparsefile, newfile) + logger.info("adding submodule {}".format(section)) + self.gitmodules.save() + self.git.git_operation("submodule", "add", "-f", "--name", section, url, path) + self.git.git_operation("submodule","absorbgitdirs") + self.gitmodules.reload() + if tag: + self.gitmodules.set(section, "fxtag", tag) + if hash_: + self.gitmodules.set(section, "fxtag", hash_) + + self.gitmodules.set(section, "fxurl", url) + if sparse: + self.gitmodules.set(section, "fxsparse", sparse) + self.gitmodules.set(section, "fxrequired", "ToplevelRequired") + + + def translate_repo(self): + """ + Translates external repositories defined within an external file. + + Args: + rootpath (str): Root path of the main repository. + gitmodules (str): Path to the .gitmodules file. + external_file (str): The path to the external file containing repository definitions. 
+ """ + econfig = ConfigParser() + econfig.read((self.rootpath / Path(self.externals))) + + for section in econfig.sections(): + if section == "externals_description": + logger.info("skipping section {}".format(section)) + return + logger.info("Translating section {}".format(section)) + tag = econfig.get(section, "tag", raw=False, fallback=None) + url = econfig.get(section, "repo_url", raw=False, fallback=None) + path = econfig.get(section, "local_path", raw=False, fallback=None) + efile = econfig.get(section, "externals", raw=False, fallback=None) + hash_ = econfig.get(section, "hash", raw=False, fallback=None) + sparse = econfig.get(section, "sparse", raw=False, fallback=None) + protocol = econfig.get(section, "protocol", raw=False, fallback=None) + + self.translate_single_repo(section, tag, url, path, efile, hash_, sparse, protocol) + + + +def _main(): + rootpath, gitmodules, externals = commandline_arguments() + global logger + logger = logging.getLogger(__name__) + with utils.pushd(rootpath): + t = ExternalRepoTranslator(Path(rootpath), gitmodules, externals) + logger.info("Translating {}".format(rootpath)) + t.translate_repo() + + +if __name__ == "__main__": + sys.exit(_main()) diff --git a/pyproject.toml b/pyproject.toml index e8c7bd58ba..212b014d97 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -15,6 +15,7 @@ packages = [ [tool.poetry.scripts] git-fleximod = "git_fleximod.git_fleximod:main" +me2flexi = "git_fleximod.metoflexi:_main" fsspec = "fsspec.fuse:main" [tool.poetry.dependencies] From 91df5a0ea5f8601c01c0462cf7e9a8aca84f2aa3 Mon Sep 17 00:00:00 2001 From: James Edwards Date: Wed, 28 Feb 2024 16:00:42 -0700 Subject: [PATCH 106/161] update README and improve me2flexi --- README.md | 5 ++--- git_fleximod/git_fleximod.py | 5 +++-- git_fleximod/metoflexi.py | 6 +++--- 3 files changed, 8 insertions(+), 8 deletions(-) diff --git a/README.md b/README.md index 94d85c4150..c17c5b6f7a 100644 --- a/README.md +++ b/README.md @@ -17,7 +17,6 @@ Git-fleximod is a Python-based tool that extends Git's submodule and sparse chec Basic Usage: git fleximod [options] Available Commands: - checkout: Checkout submodules according to git submodule hash configuration. status: Display the status of submodules. update: Update submodules to the tag indicated in .gitmodules variable fxtag. 
test: Make sure that fxtags and submodule hashes are consistant, @@ -55,9 +54,9 @@ Git-fleximod is a Python-based tool that extends Git's submodule and sparse chec Here are some common usage examples: -Checkout submodules, including optional ones: +Update all submodules, including optional ones: ```bash - git fleximod checkout --optional + git fleximod update --optional ``` Updating a specific submodule to the fxtag indicated in .gitmodules: diff --git a/git_fleximod/git_fleximod.py b/git_fleximod/git_fleximod.py index 7a4470e246..1f60b1822b 100755 --- a/git_fleximod/git_fleximod.py +++ b/git_fleximod/git_fleximod.py @@ -132,8 +132,9 @@ def submodule_sparse_checkout( shutil.copy(sparsefile, gitsparse) # Finally checkout the repo - sprepo_git.git_operation("fetch", "--depth=1", "origin", "--tags") + sprepo_git.git_operation("fetch", "origin", "--tags") sprepo_git.git_operation("checkout", tag) + print(f"Successfully checked out {name:>20} at {tag}") rgit.config_set_value(f'submodule "{name}"',"active","true") rgit.config_set_value(f'submodule "{name}"',"url",url) @@ -242,7 +243,7 @@ def submodules_status(gitmodules, root_dir): ahash = git.git_operation("status").partition("\n")[0].split()[-1] if tag and atag == tag: print(f" {name:>20} at tag {tag}") - elif tag and ahash == tag: + elif tag and ahash[:len(tag)] == tag: print(f" {name:>20} at hash {ahash}") elif tag: print(f"s {name:>20} {atag} {ahash} is out of sync with .gitmodules {tag}") diff --git a/git_fleximod/metoflexi.py b/git_fleximod/metoflexi.py index 1d56bc9ad8..b607ad925d 100755 --- a/git_fleximod/metoflexi.py +++ b/git_fleximod/metoflexi.py @@ -168,9 +168,9 @@ def translate_single_repo(self, section, tag, url, path, efile, hash_, sparse, p ngit = GitInterface(newpath, logger) ngit.git_operation("checkout", tag) - if (newpath / ".gitignore").exists(): - logger.info("Moving .gitignore file in {}".format(newpath)) - (newpath / ".gitignore").rename((newpath / "save.gitignore")) +# if (newpath / ".gitignore").exists(): +# logger.info("Moving .gitignore file in {}".format(newpath)) +# (newpath / ".gitignore").rename((newpath / "save.gitignore")) if hash_: self.git.git_operation("clone", url, path) From b7914f26c22704b0aa984bad0978eeb7d88e55e0 Mon Sep 17 00:00:00 2001 From: James Edwards Date: Thu, 29 Feb 2024 15:25:23 -0700 Subject: [PATCH 107/161] correcting version --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 212b014d97..cbbe83a401 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "git-fleximod" -version = "0.7.0" +version = "0.6.2" description = "Extended support for git-submodule and git-sparse-checkout" authors = ["Jim Edwards "] maintainers = ["Jim Edwards "] From ff3fcb9587fabfe28e924b2249bcee89bb68d97c Mon Sep 17 00:00:00 2001 From: James Edwards Date: Thu, 29 Feb 2024 15:25:38 -0700 Subject: [PATCH 108/161] Bump to 0.7.0 --- git_fleximod/cli.py | 2 +- pyproject.toml | 2 +- tbump.toml | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/git_fleximod/cli.py b/git_fleximod/cli.py index 80dbd58d5a..f9cd67cff7 100644 --- a/git_fleximod/cli.py +++ b/git_fleximod/cli.py @@ -1,7 +1,7 @@ from pathlib import Path import argparse -__version__ = "0.6.2" +__version__ = "0.7.0" def find_root_dir(filename=".git"): d = Path.cwd() diff --git a/pyproject.toml b/pyproject.toml index cbbe83a401..212b014d97 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "git-fleximod" -version = 
"0.6.2" +version = "0.7.0" description = "Extended support for git-submodule and git-sparse-checkout" authors = ["Jim Edwards "] maintainers = ["Jim Edwards "] diff --git a/tbump.toml b/tbump.toml index 583cef45ae..568226dfa1 100644 --- a/tbump.toml +++ b/tbump.toml @@ -2,7 +2,7 @@ github_url = "https://github.com/jedwards4b/git-fleximod/" [version] -current = "0.6.2" +current = "0.7.0" # Example of a semver regexp. # Make sure this matches current_version before From fd53786233fd742ec7959908cc8a543dd5315d8f Mon Sep 17 00:00:00 2001 From: James Edwards Date: Fri, 1 Mar 2024 07:09:45 -0700 Subject: [PATCH 109/161] code cleanup --- git_fleximod/git_fleximod.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/git_fleximod/git_fleximod.py b/git_fleximod/git_fleximod.py index 1f60b1822b..8f2b1199e2 100755 --- a/git_fleximod/git_fleximod.py +++ b/git_fleximod/git_fleximod.py @@ -184,14 +184,14 @@ def single_submodule_checkout( if os.path.exists(os.path.join(repodir, ".gitmodules")): # recursively handle this checkout - print(f"Recursively checking out submodules of {name} {repodir} {url}") + print(f"Recursively checking out submodules of {name}") gitmodules = GitModules(logger, confpath=repodir) requiredlist = ["AlwaysRequired"] if optional: requiredlist.append("AlwaysOptional") submodules_checkout(gitmodules, repodir, requiredlist, force=force) if os.path.exists(os.path.join(repodir, ".git")): - print(f"Successfully checked out {name} {repodir}") + print(f"Successfully checked out {name:>20}") else: utils.fatal_error(f"Failed to checkout {name} {repo_exists} {tmpurl} {repodir} {path}") @@ -207,6 +207,8 @@ def submodules_status(gitmodules, root_dir): for name in gitmodules.sections(): path = gitmodules.get(name, "path") tag = gitmodules.get(name, "fxtag") + required = gitmodules.get(name, "fxrequired") + level = required and "Toplevel" in required if not path: utils.fatal_error("No path found in .gitmodules for {}".format(name)) newpath = os.path.join(root_dir, path) @@ -217,6 +219,8 @@ def submodules_status(gitmodules, root_dir): url = gitmodules.get(name, "url") tags = rootgit.git_operation("ls-remote", "--tags", url) atag = None + if level: + continue for htag in tags.split("\n"): if tag and tag in htag: atag = (htag.split()[1])[10:] From 0c26ad0ce55a4d080ef211662e9f0d544c4ceada Mon Sep 17 00:00:00 2001 From: Jim Edwards Date: Thu, 14 Mar 2024 12:43:23 -0600 Subject: [PATCH 110/161] update cleanup --- git_fleximod/git_fleximod.py | 12 +++--- git_fleximod/metoflexi.py | 84 +++++++++++++++++------------------- 2 files changed, 46 insertions(+), 50 deletions(-) diff --git a/git_fleximod/git_fleximod.py b/git_fleximod/git_fleximod.py index 8f2b1199e2..69301e72d5 100755 --- a/git_fleximod/git_fleximod.py +++ b/git_fleximod/git_fleximod.py @@ -201,7 +201,7 @@ def single_submodule_checkout( return -def submodules_status(gitmodules, root_dir): +def submodules_status(gitmodules, root_dir, toplevel=False): testfails = 0 localmods = 0 for name in gitmodules.sections(): @@ -219,7 +219,7 @@ def submodules_status(gitmodules, root_dir): url = gitmodules.get(name, "url") tags = rootgit.git_operation("ls-remote", "--tags", url) atag = None - if level: + if not toplevel and level: continue for htag in tags.split("\n"): if tag and tag in htag: @@ -268,18 +268,20 @@ def submodules_status(gitmodules, root_dir): def submodules_update(gitmodules, root_dir, requiredlist, force): _, localmods = submodules_status(gitmodules, root_dir) - print("") + if localmods and not force: print( 
"Repository has local mods, cowardly refusing to continue, fix issues or use --force to override" ) return + elif not localmods: + return for name in gitmodules.sections(): fxtag = gitmodules.get(name, "fxtag") path = gitmodules.get(name, "path") url = gitmodules.get(name, "url") logger.info("name={} path={} url={} fxtag={} requiredlist={}".format(name,os.path.join(root_dir, path), url, fxtag, requiredlist)) -# if not os.path.exists(os.path.join(root_dir,path, ".git")): + # if not os.path.exists(os.path.join(root_dir,path, ".git")): fxrequired = gitmodules.get(name, "fxrequired") assert(fxrequired in fxrequired_allowed_values()) rgit = GitInterface(root_dir, logger) @@ -444,7 +446,7 @@ def main(): if action == "update": submodules_update(gitmodules, root_dir, fxrequired, force) elif action == "status": - submodules_status(gitmodules, root_dir) + submodules_status(gitmodules, root_dir, toplevel=True) elif action == "test": retval = submodules_test(gitmodules, root_dir) else: diff --git a/git_fleximod/metoflexi.py b/git_fleximod/metoflexi.py index b607ad925d..1a4f84213a 100755 --- a/git_fleximod/metoflexi.py +++ b/git_fleximod/metoflexi.py @@ -150,53 +150,47 @@ def translate_single_repo(self, section, tag, url, path, efile, hash_, sparse, p if sparse: self.gitmodules.set(section, "fxsparse", sparse) self.gitmodules.set(section, "fxrequired", "ToplevelRequired") - - return - - newpath = (self.rootpath / Path(path)) - if newpath.exists(): - shutil.rmtree(newpath) - logger.info("Creating directory {}".format(newpath)) - newpath.mkdir(parents=True) - if tag: - logger.info("cloning {}".format(section)) - try: - self.git.git_operation("clone", "-b", tag, "--depth", "1", url, path) - except: + else: + newpath = (self.rootpath / Path(path)) + if newpath.exists(): + shutil.rmtree(newpath) + logger.info("Creating directory {}".format(newpath)) + newpath.mkdir(parents=True) + if tag: + logger.info("cloning {}".format(section)) + try: + self.git.git_operation("clone", "-b", tag, "--depth", "1", url, path) + except: + self.git.git_operation("clone", url, path) + with utils.pushd(newpath): + ngit = GitInterface(newpath, logger) + ngit.git_operation("checkout", tag) + if hash_: self.git.git_operation("clone", url, path) - with utils.pushd(newpath): - ngit = GitInterface(newpath, logger) - ngit.git_operation("checkout", tag) - -# if (newpath / ".gitignore").exists(): -# logger.info("Moving .gitignore file in {}".format(newpath)) -# (newpath / ".gitignore").rename((newpath / "save.gitignore")) - - if hash_: - self.git.git_operation("clone", url, path) - git = GitInterface(newpath, logger) - git.git_operation("fetch", "origin") - git.git_operation("checkout", hash_) - if sparse: - print("setting as sparse submodule {}".format(section)) - sparsefile = (newpath / Path(sparse)) - newfile = (newpath / ".git" / "info" / "sparse-checkout") - print(f"sparsefile {sparsefile} newfile {newfile}") - shutil.copy(sparsefile, newfile) - logger.info("adding submodule {}".format(section)) - self.gitmodules.save() - self.git.git_operation("submodule", "add", "-f", "--name", section, url, path) - self.git.git_operation("submodule","absorbgitdirs") - self.gitmodules.reload() - if tag: - self.gitmodules.set(section, "fxtag", tag) - if hash_: - self.gitmodules.set(section, "fxtag", hash_) + git = GitInterface(newpath, logger) + git.git_operation("fetch", "origin") + git.git_operation("checkout", hash_) + if sparse: + print("setting as sparse submodule {}".format(section)) + sparsefile = (newpath / Path(sparse)) + newfile = 
(newpath / ".git" / "info" / "sparse-checkout") + print(f"sparsefile {sparsefile} newfile {newfile}") + shutil.copy(sparsefile, newfile) + + logger.info("adding submodule {}".format(section)) + self.gitmodules.save() + self.git.git_operation("submodule", "add", "-f", "--name", section, url, path) + self.git.git_operation("submodule","absorbgitdirs") + self.gitmodules.reload() + if tag: + self.gitmodules.set(section, "fxtag", tag) + if hash_: + self.gitmodules.set(section, "fxtag", hash_) - self.gitmodules.set(section, "fxurl", url) - if sparse: - self.gitmodules.set(section, "fxsparse", sparse) - self.gitmodules.set(section, "fxrequired", "ToplevelRequired") + self.gitmodules.set(section, "fxurl", url) + if sparse: + self.gitmodules.set(section, "fxsparse", sparse) + self.gitmodules.set(section, "fxrequired", "ToplevelRequired") def translate_repo(self): From 17dc6d3a0abbc7a6f69c79197f6892501b7e2e8d Mon Sep 17 00:00:00 2001 From: Jim Edwards Date: Thu, 14 Mar 2024 12:45:58 -0600 Subject: [PATCH 111/161] update cleanup --- README.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/README.md b/README.md index c17c5b6f7a..f8afb40b37 100644 --- a/README.md +++ b/README.md @@ -35,6 +35,8 @@ Git-fleximod is a Python-based tool that extends Git's submodule and sparse chec - AlwaysOptional: Always optional (checked out with --optional flag). fxsparse: Enable sparse checkout for a submodule, pointing to a file containing sparse checkout paths. fxurl: This is the url used in the test subcommand to assure that protected branches do not point to forks + **NOTE** the fxurl variable is only used to identify the official project repository and should not be + changed by users. Use the url variable to change to a fork if desired. ## Sparse Checkouts From a56b7f7cec1372dec15962865123f9d14f8fd4bf Mon Sep 17 00:00:00 2001 From: Jim Edwards Date: Thu, 14 Mar 2024 14:33:45 -0600 Subject: [PATCH 112/161] all tests passing --- git_fleximod/git_fleximod.py | 52 +++++++++++++++++++++++++++++++----- 1 file changed, 46 insertions(+), 6 deletions(-) diff --git a/git_fleximod/git_fleximod.py b/git_fleximod/git_fleximod.py index 69301e72d5..db788c1484 100755 --- a/git_fleximod/git_fleximod.py +++ b/git_fleximod/git_fleximod.py @@ -67,6 +67,24 @@ def commandline_arguments(args=None): def submodule_sparse_checkout( root_dir, name, url, path, sparsefile, tag="master" ): + """ + This function performs a sparse checkout of a git submodule. It does so by first creating the .git/info/sparse-checkout fileq + in the submodule and then checking out the desired tag. If the submodule is already checked out, it will not be checked out again. + Creating the sparse-checkout file first prevents the entire submodule from being checked out and then removed. This is important + because the submodule may have a large number of files and checking out the entire submodule and then removing it would be time + and disk space consuming. + + Parameters: + root_dir (str): The root directory for the git operation. + name (str): The name of the submodule. + url (str): The URL of the submodule. + path (str): The path to the submodule. + sparsefile (str): The sparse file for the submodule. + tag (str, optional): The tag to checkout. Defaults to "master". 
+ + Returns: + None + """ logger.info("Called sparse_checkout for {}".format(name)) rgit = GitInterface(root_dir, logger) superroot = rgit.git_operation("rev-parse", "--show-superproject-working-tree") @@ -140,8 +158,24 @@ def submodule_sparse_checkout( rgit.config_set_value(f'submodule "{name}"',"url",url) def single_submodule_checkout( - root, name, path, url=None, tag=None, force=False, optional=False + root, name, path, url=None, tag=None, force=False, optional=False ): + """ + This function checks out a single git submodule. + + Parameters: + root (str): The root directory for the git operation. + name (str): The name of the submodule. + path (str): The path to the submodule. + url (str, optional): The URL of the submodule. Defaults to None. + tag (str, optional): The tag to checkout. Defaults to None. + force (bool, optional): If set to True, forces the checkout operation. Defaults to False. + optional (bool, optional): If set to True, the submodule is considered optional. Defaults to False. + + Returns: + None + """ + # function implementation... git = GitInterface(root, logger) repodir = os.path.join(root, path) logger.info("Checkout {} into {}/{}".format(name,root,path)) @@ -204,6 +238,7 @@ def single_submodule_checkout( def submodules_status(gitmodules, root_dir, toplevel=False): testfails = 0 localmods = 0 + needsupdate = 0 for name in gitmodules.sections(): path = gitmodules.get(name, "path") tag = gitmodules.get(name, "fxtag") @@ -219,6 +254,7 @@ def submodules_status(gitmodules, root_dir, toplevel=False): url = gitmodules.get(name, "url") tags = rootgit.git_operation("ls-remote", "--tags", url) atag = None + needsupdate += 1 if not toplevel and level: continue for htag in tags.split("\n"): @@ -252,6 +288,7 @@ def submodules_status(gitmodules, root_dir, toplevel=False): elif tag: print(f"s {name:>20} {atag} {ahash} is out of sync with .gitmodules {tag}") testfails += 1 + needsupdate += 1 else: print( f"e {name:>20} has no fxtag defined in .gitmodules, module at {atag}" @@ -263,19 +300,20 @@ def submodules_status(gitmodules, root_dir, toplevel=False): localmods = localmods + 1 print("M" + textwrap.indent(status, " ")) - return testfails, localmods + return testfails, localmods, needsupdate def submodules_update(gitmodules, root_dir, requiredlist, force): - _, localmods = submodules_status(gitmodules, root_dir) + _, localmods, needsupdate = submodules_status(gitmodules, root_dir) if localmods and not force: print( "Repository has local mods, cowardly refusing to continue, fix issues or use --force to override" ) return - elif not localmods: + if needsupdate == 0: return + for name in gitmodules.sections(): fxtag = gitmodules.get(name, "fxtag") path = gitmodules.get(name, "path") @@ -349,13 +387,15 @@ def submodules_update(gitmodules, root_dir, requiredlist, force): # checkout is done by update if required so this function may be depricated def submodules_checkout(gitmodules, root_dir, requiredlist, force=False): - _, localmods = submodules_status(gitmodules, root_dir) + _, localmods, needsupdate = submodules_status(gitmodules, root_dir) print("") if localmods and not force: print( "Repository has local mods, cowardly refusing to continue, fix issues or use --force to override" ) return + if not needsupdate: + return for name in gitmodules.sections(): fxrequired = gitmodules.get(name, "fxrequired") fxsparse = gitmodules.get(name, "fxsparse") @@ -389,7 +429,7 @@ def submodules_checkout(gitmodules, root_dir, requiredlist, force=False): def submodules_test(gitmodules, root_dir): # 
First check that fxtags are present and in sync with submodule hashes - testfails, localmods = submodules_status(gitmodules, root_dir) + testfails, localmods, _ = submodules_status(gitmodules, root_dir) print("") # Then make sure that urls are consistant with fxurls (not forks and not ssh) # and that sparse checkout files exist From c31e17f4fbb17aaffd8afdd1475b19dcfb074409 Mon Sep 17 00:00:00 2001 From: Jim Edwards Date: Thu, 14 Mar 2024 14:48:01 -0600 Subject: [PATCH 113/161] more cleanup and comments --- git_fleximod/git_fleximod.py | 34 ++++++++++++++++++++++++++++++---- 1 file changed, 30 insertions(+), 4 deletions(-) diff --git a/git_fleximod/git_fleximod.py b/git_fleximod/git_fleximod.py index db788c1484..89b0a6e031 100755 --- a/git_fleximod/git_fleximod.py +++ b/git_fleximod/git_fleximod.py @@ -387,8 +387,21 @@ def submodules_update(gitmodules, root_dir, requiredlist, force): # checkout is done by update if required so this function may be depricated def submodules_checkout(gitmodules, root_dir, requiredlist, force=False): - _, localmods, needsupdate = submodules_status(gitmodules, root_dir) + """ + This function checks out all git submodules based on the provided parameters. + + Parameters: + gitmodules (ConfigParser): The gitmodules configuration. + root_dir (str): The root directory for the git operation. + requiredlist (list): The list of required modules. + force (bool, optional): If set to True, forces the checkout operation. Defaults to False. + + Returns: + None + """ + # function implementation... print("") + _, localmods, needsupdate = submodules_status(gitmodules, root_dir) if localmods and not force: print( "Repository has local mods, cowardly refusing to continue, fix issues or use --force to override" @@ -426,10 +439,23 @@ def submodules_checkout(gitmodules, root_dir, requiredlist, force=False): optional = "AlwaysOptional" in requiredlist ) - def submodules_test(gitmodules, root_dir): + """ + This function tests the git submodules based on the provided parameters. + + It first checks that fxtags are present and in sync with submodule hashes. + Then it ensures that urls are consistent with fxurls (not forks and not ssh) + and that sparse checkout files exist. + + Parameters: + gitmodules (ConfigParser): The gitmodules configuration. + root_dir (str): The root directory for the git operation. + + Returns: + int: The number of test failures. 
+ """ # First check that fxtags are present and in sync with submodule hashes - testfails, localmods, _ = submodules_status(gitmodules, root_dir) + testfails, localmods, needsupdate = submodules_status(gitmodules, root_dir) print("") # Then make sure that urls are consistant with fxurls (not forks and not ssh) # and that sparse checkout files exist @@ -444,7 +470,7 @@ def submodules_test(gitmodules, root_dir): if fxsparse and not os.path.isfile(os.path.join(root_dir, path, fxsparse)): print(f"{name:>20} sparse checkout file {fxsparse} not found") testfails += 1 - return testfails + localmods + return testfails + localmods + needsupdate def main(): From 82428601f52b15b27f67031ad093d91554d09d3e Mon Sep 17 00:00:00 2001 From: Jim Edwards Date: Tue, 19 Mar 2024 13:00:01 -0600 Subject: [PATCH 114/161] fix py3.11 issue in gitinterface --- git_fleximod/gitinterface.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/git_fleximod/gitinterface.py b/git_fleximod/gitinterface.py index 203c500304..93ae38ecde 100644 --- a/git_fleximod/gitinterface.py +++ b/git_fleximod/gitinterface.py @@ -25,7 +25,7 @@ def __init__(self, repo_path, logger): msg = "Using GitPython interface to git" except ImportError: self._use_module = False - if not (repo_path / ".git").exists(): + if not (self.repo_path / ".git").exists(): self._init_git_repo() msg = "Using shell interface to git" self.logger.info(msg) From 3fba38563fd5a9ceec5d0abcd210ae90676b6309 Mon Sep 17 00:00:00 2001 From: Jim Edwards Date: Thu, 28 Mar 2024 13:09:57 -0600 Subject: [PATCH 115/161] explain M flag --- git_fleximod/git_fleximod.py | 30 +++++++++++++++++++++++------- 1 file changed, 23 insertions(+), 7 deletions(-) diff --git a/git_fleximod/git_fleximod.py b/git_fleximod/git_fleximod.py index 89b0a6e031..ebb719d4b7 100755 --- a/git_fleximod/git_fleximod.py +++ b/git_fleximod/git_fleximod.py @@ -307,9 +307,7 @@ def submodules_update(gitmodules, root_dir, requiredlist, force): _, localmods, needsupdate = submodules_status(gitmodules, root_dir) if localmods and not force: - print( - "Repository has local mods, cowardly refusing to continue, fix issues or use --force to override" - ) + local_mods_output() return if needsupdate == 0: return @@ -385,6 +383,22 @@ def submodules_update(gitmodules, root_dir, requiredlist, force): else: print(f"{name:>20} up to date.") +def local_mods_output(): + text = '''\ + The submodules labeled with 'M' above are not in a clean state. + The following are options for how to proceed: + (1) Go into each submodule which is not in a clean state and issue a 'git status' + Either revert or commit your changes so that the submodule is in a clean state. + (2) use the --force option to git-fleximod + (3) you can name the particular submodules to update using the git-fleximod command line + (4) As a last resort you can remove the submodule (via 'rm -fr [directory]') + then rerun git-fleximod update. 
+''' + print(text) + + + + # checkout is done by update if required so this function may be depricated def submodules_checkout(gitmodules, root_dir, requiredlist, force=False): """ @@ -403,9 +417,7 @@ def submodules_checkout(gitmodules, root_dir, requiredlist, force=False): print("") _, localmods, needsupdate = submodules_status(gitmodules, root_dir) if localmods and not force: - print( - "Repository has local mods, cowardly refusing to continue, fix issues or use --force to override" - ) + local_mods_output() return if not needsupdate: return @@ -512,7 +524,11 @@ def main(): if action == "update": submodules_update(gitmodules, root_dir, fxrequired, force) elif action == "status": - submodules_status(gitmodules, root_dir, toplevel=True) + tfails, lmods, updates = submodules_status(gitmodules, root_dir, toplevel=True) + if tfails + lmods + updates > 0: + print(f" testfails = {tfails}, local mods = {lmods}, needs updates {updates}\n") + if lmods > 0: + local_mods_output() elif action == "test": retval = submodules_test(gitmodules, root_dir) else: From b4e07beb2fef9e4c62514a9b7fe4b86e99e043b2 Mon Sep 17 00:00:00 2001 From: Jim Edwards Date: Thu, 28 Mar 2024 13:37:26 -0600 Subject: [PATCH 116/161] fixes issue with ssh access to github (should not be required) --- git_fleximod/git_fleximod.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/git_fleximod/git_fleximod.py b/git_fleximod/git_fleximod.py index ebb719d4b7..a0d3a76137 100755 --- a/git_fleximod/git_fleximod.py +++ b/git_fleximod/git_fleximod.py @@ -185,7 +185,6 @@ def single_submodule_checkout( if os.path.exists(os.path.join(repodir, ".git")): logger.info("Submodule {} already checked out".format(name)) repo_exists = True - # Look for a .gitmodules file in the newly checkedout repo if not repo_exists and url: # ssh urls cause problems for those who dont have git accounts with ssh keys defined @@ -209,7 +208,11 @@ def single_submodule_checkout( rootdotgit = line[8:].rstrip() newpath = os.path.abspath(os.path.join(root, rootdotgit, "modules", name)) - shutil.move(os.path.join(repodir, ".git"), newpath) + if os.path.exists(newpath): + shutil.rmtree(os.path.join(repodir,".git")) + else: + shutil.move(os.path.join(repodir, ".git"), newpath) + with open(os.path.join(repodir, ".git"), "w") as f: f.write("gitdir: " + os.path.relpath(newpath, start=repodir)) @@ -252,6 +255,7 @@ def submodules_status(gitmodules, root_dir, toplevel=False): rootgit = GitInterface(root_dir, logger) # submodule commands use path, not name url = gitmodules.get(name, "url") + url = url.replace("git@github.com:", "https://github.com/") tags = rootgit.git_operation("ls-remote", "--tags", url) atag = None needsupdate += 1 @@ -445,7 +449,6 @@ def submodules_checkout(gitmodules, root_dir, requiredlist, force=False): logger.debug( "Calling submodule_checkout({},{},{})".format(root_dir, name, path) ) - single_submodule_checkout( root_dir, name, path, url=url, tag=fxtag, force=force, optional = "AlwaysOptional" in requiredlist From 3a9201dce1a482de86d7a00e6958cd8ce310b4ef Mon Sep 17 00:00:00 2001 From: Jim Edwards Date: Thu, 28 Mar 2024 13:59:06 -0600 Subject: [PATCH 117/161] better error handling for not a tag --- git_fleximod/git_fleximod.py | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/git_fleximod/git_fleximod.py b/git_fleximod/git_fleximod.py index a0d3a76137..8e479fc30c 100755 --- a/git_fleximod/git_fleximod.py +++ b/git_fleximod/git_fleximod.py @@ -227,9 +227,7 @@ def single_submodule_checkout( if optional: 
requiredlist.append("AlwaysOptional") submodules_checkout(gitmodules, repodir, requiredlist, force=force) - if os.path.exists(os.path.join(repodir, ".git")): - print(f"Successfully checked out {name:>20}") - else: + if not os.path.exists(os.path.join(repodir, ".git")): utils.fatal_error(f"Failed to checkout {name} {repo_exists} {tmpurl} {repodir} {path}") if tmpurl: @@ -380,8 +378,11 @@ def submodules_update(gitmodules, root_dir, requiredlist, force): git.git_operation("fetch", newremote, "--tags") atag = git.git_operation("describe", "--tags", "--always").rstrip() if fxtag and fxtag != atag: - print(f"{name:>20} updated to {fxtag}") - git.git_operation("checkout", fxtag) + try: + git.git_operation("checkout", fxtag) + print(f"{name:>20} updated to {fxtag}") + except Exception as error: + print(error) elif not fxtag: print(f"No fxtag found for submodule {name:>20}") else: From ad6b84ee19be4983561fbfff5d9d9abe24fda74e Mon Sep 17 00:00:00 2001 From: Jim Edwards Date: Thu, 28 Mar 2024 14:15:45 -0600 Subject: [PATCH 118/161] Bump to 0.7.3 --- git_fleximod/cli.py | 2 +- pyproject.toml | 2 +- tbump.toml | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/git_fleximod/cli.py b/git_fleximod/cli.py index f9cd67cff7..169a1b3384 100644 --- a/git_fleximod/cli.py +++ b/git_fleximod/cli.py @@ -1,7 +1,7 @@ from pathlib import Path import argparse -__version__ = "0.7.0" +__version__ = "0.7.3" def find_root_dir(filename=".git"): d = Path.cwd() diff --git a/pyproject.toml b/pyproject.toml index 212b014d97..bb4d89a307 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "git-fleximod" -version = "0.7.0" +version = "0.7.3" description = "Extended support for git-submodule and git-sparse-checkout" authors = ["Jim Edwards "] maintainers = ["Jim Edwards "] diff --git a/tbump.toml b/tbump.toml index 568226dfa1..7338278fb6 100644 --- a/tbump.toml +++ b/tbump.toml @@ -2,7 +2,7 @@ github_url = "https://github.com/jedwards4b/git-fleximod/" [version] -current = "0.7.0" +current = "0.7.3" # Example of a semver regexp. # Make sure this matches current_version before From fd0ce7ad12f73b7932b98c6531b127767a7a2a19 Mon Sep 17 00:00:00 2001 From: Jim Edwards Date: Thu, 28 Mar 2024 14:40:25 -0600 Subject: [PATCH 119/161] set python 3.7 as min version requirement --- git_fleximod/git_fleximod.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/git_fleximod/git_fleximod.py b/git_fleximod/git_fleximod.py index 8e479fc30c..3c0ae0764e 100755 --- a/git_fleximod/git_fleximod.py +++ b/git_fleximod/git_fleximod.py @@ -1,5 +1,9 @@ #!/usr/bin/env python import sys +MIN_PYTHON = (3, 7) +if sys.version_info < MIN_PYTHON: + sys.exit("Python %s.%s or later is required."%MIN_PYTHON) + import os import shutil import logging From ca419c61c810d57f0f7b4ab0de165ad30e28360d Mon Sep 17 00:00:00 2001 From: Jim Edwards Date: Fri, 29 Mar 2024 14:53:16 -0600 Subject: [PATCH 120/161] rename fxurl --- git_fleximod/git_fleximod.py | 165 ++++++++++++++++++++++------------- 1 file changed, 105 insertions(+), 60 deletions(-) diff --git a/git_fleximod/git_fleximod.py b/git_fleximod/git_fleximod.py index 3c0ae0764e..e4e18ee25e 100755 --- a/git_fleximod/git_fleximod.py +++ b/git_fleximod/git_fleximod.py @@ -1,8 +1,9 @@ #!/usr/bin/env python import sys + MIN_PYTHON = (3, 7) if sys.version_info < MIN_PYTHON: - sys.exit("Python %s.%s or later is required."%MIN_PYTHON) + sys.exit("Python %s.%s or later is required." 
% MIN_PYTHON) import os import shutil @@ -16,8 +17,11 @@ # logger variable is global logger = None + + def fxrequired_allowed_values(): - return ['ToplevelRequired', 'ToplevelOptional', 'AlwaysRequired', 'AlwaysOptional'] + return ["ToplevelRequired", "ToplevelOptional", "AlwaysRequired", "AlwaysOptional"] + def commandline_arguments(args=None): parser = cli.get_parser() @@ -29,7 +33,12 @@ def commandline_arguments(args=None): # explicitly listing a component overrides the optional flag if options.optional or options.components: - fxrequired = ["ToplevelRequired", "ToplevelOptional", "AlwaysRequired", "AlwaysOptional"] + fxrequired = [ + "ToplevelRequired", + "ToplevelOptional", + "AlwaysRequired", + "AlwaysOptional", + ] else: fxrequired = ["ToplevelRequired", "AlwaysRequired"] @@ -68,16 +77,14 @@ def commandline_arguments(args=None): ) -def submodule_sparse_checkout( - root_dir, name, url, path, sparsefile, tag="master" -): +def submodule_sparse_checkout(root_dir, name, url, path, sparsefile, tag="master"): """ This function performs a sparse checkout of a git submodule. It does so by first creating the .git/info/sparse-checkout fileq in the submodule and then checking out the desired tag. If the submodule is already checked out, it will not be checked out again. Creating the sparse-checkout file first prevents the entire submodule from being checked out and then removed. This is important - because the submodule may have a large number of files and checking out the entire submodule and then removing it would be time + because the submodule may have a large number of files and checking out the entire submodule and then removing it would be time and disk space consuming. - + Parameters: root_dir (str): The root directory for the git operation. name (str): The name of the submodule. 
@@ -96,10 +103,10 @@ def submodule_sparse_checkout( gitroot = superroot else: gitroot = root_dir - assert(os.path.isdir(os.path.join(gitroot,".git"))) + assert os.path.isdir(os.path.join(gitroot, ".git")) # first create the module directory - if not os.path.isdir(os.path.join(root_dir,path)): - os.makedirs(os.path.join(root_dir,path)) + if not os.path.isdir(os.path.join(root_dir, path)): + os.makedirs(os.path.join(root_dir, path)) # initialize a new git repo and set the sparse checkout flag sprep_repo = os.path.join(root_dir, path) @@ -120,25 +127,31 @@ def submodule_sparse_checkout( # set the repository remote - logger.info("Setting remote origin in {}/{}".format(root_dir,path)) + logger.info("Setting remote origin in {}/{}".format(root_dir, path)) status = sprepo_git.git_operation("remote", "-v") if url not in status: sprepo_git.git_operation("remote", "add", "origin", url) - - topgit = os.path.join(gitroot,".git") - + + topgit = os.path.join(gitroot, ".git") + if gitroot != root_dir and os.path.isfile(os.path.join(root_dir, ".git")): with open(os.path.join(root_dir, ".git")) as f: - gitpath = os.path.relpath(os.path.join(root_dir, f.read().split()[1]), start=os.path.join(root_dir,path)) + gitpath = os.path.relpath( + os.path.join(root_dir, f.read().split()[1]), + start=os.path.join(root_dir, path), + ) topgit = os.path.join(gitpath, "modules") else: - topgit = os.path.relpath(os.path.join(root_dir, ".git", "modules"), start=os.path.join(root_dir,path)) - + topgit = os.path.relpath( + os.path.join(root_dir, ".git", "modules"), + start=os.path.join(root_dir, path), + ) + with utils.pushd(sprep_repo): if not os.path.isdir(topgit): os.makedirs(topgit) topgit += os.sep + name - + if os.path.isdir(os.path.join(root_dir, path, ".git")): with utils.pushd(sprep_repo): shutil.move(".git", topgit) @@ -147,7 +160,9 @@ def submodule_sparse_checkout( # assert(os.path.isdir(os.path.relpath(topgit, start=sprep_repo))) gitsparse = os.path.abspath(os.path.join(topgit, "info", "sparse-checkout")) if os.path.isfile(gitsparse): - logger.warning("submodule {} is already initialized {}".format(name, topgit)) + logger.warning( + "submodule {} is already initialized {}".format(name, topgit) + ) return with utils.pushd(sprep_repo): @@ -156,10 +171,11 @@ def submodule_sparse_checkout( # Finally checkout the repo sprepo_git.git_operation("fetch", "origin", "--tags") sprepo_git.git_operation("checkout", tag) - + print(f"Successfully checked out {name:>20} at {tag}") - rgit.config_set_value(f'submodule "{name}"',"active","true") - rgit.config_set_value(f'submodule "{name}"',"url",url) + rgit.config_set_value(f'submodule "{name}"', "active", "true") + rgit.config_set_value(f'submodule "{name}"', "url", url) + def single_submodule_checkout( root, name, path, url=None, tag=None, force=False, optional=False @@ -182,7 +198,7 @@ def single_submodule_checkout( # function implementation... 
git = GitInterface(root, logger) repodir = os.path.join(root, path) - logger.info("Checkout {} into {}/{}".format(name,root,path)) + logger.info("Checkout {} into {}/{}".format(name, root, path)) # if url is provided update to the new url tmpurl = None repo_exists = False @@ -213,7 +229,7 @@ def single_submodule_checkout( newpath = os.path.abspath(os.path.join(root, rootdotgit, "modules", name)) if os.path.exists(newpath): - shutil.rmtree(os.path.join(repodir,".git")) + shutil.rmtree(os.path.join(repodir, ".git")) else: shutil.move(os.path.join(repodir, ".git"), newpath) @@ -232,7 +248,9 @@ def single_submodule_checkout( requiredlist.append("AlwaysOptional") submodules_checkout(gitmodules, repodir, requiredlist, force=force) if not os.path.exists(os.path.join(repodir, ".git")): - utils.fatal_error(f"Failed to checkout {name} {repo_exists} {tmpurl} {repodir} {path}") + utils.fatal_error( + f"Failed to checkout {name} {repo_exists} {tmpurl} {repodir} {path}" + ) if tmpurl: print(git.git_operation("restore", ".gitmodules")) @@ -248,7 +266,7 @@ def submodules_status(gitmodules, root_dir, toplevel=False): path = gitmodules.get(name, "path") tag = gitmodules.get(name, "fxtag") required = gitmodules.get(name, "fxrequired") - level = required and "Toplevel" in required + level = required and "Toplevel" in required if not path: utils.fatal_error("No path found in .gitmodules for {}".format(name)) newpath = os.path.join(root_dir, path) @@ -270,8 +288,10 @@ def submodules_status(gitmodules, root_dir, toplevel=False): if tag and tag == atag: print(f"e {name:>20} not checked out, aligned at tag {tag}") elif tag: - ahash = rootgit.git_operation("submodule", "status", "{}".format(path)).rstrip() - ahash = ahash[1:len(tag)+1] + ahash = rootgit.git_operation( + "submodule", "status", "{}".format(path) + ).rstrip() + ahash = ahash[1 : len(tag) + 1] if tag == ahash: print(f"e {name:>20} not checked out, aligned at hash {ahash}") else: @@ -289,10 +309,12 @@ def submodules_status(gitmodules, root_dir, toplevel=False): ahash = git.git_operation("status").partition("\n")[0].split()[-1] if tag and atag == tag: print(f" {name:>20} at tag {tag}") - elif tag and ahash[:len(tag)] == tag: + elif tag and ahash[: len(tag)] == tag: print(f" {name:>20} at hash {ahash}") elif tag: - print(f"s {name:>20} {atag} {ahash} is out of sync with .gitmodules {tag}") + print( + f"s {name:>20} {atag} {ahash} is out of sync with .gitmodules {tag}" + ) testfails += 1 needsupdate += 1 else: @@ -317,21 +339,29 @@ def submodules_update(gitmodules, root_dir, requiredlist, force): return if needsupdate == 0: return - + for name in gitmodules.sections(): fxtag = gitmodules.get(name, "fxtag") path = gitmodules.get(name, "path") url = gitmodules.get(name, "url") - logger.info("name={} path={} url={} fxtag={} requiredlist={}".format(name,os.path.join(root_dir, path), url, fxtag, requiredlist)) + logger.info( + "name={} path={} url={} fxtag={} requiredlist={}".format( + name, os.path.join(root_dir, path), url, fxtag, requiredlist + ) + ) # if not os.path.exists(os.path.join(root_dir,path, ".git")): fxrequired = gitmodules.get(name, "fxrequired") - assert(fxrequired in fxrequired_allowed_values()) + assert fxrequired in fxrequired_allowed_values() rgit = GitInterface(root_dir, logger) superroot = rgit.git_operation("rev-parse", "--show-superproject-working-tree") - + fxsparse = gitmodules.get(name, "fxsparse") - - if fxrequired and (superroot and "Toplevel" in fxrequired) or fxrequired not in requiredlist: + + if ( + fxrequired + and (superroot 
and "Toplevel" in fxrequired) + or fxrequired not in requiredlist + ): if "ToplevelOptional" == fxrequired: print("Skipping optional component {}".format(name)) continue @@ -341,20 +371,24 @@ def submodules_update(gitmodules, root_dir, requiredlist, force): root_dir, name, url, path, fxsparse, fxtag ) ) - submodule_sparse_checkout( - root_dir, name, url, path, fxsparse, tag=fxtag - ) + submodule_sparse_checkout(root_dir, name, url, path, fxsparse, tag=fxtag) else: logger.info( - "Calling submodule_checkout({},{},{},{})".format(root_dir, name, path,url) + "Calling submodule_checkout({},{},{},{})".format( + root_dir, name, path, url + ) ) - + single_submodule_checkout( - root_dir, name, path, url=url, tag=fxtag, force=force, - optional=("AlwaysOptional" in requiredlist) + root_dir, + name, + path, + url=url, + tag=fxtag, + force=force, + optional=("AlwaysOptional" in requiredlist), ) - if os.path.exists(os.path.join(path, ".git")): submoddir = os.path.join(root_dir, path) with utils.pushd(submoddir): @@ -392,8 +426,9 @@ def submodules_update(gitmodules, root_dir, requiredlist, force): else: print(f"{name:>20} up to date.") + def local_mods_output(): - text = '''\ + text = """\ The submodules labeled with 'M' above are not in a clean state. The following are options for how to proceed: (1) Go into each submodule which is not in a clean state and issue a 'git status' @@ -402,12 +437,10 @@ def local_mods_output(): (3) you can name the particular submodules to update using the git-fleximod command line (4) As a last resort you can remove the submodule (via 'rm -fr [directory]') then rerun git-fleximod update. -''' +""" print(text) - - - + # checkout is done by update if required so this function may be depricated def submodules_checkout(gitmodules, root_dir, requiredlist, force=False): """ @@ -447,18 +480,22 @@ def submodules_checkout(gitmodules, root_dir, requiredlist, force=False): root_dir, name, url, path, fxsparse, fxtag ) ) - submodule_sparse_checkout( - root_dir, name, url, path, fxsparse, tag=fxtag - ) + submodule_sparse_checkout(root_dir, name, url, path, fxsparse, tag=fxtag) else: logger.debug( "Calling submodule_checkout({},{},{})".format(root_dir, name, path) ) single_submodule_checkout( - root_dir, name, path, url=url, tag=fxtag, force=force, - optional = "AlwaysOptional" in requiredlist + root_dir, + name, + path, + url=url, + tag=fxtag, + force=force, + optional="AlwaysOptional" in requiredlist, ) + def submodules_test(gitmodules, root_dir): """ This function tests the git submodules based on the provided parameters. 
@@ -481,10 +518,12 @@ def submodules_test(gitmodules, root_dir): # and that sparse checkout files exist for name in gitmodules.sections(): url = gitmodules.get(name, "url") - fxurl = gitmodules.get(name, "fxurl") + fxurl = gitmodules.get(name, "fxDONOTMODIFYurl") fxsparse = gitmodules.get(name, "fxsparse") path = gitmodules.get(name, "path") - if not fxurl or url != fxurl: + fxurl = fxurl[:-4] if fxurl.endswith(".git") else fxurl + url = url[:-4] if url.endswith(".git") else url + if not fxurl or url.lower() != fxurl.lower(): print(f"{name:>20} url {url} not in sync with required {fxurl}") testfails += 1 if fxsparse and not os.path.isfile(os.path.join(root_dir, path, fxsparse)): @@ -518,7 +557,11 @@ def main(): ) root_dir = os.path.dirname(file_path) - logger.info("root_dir is {} includelist={} excludelist={}".format(root_dir, includelist, excludelist)) + logger.info( + "root_dir is {} includelist={} excludelist={}".format( + root_dir, includelist, excludelist + ) + ) gitmodules = GitModules( logger, confpath=root_dir, @@ -534,9 +577,11 @@ def main(): elif action == "status": tfails, lmods, updates = submodules_status(gitmodules, root_dir, toplevel=True) if tfails + lmods + updates > 0: - print(f" testfails = {tfails}, local mods = {lmods}, needs updates {updates}\n") + print( + f" testfails = {tfails}, local mods = {lmods}, needs updates {updates}\n" + ) if lmods > 0: - local_mods_output() + local_mods_output() elif action == "test": retval = submodules_test(gitmodules, root_dir) else: From 152bad98fecc769308c782db5c80d95e51b33f61 Mon Sep 17 00:00:00 2001 From: Jim Edwards Date: Wed, 3 Apr 2024 09:05:06 -0600 Subject: [PATCH 121/161] fix tests on izumi --- git_fleximod/git_fleximod.py | 4 ++-- git_fleximod/utils.py | 2 +- tests/conftest.py | 8 ++++---- 3 files changed, 7 insertions(+), 7 deletions(-) diff --git a/git_fleximod/git_fleximod.py b/git_fleximod/git_fleximod.py index e4e18ee25e..7957001b84 100755 --- a/git_fleximod/git_fleximod.py +++ b/git_fleximod/git_fleximod.py @@ -100,9 +100,9 @@ def submodule_sparse_checkout(root_dir, name, url, path, sparsefile, tag="master rgit = GitInterface(root_dir, logger) superroot = rgit.git_operation("rev-parse", "--show-superproject-working-tree") if superroot: - gitroot = superroot + gitroot = superroot.strip() else: - gitroot = root_dir + gitroot = root_dir.strip() assert os.path.isdir(os.path.join(gitroot, ".git")) # first create the module directory if not os.path.isdir(os.path.join(root_dir, path)): diff --git a/git_fleximod/utils.py b/git_fleximod/utils.py index f0753367e5..7cc1de38cc 100644 --- a/git_fleximod/utils.py +++ b/git_fleximod/utils.py @@ -272,7 +272,7 @@ def execute_subprocess(commands, status_to_caller=False, output_to_caller=False) cwd = os.getcwd() msg = "In directory: {0}\nexecute_subprocess running command:".format(cwd) logging.info(msg) - commands_str = " ".join(commands) + commands_str = " ".join(str(element) for element in commands) logging.info(commands_str) return_to_caller = status_to_caller or output_to_caller status = -1 diff --git a/tests/conftest.py b/tests/conftest.py index 5fed907c4f..942a0efb97 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -25,7 +25,7 @@ def logger(): path = modules/test url = https://github.com/ESMCI/mpi-serial.git fxtag = MPIserial_2.4.0 - fxurl = https://github.com/ESMCI/mpi-serial.git + fxDONOTUSEurl = https://github.com/ESMCI/mpi-serial.git fxrequired = ToplevelRequired """}, {"subrepo_path": "modules/test_optional", @@ -39,7 +39,7 @@ def logger(): path = modules/test_optional 
url = https://github.com/ESMCI/mpi-serial.git fxtag = MPIserial_2.4.0 - fxurl = https://github.com/ESMCI/mpi-serial.git + fxDONOTUSEurl = https://github.com/ESMCI/mpi-serial.git fxrequired = ToplevelOptional """}, {"subrepo_path": "modules/test_alwaysoptional", @@ -53,7 +53,7 @@ def logger(): path = modules/test_alwaysoptional url = https://github.com/ESMCI/mpi-serial.git fxtag = e5cf35c - fxurl = https://github.com/ESMCI/mpi-serial.git + fxDONOTUSEurl = https://github.com/ESMCI/mpi-serial.git fxrequired = AlwaysOptional """}, {"subrepo_path": "modules/test_sparse", @@ -67,7 +67,7 @@ def logger(): path = modules/test_sparse url = https://github.com/ESMCI/mpi-serial.git fxtag = MPIserial_2.5.0 - fxurl = https://github.com/ESMCI/mpi-serial.git + fxDONOTUSEurl = https://github.com/ESMCI/mpi-serial.git fxrequired = AlwaysRequired fxsparse = ../.sparse_file_list """}, From 5a1e5f64b813208fb6c62dfdab6e466a5b4ef8d6 Mon Sep 17 00:00:00 2001 From: Jim Edwards Date: Wed, 3 Apr 2024 09:50:57 -0600 Subject: [PATCH 122/161] fix issue in metoflexi --- git_fleximod/metoflexi.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/git_fleximod/metoflexi.py b/git_fleximod/metoflexi.py index 1a4f84213a..cc347db2dd 100755 --- a/git_fleximod/metoflexi.py +++ b/git_fleximod/metoflexi.py @@ -17,7 +17,7 @@ def find_root_dir(filename=".git"): while d != root: attempt = d / filename if attempt.is_dir(): - return attempt + return d d = d.parent return None @@ -146,7 +146,7 @@ def translate_single_repo(self, section, tag, url, path, efile, hash_, sparse, p if hash_: self.gitmodules.set(section, "fxtag", hash_) - self.gitmodules.set(section, "fxurl", url) + self.gitmodules.set(section, "fxDONOTUSEurl", url) if sparse: self.gitmodules.set(section, "fxsparse", sparse) self.gitmodules.set(section, "fxrequired", "ToplevelRequired") @@ -187,7 +187,7 @@ def translate_single_repo(self, section, tag, url, path, efile, hash_, sparse, p if hash_: self.gitmodules.set(section, "fxtag", hash_) - self.gitmodules.set(section, "fxurl", url) + self.gitmodules.set(section, "fxDONOTUSEurl", url) if sparse: self.gitmodules.set(section, "fxsparse", sparse) self.gitmodules.set(section, "fxrequired", "ToplevelRequired") From 5398ef43fc6cdacdc894b7e6567bc9fb28b26da6 Mon Sep 17 00:00:00 2001 From: Jim Edwards Date: Tue, 23 Apr 2024 15:18:24 -0600 Subject: [PATCH 123/161] if submodule does not exist add it --- git_fleximod/git_fleximod.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/git_fleximod/git_fleximod.py b/git_fleximod/git_fleximod.py index 7957001b84..adf575f9cb 100755 --- a/git_fleximod/git_fleximod.py +++ b/git_fleximod/git_fleximod.py @@ -235,10 +235,13 @@ def single_submodule_checkout( with open(os.path.join(repodir, ".git"), "w") as f: f.write("gitdir: " + os.path.relpath(newpath, start=repodir)) + + if not os.path.exists(repodir): + git.git_operation("submodule", "add", "--name", name, "--", url, path) if not repo_exists or not tmpurl: - logger.debug(git.git_operation("submodule", "update", "--init", "--", path)) - + git.git_operation("submodule", "update", "--init", "--", path) + if os.path.exists(os.path.join(repodir, ".gitmodules")): # recursively handle this checkout print(f"Recursively checking out submodules of {name}") @@ -311,6 +314,8 @@ def submodules_status(gitmodules, root_dir, toplevel=False): print(f" {name:>20} at tag {tag}") elif tag and ahash[: len(tag)] == tag: print(f" {name:>20} at hash {ahash}") + elif atag == ahash: + print(f" {name:>20} at hash {ahash}") elif 
tag: print( f"s {name:>20} {atag} {ahash} is out of sync with .gitmodules {tag}" From 46a74ac7f75b5d086fb1f3b3ffb117ae76cc46d4 Mon Sep 17 00:00:00 2001 From: Jim Edwards Date: Tue, 23 Apr 2024 16:44:14 -0600 Subject: [PATCH 124/161] create parent if it doesnt exist --- git_fleximod/git_fleximod.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/git_fleximod/git_fleximod.py b/git_fleximod/git_fleximod.py index adf575f9cb..103cc82a50 100755 --- a/git_fleximod/git_fleximod.py +++ b/git_fleximod/git_fleximod.py @@ -237,6 +237,9 @@ def single_submodule_checkout( f.write("gitdir: " + os.path.relpath(newpath, start=repodir)) if not os.path.exists(repodir): + parent = os.path.dirname(repodir) + if not os.path.isdir(parent): + os.makedirs(parent) git.git_operation("submodule", "add", "--name", name, "--", url, path) if not repo_exists or not tmpurl: From eb932b015072ba3ec8cc7fadf5a387058f79fc4c Mon Sep 17 00:00:00 2001 From: Jim Edwards Date: Tue, 23 Apr 2024 16:49:47 -0600 Subject: [PATCH 125/161] Bump to 0.7.4 --- git_fleximod/cli.py | 2 +- pyproject.toml | 2 +- tbump.toml | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/git_fleximod/cli.py b/git_fleximod/cli.py index 169a1b3384..1fb959dad0 100644 --- a/git_fleximod/cli.py +++ b/git_fleximod/cli.py @@ -1,7 +1,7 @@ from pathlib import Path import argparse -__version__ = "0.7.3" +__version__ = "0.7.4" def find_root_dir(filename=".git"): d = Path.cwd() diff --git a/pyproject.toml b/pyproject.toml index bb4d89a307..2484552e4f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "git-fleximod" -version = "0.7.3" +version = "0.7.4" description = "Extended support for git-submodule and git-sparse-checkout" authors = ["Jim Edwards "] maintainers = ["Jim Edwards "] diff --git a/tbump.toml b/tbump.toml index 7338278fb6..d4b8eaee11 100644 --- a/tbump.toml +++ b/tbump.toml @@ -2,7 +2,7 @@ github_url = "https://github.com/jedwards4b/git-fleximod/" [version] -current = "0.7.3" +current = "0.7.4" # Example of a semver regexp. 
# Make sure this matches current_version before From 32bf4b5d9eae057e24bdcedd45215f2a102c3acb Mon Sep 17 00:00:00 2001 From: Cheryl Craig Date: Tue, 14 May 2024 10:45:10 -0600 Subject: [PATCH 126/161] Update externals to match cesm2_3_alpha17f --- Externals.cfg | 25 +++++++++---------------- cime_config/testdefs/testlist_cam.xml | 2 +- 2 files changed, 10 insertions(+), 17 deletions(-) diff --git a/Externals.cfg b/Externals.cfg index ffe2c4b012..f967878539 100644 --- a/Externals.cfg +++ b/Externals.cfg @@ -1,5 +1,5 @@ [ccs_config] -tag = ccs_config_cesm0.0.85 +tag = ccs_config_cesm0.0.106 protocol = git repo_url = https://github.com/ESMCI/ccs_config_cesm local_path = ccs_config @@ -13,7 +13,7 @@ local_path = components/cice5 required = True [cice6] -tag = cesm_cice6_4_1_10 +tag = cesm_cice6_5_0_7 protocol = git repo_url = https://github.com/ESCOMP/CESM_CICE local_path = components/cice @@ -21,14 +21,14 @@ externals = Externals.cfg required = True [cmeps] -tag = cmeps0.14.59 +tag = cmeps0.14.60 protocol = git repo_url = https://github.com/ESCOMP/CMEPS.git local_path = components/cmeps required = True [cdeps] -tag = cdeps1.0.31 +tag = cdeps1.0.33 protocol = git repo_url = https://github.com/ESCOMP/CDEPS.git local_path = components/cdeps @@ -36,7 +36,7 @@ externals = Externals_CDEPS.cfg required = True [cpl7] -tag = cpl77.0.7 +tag = cpl77.0.8 protocol = git repo_url = https://github.com/ESCOMP/CESM_CPL7andDataComps local_path = components/cpl7 @@ -49,13 +49,6 @@ repo_url = https://github.com/ESCOMP/CESM_share local_path = share required = True -[mct] -tag = MCT_2.11.0 -protocol = git -repo_url = https://github.com/MCSclimate/MCT -local_path = libraries/mct -required = True - [parallelio] tag = pio2_6_2 protocol = git @@ -71,7 +64,7 @@ local_path = cime required = True [cism] -tag = cismwrap_2_1_96 +tag = cismwrap_2_1_100 protocol = git repo_url = https://github.com/ESCOMP/CISM-wrapper local_path = components/cism @@ -79,7 +72,7 @@ externals = Externals_CISM.cfg required = True [clm] -tag = ctsm5.1.dev145 +tag = ctsm5.2.005 protocol = git repo_url = https://github.com/ESCOMP/CTSM local_path = components/clm @@ -95,14 +88,14 @@ externals = Externals_FMS.cfg required = True [mosart] -tag = mosart1_0_48 +tag = mosart1_0_49 protocol = git repo_url = https://github.com/ESCOMP/MOSART local_path = components/mosart required = True [rtm] -tag = rtm1_0_78 +tag = rtm1_0_79 protocol = git repo_url = https://github.com/ESCOMP/RTM local_path = components/rtm diff --git a/cime_config/testdefs/testlist_cam.xml b/cime_config/testdefs/testlist_cam.xml index 05983cca7b..98813e6993 100644 --- a/cime_config/testdefs/testlist_cam.xml +++ b/cime_config/testdefs/testlist_cam.xml @@ -1444,7 +1444,7 @@ - + From c2a1d37eb9d7a79d8dffb54bb1a4fa3b471ef841 Mon Sep 17 00:00:00 2001 From: Cheryl Craig Date: Tue, 14 May 2024 11:22:48 -0600 Subject: [PATCH 127/161] put back in MCT since makefile mods would be required --- Externals.cfg | 7 +++++++ cime_config/testdefs/testlist_cam.xml | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/Externals.cfg b/Externals.cfg index f967878539..ad6907e697 100644 --- a/Externals.cfg +++ b/Externals.cfg @@ -49,6 +49,13 @@ repo_url = https://github.com/ESCOMP/CESM_share local_path = share required = True +[mct] +tag = MCT_2.11.0 +protocol = git +repo_url = https://github.com/MCSclimate/MCT +local_path = libraries/mct +required = True + [parallelio] tag = pio2_6_2 protocol = git diff --git a/cime_config/testdefs/testlist_cam.xml b/cime_config/testdefs/testlist_cam.xml index 
98813e6993..05983cca7b 100644 --- a/cime_config/testdefs/testlist_cam.xml +++ b/cime_config/testdefs/testlist_cam.xml @@ -1444,7 +1444,7 @@ - + From 94121295e57a1d6e713b8e3ba84261d9e9cfebe6 Mon Sep 17 00:00:00 2001 From: Jim Edwards Date: Tue, 14 May 2024 13:48:01 -0600 Subject: [PATCH 128/161] update documentation --- README.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index f8afb40b37..d1ef632f28 100644 --- a/README.md +++ b/README.md @@ -20,7 +20,7 @@ Git-fleximod is a Python-based tool that extends Git's submodule and sparse chec status: Display the status of submodules. update: Update submodules to the tag indicated in .gitmodules variable fxtag. test: Make sure that fxtags and submodule hashes are consistant, - make sure that official urls (as defined by fxurl) are set + make sure that official urls (as defined by fxDONOTUSEurl) are set make sure that fxtags are defined for all submodules Additional Options: See git fleximod --help for more details. @@ -34,8 +34,8 @@ Git-fleximod is a Python-based tool that extends Git's submodule and sparse chec - AlwaysRequired: Always required (always checked out). - AlwaysOptional: Always optional (checked out with --optional flag). fxsparse: Enable sparse checkout for a submodule, pointing to a file containing sparse checkout paths. - fxurl: This is the url used in the test subcommand to assure that protected branches do not point to forks - **NOTE** the fxurl variable is only used to identify the official project repository and should not be + fxDONOTUSEurl: This is the url used in the test subcommand to assure that protected branches do not point to forks + **NOTE** the fxDONOTUSEurl variable is only used to identify the official project repository and should not be changed by users. Use the url variable to change to a fork if desired. 
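 For example, a complete entry combining these variables might look like the sketch below, modeled on the mpi-serial test fixture used elsewhere in this series (the submodule name and values are illustrative, not a required configuration):
 
     [submodule "test_sparse"]
             path = modules/test_sparse
             url = https://github.com/ESMCI/mpi-serial.git
             fxtag = MPIserial_2.5.0
             fxrequired = AlwaysRequired
             fxsparse = ../.sparse_file_list
             fxDONOTUSEurl = https://github.com/ESMCI/mpi-serial.git
 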
## Sparse Checkouts From 0c922db8c47af355779d96b5a3c1b3702d7a6353 Mon Sep 17 00:00:00 2001 From: Cheryl Craig Date: Tue, 14 May 2024 15:49:34 -0600 Subject: [PATCH 129/161] Increase wall clock times --- cime_config/testdefs/testlist_cam.xml | 28 +++++++++++++-------------- 1 file changed, 14 insertions(+), 14 deletions(-) diff --git a/cime_config/testdefs/testlist_cam.xml b/cime_config/testdefs/testlist_cam.xml index 05983cca7b..575729a37c 100644 --- a/cime_config/testdefs/testlist_cam.xml +++ b/cime_config/testdefs/testlist_cam.xml @@ -73,7 +73,7 @@ - + @@ -83,7 +83,7 @@ - + @@ -93,7 +93,7 @@ - + @@ -1811,7 +1811,7 @@ - + @@ -1829,7 +1829,7 @@ - + @@ -2033,7 +2033,7 @@ - + @@ -2390,7 +2390,7 @@ - + @@ -2415,7 +2415,7 @@ - + @@ -2424,7 +2424,7 @@ - + @@ -2450,7 +2450,7 @@ - + @@ -2459,7 +2459,7 @@ - + @@ -2725,7 +2725,7 @@ - + @@ -2748,7 +2748,7 @@ - + @@ -2800,7 +2800,7 @@ - + From 4d797696cf47d5cab0eb18dba692cc746e35d45f Mon Sep 17 00:00:00 2001 From: Cheryl Craig Date: Thu, 16 May 2024 13:08:22 -0600 Subject: [PATCH 130/161] revert times back for regression tests to original times --- cime_config/testdefs/testlist_cam.xml | 28 +++++++++++++-------------- 1 file changed, 14 insertions(+), 14 deletions(-) diff --git a/cime_config/testdefs/testlist_cam.xml b/cime_config/testdefs/testlist_cam.xml index 575729a37c..05983cca7b 100644 --- a/cime_config/testdefs/testlist_cam.xml +++ b/cime_config/testdefs/testlist_cam.xml @@ -73,7 +73,7 @@ - + @@ -83,7 +83,7 @@ - + @@ -93,7 +93,7 @@ - + @@ -1811,7 +1811,7 @@ - + @@ -1829,7 +1829,7 @@ - + @@ -2033,7 +2033,7 @@ - + @@ -2390,7 +2390,7 @@ - + @@ -2415,7 +2415,7 @@ - + @@ -2424,7 +2424,7 @@ - + @@ -2450,7 +2450,7 @@ - + @@ -2459,7 +2459,7 @@ - + @@ -2725,7 +2725,7 @@ - + @@ -2748,7 +2748,7 @@ - + @@ -2800,7 +2800,7 @@ - + From 6d958488d99059327aed1e362112cffbc26378e6 Mon Sep 17 00:00:00 2001 From: Cheryl Craig Date: Thu, 16 May 2024 16:34:40 -0600 Subject: [PATCH 131/161] ChangeLog for cam6_3_161 --- doc/ChangeLog | 115 +++++++++++++++++++++++++++++++++++++++++++++++++- 1 file changed, 114 insertions(+), 1 deletion(-) diff --git a/doc/ChangeLog b/doc/ChangeLog index 9f4f0d348d..3a5c46527d 100644 --- a/doc/ChangeLog +++ b/doc/ChangeLog @@ -1,6 +1,120 @@ =============================================================== +Tag name: cam6_3_161 +Originator(s): cacraig +Date: May 16, 2024 +One-line Summary: Update to alpha17 externals +Github PR URL: https://github.com/ESCOMP/CAM/pull/1031 + +Purpose of changes (include the issue number and title text for each relevant GitHub issue): + - Update externals to match externals which will be used in cesm2_3_beta17: https://github.com/ESCOMP/CAM/issues/985 + - Bring in ccs_config0.0.99: https://github.com/ESCOMP/CAM/issues/1021 + - Unable to compile cam6_3_154 with nvhpc/24.3 on Derecho: https://github.com/ESCOMP/CAM/issues/1025 + +Describe any changes made to build system: N/A + +Describe any changes made to the namelist: N/A + +List any changes to the defaults for the boundary datasets: N/A + +Describe any substantial timing or memory changes: N/A + +Code reviewed by: nusbaume + +List all files eliminated: N/A + +List all files added and what they do: N/A + +List all existing files that have been modified, and describe the changes: +M Externals.cfg + - update externals to match cesm2_3_beta17 + +If there were any failures reported from running test_driver.sh on any test +platform, and checkin with these failures has been OK'd by the gatekeeper, +then copy the lines from the td.*.status files for the failed tests 
to the +appropriate machine below. All failed tests must be justified. + +NOTE - most tests have namelist differences due to mediator namelist changes + +derecho/intel/aux_cam: + ERP_Ln9.f09_f09_mg17.FCSD_HCO.derecho_intel.cam-outfrq9s (Overall: FAIL) details: + - pre-existing failure + + ERP_Lh12.f19_f19_mg17.FW4madSD.derecho_intel.cam-outfrq3h (Overall: FAIL) details: + - Bug during CREATE_NEWCASE in CTSM code - will go away when CTSM external is updated post git-fleximod + + SMS_D_Ln9.T42_T42.FSCAM.derecho_intel.cam-outfrq9s (Overall: FAIL) details: + - Bug in med.F90 - Will go away when CICE external is updated post git-fleximod + + ERP_Ln9.ne30_ne30_mg17.FCnudged.derecho_intel.cam-outfrq9s (Overall: FAIL) details: + FAIL ERP_Ln9.ne30_ne30_mg17.FCnudged.derecho_intel.cam-outfrq9s SHAREDLIB_BUILD failed to initialize + SMS_D_Ln9.ne16_ne16_mg17.FX2000.derecho_intel.cam-outfrq9s (Overall: PEND) details: + PEND SMS_D_Ln9.ne16_ne16_mg17.FX2000.derecho_intel.cam-outfrq9s SHAREDLIB_BUILD + SMS_D_Ln9_P1280x1.ne0ARCTICne30x4_ne0ARCTICne30x4_mt12.FHIST.derecho_intel.cam-outfrq9s (Overall: FAIL) details: + FAIL SMS_D_Ln9_P1280x1.ne0ARCTICne30x4_ne0ARCTICne30x4_mt12.FHIST.derecho_intel.cam-outfrq9s SHAREDLIB_BUILD time=2 + SMS_D_Ln9_P1280x1.ne0CONUSne30x8_ne0CONUSne30x8_mt12.FCHIST.derecho_intel.cam-outfrq9s (Overall: FAIL) details: + FAIL SMS_D_Ln9_P1280x1.ne0CONUSne30x8_ne0CONUSne30x8_mt12.FCHIST.derecho_intel.cam-outfrq9s SHAREDLIB_BUILD time=1 + - Bugs reported to CTSM and will be fixed when CTSM external is updated post git-fleximod + + ERP_D_Ln9.ne30pg3_ne30pg3_mg17.F2000dev.derecho_intel.cam-outfrq9s (Overall: DIFF) details: + ERP_D_Ln9.ne30pg3_ne30pg3_mg17.FLTHIST.derecho_intel.cam-outfrq9s (Overall: DIFF) details: + ERP_D_Ln9.ne30pg3_ne30pg3_mg17.FLTHIST.derecho_intel.cam-outfrq9s_rrtmgp (Overall: DIFF) details: + ERP_Ld3.f09_f09_mg17.FWHIST.derecho_intel.cam-reduced_hist1d (Overall: DIFF) details: + ERP_Ln9.C96_C96_mg17.F2000climo.derecho_intel.cam-outfrq9s_mg3 (Overall: DIFF) details: + ERP_Ln9.f09_f09_mg17.F1850.derecho_intel.cam-outfrq9s (Overall: DIFF) details: + ERP_Ln9.f09_f09_mg17.F2000climo.derecho_intel.cam-outfrq9s (Overall: DIFF) details: + ERP_Ln9.f09_f09_mg17.F2000dev.derecho_intel.cam-outfrq9s_mg3 (Overall: DIFF) details: + ERP_Ln9.f09_f09_mg17.F2010climo.derecho_intel.cam-outfrq9s (Overall: DIFF) details: + ERP_Ln9.f09_f09_mg17.FHIST_BDRD.derecho_intel.cam-outfrq9s (Overall: DIFF) details: + ERP_Ln9.f19_f19_mg17.FWsc1850.derecho_intel.cam-outfrq9s (Overall: DIFF) details: + ERP_Ln9.ne30pg3_ne30pg3_mg17.FW2000climo.derecho_intel.cam-outfrq9s (Overall: DIFF) details: + ERS_Ld3.f10_f10_mg37.F1850.derecho_intel.cam-outfrq1d_14dec_ghg_cam_dev (Overall: DIFF) details: + ERS_Ln9.f09_f09_mg17.FX2000.derecho_intel.cam-outfrq9s (Overall: DIFF) details: + ERS_Ln9.f19_f19_mg17.FSPCAMS.derecho_intel.cam-outfrq9s (Overall: DIFF) details: + ERS_Ln9.f19_f19_mg17.FXSD.derecho_intel.cam-outfrq9s (Overall: DIFF) details: + ERS_Ln9_P288x1.mpasa120_mpasa120.F2000climo.derecho_intel.cam-outfrq9s_mpasa120 (Overall: DIFF) details: + ERS_Ln9_P36x1.mpasa480_mpasa480.F2000climo.derecho_intel.cam-outfrq9s_mpasa480 (Overall: DIFF) details: + SMS_D_Ln9.f09_f09_mg17.FCts2nudged.derecho_intel.cam-outfrq9s_leapday (Overall: DIFF) details: + SMS_D_Ln9.f09_f09_mg17.FCvbsxHIST.derecho_intel.cam-outfrq9s (Overall: DIFF) details: + SMS_D_Ln9.f09_f09_mg17.FSD.derecho_intel.cam-outfrq9s (Overall: DIFF) details: + SMS_D_Ln9.f19_f19_mg17.FWma2000climo.derecho_intel.cam-outfrq9s (Overall: DIFF) details: + 
SMS_D_Ln9.f19_f19_mg17.FWma2000climo.derecho_intel.cam-outfrq9s_waccm_ma_mam4 (Overall: DIFF) details: + SMS_D_Ln9.f19_f19_mg17.FXHIST.derecho_intel.cam-outfrq9s_amie (Overall: DIFF) details: + SMS_D_Ln9.ne30pg3_ne30pg3_mg17.FMTHIST.derecho_intel.cam-outfrq9s (Overall: DIFF) details: + SMS_D_Ln9_P1280x1.ne30pg3_ne30pg3_mg17.FCLTHIST.derecho_intel.cam-outfrq9s (Overall: DIFF) details: + SMS_Ld1.f09_f09_mg17.FCHIST_GC.derecho_intel.cam-outfrq1d (Overall: DIFF) details: + SMS_Ld1.f09_f09_mg17.FW2000climo.derecho_intel.cam-outfrq1d (Overall: DIFF) details: + SMS_Ld1.f19_f19.F2000dev.derecho_intel.cam-outfrq1d (Overall: DIFF) details: + SMS_Ld1.ne30pg3_ne30pg3_mg17.FC2010climo.derecho_intel.cam-outfrq1d (Overall: DIFF) details: + SMS_Lh12.f09_f09_mg17.FCSD_HCO.derecho_intel.cam-outfrq3h (Overall: DIFF) details: + SMS_Lm13.f10_f10_mg37.F2000climo.derecho_intel.cam-outfrq1m (Overall: DIFF) details: + SMS_Ln9.f09_f09_mg17.F2010climo.derecho_intel.cam-nudging (Overall: DIFF) details: + SMS_Ln9.f09_f09_mg17.FW1850.derecho_intel.cam-reduced_hist3s (Overall: DIFF) details: + SMS_Ln9.f19_f19.F2000climo.derecho_intel.cam-silhs (Overall: DIFF) details: + SMS_Ln9.f19_f19_mg17.FHIST.derecho_intel.cam-outfrq9s_nochem (Overall: DIFF) details: + SMS_Ln9.ne30pg3_ne30pg3_mg17.FW2000climo.derecho_intel.cam-outfrq9s_rrtmgp (Overall: DIFF) details: + - Answer changes due to updated externals + +izumi/nag/aux_cam: all B4B, except: + DAE.f45_f45_mg37.FHS94.izumi_nag.cam-dae (Overall: FAIL) details: + - pre-existing failure + +izumi/gnu/aux_cam: all BFB except: + SMS_P48x1_D_Ln9.f19_f19_mg17.FW4madSD.izumi_gnu.cam-outfrq9s (Overall: FAIL) details: + - Bug during CREATE_NEWCASE in CTSM code - will go away when CTSM external is updated post git-fleximod + + SMS_D_Ln9.f10_f10_mg37.2000_CAM%DEV%GHGMAM4_CLM50%SP_CICE%PRES_DOCN%DOM_MOSART_SGLC_SWAV_SESP.izumi_gnu.cam-outfrq9s (Overall: DIFF) details: + - answer changes due to updated externals + + +CAM tag used for the baseline comparison tests if different than previous +tag: cam6_3_159 as cam6_3_160 did not run regression tests + + +=============================================================== +=============================================================== + Tag name: cam6_3_160 Originator(s): cacraig, jedwards Date: April 29, 2024 @@ -461,7 +575,6 @@ derecho/intel/aux_cam: SMS_Ld1.f09_f09_mg17.FW2000climo.derecho_intel.cam-outfrq1d (Overall: DIFF) details: SMS_Ld1.f19_f19.F2000dev.derecho_intel.cam-outfrq1d (Overall: DIFF) details: SMS_Ld1.ne30pg3_ne30pg3_mg17.FC2010climo.derecho_intel.cam-outfrq1d (Overall: DIFF) details: - SMS_Lh12.f09_f09_mg17.FCSD_HCO.derecho_intel.cam-outfrq3h (Overall: DIFF) details: SMS_Lm13.f10_f10_mg37.F2000climo.derecho_intel.cam-outfrq1m (Overall: DIFF) details: SMS_Ln9.f09_f09_mg17.F2010climo.derecho_intel.cam-nudging (Overall: DIFF) details: SMS_Ln9.f09_f09_mg17.FW1850.derecho_intel.cam-reduced_hist3s (Overall: DIFF) details: From 8593b17b3685cd33237f41b27bb30e7658c16655 Mon Sep 17 00:00:00 2001 From: Jim Edwards Date: Wed, 29 May 2024 07:45:25 -0600 Subject: [PATCH 132/161] add submodules --- .gitignore | 24 ---- .gitmodules | 199 ++++++++++++++++++++++++++++ ccs_config | 1 + chem_proc | 1 + cime | 1 + components/cdeps | 1 + components/cice | 1 + components/cism | 1 + components/clm | 1 + components/cmeps | 1 + components/mizuRoute | 1 + components/mosart | 1 + components/rtm | 1 + libraries/FMS | 1 + libraries/mct | 1 + libraries/parallelio | 1 + share | 1 + src/atmos_phys | 1 + src/chemistry/geoschem/geoschem_src | 1 + 
src/dynamics/fv3 | 1 + src/dynamics/mpas/dycore | 1 + src/hemco | 1 + src/physics/ali_arms | 1 + src/physics/carma/base | 1 + src/physics/clubb | 1 + src/physics/cosp2/src | 1 + src/physics/pumas | 1 + src/physics/pumas-frozen | 1 + src/physics/rrtmgp/data | 1 + src/physics/rrtmgp/ext | 1 + 30 files changed, 227 insertions(+), 24 deletions(-) create mode 100644 .gitmodules create mode 160000 ccs_config create mode 160000 chem_proc create mode 160000 cime create mode 160000 components/cdeps create mode 160000 components/cice create mode 160000 components/cism create mode 160000 components/clm create mode 160000 components/cmeps create mode 160000 components/mizuRoute create mode 160000 components/mosart create mode 160000 components/rtm create mode 160000 libraries/FMS create mode 160000 libraries/mct create mode 160000 libraries/parallelio create mode 160000 share create mode 160000 src/atmos_phys create mode 160000 src/chemistry/geoschem/geoschem_src create mode 160000 src/dynamics/fv3 create mode 160000 src/dynamics/mpas/dycore create mode 160000 src/hemco create mode 160000 src/physics/ali_arms create mode 160000 src/physics/carma/base create mode 160000 src/physics/clubb create mode 160000 src/physics/cosp2/src create mode 160000 src/physics/pumas create mode 160000 src/physics/pumas-frozen create mode 160000 src/physics/rrtmgp/data create mode 160000 src/physics/rrtmgp/ext diff --git a/.gitignore b/.gitignore index e24b6fa55e..ca3a7df6c0 100644 --- a/.gitignore +++ b/.gitignore @@ -1,27 +1,3 @@ -# Ignore externals -ccs_config -chem_proc -cime -components -manage_externals.log -src/physics/ali_arms/ -src/physics/carma/base -src/physics/clubb -src/physics/cosp2/src -src/physics/silhs -src/chemistry/geoschem/geoschem_src -src/physics/pumas -src/physics/pumas-frozen -src/physics/rrtmgp/data -src/physics/rrtmgp/ext -src/dynamics/fv3 -libraries/FMS -libraries/mct -libraries/parallelio -src/atmos_phys -src/dynamics/mpas/dycore -share -src/hemco # Ignore compiled python buildnmlc buildcppc diff --git a/.gitmodules b/.gitmodules new file mode 100644 index 0000000000..4b2a94beb3 --- /dev/null +++ b/.gitmodules @@ -0,0 +1,199 @@ +[submodule "chem_proc"] + path = chem_proc + url = https://github.com/ESCOMP/CHEM_PREPROCESSOR.git + fxrequired = AlwaysRequired + fxtag = chem_proc5_0_05 + fxDONOTUSEurl = https://github.com/ESCOMP/CHEM_PREPROCESSOR.git + +[submodule "carma"] + path = src/physics/carma/base + url = https://github.com/ESCOMP/CARMA_base.git + fxrequired = AlwaysRequired + fxtag = carma4_01 + fxDONOTUSEurl = https://github.com/ESCOMP/CARMA_base.git + +[submodule "pumas"] + path = src/physics/pumas + url = https://github.com/ESCOMP/PUMAS + fxrequired = AlwaysRequired + fxtag = pumas_cam-release_v1.36 + fxDONOTUSEurl = https://github.com/ESCOMP/PUMAS + +[submodule "pumas-frozen"] + path = src/physics/pumas-frozen + url = https://github.com/ESCOMP/PUMAS + fxrequired = AlwaysRequired + fxtag = pumas_cam-release_v1.17_rename + fxDONOTUSEurl = https://github.com/ESCOMP/PUMAS + +[submodule "ali_arms"] + path = src/physics/ali_arms + url = https://github.com/ESCOMP/ALI-ARMS + fxrequired = AlwaysRequired + fxtag = ALI_ARMS_v1.0.1 + fxDONOTUSEurl = https://github.com/ESCOMP/ALI-ARMS + +[submodule "atmos_phys"] + path = src/atmos_phys + url = https://github.com/ESCOMP/atmospheric_physics + fxtag = atmos_phys0_02_006 + fxrequired = AlwaysRequired + fxDONOTUSEurl = https://github.com/ESCOMP/atmospheric_physics + +[submodule "fv3"] + path = src/dynamics/fv3 + url = 
https://github.com/ESCOMP/CAM_FV3_interface.git + fxrequired = AlwaysRequired + fxtag = fv3int_022824 + fxDONOTUSEurl = https://github.com/ESCOMP/CAM_FV3_interface.git + +[submodule "geoschem"] + path = src/chemistry/geoschem/geoschem_src + url = https://github.com/geoschem/geos-chem.git + fxrequired = AlwaysRequired + fxtag = 14.1.2 + fxDONOTUSEurl = https://github.com/geoschem/geos-chem.git + +[submodule "hemco"] + path = src/hemco + url = https://github.com/ESCOMP/HEMCO_CESM.git + fxtag = hemco-cesm1_2_1_hemco3_6_3_cesm + fxrequired = AlwaysRequired + fxDONOTUSEurl = https://github.com/ESCOMP/HEMCO_CESM.git + +[submodule "rte-rrtmgp"] + path = src/physics/rrtmgp/ext + url = https://github.com/earth-system-radiation/rte-rrtmgp.git + fxrequired = AlwaysRequired + fxtag = v1.7 + fxDONOTUSEurl = https://github.com/earth-system-radiation/rte-rrtmgp.git + +[submodule "rrtmgp-data"] + path = src/physics/rrtmgp/data + url = https://github.com/earth-system-radiation/rrtmgp-data.git + fxrequired = AlwaysRequired + fxtag = v1.8 + fxDONOTUSEurl = https://github.com/earth-system-radiation/rrtmgp-data.git + +[submodule "mpas"] + path = src/dynamics/mpas/dycore + url = https://github.com/MPAS-Dev/MPAS-Model.git + fxrequired = AlwaysRequired + fxsparse = ../.mpas_sparse_checkout + fxtag = b8c33daa + fxDONOTUSEurl = https://github.com/MPAS-Dev/MPAS-Model.git + +[submodule "cosp2"] + path = src/physics/cosp2/src + url = https://github.com/CFMIP/COSPv2.0 + fxrequired = AlwaysRequired + fxsparse = ../.cosp_sparse_checkout + fxtag = v2.1.4cesm + fxDONOTUSEurl = https://github.com/CFMIP/COSPv2.0 + +[submodule "clubb"] + path = src/physics/clubb + url = https://github.com/larson-group/clubb_release + fxrequired = AlwaysRequired + fxsparse = ../.clubb_sparse_checkout + fxtag = clubb_4ncar_20231115_5406350 + fxDONOTUSEurl = https://github.com/larson-group/clubb_release + +[submodule "cism"] +path = components/cism +url = https://github.com/ESCOMP/CISM-wrapper +fxtag = cismwrap_2_2_001 +fxrequired = ToplevelRequired +fxDONOTUSEurl = https://github.com/ESCOMP/CISM-wrapper + +[submodule "rtm"] +path = components/rtm +url = https://github.com/ESCOMP/RTM +fxtag = rtm1_0_79 +fxrequired = ToplevelRequired +fxDONOTUSEurl = https://github.com/ESCOMP/RTM + +[submodule "mosart"] +path = components/mosart +url = https://github.com/ESCOMP/MOSART +fxtag = mosart1_0_49 +fxrequired = ToplevelRequired +fxDONOTUSEurl = https://github.com/ESCOMP/MOSART + +[submodule "mizuRoute"] +path = components/mizuRoute +url = https://github.com/ESCOMP/mizuRoute +fxtag = cesm-coupling.n02_v2.1.2 +fxrequired = ToplevelRequired +fxDONOTUSEurl = https://github.com/ESCOMP/mizuRoute + +[submodule "ccs_config"] +path = ccs_config +url = https://github.com/ESMCI/ccs_config_cesm.git +fxtag = ccs_config_cesm0.0.106 +fxrequired = ToplevelRequired +fxDONOTUSEurl = https://github.com/ESMCI/ccs_config_cesm.git + +[submodule "cime"] +path = cime +url = https://github.com/ESMCI/cime +fxtag = cime6.0.246 +fxrequired = ToplevelRequired +fxDONOTUSEurl = https://github.com/ESMCI/cime + +[submodule "cmeps"] +path = components/cmeps +url = https://github.com/ESCOMP/CMEPS.git +fxtag = cmeps0.14.63 +fxrequired = ToplevelRequired +fxDONOTUSEurl = https://github.com/ESCOMP/CMEPS.git + +[submodule "cdeps"] +path = components/cdeps +url = https://github.com/ESCOMP/CDEPS.git +fxtag = cdeps1.0.34 +fxrequired = ToplevelRequired +fxDONOTUSEurl = https://github.com/ESCOMP/CDEPS.git + +[submodule "share"] +path = share +url = https://github.com/ESCOMP/CESM_share +fxtag = 
share1.0.19 +fxrequired = ToplevelRequired +fxDONOTUSEurl = https://github.com/ESCOMP/CESM_share + +[submodule "mct"] +path = libraries/mct +url = https://github.com/MCSclimate/MCT +fxtag = MCT_2.11.0 +fxrequired = ToplevelRequired +fxDONOTUSEurl = https://github.com/MCSclimate/MCT + +[submodule "parallelio"] +path = libraries/parallelio +url = https://github.com/NCAR/ParallelIO +fxtag = pio2_6_2 +fxrequired = ToplevelRequired +fxDONOTUSEurl = https://github.com/NCAR/ParallelIO + +[submodule "cice"] +path = components/cice +url = https://github.com/ESCOMP/CESM_CICE +fxtag = cesm_cice6_5_0_9 +fxrequired = ToplevelRequired +fxDONOTUSEurl = https://github.com/NCAR/ParallelIO + +[submodule "clm"] +path = components/clm +url = https://github.com/ESCOMP/CTSM +fxtag = ctsm5.2.007 +fxrequired = ToplevelRequired +fxDONOTUSEurl = https://github.com/ESCOMP/CTSM + +[submodule "fms"] +path = libraries/FMS +url = https://github.com/ESCOMP/FMS_interface +fxtag = fi_240516 +fxrequired = ToplevelRequired +fxDONOTUSEurl = https://github.com/ESCOMP/FMS_interface + diff --git a/ccs_config b/ccs_config new file mode 160000 index 0000000000..f90e10502c --- /dev/null +++ b/ccs_config @@ -0,0 +1 @@ +Subproject commit f90e10502c7246c2b45e373b7dd5e0da6cba0828 diff --git a/chem_proc b/chem_proc new file mode 160000 index 0000000000..944f506033 --- /dev/null +++ b/chem_proc @@ -0,0 +1 @@ +Subproject commit 944f506033d88a5dd1ee4556a932d2c90023cbfc diff --git a/cime b/cime new file mode 160000 index 0000000000..422ddaa770 --- /dev/null +++ b/cime @@ -0,0 +1 @@ +Subproject commit 422ddaa770a3cea6e83a60c9700ebce77acaceed diff --git a/components/cdeps b/components/cdeps new file mode 160000 index 0000000000..7a522c828c --- /dev/null +++ b/components/cdeps @@ -0,0 +1 @@ +Subproject commit 7a522c828c32dc35777992653f281ec525509c4a diff --git a/components/cice b/components/cice new file mode 160000 index 0000000000..bdf6ea04d6 --- /dev/null +++ b/components/cice @@ -0,0 +1 @@ +Subproject commit bdf6ea04d6133434fcaa4de5336de106f01290d0 diff --git a/components/cism b/components/cism new file mode 160000 index 0000000000..c05dd5c4fc --- /dev/null +++ b/components/cism @@ -0,0 +1 @@ +Subproject commit c05dd5c4fc85327e76523aaea9cfe1e388748928 diff --git a/components/clm b/components/clm new file mode 160000 index 0000000000..6aebaad42f --- /dev/null +++ b/components/clm @@ -0,0 +1 @@ +Subproject commit 6aebaad42f1db6d8be2c84205adce60a884cc254 diff --git a/components/cmeps b/components/cmeps new file mode 160000 index 0000000000..6384ff4e4a --- /dev/null +++ b/components/cmeps @@ -0,0 +1 @@ +Subproject commit 6384ff4e4a6bc82a678f9419a43ffbd5d53ac209 diff --git a/components/mizuRoute b/components/mizuRoute new file mode 160000 index 0000000000..0a62ee6185 --- /dev/null +++ b/components/mizuRoute @@ -0,0 +1 @@ +Subproject commit 0a62ee6185bdc9b99cd88ed5f15ea850602c43a2 diff --git a/components/mosart b/components/mosart new file mode 160000 index 0000000000..1c22a8c547 --- /dev/null +++ b/components/mosart @@ -0,0 +1 @@ +Subproject commit 1c22a8c5478b714cf5b7c54b3e5cf7bad09fd9b2 diff --git a/components/rtm b/components/rtm new file mode 160000 index 0000000000..88503adbc2 --- /dev/null +++ b/components/rtm @@ -0,0 +1 @@ +Subproject commit 88503adbc275fb2ccbb6b598e460deaeb140e515 diff --git a/libraries/FMS b/libraries/FMS new file mode 160000 index 0000000000..270433531d --- /dev/null +++ b/libraries/FMS @@ -0,0 +1 @@ +Subproject commit 270433531d33c64da7944d80564fe39a84917d26 diff --git a/libraries/mct b/libraries/mct new file mode 
160000 index 0000000000..e36024c5dd --- /dev/null +++ b/libraries/mct @@ -0,0 +1 @@ +Subproject commit e36024c5ddf482625ae6bd9474eff7d8f393f87c diff --git a/libraries/parallelio b/libraries/parallelio new file mode 160000 index 0000000000..6539ef05ae --- /dev/null +++ b/libraries/parallelio @@ -0,0 +1 @@ +Subproject commit 6539ef05ae7584ec570a56fdab9f7dfb336c2b80 diff --git a/share b/share new file mode 160000 index 0000000000..4b9dc4871a --- /dev/null +++ b/share @@ -0,0 +1 @@ +Subproject commit 4b9dc4871a259f00f35bb47708d876cb7dcdf75c diff --git a/src/atmos_phys b/src/atmos_phys new file mode 160000 index 0000000000..4944547f04 --- /dev/null +++ b/src/atmos_phys @@ -0,0 +1 @@ +Subproject commit 4944547f04b1457d78bf7d3c0becddcfe0deabb9 diff --git a/src/chemistry/geoschem/geoschem_src b/src/chemistry/geoschem/geoschem_src new file mode 160000 index 0000000000..20e2402baf --- /dev/null +++ b/src/chemistry/geoschem/geoschem_src @@ -0,0 +1 @@ +Subproject commit 20e2402baf56c682cc04af74adb139efdb6ca000 diff --git a/src/dynamics/fv3 b/src/dynamics/fv3 new file mode 160000 index 0000000000..38b889fcff --- /dev/null +++ b/src/dynamics/fv3 @@ -0,0 +1 @@ +Subproject commit 38b889fcfff1e316ef5b4d4d02ecc47c801b8669 diff --git a/src/dynamics/mpas/dycore b/src/dynamics/mpas/dycore new file mode 160000 index 0000000000..f084b36f8a --- /dev/null +++ b/src/dynamics/mpas/dycore @@ -0,0 +1 @@ +Subproject commit f084b36f8ac82eb1e76c426d3572339c36523c77 diff --git a/src/hemco b/src/hemco new file mode 160000 index 0000000000..3bd9df0970 --- /dev/null +++ b/src/hemco @@ -0,0 +1 @@ +Subproject commit 3bd9df097069ff0158a1d104483c445b69f1426d diff --git a/src/physics/ali_arms b/src/physics/ali_arms new file mode 160000 index 0000000000..825e7f20e2 --- /dev/null +++ b/src/physics/ali_arms @@ -0,0 +1 @@ +Subproject commit 825e7f20e2dd368b95b1e3cb2562ab571318bb4d diff --git a/src/physics/carma/base b/src/physics/carma/base new file mode 160000 index 0000000000..2123b52f97 --- /dev/null +++ b/src/physics/carma/base @@ -0,0 +1 @@ +Subproject commit 2123b52f97daaeb546753cbbef56c6f1b70ba608 diff --git a/src/physics/clubb b/src/physics/clubb new file mode 160000 index 0000000000..a7eaa45389 --- /dev/null +++ b/src/physics/clubb @@ -0,0 +1 @@ +Subproject commit a7eaa4538926587250da0bd37653f5cf70aad174 diff --git a/src/physics/cosp2/src b/src/physics/cosp2/src new file mode 160000 index 0000000000..241f1b7f4f --- /dev/null +++ b/src/physics/cosp2/src @@ -0,0 +1 @@ +Subproject commit 241f1b7f4f64dbabe8d1c042ce27466a0b6972ee diff --git a/src/physics/pumas b/src/physics/pumas new file mode 160000 index 0000000000..d2683f278f --- /dev/null +++ b/src/physics/pumas @@ -0,0 +1 @@ +Subproject commit d2683f278fd6de65c326354673124ad3cf34f047 diff --git a/src/physics/pumas-frozen b/src/physics/pumas-frozen new file mode 160000 index 0000000000..d2683f278f --- /dev/null +++ b/src/physics/pumas-frozen @@ -0,0 +1 @@ +Subproject commit d2683f278fd6de65c326354673124ad3cf34f047 diff --git a/src/physics/rrtmgp/data b/src/physics/rrtmgp/data new file mode 160000 index 0000000000..aafa333a60 --- /dev/null +++ b/src/physics/rrtmgp/data @@ -0,0 +1 @@ +Subproject commit aafa333a60c06fca2fbf219fbd17e7f432b43e3f diff --git a/src/physics/rrtmgp/ext b/src/physics/rrtmgp/ext new file mode 160000 index 0000000000..41c5fcd950 --- /dev/null +++ b/src/physics/rrtmgp/ext @@ -0,0 +1 @@ +Subproject commit 41c5fcd950fed09b8afe186dede266824eca7fd3 From aff830f7dfec1956a90b594460eb59ae026d5534 Mon Sep 17 00:00:00 2001 From: Jim Edwards Date: Wed, 29 May 2024 
07:49:30 -0600 Subject: [PATCH 133/161] update externals, waiting for clm tag --- ccs_config | 2 +- chem_proc | 2 +- components/cdeps | 2 +- components/mizuRoute | 2 +- components/mosart | 2 +- libraries/mct | 2 +- libraries/parallelio | 2 +- src/chemistry/geoschem/geoschem_src | 2 +- src/dynamics/mpas/dycore | 2 +- src/hemco | 2 +- src/physics/carma/base | 2 +- src/physics/clubb | 2 +- src/physics/cosp2/src | 2 +- src/physics/pumas | 2 +- src/physics/pumas-frozen | 2 +- src/physics/rrtmgp/data | 2 +- src/physics/rrtmgp/ext | 2 +- 17 files changed, 17 insertions(+), 17 deletions(-) diff --git a/ccs_config b/ccs_config index f90e10502c..2ff978f92a 160000 --- a/ccs_config +++ b/ccs_config @@ -1 +1 @@ -Subproject commit f90e10502c7246c2b45e373b7dd5e0da6cba0828 +Subproject commit 2ff978f92a5ac9a6ab243e5c14d06a7e2d2f5799 diff --git a/chem_proc b/chem_proc index 944f506033..ed4ba1b27e 160000 --- a/chem_proc +++ b/chem_proc @@ -1 +1 @@ -Subproject commit 944f506033d88a5dd1ee4556a932d2c90023cbfc +Subproject commit ed4ba1b27e40c8d5af6657ec49a3e3cc8e6f8b5f diff --git a/components/cdeps b/components/cdeps index 7a522c828c..7476950699 160000 --- a/components/cdeps +++ b/components/cdeps @@ -1 +1 @@ -Subproject commit 7a522c828c32dc35777992653f281ec525509c4a +Subproject commit 7476950699909813d1938a34bd8d71bf5bfbf1e9 diff --git a/components/mizuRoute b/components/mizuRoute index 0a62ee6185..81c720c7ee 160000 --- a/components/mizuRoute +++ b/components/mizuRoute @@ -1 +1 @@ -Subproject commit 0a62ee6185bdc9b99cd88ed5f15ea850602c43a2 +Subproject commit 81c720c7ee51f9c69f2934f696078c42f4493565 diff --git a/components/mosart b/components/mosart index 1c22a8c547..8c682b1b7f 160000 --- a/components/mosart +++ b/components/mosart @@ -1 +1 @@ -Subproject commit 1c22a8c5478b714cf5b7c54b3e5cf7bad09fd9b2 +Subproject commit 8c682b1b7f15d146816de302e0d359da3e957056 diff --git a/libraries/mct b/libraries/mct index e36024c5dd..82b0071e69 160000 --- a/libraries/mct +++ b/libraries/mct @@ -1 +1 @@ -Subproject commit e36024c5ddf482625ae6bd9474eff7d8f393f87c +Subproject commit 82b0071e69d14330b75d23b0bc68543ebea9aadc diff --git a/libraries/parallelio b/libraries/parallelio index 6539ef05ae..f52ade0756 160000 --- a/libraries/parallelio +++ b/libraries/parallelio @@ -1 +1 @@ -Subproject commit 6539ef05ae7584ec570a56fdab9f7dfb336c2b80 +Subproject commit f52ade075619b32fa141993b5665b0fe099befc2 diff --git a/src/chemistry/geoschem/geoschem_src b/src/chemistry/geoschem/geoschem_src index 20e2402baf..28345ee76e 160000 --- a/src/chemistry/geoschem/geoschem_src +++ b/src/chemistry/geoschem/geoschem_src @@ -1 +1 @@ -Subproject commit 20e2402baf56c682cc04af74adb139efdb6ca000 +Subproject commit 28345ee76e5631d6d14869a36dc73e9dd6e0ce1e diff --git a/src/dynamics/mpas/dycore b/src/dynamics/mpas/dycore index f084b36f8a..b8c33daa4b 160000 --- a/src/dynamics/mpas/dycore +++ b/src/dynamics/mpas/dycore @@ -1 +1 @@ -Subproject commit f084b36f8ac82eb1e76c426d3572339c36523c77 +Subproject commit b8c33daa4b124f116b0ba1b1386968c4c15b42e4 diff --git a/src/hemco b/src/hemco index 3bd9df0970..83127485c1 160000 --- a/src/hemco +++ b/src/hemco @@ -1 +1 @@ -Subproject commit 3bd9df097069ff0158a1d104483c445b69f1426d +Subproject commit 83127485c149846ba79d5472ae131acd7bda0544 diff --git a/src/physics/carma/base b/src/physics/carma/base index 2123b52f97..bf165cd84e 160000 --- a/src/physics/carma/base +++ b/src/physics/carma/base @@ -1 +1 @@ -Subproject commit 2123b52f97daaeb546753cbbef56c6f1b70ba608 +Subproject commit 
bf165cd84ef94087d9a5669a5ad47838ab24c0ef diff --git a/src/physics/clubb b/src/physics/clubb index a7eaa45389..50cee042e5 160000 --- a/src/physics/clubb +++ b/src/physics/clubb @@ -1 +1 @@ -Subproject commit a7eaa4538926587250da0bd37653f5cf70aad174 +Subproject commit 50cee042e588fefd3fe58c2a1d638affec8c0389 diff --git a/src/physics/cosp2/src b/src/physics/cosp2/src index 241f1b7f4f..34d8eef3d2 160000 --- a/src/physics/cosp2/src +++ b/src/physics/cosp2/src @@ -1 +1 @@ -Subproject commit 241f1b7f4f64dbabe8d1c042ce27466a0b6972ee +Subproject commit 34d8eef3d231a87c0f73e565f6b5d548876b294a diff --git a/src/physics/pumas b/src/physics/pumas index d2683f278f..84f27d8042 160000 --- a/src/physics/pumas +++ b/src/physics/pumas @@ -1 +1 @@ -Subproject commit d2683f278fd6de65c326354673124ad3cf34f047 +Subproject commit 84f27d804207e79e344e8deec98b471207f9b1f0 diff --git a/src/physics/pumas-frozen b/src/physics/pumas-frozen index d2683f278f..be3cad3a12 160000 --- a/src/physics/pumas-frozen +++ b/src/physics/pumas-frozen @@ -1 +1 @@ -Subproject commit d2683f278fd6de65c326354673124ad3cf34f047 +Subproject commit be3cad3a12d25918f5016b509b15057f84aab608 diff --git a/src/physics/rrtmgp/data b/src/physics/rrtmgp/data index aafa333a60..df02975ab9 160000 --- a/src/physics/rrtmgp/data +++ b/src/physics/rrtmgp/data @@ -1 +1 @@ -Subproject commit aafa333a60c06fca2fbf219fbd17e7f432b43e3f +Subproject commit df02975ab93165b34a59f0d04b4ae6148fe5127c diff --git a/src/physics/rrtmgp/ext b/src/physics/rrtmgp/ext index 41c5fcd950..4d8c5df4c6 160000 --- a/src/physics/rrtmgp/ext +++ b/src/physics/rrtmgp/ext @@ -1 +1 @@ -Subproject commit 41c5fcd950fed09b8afe186dede266824eca7fd3 +Subproject commit 4d8c5df4c63434aaab854afd1b02f5986d41dfb3 From dbbb17ffd56271971f3f85ad7728b18c6d202c06 Mon Sep 17 00:00:00 2001 From: Jim Edwards Date: Wed, 29 May 2024 07:51:15 -0600 Subject: [PATCH 134/161] remove manage externals --- Externals.cfg | 118 - Externals_CAM.cfg | 105 - manage_externals/.dir_locals.el | 12 - manage_externals/.github/ISSUE_TEMPLATE.md | 6 - .../.github/PULL_REQUEST_TEMPLATE.md | 17 - .../.github/workflows/bumpversion.yml | 19 - manage_externals/.github/workflows/tests.yml | 30 - manage_externals/.gitignore | 17 - manage_externals/.travis.yml | 18 - manage_externals/LICENSE.txt | 34 - manage_externals/README.md | 231 -- manage_externals/README_FIRST | 54 - manage_externals/checkout_externals | 36 - manage_externals/manic/__init__.py | 9 - manage_externals/manic/checkout.py | 446 ---- .../manic/externals_description.py | 830 -------- manage_externals/manic/externals_status.py | 164 -- manage_externals/manic/global_constants.py | 18 - manage_externals/manic/repository.py | 98 - manage_externals/manic/repository_factory.py | 30 - manage_externals/manic/repository_git.py | 849 -------- manage_externals/manic/repository_svn.py | 288 --- manage_externals/manic/sourcetree.py | 425 ---- manage_externals/manic/utils.py | 330 --- manage_externals/test/.coveragerc | 7 - manage_externals/test/.gitignore | 7 - manage_externals/test/.pylint.rc | 426 ---- manage_externals/test/Makefile | 124 -- manage_externals/test/README.md | 53 - manage_externals/test/doc/.gitignore | 2 - manage_externals/test/doc/Makefile | 20 - manage_externals/test/doc/conf.py | 172 -- manage_externals/test/doc/develop.rst | 202 -- manage_externals/test/doc/index.rst | 22 - manage_externals/test/doc/testing.rst | 123 -- manage_externals/test/repos/README.md | 33 - .../test/repos/container.git/HEAD | 1 - .../test/repos/container.git/config | 6 - 
.../test/repos/container.git/description | 1 - .../test/repos/container.git/info/exclude | 6 - .../41/1de5d96ee418c1c55f3e96e6e6e7c06bb95801 | Bin 133 -> 0 bytes .../71/5b8f3e4afe1802a178e1d603af404ba45d59de | Bin 136 -> 0 bytes .../b0/f87705e2b9601cb831878f3d51efa78b910d7b | Bin 89 -> 0 bytes .../f9/e08370a737e941de6f6492e3f427c2ef4c1a03 | Bin 81 -> 0 bytes .../repos/container.git/refs/heads/master | 1 - manage_externals/test/repos/error/readme.txt | 3 - .../test/repos/mixed-cont-ext.git/HEAD | 1 - .../test/repos/mixed-cont-ext.git/config | 6 - .../test/repos/mixed-cont-ext.git/description | 1 - .../repos/mixed-cont-ext.git/info/exclude | 6 - .../00/437ac2000d5f06fb8a572a01a5bbdae98b17cb | Bin 172 -> 0 bytes .../01/97458f2dbe5fcd6bc44fa46983be0a30282379 | Bin 171 -> 0 bytes .../06/ea30b03ffa2f8574705f8b9583f7ca7e2dccf7 | Bin 136 -> 0 bytes .../14/368b701616a8c53820b610414a4b9a07540cf6 | 1 - .../15/2b57e1cf23721cd17ff681cb9276e3fb9fc091 | 2 - .../1f/01fa46c17b1f38b37e6259f6e9d041bda3144f | Bin 167 -> 0 bytes .../37/f0e70b609adc90f4c09ee21d82ed1d79c81d69 | Bin 89 -> 0 bytes .../38/9a2b876b8965d3c91a3db8d28a483eaf019d5c | Bin 130 -> 0 bytes .../41/1de5d96ee418c1c55f3e96e6e6e7c06bb95801 | Bin 133 -> 0 bytes .../6e/9f4baa6e94a0af4e094836c2eb55ccedef5fc4 | Bin 129 -> 0 bytes .../6f/c379457ecb4e576a13c7610ae1fa73f845ee6a | 1 - .../93/a159deb9175bfeb2820a0006ddd92d78131332 | Bin 169 -> 0 bytes .../95/80ecc12f16334ce44e42287d5d46f927bb7b75 | 1 - .../a9/288dcd8a719a1f4ed3cba43a2a387ae7cd60fd | Bin 130 -> 0 bytes .../e8/ea32a11d30ee703f6f661ae7c2376f4ab84d38 | Bin 130 -> 0 bytes .../fd/15a5ad5204356229c60a831d2a8120a43ac901 | 2 - .../mixed-cont-ext.git/refs/heads/master | 1 - .../mixed-cont-ext.git/refs/heads/new-feature | 1 - .../test/repos/simple-ext-fork.git/HEAD | 1 - .../test/repos/simple-ext-fork.git/config | 8 - .../repos/simple-ext-fork.git/description | 1 - .../repos/simple-ext-fork.git/info/exclude | 6 - .../00/fd13e76189f9134b0506b4b8ed3172723b467f | Bin 89 -> 0 bytes .../0b/15e8af3d4615b42314216efeae3fff184046a8 | Bin 89 -> 0 bytes .../0b/67df4e7e8e6e1c6e401542738b352d18744677 | Bin 167 -> 0 bytes .../11/a76e3d9a67313dec7ce1230852ab5c86352c5c | 2 - .../16/5506a7408a482f50493434e13fffeb44af893f | Bin 89 -> 0 bytes .../24/4386e788c9bc608613e127a329c742450a60e4 | Bin 164 -> 0 bytes .../32/7e97d86e941047d809dba58f2804740c6c30cf | Bin 89 -> 0 bytes .../36/418b4e5665956a90725c9a1b5a8e551c5f3d48 | Bin 159 -> 0 bytes .../3d/7099c35404ae6c8640ce263b38bef06e98cc26 | 2 - .../3d/ec1fdf8e2f5edba28148c5db2fe8d7a842360b | 2 - .../41/1de5d96ee418c1c55f3e96e6e6e7c06bb95801 | Bin 133 -> 0 bytes .../4d/837135915ed93eed6fff6b439f284ce317296f | Bin 89 -> 0 bytes .../56/175e017ad38bf3d33d74b6bd7c74624b28466a | Bin 89 -> 0 bytes .../5f/1d4786d12e52d7ab28d2f2f1118c1059a9f1ae | Bin 93 -> 0 bytes .../67/136e5ab4d5c1c65d10c8048763b96b0e53c1d6 | Bin 165 -> 0 bytes .../7b/0bd630ac13865735a1dff3437a137d8ab50663 | Bin 119 -> 0 bytes .../88/cf20868e0cc445f5642a480ed034c71e0d7e9f | 2 - .../8d/2b3b35126224c975d23f109aa1e3cbac452989 | 2 - .../9b/75494003deca69527bb64bcaa352e801611dd2 | Bin 138 -> 0 bytes .../a2/2a5da9119328ea6d693f88861457c07e14ac04 | 1 - .../a4/2fe9144f5707bc1e9515ce1b44681f7aba6f95 | 3 - .../b9/3737be3ea6b19f6255983748a0a0f4d622f936 | Bin 89 -> 0 bytes .../c5/32bc8fde96fa63103a52057f0baffcc9f00c6b | 1 - .../c5/b315915742133dbdfbeed0753e481b55c1d364 | 1 - .../f2/68d4e56d067da9bd1d85e55bdc40a8bd2b0bca | 1 - .../repos/simple-ext-fork.git/packed-refs | 5 - .../simple-ext-fork.git/refs/heads/feature2 | 1 - 
.../refs/tags/abandoned-feature | 1 - .../refs/tags/forked-feature-v1 | 1 - .../test/repos/simple-ext.git/HEAD | 1 - .../test/repos/simple-ext.git/config | 6 - .../test/repos/simple-ext.git/description | 1 - .../test/repos/simple-ext.git/info/exclude | 6 - .../00/fd13e76189f9134b0506b4b8ed3172723b467f | Bin 89 -> 0 bytes .../09/0e1034746b2c865f7b0280813dbf4061a700e8 | Bin 164 -> 0 bytes .../0b/15e8af3d4615b42314216efeae3fff184046a8 | Bin 89 -> 0 bytes .../11/a76e3d9a67313dec7ce1230852ab5c86352c5c | 2 - .../14/2711fdbbcb8034d7cad6bae6801887b12fe61d | Bin 83 -> 0 bytes .../31/dbcd6de441e671a467ef317146539b7ffabb11 | Bin 90 -> 0 bytes .../36/418b4e5665956a90725c9a1b5a8e551c5f3d48 | Bin 159 -> 0 bytes .../41/1de5d96ee418c1c55f3e96e6e6e7c06bb95801 | Bin 133 -> 0 bytes .../60/7ec299c17dd285c029edc41a0109e49d441380 | Bin 168 -> 0 bytes .../60/b1cc1a38d63a4bcaa1e767262bbe23dbf9f5f5 | 2 - .../63/a99393d1baff97ccef967af30380659867b139 | 1 - .../95/3256da5612fcd9263590a353bc18c6f224e74f | 1 - .../9b/75494003deca69527bb64bcaa352e801611dd2 | Bin 138 -> 0 bytes .../a2/2a5da9119328ea6d693f88861457c07e14ac04 | 1 - .../b7/692b6d391899680da7b9b6fd8af4c413f06fe7 | Bin 137 -> 0 bytes .../c5/b315915742133dbdfbeed0753e481b55c1d364 | 1 - .../d1/163870d19c3dee34fada3a76b785cfa2a8424b | Bin 130 -> 0 bytes .../d8/ed2f33179d751937f8fde2e33921e4827babf4 | Bin 60 -> 0 bytes .../df/312890f93ba4d2c694208599b665c4a08afeff | Bin 89 -> 0 bytes .../repos/simple-ext.git/refs/heads/feature2 | 1 - .../repos/simple-ext.git/refs/heads/feature3 | 1 - .../repos/simple-ext.git/refs/heads/master | 1 - .../test/repos/simple-ext.git/refs/tags/tag1 | 1 - .../test/repos/simple-ext.git/refs/tags/tag2 | 1 - manage_externals/test/requirements.txt | 5 - manage_externals/test/test_sys_checkout.py | 1896 ----------------- .../test/test_sys_repository_git.py | 238 --- .../test/test_unit_externals_description.py | 478 ----- .../test/test_unit_externals_status.py | 299 --- manage_externals/test/test_unit_repository.py | 208 -- .../test/test_unit_repository_git.py | 811 ------- .../test/test_unit_repository_svn.py | 501 ----- manage_externals/test/test_unit_utils.py | 350 --- 138 files changed, 10270 deletions(-) delete mode 100644 Externals.cfg delete mode 100644 Externals_CAM.cfg delete mode 100644 manage_externals/.dir_locals.el delete mode 100644 manage_externals/.github/ISSUE_TEMPLATE.md delete mode 100644 manage_externals/.github/PULL_REQUEST_TEMPLATE.md delete mode 100644 manage_externals/.github/workflows/bumpversion.yml delete mode 100644 manage_externals/.github/workflows/tests.yml delete mode 100644 manage_externals/.gitignore delete mode 100644 manage_externals/.travis.yml delete mode 100644 manage_externals/LICENSE.txt delete mode 100644 manage_externals/README.md delete mode 100644 manage_externals/README_FIRST delete mode 100755 manage_externals/checkout_externals delete mode 100644 manage_externals/manic/__init__.py delete mode 100755 manage_externals/manic/checkout.py delete mode 100644 manage_externals/manic/externals_description.py delete mode 100644 manage_externals/manic/externals_status.py delete mode 100644 manage_externals/manic/global_constants.py delete mode 100644 manage_externals/manic/repository.py delete mode 100644 manage_externals/manic/repository_factory.py delete mode 100644 manage_externals/manic/repository_git.py delete mode 100644 manage_externals/manic/repository_svn.py delete mode 100644 manage_externals/manic/sourcetree.py delete mode 100644 manage_externals/manic/utils.py delete mode 100644 
manage_externals/test/.coveragerc delete mode 100644 manage_externals/test/.gitignore delete mode 100644 manage_externals/test/.pylint.rc delete mode 100644 manage_externals/test/Makefile delete mode 100644 manage_externals/test/README.md delete mode 100644 manage_externals/test/doc/.gitignore delete mode 100644 manage_externals/test/doc/Makefile delete mode 100644 manage_externals/test/doc/conf.py delete mode 100644 manage_externals/test/doc/develop.rst delete mode 100644 manage_externals/test/doc/index.rst delete mode 100644 manage_externals/test/doc/testing.rst delete mode 100644 manage_externals/test/repos/README.md delete mode 100644 manage_externals/test/repos/container.git/HEAD delete mode 100644 manage_externals/test/repos/container.git/config delete mode 100644 manage_externals/test/repos/container.git/description delete mode 100644 manage_externals/test/repos/container.git/info/exclude delete mode 100644 manage_externals/test/repos/container.git/objects/41/1de5d96ee418c1c55f3e96e6e6e7c06bb95801 delete mode 100644 manage_externals/test/repos/container.git/objects/71/5b8f3e4afe1802a178e1d603af404ba45d59de delete mode 100644 manage_externals/test/repos/container.git/objects/b0/f87705e2b9601cb831878f3d51efa78b910d7b delete mode 100644 manage_externals/test/repos/container.git/objects/f9/e08370a737e941de6f6492e3f427c2ef4c1a03 delete mode 100644 manage_externals/test/repos/container.git/refs/heads/master delete mode 100644 manage_externals/test/repos/error/readme.txt delete mode 100644 manage_externals/test/repos/mixed-cont-ext.git/HEAD delete mode 100644 manage_externals/test/repos/mixed-cont-ext.git/config delete mode 100644 manage_externals/test/repos/mixed-cont-ext.git/description delete mode 100644 manage_externals/test/repos/mixed-cont-ext.git/info/exclude delete mode 100644 manage_externals/test/repos/mixed-cont-ext.git/objects/00/437ac2000d5f06fb8a572a01a5bbdae98b17cb delete mode 100644 manage_externals/test/repos/mixed-cont-ext.git/objects/01/97458f2dbe5fcd6bc44fa46983be0a30282379 delete mode 100644 manage_externals/test/repos/mixed-cont-ext.git/objects/06/ea30b03ffa2f8574705f8b9583f7ca7e2dccf7 delete mode 100644 manage_externals/test/repos/mixed-cont-ext.git/objects/14/368b701616a8c53820b610414a4b9a07540cf6 delete mode 100644 manage_externals/test/repos/mixed-cont-ext.git/objects/15/2b57e1cf23721cd17ff681cb9276e3fb9fc091 delete mode 100644 manage_externals/test/repos/mixed-cont-ext.git/objects/1f/01fa46c17b1f38b37e6259f6e9d041bda3144f delete mode 100644 manage_externals/test/repos/mixed-cont-ext.git/objects/37/f0e70b609adc90f4c09ee21d82ed1d79c81d69 delete mode 100644 manage_externals/test/repos/mixed-cont-ext.git/objects/38/9a2b876b8965d3c91a3db8d28a483eaf019d5c delete mode 100644 manage_externals/test/repos/mixed-cont-ext.git/objects/41/1de5d96ee418c1c55f3e96e6e6e7c06bb95801 delete mode 100644 manage_externals/test/repos/mixed-cont-ext.git/objects/6e/9f4baa6e94a0af4e094836c2eb55ccedef5fc4 delete mode 100644 manage_externals/test/repos/mixed-cont-ext.git/objects/6f/c379457ecb4e576a13c7610ae1fa73f845ee6a delete mode 100644 manage_externals/test/repos/mixed-cont-ext.git/objects/93/a159deb9175bfeb2820a0006ddd92d78131332 delete mode 100644 manage_externals/test/repos/mixed-cont-ext.git/objects/95/80ecc12f16334ce44e42287d5d46f927bb7b75 delete mode 100644 manage_externals/test/repos/mixed-cont-ext.git/objects/a9/288dcd8a719a1f4ed3cba43a2a387ae7cd60fd delete mode 100644 manage_externals/test/repos/mixed-cont-ext.git/objects/e8/ea32a11d30ee703f6f661ae7c2376f4ab84d38 delete mode 
100644 manage_externals/test/repos/mixed-cont-ext.git/objects/fd/15a5ad5204356229c60a831d2a8120a43ac901 delete mode 100644 manage_externals/test/repos/mixed-cont-ext.git/refs/heads/master delete mode 100644 manage_externals/test/repos/mixed-cont-ext.git/refs/heads/new-feature delete mode 100644 manage_externals/test/repos/simple-ext-fork.git/HEAD delete mode 100644 manage_externals/test/repos/simple-ext-fork.git/config delete mode 100644 manage_externals/test/repos/simple-ext-fork.git/description delete mode 100644 manage_externals/test/repos/simple-ext-fork.git/info/exclude delete mode 100644 manage_externals/test/repos/simple-ext-fork.git/objects/00/fd13e76189f9134b0506b4b8ed3172723b467f delete mode 100644 manage_externals/test/repos/simple-ext-fork.git/objects/0b/15e8af3d4615b42314216efeae3fff184046a8 delete mode 100644 manage_externals/test/repos/simple-ext-fork.git/objects/0b/67df4e7e8e6e1c6e401542738b352d18744677 delete mode 100644 manage_externals/test/repos/simple-ext-fork.git/objects/11/a76e3d9a67313dec7ce1230852ab5c86352c5c delete mode 100644 manage_externals/test/repos/simple-ext-fork.git/objects/16/5506a7408a482f50493434e13fffeb44af893f delete mode 100644 manage_externals/test/repos/simple-ext-fork.git/objects/24/4386e788c9bc608613e127a329c742450a60e4 delete mode 100644 manage_externals/test/repos/simple-ext-fork.git/objects/32/7e97d86e941047d809dba58f2804740c6c30cf delete mode 100644 manage_externals/test/repos/simple-ext-fork.git/objects/36/418b4e5665956a90725c9a1b5a8e551c5f3d48 delete mode 100644 manage_externals/test/repos/simple-ext-fork.git/objects/3d/7099c35404ae6c8640ce263b38bef06e98cc26 delete mode 100644 manage_externals/test/repos/simple-ext-fork.git/objects/3d/ec1fdf8e2f5edba28148c5db2fe8d7a842360b delete mode 100644 manage_externals/test/repos/simple-ext-fork.git/objects/41/1de5d96ee418c1c55f3e96e6e6e7c06bb95801 delete mode 100644 manage_externals/test/repos/simple-ext-fork.git/objects/4d/837135915ed93eed6fff6b439f284ce317296f delete mode 100644 manage_externals/test/repos/simple-ext-fork.git/objects/56/175e017ad38bf3d33d74b6bd7c74624b28466a delete mode 100644 manage_externals/test/repos/simple-ext-fork.git/objects/5f/1d4786d12e52d7ab28d2f2f1118c1059a9f1ae delete mode 100644 manage_externals/test/repos/simple-ext-fork.git/objects/67/136e5ab4d5c1c65d10c8048763b96b0e53c1d6 delete mode 100644 manage_externals/test/repos/simple-ext-fork.git/objects/7b/0bd630ac13865735a1dff3437a137d8ab50663 delete mode 100644 manage_externals/test/repos/simple-ext-fork.git/objects/88/cf20868e0cc445f5642a480ed034c71e0d7e9f delete mode 100644 manage_externals/test/repos/simple-ext-fork.git/objects/8d/2b3b35126224c975d23f109aa1e3cbac452989 delete mode 100644 manage_externals/test/repos/simple-ext-fork.git/objects/9b/75494003deca69527bb64bcaa352e801611dd2 delete mode 100644 manage_externals/test/repos/simple-ext-fork.git/objects/a2/2a5da9119328ea6d693f88861457c07e14ac04 delete mode 100644 manage_externals/test/repos/simple-ext-fork.git/objects/a4/2fe9144f5707bc1e9515ce1b44681f7aba6f95 delete mode 100644 manage_externals/test/repos/simple-ext-fork.git/objects/b9/3737be3ea6b19f6255983748a0a0f4d622f936 delete mode 100644 manage_externals/test/repos/simple-ext-fork.git/objects/c5/32bc8fde96fa63103a52057f0baffcc9f00c6b delete mode 100644 manage_externals/test/repos/simple-ext-fork.git/objects/c5/b315915742133dbdfbeed0753e481b55c1d364 delete mode 100644 manage_externals/test/repos/simple-ext-fork.git/objects/f2/68d4e56d067da9bd1d85e55bdc40a8bd2b0bca delete mode 100644 
manage_externals/test/repos/simple-ext-fork.git/packed-refs delete mode 100644 manage_externals/test/repos/simple-ext-fork.git/refs/heads/feature2 delete mode 100644 manage_externals/test/repos/simple-ext-fork.git/refs/tags/abandoned-feature delete mode 100644 manage_externals/test/repos/simple-ext-fork.git/refs/tags/forked-feature-v1 delete mode 100644 manage_externals/test/repos/simple-ext.git/HEAD delete mode 100644 manage_externals/test/repos/simple-ext.git/config delete mode 100644 manage_externals/test/repos/simple-ext.git/description delete mode 100644 manage_externals/test/repos/simple-ext.git/info/exclude delete mode 100644 manage_externals/test/repos/simple-ext.git/objects/00/fd13e76189f9134b0506b4b8ed3172723b467f delete mode 100644 manage_externals/test/repos/simple-ext.git/objects/09/0e1034746b2c865f7b0280813dbf4061a700e8 delete mode 100644 manage_externals/test/repos/simple-ext.git/objects/0b/15e8af3d4615b42314216efeae3fff184046a8 delete mode 100644 manage_externals/test/repos/simple-ext.git/objects/11/a76e3d9a67313dec7ce1230852ab5c86352c5c delete mode 100644 manage_externals/test/repos/simple-ext.git/objects/14/2711fdbbcb8034d7cad6bae6801887b12fe61d delete mode 100644 manage_externals/test/repos/simple-ext.git/objects/31/dbcd6de441e671a467ef317146539b7ffabb11 delete mode 100644 manage_externals/test/repos/simple-ext.git/objects/36/418b4e5665956a90725c9a1b5a8e551c5f3d48 delete mode 100644 manage_externals/test/repos/simple-ext.git/objects/41/1de5d96ee418c1c55f3e96e6e6e7c06bb95801 delete mode 100644 manage_externals/test/repos/simple-ext.git/objects/60/7ec299c17dd285c029edc41a0109e49d441380 delete mode 100644 manage_externals/test/repos/simple-ext.git/objects/60/b1cc1a38d63a4bcaa1e767262bbe23dbf9f5f5 delete mode 100644 manage_externals/test/repos/simple-ext.git/objects/63/a99393d1baff97ccef967af30380659867b139 delete mode 100644 manage_externals/test/repos/simple-ext.git/objects/95/3256da5612fcd9263590a353bc18c6f224e74f delete mode 100644 manage_externals/test/repos/simple-ext.git/objects/9b/75494003deca69527bb64bcaa352e801611dd2 delete mode 100644 manage_externals/test/repos/simple-ext.git/objects/a2/2a5da9119328ea6d693f88861457c07e14ac04 delete mode 100644 manage_externals/test/repos/simple-ext.git/objects/b7/692b6d391899680da7b9b6fd8af4c413f06fe7 delete mode 100644 manage_externals/test/repos/simple-ext.git/objects/c5/b315915742133dbdfbeed0753e481b55c1d364 delete mode 100644 manage_externals/test/repos/simple-ext.git/objects/d1/163870d19c3dee34fada3a76b785cfa2a8424b delete mode 100644 manage_externals/test/repos/simple-ext.git/objects/d8/ed2f33179d751937f8fde2e33921e4827babf4 delete mode 100644 manage_externals/test/repos/simple-ext.git/objects/df/312890f93ba4d2c694208599b665c4a08afeff delete mode 100644 manage_externals/test/repos/simple-ext.git/refs/heads/feature2 delete mode 100644 manage_externals/test/repos/simple-ext.git/refs/heads/feature3 delete mode 100644 manage_externals/test/repos/simple-ext.git/refs/heads/master delete mode 100644 manage_externals/test/repos/simple-ext.git/refs/tags/tag1 delete mode 100644 manage_externals/test/repos/simple-ext.git/refs/tags/tag2 delete mode 100644 manage_externals/test/requirements.txt delete mode 100644 manage_externals/test/test_sys_checkout.py delete mode 100644 manage_externals/test/test_sys_repository_git.py delete mode 100644 manage_externals/test/test_unit_externals_description.py delete mode 100644 manage_externals/test/test_unit_externals_status.py delete mode 100644 manage_externals/test/test_unit_repository.py delete 
mode 100644 manage_externals/test/test_unit_repository_git.py delete mode 100755 manage_externals/test/test_unit_repository_svn.py delete mode 100644 manage_externals/test/test_unit_utils.py diff --git a/Externals.cfg b/Externals.cfg deleted file mode 100644 index ad6907e697..0000000000 --- a/Externals.cfg +++ /dev/null @@ -1,118 +0,0 @@ -[ccs_config] -tag = ccs_config_cesm0.0.106 -protocol = git -repo_url = https://github.com/ESMCI/ccs_config_cesm -local_path = ccs_config -required = True - -[cice5] -tag = cice5_20220204 -protocol = git -repo_url = https://github.com/ESCOMP/CESM_CICE5 -local_path = components/cice5 -required = True - -[cice6] -tag = cesm_cice6_5_0_7 -protocol = git -repo_url = https://github.com/ESCOMP/CESM_CICE -local_path = components/cice -externals = Externals.cfg -required = True - -[cmeps] -tag = cmeps0.14.60 -protocol = git -repo_url = https://github.com/ESCOMP/CMEPS.git -local_path = components/cmeps -required = True - -[cdeps] -tag = cdeps1.0.33 -protocol = git -repo_url = https://github.com/ESCOMP/CDEPS.git -local_path = components/cdeps -externals = Externals_CDEPS.cfg -required = True - -[cpl7] -tag = cpl77.0.8 -protocol = git -repo_url = https://github.com/ESCOMP/CESM_CPL7andDataComps -local_path = components/cpl7 -required = True - -[share] -tag = share1.0.18 -protocol = git -repo_url = https://github.com/ESCOMP/CESM_share -local_path = share -required = True - -[mct] -tag = MCT_2.11.0 -protocol = git -repo_url = https://github.com/MCSclimate/MCT -local_path = libraries/mct -required = True - -[parallelio] -tag = pio2_6_2 -protocol = git -repo_url = https://github.com/NCAR/ParallelIO -local_path = libraries/parallelio -required = True - -[cime] -tag = cime6.0.236_httpsbranch01 -protocol = git -repo_url = https://github.com/ESMCI/cime -local_path = cime -required = True - -[cism] -tag = cismwrap_2_1_100 -protocol = git -repo_url = https://github.com/ESCOMP/CISM-wrapper -local_path = components/cism -externals = Externals_CISM.cfg -required = True - -[clm] -tag = ctsm5.2.005 -protocol = git -repo_url = https://github.com/ESCOMP/CTSM -local_path = components/clm -externals = Externals_CLM.cfg -required = True - -[fms] -tag = fi_230818 -protocol = git -repo_url = https://github.com/ESCOMP/FMS_interface -local_path = libraries/FMS -externals = Externals_FMS.cfg -required = True - -[mosart] -tag = mosart1_0_49 -protocol = git -repo_url = https://github.com/ESCOMP/MOSART -local_path = components/mosart -required = True - -[rtm] -tag = rtm1_0_79 -protocol = git -repo_url = https://github.com/ESCOMP/RTM -local_path = components/rtm -required = True - -[cam] -local_path = . 
-protocol = externals_only -externals = Externals_CAM.cfg -required = True - -[externals_description] -schema_version = 1.0.0 diff --git a/Externals_CAM.cfg b/Externals_CAM.cfg deleted file mode 100644 index a1c76e7db5..0000000000 --- a/Externals_CAM.cfg +++ /dev/null @@ -1,105 +0,0 @@ -[chem_proc] -local_path = chem_proc -protocol = git -repo_url = https://github.com/ESCOMP/CHEM_PREPROCESSOR.git -tag = chem_proc5_0_05 -required = True - -[carma] -local_path = src/physics/carma/base -protocol = git -repo_url = https://github.com/ESCOMP/CARMA_base.git -tag = carma4_01 -required = True - -[cosp2] -local_path = src/physics/cosp2/src -protocol = git -repo_url = https://github.com/CFMIP/COSPv2.0 -sparse = ../.cosp_sparse_checkout -tag = v2.1.4cesm -required = True - -[clubb] -local_path = src/physics/clubb -protocol = git -repo_url = https://github.com/larson-group/clubb_release -sparse = ../.clubb_sparse_checkout -tag = clubb_4ncar_20231115_5406350 -required = True - -[pumas] -local_path = src/physics/pumas -protocol = git -repo_url = https://github.com/ESCOMP/PUMAS -tag = pumas_cam-release_v1.36 -required = True - -[pumas-frozen] -local_path = src/physics/pumas-frozen -protocol = git -repo_url = https://github.com/ESCOMP/PUMAS -tag = pumas_cam-release_v1.17_rename -required = True - -[ali_arms] -local_path = src/physics/ali_arms -protocol = git -repo_url = https://github.com/ESCOMP/ALI-ARMS -tag = ALI_ARMS_v1.0.1 -required = True - -[atmos_phys] -tag = atmos_phys0_02_006 -protocol = git -repo_url = https://github.com/ESCOMP/atmospheric_physics -required = True -local_path = src/atmos_phys - -[fv3] -tag = fv3int_022824 -protocol = git -repo_url = https://github.com/ESCOMP/CAM_FV3_interface.git -local_path = src/dynamics/fv3 -externals = Externals_FV3.cfg -required = True - -[mpas] -local_path = src/dynamics/mpas/dycore -protocol = git -repo_url = https://github.com/MPAS-Dev/MPAS-Model.git -sparse = ../.mpas_sparse_checkout -hash = b8c33daa -required = True - -[geoschem] -local_path = src/chemistry/geoschem/geoschem_src -protocol = git -repo_url = https://github.com/geoschem/geos-chem.git -tag = 14.1.2 -required = True - -[hemco] -local_path = src/hemco -tag = hemco-cesm1_2_1_hemco3_6_3_cesm -protocol = git -repo_url = https://github.com/ESCOMP/HEMCO_CESM.git -required = True -externals = Externals_HCO.cfg - -[rte-rrtmgp] -local_path = src/physics/rrtmgp/ext -protocol = git -repo_url = https://github.com/earth-system-radiation/rte-rrtmgp.git -tag = v1.7 -required = True - -[rrtmgp-data] -local_path = src/physics/rrtmgp/data -protocol = git -repo_url = https://github.com/earth-system-radiation/rrtmgp-data.git -tag = v1.8 -required = True - -[externals_description] -schema_version = 1.0.0 diff --git a/manage_externals/.dir_locals.el b/manage_externals/.dir_locals.el deleted file mode 100644 index a370490e92..0000000000 --- a/manage_externals/.dir_locals.el +++ /dev/null @@ -1,12 +0,0 @@ -; -*- mode: Lisp -*- - -((python-mode - . ( - ;; fill the paragraph to 80 columns when using M-q - (fill-column . 80) - - ;; Use 4 spaces to indent in Python - (python-indent-offset . 4) - (indent-tabs-mode . 
nil) - ))) - diff --git a/manage_externals/.github/ISSUE_TEMPLATE.md b/manage_externals/.github/ISSUE_TEMPLATE.md deleted file mode 100644 index 8ecb2ae64b..0000000000 --- a/manage_externals/.github/ISSUE_TEMPLATE.md +++ /dev/null @@ -1,6 +0,0 @@ -### Summary of Issue: -### Expected behavior and actual behavior: -### Steps to reproduce the problem (should include model description file(s) or link to publi c repository): -### What is the changeset ID of the code, and the machine you are using: -### have you modified the code? If so, it must be committed and available for testing: -### Screen output or log file showing the error message and context: diff --git a/manage_externals/.github/PULL_REQUEST_TEMPLATE.md b/manage_externals/.github/PULL_REQUEST_TEMPLATE.md deleted file mode 100644 index b68b1fb5e2..0000000000 --- a/manage_externals/.github/PULL_REQUEST_TEMPLATE.md +++ /dev/null @@ -1,17 +0,0 @@ -[ 50 character, one line summary ] - -[ Description of the changes in this commit. It should be enough - information for someone not following this development to understand. - Lines should be wrapped at about 72 characters. ] - -User interface changes?: [ No/Yes ] -[ If yes, describe what changed, and steps taken to ensure backward compatibilty ] - -Fixes: [Github issue #s] And brief description of each issue. - -Testing: - test removed: - unit tests: - system tests: - manual testing: - diff --git a/manage_externals/.github/workflows/bumpversion.yml b/manage_externals/.github/workflows/bumpversion.yml deleted file mode 100644 index f4dc9b7ca5..0000000000 --- a/manage_externals/.github/workflows/bumpversion.yml +++ /dev/null @@ -1,19 +0,0 @@ -name: Bump version -on: - push: - branches: - - main -jobs: - build: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v3 - - name: Bump version and push tag - id: tag_version - uses: mathieudutour/github-tag-action@v5.5 - with: - github_token: ${{ secrets.GITHUB_TOKEN }} - create_annotated_tag: true - default_bump: patch - dry_run: false - tag_prefix: manic- diff --git a/manage_externals/.github/workflows/tests.yml b/manage_externals/.github/workflows/tests.yml deleted file mode 100644 index dd75b91b49..0000000000 --- a/manage_externals/.github/workflows/tests.yml +++ /dev/null @@ -1,30 +0,0 @@ -# This is a workflow to compile the cmeps source without cime -name: Test Manic - -# Controls when the action will run. 
Triggers the workflow on push or pull request -# events but only for the master branch -on: - push: - branches: [ main ] - pull_request: - branches: [ main ] - -# A workflow run is made up of one or more jobs that can run sequentially or in parallel -jobs: - test-manic: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v3 - - name: Test Manic - run: | - pushd test - git config --global user.email "devnull@example.com" - git config --global user.name "GITHUB tester" - git config --global protocol.file.allow always - make utest - make stest - popd - - - name: Setup tmate session - if: ${{ failure() }} - uses: mxschmitt/action-tmate@v3 diff --git a/manage_externals/.gitignore b/manage_externals/.gitignore deleted file mode 100644 index a71ac0cd75..0000000000 --- a/manage_externals/.gitignore +++ /dev/null @@ -1,17 +0,0 @@ -# directories that are checked out by the tool -cime/ -cime_config/ -components/ - -# generated local files -*.log - -# editor files -*~ -*.bak - -# generated python files -*.pyc - -# test tmp file -test/tmp diff --git a/manage_externals/.travis.yml b/manage_externals/.travis.yml deleted file mode 100644 index d9b24c584d..0000000000 --- a/manage_externals/.travis.yml +++ /dev/null @@ -1,18 +0,0 @@ -language: python -os: linux -python: - - "3.4" - - "3.5" - - "3.6" - - "3.7" - - "3.8" -install: - - pip install -r test/requirements.txt -before_script: - - git --version -script: - - cd test; make test - - cd test; make lint -after_success: - - cd test; make coverage - - cd test; coveralls diff --git a/manage_externals/LICENSE.txt b/manage_externals/LICENSE.txt deleted file mode 100644 index 665ee03fbc..0000000000 --- a/manage_externals/LICENSE.txt +++ /dev/null @@ -1,34 +0,0 @@ -Copyright (c) 2017-2018, University Corporation for Atmospheric Research (UCAR) -All rights reserved. - -Developed by: - University Corporation for Atmospheric Research - National Center for Atmospheric Research - https://www2.cesm.ucar.edu/working-groups/sewg - -Permission is hereby granted, free of charge, to any person obtaining -a copy of this software and associated documentation files (the "Software"), -to deal with the Software without restriction, including without limitation -the rights to use, copy, modify, merge, publish, distribute, sublicense, -and/or sell copies of the Software, and to permit persons to whom -the Software is furnished to do so, subject to the following conditions: - - - Redistributions of source code must retain the above copyright notice, - this list of conditions and the following disclaimers. - - Redistributions in binary form must reproduce the above copyright notice, - this list of conditions and the following disclaimers in the documentation - and/or other materials provided with the distribution. - - Neither the names of [Name of Development Group, UCAR], - nor the names of its contributors may be used to endorse or promote - products derived from this Software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" -AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE -IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE -ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE -LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR -CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF -SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS -INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN -CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) -ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -POSSIBILITY OF SUCH DAMAGE. diff --git a/manage_externals/README.md b/manage_externals/README.md deleted file mode 100644 index 9475301b5d..0000000000 --- a/manage_externals/README.md +++ /dev/null @@ -1,231 +0,0 @@ --- AUTOMATICALLY GENERATED FILE. DO NOT EDIT -- - -[![Build Status](https://travis-ci.org/ESMCI/manage_externals.svg?branch=master)](https://travis-ci.org/ESMCI/manage_externals)[![Coverage Status](https://coveralls.io/repos/github/ESMCI/manage_externals/badge.svg?branch=master)](https://coveralls.io/github/ESMCI/manage_externals?branch=master) -``` -usage: checkout_externals [-h] [-e [EXTERNALS]] [-o] [-S] [-v] [--backtrace] - [-d] [--no-logging] - -checkout_externals manages checking out groups of externals from revision -control based on a externals description file. By default only the -required externals are checkout out. - -Operations performed by manage_externals utilities are explicit and -data driven. checkout_externals will always make the working copy *exactly* -match what is in the externals file when modifying the working copy of -a repository. - -If checkout_externals isn't doing what you expected, double check the contents -of the externals description file. - -Running checkout_externals without the '--status' option will always attempt to -synchronize the working copy to exactly match the externals description. - -optional arguments: - -h, --help show this help message and exit - -e [EXTERNALS], --externals [EXTERNALS] - The externals description filename. Default: - Externals.cfg. - -o, --optional By default only the required externals are checked - out. This flag will also checkout the optional - externals. - -S, --status Output status of the repositories managed by - checkout_externals. By default only summary - information is provided. Use verbose output to see - details. - -v, --verbose Output additional information to the screen and log - file. This flag can be used up to two times, - increasing the verbosity level each time. - --backtrace DEVELOPER: show exception backtraces as extra - debugging output - -d, --debug DEVELOPER: output additional debugging information to - the screen and log file. - --no-logging DEVELOPER: disable logging. - -``` -NOTE: checkout_externals *MUST* be run from the root of the source tree it -is managing. For example, if you cloned a repository with: - - $ git clone git@github.com/{SOME_ORG}/some-project some-project-dev - -Then the root of the source tree is /path/to/some-project-dev. If you -obtained a sub-project via a checkout of another project: - - $ git clone git@github.com/{SOME_ORG}/some-project some-project-dev - -and you need to checkout the sub-project externals, then the root of the -source tree is /path/to/some-project-dev. Do *NOT* run checkout_externals -from within /path/to/some-project-dev/sub-project - -The root of the source tree will be referred to as `${SRC_ROOT}` below. 
- -# Supported workflows - - * Checkout all required components from the default externals - description file: - - $ cd ${SRC_ROOT} - $ ./manage_externals/checkout_externals - - * To update all required components to the current values in the - externals description file, re-run checkout_externals: - - $ cd ${SRC_ROOT} - $ ./manage_externals/checkout_externals - - If there are *any* modifications to *any* working copy according - to the git or svn 'status' command, checkout_externals - will not update any external repositories. Modifications - include: modified files, added files, removed files, or missing - files. - - To avoid this safety check, edit the externals description file - and comment out the modified external block. - - * Checkout all required components from a user specified externals - description file: - - $ cd ${SRC_ROOT} - $ ./manage_externals/checkout_externals --externals my-externals.cfg - - * Status summary of the repositories managed by checkout_externals: - - $ cd ${SRC_ROOT} - $ ./manage_externals/checkout_externals --status - - ./cime - s ./components/cism - ./components/mosart - e-o ./components/rtm - M ./src/fates - e-o ./tools/PTCLM - - where: - * column one indicates the status of the repository in relation - to the externals description file. - * column two indicates whether the working copy has modified files. - * column three shows how the repository is managed, optional or required - - Column one will be one of these values: - * s : out-of-sync : repository is checked out at a different commit - compared with the externals description - * e : empty : directory does not exist - checkout_externals has not been run - * ? : unknown : directory exists but .git or .svn directories are missing - - Column two will be one of these values: - * M : Modified : modified, added, deleted or missing files - * : blank / space : clean - * - : dash : no meaningful state, for empty repositories - - Column three will be one of these values: - * o : optional : optionally repository - * : blank / space : required repository - - * Detailed git or svn status of the repositories managed by checkout_externals: - - $ cd ${SRC_ROOT} - $ ./manage_externals/checkout_externals --status --verbose - -# Externals description file - - The externals description contains a list of the external - repositories that are used and their version control locations. The - file format is the standard ini/cfg configuration file format. Each - external is defined by a section containing the component name in - square brackets: - - * name (string) : component name, e.g. [cime], [cism], etc. - - Each section has the following keyword-value pairs: - - * required (boolean) : whether the component is a required checkout, - 'true' or 'false'. - - * local_path (string) : component path *relative* to where - checkout_externals is called. - - * protoctol (string) : version control protocol that is used to - manage the component. Valid values are 'git', 'svn', - 'externals_only'. - - Switching an external between different protocols is not - supported, e.g. from svn to git. To switch protocols, you need to - manually move the old working copy to a new location. - - Note: 'externals_only' will only process the external's own - external description file without trying to manage a repository - for the component. This is used for retreiving externals for - standalone components like cam and clm. 
If the source root of the - externals_only component is the same as the main source root, then - the local path must be set to '.', the unix current working - directory, e. g. 'local_path = .' - - * repo_url (string) : URL for the repository location, examples: - * https://svn-ccsm-models.cgd.ucar.edu/glc - * git@github.com:esmci/cime.git - * /path/to/local/repository - * . - - NOTE: To operate on only the local clone and and ignore remote - repositories, set the url to '.' (the unix current path), - i.e. 'repo_url = .' . This can be used to checkout a local branch - instead of the upstream branch. - - If a repo url is determined to be a local path (not a network url) - then user expansion, e.g. ~/, and environment variable expansion, - e.g. $HOME or $REPO_ROOT, will be performed. - - Relative paths are difficult to get correct, especially for mixed - use repos. It is advised that local paths expand to absolute paths. - If relative paths are used, they should be relative to one level - above local_path. If local path is 'src/foo', the the relative url - should be relative to 'src'. - - * tag (string) : tag to checkout - - * hash (string) : the git hash to checkout. Only applies to git - repositories. - - * branch (string) : branch to checkout from the specified - repository. Specifying a branch on a remote repository means that - checkout_externals will checkout the version of the branch in the remote, - not the the version in the local repository (if it exists). - - Note: one and only one of tag, branch hash must be supplied. - - * externals (string) : used to make manage_externals aware of - sub-externals required by an external. This is a relative path to - the external's root directory. For example, the main externals - description has an external checkout out at 'src/useful_library'. - useful_library requires additional externals to be complete. - Those additional externals are managed from the source root by the - externals description file pointed 'useful_library/sub-xternals.cfg', - Then the main 'externals' field in the top level repo should point to - 'sub-externals.cfg'. - Note that by default, `checkout_externals` will clone an external's - submodules. As a special case, the entry, `externals = None`, will - prevent this behavior. For more control over which externals are - checked out, create an externals file (and see the `from_submodule` - configuration entry below). - - * from_submodule (True / False) : used to pull the repo_url, local_path, - and hash properties for this external from the .gitmodules file in - this repository. Note that the section name (the entry in square - brackets) must match the name in the .gitmodules file. - If from_submodule is True, the protocol must be git and no repo_url, - local_path, hash, branch, or tag entries are allowed. - Default: False - - * sparse (string) : used to control a sparse checkout. This optional - entry should point to a filename (path relative to local_path) that - contains instructions on which repository paths to include (or - exclude) from the working tree. - See the "SPARSE CHECKOUT" section of https://git-scm.com/docs/git-read-tree - Default: sparse checkout is disabled - - * Lines begining with '#' or ';' are comments and will be ignored. - -# Obtaining this tool, reporting issues, etc. - - The master repository for manage_externals is - https://github.com/ESMCI/manage_externals. Any issues with this tool - should be reported there. 
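As a quick illustration of the cfg layout documented above, the following is a minimal, hypothetical sketch (not part of manage_externals or of this patch) that uses Python's standard configparser to list the externals declared in an Externals.cfg-style file. The file name and key names simply follow the description above; real parsing and validation are done by the manic library.

```
#!/usr/bin/env python3
"""Sketch: list the externals declared in an Externals.cfg-style file.

Only an illustration of the ini/cfg layout described above; it is not part
of checkout_externals and performs no real validation.
"""
import configparser


def list_externals(file_name='Externals.cfg'):
    """Print one summary line per external defined in file_name."""
    config = configparser.ConfigParser()
    config.read(file_name)
    for section in config.sections():
        # The [externals_description] section only carries the schema version.
        if section == 'externals_description':
            continue
        required = config.getboolean(section, 'required', fallback=False)
        protocol = config.get(section, 'protocol', fallback='')
        repo_url = config.get(section, 'repo_url', fallback='')
        # One (and only one) of tag, branch, or hash selects the revision.
        ref = (config.get(section, 'tag', fallback='')
               or config.get(section, 'branch', fallback='')
               or config.get(section, 'hash', fallback=''))
        print('{0}: required={1} protocol={2} url={3} ref={4}'.format(
            section, required, protocol, repo_url, ref))


if __name__ == '__main__':
    list_externals()
```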
diff --git a/manage_externals/README_FIRST b/manage_externals/README_FIRST deleted file mode 100644 index c8a47d7806..0000000000 --- a/manage_externals/README_FIRST +++ /dev/null @@ -1,54 +0,0 @@ -CESM is comprised of a number of different components that are -developed and managed independently. Each component may have -additional 'external' dependancies and optional parts that are also -developed and managed independently. - -The checkout_externals.py tool manages retreiving and updating the -components and their externals so you have a complete set of source -files for the model. - -checkout_externals.py relies on a model description file that -describes what components are needed, where to find them and where to -put them in the source tree. The default file is called "CESM.xml" -regardless of whether you are checking out CESM or a standalone -component. - -checkout_externals requires access to git and svn repositories that -require authentication. checkout_externals may pass through -authentication requests, but it will not cache them for you. For the -best and most robust user experience, you should have svn and git -working without password authentication. See: - - https://help.github.com/articles/connecting-to-github-with-ssh/ - - ?svn ref? - -NOTE: checkout_externals.py *MUST* be run from the root of the source -tree it is managing. For example, if you cloned CLM with: - - $ git clone git@github.com/ncar/clm clm-dev - -Then the root of the source tree is /path/to/cesm-dev. If you obtained -CLM via an svn checkout of CESM and you need to checkout the CLM -externals, then the root of the source tree for CLM is: - - /path/to/cesm-dev/components/clm - -The root of the source tree will be referred to as ${SRC_ROOT} below. - -To get started quickly, checkout all required components from the -default model description file: - - $ cd ${SRC_ROOT} - $ ./checkout_cesm/checkout_externals.py - -For additional information about using checkout model, please see: - - ${SRC_ROOT}/checkout_cesm/README - -or run: - - $ cd ${SRC_ROOT} - $ ./checkout_cesm/checkout_externals.py --help - - diff --git a/manage_externals/checkout_externals b/manage_externals/checkout_externals deleted file mode 100755 index 48bce24010..0000000000 --- a/manage_externals/checkout_externals +++ /dev/null @@ -1,36 +0,0 @@ -#!/usr/bin/env python3 - -"""Main driver wrapper around the manic/checkout utility. - -Tool to assemble external respositories represented in an externals -description file. - -""" -from __future__ import absolute_import -from __future__ import unicode_literals -from __future__ import print_function - -import sys -import traceback - -import manic - -if sys.hexversion < 0x02070000: - print(70 * '*') - print('ERROR: {0} requires python >= 2.7.x. 
'.format(sys.argv[0])) - print('It appears that you are running python {0}'.format( - '.'.join(str(x) for x in sys.version_info[0:3]))) - print(70 * '*') - sys.exit(1) - - -if __name__ == '__main__': - ARGS = manic.checkout.commandline_arguments() - try: - RET_STATUS, _ = manic.checkout.main(ARGS) - sys.exit(RET_STATUS) - except Exception as error: # pylint: disable=broad-except - manic.printlog(str(error)) - if ARGS.backtrace: - traceback.print_exc() - sys.exit(1) diff --git a/manage_externals/manic/__init__.py b/manage_externals/manic/__init__.py deleted file mode 100644 index 11badedd3b..0000000000 --- a/manage_externals/manic/__init__.py +++ /dev/null @@ -1,9 +0,0 @@ -"""Public API for the manage_externals library -""" - -from manic import checkout -from manic.utils import printlog - -__all__ = [ - 'checkout', 'printlog', -] diff --git a/manage_externals/manic/checkout.py b/manage_externals/manic/checkout.py deleted file mode 100755 index 3f5537adce..0000000000 --- a/manage_externals/manic/checkout.py +++ /dev/null @@ -1,446 +0,0 @@ -#!/usr/bin/env python3 - -""" -Tool to assemble repositories represented in a model-description file. - -If loaded as a module (e.g., in a component's buildcpp), it can be used -to check the validity of existing subdirectories and load missing sources. -""" -from __future__ import absolute_import -from __future__ import unicode_literals -from __future__ import print_function - -import argparse -import logging -import os -import os.path -import sys - -from manic.externals_description import create_externals_description -from manic.externals_description import read_externals_description_file -from manic.externals_status import check_safe_to_update_repos -from manic.sourcetree import SourceTree -from manic.utils import printlog, fatal_error -from manic.global_constants import VERSION_SEPERATOR, LOG_FILE_NAME - -if sys.hexversion < 0x02070000: - print(70 * '*') - print('ERROR: {0} requires python >= 2.7.x. '.format(sys.argv[0])) - print('It appears that you are running python {0}'.format( - VERSION_SEPERATOR.join(str(x) for x in sys.version_info[0:3]))) - print(70 * '*') - sys.exit(1) - - -# --------------------------------------------------------------------- -# -# User input -# -# --------------------------------------------------------------------- -def commandline_arguments(args=None): - """Process the command line arguments - - Params: args - optional args. Should only be used during systems - testing. - - Returns: processed command line arguments - """ - description = ''' - -%(prog)s manages checking out groups of externals from revision -control based on an externals description file. By default only the -required externals are checkout out. - -Running %(prog)s without the '--status' option will always attempt to -synchronize the working copy to exactly match the externals description. -''' - - epilog = ''' -``` -NOTE: %(prog)s *MUST* be run from the root of the source tree it -is managing. For example, if you cloned a repository with: - - $ git clone git@github.com/{SOME_ORG}/some-project some-project-dev - -Then the root of the source tree is /path/to/some-project-dev. If you -obtained a sub-project via a checkout of another project: - - $ git clone git@github.com/{SOME_ORG}/some-project some-project-dev - -and you need to checkout the sub-project externals, then the root of the -source tree remains /path/to/some-project-dev. 
Do *NOT* run %(prog)s -from within /path/to/some-project-dev/sub-project - -The root of the source tree will be referred to as `${SRC_ROOT}` below. - - -# Supported workflows - - * Checkout all required components from the default externals - description file: - - $ cd ${SRC_ROOT} - $ ./manage_externals/%(prog)s - - * To update all required components to the current values in the - externals description file, re-run %(prog)s: - - $ cd ${SRC_ROOT} - $ ./manage_externals/%(prog)s - - If there are *any* modifications to *any* working copy according - to the git or svn 'status' command, %(prog)s - will not update any external repositories. Modifications - include: modified files, added files, removed files, or missing - files. - - To avoid this safety check, edit the externals description file - and comment out the modified external block. - - * Checkout all required components from a user specified externals - description file: - - $ cd ${SRC_ROOT} - $ ./manage_externals/%(prog)s --externals my-externals.cfg - - * Status summary of the repositories managed by %(prog)s: - - $ cd ${SRC_ROOT} - $ ./manage_externals/%(prog)s --status - - ./cime - s ./components/cism - ./components/mosart - e-o ./components/rtm - M ./src/fates - e-o ./tools/PTCLM - - - where: - * column one indicates the status of the repository in relation - to the externals description file. - * column two indicates whether the working copy has modified files. - * column three shows how the repository is managed, optional or required - - Column one will be one of these values: - * s : out-of-sync : repository is checked out at a different commit - compared with the externals description - * e : empty : directory does not exist - %(prog)s has not been run - * ? : unknown : directory exists but .git or .svn directories are missing - - Column two will be one of these values: - * M : Modified : modified, added, deleted or missing files - * : blank / space : clean - * - : dash : no meaningful state, for empty repositories - - Column three will be one of these values: - * o : optional : optionally repository - * : blank / space : required repository - - * Detailed git or svn status of the repositories managed by %(prog)s: - - $ cd ${SRC_ROOT} - $ ./manage_externals/%(prog)s --status --verbose - -# Externals description file - - The externals description contains a list of the external - repositories that are used and their version control locations. The - file format is the standard ini/cfg configuration file format. Each - external is defined by a section containing the component name in - square brackets: - - * name (string) : component name, e.g. [cime], [cism], etc. - - Each section has the following keyword-value pairs: - - * required (boolean) : whether the component is a required checkout, - 'true' or 'false'. - - * local_path (string) : component path *relative* to where - %(prog)s is called. - - * protoctol (string) : version control protocol that is used to - manage the component. Valid values are 'git', 'svn', - 'externals_only'. - - Switching an external between different protocols is not - supported, e.g. from svn to git. To switch protocols, you need to - manually move the old working copy to a new location. - - Note: 'externals_only' will only process the external's own - external description file without trying to manage a repository - for the component. This is used for retrieving externals for - standalone components like cam and ctsm which also serve as - sub-components within a larger project. 
If the source root of the - externals_only component is the same as the main source root, then - the local path must be set to '.', the unix current working - directory, e. g. 'local_path = .' - - * repo_url (string) : URL for the repository location, examples: - * https://svn-ccsm-models.cgd.ucar.edu/glc - * git@github.com:esmci/cime.git - * /path/to/local/repository - * . - - NOTE: To operate on only the local clone and and ignore remote - repositories, set the url to '.' (the unix current path), - i.e. 'repo_url = .' . This can be used to checkout a local branch - instead of the upstream branch. - - If a repo url is determined to be a local path (not a network url) - then user expansion, e.g. ~/, and environment variable expansion, - e.g. $HOME or $REPO_ROOT, will be performed. - - Relative paths are difficult to get correct, especially for mixed - use repos. It is advised that local paths expand to absolute paths. - If relative paths are used, they should be relative to one level - above local_path. If local path is 'src/foo', the the relative url - should be relative to 'src'. - - * tag (string) : tag to checkout - - * hash (string) : the git hash to checkout. Only applies to git - repositories. - - * branch (string) : branch to checkout from the specified - repository. Specifying a branch on a remote repository means that - %(prog)s will checkout the version of the branch in the remote, - not the the version in the local repository (if it exists). - - Note: one and only one of tag, branch hash must be supplied. - - * externals (string) : used to make manage_externals aware of - sub-externals required by an external. This is a relative path to - the external's root directory. For example, if LIBX is often used - as a sub-external, it might have an externals file (for its - externals) called Externals_LIBX.cfg. To use libx as a standalone - checkout, it would have another file, Externals.cfg with the - following entry: - - [ libx ] - local_path = . - protocol = externals_only - externals = Externals_LIBX.cfg - required = True - - Now, %(prog)s will process Externals.cfg and also process - Externals_LIBX.cfg as if it was a sub-external. - - Note that by default, checkout_externals will clone an external's - submodules. As a special case, the entry, "externals = None", will - prevent this behavior. For more control over which externals are - checked out, create an externals file (and see the from_submodule - configuration entry below). - - * from_submodule (True / False) : used to pull the repo_url, local_path, - and hash properties for this external from the .gitmodules file in - this repository. Note that the section name (the entry in square - brackets) must match the name in the .gitmodules file. - If from_submodule is True, the protocol must be git and no repo_url, - local_path, hash, branch, or tag entries are allowed. - Default: False - - * sparse (string) : used to control a sparse checkout. This optional - entry should point to a filename (path relative to local_path) that - contains instructions on which repository paths to include (or - exclude) from the working tree. - See the "SPARSE CHECKOUT" section of https://git-scm.com/docs/git-read-tree - Default: sparse checkout is disabled - - * Lines beginning with '#' or ';' are comments and will be ignored. - -# Obtaining this tool, reporting issues, etc. - - The master repository for manage_externals is - https://github.com/ESMCI/manage_externals. Any issues with this tool - should be reported there. 
- -# Troubleshooting - -Operations performed by manage_externals utilities are explicit and -data driven. %(prog)s will always attempt to make the working copy -*exactly* match what is in the externals file when modifying the -working copy of a repository. - -If %(prog)s is not doing what you expected, double check the contents -of the externals description file or examine the output of -./manage_externals/%(prog)s --status - -''' - - parser = argparse.ArgumentParser( - description=description, epilog=epilog, - formatter_class=argparse.RawDescriptionHelpFormatter) - - # - # user options - # - parser.add_argument("components", nargs="*", - help="Specific component(s) to checkout. By default, " - "all required externals are checked out.") - - parser.add_argument('-e', '--externals', nargs='?', - default='Externals.cfg', - help='The externals description filename. ' - 'Default: %(default)s.') - - parser.add_argument('-x', '--exclude', nargs='*', - help='Component(s) listed in the externals file which should be ignored.') - - parser.add_argument('-o', '--optional', action='store_true', default=False, - help='By default only the required externals ' - 'are checked out. This flag will also checkout the ' - 'optional externals.') - - parser.add_argument('-S', '--status', action='store_true', default=False, - help='Output the status of the repositories managed by ' - '%(prog)s. By default only summary information ' - 'is provided. Use the verbose option to see details.') - - parser.add_argument('-v', '--verbose', action='count', default=0, - help='Output additional information to ' - 'the screen and log file. This flag can be ' - 'used up to two times, increasing the ' - 'verbosity level each time.') - - parser.add_argument('--svn-ignore-ancestry', action='store_true', default=False, - help='By default, subversion will abort if a component is ' - 'already checked out and there is no common ancestry with ' - 'the new URL. This flag passes the "--ignore-ancestry" flag ' - 'to the svn switch call. (This is not recommended unless ' - 'you are sure about what you are doing.)') - - # - # developer options - # - parser.add_argument('--backtrace', action='store_true', - help='DEVELOPER: show exception backtraces as extra ' - 'debugging output') - - parser.add_argument('-d', '--debug', action='store_true', default=False, - help='DEVELOPER: output additional debugging ' - 'information to the screen and log file.') - - logging_group = parser.add_mutually_exclusive_group() - - logging_group.add_argument('--logging', dest='do_logging', - action='store_true', - help='DEVELOPER: enable logging.') - logging_group.add_argument('--no-logging', dest='do_logging', - action='store_false', default=False, - help='DEVELOPER: disable logging ' - '(this is the default)') - - if args: - options = parser.parse_args(args) - else: - options = parser.parse_args() - return options - -def _dirty_local_repo_msg(program_name, config_file): - return """The external repositories labeled with 'M' above are not in a clean state. -The following are four options for how to proceed: -(1) Go into each external that is not in a clean state and issue either a 'git status' or - an 'svn status' command (depending on whether the external is managed by git or - svn). Either revert or commit your changes so that all externals are in a clean - state. (To revert changes in git, follow the instructions given when you run 'git - status'.) (Note, though, that it is okay to have untracked files in your working - directory.) Then rerun {program_name}. 
-(2) Alternatively, you do not have to rely on {program_name}. Instead, you can manually - update out-of-sync externals (labeled with 's' above) as described in the - configuration file {config_file}. (For example, run 'git fetch' and 'git checkout' - commands to checkout the appropriate tags for each external, as given in - {config_file}.) -(3) You can also use {program_name} to manage most, but not all externals: You can specify - one or more externals to ignore using the '-x' or '--exclude' argument to - {program_name}. Excluding externals labeled with 'M' will allow {program_name} to - update the other, non-excluded externals. -(4) As a last resort, if you are confident that there is no work that needs to be saved - from a given external, you can remove that external (via "rm -rf [directory]") and - then rerun the {program_name} tool. This option is mainly useful as a workaround for - issues with this tool (such as https://github.com/ESMCI/manage_externals/issues/157). -The external repositories labeled with '?' above are not under version -control using the expected protocol. If you are sure you want to switch -protocols, and you don't have any work you need to save from this -directory, then run "rm -rf [directory]" before rerunning the -{program_name} tool. -""".format(program_name=program_name, config_file=config_file) -# --------------------------------------------------------------------- -# -# main -# -# --------------------------------------------------------------------- -def main(args): - """ - Function to call when module is called from the command line. - Parse externals file and load required repositories or all repositories if - the --all option is passed. - - Returns a tuple (overall_status, tree_status). overall_status is 0 - on success, non-zero on failure. tree_status is a dict mapping local path - to ExternalStatus -- if no checkout is happening. If checkout is happening, tree_status - is None. - """ - if args.do_logging: - logging.basicConfig(filename=LOG_FILE_NAME, - format='%(levelname)s : %(asctime)s : %(message)s', - datefmt='%Y-%m-%d %H:%M:%S', - level=logging.DEBUG) - - program_name = os.path.basename(sys.argv[0]) - logging.info('Beginning of %s', program_name) - - load_all = False - if args.optional: - load_all = True - - root_dir = os.path.abspath(os.getcwd()) - model_data = read_externals_description_file(root_dir, args.externals) - ext_description = create_externals_description( - model_data, components=args.components, exclude=args.exclude) - - for comp in args.components: - if comp not in ext_description.keys(): - # Note we can't print out the list of found externals because - # they were filtered in create_externals_description above. - fatal_error( - "No component {} found in {}".format( - comp, args.externals)) - - source_tree = SourceTree(root_dir, ext_description, svn_ignore_ancestry=args.svn_ignore_ancestry) - if args.components: - components_str = 'specified components' - else: - components_str = 'required & optional components' - printlog('Checking local status of ' + components_str + ': ', end='') - tree_status = source_tree.status(print_progress=True) - printlog('') - - if args.status: - # user requested status-only - for comp in sorted(tree_status): - tree_status[comp].log_status_message(args.verbose) - else: - # checkout / update the external repositories. 
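-        # If any managed working copy is dirty, nothing below is checked out or updated; the per-component status and the guidance from _dirty_local_repo_msg are printed instead.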
- safe_to_update = check_safe_to_update_repos(tree_status) - if not safe_to_update: - # print status - for comp in sorted(tree_status): - tree_status[comp].log_status_message(args.verbose) - # exit gracefully - printlog('-' * 70) - printlog(_dirty_local_repo_msg(program_name, args.externals)) - printlog('-' * 70) - else: - if not args.components: - source_tree.checkout(args.verbose, load_all) - for comp in args.components: - source_tree.checkout(args.verbose, load_all, load_comp=comp) - printlog('') - # New tree status is unknown, don't return anything. - tree_status = None - - logging.info('%s completed without exceptions.', program_name) - # NOTE(bja, 2017-11) tree status is used by the systems tests - return 0, tree_status diff --git a/manage_externals/manic/externals_description.py b/manage_externals/manic/externals_description.py deleted file mode 100644 index 546e7fdcb4..0000000000 --- a/manage_externals/manic/externals_description.py +++ /dev/null @@ -1,830 +0,0 @@ -#!/usr/bin/env python3 - -"""Model description - -Model description is the representation of the various externals -included in the model. It processes in input data structure, and -converts it into a standard interface that is used by the rest of the -system. - -To maintain backward compatibility, externals description files should -follow semantic versioning rules, http://semver.org/ - - - -""" -from __future__ import absolute_import -from __future__ import unicode_literals -from __future__ import print_function - -import logging -import os -import os.path -import re - -# ConfigParser in python2 was renamed to configparser in python3. -# In python2, ConfigParser returns byte strings, str, instead of unicode. -# We need unicode to be compatible with xml and json parser and python3. -try: - # python2 - from ConfigParser import SafeConfigParser as config_parser - from ConfigParser import MissingSectionHeaderError - from ConfigParser import NoSectionError, NoOptionError - - USE_PYTHON2 = True - - def config_string_cleaner(text): - """convert strings into unicode - """ - return text.decode('utf-8') -except ImportError: - # python3 - from configparser import ConfigParser as config_parser - from configparser import MissingSectionHeaderError - from configparser import NoSectionError, NoOptionError - - USE_PYTHON2 = False - - def config_string_cleaner(text): - """Python3 already uses unicode strings, so just return the string - without modification. - - """ - return text - -from .utils import printlog, fatal_error, str_to_bool, expand_local_url -from .utils import execute_subprocess -from .global_constants import EMPTY_STR, PPRINTER, VERSION_SEPERATOR - -# -# Globals -# -DESCRIPTION_SECTION = 'externals_description' -VERSION_ITEM = 'schema_version' - - -def read_externals_description_file(root_dir, file_name): - """Read a file containing an externals description and - create its internal representation. 
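-    Returns the description as a config parser object; a .gitmodules file is first converted to the same cfg representation by _read_gitmodules_file.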
- - """ - root_dir = os.path.abspath(root_dir) - msg = 'In directory : {0}'.format(root_dir) - logging.info(msg) - printlog('Processing externals description file : {0} ({1})'.format(file_name, - root_dir)) - - file_path = os.path.join(root_dir, file_name) - if not os.path.exists(file_name): - if file_name.lower() == "none": - msg = ('INTERNAL ERROR: Attempt to read externals file ' - 'from {0} when not configured'.format(file_path)) - else: - msg = ('ERROR: Model description file, "{0}", does not ' - 'exist at path:\n {1}\nDid you run from the root of ' - 'the source tree?'.format(file_name, file_path)) - - fatal_error(msg) - - externals_description = None - if file_name == ExternalsDescription.GIT_SUBMODULES_FILENAME: - externals_description = _read_gitmodules_file(root_dir, file_name) - else: - try: - config = config_parser() - config.read(file_path) - externals_description = config - except MissingSectionHeaderError: - # not a cfg file - pass - - if externals_description is None: - msg = 'Unknown file format!' - fatal_error(msg) - - return externals_description - -class LstripReader(object): - "LstripReader formats .gitmodules files to be acceptable for configparser" - def __init__(self, filename): - with open(filename, 'r') as infile: - lines = infile.readlines() - self._lines = list() - self._num_lines = len(lines) - self._index = 0 - for line in lines: - self._lines.append(line.lstrip()) - - def readlines(self): - """Return all the lines from this object's file""" - return self._lines - - def readline(self, size=-1): - """Format and return the next line or raise StopIteration""" - try: - line = self.next() - except StopIteration: - line = '' - - if (size > 0) and (len(line) < size): - return line[0:size] - - return line - - def __iter__(self): - """Begin an iteration""" - self._index = 0 - return self - - def next(self): - """Return the next line or raise StopIteration""" - if self._index >= self._num_lines: - raise StopIteration - - self._index = self._index + 1 - return self._lines[self._index - 1] - - def __next__(self): - return self.next() - -def git_submodule_status(repo_dir): - """Run the git submodule status command to obtain submodule hashes. 
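-    Each output line of 'git submodule status' is expected to look like '<status-char><hash> <path> (<describe>)', where the leading character is ' ', '+', '-', or 'U'; the parsing below relies on that layout.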
- """ - # This function is here instead of GitRepository to avoid a dependency loop - cmd = 'git -C {repo_dir} submodule status'.format( - repo_dir=repo_dir).split() - git_output = execute_subprocess(cmd, output_to_caller=True) - submodules = {} - submods = git_output.split('\n') - for submod in submods: - if submod: - status = submod[0] - items = submod[1:].split(' ') - if len(items) > 2: - tag = items[2] - else: - tag = None - - submodules[items[1]] = {'hash':items[0], 'status':status, 'tag':tag} - - return submodules - -def parse_submodules_desc_section(section_items, file_path): - """Find the path and url for this submodule description""" - path = None - url = None - for item in section_items: - name = item[0].strip().lower() - if name == 'path': - path = item[1].strip() - elif name == 'url': - url = item[1].strip() - elif name == 'branch': - # We do not care about branch since we have a hash - silently ignore - pass - else: - msg = 'WARNING: Ignoring unknown {} property, in {}' - msg = msg.format(item[0], file_path) # fool pylint - logging.warning(msg) - - return path, url - -def _read_gitmodules_file(root_dir, file_name): - # pylint: disable=deprecated-method - # Disabling this check because the method is only used for python2 - # pylint: disable=too-many-locals - # pylint: disable=too-many-branches - # pylint: disable=too-many-statements - """Read a .gitmodules file and convert it to be compatible with an - externals description. - """ - root_dir = os.path.abspath(root_dir) - msg = 'In directory : {0}'.format(root_dir) - logging.info(msg) - - file_path = os.path.join(root_dir, file_name) - if not os.path.exists(file_name): - msg = ('ERROR: submodules description file, "{0}", does not ' - 'exist in dir:\n {1}'.format(file_name, root_dir)) - fatal_error(msg) - - submodules_description = None - externals_description = None - try: - config = config_parser() - if USE_PYTHON2: - config.readfp(LstripReader(file_path), filename=file_name) - else: - config.read_file(LstripReader(file_path), source=file_name) - - submodules_description = config - except MissingSectionHeaderError: - # not a cfg file - pass - - if submodules_description is None: - msg = 'Unknown file format!' 
- fatal_error(msg) - else: - # Convert the submodules description to an externals description - externals_description = config_parser() - # We need to grab all the commit hashes for this repo - submods = git_submodule_status(root_dir) - for section in submodules_description.sections(): - if section[0:9] == 'submodule': - sec_name = section[9:].strip(' "') - externals_description.add_section(sec_name) - section_items = submodules_description.items(section) - path, url = parse_submodules_desc_section(section_items, - file_path) - - if path is None: - msg = 'Submodule {} missing path'.format(sec_name) - fatal_error(msg) - - if url is None: - msg = 'Submodule {} missing url'.format(sec_name) - fatal_error(msg) - - externals_description.set(sec_name, - ExternalsDescription.PATH, path) - externals_description.set(sec_name, - ExternalsDescription.PROTOCOL, 'git') - externals_description.set(sec_name, - ExternalsDescription.REPO_URL, url) - externals_description.set(sec_name, - ExternalsDescription.REQUIRED, 'True') - if sec_name in submods: - submod_name = sec_name - else: - # The section name does not have to match the path - submod_name = path - - if submod_name in submods: - git_hash = submods[submod_name]['hash'] - externals_description.set(sec_name, - ExternalsDescription.HASH, - git_hash) - else: - emsg = "submodule status has no section, '{}'" - emsg += "\nCheck section names in externals config file" - fatal_error(emsg.format(submod_name)) - - # Required items - externals_description.add_section(DESCRIPTION_SECTION) - externals_description.set(DESCRIPTION_SECTION, VERSION_ITEM, '1.0.0') - - return externals_description - -def create_externals_description( - model_data, model_format='cfg', components=None, exclude=None, parent_repo=None): - """Create the a externals description object from the provided data - - components: list of component names to include, None to include all. If a - name isn't found, it is silently omitted from the return value. - exclude: list of component names to skip. - """ - externals_description = None - if model_format == 'dict': - externals_description = ExternalsDescriptionDict( - model_data, components=components, exclude=exclude) - elif model_format == 'cfg': - major, _, _ = get_cfg_schema_version(model_data) - if major == 1: - externals_description = ExternalsDescriptionConfigV1( - model_data, components=components, exclude=exclude, parent_repo=parent_repo) - else: - msg = ('Externals description file has unsupported schema ' - 'version "{0}".'.format(major)) - fatal_error(msg) - else: - msg = 'Unknown model data format "{0}"'.format(model_format) - fatal_error(msg) - return externals_description - - -def get_cfg_schema_version(model_cfg): - """Extract the major, minor, patch version of the config file schema - - Params: - model_cfg - config parser object containing the externas description data - - Returns: - major = integer major version - minor = integer minor version - patch = integer patch version - """ - semver_str = '' - try: - semver_str = model_cfg.get(DESCRIPTION_SECTION, VERSION_ITEM) - except (NoSectionError, NoOptionError): - msg = ('externals description file must have the required ' - 'section: "{0}" and item "{1}"'.format(DESCRIPTION_SECTION, - VERSION_ITEM)) - fatal_error(msg) - - # NOTE(bja, 2017-11) Assume we don't care about the - # build/pre-release metadata for now! 
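-    # Illustrative example: a value such as '1.0.0-rc1+build2' splits to version_str '1.0.0', which then yields (1, 0, 0).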
- version_list = re.split(r'[-+]', semver_str) - version_str = version_list[0] - version = version_str.split(VERSION_SEPERATOR) - try: - major = int(version[0].strip()) - minor = int(version[1].strip()) - patch = int(version[2].strip()) - except ValueError: - msg = ('Config file schema version must have integer digits for ' - 'major, minor and patch versions. ' - 'Received "{0}"'.format(version_str)) - fatal_error(msg) - return major, minor, patch - - -class ExternalsDescription(dict): - """Base externals description class that is independent of the user input - format. Different input formats can all be converted to this - representation to provide a consistent represtentation for the - rest of the objects in the system. - - NOTE(bja, 2018-03): do NOT define _schema_major etc at the class - level in the base class. The nested/recursive nature of externals - means different schema versions may be present in a single run! - - All inheriting classes must overwrite: - self._schema_major and self._input_major - self._schema_minor and self._input_minor - self._schema_patch and self._input_patch - - where _schema_x is the supported schema, _input_x is the user - input value. - - """ - # keywords defining the interface into the externals description data; these - # are brought together by the schema below. - EXTERNALS = 'externals' # path to externals file. - BRANCH = 'branch' - SUBMODULE = 'from_submodule' - HASH = 'hash' - NAME = 'name' - PATH = 'local_path' - PROTOCOL = 'protocol' - REPO = 'repo' - REPO_URL = 'repo_url' - REQUIRED = 'required' - TAG = 'tag' - SPARSE = 'sparse' - - PROTOCOL_EXTERNALS_ONLY = 'externals_only' - PROTOCOL_GIT = 'git' - PROTOCOL_SVN = 'svn' - GIT_SUBMODULES_FILENAME = '.gitmodules' - KNOWN_PRROTOCOLS = [PROTOCOL_GIT, PROTOCOL_SVN, PROTOCOL_EXTERNALS_ONLY] - - # v1 xml keywords - _V1_TREE_PATH = 'TREE_PATH' - _V1_ROOT = 'ROOT' - _V1_TAG = 'TAG' - _V1_BRANCH = 'BRANCH' - _V1_REQ_SOURCE = 'REQ_SOURCE' - - # Dictionary keys are component names. The corresponding values are laid out - # according to this schema. - _source_schema = {REQUIRED: True, - PATH: 'string', - EXTERNALS: 'string', - SUBMODULE : True, - REPO: {PROTOCOL: 'string', - REPO_URL: 'string', - TAG: 'string', - BRANCH: 'string', - HASH: 'string', - SPARSE: 'string', - } - } - - def __init__(self, parent_repo=None): - """Convert the xml into a standardized dict that can be used to - construct the source objects - - """ - dict.__init__(self) - - self._schema_major = None - self._schema_minor = None - self._schema_patch = None - self._input_major = None - self._input_minor = None - self._input_patch = None - self._parent_repo = parent_repo - - def _verify_schema_version(self): - """Use semantic versioning rules to verify we can process this schema. - - """ - known = '{0}.{1}.{2}'.format(self._schema_major, - self._schema_minor, - self._schema_patch) - received = '{0}.{1}.{2}'.format(self._input_major, - self._input_minor, - self._input_patch) - - if self._input_major != self._schema_major: - # should never get here, the factory should handle this correctly! - msg = ('DEV_ERROR: version "{0}" parser received ' - 'version "{1}" input.'.format(known, received)) - fatal_error(msg) - - if self._input_minor > self._schema_minor: - msg = ('Incompatible schema version:\n' - ' User supplied schema version "{0}" is too new."\n' - ' Can only process version "{1}" files and ' - 'older.'.format(received, known)) - fatal_error(msg) - - if self._input_patch > self._schema_patch: - # NOTE(bja, 2018-03) ignoring for now... 
Not clear what - # conditions the test is needed. - pass - - def _check_user_input(self): - """Run a series of checks to attempt to validate the user input and - detect errors as soon as possible. - - NOTE(bja, 2018-03) These checks are called *after* the file is - read. That means the schema check can not occur here. - - Note: the order is important. check_optional will create - optional with null data. run check_data first to ensure - required data was provided correctly by the user. - - """ - self._check_data() - self._check_optional() - self._validate() - - def _check_data(self): - # pylint: disable=too-many-branches,too-many-statements - """Check user supplied data is valid where possible. - """ - for ext_name in self.keys(): - if (self[ext_name][self.REPO][self.PROTOCOL] - not in self.KNOWN_PRROTOCOLS): - msg = 'Unknown repository protocol "{0}" in "{1}".'.format( - self[ext_name][self.REPO][self.PROTOCOL], ext_name) - fatal_error(msg) - - if (self[ext_name][self.REPO][self.PROTOCOL] == - self.PROTOCOL_SVN): - if self.HASH in self[ext_name][self.REPO]: - msg = ('In repo description for "{0}". svn repositories ' - 'may not include the "hash" keyword.'.format( - ext_name)) - fatal_error(msg) - - if ((self[ext_name][self.REPO][self.PROTOCOL] != self.PROTOCOL_GIT) - and (self.SUBMODULE in self[ext_name])): - msg = ('self.SUBMODULE is only supported with {0} protocol, ' - '"{1}" is defined as an {2} repository') - fatal_error(msg.format(self.PROTOCOL_GIT, ext_name, - self[ext_name][self.REPO][self.PROTOCOL])) - - if (self[ext_name][self.REPO][self.PROTOCOL] != - self.PROTOCOL_EXTERNALS_ONLY): - ref_count = 0 - found_refs = '' - if self.TAG in self[ext_name][self.REPO]: - ref_count += 1 - found_refs = '"{0} = {1}", {2}'.format( - self.TAG, self[ext_name][self.REPO][self.TAG], - found_refs) - if self.BRANCH in self[ext_name][self.REPO]: - ref_count += 1 - found_refs = '"{0} = {1}", {2}'.format( - self.BRANCH, self[ext_name][self.REPO][self.BRANCH], - found_refs) - if self.HASH in self[ext_name][self.REPO]: - ref_count += 1 - found_refs = '"{0} = {1}", {2}'.format( - self.HASH, self[ext_name][self.REPO][self.HASH], - found_refs) - if (self.SUBMODULE in self[ext_name] and - self[ext_name][self.SUBMODULE]): - ref_count += 1 - found_refs = '"{0} = {1}", {2}'.format( - self.SUBMODULE, - self[ext_name][self.SUBMODULE], found_refs) - - if ref_count > 1: - msg = 'Model description is over specified! ' - if self.SUBMODULE in self[ext_name]: - msg += ('from_submodule is not compatible with ' - '"tag", "branch", or "hash" ') - else: - msg += (' Only one of "tag", "branch", or "hash" ' - 'may be specified ') - - msg += 'for repo description of "{0}".'.format(ext_name) - msg = '{0}\nFound: {1}'.format(msg, found_refs) - fatal_error(msg) - elif ref_count < 1: - msg = ('Model description is under specified! One of ' - '"tag", "branch", or "hash" must be specified for ' - 'repo description of "{0}"'.format(ext_name)) - fatal_error(msg) - - if (self.REPO_URL not in self[ext_name][self.REPO] and - (self.SUBMODULE not in self[ext_name] or - not self[ext_name][self.SUBMODULE])): - msg = ('Model description is under specified! Must have ' - '"repo_url" in repo ' - 'description for "{0}"'.format(ext_name)) - fatal_error(msg) - - if (self.SUBMODULE in self[ext_name] and - self[ext_name][self.SUBMODULE]): - if self.REPO_URL in self[ext_name][self.REPO]: - msg = ('Model description is over specified! 
' - 'from_submodule keyword is not compatible ' - 'with {0} keyword for'.format(self.REPO_URL)) - msg = '{0} repo description of "{1}"'.format(msg, - ext_name) - fatal_error(msg) - - if self.PATH in self[ext_name]: - msg = ('Model description is over specified! ' - 'from_submodule keyword is not compatible with ' - '{0} keyword for'.format(self.PATH)) - msg = '{0} repo description of "{1}"'.format(msg, - ext_name) - fatal_error(msg) - - if self.REPO_URL in self[ext_name][self.REPO]: - url = expand_local_url( - self[ext_name][self.REPO][self.REPO_URL], ext_name) - self[ext_name][self.REPO][self.REPO_URL] = url - - def _check_optional(self): - # pylint: disable=too-many-branches - """Some fields like externals, repo:tag repo:branch are - (conditionally) optional. We don't want the user to be - required to enter them in every externals description file, but - still want to validate the input. Check conditions and add - default values if appropriate. - - """ - submod_desc = None # Only load submodules info once - for field in self: - # truely optional - if self.EXTERNALS not in self[field]: - self[field][self.EXTERNALS] = EMPTY_STR - - # git and svn repos must tags and branches for validation purposes. - if self.TAG not in self[field][self.REPO]: - self[field][self.REPO][self.TAG] = EMPTY_STR - if self.BRANCH not in self[field][self.REPO]: - self[field][self.REPO][self.BRANCH] = EMPTY_STR - if self.HASH not in self[field][self.REPO]: - self[field][self.REPO][self.HASH] = EMPTY_STR - if self.REPO_URL not in self[field][self.REPO]: - self[field][self.REPO][self.REPO_URL] = EMPTY_STR - if self.SPARSE not in self[field][self.REPO]: - self[field][self.REPO][self.SPARSE] = EMPTY_STR - - # from_submodule has a complex relationship with other fields - if self.SUBMODULE in self[field]: - # User wants to use submodule information, is it available? - if self._parent_repo is None: - # No parent == no submodule information - PPRINTER.pprint(self[field]) - msg = 'No parent submodule for "{0}"'.format(field) - fatal_error(msg) - elif self._parent_repo.protocol() != self.PROTOCOL_GIT: - PPRINTER.pprint(self[field]) - msg = 'Parent protocol, "{0}", does not support submodules' - fatal_error(msg.format(self._parent_repo.protocol())) - else: - args = self._repo_config_from_submodule(field, submod_desc) - repo_url, repo_path, ref_hash, submod_desc = args - - if repo_url is None: - msg = ('Cannot checkout "{0}" as a submodule, ' - 'repo not found in {1} file') - fatal_error(msg.format(field, - self.GIT_SUBMODULES_FILENAME)) - # Fill in submodule fields - self[field][self.REPO][self.REPO_URL] = repo_url - self[field][self.REPO][self.HASH] = ref_hash - self[field][self.PATH] = repo_path - - if self[field][self.SUBMODULE]: - # We should get everything from the parent submodule - # configuration. - pass - # No else (from _submodule = False is the default) - else: - # Add the default value (not using submodule information) - self[field][self.SUBMODULE] = False - - def _repo_config_from_submodule(self, field, submod_desc): - """Find the external config information for a repository from - its submodule configuration information. - """ - if submod_desc is None: - repo_path = os.getcwd() # Is this always correct? 
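For orientation, a minimal sketch of how submodule path/url pairs can be read out of a .gitmodules file with the standard configparser module. This is only an illustration of the idea, not the _read_gitmodules_file helper used here; the file layout follows the usual .gitmodules format and the repo_root argument is a stand-in.

import configparser
import os

def read_gitmodules(repo_root):
    """Return {submodule path: url} parsed from <repo_root>/.gitmodules (illustrative only)."""
    parser = configparser.ConfigParser()
    parser.read(os.path.join(repo_root, '.gitmodules'))
    submods = {}
    for section in parser.sections():
        # sections look like: [submodule "components/foo"]
        if section.startswith('submodule'):
            submods[parser.get(section, 'path')] = parser.get(section, 'url')
    return submods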
- submod_file = self._parent_repo.submodules_file(repo_path=repo_path) - if submod_file is None: - msg = ('Cannot checkout "{0}" from submodule information\n' - ' Parent repo, "{1}" does not have submodules') - fatal_error(msg.format(field, self._parent_repo.name())) - - printlog( - 'Processing submodules description file : {0} ({1})'.format( - submod_file, repo_path)) - submod_model_data= _read_gitmodules_file(repo_path, submod_file) - submod_desc = create_externals_description(submod_model_data) - - # Can we find our external? - repo_url = None - repo_path = None - ref_hash = None - for ext_field in submod_desc: - if field == ext_field: - ext = submod_desc[ext_field] - repo_url = ext[self.REPO][self.REPO_URL] - repo_path = ext[self.PATH] - ref_hash = ext[self.REPO][self.HASH] - break - - return repo_url, repo_path, ref_hash, submod_desc - - def _validate(self): - """Validate that the parsed externals description contains all necessary - fields. - - """ - def print_compare_difference(data_a, data_b, loc_a, loc_b): - """Look through the data structures and print the differences. - - """ - for item in data_a: - if item in data_b: - if not isinstance(data_b[item], type(data_a[item])): - printlog(" {item}: {loc} = {val} ({val_type})".format( - item=item, loc=loc_a, val=data_a[item], - val_type=type(data_a[item]))) - printlog(" {item} {loc} = {val} ({val_type})".format( - item=' ' * len(item), loc=loc_b, val=data_b[item], - val_type=type(data_b[item]))) - else: - printlog(" {item}: {loc} = {val} ({val_type})".format( - item=item, loc=loc_a, val=data_a[item], - val_type=type(data_a[item]))) - printlog(" {item} {loc} missing".format( - item=' ' * len(item), loc=loc_b)) - - def validate_data_struct(schema, data): - """Compare a data structure against a schema and validate all required - fields are present. - - """ - is_valid = False - in_ref = True - valid = True - if isinstance(schema, dict) and isinstance(data, dict): - # Both are dicts, recursively verify that all fields - # in schema are present in the data. - for key in schema: - in_ref = in_ref and (key in data) - if in_ref: - valid = valid and ( - validate_data_struct(schema[key], data[key])) - - is_valid = in_ref and valid - else: - # non-recursive structure. verify data and schema have - # the same type. - is_valid = isinstance(data, type(schema)) - - if not is_valid: - printlog(" Unmatched schema and input:") - if isinstance(schema, dict): - print_compare_difference(schema, data, 'schema', 'input') - print_compare_difference(data, schema, 'input', 'schema') - else: - printlog(" schema = {0} ({1})".format( - schema, type(schema))) - printlog(" input = {0} ({1})".format(data, type(data))) - - return is_valid - - for field in self: - valid = validate_data_struct(self._source_schema, self[field]) - if not valid: - PPRINTER.pprint(self._source_schema) - PPRINTER.pprint(self[field]) - msg = 'ERROR: source for "{0}" did not validate'.format(field) - fatal_error(msg) - - -class ExternalsDescriptionDict(ExternalsDescription): - """Create a externals description object from a dictionary using the API - representations. Primarily used to simplify creating model - description files for unit testing. - - """ - - def __init__(self, model_data, components=None, exclude=None): - """Parse a native dictionary into a externals description. 
- """ - ExternalsDescription.__init__(self) - self._schema_major = 1 - self._schema_minor = 0 - self._schema_patch = 0 - self._input_major = 1 - self._input_minor = 0 - self._input_patch = 0 - self._verify_schema_version() - if components: - for key in list(model_data.keys()): - if key not in components: - del model_data[key] - - if exclude: - for key in list(model_data.keys()): - if key in exclude: - del model_data[key] - - self.update(model_data) - self._check_user_input() - - -class ExternalsDescriptionConfigV1(ExternalsDescription): - """Create a externals description object from a config_parser object, - schema version 1. - - """ - - def __init__(self, model_data, components=None, exclude=None, parent_repo=None): - """Convert the config data into a standardized dict that can be used to - construct the source objects - - components: list of component names to include, None to include all. - exclude: list of component names to skip. - """ - ExternalsDescription.__init__(self, parent_repo=parent_repo) - self._schema_major = 1 - self._schema_minor = 1 - self._schema_patch = 0 - self._input_major, self._input_minor, self._input_patch = \ - get_cfg_schema_version(model_data) - self._verify_schema_version() - self._remove_metadata(model_data) - self._parse_cfg(model_data, components=components, exclude=exclude) - self._check_user_input() - - @staticmethod - def _remove_metadata(model_data): - """Remove the metadata section from the model configuration file so - that it is simpler to look through the file and construct the - externals description. - - """ - model_data.remove_section(DESCRIPTION_SECTION) - - def _parse_cfg(self, cfg_data, components=None, exclude=None): - """Parse a config_parser object into a externals description. - - components: list of component names to include, None to include all. - exclude: list of component names to skip. - """ - def list_to_dict(input_list, convert_to_lower_case=True): - """Convert a list of key-value pairs into a dictionary. - """ - output_dict = {} - for item in input_list: - key = config_string_cleaner(item[0].strip()) - value = config_string_cleaner(item[1].strip()) - if convert_to_lower_case: - key = key.lower() - output_dict[key] = value - return output_dict - - for section in cfg_data.sections(): - name = config_string_cleaner(section.lower().strip()) - if (components and name not in components) or (exclude and name in exclude): - continue - self[name] = {} - self[name].update(list_to_dict(cfg_data.items(section))) - self[name][self.REPO] = {} - loop_keys = self[name].copy().keys() - for item in loop_keys: - if item in self._source_schema: - if isinstance(self._source_schema[item], bool): - self[name][item] = str_to_bool(self[name][item]) - elif item in self._source_schema[self.REPO]: - self[name][self.REPO][item] = self[name][item] - del self[name][item] - else: - msg = ('Invalid input: "{sect}" contains unknown ' - 'item "{item}".'.format(sect=name, item=item)) - fatal_error(msg) diff --git a/manage_externals/manic/externals_status.py b/manage_externals/manic/externals_status.py deleted file mode 100644 index 6bc29e9732..0000000000 --- a/manage_externals/manic/externals_status.py +++ /dev/null @@ -1,164 +0,0 @@ -"""ExternalStatus - -Class to store status and state information about repositories and -create a string representation. 
- -""" -from __future__ import absolute_import -from __future__ import unicode_literals -from __future__ import print_function - -from .global_constants import EMPTY_STR -from .utils import printlog, indent_string -from .global_constants import VERBOSITY_VERBOSE, VERBOSITY_DUMP - - -class ExternalStatus(object): - """Class to represent the status of a given source repository or tree. - - Individual repositories determine their own status in the - Repository objects. This object is just resposible for storing the - information and passing it up to a higher level for reporting or - global decisions. - - There are two states of concern: - - * If the repository is in-sync with the externals description file. - - * If the repostiory working copy is clean and there are no pending - transactions (e.g. add, remove, rename, untracked files). - - """ - # sync_state and clean_state can be one of the following: - DEFAULT = '-' # not set yet (sync_state). clean_state can be this if sync_state is EMPTY. - UNKNOWN = '?' - EMPTY = 'e' - MODEL_MODIFIED = 's' # repo version != externals (sync_state only) - DIRTY = 'M' # repo is dirty (clean_state only) - STATUS_OK = ' ' # repo is clean (clean_state) or matches externals version (sync_state) - STATUS_ERROR = '!' - - # source_type can be one of the following: - OPTIONAL = 'o' - STANDALONE = 's' - MANAGED = ' ' - - def __init__(self): - self.sync_state = self.DEFAULT - self.clean_state = self.DEFAULT - self.source_type = self.DEFAULT - self.path = EMPTY_STR - self.current_version = EMPTY_STR - self.expected_version = EMPTY_STR - self.status_output = EMPTY_STR - - def log_status_message(self, verbosity): - """Write status message to the screen and log file - """ - printlog(self._default_status_message()) - if verbosity >= VERBOSITY_VERBOSE: - printlog(self._verbose_status_message()) - if verbosity >= VERBOSITY_DUMP: - printlog(self._dump_status_message()) - - def __repr__(self): - return self._default_status_message() - - def _default_status_message(self): - """Return the default terse status message string - """ - return '{sync}{clean}{src_type} {path}'.format( - sync=self.sync_state, clean=self.clean_state, - src_type=self.source_type, path=self.path) - - def _verbose_status_message(self): - """Return the verbose status message string - """ - clean_str = self.DEFAULT - if self.clean_state == self.STATUS_OK: - clean_str = 'clean sandbox' - elif self.clean_state == self.DIRTY: - clean_str = 'modified sandbox' - - sync_str = 'on {0}'.format(self.current_version) - if self.sync_state != self.STATUS_OK: - sync_str = '{current} --> {expected}'.format( - current=self.current_version, expected=self.expected_version) - return ' {clean}, {sync}'.format(clean=clean_str, sync=sync_str) - - def _dump_status_message(self): - """Return the dump status message string - """ - return indent_string(self.status_output, 12) - - def safe_to_update(self): - """Report if it is safe to update a repository. Safe is defined as: - - * If a repository is empty, it is safe to update. - - * If a repository exists and has a clean working copy state - with no pending transactions. - - """ - safe_to_update = False - repo_exists = self.exists() - if not repo_exists: - safe_to_update = True - else: - # If the repo exists, it must be in ok or modified - # sync_state. Any other sync_state at this point - # represents a logic error that should have been handled - # before now! 
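Condensed into a standalone function, the update rule described above amounts to the following sketch, written with the single-character state flags documented in the class. It is a simplification of exists() plus safe_to_update(), not a drop-in replacement.

def safe_to_update(sync_state, clean_state):
    """Sketch of the decision: empty repos are safe; existing repos must be clean."""
    # Nothing checked out yet (empty sync state, no meaningful clean state): safe.
    if sync_state == 'e' and clean_state in ('-', 'e', '?'):
        return True
    # Otherwise require an ok or merely out-of-sync state and a clean sandbox.
    return sync_state in (' ', 's') and clean_state == ' '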
- sync_safe = ((self.sync_state == ExternalStatus.STATUS_OK) or - (self.sync_state == ExternalStatus.MODEL_MODIFIED)) - if sync_safe: - # The clean_state must be STATUS_OK to update. Otherwise we - # are dirty or there was a missed error previously. - if self.clean_state == ExternalStatus.STATUS_OK: - safe_to_update = True - return safe_to_update - - def exists(self): - """Determine if the repo exists. This is indicated by: - - * sync_state is not EMPTY - - * if the sync_state is empty, then the valid states for - clean_state are default, empty or unknown. Anything else - and there was probably an internal logic error. - - NOTE(bja, 2017-10) For the moment we are considering a - sync_state of default or unknown to require user intervention, - but we may want to relax this convention. This is probably a - result of a network error or internal logic error but more - testing is needed. - - """ - is_empty = (self.sync_state == ExternalStatus.EMPTY) - clean_valid = ((self.clean_state == ExternalStatus.DEFAULT) or - (self.clean_state == ExternalStatus.EMPTY) or - (self.clean_state == ExternalStatus.UNKNOWN)) - - if is_empty and clean_valid: - exists = False - else: - exists = True - return exists - - -def check_safe_to_update_repos(tree_status): - """Check if *ALL* repositories are in a safe state to update. We don't - want to do a partial update of the repositories then die, leaving - the model in an inconsistent state. - - Note: if there is an update to do, the repositories will by - definiation be out of synce with the externals description, so we - can't use that as criteria for updating. - - """ - safe_to_update = True - for comp in tree_status: - stat = tree_status[comp] - safe_to_update &= stat.safe_to_update() - - return safe_to_update diff --git a/manage_externals/manic/global_constants.py b/manage_externals/manic/global_constants.py deleted file mode 100644 index 0e91cffc90..0000000000 --- a/manage_externals/manic/global_constants.py +++ /dev/null @@ -1,18 +0,0 @@ -"""Globals shared across modules -""" - -from __future__ import absolute_import -from __future__ import unicode_literals -from __future__ import print_function - -import pprint - -EMPTY_STR = '' -LOCAL_PATH_INDICATOR = '.' -VERSION_SEPERATOR = '.' -LOG_FILE_NAME = 'manage_externals.log' -PPRINTER = pprint.PrettyPrinter(indent=4) - -VERBOSITY_DEFAULT = 0 -VERBOSITY_VERBOSE = 1 -VERBOSITY_DUMP = 2 diff --git a/manage_externals/manic/repository.py b/manage_externals/manic/repository.py deleted file mode 100644 index ea4230fb7b..0000000000 --- a/manage_externals/manic/repository.py +++ /dev/null @@ -1,98 +0,0 @@ -"""Base class representation of a repository -""" - -from .externals_description import ExternalsDescription -from .utils import fatal_error -from .global_constants import EMPTY_STR - - -class Repository(object): - """ - Class to represent and operate on a repository description. 
- """ - - def __init__(self, component_name, repo): - """ - Parse repo externals description - """ - self._name = component_name - self._protocol = repo[ExternalsDescription.PROTOCOL] - self._tag = repo[ExternalsDescription.TAG] - self._branch = repo[ExternalsDescription.BRANCH] - self._hash = repo[ExternalsDescription.HASH] - self._url = repo[ExternalsDescription.REPO_URL] - self._sparse = repo[ExternalsDescription.SPARSE] - - if self._url is EMPTY_STR: - fatal_error('repo must have a URL') - - if ((self._tag is EMPTY_STR) and (self._branch is EMPTY_STR) and - (self._hash is EMPTY_STR)): - fatal_error('{0} repo must have a branch, tag or hash element') - - ref_count = 0 - if self._tag is not EMPTY_STR: - ref_count += 1 - if self._branch is not EMPTY_STR: - ref_count += 1 - if self._hash is not EMPTY_STR: - ref_count += 1 - if ref_count != 1: - fatal_error('repo {0} must have exactly one of ' - 'tag, branch or hash.'.format(self._name)) - - def checkout(self, base_dir_path, repo_dir_name, verbosity, recursive): # pylint: disable=unused-argument - """ - If the repo destination directory exists, ensure it is correct (from - correct URL, correct branch or tag), and possibly update the source. - If the repo destination directory does not exist, checkout the correce - branch or tag. - NB: is include as an argument for compatibility with - git functionality (repository_git.py) - """ - msg = ('DEV_ERROR: checkout method must be implemented in all ' - 'repository classes! {0}'.format(self.__class__.__name__)) - fatal_error(msg) - - def status(self, stat, repo_dir_path): # pylint: disable=unused-argument - """Report the status of the repo - - """ - msg = ('DEV_ERROR: status method must be implemented in all ' - 'repository classes! {0}'.format(self.__class__.__name__)) - fatal_error(msg) - - def submodules_file(self, repo_path=None): - # pylint: disable=no-self-use,unused-argument - """Stub for use by non-git VC systems""" - return None - - def url(self): - """Public access of repo url. - """ - return self._url - - def tag(self): - """Public access of repo tag - """ - return self._tag - - def branch(self): - """Public access of repo branch. - """ - return self._branch - - def hash(self): - """Public access of repo hash. - """ - return self._hash - - def name(self): - """Public access of repo name. - """ - return self._name - - def protocol(self): - """Public access of repo protocol. - """ - return self._protocol diff --git a/manage_externals/manic/repository_factory.py b/manage_externals/manic/repository_factory.py deleted file mode 100644 index 18c73ffc4b..0000000000 --- a/manage_externals/manic/repository_factory.py +++ /dev/null @@ -1,30 +0,0 @@ -"""Factory for creating and initializing the appropriate repository class -""" - -from __future__ import absolute_import -from __future__ import unicode_literals -from __future__ import print_function - -from .repository_git import GitRepository -from .repository_svn import SvnRepository -from .externals_description import ExternalsDescription -from .utils import fatal_error - - -def create_repository(component_name, repo_info, svn_ignore_ancestry=False): - """Determine what type of repository we have, i.e. git or svn, and - create the appropriate object. - - Can return None (e.g. if protocol is 'externals_only'). 
- """ - protocol = repo_info[ExternalsDescription.PROTOCOL].lower() - if protocol == 'git': - repo = GitRepository(component_name, repo_info) - elif protocol == 'svn': - repo = SvnRepository(component_name, repo_info, ignore_ancestry=svn_ignore_ancestry) - elif protocol == 'externals_only': - repo = None - else: - msg = 'Unknown repo protocol "{0}"'.format(protocol) - fatal_error(msg) - return repo diff --git a/manage_externals/manic/repository_git.py b/manage_externals/manic/repository_git.py deleted file mode 100644 index adc666cc57..0000000000 --- a/manage_externals/manic/repository_git.py +++ /dev/null @@ -1,849 +0,0 @@ -"""Class for interacting with git repositories -""" - -from __future__ import absolute_import -from __future__ import unicode_literals -from __future__ import print_function - -import copy -import os - -from .global_constants import EMPTY_STR, LOCAL_PATH_INDICATOR -from .global_constants import VERBOSITY_VERBOSE -from .repository import Repository -from .externals_status import ExternalStatus -from .externals_description import ExternalsDescription, git_submodule_status -from .utils import expand_local_url, split_remote_url, is_remote_url -from .utils import fatal_error, printlog -from .utils import execute_subprocess - - -class GitRepository(Repository): - """Class to represent and operate on a repository description. - - For testing purpose, all system calls to git should: - - * be isolated in separate functions with no application logic - * of the form: - - cmd = 'git -C {dirname} ...'.format(dirname=dirname).split() - - value = execute_subprocess(cmd, output_to_caller={T|F}, - status_to_caller={T|F}) - - return value - * be static methods (not rely on self) - * name as _git_subcommand_args(user_args) - - This convention allows easy unit testing of the repository logic - by mocking the specific calls to return predefined results. - - """ - - def __init__(self, component_name, repo): - """ - repo: ExternalsDescription. - """ - Repository.__init__(self, component_name, repo) - self._gitmodules = None - self._submods = None - - # ---------------------------------------------------------------- - # - # Public API, defined by Repository - # - # ---------------------------------------------------------------- - def checkout(self, base_dir_path, repo_dir_name, verbosity, recursive): - """ - If the repo destination directory exists, ensure it is correct (from - correct URL, correct branch or tag), and possibly update the source. - If the repo destination directory does not exist, checkout the correct - branch or tag. - """ - repo_dir_path = os.path.join(base_dir_path, repo_dir_name) - repo_dir_exists = os.path.exists(repo_dir_path) - if (repo_dir_exists and not os.listdir( - repo_dir_path)) or not repo_dir_exists: - self._clone_repo(base_dir_path, repo_dir_name, verbosity) - self._checkout_ref(repo_dir_path, verbosity, recursive) - gmpath = os.path.join(repo_dir_path, - ExternalsDescription.GIT_SUBMODULES_FILENAME) - if os.path.exists(gmpath): - self._gitmodules = gmpath - self._submods = git_submodule_status(repo_dir_path) - else: - self._gitmodules = None - self._submods = None - - def status(self, stat, repo_dir_path): - """ - If the repo destination directory exists, ensure it is correct (from - correct URL, correct branch or tag), and possibly update the source. - If the repo destination directory does not exist, checkout the correct - branch or tag. 
- """ - self._check_sync(stat, repo_dir_path) - if os.path.exists(repo_dir_path): - self._status_summary(stat, repo_dir_path) - - def submodules_file(self, repo_path=None): - if repo_path is not None: - gmpath = os.path.join(repo_path, - ExternalsDescription.GIT_SUBMODULES_FILENAME) - if os.path.exists(gmpath): - self._gitmodules = gmpath - self._submods = git_submodule_status(repo_path) - - return self._gitmodules - - # ---------------------------------------------------------------- - # - # Internal work functions - # - # ---------------------------------------------------------------- - def _clone_repo(self, base_dir_path, repo_dir_name, verbosity): - """Clones repo_dir_name into base_dir_path. - """ - self._git_clone(self._url, os.path.join(base_dir_path, repo_dir_name), - verbosity=verbosity) - - def _current_ref(self, dirname): - """Determine the *name* associated with HEAD at dirname. - - If we're on a tag, then returns the tag name; otherwise, returns - the current hash. Returns an empty string if no reference can be - determined (e.g., if we're not actually in a git repository). - - If we're on a branch, then the branch name is also included in - the returned string (in addition to the tag / hash). - """ - ref_found = False - - # If we're exactly at a tag, use that as the current ref - tag_found, tag_name = self._git_current_tag(dirname) - if tag_found: - current_ref = tag_name - ref_found = True - - if not ref_found: - # Otherwise, use current hash as the current ref - hash_found, hash_name = self._git_current_hash(dirname) - if hash_found: - current_ref = hash_name - ref_found = True - - if ref_found: - # If we're on a branch, include branch name in current ref - branch_found, branch_name = self._git_current_branch(dirname) - if branch_found: - current_ref = "{} (branch {})".format(current_ref, branch_name) - else: - # If we still can't find a ref, return empty string. This - # can happen if we're not actually in a git repo - current_ref = '' - - return current_ref - - def _check_sync(self, stat, repo_dir_path): - """Determine whether a git repository is in-sync with the model - description. - - Because repos can have multiple remotes, the only criteria is - whether the branch or tag is the same. - - """ - if not os.path.exists(repo_dir_path): - # NOTE(bja, 2017-10) condition should have been determined - # by _Source() object and should never be here! - stat.sync_state = ExternalStatus.STATUS_ERROR - else: - git_dir = os.path.join(repo_dir_path, '.git') - if not os.path.exists(git_dir): - # NOTE(bja, 2017-10) directory exists, but no git repo - # info.... Can't test with subprocess git command - # because git will move up directory tree until it - # finds the parent repo git dir! - stat.sync_state = ExternalStatus.UNKNOWN - else: - self._check_sync_logic(stat, repo_dir_path) - - def _check_sync_logic(self, stat, repo_dir_path): - """Compare the underlying hashes of the currently checkout ref and the - expected ref. - - Output: sets the sync_state as well as the current and - expected ref in the input status object. - - """ - def compare_refs(current_ref, expected_ref): - """Compare the current and expected ref. 
- - """ - if current_ref == expected_ref: - status = ExternalStatus.STATUS_OK - else: - status = ExternalStatus.MODEL_MODIFIED - return status - - # get the full hash of the current commit - _, current_ref = self._git_current_hash(repo_dir_path) - - if self._branch: - if self._url == LOCAL_PATH_INDICATOR: - expected_ref = self._branch - else: - remote_name = self._remote_name_for_url(self._url, - repo_dir_path) - if not remote_name: - # git doesn't know about this remote. by definition - # this is a modified state. - expected_ref = "unknown_remote/{0}".format(self._branch) - else: - expected_ref = "{0}/{1}".format(remote_name, self._branch) - elif self._hash: - expected_ref = self._hash - elif self._tag: - expected_ref = self._tag - else: - msg = 'In repo "{0}": none of branch, hash or tag are set'.format( - self._name) - fatal_error(msg) - - # record the *names* of the current and expected branches - stat.current_version = self._current_ref(repo_dir_path) - stat.expected_version = copy.deepcopy(expected_ref) - - if current_ref == EMPTY_STR: - stat.sync_state = ExternalStatus.UNKNOWN - else: - # get the underlying hash of the expected ref - revparse_status, expected_ref_hash = self._git_revparse_commit( - expected_ref, repo_dir_path) - if revparse_status: - # We failed to get the hash associated with - # expected_ref. Maybe we should assign this to some special - # status, but for now we're just calling this out-of-sync to - # remain consistent with how this worked before. - stat.sync_state = ExternalStatus.MODEL_MODIFIED - else: - # compare the underlying hashes - stat.sync_state = compare_refs(current_ref, expected_ref_hash) - - @classmethod - def _remote_name_for_url(cls, remote_url, dirname): - """Return the remote name matching remote_url (or None) - - """ - git_output = cls._git_remote_verbose(dirname) - git_output = git_output.splitlines() - for line in git_output: - data = line.strip() - if not data: - continue - data = data.split() - name = data[0].strip() - url = data[1].strip() - if remote_url == url: - return name - return None - - def _create_remote_name(self): - """The url specified in the externals description file was not known - to git. We need to add it, which means adding a unique and - safe name.... - - The assigned name needs to be safe for git to use, e.g. can't - look like a path 'foo/bar' and work with both remote and local paths. - - Remote paths include but are not limited to: git, ssh, https, - github, gitlab, bitbucket, custom server, etc. - - Local paths can be relative or absolute. They may contain - shell variables, e.g. ${REPO_ROOT}/repo_name, or username - expansion, i.e. ~/ or ~someuser/. - - Relative paths must be at least one layer of redirection, i.e. - container/../ext_repo, but may be many layers deep, e.g. - container/../../../../../ext_repo - - NOTE(bja, 2017-11) - - The base name below may not be unique, for example if the - user has local paths like: - - /path/to/my/repos/nice_repo - /path/to/other/repos/nice_repo - - But the current implementation should cover most common - use cases for remotes and still provide usable names. - - """ - url = copy.deepcopy(self._url) - if is_remote_url(url): - url = split_remote_url(url) - else: - url = expand_local_url(url, self._name) - url = url.split('/') - repo_name = url[-1] - base_name = url[-2] - # repo name should nominally already be something that git can - # deal with. We need to remove other possibly troublesome - # punctuation, e.g. /, $, from the base name. 
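As a standalone illustration of the naming scheme described above: take the last two path components of the URL, scrub characters git may object to from the base name, and join the pieces with an underscore. The sample URL is made up, and the real code first normalizes remote versus local URLs before splitting.

def derive_remote_name(url):
    """Sketch of building a git-safe remote name from a URL (illustrative only)."""
    parts = url.rstrip('/').split('/')
    repo_name, base_name = parts[-1], parts[-2]
    for unsafe in '!@#$%^&*()[]{}\\/,;~':
        base_name = base_name.replace(unsafe, '')
    return '{0}_{1}'.format(base_name, repo_name)

# e.g. derive_remote_name('https://github.com/example-org/some_model')
#      -> 'example-org_some_model'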
- unsafe_characters = '!@#$%^&*()[]{}\\/,;~' - for unsafe in unsafe_characters: - base_name = base_name.replace(unsafe, '') - remote_name = "{0}_{1}".format(base_name, repo_name) - return remote_name - - def _checkout_ref(self, repo_dir, verbosity, submodules): - """Checkout the user supplied reference - if is True, recursively initialize and update - the repo's submodules - """ - # import pdb; pdb.set_trace() - if self._url.strip() == LOCAL_PATH_INDICATOR: - self._checkout_local_ref(verbosity, submodules, repo_dir) - else: - self._checkout_external_ref(verbosity, submodules, repo_dir) - - if self._sparse: - self._sparse_checkout(repo_dir, verbosity) - - - def _checkout_local_ref(self, verbosity, submodules, dirname): - """Checkout the reference considering the local repo only. Do not - fetch any additional remotes or specify the remote when - checkout out the ref. - if is True, recursively initialize and update - the repo's submodules - """ - if self._tag: - ref = self._tag - elif self._branch: - ref = self._branch - else: - ref = self._hash - - self._check_for_valid_ref(ref, remote_name=None, - dirname=dirname) - self._git_checkout_ref(ref, verbosity, submodules, dirname) - - def _checkout_external_ref(self, verbosity, submodules, dirname): - """Checkout the reference from a remote repository into dirname. - if is True, recursively initialize and update - the repo's submodules. - Note that this results in a 'detached HEAD' state if checking out - a branch, because we check out the remote branch rather than the - local. See https://github.com/ESMCI/manage_externals/issues/34 for - more discussion. - """ - if self._tag: - ref = self._tag - elif self._branch: - ref = self._branch - else: - ref = self._hash - - remote_name = self._remote_name_for_url(self._url, dirname) - if not remote_name: - remote_name = self._create_remote_name() - self._git_remote_add(remote_name, self._url, dirname) - self._git_fetch(remote_name, dirname) - - # NOTE(bja, 2018-03) we need to send separate ref and remote - # name to check_for_vaild_ref, but the combined name to - # checkout_ref! - self._check_for_valid_ref(ref, remote_name, dirname) - - if self._branch: - # Prepend remote name to branch. This means we avoid various - # special cases if the local branch is not tracking the remote or - # cannot be trivially fast-forwarded to match; but, it also - # means we end up in a 'detached HEAD' state. - ref = '{0}/{1}'.format(remote_name, ref) - self._git_checkout_ref(ref, verbosity, submodules, dirname) - - def _sparse_checkout(self, repo_dir, verbosity): - """Use git read-tree to thin the working tree.""" - cmd = ['cp', os.path.join(repo_dir, self._sparse), - os.path.join(repo_dir, - '.git/info/sparse-checkout')] - if verbosity >= VERBOSITY_VERBOSE: - printlog(' {0}'.format(' '.join(cmd))) - execute_subprocess(cmd) - self._git_sparse_checkout(verbosity, repo_dir) - - def _check_for_valid_ref(self, ref, remote_name, dirname): - """Try some basic sanity checks on the user supplied reference so we - can provide a more useful error message than calledprocess - error... - - remote_name can be NOne - """ - is_tag = self._ref_is_tag(ref, dirname) - is_branch = self._ref_is_branch(ref, remote_name, dirname) - is_hash = self._ref_is_hash(ref, dirname) - - is_valid = is_tag or is_branch or is_hash - if not is_valid: - msg = ('In repo "{0}": reference "{1}" does not appear to be a ' - 'valid tag, branch or hash! Please verify the reference ' - 'name (e.g. 
spelling), is available from: {2} '.format( - self._name, ref, self._url)) - fatal_error(msg) - - if is_tag: - is_unique_tag, msg = self._is_unique_tag(ref, remote_name, - dirname) - if not is_unique_tag: - msg = ('In repo "{0}": tag "{1}" {2}'.format( - self._name, self._tag, msg)) - fatal_error(msg) - - return is_valid - - def _is_unique_tag(self, ref, remote_name, dirname): - """Verify that a reference is a valid tag and is unique (not a branch) - - Tags may be tag names, or SHA id's. It is also possible that a - branch and tag have the some name. - - Note: values returned by git_showref_* and git_revparse are - shell return codes, which are zero for success, non-zero for - error! - - """ - is_tag = self._ref_is_tag(ref, dirname) - is_branch = self._ref_is_branch(ref, remote_name, dirname) - is_hash = self._ref_is_hash(ref, dirname) - - msg = '' - is_unique_tag = False - if is_tag and not is_branch: - # unique tag - msg = 'is ok' - is_unique_tag = True - elif is_tag and is_branch: - msg = ('is both a branch and a tag. git may checkout the branch ' - 'instead of the tag depending on your version of git.') - is_unique_tag = False - elif not is_tag and is_branch: - msg = ('is a branch, and not a tag. If you intended to checkout ' - 'a branch, please change the externals description to be ' - 'a branch. If you intended to checkout a tag, it does not ' - 'exist. Please check the name.') - is_unique_tag = False - else: # not is_tag and not is_branch: - if is_hash: - # probably a sha1 or HEAD, etc, we call it a tag - msg = 'is ok' - is_unique_tag = True - else: - # undetermined state. - msg = ('does not appear to be a valid tag, branch or hash! ' - 'Please check the name and repository.') - is_unique_tag = False - - return is_unique_tag, msg - - def _ref_is_tag(self, ref, dirname): - """Verify that a reference is a valid tag according to git. - - Note: values returned by git_showref_* and git_revparse are - shell return codes, which are zero for success, non-zero for - error! - """ - is_tag = False - value = self._git_showref_tag(ref, dirname) - if value == 0: - is_tag = True - return is_tag - - def _ref_is_branch(self, ref, remote_name, dirname): - """Verify if a ref is any kind of branch (local, tracked remote, - untracked remote). - - remote_name can be None. - """ - local_branch = False - remote_branch = False - if remote_name: - remote_branch = self._ref_is_remote_branch(ref, remote_name, - dirname) - local_branch = self._ref_is_local_branch(ref, dirname) - - is_branch = False - if local_branch or remote_branch: - is_branch = True - return is_branch - - def _ref_is_local_branch(self, ref, dirname): - """Verify that a reference is a valid branch according to git. - - show-ref branch returns local branches that have been - previously checked out. It will not necessarily pick up - untracked remote branches. - - Note: values returned by git_showref_* and git_revparse are - shell return codes, which are zero for success, non-zero for - error! - - """ - is_branch = False - value = self._git_showref_branch(ref, dirname) - if value == 0: - is_branch = True - return is_branch - - def _ref_is_remote_branch(self, ref, remote_name, dirname): - """Verify that a reference is a valid branch according to git. - - show-ref branch returns local branches that have been - previously checked out. It will not necessarily pick up - untracked remote branches. - - Note: values returned by git_showref_* and git_revparse are - shell return codes, which are zero for success, non-zero for - error! 
- - """ - is_branch = False - value = self._git_lsremote_branch(ref, remote_name, dirname) - if value == 0: - is_branch = True - return is_branch - - def _ref_is_commit(self, ref, dirname): - """Verify that a reference is a valid commit according to git. - - This could be a tag, branch, sha1 id, HEAD and potentially others... - - Note: values returned by git_showref_* and git_revparse are - shell return codes, which are zero for success, non-zero for - error! - """ - is_commit = False - value, _ = self._git_revparse_commit(ref, dirname) - if value == 0: - is_commit = True - return is_commit - - def _ref_is_hash(self, ref, dirname): - """Verify that a reference is a valid hash according to git. - - Git doesn't seem to provide an exact way to determine if user - supplied reference is an actual hash. So we verify that the - ref is a valid commit and return the underlying commit - hash. Then check that the commit hash begins with the user - supplied string. - - Note: values returned by git_showref_* and git_revparse are - shell return codes, which are zero for success, non-zero for - error! - - """ - is_hash = False - status, git_output = self._git_revparse_commit(ref, dirname) - if status == 0: - if git_output.strip().startswith(ref): - is_hash = True - return is_hash - - def _status_summary(self, stat, repo_dir_path): - """Determine the clean/dirty status of a git repository - - """ - git_output = self._git_status_porcelain_v1z(repo_dir_path) - is_dirty = self._status_v1z_is_dirty(git_output) - if is_dirty: - stat.clean_state = ExternalStatus.DIRTY - else: - stat.clean_state = ExternalStatus.STATUS_OK - - # Now save the verbose status output incase the user wants to - # see it. - stat.status_output = self._git_status_verbose(repo_dir_path) - - @staticmethod - def _status_v1z_is_dirty(git_output): - """Parse the git status output from --porcelain=v1 -z and determine if - the repo status is clean or dirty. Dirty means: - - * modified files - * missing files - * added files - * removed - * renamed - * unmerged - - Whether untracked files are considered depends on how the status - command was run (i.e., whether it was run with the '-u' option). - - NOTE: Based on the above definition, the porcelain status - should be an empty string to be considered 'clean'. Of course - this assumes we only get an empty string from an status - command on a clean checkout, and not some error - condition... Could alse use 'git diff --quiet'. - - """ - is_dirty = False - if git_output: - is_dirty = True - return is_dirty - - # ---------------------------------------------------------------- - # - # system call to git for information gathering - # - # ---------------------------------------------------------------- - @staticmethod - def _git_current_hash(dirname): - """Return the full hash of the currently checked-out version. - - Returns a tuple, (hash_found, hash), where hash_found is a - logical specifying whether a hash was found for HEAD (False - could mean we're not in a git repository at all). (If hash_found - is False, then hash is ''.) - """ - status, git_output = GitRepository._git_revparse_commit("HEAD", - dirname) - hash_found = not status - if not hash_found: - git_output = '' - return hash_found, git_output - - @staticmethod - def _git_current_remote_branch(dirname): - """Determines the name of the current remote branch, if any. - - if dir is None, uses the cwd. - - Returns a tuple, (branch_found, branch_name), where branch_found - is a bool specifying whether a branch name was found for - HEAD. 
(If branch_found is False, then branch_name is ''). - branch_name is in the format '$remote/$branch', e.g. 'origin/foo'. - """ - branch_found = False - branch_name = '' - - cmd = 'git -C {dirname} log -n 1 --pretty=%d HEAD'.format( - dirname=dirname).split() - status, git_output = execute_subprocess(cmd, - output_to_caller=True, - status_to_caller=True) - branch_found = 'HEAD,' in git_output - if branch_found: - # git_output is of the form " (HEAD, origin/blah)" - branch_name = git_output.split(',')[1].strip()[:-1] - return branch_found, branch_name - - @staticmethod - def _git_current_branch(dirname): - """Determines the name of the current local branch. - - Returns a tuple, (branch_found, branch_name), where branch_found - is a bool specifying whether a branch name was found for - HEAD. (If branch_found is False, then branch_name is ''.) - Note that currently we check out the remote branch rather than - the local, so this command does not return the just-checked-out - branch. See _git_current_remote_branch. - """ - cmd = 'git -C {dirname} symbolic-ref --short -q HEAD'.format( - dirname=dirname).split() - status, git_output = execute_subprocess(cmd, - output_to_caller=True, - status_to_caller=True) - branch_found = not status - if branch_found: - git_output = git_output.strip() - else: - git_output = '' - return branch_found, git_output - - @staticmethod - def _git_current_tag(dirname): - """Determines the name tag corresponding to HEAD (if any). - - if dirname is None, uses the cwd. - - Returns a tuple, (tag_found, tag_name), where tag_found is a - bool specifying whether we found a tag name corresponding to - HEAD. (If tag_found is False, then tag_name is ''.) - """ - cmd = 'git -C {dirname} describe --exact-match --tags HEAD'.format( - dirname=dirname).split() - status, git_output = execute_subprocess(cmd, - output_to_caller=True, - status_to_caller=True) - tag_found = not status - if tag_found: - git_output = git_output.strip() - else: - git_output = '' - return tag_found, git_output - - @staticmethod - def _git_showref_tag(ref, dirname): - """Run git show-ref check if the user supplied ref is a tag. - - could also use git rev-parse --quiet --verify tagname^{tag} - """ - cmd = ('git -C {dirname} show-ref --quiet --verify refs/tags/{ref}' - .format(dirname=dirname, ref=ref).split()) - status = execute_subprocess(cmd, status_to_caller=True) - return status - - @staticmethod - def _git_showref_branch(ref, dirname): - """Run git show-ref check if the user supplied ref is a local or - tracked remote branch. - - """ - cmd = ('git -C {dirname} show-ref --quiet --verify refs/heads/{ref}' - .format(dirname=dirname, ref=ref).split()) - status = execute_subprocess(cmd, status_to_caller=True) - return status - - @staticmethod - def _git_lsremote_branch(ref, remote_name, dirname): - """Run git ls-remote to check if the user supplied ref is a remote - branch that is not being tracked - - """ - cmd = ('git -C {dirname} ls-remote --exit-code --heads ' - '{remote_name} {ref}').format( - dirname=dirname, remote_name=remote_name, ref=ref).split() - status = execute_subprocess(cmd, status_to_caller=True) - return status - - @staticmethod - def _git_revparse_commit(ref, dirname): - """Run git rev-parse to detect if a reference is a SHA, HEAD or other - valid commit. 
- - """ - cmd = ('git -C {dirname} rev-parse --quiet --verify {ref}^{commit}' - .format(dirname=dirname, ref=ref, commit='{commit}').split()) - status, git_output = execute_subprocess(cmd, status_to_caller=True, - output_to_caller=True) - git_output = git_output.strip() - return status, git_output - - @staticmethod - def _git_status_porcelain_v1z(dirname): - """Run git status to obtain repository information. - - This is run with '--untracked=no' to ignore untracked files. - - The machine-portable format that is guaranteed not to change - between git versions or *user configuration*. - - """ - cmd = ('git -C {dirname} status --untracked-files=no --porcelain -z' - .format(dirname=dirname)).split() - git_output = execute_subprocess(cmd, output_to_caller=True) - return git_output - - @staticmethod - def _git_status_verbose(dirname): - """Run the git status command to obtain repository information. - """ - cmd = 'git -C {dirname} status'.format(dirname=dirname).split() - git_output = execute_subprocess(cmd, output_to_caller=True) - return git_output - - @staticmethod - def _git_remote_verbose(dirname): - """Run the git remote command to obtain repository information. - - Returned string is of the form: - myfork git@github.com:johnpaulalex/manage_externals_jp.git (fetch) - myfork git@github.com:johnpaulalex/manage_externals_jp.git (push) - """ - cmd = 'git -C {dirname} remote --verbose'.format( - dirname=dirname).split() - return execute_subprocess(cmd, output_to_caller=True) - - @staticmethod - def has_submodules(repo_dir_path): - """Return True iff the repository at has a - '.gitmodules' file - """ - fname = os.path.join(repo_dir_path, - ExternalsDescription.GIT_SUBMODULES_FILENAME) - - return os.path.exists(fname) - - # ---------------------------------------------------------------- - # - # system call to git for sideffects modifying the working tree - # - # ---------------------------------------------------------------- - @staticmethod - def _git_clone(url, repo_dir_name, verbosity): - """Clones url into repo_dir_name. - """ - cmd = 'git clone --quiet {url} {repo_dir_name}'.format( - url=url, repo_dir_name=repo_dir_name).split() - if verbosity >= VERBOSITY_VERBOSE: - printlog(' {0}'.format(' '.join(cmd))) - execute_subprocess(cmd) - - @staticmethod - def _git_remote_add(name, url, dirname): - """Run the git remote command for the side effect of adding a remote - """ - cmd = 'git -C {dirname} remote add {name} {url}'.format( - dirname=dirname, name=name, url=url).split() - execute_subprocess(cmd) - - @staticmethod - def _git_fetch(remote_name, dirname): - """Run the git fetch command for the side effect of updating the repo - """ - cmd = 'git -C {dirname} fetch --quiet --tags {remote_name}'.format( - dirname=dirname, remote_name=remote_name).split() - execute_subprocess(cmd) - - @staticmethod - def _git_checkout_ref(ref, verbosity, submodules, dirname): - """Run the git checkout command for the side effect of updating the repo - - Param: ref is a reference to a local or remote object in the - form 'origin/my_feature', or 'tag1'. 
- - """ - cmd = 'git -C {dirname} checkout --quiet {ref}'.format( - dirname=dirname, ref=ref).split() - if verbosity >= VERBOSITY_VERBOSE: - printlog(' {0}'.format(' '.join(cmd))) - execute_subprocess(cmd) - if submodules: - GitRepository._git_update_submodules(verbosity, dirname) - - @staticmethod - def _git_sparse_checkout(verbosity, dirname): - """Configure repo via read-tree.""" - cmd = 'git -C {dirname} config core.sparsecheckout true'.format( - dirname=dirname).split() - if verbosity >= VERBOSITY_VERBOSE: - printlog(' {0}'.format(' '.join(cmd))) - execute_subprocess(cmd) - cmd = 'git -C {dirname} read-tree -mu HEAD'.format( - dirname=dirname).split() - if verbosity >= VERBOSITY_VERBOSE: - printlog(' {0}'.format(' '.join(cmd))) - execute_subprocess(cmd) - - @staticmethod - def _git_update_submodules(verbosity, dirname): - """Run git submodule update for the side effect of updating this - repo's submodules. - """ - # First, verify that we have a .gitmodules file - if os.path.exists( - os.path.join(dirname, - ExternalsDescription.GIT_SUBMODULES_FILENAME)): - cmd = ('git -C {dirname} submodule update --init --recursive' - .format(dirname=dirname)).split() - if verbosity >= VERBOSITY_VERBOSE: - printlog(' {0}'.format(' '.join(cmd))) - - execute_subprocess(cmd) diff --git a/manage_externals/manic/repository_svn.py b/manage_externals/manic/repository_svn.py deleted file mode 100644 index 922855d34e..0000000000 --- a/manage_externals/manic/repository_svn.py +++ /dev/null @@ -1,288 +0,0 @@ -"""Class for interacting with svn repositories -""" - -from __future__ import absolute_import -from __future__ import unicode_literals -from __future__ import print_function - -import os -import re -import xml.etree.ElementTree as ET - -from .global_constants import EMPTY_STR, VERBOSITY_VERBOSE -from .repository import Repository -from .externals_status import ExternalStatus -from .utils import fatal_error, indent_string, printlog -from .utils import execute_subprocess - - -class SvnRepository(Repository): - """ - Class to represent and operate on a repository description. - - For testing purpose, all system calls to svn should: - - * be isolated in separate functions with no application logic - * of the form: - - cmd = ['svn', ...] - - value = execute_subprocess(cmd, output_to_caller={T|F}, - status_to_caller={T|F}) - - return value - * be static methods (not rely on self) - * name as _svn_subcommand_args(user_args) - - This convention allows easy unit testing of the repository logic - by mocking the specific calls to return predefined results. - - """ - RE_URLLINE = re.compile(r'^URL:') - - def __init__(self, component_name, repo, ignore_ancestry=False): - """ - Parse repo (a XML element). - """ - Repository.__init__(self, component_name, repo) - self._ignore_ancestry = ignore_ancestry - if self._url.endswith('/'): - # there is already a '/' separator in the URL; no need to add another - url_sep = '' - else: - url_sep = '/' - if self._branch: - self._url = self._url + url_sep + self._branch - elif self._tag: - self._url = self._url + url_sep + self._tag - else: - msg = "DEV_ERROR in svn repository. Shouldn't be here!" 
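The svn URL handling just above reduces to a small rule: append the branch or tag to the repository URL, adding a '/' only when the URL does not already end in one. A hedged standalone sketch, with made-up inputs:

def svn_working_url(repo_url, branch=None, tag=None):
    """Sketch of composing the svn URL that is actually checked out (illustrative)."""
    sep = '' if repo_url.endswith('/') else '/'
    ref = branch or tag
    if not ref:
        raise ValueError('svn externals must specify a branch or a tag')
    return repo_url + sep + ref

# e.g. svn_working_url('https://example.org/svn/project', tag='tags/v1.2')
#      -> 'https://example.org/svn/project/tags/v1.2'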
- fatal_error(msg) - - # ---------------------------------------------------------------- - # - # Public API, defined by Repository - # - # ---------------------------------------------------------------- - def checkout(self, base_dir_path, repo_dir_name, verbosity, recursive): # pylint: disable=unused-argument - """Checkout or update the working copy - - If the repo destination directory exists, switch the sandbox to - match the externals description. - - If the repo destination directory does not exist, checkout the - correct branch or tag. - NB: is include as an argument for compatibility with - git functionality (repository_git.py) - - """ - repo_dir_path = os.path.join(base_dir_path, repo_dir_name) - if os.path.exists(repo_dir_path): - cwd = os.getcwd() - os.chdir(repo_dir_path) - self._svn_switch(self._url, self._ignore_ancestry, verbosity) - # svn switch can lead to a conflict state, but it gives a - # return code of 0. So now we need to make sure that we're - # in a clean (non-conflict) state. - self._abort_if_dirty(repo_dir_path, - "Expected clean state following switch") - os.chdir(cwd) - else: - self._svn_checkout(self._url, repo_dir_path, verbosity) - - def status(self, stat, repo_dir_path): - """ - Check and report the status of the repository - """ - self._check_sync(stat, repo_dir_path) - if os.path.exists(repo_dir_path): - self._status_summary(stat, repo_dir_path) - - # ---------------------------------------------------------------- - # - # Internal work functions - # - # ---------------------------------------------------------------- - def _check_sync(self, stat, repo_dir_path): - """Check to see if repository directory exists and is at the expected - url. Return: status object - - """ - if not os.path.exists(repo_dir_path): - # NOTE(bja, 2017-10) this state should have been handled by - # the source object and we never get here! - stat.sync_state = ExternalStatus.STATUS_ERROR - else: - svn_output = self._svn_info(repo_dir_path) - if not svn_output: - # directory exists, but info returned nothing. .svn - # directory removed or incomplete checkout? - stat.sync_state = ExternalStatus.UNKNOWN - else: - stat.sync_state, stat.current_version = \ - self._check_url(svn_output, self._url) - stat.expected_version = '/'.join(self._url.split('/')[3:]) - - def _abort_if_dirty(self, repo_dir_path, message): - """Check if the repo is in a dirty state; if so, abort with a - helpful message. - - """ - - stat = ExternalStatus() - self._status_summary(stat, repo_dir_path) - if stat.clean_state != ExternalStatus.STATUS_OK: - status = self._svn_status_verbose(repo_dir_path) - status = indent_string(status, 4) - errmsg = """In directory - {cwd} - -svn status now shows: -{status} - -ERROR: {message} - -One possible cause of this problem is that there may have been untracked -files in your working directory that had the same name as tracked files -in the new revision. - -To recover: Clean up the above directory (resolving conflicts, etc.), -then rerun checkout_externals. -""".format(cwd=repo_dir_path, message=message, status=status) - - fatal_error(errmsg) - - @staticmethod - def _check_url(svn_output, expected_url): - """Determine the svn url from svn info output and return whether it - matches the expected value. 
- - """ - url = None - for line in svn_output.splitlines(): - if SvnRepository.RE_URLLINE.match(line): - url = line.split(': ')[1].strip() - break - if not url: - status = ExternalStatus.UNKNOWN - elif url == expected_url: - status = ExternalStatus.STATUS_OK - else: - status = ExternalStatus.MODEL_MODIFIED - - if url: - current_version = '/'.join(url.split('/')[3:]) - else: - current_version = EMPTY_STR - - return status, current_version - - def _status_summary(self, stat, repo_dir_path): - """Report whether the svn repository is in-sync with the model - description and whether the sandbox is clean or dirty. - - """ - svn_output = self._svn_status_xml(repo_dir_path) - is_dirty = self.xml_status_is_dirty(svn_output) - if is_dirty: - stat.clean_state = ExternalStatus.DIRTY - else: - stat.clean_state = ExternalStatus.STATUS_OK - - # Now save the verbose status output incase the user wants to - # see it. - stat.status_output = self._svn_status_verbose(repo_dir_path) - - @staticmethod - def xml_status_is_dirty(svn_output): - """Parse svn status xml output and determine if the working copy is - clean or dirty. Dirty is defined as: - - * modified files - * added files - * deleted files - * missing files - - Unversioned files do not affect the clean/dirty status. - - 'external' is also an acceptable state - - """ - # pylint: disable=invalid-name - SVN_EXTERNAL = 'external' - SVN_UNVERSIONED = 'unversioned' - # pylint: enable=invalid-name - - is_dirty = False - try: - xml_status = ET.fromstring(svn_output) - except BaseException: - fatal_error( - "SVN returned invalid XML message {}".format(svn_output)) - xml_target = xml_status.find('./target') - entries = xml_target.findall('./entry') - for entry in entries: - status = entry.find('./wc-status') - item = status.get('item') - if item == SVN_EXTERNAL: - continue - if item == SVN_UNVERSIONED: - continue - is_dirty = True - break - return is_dirty - - # ---------------------------------------------------------------- - # - # system call to svn for information gathering - # - # ---------------------------------------------------------------- - @staticmethod - def _svn_info(repo_dir_path): - """Return results of svn info command - """ - cmd = ['svn', 'info', repo_dir_path] - output = execute_subprocess(cmd, output_to_caller=True) - return output - - @staticmethod - def _svn_status_verbose(repo_dir_path): - """capture the full svn status output - """ - cmd = ['svn', 'status', repo_dir_path] - svn_output = execute_subprocess(cmd, output_to_caller=True) - return svn_output - - @staticmethod - def _svn_status_xml(repo_dir_path): - """ - Get status of the subversion sandbox in repo_dir - """ - cmd = ['svn', 'status', '--xml', repo_dir_path] - svn_output = execute_subprocess(cmd, output_to_caller=True) - return svn_output - - # ---------------------------------------------------------------- - # - # system call to svn for sideffects modifying the working tree - # - # ---------------------------------------------------------------- - @staticmethod - def _svn_checkout(url, repo_dir_path, verbosity): - """ - Checkout a subversion repository (repo_url) to checkout_dir. 
- """ - cmd = ['svn', 'checkout', '--quiet', url, repo_dir_path] - if verbosity >= VERBOSITY_VERBOSE: - printlog(' {0}'.format(' '.join(cmd))) - execute_subprocess(cmd) - - @staticmethod - def _svn_switch(url, ignore_ancestry, verbosity): - """ - Switch branches for in an svn sandbox - """ - cmd = ['svn', 'switch', '--quiet'] - if ignore_ancestry: - cmd.append('--ignore-ancestry') - cmd.append(url) - if verbosity >= VERBOSITY_VERBOSE: - printlog(' {0}'.format(' '.join(cmd))) - execute_subprocess(cmd) diff --git a/manage_externals/manic/sourcetree.py b/manage_externals/manic/sourcetree.py deleted file mode 100644 index cf2a5b7569..0000000000 --- a/manage_externals/manic/sourcetree.py +++ /dev/null @@ -1,425 +0,0 @@ -""" -Classes to represent an externals config file (SourceTree) and the components -within it (_External). -""" - -import errno -import logging -import os - -from .externals_description import ExternalsDescription -from .externals_description import read_externals_description_file -from .externals_description import create_externals_description -from .repository_factory import create_repository -from .repository_git import GitRepository -from .externals_status import ExternalStatus -from .utils import fatal_error, printlog -from .global_constants import EMPTY_STR, LOCAL_PATH_INDICATOR -from .global_constants import VERBOSITY_VERBOSE - -class _External(object): - """ - A single component hosted in an external repository (and any children). - - The component may or may not be checked-out upon construction. - """ - # pylint: disable=R0902 - - def __init__(self, root_dir, name, local_path, required, subexternals_path, - repo, svn_ignore_ancestry, subexternal_sourcetree): - """Create a single external component (checked out or not). - - Input: - root_dir : string - the (checked-out) parent repo's root dir. - local_path : string - this external's (checked-out) subdir relative - to root_dir, e.g. "components/mom" - repo: Repository - the repo object for this external. Can be None (e.g. if this external just refers to another external file). - - name : string - name of this external (as named by the parent - reference). May or may not correspond to something in the path. - - ext_description : dict - source ExternalsDescription object - - svn_ignore_ancestry : bool - use --ignore-externals with svn switch - - subexternals_path: string - path to sub-externals config file, if any. Relative to local_path, or special value 'none'. - subexternal_sourcetree: SourceTree - corresponding to subexternals_path, if subexternals_path exists (it might not, if it is not checked out yet). - """ - self._name = name - self._required = required - - self._stat = None # Populated in status() - - self._local_path = local_path - # _repo_dir_path : full repository directory, e.g. - # "/components/mom" - repo_dir = os.path.join(root_dir, local_path) - self._repo_dir_path = os.path.abspath(repo_dir) - # _base_dir_path : base directory *containing* the repository, e.g. - # "/components" - self._base_dir_path = os.path.dirname(self._repo_dir_path) - # _repo_dir_name : base_dir_path + repo_dir_name = repo_dir_path - # e.g., "mom" - self._repo_dir_name = os.path.basename(self._repo_dir_path) - self._repo = repo - - # Does this component have subcomponents aka an externals config? 
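The path bookkeeping set up here can be seen in isolation with illustrative values; root_dir and local_path below are placeholders, not values taken from this repository.

import os

root_dir = '/path/to/parent_repo'      # checked-out parent repo (placeholder)
local_path = 'components/foo'          # external's subdirectory (placeholder)

repo_dir_path = os.path.abspath(os.path.join(root_dir, local_path))
base_dir_path = os.path.dirname(repo_dir_path)    # directory containing the external
repo_dir_name = os.path.basename(repo_dir_path)   # 'foo'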
- self._subexternals_path = subexternals_path - self._subexternal_sourcetree = subexternal_sourcetree - - - def get_name(self): - """ - Return the external object's name - """ - return self._name - - def get_local_path(self): - """ - Return the external object's path - """ - return self._local_path - - def get_repo_dir_path(self): - return self._repo_dir_path - - def get_subexternals_path(self): - return self._subexternals_path - - def get_repo(self): - return self._repo - - def status(self, force=False, print_progress=False): - """ - Returns status of this component and all subcomponents. - - Returns a dict mapping our local path (not component name!) to an - ExternalStatus dict. Any subcomponents will have their own top-level - path keys. Note the return value includes entries for this and all - subcomponents regardless of whether they are locally installed or not. - - Side-effect: If self._stat is empty or force is True, calculates _stat. - """ - calc_stat = force or not self._stat - - if calc_stat: - self._stat = ExternalStatus() - self._stat.path = self.get_local_path() - if not self._required: - self._stat.source_type = ExternalStatus.OPTIONAL - elif self._local_path == LOCAL_PATH_INDICATOR: - # LOCAL_PATH_INDICATOR, '.' paths, are standalone - # component directories that are not managed by - # checkout_subexternals. - self._stat.source_type = ExternalStatus.STANDALONE - else: - # managed by checkout_subexternals - self._stat.source_type = ExternalStatus.MANAGED - - subcomponent_stats = {} - if not os.path.exists(self._repo_dir_path): - if calc_stat: - # No local repository. - self._stat.sync_state = ExternalStatus.EMPTY - msg = ('status check: repository directory for "{0}" does not ' - 'exist.'.format(self._name)) - logging.info(msg) - self._stat.current_version = 'not checked out' - # NOTE(bja, 2018-01) directory doesn't exist, so we cannot - # use repo to determine the expected version. We just take - # a best-guess based on the assumption that only tag or - # branch should be set, but not both. - if not self._repo: - self._stat.expected_version = 'unknown' - else: - self._stat.expected_version = self._repo.tag() + self._repo.branch() - else: - # Merge local repository state (e.g. clean/dirty) into self._stat. - if calc_stat and self._repo: - self._repo.status(self._stat, self._repo_dir_path) - - # Status of subcomponents, if any. - if self._subexternals_path and self._subexternal_sourcetree: - cwd = os.getcwd() - # SourceTree.status() expects to be called from the correct - # root directory. - os.chdir(self._repo_dir_path) - subcomponent_stats = self._subexternal_sourcetree.status(self._local_path, force=force, print_progress=print_progress) - os.chdir(cwd) - - # Merge our status + subcomponent statuses into one return dict keyed - # by component path. - all_stats = {} - # don't add the root component because we don't manage it - # and can't provide useful info about it. - if self._local_path != LOCAL_PATH_INDICATOR: - # store the stats under the local_path, not comp name so - # it will be sorted correctly - all_stats[self._stat.path] = self._stat - - if subcomponent_stats: - all_stats.update(subcomponent_stats) - - return all_stats - - def checkout(self, verbosity): - """ - If the repo destination directory exists, ensure it is correct (from - correct URL, correct branch or tag), and possibly updateit. - If the repo destination directory does not exist, checkout the correct - branch or tag. - Does not check out sub-externals, see SourceTree.checkout(). 
- """ - # Make sure we are in correct location - if not os.path.exists(self._repo_dir_path): - # repository directory doesn't exist. Need to check it - # out, and for that we need the base_dir_path to exist - try: - os.makedirs(self._base_dir_path) - except OSError as error: - if error.errno != errno.EEXIST: - msg = 'Could not create directory "{0}"'.format( - self._base_dir_path) - fatal_error(msg) - - if not self._stat: - self.status() - assert self._stat - - if self._stat.source_type != ExternalStatus.STANDALONE: - if verbosity >= VERBOSITY_VERBOSE: - # NOTE(bja, 2018-01) probably do not want to pass - # verbosity in this case, because if (verbosity == - # VERBOSITY_DUMP), then the previous status output would - # also be dumped, adding noise to the output. - self._stat.log_status_message(VERBOSITY_VERBOSE) - - if self._repo: - if self._stat.sync_state == ExternalStatus.STATUS_OK: - # If we're already in sync, avoid showing verbose output - # from the checkout command, unless the verbosity level - # is 2 or more. - checkout_verbosity = verbosity - 1 - else: - checkout_verbosity = verbosity - - self._repo.checkout(self._base_dir_path, self._repo_dir_name, - checkout_verbosity, self.clone_recursive()) - - def replace_subexternal_sourcetree(self, sourcetree): - self._subexternal_sourcetree = sourcetree - - def clone_recursive(self): - 'Return True iff any .gitmodules files should be processed' - # Try recursive .gitmodules unless there is an externals entry - recursive = not self._subexternals_path - - return recursive - - -class SourceTree(object): - """ - SourceTree represents a group of managed externals. - - Those externals may not be checked out locally yet, they might only - have Repository objects pointing to their respective repositories. - """ - - @classmethod - def from_externals_file(cls, parent_repo_dir_path, parent_repo, - externals_path): - """Creates a SourceTree representing the given externals file. - - Looks up a git submodules file as an optional backup if there is no - externals file specified. - - Returns None if there is no externals file (i.e. it's None or 'none'), - or if the externals file hasn't been checked out yet. - - parent_repo_dir_path: parent repo root dir - parent_repo: parent repo. - externals_path: path to externals file, relative to parent_repo_dir_path. - """ - if not os.path.exists(parent_repo_dir_path): - # NOTE(bja, 2017-10) repository has not been checked out - # yet, can't process the externals file. Assume we are - # checking status before code is checkoud out and this - # will be handled correctly later. - return None - - if externals_path.lower() == 'none': - # With explicit 'none', do not look for git submodules file. - return None - - cwd = os.getcwd() - os.chdir(parent_repo_dir_path) - - if not externals_path: - if GitRepository.has_submodules(parent_repo_dir_path): - externals_path = ExternalsDescription.GIT_SUBMODULES_FILENAME - else: - return None - - if not os.path.exists(externals_path): - # NOTE(bja, 2017-10) this check is redundant with the one - # in read_externals_description_file! - msg = ('Externals description file "{0}" ' - 'does not exist! In directory: {1}'.format( - externals_path, parent_repo_dir_path)) - fatal_error(msg) - - externals_root = parent_repo_dir_path - # model_data is a dict-like object which mirrors the file format. 
- model_data = read_externals_description_file(externals_root, - externals_path) - # ext_description is another dict-like object (see ExternalsDescription) - ext_description = create_externals_description(model_data, - parent_repo=parent_repo) - externals_sourcetree = SourceTree(externals_root, ext_description) - os.chdir(cwd) - return externals_sourcetree - - def __init__(self, root_dir, ext_description, svn_ignore_ancestry=False): - """ - Build a SourceTree object from an ExternalDescription. - - root_dir: the (checked-out) parent repo root dir. - """ - self._root_dir = os.path.abspath(root_dir) - self._all_components = {} # component_name -> _External - self._required_compnames = [] - for comp, desc in ext_description.items(): - local_path = desc[ExternalsDescription.PATH] - required = desc[ExternalsDescription.REQUIRED] - repo_info = desc[ExternalsDescription.REPO] - subexternals_path = desc[ExternalsDescription.EXTERNALS] - - repo = create_repository(comp, - repo_info, - svn_ignore_ancestry=svn_ignore_ancestry) - - sourcetree = None - # Treat a .gitmodules file as a backup externals config - if not subexternals_path: - parent_repo_dir_path = os.path.abspath(os.path.join(root_dir, - local_path)) - if GitRepository.has_submodules(parent_repo_dir_path): - subexternals_path = ExternalsDescription.GIT_SUBMODULES_FILENAME - - # Might return None (if the subexternal isn't checked out yet, or subexternal is None or 'none') - subexternal_sourcetree = SourceTree.from_externals_file( - os.path.join(self._root_dir, local_path), - repo, - subexternals_path) - src = _External(self._root_dir, comp, local_path, required, - subexternals_path, repo, svn_ignore_ancestry, - subexternal_sourcetree) - - self._all_components[comp] = src - if required: - self._required_compnames.append(comp) - - def status(self, relative_path_base=LOCAL_PATH_INDICATOR, - force=False, print_progress=False): - """Return a dictionary of local path->ExternalStatus. - - Notes about the returned dictionary: - * It is keyed by local path (e.g. 'components/mom'), not by - component name (e.g. 'mom'). - * It contains top-level keys for all traversed components, whether - discovered by recursion or top-level. - * It contains entries for all components regardless of whether they - are locally installed or not, or required or optional. -x """ - load_comps = self._all_components.keys() - - summary = {} # Holds merged statuses from all components. - for comp in load_comps: - if print_progress: - printlog('{0}, '.format(comp), end='') - stat = self._all_components[comp].status(force=force, - print_progress=print_progress) - - # Returned status dictionary is keyed by local path; prepend - # relative_path_base if not already there. - stat_final = {} - for name in stat.keys(): - if stat[name].path.startswith(relative_path_base): - stat_final[name] = stat[name] - else: - modified_path = os.path.join(relative_path_base, - stat[name].path) - stat_final[modified_path] = stat[name] - stat_final[modified_path].path = modified_path - summary.update(stat_final) - - return summary - - def _find_installed_optional_components(self): - """Returns a list of installed optional component names, if any.""" - installed_comps = [] - for comp_name, ext in self._all_components.items(): - if comp_name in self._required_compnames: - continue - # Note that in practice we expect this status to be cached. - path_to_stat = ext.status() - - # If any part of this component exists locally, consider it - # installed and therefore eligible for updating. 
- if any(s.sync_state != ExternalStatus.EMPTY - for s in path_to_stat.values()): - installed_comps.append(comp_name) - return installed_comps - - def checkout(self, verbosity, load_all, load_comp=None): - """ - Checkout or update indicated components into the configured subdirs. - - If load_all is True, checkout all externals (required + optional), recursively. - If load_all is False and load_comp is set, checkout load_comp (and any required subexternals, plus any optional subexternals that are already checked out, recursively) - If load_all is False and load_comp is None, checkout all required externals, plus any optionals that are already checked out, recursively. - """ - if load_all: - tmp_comps = self._all_components.keys() - elif load_comp is not None: - tmp_comps = [load_comp] - else: - local_optional_compnames = self._find_installed_optional_components() - tmp_comps = self._required_compnames + local_optional_compnames - if local_optional_compnames: - printlog('Found locally installed optional components: ' + - ', '.join(local_optional_compnames)) - bad_compnames = set(local_optional_compnames) - set(self._all_components.keys()) - if bad_compnames: - printlog('Internal error: found locally installed components that are not in the global list of all components: ' + ','.join(bad_compnames)) - - if verbosity >= VERBOSITY_VERBOSE: - printlog('Checking out externals: ') - else: - printlog('Checking out externals: ', end='') - - # Sort by path so that if paths are nested the - # parent repo is checked out first. - load_comps = sorted(tmp_comps, key=lambda comp: self._all_components[comp].get_local_path()) - - # checkout. - for comp_name in load_comps: - if verbosity < VERBOSITY_VERBOSE: - printlog('{0}, '.format(comp_name), end='') - else: - # verbose output handled by the _External object, just - # output a newline - printlog(EMPTY_STR) - c = self._all_components[comp_name] - # Does not recurse. - c.checkout(verbosity) - # Recursively check out subexternals, if any. Returns None - # if there's no subexternals path. - component_subexternal_sourcetree = SourceTree.from_externals_file( - c.get_repo_dir_path(), - c.get_repo(), - c.get_subexternals_path()) - c.replace_subexternal_sourcetree(component_subexternal_sourcetree) - if component_subexternal_sourcetree: - component_subexternal_sourcetree.checkout(verbosity, load_all) - printlog('') diff --git a/manage_externals/manic/utils.py b/manage_externals/manic/utils.py deleted file mode 100644 index 9c63ffe65e..0000000000 --- a/manage_externals/manic/utils.py +++ /dev/null @@ -1,330 +0,0 @@ -#!/usr/bin/env python3 -""" -Common public utilities for manic package - -""" - -from __future__ import absolute_import -from __future__ import unicode_literals -from __future__ import print_function - -import logging -import os -import subprocess -import sys -from threading import Timer - -from .global_constants import LOCAL_PATH_INDICATOR - -# --------------------------------------------------------------------- -# -# screen and logging output and functions to massage text for output -# -# --------------------------------------------------------------------- - - -def log_process_output(output): - """Log each line of process output at debug level so it can be - filtered if necessary. By default, output is a single string, and - logging.debug(output) will only put log info heading on the first - line. This makes it hard to filter with grep. 
- - """ - output = output.split('\n') - for line in output: - logging.debug(line) - - -def printlog(msg, **kwargs): - """Wrapper script around print to ensure that everything printed to - the screen also gets logged. - - """ - logging.info(msg) - if kwargs: - print(msg, **kwargs) - else: - print(msg) - sys.stdout.flush() - - -def last_n_lines(the_string, n_lines, truncation_message=None): - """Returns the last n lines of the given string - - Args: - the_string: str - n_lines: int - truncation_message: str, optional - - Returns a string containing the last n lines of the_string - - If truncation_message is provided, the returned string begins with - the given message if and only if the string is greater than n lines - to begin with. - """ - - lines = the_string.splitlines(True) - if len(lines) <= n_lines: - return_val = the_string - else: - lines_subset = lines[-n_lines:] - str_truncated = ''.join(lines_subset) - if truncation_message: - str_truncated = truncation_message + '\n' + str_truncated - return_val = str_truncated - - return return_val - - -def indent_string(the_string, indent_level): - """Indents the given string by a given number of spaces - - Args: - the_string: str - indent_level: int - - Returns a new string that is the same as the_string, except that - each line is indented by 'indent_level' spaces. - - In python3, this can be done with textwrap.indent. - """ - - lines = the_string.splitlines(True) - padding = ' ' * indent_level - lines_indented = [padding + line for line in lines] - return ''.join(lines_indented) - -# --------------------------------------------------------------------- -# -# error handling -# -# --------------------------------------------------------------------- - - -def fatal_error(message): - """ - Error output function - """ - logging.error(message) - raise RuntimeError("{0}ERROR: {1}".format(os.linesep, message)) - - -# --------------------------------------------------------------------- -# -# Data conversion / manipulation -# -# --------------------------------------------------------------------- -def str_to_bool(bool_str): - """Convert a sting representation of as boolean into a true boolean. - - Conversion should be case insensitive. - """ - value = None - str_lower = bool_str.lower() - if str_lower in ('true', 't'): - value = True - elif str_lower in ('false', 'f'): - value = False - if value is None: - msg = ('ERROR: invalid boolean string value "{0}". ' - 'Must be "true" or "false"'.format(bool_str)) - fatal_error(msg) - return value - - -REMOTE_PREFIXES = ['http://', 'https://', 'ssh://', 'git@'] - - -def is_remote_url(url): - """check if the user provided a local file path instead of a - remote. If so, it must be expanded to an absolute - path. - - """ - remote_url = False - for prefix in REMOTE_PREFIXES: - if url.startswith(prefix): - remote_url = True - return remote_url - - -def split_remote_url(url): - """check if the user provided a local file path or a - remote. If remote, try to strip off protocol info. - - """ - remote_url = is_remote_url(url) - if not remote_url: - return url - - for prefix in REMOTE_PREFIXES: - url = url.replace(prefix, '') - - if '@' in url: - url = url.split('@')[1] - - if ':' in url: - url = url.split(':')[1] - - return url - - -def expand_local_url(url, field): - """check if the user provided a local file path instead of a - remote. If so, it must be expanded to an absolute - path. - - Note: local paths of LOCAL_PATH_INDICATOR have special meaning and - represent local copy only, don't work with the remotes. 
- - """ - remote_url = is_remote_url(url) - if not remote_url: - if url.strip() == LOCAL_PATH_INDICATOR: - pass - else: - url = os.path.expandvars(url) - url = os.path.expanduser(url) - if not os.path.isabs(url): - msg = ('WARNING: Externals description for "{0}" contains a ' - 'url that is not remote and does not expand to an ' - 'absolute path. Version control operations may ' - 'fail.\n\nurl={1}'.format(field, url)) - printlog(msg) - else: - url = os.path.normpath(url) - return url - - -# --------------------------------------------------------------------- -# -# subprocess -# -# --------------------------------------------------------------------- - -# Give the user a helpful message if we detect that a command seems to -# be hanging. -_HANGING_SEC = 300 - - -def _hanging_msg(working_directory, command): - print(""" - -Command '{command}' -from directory {working_directory} -has taken {hanging_sec} seconds. It may be hanging. - -The command will continue to run, but you may want to abort -manage_externals with ^C and investigate. A possible cause of hangs is -when svn or git require authentication to access a private -repository. On some systems, svn and git requests for authentication -information will not be displayed to the user. In this case, the program -will appear to hang. Ensure you can run svn and git manually and access -all repositories without entering your authentication information. - -""".format(command=command, - working_directory=working_directory, - hanging_sec=_HANGING_SEC)) - - -def execute_subprocess(commands, status_to_caller=False, - output_to_caller=False): - """Wrapper around subprocess.check_output to handle common - exceptions. - - check_output runs a command with arguments and waits - for it to complete. - - check_output raises an exception on a nonzero return code. if - status_to_caller is true, execute_subprocess returns the subprocess - return code, otherwise execute_subprocess treats non-zero return - status as an error and raises an exception. - - """ - cwd = os.getcwd() - msg = 'In directory: {0}\nexecute_subprocess running command:'.format(cwd) - logging.info(msg) - commands_str = ' '.join(commands) - logging.info(commands_str) - return_to_caller = status_to_caller or output_to_caller - status = -1 - output = '' - hanging_timer = Timer(_HANGING_SEC, _hanging_msg, - kwargs={"working_directory": cwd, - "command": commands_str}) - hanging_timer.start() - try: - output = subprocess.check_output(commands, stderr=subprocess.STDOUT, - universal_newlines=True) - log_process_output(output) - status = 0 - except OSError as error: - msg = failed_command_msg( - 'Command execution failed. Does the executable exist?', - commands) - logging.error(error) - fatal_error(msg) - except ValueError as error: - msg = failed_command_msg( - 'DEV_ERROR: Invalid arguments trying to run subprocess', - commands) - logging.error(error) - fatal_error(msg) - except subprocess.CalledProcessError as error: - # Only report the error if we are NOT returning to the - # caller. If we are returning to the caller, then it may be a - # simple status check. If returning, it is the callers - # responsibility determine if an error occurred and handle it - # appropriately. 
- if not return_to_caller: - msg_context = ('Process did not run successfully; ' - 'returned status {0}'.format(error.returncode)) - msg = failed_command_msg(msg_context, commands, - output=error.output) - logging.error(error) - logging.error(msg) - log_process_output(error.output) - fatal_error(msg) - status = error.returncode - finally: - hanging_timer.cancel() - - if status_to_caller and output_to_caller: - ret_value = (status, output) - elif status_to_caller: - ret_value = status - elif output_to_caller: - ret_value = output - else: - ret_value = None - - return ret_value - - -def failed_command_msg(msg_context, command, output=None): - """Template for consistent error messages from subprocess calls. - - If 'output' is given, it should provide the output from the failed - command - """ - - if output: - output_truncated = last_n_lines(output, 20, - truncation_message='[... Output truncated for brevity ...]') - errmsg = ('Failed with output:\n' + - indent_string(output_truncated, 4) + - '\nERROR: ') - else: - errmsg = '' - - command_str = ' '.join(command) - errmsg += """In directory - {cwd} -{context}: - {command} -""".format(cwd=os.getcwd(), context=msg_context, command=command_str) - - if output: - errmsg += 'See above for output from failed command.\n' - - return errmsg diff --git a/manage_externals/test/.coveragerc b/manage_externals/test/.coveragerc deleted file mode 100644 index 8b681888b8..0000000000 --- a/manage_externals/test/.coveragerc +++ /dev/null @@ -1,7 +0,0 @@ -[run] -branch = True -omit = test_unit_*.py - test_sys_*.py - /usr/* - .local/* - */site-packages/* \ No newline at end of file diff --git a/manage_externals/test/.gitignore b/manage_externals/test/.gitignore deleted file mode 100644 index dd5795998f..0000000000 --- a/manage_externals/test/.gitignore +++ /dev/null @@ -1,7 +0,0 @@ -# virtual environments -env_python* - -# python code coverage tool output -.coverage -htmlcov - diff --git a/manage_externals/test/.pylint.rc b/manage_externals/test/.pylint.rc deleted file mode 100644 index 64abd03e42..0000000000 --- a/manage_externals/test/.pylint.rc +++ /dev/null @@ -1,426 +0,0 @@ -[MASTER] - -# A comma-separated list of package or module names from where C extensions may -# be loaded. Extensions are loading into the active Python interpreter and may -# run arbitrary code -extension-pkg-whitelist= - -# Add files or directories to the blacklist. They should be base names, not -# paths. -ignore=.git,.svn,env2 - -# Add files or directories matching the regex patterns to the blacklist. The -# regex matches against base names, not paths. -ignore-patterns= - -# Python code to execute, usually for sys.path manipulation such as -# pygtk.require(). -#init-hook= - -# Use multiple processes to speed up Pylint. -jobs=1 - -# List of plugins (as comma separated values of python modules names) to load, -# usually to register additional checkers. -load-plugins= - -# Pickle collected data for later comparisons. -persistent=yes - -# Specify a configuration file. -#rcfile= - -# Allow loading of arbitrary C extensions. Extensions are imported into the -# active Python interpreter and may run arbitrary code. -unsafe-load-any-extension=no - - -[MESSAGES CONTROL] - -# Only show warnings with the listed confidence levels. Leave empty to show -# all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED -confidence= - -# Disable the message, report, category or checker with the given id(s). 
You -# can either give multiple identifiers separated by comma (,) or put this -# option multiple times (only on the command line, not in the configuration -# file where it should appear only once).You can also use "--disable=all" to -# disable everything first and then reenable specific checks. For example, if -# you want to run only the similarities checker, you can use "--disable=all -# --enable=similarities". If you want to run only the classes checker, but have -# no Warning level messages displayed, use"--disable=all --enable=classes -# --disable=W" -disable=bad-continuation,useless-object-inheritance - - -# Enable the message, report, category or checker with the given id(s). You can -# either give multiple identifier separated by comma (,) or put this option -# multiple time (only on the command line, not in the configuration file where -# it should appear only once). See also the "--disable" option for examples. -enable= - - -[REPORTS] - -# Python expression which should return a note less than 10 (10 is the highest -# note). You have access to the variables errors warning, statement which -# respectively contain the number of errors / warnings messages and the total -# number of statements analyzed. This is used by the global evaluation report -# (RP0004). -evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10) - -# Template used to display messages. This is a python new-style format string -# used to format the message information. See doc for all details -msg-template={msg_id}:{line:3d},{column:2d}: {msg} ({symbol}) - -# Set the output format. Available formats are text, parseable, colorized, json -# and msvs (visual studio).You can also give a reporter class, eg -# mypackage.mymodule.MyReporterClass. -output-format=text - -# Tells whether to display a full report or only the messages -#reports=yes - -# Activate the evaluation score. -score=yes - - -[REFACTORING] - -# Maximum number of nested blocks for function / method body -max-nested-blocks=5 - - -[BASIC] - -# Naming hint for argument names -argument-name-hint=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$ - -# Regular expression matching correct argument names -argument-rgx=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$ - -# Naming hint for attribute names -attr-name-hint=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$ - -# Regular expression matching correct attribute names -attr-rgx=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$ - -# Bad variable names which should always be refused, separated by a comma -bad-names=foo,bar,baz,toto,tutu,tata - -# Naming hint for class attribute names -class-attribute-name-hint=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$ - -# Regular expression matching correct class attribute names -class-attribute-rgx=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$ - -# Naming hint for class names -class-name-hint=[A-Z_][a-zA-Z0-9]+$ - -# Regular expression matching correct class names -class-rgx=[A-Z_][a-zA-Z0-9]+$ - -# Naming hint for constant names -const-name-hint=(([A-Z_][A-Z0-9_]*)|(__.*__))$ - -# Regular expression matching correct constant names -const-rgx=(([A-Z_][A-Z0-9_]*)|(__.*__))$ - -# Minimum line length for functions/classes that require docstrings, shorter -# ones are exempt. 
-docstring-min-length=-1 - -# Naming hint for function names -function-name-hint=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$ - -# Regular expression matching correct function names -function-rgx=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$ - -# Good variable names which should always be accepted, separated by a comma -good-names=i,j,k,ex,Run,_ - -# Include a hint for the correct naming format with invalid-name -include-naming-hint=no - -# Naming hint for inline iteration names -inlinevar-name-hint=[A-Za-z_][A-Za-z0-9_]*$ - -# Regular expression matching correct inline iteration names -inlinevar-rgx=[A-Za-z_][A-Za-z0-9_]*$ - -# Naming hint for method names -method-name-hint=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$ - -# Regular expression matching correct method names -method-rgx=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$ - -# Naming hint for module names -module-name-hint=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$ - -# Regular expression matching correct module names -module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$ - -# Colon-delimited sets of names that determine each other's naming style when -# the name regexes allow several styles. -name-group= - -# Regular expression which should only match function or class names that do -# not require a docstring. -no-docstring-rgx=^_ - -# List of decorators that produce properties, such as abc.abstractproperty. Add -# to this list to register other decorators that produce valid properties. -property-classes=abc.abstractproperty - -# Naming hint for variable names -variable-name-hint=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$ - -# Regular expression matching correct variable names -variable-rgx=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$ - - -[FORMAT] - -# Expected format of line ending, e.g. empty (any line ending), LF or CRLF. -expected-line-ending-format= - -# Regexp for a line that is allowed to be longer than the limit. -ignore-long-lines=^\s*(# )??$ - -# Number of spaces of indent required inside a hanging or continued line. -indent-after-paren=4 - -# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1 -# tab). -indent-string=' ' - -# Maximum number of characters on a single line. -max-line-length=100 - -# Maximum number of lines in a module -max-module-lines=1000 - -# List of optional constructs for which whitespace checking is disabled. `dict- -# separator` is used to allow tabulation in dicts, etc.: {1 : 1,\n222: 2}. -# `trailing-comma` allows a space between comma and closing bracket: (a, ). -# `empty-line` allows space-only lines. -no-space-check=trailing-comma,dict-separator - -# Allow the body of a class to be on the same line as the declaration if body -# contains single statement. -single-line-class-stmt=no - -# Allow the body of an if to be on the same line as the test if there is no -# else. -single-line-if-stmt=no - - -[LOGGING] - -# Logging modules to check that the string format arguments are in logging -# function parameter format -logging-modules=logging - - -[MISCELLANEOUS] - -# List of note tags to take in consideration, separated by a comma. -notes=FIXME,XXX,TODO - - -[SIMILARITIES] - -# Ignore comments when computing similarities. -ignore-comments=yes - -# Ignore docstrings when computing similarities. -ignore-docstrings=yes - -# Ignore imports when computing similarities. -ignore-imports=no - -# Minimum lines number of a similarity. -min-similarity-lines=4 - - -[SPELLING] - -# Spelling dictionary name. Available dictionaries: none. To make it working -# install python-enchant package. 
-spelling-dict= - -# List of comma separated words that should not be checked. -spelling-ignore-words= - -# A path to a file that contains private dictionary; one word per line. -spelling-private-dict-file= - -# Tells whether to store unknown words to indicated private dictionary in -# --spelling-private-dict-file option instead of raising a message. -spelling-store-unknown-words=no - - -[TYPECHECK] - -# List of decorators that produce context managers, such as -# contextlib.contextmanager. Add to this list to register other decorators that -# produce valid context managers. -contextmanager-decorators=contextlib.contextmanager - -# List of members which are set dynamically and missed by pylint inference -# system, and so shouldn't trigger E1101 when accessed. Python regular -# expressions are accepted. -generated-members= - -# Tells whether missing members accessed in mixin class should be ignored. A -# mixin class is detected if its name ends with "mixin" (case insensitive). -ignore-mixin-members=yes - -# This flag controls whether pylint should warn about no-member and similar -# checks whenever an opaque object is returned when inferring. The inference -# can return multiple potential results while evaluating a Python object, but -# some branches might not be evaluated, which results in partial inference. In -# that case, it might be useful to still emit no-member and other checks for -# the rest of the inferred objects. -ignore-on-opaque-inference=yes - -# List of class names for which member attributes should not be checked (useful -# for classes with dynamically set attributes). This supports the use of -# qualified names. -ignored-classes=optparse.Values,thread._local,_thread._local - -# List of module names for which member attributes should not be checked -# (useful for modules/projects where namespaces are manipulated during runtime -# and thus existing member attributes cannot be deduced by static analysis. It -# supports qualified module names, as well as Unix pattern matching. -ignored-modules= - -# Show a hint with possible names when a member name was not found. The aspect -# of finding the hint is based on edit distance. -missing-member-hint=yes - -# The minimum edit distance a name should have in order to be considered a -# similar match for a missing member name. -missing-member-hint-distance=1 - -# The total number of similar names that should be taken in consideration when -# showing a hint for a missing member. -missing-member-max-choices=1 - - -[VARIABLES] - -# List of additional names supposed to be defined in builtins. Remember that -# you should avoid to define new builtins when possible. -additional-builtins= - -# Tells whether unused global variables should be treated as a violation. -allow-global-unused-variables=yes - -# List of strings which can identify a callback function by name. A callback -# name must start or end with one of those strings. -callbacks=cb_,_cb - -# A regular expression matching the name of dummy variables (i.e. expectedly -# not used). -dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_ - -# Argument names that match this expression will be ignored. Default to name -# with leading underscore -ignored-argument-names=_.*|^ignored_|^unused_ - -# Tells whether we should check for unused import in __init__ files. -init-import=no - -# List of qualified module names which can have objects that can redefine -# builtins. 
-redefining-builtins-modules=six.moves,future.builtins - - -[CLASSES] - -# List of method names used to declare (i.e. assign) instance attributes. -defining-attr-methods=__init__,__new__,setUp - -# List of member names, which should be excluded from the protected access -# warning. -exclude-protected=_asdict,_fields,_replace,_source,_make - -# List of valid names for the first argument in a class method. -valid-classmethod-first-arg=cls - -# List of valid names for the first argument in a metaclass class method. -valid-metaclass-classmethod-first-arg=mcs - - -[DESIGN] - -# Maximum number of arguments for function / method -max-args=5 - -# Maximum number of attributes for a class (see R0902). -max-attributes=7 - -# Maximum number of boolean expressions in a if statement -max-bool-expr=5 - -# Maximum number of branch for function / method body -max-branches=12 - -# Maximum number of locals for function / method body -max-locals=15 - -# Maximum number of parents for a class (see R0901). -max-parents=7 - -# Maximum number of public methods for a class (see R0904). -max-public-methods=20 - -# Maximum number of return / yield for function / method body -max-returns=6 - -# Maximum number of statements in function / method body -max-statements=50 - -# Minimum number of public methods for a class (see R0903). -min-public-methods=2 - - -[IMPORTS] - -# Allow wildcard imports from modules that define __all__. -allow-wildcard-with-all=no - -# Analyse import fallback blocks. This can be used to support both Python 2 and -# 3 compatible code, which means that the block might have code that exists -# only in one or another interpreter, leading to false positives when analysed. -analyse-fallback-blocks=no - -# Deprecated modules which should not be used, separated by a comma -deprecated-modules=regsub,TERMIOS,Bastion,rexec - -# Create a graph of external dependencies in the given file (report RP0402 must -# not be disabled) -ext-import-graph= - -# Create a graph of every (i.e. internal and external) dependencies in the -# given file (report RP0402 must not be disabled) -import-graph= - -# Create a graph of internal dependencies in the given file (report RP0402 must -# not be disabled) -int-import-graph= - -# Force import order to recognize a module as part of the standard -# compatibility libraries. -known-standard-library= - -# Force import order to recognize a module as part of a third party library. -known-third-party=enchant - - -[EXCEPTIONS] - -# Exceptions that will emit a warning when being caught. 
Defaults to -# "Exception" -overgeneral-exceptions=Exception diff --git a/manage_externals/test/Makefile b/manage_externals/test/Makefile deleted file mode 100644 index 293e360757..0000000000 --- a/manage_externals/test/Makefile +++ /dev/null @@ -1,124 +0,0 @@ -python = not-set -verbose = not-set -debug = not-set - -ifneq ($(python), not-set) -PYTHON=$(python) -else -PYTHON=python -endif - -# we need the python path to point one level up to access the package -# and executables -PYPATH=PYTHONPATH=..: - -# common args for running tests -TEST_ARGS=-m unittest discover - -ifeq ($(debug), not-set) - ifeq ($(verbose), not-set) - # summary only output - TEST_ARGS+=--buffer - else - # show individual test summary - TEST_ARGS+=--buffer --verbose - endif -else - # show detailed test output - TEST_ARGS+=--verbose -endif - - -# auto reformat the code -AUTOPEP8=autopep8 -AUTOPEP8_ARGS=--aggressive --in-place - -# run lint -PYLINT=pylint -PYLINT_ARGS=-j 2 --rcfile=.pylint.rc - -# code coverage -COVERAGE=coverage -COVERAGE_ARGS=--rcfile=.coveragerc - -# source files -SRC = \ - ../checkout_externals \ - ../manic/*.py - -CHECKOUT_EXE = ../checkout_externals - -TEST_DIR = . - -README = ../README.md - -# -# testing -# -.PHONY : utest -utest : FORCE - $(PYPATH) $(PYTHON) $(TEST_ARGS) --pattern 'test_unit_*.py' - -.PHONY : stest -stest : FORCE - $(PYPATH) $(PYTHON) $(TEST_ARGS) --pattern 'test_sys_*.py' - -.PHONY : test -test : utest stest - -# -# documentation -# -.PHONY : readme -readme : $(CHECKOUT_EXE) - printf "%s\n\n" "-- AUTOMATICALLY GENERATED FILE. DO NOT EDIT --" > $(README) - printf "%s" '[![Build Status](https://travis-ci.org/ESMCI/manage_externals.svg?branch=master)](https://travis-ci.org/ESMCI/manage_externals)' >> $(README) - printf "%s" '[![Coverage Status](https://coveralls.io/repos/github/ESMCI/manage_externals/badge.svg?branch=master)](https://coveralls.io/github/ESMCI/manage_externals?branch=master)' >> $(README) - printf "\n%s\n" '```' >> $(README) - $(CHECKOUT_EXE) --help >> $(README) - -# -# coding standards -# -.PHONY : style -style : FORCE - $(AUTOPEP8) $(AUTOPEP8_ARGS) --recursive $(SRC) $(TEST_DIR)/test_*.py - -.PHONY : lint -lint : FORCE - $(PYLINT) $(PYLINT_ARGS) $(SRC) $(TEST_DIR)/test_*.py - -.PHONY : stylint -stylint : style lint - -.PHONY : coverage -# Need to use a single coverage run with a single pattern rather than -# using two separate commands with separate patterns for test_unit_*.py -# and test_sys_*.py: The latter clobbers some results from the first -# run, even if we use the --append flag to 'coverage run'. -coverage : FORCE - $(PYPATH) $(COVERAGE) erase - $(PYPATH) $(COVERAGE) run $(COVERAGE_ARGS) $(TEST_ARGS) --pattern 'test_*.py' - $(PYPATH) $(COVERAGE) html - -# -# virtual environment creation -# -.PHONY : env -env : FORCE - $(PYPATH) virtualenv --python $(PYTHON) $@_$(PYTHON) - . 
$@_$(PYTHON)/bin/activate; pip install -r requirements.txt - -# -# utilites -# -.PHONY : clean -clean : FORCE - -rm -rf *~ *.pyc tmp fake htmlcov - -.PHONY : clobber -clobber : clean - -rm -rf env_* - -FORCE : - diff --git a/manage_externals/test/README.md b/manage_externals/test/README.md deleted file mode 100644 index 1e8f2eaa77..0000000000 --- a/manage_externals/test/README.md +++ /dev/null @@ -1,53 +0,0 @@ -# Testing for checkout_externals - -## Unit tests - -```SH - cd checkout_externals/test - make utest -``` - -## System tests - -```SH - cd checkout_externals/test - make stest -``` - -Example to run a single test: -```SH - cd checkout_externals - python -m unittest test.test_sys_checkout.TestSysCheckout.test_container_simple_required -``` - -## Static analysis - -checkout_externals is difficult to test thoroughly because it relies -on git and svn, and svn requires a live network connection and -repository. Static analysis will help catch bugs in code paths that -are not being executed, but it requires conforming to community -standards and best practices. autopep8 and pylint should be run -regularly for automatic code formatting and linting. - -```SH - cd checkout_externals/test - make lint -``` - -The canonical formatting for the code is whatever autopep8 -generates. All issues identified by pylint should be addressed. - - -## Code coverage - -All changes to the code should include maintaining existing tests and -writing new tests for new or changed functionality. To ensure test -coverage, run the code coverage tool: - -```SH - cd checkout_externals/test - make coverage - open -a Firefox.app htmlcov/index.html -``` - - diff --git a/manage_externals/test/doc/.gitignore b/manage_externals/test/doc/.gitignore deleted file mode 100644 index d4e11e5ea0..0000000000 --- a/manage_externals/test/doc/.gitignore +++ /dev/null @@ -1,2 +0,0 @@ -_build - diff --git a/manage_externals/test/doc/Makefile b/manage_externals/test/doc/Makefile deleted file mode 100644 index 18f4d5bf99..0000000000 --- a/manage_externals/test/doc/Makefile +++ /dev/null @@ -1,20 +0,0 @@ -# Minimal makefile for Sphinx documentation -# - -# You can set these variables from the command line. -SPHINXOPTS = -SPHINXBUILD = sphinx-build -SPHINXPROJ = ManageExternals -SOURCEDIR = . -BUILDDIR = _build - -# Put it first so that "make" without argument is like "make help". -help: - @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) - -.PHONY: help Makefile - -# Catch-all target: route all unknown targets to Sphinx using the new -# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). -%: Makefile - @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) \ No newline at end of file diff --git a/manage_externals/test/doc/conf.py b/manage_externals/test/doc/conf.py deleted file mode 100644 index 469c0b0dc5..0000000000 --- a/manage_externals/test/doc/conf.py +++ /dev/null @@ -1,172 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Manage Externals documentation build configuration file, created by -# sphinx-quickstart on Wed Nov 29 10:53:25 2017. -# -# This file is execfile()d with the current directory set to its -# containing dir. -# -# Note that not all possible configuration values are present in this -# autogenerated file. -# -# All configuration values have a default; values that are commented out -# serve to show the default. - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. 
If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. -# -# import os -# import sys -# sys.path.insert(0, os.path.abspath('.')) - - -# -- General configuration ------------------------------------------------ - -# If your documentation needs a minimal Sphinx version, state it here. -# -# needs_sphinx = '1.0' - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. -extensions = ['sphinx.ext.autodoc', - 'sphinx.ext.todo', - 'sphinx.ext.coverage', - 'sphinx.ext.viewcode', - 'sphinx.ext.githubpages'] - -# Add any paths that contain templates here, relative to this directory. -templates_path = ['_templates'] - -# The suffix(es) of source filenames. -# You can specify multiple suffix as a list of string: -# -# source_suffix = ['.rst', '.md'] -source_suffix = '.rst' - -# The master toctree document. -master_doc = 'index' - -# General information about the project. -project = u'Manage Externals' -copyright = u'2017, CSEG at NCAR' -author = u'CSEG at NCAR' - -# The version info for the project you're documenting, acts as replacement for -# |version| and |release|, also used in various other places throughout the -# built documents. -# -# The short X.Y version. -version = u'1.0.0' -# The full version, including alpha/beta/rc tags. -release = u'1.0.0' - -# The language for content autogenerated by Sphinx. Refer to documentation -# for a list of supported languages. -# -# This is also used if you do content translation via gettext catalogs. -# Usually you set "language" from the command line for these cases. -language = None - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -# This patterns also effect to html_static_path and html_extra_path -exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store'] - -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = 'sphinx' - -# If true, `todo` and `todoList` produce output, else they produce nothing. -todo_include_todos = True - - -# -- Options for HTML output ---------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -# -html_theme = 'alabaster' - -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. -# -# html_theme_options = {} - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ['_static'] - -# Custom sidebar templates, must be a dictionary that maps document names -# to template names. -# -# This is required for the alabaster theme -# refs: http://alabaster.readthedocs.io/en/latest/installation.html#sidebars -html_sidebars = { - '**': [ - 'relations.html', # needs 'show_related': True theme option to display - 'searchbox.html', - ] -} - - -# -- Options for HTMLHelp output ------------------------------------------ - -# Output file base name for HTML help builder. -htmlhelp_basename = 'ManageExternalsdoc' - - -# -- Options for LaTeX output --------------------------------------------- - -latex_elements = { - # The paper size ('letterpaper' or 'a4paper'). 
- # - # 'papersize': 'letterpaper', - - # The font size ('10pt', '11pt' or '12pt'). - # - # 'pointsize': '10pt', - - # Additional stuff for the LaTeX preamble. - # - # 'preamble': '', - - # Latex figure (float) alignment - # - # 'figure_align': 'htbp', -} - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, -# author, documentclass [howto, manual, or own class]). -latex_documents = [ - (master_doc, 'ManageExternals.tex', u'Manage Externals Documentation', - u'CSEG at NCAR', 'manual'), -] - - -# -- Options for manual page output --------------------------------------- - -# One entry per manual page. List of tuples -# (source start file, name, description, authors, manual section). -man_pages = [ - (master_doc, 'manageexternals', u'Manage Externals Documentation', - [author], 1) -] - - -# -- Options for Texinfo output ------------------------------------------- - -# Grouping the document tree into Texinfo files. List of tuples -# (source start file, target name, title, author, -# dir menu entry, description, category) -texinfo_documents = [ - (master_doc, 'ManageExternals', u'Manage Externals Documentation', - author, 'ManageExternals', 'One line description of project.', - 'Miscellaneous'), -] - - - diff --git a/manage_externals/test/doc/develop.rst b/manage_externals/test/doc/develop.rst deleted file mode 100644 index b817b7b093..0000000000 --- a/manage_externals/test/doc/develop.rst +++ /dev/null @@ -1,202 +0,0 @@ -Developer Guidelines -==================== - -The manage externals utilities are a light weight replacement for svn -externals that will work with git repositories pulling in a mixture of -git and svn dependencies. - -Given an externals description and a working copy: - -* *checkout_externals* attempts to make the working copy agree with the - externals description - -* *generate_externals* attempts to make the externals description agree - with the working copy. - -For these operations utilities should: - -* operate consistently across git and svn - -* operate simply with minimal user complexity - -* robustly across a wide range of repository states - -* provide explicit error messages when a problem occurs - -* leave the working copy in a valid state - -The utilities in manage externals are **NOT** generic wrappers around -revision control operations or a replacement for common tasks. Users -are expected to: - -* create branches prior to starting development - -* add remotes and push changes - -* create tags - -* delete branches - -These types of tasks are often highly workflow dependent, e.g. branch -naming conventions may vary between repositories, have the potential -to destroy user data, introduce significant code complexit and 'edge -cases' that are extremely difficult to detect and test, and often -require subtle decision making, especially if a problem occurs. - -Users who want to automate these types are encouraged to create their -own tools. The externals description files are explicitly versioned -and the internal APIs are intended to be stable for these purposes. - -Core Design Principles ------------------------ - -1. Users can, and are actively encouraged to, modify the externals - directories using revision control outside of manage_externals - tools. You can't make any assumptions about the state of the - working copy. Examples: adding a remote, creating a branch, - switching to a branch, deleting the directory entirely. - -2. 
Given that the user can do anything, the manage externals library
-   can not preserve state between calls. The only information it can
-   rely on is what it expects based on the content of the externals
-   description file, and what the actual state of the directory tree
-   is.
-
-3. Do *not* do anything that will possibly destroy user data!
-
-   a. Do not remove files from the file system. We are operating on
-      user supplied input. If you don't call 'rm', you can't
-      accidentally remove the user's data. Thinking of calling
-      ``shutil.rmtree(user_input)``? What if the user accidentally
-      specified user_input such that it resolves to their home
-      directory.... Yeah. Don't go there.
-
-   b. Rely on git and svn to do their job as much as possible. Don't
-      duplicate functionality. Examples:
-
-      i. We require the working copies to be 'clean' as reported by
-         ``git status`` and ``svn status``. What if there are misc
-         editor files floating around that prevent an update? Use the
-         git and svn ignore functionality so they are not
-         reported. Don't try to remove them from manage_externals or
-         determine if they are 'safe' to ignore.
-
-      ii. Do not use '--force'. Ever. This is a sign you are doing
-          something dangerous, it may not be what the user
-          wants. Remember, they are encouraged to modify their repo.
-
-4. There are often multiple ways to obtain a particular piece of
-   information from git. Scraping screen output is brittle and
-   generally not considered a stable API across different versions of
-   git. Given a choice between:
-
-   a. a lower level git 'plumbing' command that processes a
-      specific request and returns a success/failure status.
-
-   b. a high level git command that produces a bunch of output
-      that must be processed.
-
-   We always prefer the former. It almost always involves
-   writing and maintaining less code and is more likely to be
-   stable.
-
-5. Backward compatibility is critical. We have *nested*
-   repositories. They are trivially easy to change versions. They may
-   have very different versions of the top level manage_externals. The
-   ability to read and work with old model description files is
-   critical to avoid problems for users. We also have automated tools
-   (testdb) that must generate and read external description
-   files. Backward compatibility will make staging changes vastly
-   simpler.
-
-Model Users
------------
-
-Consider the needs of the following model users when developing manage_externals:
-
-* Users who will checkout the code once, and never change versions.
-
-* Users who will checkout the code once, then work for several years,
-  never updating, before trying to update or request integration.
-
-* Users who develop code but do not use revision control beyond the
-  initial checkout. If they have modified or untracked files in the
-  repo, they may be irreplaceable. Don't destroy user data.
-
-* Intermediate users who are working with multiple repos or branches
-  on a regular basis. They may only use manage_externals weekly or
-  monthly. Keep the user interface and documentation simple and
-  explicit. The more command line options they have to remember or
-  look up, the more frustrated they get.
-
-* Software engineers who use the tools multiple times a day. It should
-  get out of their way.
-
-User Interface
---------------
-
-Basic operation for the most standard use cases should be kept as
-simple as possible. Many users will only rarely run the manage
-utilities. Even advanced users don't like reading a lot of help
-documentation or struggling to remember commands and piece together
-what they need to run. Having many command line options, even if not
-needed, is extremely frustrating and overwhelming for most users. A few
-simple, explicitly named commands are better than a single command
-with many options.
-
-How will users get help if something goes wrong? This is a custom,
-one-off solution. Searching the internet for manage_externals will
-only return the user doc for this project at best. There isn't likely
-to be a stackoverflow question or blog post where someone else already
-answered a user's question. And very few people outside this community
-will be able to provide help if something goes wrong. The sooner we
-kick users out of these utilities and into standard version control
-tools, the better off they are going to be if they run into a problem.
-
-Repositories
-------------
-
-There are three basic types of repositories that must be considered:
-
-* container repositories - repositories that are always top level
-  repositories, and have a group of externals that must be managed.
-
-* simple repositories - repositories that are externals to another
-  repository, and do not have any of their own externals that will be
-  managed.
-
-* mixed use repositories - repositories that can act as a top level
-  container repository or as an external to a top level
-  container. They may also have their own sub-externals that are
-  required. They may have different externals needs depending on
-  whether they are top level or not.
-
-Repositories must be able to checkout and switch to both branches and
-tags.
-
-Development
-===========
-
-The functionality to manage externals is broken into a library of core
-functionality and applications built with the library.
-
-The core library is called 'manic', a pseudo-homophone of (man)age
-(ex)ternals that is: short, pronounceable and spell-checkable. It is
-also no more or less meaningful to an unfamiliar user than a random
-jumble of letters forming an acronym.
-
-The core architecture of manic is:
-
-* externals description - an abstract description of an external,
-  including how to obtain it, where to obtain it, where it goes in
-  the working tree.
-
-* externals - the software object representing an external.
-
-* source trees - collection of externals
-
-* repository wrappers - object oriented wrappers around repository
-  operations. So the higher level management of the source tree and
-  external does not have to be concerned with how a particular
-  external is obtained and managed.
-
diff --git a/manage_externals/test/doc/index.rst b/manage_externals/test/doc/index.rst
deleted file mode 100644
index 9ab287ad8c..0000000000
--- a/manage_externals/test/doc/index.rst
+++ /dev/null
@@ -1,22 +0,0 @@
-.. Manage Externals documentation master file, created by
-   sphinx-quickstart on Wed Nov 29 10:53:25 2017.
-   You can adapt this file completely to your liking, but it should at least
-   contain the root `toctree` directive.
-
-Welcome to Manage Externals's documentation!
-============================================
-
-.. toctree::
-   :maxdepth: 2
-   :caption: Contents:
-
-
-   develop.rst
-   testing.rst
-
-Indices and tables
-==================
-
-* :ref:`genindex`
-* :ref:`modindex`
-* :ref:`search`
diff --git a/manage_externals/test/doc/testing.rst b/manage_externals/test/doc/testing.rst
deleted file mode 100644
index 623f0e431c..0000000000
--- a/manage_externals/test/doc/testing.rst
+++ /dev/null
@@ -1,123 +0,0 @@
-Testing
-=======
-
-The manage_externals package has an automated test suite. All pull
-requests are expected to pass 100% of the automated tests, as well as
-be pep8 and lint 'clean' and maintain approximately constant (at a
-minimum) level of code coverage.
-
-Quick Start
------------
-
-Do nothing approach
-~~~~~~~~~~~~~~~~~~~
-
-When you create a pull request on GitHub, Travis-CI continuous
-integration testing will run the test suite in both python2 and
-python3. Test results, lint results, and code coverage results are
-available online.
-
-Do something approach
-~~~~~~~~~~~~~~~~~~~~~
-
-In the test directory, run:
-
-.. code-block:: shell
-
-    make env
-    make lint
-    make test
-    make coverage
-
-
-Automated Testing
------------------
-
-The manage_externals manic library and executables are developed to be
-python2 and python3 compatible using only the standard library. The
-test suites meet the same requirements. But additional tools are
-required to provide lint and code coverage metrics and generate
-documentation. The requirements are maintained in the requirements.txt
-file, and can be automatically installed into an isolated environment
-via Makefile.
-
-Bootstrap requirements:
-
-* python2 - version 2.7.x or later
-
-* python3 - version 3.6 tested, other versions may work
-
-* pip and virtualenv for python2 and python3
-
-Note: all make rules can be of the form ``make python=pythonX rule``
-or ``make rule`` depending on whether you want to use the default system
-python or specify a specific version.
-
-The Makefile in the test directory has the following rules:
-
-* ``make python=pythonX env`` - create a python virtual environment
-  for python2 or python3 and install all required packages. These
-  packages are required to run lint or coverage.
-
-* ``make style`` - runs autopep8
-
-* ``make lint`` - runs autopep8 and pylint
-
-* ``make test`` - run the full test suite
-
-* ``make utest`` - run just the unit tests
-
-* ``make stest`` - run just the system integration tests
-
-* ``make coverage`` - run the full test suite through the code
-  coverage tool and generate an html report.
-
-* ``make readme`` - automatically generate the README files.
-
-* ``make clean`` - remove editor and pyc files
-
-* ``make clobber`` - remove all generated test files, including
-  virtual environments, coverage reports, and temporary test
-  repository directories.
-
-Unit Tests
-----------
-
-Unit tests are probably not 'true unit tests' for the pedantic, but
-are pragmatic unit tests. They cover small practical code blocks:
-functions, class methods, and groups of functions and class methods.
-
-System Integration Tests
-------------------------
-
-NOTE(bja, 2017-11) The system integration tests currently do not include svn repositories.
-
-The manage_externals package is extremely tedious and error prone to test manually.
-
-Combinations that must be tested to ensure basic functionality are:
-
-* container repository pulling in simple externals
-
-* container repository pulling in mixed externals with sub-externals.
- -* mixed repository acting as a container, pulling in simple externals and sub-externals - -Automatic system tests are handled the same way manual testing is done: - -* clone a test repository - -* create an externals description file for the test - -* run the executable with the desired args - -* check the results - -* potentially modify the repo (checkout a different branch) - -* rerun and test - -* etc - -The automated system stores small test repositories in the main repo -by adding them as bare repositories. These repos are cloned via a -subprocess call to git and manipulated during the tests. diff --git a/manage_externals/test/repos/README.md b/manage_externals/test/repos/README.md deleted file mode 100644 index 8a3502c35f..0000000000 --- a/manage_externals/test/repos/README.md +++ /dev/null @@ -1,33 +0,0 @@ -Git repositories for testing git-related behavior. For usage and terminology notes, see test/test_sys_checkout.py. - -To list files and view file contents at HEAD: -``` -cd -git ls-tree --full-tree -r --name-only HEAD -git cat-file -p HEAD: -``` - -File contents at a glance: -``` -container.git/ - readme.txt - -simple-ext.git/ - (has branches: feature2, feature3) - (has tags: tag1, tag2) - readme.txt - simple_subdir/subdir_file.txt - -simple-ext-fork.git/ - (has tags: abandoned-feature, forked-feature-v1, tag1) - (has branch: feature2) - readme.txt - -mixed-cont-ext.git/ - (has branch: new-feature) - readme.txt - sub-externals.cfg ('simp_branch' section refers to 'feature2' branch in simple-ext.git/ repo) - -error/ - (no git repo here, just a readme.txt in the clear) -``` diff --git a/manage_externals/test/repos/container.git/HEAD b/manage_externals/test/repos/container.git/HEAD deleted file mode 100644 index cb089cd89a..0000000000 --- a/manage_externals/test/repos/container.git/HEAD +++ /dev/null @@ -1 +0,0 @@ -ref: refs/heads/master diff --git a/manage_externals/test/repos/container.git/config b/manage_externals/test/repos/container.git/config deleted file mode 100644 index e6da231579..0000000000 --- a/manage_externals/test/repos/container.git/config +++ /dev/null @@ -1,6 +0,0 @@ -[core] - repositoryformatversion = 0 - filemode = true - bare = true - ignorecase = true - precomposeunicode = true diff --git a/manage_externals/test/repos/container.git/description b/manage_externals/test/repos/container.git/description deleted file mode 100644 index 498b267a8c..0000000000 --- a/manage_externals/test/repos/container.git/description +++ /dev/null @@ -1 +0,0 @@ -Unnamed repository; edit this file 'description' to name the repository. diff --git a/manage_externals/test/repos/container.git/info/exclude b/manage_externals/test/repos/container.git/info/exclude deleted file mode 100644 index a5196d1be8..0000000000 --- a/manage_externals/test/repos/container.git/info/exclude +++ /dev/null @@ -1,6 +0,0 @@ -# git ls-files --others --exclude-from=.git/info/exclude -# Lines that start with '#' are comments. 
-# For a project mostly in C, the following would be a good set of -# exclude patterns (uncomment them if you want to use them): -# *.[oa] -# *~ diff --git a/manage_externals/test/repos/container.git/objects/41/1de5d96ee418c1c55f3e96e6e6e7c06bb95801 b/manage_externals/test/repos/container.git/objects/41/1de5d96ee418c1c55f3e96e6e6e7c06bb95801 deleted file mode 100644 index f65234e17f32800b1be0aa9908cc706458b14605..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 133 zcmV;00DAv;0acB$4#OY}L_6~pma=t7)Fr=DfpLNr2P1F?mVSF_r3_t8x_fuJAR6GY zuD1yyS3=Xu)WDKA@Ra});Xx7fWf1zv2~1TS@=422pQw4`eHcB9X3EwU=O)-GQ}s5s nqUZ%S7HaN3i|$`ck;m7Sz6S{Y_}`UoN%K{iOGozsJ+C?sZtFeC diff --git a/manage_externals/test/repos/container.git/objects/71/5b8f3e4afe1802a178e1d603af404ba45d59de b/manage_externals/test/repos/container.git/objects/71/5b8f3e4afe1802a178e1d603af404ba45d59de deleted file mode 100644 index 9759965b1ba440f1899216c1c82c0780fb65f46e..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 136 zcmV;30C)d*0hNtQ3c@fDKwak)a{8g?ULFizQ5yOj!O$#BY{3QX>9e{j4e8<)

AV=y!@Ff%bx&`ZxO$xP47FG^)_lzn~QNUpn5)Pnq=ii~6DWK2pp8O#dS+Wke_L diff --git a/manage_externals/test/repos/container.git/objects/f9/e08370a737e941de6f6492e3f427c2ef4c1a03 b/manage_externals/test/repos/container.git/objects/f9/e08370a737e941de6f6492e3f427c2ef4c1a03 deleted file mode 100644 index 460fd7781917e095c826e8bc77ad53d943f199aa..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 81 zcmV-X0IvUd0R_Ry4S+BV1VG+Yu@&$_q5vvMU;#^(9XS?9_smrFie(;Fw=7}|1e56wgzpa&}fBkqfO*k&i_)dY`l?1hv=p}Fj<2Ge{uRcq{saZ z%j{g@HZ3wNvQv&lo|o_6gr*rieLQOSK`~u|R`NhFUI)68@B`BlpbA~$UTB9Ga*~zx a%Jelj*-|I)LF@ttC5adD0subgY(|R<&Qf{+ diff --git a/manage_externals/test/repos/mixed-cont-ext.git/objects/01/97458f2dbe5fcd6bc44fa46983be0a30282379 b/manage_externals/test/repos/mixed-cont-ext.git/objects/01/97458f2dbe5fcd6bc44fa46983be0a30282379 deleted file mode 100644 index 032f4b1ca6bf0d25f1f9f419b1e7ab2aae1ef6c8..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 171 zcmV;c095~Y0Zomu4uUWgMV;SQFt~~^I5;?f2d%r6C&MNz$f6Pi}^^zp3SC&knSt>TGbz78}9=ZOL8&?Fv(cG!`VtgKgN ZY{1E$27wP^7dQxMoWuzLd;nlTMfbC)Q$zp& diff --git a/manage_externals/test/repos/mixed-cont-ext.git/objects/06/ea30b03ffa2f8574705f8b9583f7ca7e2dccf7 b/manage_externals/test/repos/mixed-cont-ext.git/objects/06/ea30b03ffa2f8574705f8b9583f7ca7e2dccf7 deleted file mode 100644 index 13d15a96a5071e98f0ba0cfbbdb2992c03990151..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 136 zcmV;30C)d*0hNtG4#FT106p`H{eaDGEd>%|)SJ(su+=pM4B|mwZ+(Kd$t05rB_(M< zmNu<2!_Lge2B#67kHO(Q1a!#· -MP…tæÇM¯0v&ù>î°KciåÇüÇ8V; \ No newline at end of file diff --git a/manage_externals/test/repos/mixed-cont-ext.git/objects/1f/01fa46c17b1f38b37e6259f6e9d041bda3144f b/manage_externals/test/repos/mixed-cont-ext.git/objects/1f/01fa46c17b1f38b37e6259f6e9d041bda3144f deleted file mode 100644 index 7bacde68db5f1201015d4532aba9551660b05399..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 167 zcmV;Y09gNc0hNxy4Z<)C0C{H$F90%4+_Vxxz$T7kLnR0(O(n*sumP`oo$loMcuWmC z-)~w~gsCf*eiQX=*_sZfntbAHl&dTZ&5gE zmqjc(UfS(h;i3i3C0B(5e{oub>rV4>ggxy?ABf1q79*mQ-&@oFEO*Ws<|S?Qy{d)p VGuU)ju(jTFZd1AL+y`g^OR&}EOOOBn diff --git a/manage_externals/test/repos/mixed-cont-ext.git/objects/37/f0e70b609adc90f4c09ee21d82ed1d79c81d69 b/manage_externals/test/repos/mixed-cont-ext.git/objects/37/f0e70b609adc90f4c09ee21d82ed1d79c81d69 deleted file mode 100644 index 8c6b04837ae4456cc5dc53ea7572610e6635d0d8..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 89 zcmV-f0H*(V0V^p=O;s>AV=y!@Ff%bx&`ZxO$xP47FG^)_lzn`tC9-|*xG$A9N diff --git a/manage_externals/test/repos/mixed-cont-ext.git/objects/38/9a2b876b8965d3c91a3db8d28a483eaf019d5c b/manage_externals/test/repos/mixed-cont-ext.git/objects/38/9a2b876b8965d3c91a3db8d28a483eaf019d5c deleted file mode 100644 index 1a35b74d479fdfb4bf24bcf199663fbb52036eee..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 130 zcmV-|0Db>>0V^p=O;s>7GGs6`FfcPQQP4}zEXhpI%P&f0aFl&|Gw+GS!K3kZ)1Ezh zejs~i1S3>cQEFmJZmM2MMG3=S(WPsHSWJ^Nk8w52YBee>u{sG;Ra}~+n_5wlT9lWV kQ>>Sqmd-GFVdTA?;?e&$HE}Vp-My(>AuMbJ03PHp2Cniq;{X5v diff --git a/manage_externals/test/repos/mixed-cont-ext.git/objects/41/1de5d96ee418c1c55f3e96e6e6e7c06bb95801 b/manage_externals/test/repos/mixed-cont-ext.git/objects/41/1de5d96ee418c1c55f3e96e6e6e7c06bb95801 deleted file mode 100644 index 
f65234e17f32800b1be0aa9908cc706458b14605..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 133 zcmV;00DAv;0acB$4#OY}L_6~pma=t7)Fr=DfpLNr2P1F?mVSF_r3_t8x_fuJAR6GY zuD1yyS3=Xu)WDKA@Ra});Xx7fWf1zv2~1TS@=422pQw4`eHcB9X3EwU=O)-GQ}s5s nqUZ%S7HaN3i|$`ck;m7Sz6S{Y_}`UoN%K{iOGozsJ+C?sZtFeC diff --git a/manage_externals/test/repos/mixed-cont-ext.git/objects/6e/9f4baa6e94a0af4e094836c2eb55ccedef5fc4 b/manage_externals/test/repos/mixed-cont-ext.git/objects/6e/9f4baa6e94a0af4e094836c2eb55ccedef5fc4 deleted file mode 100644 index 6b2146cae4080fe2369401ecf5009fd9612c363c..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 129 zcmV-{0Dk{?0V^p=O;s>7GGs6`FfcPQQP4}zEXhpI%P&f0aFl&|Gw+GS!K3kZ)1Ezh zejs~i1S3>cQEFmJZmM2MMG3=S(WPsHSWJ^Nk8w52YBee>u{sG;Ra}~+n_5wlT9lWV jQ>>Sqmd+qz)?FYbw&JLT!Zra%FYj6GAw1sz`R^`7StK`- diff --git a/manage_externals/test/repos/mixed-cont-ext.git/objects/6f/c379457ecb4e576a13c7610ae1fa73f845ee6a b/manage_externals/test/repos/mixed-cont-ext.git/objects/6f/c379457ecb4e576a13c7610ae1fa73f845ee6a deleted file mode 100644 index 852a051139..0000000000 --- a/manage_externals/test/repos/mixed-cont-ext.git/objects/6f/c379457ecb4e576a13c7610ae1fa73f845ee6a +++ /dev/null @@ -1 +0,0 @@ -x•ANÄ09çsãÄÊŽ;‘~2±ÛÊJÄ^MÆ,Ï'ì8õ¥«ÔÚ¾_•ÆyyR3ØlmvˆÆ•PB°Œ˜FCñ¼Î>»y¸± *Ùbla’«-n^]D§¥,Ùx»fvÖû2p×­ }¢ÒGÍzå¿xï‰å‚ÜßÈNvq~Z¢¡Òc›âÔ èÇyäç+QåT¤íÔt;]ŠC:ÝA¹Õg¥¿AÚ( XA÷G‰®µ*=i\†_øÀ^' \ No newline at end of file diff --git a/manage_externals/test/repos/mixed-cont-ext.git/objects/93/a159deb9175bfeb2820a0006ddd92d78131332 b/manage_externals/test/repos/mixed-cont-ext.git/objects/93/a159deb9175bfeb2820a0006ddd92d78131332 deleted file mode 100644 index 682d799898667fc1b506c6daece665c1af824fc1..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 169 zcmV;a09OBa0X2=i4uUWgg`MwHFu01a>EM8d!9g*|M#xPmH`1igdRvT%@!c&N$Mf@@ z(`wU3>1MmAof<6C(>I`v6dJAYeYA@l%k@73%f=gNbntJ=1Cup4@hq3GQ+7Tcu*$C$ z?z1w-GQSj97De^`@|sp*JpN(#NilT+t9T-4S&VZ28ie!20Ci{*k3u`_$Vpb#D>F9W XWKV;@2eAt}0BM}W2>^TmrSn6;Se#N% diff --git a/manage_externals/test/repos/mixed-cont-ext.git/objects/95/80ecc12f16334ce44e42287d5d46f927bb7b75 b/manage_externals/test/repos/mixed-cont-ext.git/objects/95/80ecc12f16334ce44e42287d5d46f927bb7b75 deleted file mode 100644 index 33c9f6cdf1..0000000000 --- a/manage_externals/test/repos/mixed-cont-ext.git/objects/95/80ecc12f16334ce44e42287d5d46f927bb7b75 +++ /dev/null @@ -1 +0,0 @@ -x•ŽKNÄ0Yç½cÅÈŸLlK7é´Ÿ5#{ä´ŽO˜°z›ªÒ“¶mW%Ó“v€8¹³äÈÁ&¶eFö²òìÙ±$/¦äéÆUÉžÝz°RœÎJ¶¡”%ZY“ |YS“ìÄC/­Ó'*}ÔÜA¯ü7ïC¸ŸÇÛ‘²ÉÏ‹1‘^L0f’Ç7Åÿ¬©cì übå/ª¼Jo5½-Å®;íî Üê³Ò…¿AÚH:XA÷D×Z:ïÚ‡èè8M¿¸^æ \ No newline at end of file diff --git a/manage_externals/test/repos/mixed-cont-ext.git/objects/a9/288dcd8a719a1f4ed3cba43a2a387ae7cd60fd b/manage_externals/test/repos/mixed-cont-ext.git/objects/a9/288dcd8a719a1f4ed3cba43a2a387ae7cd60fd deleted file mode 100644 index 73e7cbfbc8e106cee027f798dcb163ec6c5d21e6..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 130 zcmV-|0Db>>0V^p=O;s>7GGs6`FfcPQQP4}zEXhpI%P&f0aFl&|Gw+GS!K3kZ)1Ezh zejs~i1S3>cQEFmJZmM2MMG3=S(WPsHSWJ^Nk8w52YBee>u{sG;Ra}~+n_5wlT9lWV kQ>>Sqmd?O9-L+qLU;NqZBmPS=oA+@UXed_#01>J$$h2KJZU6uP diff --git a/manage_externals/test/repos/mixed-cont-ext.git/objects/e8/ea32a11d30ee703f6f661ae7c2376f4ab84d38 b/manage_externals/test/repos/mixed-cont-ext.git/objects/e8/ea32a11d30ee703f6f661ae7c2376f4ab84d38 deleted file mode 100644 index 189ed85bb3c8b8642ae353d29a759f67040b5786..0000000000000000000000000000000000000000 
GIT binary patch literal 0 HcmV?d00001 literal 130 zcmV-|0Db>>0V^p=O;s>7GGs6`FfcPQQP4}zEXhpI%P&f0aFl&|Gw+GS!K3kZ)1Ezh zejs~i1S3>cQEFmJZmM2MMG3=S(WPsHSWJ^Nk8w52YBee>u{sG;Ra}~+n_5wlT9lWV kQ>>Sqmd?Q7Ty=.p¢ˆA -!ìÜ  w4ݵ¡¸Qªé€Øú=©Ã¤á¨ÏZ9ü0„þûkÌ éžG)* \ No newline at end of file diff --git a/manage_externals/test/repos/mixed-cont-ext.git/refs/heads/master b/manage_externals/test/repos/mixed-cont-ext.git/refs/heads/master deleted file mode 100644 index 1e0eef1ea3..0000000000 --- a/manage_externals/test/repos/mixed-cont-ext.git/refs/heads/master +++ /dev/null @@ -1 +0,0 @@ -6fc379457ecb4e576a13c7610ae1fa73f845ee6a diff --git a/manage_externals/test/repos/mixed-cont-ext.git/refs/heads/new-feature b/manage_externals/test/repos/mixed-cont-ext.git/refs/heads/new-feature deleted file mode 100644 index 607e80d1bc..0000000000 --- a/manage_externals/test/repos/mixed-cont-ext.git/refs/heads/new-feature +++ /dev/null @@ -1 +0,0 @@ -9580ecc12f16334ce44e42287d5d46f927bb7b75 diff --git a/manage_externals/test/repos/simple-ext-fork.git/HEAD b/manage_externals/test/repos/simple-ext-fork.git/HEAD deleted file mode 100644 index cb089cd89a..0000000000 --- a/manage_externals/test/repos/simple-ext-fork.git/HEAD +++ /dev/null @@ -1 +0,0 @@ -ref: refs/heads/master diff --git a/manage_externals/test/repos/simple-ext-fork.git/config b/manage_externals/test/repos/simple-ext-fork.git/config deleted file mode 100644 index 04eba17870..0000000000 --- a/manage_externals/test/repos/simple-ext-fork.git/config +++ /dev/null @@ -1,8 +0,0 @@ -[core] - repositoryformatversion = 0 - filemode = true - bare = true - ignorecase = true - precomposeunicode = true -[remote "origin"] - url = /Users/andreb/projects/ncar/git-conversion/checkout-model-dev/cesm-demo-externals/manage_externals/test/repos/simple-ext.git diff --git a/manage_externals/test/repos/simple-ext-fork.git/description b/manage_externals/test/repos/simple-ext-fork.git/description deleted file mode 100644 index 498b267a8c..0000000000 --- a/manage_externals/test/repos/simple-ext-fork.git/description +++ /dev/null @@ -1 +0,0 @@ -Unnamed repository; edit this file 'description' to name the repository. diff --git a/manage_externals/test/repos/simple-ext-fork.git/info/exclude b/manage_externals/test/repos/simple-ext-fork.git/info/exclude deleted file mode 100644 index a5196d1be8..0000000000 --- a/manage_externals/test/repos/simple-ext-fork.git/info/exclude +++ /dev/null @@ -1,6 +0,0 @@ -# git ls-files --others --exclude-from=.git/info/exclude -# Lines that start with '#' are comments. 
-# For a project mostly in C, the following would be a good set of -# exclude patterns (uncomment them if you want to use them): -# *.[oa] -# *~ diff --git a/manage_externals/test/repos/simple-ext-fork.git/objects/00/fd13e76189f9134b0506b4b8ed3172723b467f b/manage_externals/test/repos/simple-ext-fork.git/objects/00/fd13e76189f9134b0506b4b8ed3172723b467f deleted file mode 100644 index ae28c037e5e8773bab7a7f9b6b050a01c3c8402a..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 89 zcmV-f0H*(V0V^p=O;s>AV=y!@Ff%bx&`ZxO$xP47FG^)_lznAV=y!@Ff%bx&`ZxO$xP47FG^)_lzn&Ekz!U-;cU~)E`&5u^pl|A>?=DrCt|Zp*KGhtORPb%uc6q&p;{~x`YAHy z#2GbEv6YQH#`fOIuH1gSE*yL=Ojyh~{nIdqe*nnpf*T V&^Fln@|2-4tBgli^9u#mM`!{nPaFUM diff --git a/manage_externals/test/repos/simple-ext-fork.git/objects/11/a76e3d9a67313dec7ce1230852ab5c86352c5c b/manage_externals/test/repos/simple-ext-fork.git/objects/11/a76e3d9a67313dec7ce1230852ab5c86352c5c deleted file mode 100644 index 564e7bba63..0000000000 --- a/manage_externals/test/repos/simple-ext-fork.git/objects/11/a76e3d9a67313dec7ce1230852ab5c86352c5c +++ /dev/null @@ -1,2 +0,0 @@ -x%ŒK -Â0@]çse&ßDÔ›L’!´˜¶„l¼½).¼Åãu.@Æ_ö¸Jê0ÇàìlM–Ä~v:ÄèmLÌÆi™åY*/ŸÛè@ŽpòÞ W ˆJ¥&Üå¿ø)´*Í \ No newline at end of file diff --git a/manage_externals/test/repos/simple-ext-fork.git/objects/16/5506a7408a482f50493434e13fffeb44af893f b/manage_externals/test/repos/simple-ext-fork.git/objects/16/5506a7408a482f50493434e13fffeb44af893f deleted file mode 100644 index 0d738af68b021dcd9918c8f2047aa4fff55bf6e4..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 89 zcmV-f0H*(V0V^p=O;s>AV=y!@Ff%bx&`ZxO$xP47FG^)_lznO)_H(Z zem6QZm^^8RnmiI`ubHzgrPye+FKRN0H9F;O5%17>8Q`NMJ?ehWT|!t)2i0Np3Z=u$N9svC-|`;J-!jY5fUp SfzGuJhQeX2oy8Y4sYkDN{z{Sn diff --git a/manage_externals/test/repos/simple-ext-fork.git/objects/32/7e97d86e941047d809dba58f2804740c6c30cf b/manage_externals/test/repos/simple-ext-fork.git/objects/32/7e97d86e941047d809dba58f2804740c6c30cf deleted file mode 100644 index 0999f0d4b9b4297e5677a96f3c9677bf408ee8d9..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 89 zcmV-f0H*(V0V^p=O;s>AV=y!@Ff%bx&`ZxO$xP47FG^)_lzniemt(y-3DP$mtIvOOf diff --git a/manage_externals/test/repos/simple-ext-fork.git/objects/36/418b4e5665956a90725c9a1b5a8e551c5f3d48 b/manage_externals/test/repos/simple-ext-fork.git/objects/36/418b4e5665956a90725c9a1b5a8e551c5f3d48 deleted file mode 100644 index 9da8434f65ef3bfdb57cb8117e312a56663a31a2..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 159 zcmV;Q0AT-k0hNwh3c@fD0R7G>_5#Z8=Ft>H)JyoiX*NFFNQn2h9>Kq1U|^;?&-V_@ zcGH_GU?Q(kip?&NPmV1)rl3VdZ7GGKLl-2Pw=`WkjA`(0bci¹`ý}0…M”؇BÚÁs0/µâ¿}öï:: \ No newline at end of file diff --git a/manage_externals/test/repos/simple-ext-fork.git/objects/3d/ec1fdf8e2f5edba28148c5db2fe8d7a842360b b/manage_externals/test/repos/simple-ext-fork.git/objects/3d/ec1fdf8e2f5edba28148c5db2fe8d7a842360b deleted file mode 100644 index 9a31c7ef2e..0000000000 --- a/manage_externals/test/repos/simple-ext-fork.git/objects/3d/ec1fdf8e2f5edba28148c5db2fe8d7a842360b +++ /dev/null @@ -1,2 +0,0 @@ -x•ŽKnÃ0 ³Ö)x”,ÊI½EÑŸ´–A¹Ü#t7o€ŒìÛ¶vp.žzS…ÁšÆƒ&oÑ„©d¦8¹xLd@™Ì‹›ÖCð6f¯% -œpt$‰m&ŽJd…¦¡øhøÝ—½Á—VxÔÒ®ùÉpŸ7^/²o7°d­K1ÂGDsØ#¯ë¿æ{o?Z 7®²€,\g½˜AV=y!@Ff%bx&`ZxO$xP47FG^)_lznAV=y!@Ff%bx&`ZxO$xP47FG^)_lznvGy0&Z${j?E8>6rD10GHRYE2d diff --git a/manage_externals/test/repos/simple-ext-fork.git/objects/5f/1d4786d12e52d7ab28d2f2f1118c1059a9f1ae 
b/manage_externals/test/repos/simple-ext-fork.git/objects/5f/1d4786d12e52d7ab28d2f2f1118c1059a9f1ae deleted file mode 100644 index 25488b7bfe52fd0d530e20393b752815d9aaf16f..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 93 zcmV-j0HXhR0S(JB4ue1p1i;kyiv0l8%LNPZurX=iP=VtPL2T>`g? zkh3=;83|{%kTn0{lH8#Nev_`XVPmImRbRpwOIgehnBL{IWwXg diff --git a/manage_externals/test/repos/simple-ext-fork.git/objects/67/136e5ab4d5c1c65d10c8048763b96b0e53c1d6 b/manage_externals/test/repos/simple-ext-fork.git/objects/67/136e5ab4d5c1c65d10c8048763b96b0e53c1d6 deleted file mode 100644 index d3dfe31113715fe07ea6833f0c2a25e868ac20b2..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 165 zcmV;W09yZe0hNwR4#F@DL|Nw)z5pm6r*$QSfIWwB8k=t$6s6+&lq0Ykjo#?ZSf=UT zz+~D012)4Gj)~xM%ugTv-b1AFi TQ|c4S3@Y4~D&BknM3zUWvn5b3 diff --git a/manage_externals/test/repos/simple-ext-fork.git/objects/7b/0bd630ac13865735a1dff3437a137d8ab50663 b/manage_externals/test/repos/simple-ext-fork.git/objects/7b/0bd630ac13865735a1dff3437a137d8ab50663 deleted file mode 100644 index 0a2ec0494bc1600144cb54b61a6d7b43c7f3e806..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 119 zcmV--0Eqv10X50d4FVw$MNz-0;#IJTYiz*^YyjkKAhHY@MpwI+#E{&tb3>7U^YwDN zr`$2}=y`92Fm{8oNzW$w#gQ$c3ivT<^#zfQHTwFÁ©¹£rPkÖSèkJ´^ë \ No newline at end of file diff --git a/manage_externals/test/repos/simple-ext-fork.git/objects/a4/2fe9144f5707bc1e9515ce1b44681f7aba6f95 b/manage_externals/test/repos/simple-ext-fork.git/objects/a4/2fe9144f5707bc1e9515ce1b44681f7aba6f95 deleted file mode 100644 index d8ba654548..0000000000 --- a/manage_externals/test/repos/simple-ext-fork.git/objects/a4/2fe9144f5707bc1e9515ce1b44681f7aba6f95 +++ /dev/null @@ -1,3 +0,0 @@ -xUÌ[ -Â0…aŸ³ŠÙ@%Is+ˆ¨;™¤c/˜DÂq÷VðÅ×Ã>Æ ”w‡WJ Ú˜>8ò!¤!&'ƒS=)í±×CòF+ÑI2‚ßO‚Ts^Xðn`Ä2ÖBcw'ä­Ñw¨Á -\ËØNqÝ›F—)ãò8îç3(«¬Œ2:é¥ÿü0x-<×!6,i ª9 \ No newline at end of file diff --git a/manage_externals/test/repos/simple-ext-fork.git/objects/b9/3737be3ea6b19f6255983748a0a0f4d622f936 b/manage_externals/test/repos/simple-ext-fork.git/objects/b9/3737be3ea6b19f6255983748a0a0f4d622f936 deleted file mode 100644 index 9b40a0afa00b93a318cd503d3b29db1162978b03..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 89 zcmV-f0H*(V0V^p=O;s>AV=y!@Ff%bx&`ZxO$xP47FG^)_lznD—}ÂD>£Nƒv“{ŠZ¼M˜I…¥?jƒ‹Ìpžs8ÄgøÓ½„qÚ¥ZŽ€qo j†­f­ÕJ×{]þÕµÓ¥®¥Om/¨3Ü$ô¥‰Q_@ÞH© \ No newline at end of file diff --git a/manage_externals/test/repos/simple-ext-fork.git/packed-refs b/manage_externals/test/repos/simple-ext-fork.git/packed-refs deleted file mode 100644 index b8f9e86308..0000000000 --- a/manage_externals/test/repos/simple-ext-fork.git/packed-refs +++ /dev/null @@ -1,5 +0,0 @@ -# pack-refs with: peeled fully-peeled sorted -36418b4e5665956a90725c9a1b5a8e551c5f3d48 refs/heads/feature2 -9b75494003deca69527bb64bcaa352e801611dd2 refs/heads/master -11a76e3d9a67313dec7ce1230852ab5c86352c5c refs/tags/tag1 -^9b75494003deca69527bb64bcaa352e801611dd2 diff --git a/manage_externals/test/repos/simple-ext-fork.git/refs/heads/feature2 b/manage_externals/test/repos/simple-ext-fork.git/refs/heads/feature2 deleted file mode 100644 index d223b0362d..0000000000 --- a/manage_externals/test/repos/simple-ext-fork.git/refs/heads/feature2 +++ /dev/null @@ -1 +0,0 @@ -f268d4e56d067da9bd1d85e55bdc40a8bd2b0bca diff --git a/manage_externals/test/repos/simple-ext-fork.git/refs/tags/abandoned-feature b/manage_externals/test/repos/simple-ext-fork.git/refs/tags/abandoned-feature 
deleted file mode 100644 index 8a18bf08e9..0000000000 --- a/manage_externals/test/repos/simple-ext-fork.git/refs/tags/abandoned-feature +++ /dev/null @@ -1 +0,0 @@ -a42fe9144f5707bc1e9515ce1b44681f7aba6f95 diff --git a/manage_externals/test/repos/simple-ext-fork.git/refs/tags/forked-feature-v1 b/manage_externals/test/repos/simple-ext-fork.git/refs/tags/forked-feature-v1 deleted file mode 100644 index 2764b552d5..0000000000 --- a/manage_externals/test/repos/simple-ext-fork.git/refs/tags/forked-feature-v1 +++ /dev/null @@ -1 +0,0 @@ -8d2b3b35126224c975d23f109aa1e3cbac452989 diff --git a/manage_externals/test/repos/simple-ext.git/HEAD b/manage_externals/test/repos/simple-ext.git/HEAD deleted file mode 100644 index cb089cd89a..0000000000 --- a/manage_externals/test/repos/simple-ext.git/HEAD +++ /dev/null @@ -1 +0,0 @@ -ref: refs/heads/master diff --git a/manage_externals/test/repos/simple-ext.git/config b/manage_externals/test/repos/simple-ext.git/config deleted file mode 100644 index e6da231579..0000000000 --- a/manage_externals/test/repos/simple-ext.git/config +++ /dev/null @@ -1,6 +0,0 @@ -[core] - repositoryformatversion = 0 - filemode = true - bare = true - ignorecase = true - precomposeunicode = true diff --git a/manage_externals/test/repos/simple-ext.git/description b/manage_externals/test/repos/simple-ext.git/description deleted file mode 100644 index 498b267a8c..0000000000 --- a/manage_externals/test/repos/simple-ext.git/description +++ /dev/null @@ -1 +0,0 @@ -Unnamed repository; edit this file 'description' to name the repository. diff --git a/manage_externals/test/repos/simple-ext.git/info/exclude b/manage_externals/test/repos/simple-ext.git/info/exclude deleted file mode 100644 index a5196d1be8..0000000000 --- a/manage_externals/test/repos/simple-ext.git/info/exclude +++ /dev/null @@ -1,6 +0,0 @@ -# git ls-files --others --exclude-from=.git/info/exclude -# Lines that start with '#' are comments. 
-# For a project mostly in C, the following would be a good set of -# exclude patterns (uncomment them if you want to use them): -# *.[oa] -# *~ diff --git a/manage_externals/test/repos/simple-ext.git/objects/00/fd13e76189f9134b0506b4b8ed3172723b467f b/manage_externals/test/repos/simple-ext.git/objects/00/fd13e76189f9134b0506b4b8ed3172723b467f deleted file mode 100644 index ae28c037e5e8773bab7a7f9b6b050a01c3c8402a..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 89 zcmV-f0H*(V0V^p=O;s>AV=y!@Ff%bx&`ZxO$xP47FG^)_lznbW_*ltIGSP}@rN;eRaRvTe4jec)&9#mV ztc{ztsDi^RDN|POQ7IsM3R)Zn^fb6Ap%fNDG*4c1YCyeUO2}@P$+4Hjj2b9dvLb3- zmJ-WQ2E*@mn-@6i1g9x43VXTpcO0*k$48gudH@`(^)|-1gKbZJZ&teIHT_#Om*271 ST(#ZC=?eOIX=gtC)=0=UK}@j# diff --git a/manage_externals/test/repos/simple-ext.git/objects/0b/15e8af3d4615b42314216efeae3fff184046a8 b/manage_externals/test/repos/simple-ext.git/objects/0b/15e8af3d4615b42314216efeae3fff184046a8 deleted file mode 100644 index 32d6896e3cb813edde3e4f0d0ca2d21963c2f1b0..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 89 zcmV-f0H*(V0V^p=O;s>AV=y!@Ff%bx&`ZxO$xP47FG^)_lznåY*/ŸÛè@ŽpòÞ W ˆJ¥&Üå¿ø)´*Í \ No newline at end of file diff --git a/manage_externals/test/repos/simple-ext.git/objects/14/2711fdbbcb8034d7cad6bae6801887b12fe61d b/manage_externals/test/repos/simple-ext.git/objects/14/2711fdbbcb8034d7cad6bae6801887b12fe61d deleted file mode 100644 index acaf7889b47c54ee0dea121c73d505ca14ad369b..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 83 zcmV-Z0IdIb0ZYosPg1ZjWC+Q~ELKR%%t=)!&d4v#Nl{3x$Sf{V$jnnnRLILO%1z8s pNX|%2&dx6_QAh$}pz8eG%#xDS6o{JQg2bZYRJa;FE&z4gA7ySEC>H<# diff --git a/manage_externals/test/repos/simple-ext.git/objects/31/dbcd6de441e671a467ef317146539b7ffabb11 b/manage_externals/test/repos/simple-ext.git/objects/31/dbcd6de441e671a467ef317146539b7ffabb11 deleted file mode 100644 index 0f0db6797fe19372f1d2122ebe8aa5361df07c61..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 90 zcmV-g0HyzU0V^p=O;s>AV=y!@Ff%bx&`ZxO$xP47FG^)_lzn_5#Z8=Ft>H)JyoiX*NFFNQn2h9>Kq1U|^;?&-V_@ zcGH_GU?Q(kip?&NPmV1)rl3VdZ7GGKLl-2Pw=`WkjA`(0bciMsIi;$9n_k z!>-M$Ac)DAYy~^^qUu9WLY{J}xkT>CQ3)XSxÁ©¹£rPkÖSèkJ´^ë \ No newline at end of file diff --git a/manage_externals/test/repos/simple-ext.git/objects/b7/692b6d391899680da7b9b6fd8af4c413f06fe7 b/manage_externals/test/repos/simple-ext.git/objects/b7/692b6d391899680da7b9b6fd8af4c413f06fe7 deleted file mode 100644 index 1b3b2724425492336f2816dc6ba4d818474f3c78..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 137 zcmV;40CxX)0Tql%4#F@D06FIs{s71hZqrBz@d!Q;98>0V^p=O;s>7G-EI{FfcPQQP4}zEXhpI%P&f0aFl&|Gw+GS!K3kZ)1Ezh zejs~i1S3>cQEFmJZmM2MMG3KlvCEtNF?@%PbVOT{Nm)vLb%0Bl_``r7C@umAu6 diff --git a/manage_externals/test/repos/simple-ext.git/objects/d8/ed2f33179d751937f8fde2e33921e4827babf4 b/manage_externals/test/repos/simple-ext.git/objects/d8/ed2f33179d751937f8fde2e33921e4827babf4 deleted file mode 100644 index f08ae820c9c89927f9898c5646134f7c519a6b04..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 60 zcmV-C0K@-y0V^p=O;s>4W-v4`Ff%bxC@xJ($t;Rb%gjmDE2$`95K$NWyZdy5$@Np$ Sc0Fs5Xy2&+OcnsW_!K#a0~pW% diff --git a/manage_externals/test/repos/simple-ext.git/objects/df/312890f93ba4d2c694208599b665c4a08afeff b/manage_externals/test/repos/simple-ext.git/objects/df/312890f93ba4d2c694208599b665c4a08afeff deleted file mode 100644 index 
4018ea5914ee89b76d88fc282b6c98d80e4aaccd..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 89 zcmV-f0H*(V0V^p=O;s>AV=y!@Ff%bx&`ZxO$xP47FG^)_lzn=1.7.0 -autopep8>=1.3.0 -coverage>=4.4.0 -coveralls>=1.2.0 -sphinx>=1.6.0 diff --git a/manage_externals/test/test_sys_checkout.py b/manage_externals/test/test_sys_checkout.py deleted file mode 100644 index ab4f77e88f..0000000000 --- a/manage_externals/test/test_sys_checkout.py +++ /dev/null @@ -1,1896 +0,0 @@ -#!/usr/bin/env python3 - -"""Unit test driver for checkout_externals - -Terminology: - * 'container': a repo that has externals - * 'simple': a repo that has no externals, but is referenced as an external by another repo. - * 'mixed': a repo that both has externals and is referenced as an external by another repo. - - * 'clean': the local repo matches the version in the externals and has no local modifications. - * 'empty': the external isn't checked out at all. - -Note: this script assume the path to the manic and -checkout_externals module is already in the python path. This is -usually handled by the makefile. If you call it directly, you may need -to adjust your path. - -NOTE(bja, 2017-11) If a test fails, we want to keep the repo for that -test. But the tests will keep running, so we need a unique name. Also, -tearDown is always called after each test. I haven't figured out how -to determine if an assertion failed and whether it is safe to clean up -the test repos. - -So the solution is: - -* assign a unique id to each test repo. - -* never cleanup during the run. - -* Erase any existing repos at the begining of the module in -setUpModule. -""" - -# NOTE(bja, 2017-11) pylint complains that the module is too big, but -# I'm still working on how to break up the tests and still have the -# temporary directory be preserved.... -# pylint: disable=too-many-lines - - -from __future__ import absolute_import -from __future__ import unicode_literals -from __future__ import print_function - -import logging -import os -import os.path -import shutil -import unittest - -from manic.externals_description import ExternalsDescription -from manic.externals_description import DESCRIPTION_SECTION, VERSION_ITEM -from manic.externals_description import git_submodule_status -from manic.externals_status import ExternalStatus -from manic.repository_git import GitRepository -from manic.utils import printlog, execute_subprocess -from manic.global_constants import LOCAL_PATH_INDICATOR, VERBOSITY_DEFAULT -from manic.global_constants import LOG_FILE_NAME -from manic import checkout - -# ConfigParser was renamed in python2 to configparser. In python2, -# ConfigParser returns byte strings, str, instead of unicode. We need -# unicode to be compatible with xml and json parser and python3. -try: - # python2 - from ConfigParser import SafeConfigParser as config_parser -except ImportError: - # python3 - from configparser import ConfigParser as config_parser - -# --------------------------------------------------------------------- -# -# Global constants -# -# --------------------------------------------------------------------- - - -# Module-wide root directory for all the per-test subdirs we'll create on -# the fly (which are placed under wherever $CWD is when the test runs). -# Set by setupModule(). -module_tmp_root_dir = None -TMP_REPO_DIR_NAME = 'tmp' # subdir under $CWD - -# subdir under test/ that holds all of our checked-in repositories (which we -# will clone for these tests). 
-BARE_REPO_ROOT_NAME = 'repos' - -# Environment var referenced by checked-in externals file in mixed-cont-ext.git, -# which should be pointed to the fully-resolved BARE_REPO_ROOT_NAME directory. -# We explicitly clear this after every test, via tearDown(). -MIXED_CONT_EXT_ROOT_ENV_VAR = 'MANIC_TEST_BARE_REPO_ROOT' - -# Subdirs under bare repo root, each holding a repository. For more info -# on the contents of these repositories, see test/repos/README.md. In these -# tests the 'parent' repos are cloned as a starting point, whereas the 'child' -# repos are checked out when the tests run checkout_externals. -CONTAINER_REPO = 'container.git' # Parent repo -SIMPLE_REPO = 'simple-ext.git' # Child repo -SIMPLE_FORK_REPO = 'simple-ext-fork.git' # Child repo -MIXED_REPO = 'mixed-cont-ext.git' # Both parent and child - -# Standard (arbitrary) external names for test configs -TAG_SECTION = 'simp_tag' -BRANCH_SECTION = 'simp_branch' -HASH_SECTION = 'simp_hash' - -# All the configs we construct check out their externals into these local paths. -EXTERNALS_PATH = 'externals' -SUB_EXTERNALS_PATH = 'src' # For mixed test repos, - -# For testing behavior with '.' instead of an explicit paths. -SIMPLE_LOCAL_ONLY_NAME = '.' - -# Externals files. -CFG_NAME = 'externals.cfg' # We construct this on a per-test basis. -CFG_SUB_NAME = 'sub-externals.cfg' # Already exists in mixed-cont-ext repo. - -# Arbitrary text file in all the test repos. -README_NAME = 'readme.txt' - -# Branch that exists in both the simple and simple-fork repos. -REMOTE_BRANCH_FEATURE2 = 'feature2' - -SVN_TEST_REPO = 'https://github.com/escomp/cesm' - -# Disable too-many-public-methods error -# pylint: disable=R0904 - -def setUpModule(): # pylint: disable=C0103 - """Setup for all tests in this module. It is called once per module! - """ - logging.basicConfig(filename=LOG_FILE_NAME, - format='%(levelname)s : %(asctime)s : %(message)s', - datefmt='%Y-%m-%d %H:%M:%S', - level=logging.DEBUG) - repo_root = os.path.join(os.getcwd(), TMP_REPO_DIR_NAME) - repo_root = os.path.abspath(repo_root) - # delete if it exists from previous runs - try: - shutil.rmtree(repo_root) - except BaseException: - pass - # create clean dir for this run - os.mkdir(repo_root) - - # Make available to all tests in this file. - global module_tmp_root_dir - assert module_tmp_root_dir == None, module_tmp_root_dir - module_tmp_root_dir = repo_root - - -class RepoUtils(object): - """Convenience methods for interacting with git repos.""" - @staticmethod - def create_branch(repo_base_dir, external_name, branch, with_commit=False): - """Create branch and optionally (with_commit) add a single commit. - """ - # pylint: disable=R0913 - cwd = os.getcwd() - repo_root = os.path.join(repo_base_dir, EXTERNALS_PATH, external_name) - os.chdir(repo_root) - cmd = ['git', 'checkout', '-b', branch, ] - execute_subprocess(cmd) - if with_commit: - msg = 'start work on {0}'.format(branch) - with open(README_NAME, 'a') as handle: - handle.write(msg) - cmd = ['git', 'add', README_NAME, ] - execute_subprocess(cmd) - cmd = ['git', 'commit', '-m', msg, ] - execute_subprocess(cmd) - os.chdir(cwd) - - @staticmethod - def create_commit(repo_base_dir, external_name): - """Make a commit to the given external. - - This is used to test sync state changes from local commits on - detached heads and tracking branches. - """ - cwd = os.getcwd() - repo_root = os.path.join(repo_base_dir, EXTERNALS_PATH, external_name) - os.chdir(repo_root) - - msg = 'work on great new feature!' 
- with open(README_NAME, 'a') as handle: - handle.write(msg) - cmd = ['git', 'add', README_NAME, ] - execute_subprocess(cmd) - cmd = ['git', 'commit', '-m', msg, ] - execute_subprocess(cmd) - os.chdir(cwd) - - @staticmethod - def clone_test_repo(bare_root, test_id, parent_repo_name, dest_dir_in): - """Clone repo at / into dest_dir_in or local per-test-subdir. - - Returns output dir. - """ - parent_repo_dir = os.path.join(bare_root, parent_repo_name) - if dest_dir_in is None: - # create unique subdir for this test - test_dir_name = test_id - print("Test repository name: {0}".format(test_dir_name)) - dest_dir = os.path.join(module_tmp_root_dir, test_dir_name) - else: - dest_dir = dest_dir_in - - # pylint: disable=W0212 - GitRepository._git_clone(parent_repo_dir, dest_dir, VERBOSITY_DEFAULT) - return dest_dir - - @staticmethod - def add_file_to_repo(under_test_dir, filename, tracked): - """Add a file to the repository so we can put it into a dirty state - - """ - cwd = os.getcwd() - os.chdir(under_test_dir) - with open(filename, 'w') as tmp: - tmp.write('Hello, world!') - - if tracked: - # NOTE(bja, 2018-01) brittle hack to obtain repo dir and - # file name - path_data = filename.split('/') - repo_dir = os.path.join(path_data[0], path_data[1]) - os.chdir(repo_dir) - tracked_file = path_data[2] - cmd = ['git', 'add', tracked_file] - execute_subprocess(cmd) - - os.chdir(cwd) - -class GenerateExternalsDescriptionCfgV1(object): - """Building blocks to create ExternalsDescriptionCfgV1 files. - - Basic usage: create_config() multiple create_*(), then write_config(). - Optionally after that: write_with_*(). - """ - - def __init__(self, bare_root): - self._schema_version = '1.1.0' - self._config = None - - # directory where we have test repositories (which we will clone for - # tests) - self._bare_root = bare_root - - def write_config(self, dest_dir, filename=CFG_NAME): - """Write self._config to disk - - """ - dest_path = os.path.join(dest_dir, filename) - with open(dest_path, 'w') as configfile: - self._config.write(configfile) - - def create_config(self): - """Create an config object and add the required metadata section - - """ - self._config = config_parser() - self.create_metadata() - - def create_metadata(self): - """Create the metadata section of the config file - """ - self._config.add_section(DESCRIPTION_SECTION) - - self._config.set(DESCRIPTION_SECTION, VERSION_ITEM, - self._schema_version) - - def url_for_repo_path(self, repo_path, repo_path_abs=None): - if repo_path_abs is not None: - return repo_path_abs - else: - return os.path.join(self._bare_root, repo_path) - - def create_section(self, repo_path, name, tag='', branch='', - ref_hash='', required=True, path=EXTERNALS_PATH, - sub_externals='', repo_path_abs=None, from_submodule=False, - sparse='', nested=False): - # pylint: disable=too-many-branches - """Create a config ExternalsDescription section with the given name. - - Autofills some items and handles some optional items. - - repo_path_abs overrides repo_path (which is relative to the bare repo) - path is a subdir under repo_path to check out to. 
- """ - # pylint: disable=R0913 - self._config.add_section(name) - if not from_submodule: - if nested: - self._config.set(name, ExternalsDescription.PATH, path) - else: - self._config.set(name, ExternalsDescription.PATH, - os.path.join(path, name)) - - self._config.set(name, ExternalsDescription.PROTOCOL, - ExternalsDescription.PROTOCOL_GIT) - - # from_submodules is incompatible with some other options, turn them off - if (from_submodule and - ((repo_path_abs is not None) or tag or ref_hash or branch)): - printlog('create_section: "from_submodule" is incompatible with ' - '"repo_url", "tag", "hash", and "branch" options;\n' - 'Ignoring those options for {}'.format(name)) - repo_url = None - tag = '' - ref_hash = '' - branch = '' - - repo_url = self.url_for_repo_path(repo_path, repo_path_abs) - - if not from_submodule: - self._config.set(name, ExternalsDescription.REPO_URL, repo_url) - - self._config.set(name, ExternalsDescription.REQUIRED, str(required)) - - if tag: - self._config.set(name, ExternalsDescription.TAG, tag) - - if branch: - self._config.set(name, ExternalsDescription.BRANCH, branch) - - if ref_hash: - self._config.set(name, ExternalsDescription.HASH, ref_hash) - - if sub_externals: - self._config.set(name, ExternalsDescription.EXTERNALS, - sub_externals) - - if sparse: - self._config.set(name, ExternalsDescription.SPARSE, sparse) - - if from_submodule: - self._config.set(name, ExternalsDescription.SUBMODULE, "True") - - def create_section_reference_to_subexternal(self, name): - """Just a reference to another externals file. - - """ - # pylint: disable=R0913 - self._config.add_section(name) - self._config.set(name, ExternalsDescription.PATH, LOCAL_PATH_INDICATOR) - - self._config.set(name, ExternalsDescription.PROTOCOL, - ExternalsDescription.PROTOCOL_EXTERNALS_ONLY) - - self._config.set(name, ExternalsDescription.REPO_URL, - LOCAL_PATH_INDICATOR) - - self._config.set(name, ExternalsDescription.REQUIRED, str(True)) - - self._config.set(name, ExternalsDescription.EXTERNALS, CFG_SUB_NAME) - - def create_svn_external(self, name, tag='', branch=''): - """Create a config section for an svn repository. - - """ - self._config.add_section(name) - self._config.set(name, ExternalsDescription.PATH, - os.path.join(EXTERNALS_PATH, name)) - - self._config.set(name, ExternalsDescription.PROTOCOL, - ExternalsDescription.PROTOCOL_SVN) - - self._config.set(name, ExternalsDescription.REPO_URL, SVN_TEST_REPO) - - self._config.set(name, ExternalsDescription.REQUIRED, str(True)) - - if tag: - self._config.set(name, ExternalsDescription.TAG, tag) - - if branch: - self._config.set(name, ExternalsDescription.BRANCH, branch) - - def write_with_git_branch(self, dest_dir, name, branch, new_remote_repo_path=None): - """Update fields in our config and write it to disk. - - name is the key of the ExternalsDescription in self._config to update. - """ - # pylint: disable=R0913 - self._config.set(name, ExternalsDescription.BRANCH, branch) - - if new_remote_repo_path: - if new_remote_repo_path == SIMPLE_LOCAL_ONLY_NAME: - repo_url = SIMPLE_LOCAL_ONLY_NAME - else: - repo_url = os.path.join(self._bare_root, new_remote_repo_path) - self._config.set(name, ExternalsDescription.REPO_URL, repo_url) - - try: - # remove the tag if it existed - self._config.remove_option(name, ExternalsDescription.TAG) - except BaseException: - pass - - self.write_config(dest_dir) - - def write_with_svn_branch(self, dest_dir, name, branch): - """Update a repository branch, and potentially the remote. 
- """ - # pylint: disable=R0913 - self._config.set(name, ExternalsDescription.BRANCH, branch) - - try: - # remove the tag if it existed - self._config.remove_option(name, ExternalsDescription.TAG) - except BaseException: - pass - - self.write_config(dest_dir) - - def write_with_tag_and_remote_repo(self, dest_dir, name, tag, new_remote_repo_path, - remove_branch=True): - """Update a repository tag and the remote. - - NOTE(bja, 2017-11) remove_branch=False should result in an - overspecified external with both a branch and tag. This is - used for error condition testing. - - """ - # pylint: disable=R0913 - self._config.set(name, ExternalsDescription.TAG, tag) - - if new_remote_repo_path: - repo_url = os.path.join(self._bare_root, new_remote_repo_path) - self._config.set(name, ExternalsDescription.REPO_URL, repo_url) - - try: - # remove the branch if it existed - if remove_branch: - self._config.remove_option(name, ExternalsDescription.BRANCH) - except BaseException: - pass - - self.write_config(dest_dir) - - def write_without_branch_tag(self, dest_dir, name): - """Update a repository protocol, and potentially the remote - """ - # pylint: disable=R0913 - try: - # remove the branch if it existed - self._config.remove_option(name, ExternalsDescription.BRANCH) - except BaseException: - pass - - try: - # remove the tag if it existed - self._config.remove_option(name, ExternalsDescription.TAG) - except BaseException: - pass - - self.write_config(dest_dir) - - def write_without_repo_url(self, dest_dir, name): - """Update a repository protocol, and potentially the remote - """ - # pylint: disable=R0913 - try: - # remove the repo url if it existed - self._config.remove_option(name, ExternalsDescription.REPO_URL) - except BaseException: - pass - - self.write_config(dest_dir) - - def write_with_protocol(self, dest_dir, name, protocol, repo_path=None): - """Update a repository protocol, and potentially the remote - """ - # pylint: disable=R0913 - self._config.set(name, ExternalsDescription.PROTOCOL, protocol) - - if repo_path: - repo_url = os.path.join(self._bare_root, repo_path) - self._config.set(name, ExternalsDescription.REPO_URL, repo_url) - - self.write_config(dest_dir) - - -def _execute_checkout_in_dir(dirname, args, debug_env=''): - """Execute the checkout command in the appropriate repo dir with the - specified additional args. - - args should be a list of strings. - debug_env shuld be a string of the form 'FOO=bar' or the empty string. - - Note that we are calling the command line processing and main - routines and not using a subprocess call so that we get code - coverage results! Note this means that environment variables are passed - to checkout_externals via os.environ; debug_env is just used to aid - manual reproducibility of a given call. - - Returns (overall_status, tree_status) - where overall_status is 0 for success, nonzero otherwise. - and tree_status is set if --status was passed in, None otherwise. - - Note this command executes the checkout command, it doesn't - necessarily do any checking out (e.g. if --status is passed in). - """ - cwd = os.getcwd() - - # Construct a command line for reproducibility; this command is not - # actually executed in the test. 
- os.chdir(dirname) - cmdline = ['--externals', CFG_NAME, ] - cmdline += args - manual_cmd = ('Running equivalent of:\n' - 'pushd {dirname}; ' - '{debug_env} /path/to/checkout_externals {args}'.format( - dirname=dirname, debug_env=debug_env, - args=' '.join(cmdline))) - printlog(manual_cmd) - options = checkout.commandline_arguments(cmdline) - overall_status, tree_status = checkout.main(options) - os.chdir(cwd) - return overall_status, tree_status - -class BaseTestSysCheckout(unittest.TestCase): - """Base class of reusable systems level test setup for - checkout_externals - - """ - # NOTE(bja, 2017-11) pylint complains about long method names, but - # it is hard to differentiate tests without making them more - # cryptic. - # pylint: disable=invalid-name - - # Command-line args for checkout_externals, used in execute_checkout_in_dir() - status_args = ['--status'] - checkout_args = [] - optional_args = ['--optional'] - verbose_args = ['--status', '--verbose'] - - def setUp(self): - """Setup for all individual checkout_externals tests - """ - # directory we want to return to after the test system and - # checkout_externals are done cd'ing all over the place. - self._return_dir = os.getcwd() - - self._test_id = self.id().split('.')[-1] - - # find root - if os.path.exists(os.path.join(os.getcwd(), 'checkout_externals')): - root_dir = os.path.abspath(os.getcwd()) - else: - # maybe we are in a subdir, search up - root_dir = os.path.abspath(os.path.join(os.getcwd(), os.pardir)) - while os.path.basename(root_dir): - if os.path.exists(os.path.join(root_dir, 'checkout_externals')): - break - root_dir = os.path.dirname(root_dir) - - if not os.path.exists(os.path.join(root_dir, 'checkout_externals')): - raise RuntimeError('Cannot find checkout_externals') - - # path to the executable - self._checkout = os.path.join(root_dir, 'checkout_externals') - - # directory where we have test repositories (which we will clone for - # tests) - self._bare_root = os.path.abspath( - os.path.join(root_dir, 'test', BARE_REPO_ROOT_NAME)) - - # set the input file generator - self._generator = GenerateExternalsDescriptionCfgV1(self._bare_root) - # set the input file generator for secondary externals - self._sub_generator = GenerateExternalsDescriptionCfgV1(self._bare_root) - - def tearDown(self): - """Tear down for individual tests - """ - # return to our common starting point - os.chdir(self._return_dir) - - # (in case this was set) Don't pollute environment of other tests. - os.environ.pop(MIXED_CONT_EXT_ROOT_ENV_VAR, - None) # Don't care if key wasn't set. 
- - def clone_test_repo(self, parent_repo_name, dest_dir_in=None): - """Clones repo under self._bare_root""" - return RepoUtils.clone_test_repo(self._bare_root, self._test_id, - parent_repo_name, dest_dir_in) - - def execute_checkout_in_dir(self, dirname, args, debug_env=''): - overall_status, tree_status = _execute_checkout_in_dir(dirname, args, - debug_env=debug_env) - self.assertEqual(overall_status, 0) - return tree_status - - def execute_checkout_with_status(self, dirname, args, debug_env=''): - """Calls checkout a second time to get status if needed.""" - tree_status = self.execute_checkout_in_dir( - dirname, args, debug_env=debug_env) - if tree_status is None: - tree_status = self.execute_checkout_in_dir(dirname, - self.status_args, - debug_env=debug_env) - self.assertNotEqual(tree_status, None) - return tree_status - - def _check_sync_clean(self, ext_status, expected_sync_state, - expected_clean_state): - self.assertEqual(ext_status.sync_state, expected_sync_state) - self.assertEqual(ext_status.clean_state, expected_clean_state) - - @staticmethod - def _external_path(section_name, base_path=EXTERNALS_PATH): - return './{0}/{1}'.format(base_path, section_name) - - def _check_file_exists(self, repo_dir, pathname): - "Check that exists in " - self.assertTrue(os.path.exists(os.path.join(repo_dir, pathname))) - - def _check_file_absent(self, repo_dir, pathname): - "Check that does not exist in " - self.assertFalse(os.path.exists(os.path.join(repo_dir, pathname))) - - -class TestSysCheckout(BaseTestSysCheckout): - """Run systems level tests of checkout_externals - """ - # NOTE(bja, 2017-11) pylint complains about long method names, but - # it is hard to differentiate tests without making them more - # cryptic. - # pylint: disable=invalid-name - - # ---------------------------------------------------------------- - # - # Run systems tests - # - # ---------------------------------------------------------------- - def test_required_bytag(self): - """Check out a required external pointing to a git tag.""" - cloned_repo_dir = self.clone_test_repo(CONTAINER_REPO) - self._generator.create_config() - self._generator.create_section(SIMPLE_REPO, TAG_SECTION, - tag='tag1') - self._generator.write_config(cloned_repo_dir) - - # externals start out 'empty' aka not checked out. - tree = self.execute_checkout_in_dir(cloned_repo_dir, - self.status_args) - local_path_rel = self._external_path(TAG_SECTION) - self._check_sync_clean(tree[local_path_rel], - ExternalStatus.EMPTY, - ExternalStatus.DEFAULT) - local_path_abs = os.path.join(cloned_repo_dir, local_path_rel) - self.assertFalse(os.path.exists(local_path_abs)) - - # after checkout, the external is 'clean' aka at the correct version. - tree = self.execute_checkout_with_status(cloned_repo_dir, - self.checkout_args) - self._check_sync_clean(tree[local_path_rel], - ExternalStatus.STATUS_OK, - ExternalStatus.STATUS_OK) - - # Actually checked out the desired repo. - self.assertEqual('origin', GitRepository._remote_name_for_url( - # Which url to look up - self._generator.url_for_repo_path(SIMPLE_REPO), - # Which directory has the local checked-out repo. - dirname=local_path_abs)) - - # Actually checked out the desired tag. 
- (tag_found, tag_name) = GitRepository._git_current_tag(local_path_abs) - self.assertEqual(tag_name, 'tag1') - - # Check existence of some simp_tag files - tag_path = os.path.join('externals', TAG_SECTION) - self._check_file_exists(cloned_repo_dir, - os.path.join(tag_path, README_NAME)) - # Subrepo should not exist (not referenced by configs). - self._check_file_absent(cloned_repo_dir, os.path.join(tag_path, - 'simple_subdir', - 'subdir_file.txt')) - - def test_required_bybranch(self): - """Check out a required external pointing to a git branch.""" - cloned_repo_dir = self.clone_test_repo(CONTAINER_REPO) - self._generator.create_config() - self._generator.create_section(SIMPLE_REPO, BRANCH_SECTION, - branch=REMOTE_BRANCH_FEATURE2) - self._generator.write_config(cloned_repo_dir) - - # externals start out 'empty' aka not checked out. - tree = self.execute_checkout_in_dir(cloned_repo_dir, - self.status_args) - local_path_rel = self._external_path(BRANCH_SECTION) - self._check_sync_clean(tree[local_path_rel], - ExternalStatus.EMPTY, - ExternalStatus.DEFAULT) - local_path_abs = os.path.join(cloned_repo_dir, local_path_rel) - self.assertFalse(os.path.exists(local_path_abs)) - - # after checkout, the external is 'clean' aka at the correct version. - tree = self.execute_checkout_with_status(cloned_repo_dir, - self.checkout_args) - self._check_sync_clean(tree[local_path_rel], - ExternalStatus.STATUS_OK, - ExternalStatus.STATUS_OK) - self.assertTrue(os.path.exists(local_path_abs)) - - # Actually checked out the desired repo. - self.assertEqual('origin', GitRepository._remote_name_for_url( - # Which url to look up - self._generator.url_for_repo_path(SIMPLE_REPO), - # Which directory has the local checked-out repo. - dirname=local_path_abs)) - - # Actually checked out the desired branch. - (branch_found, branch_name) = GitRepository._git_current_remote_branch( - local_path_abs) - self.assertEquals(branch_name, 'origin/' + REMOTE_BRANCH_FEATURE2) - - def test_required_byhash(self): - """Check out a required external pointing to a git hash.""" - cloned_repo_dir = self.clone_test_repo(CONTAINER_REPO) - self._generator.create_config() - self._generator.create_section(SIMPLE_REPO, HASH_SECTION, - ref_hash='60b1cc1a38d63') - self._generator.write_config(cloned_repo_dir) - - # externals start out 'empty' aka not checked out. - tree = self.execute_checkout_in_dir(cloned_repo_dir, - self.status_args) - local_path_rel = self._external_path(HASH_SECTION) - self._check_sync_clean(tree[local_path_rel], - ExternalStatus.EMPTY, - ExternalStatus.DEFAULT) - local_path_abs = os.path.join(cloned_repo_dir, local_path_rel) - self.assertFalse(os.path.exists(local_path_abs)) - - # after checkout, the externals are 'clean' aka at their correct version. - tree = self.execute_checkout_with_status(cloned_repo_dir, - self.checkout_args) - self._check_sync_clean(tree[local_path_rel], - ExternalStatus.STATUS_OK, - ExternalStatus.STATUS_OK) - - # Actually checked out the desired repo. - self.assertEqual('origin', GitRepository._remote_name_for_url( - # Which url to look up - self._generator.url_for_repo_path(SIMPLE_REPO), - # Which directory has the local checked-out repo. - dirname=local_path_abs)) - - # Actually checked out the desired hash. - (hash_found, hash_name) = GitRepository._git_current_hash( - local_path_abs) - self.assertTrue(hash_name.startswith('60b1cc1a38d63'), - msg=hash_name) - - def test_container_nested_required(self): - """Verify that a container with nested subrepos generates the correct initial status. 
- Tests over all possible permutations - """ - # Output subdirs for each of the externals, to test that one external can be - # checked out in a subdir of another. - NESTED_SUBDIR = ['./fred', './fred/wilma', './fred/wilma/barney'] - - # Assert that each type of external (e.g. tag vs branch) can be at any parent level - # (e.g. child/parent/grandparent). - orders = [[0, 1, 2], [1, 2, 0], [2, 0, 1], - [0, 2, 1], [2, 1, 0], [1, 0, 2]] - for n, order in enumerate(orders): - dest_dir = os.path.join(module_tmp_root_dir, self._test_id, - "test"+str(n)) - cloned_repo_dir = self.clone_test_repo(CONTAINER_REPO, - dest_dir_in=dest_dir) - self._generator.create_config() - # We happen to check out each section via a different reference (tag/branch/hash) but - # those don't really matter, we just need to check out three repos into a nested set of - # directories. - self._generator.create_section( - SIMPLE_REPO, TAG_SECTION, nested=True, - tag='tag1', path=NESTED_SUBDIR[order[0]]) - self._generator.create_section( - SIMPLE_REPO, BRANCH_SECTION, nested=True, - branch=REMOTE_BRANCH_FEATURE2, path=NESTED_SUBDIR[order[1]]) - self._generator.create_section( - SIMPLE_REPO, HASH_SECTION, nested=True, - ref_hash='60b1cc1a38d63', path=NESTED_SUBDIR[order[2]]) - self._generator.write_config(cloned_repo_dir) - - # all externals start out 'empty' aka not checked out. - tree = self.execute_checkout_in_dir(cloned_repo_dir, - self.status_args) - self._check_sync_clean(tree[NESTED_SUBDIR[order[0]]], - ExternalStatus.EMPTY, - ExternalStatus.DEFAULT) - self._check_sync_clean(tree[NESTED_SUBDIR[order[1]]], - ExternalStatus.EMPTY, - ExternalStatus.DEFAULT) - self._check_sync_clean(tree[NESTED_SUBDIR[order[2]]], - ExternalStatus.EMPTY, - ExternalStatus.DEFAULT) - - # after checkout, all the repos are 'clean'. - tree = self.execute_checkout_with_status(cloned_repo_dir, - self.checkout_args) - self._check_sync_clean(tree[NESTED_SUBDIR[order[0]]], - ExternalStatus.STATUS_OK, - ExternalStatus.STATUS_OK) - self._check_sync_clean(tree[NESTED_SUBDIR[order[1]]], - ExternalStatus.STATUS_OK, - ExternalStatus.STATUS_OK) - self._check_sync_clean(tree[NESTED_SUBDIR[order[2]]], - ExternalStatus.STATUS_OK, - ExternalStatus.STATUS_OK) - - def test_container_simple_optional(self): - """Verify that container with an optional simple subrepos generates - the correct initial status. - - """ - # create repo and externals config. - cloned_repo_dir = self.clone_test_repo(CONTAINER_REPO) - self._generator.create_config() - self._generator.create_section(SIMPLE_REPO, 'simp_req', - tag='tag1') - - self._generator.create_section(SIMPLE_REPO, 'simp_opt', - tag='tag1', required=False) - - self._generator.write_config(cloned_repo_dir) - - # all externals start out 'empty' aka not checked out. - tree = self.execute_checkout_in_dir(cloned_repo_dir, - self.status_args) - req_status = tree[self._external_path('simp_req')] - self._check_sync_clean(req_status, - ExternalStatus.EMPTY, - ExternalStatus.DEFAULT) - self.assertEqual(req_status.source_type, ExternalStatus.MANAGED) - - opt_status = tree[self._external_path('simp_opt')] - self._check_sync_clean(opt_status, - ExternalStatus.EMPTY, - ExternalStatus.DEFAULT) - self.assertEqual(opt_status.source_type, ExternalStatus.OPTIONAL) - - # after checkout, required external is clean, optional is still empty. 
- tree = self.execute_checkout_with_status(cloned_repo_dir, - self.checkout_args) - req_status = tree[self._external_path('simp_req')] - self._check_sync_clean(req_status, - ExternalStatus.STATUS_OK, - ExternalStatus.STATUS_OK) - self.assertEqual(req_status.source_type, ExternalStatus.MANAGED) - - opt_status = tree[self._external_path('simp_opt')] - self._check_sync_clean(opt_status, - ExternalStatus.EMPTY, - ExternalStatus.DEFAULT) - self.assertEqual(opt_status.source_type, ExternalStatus.OPTIONAL) - - # after checking out optionals, the optional external is also clean. - tree = self.execute_checkout_with_status(cloned_repo_dir, - self.optional_args) - req_status = tree[self._external_path('simp_req')] - self._check_sync_clean(req_status, - ExternalStatus.STATUS_OK, - ExternalStatus.STATUS_OK) - self.assertEqual(req_status.source_type, ExternalStatus.MANAGED) - - opt_status = tree[self._external_path('simp_opt')] - self._check_sync_clean(opt_status, - ExternalStatus.STATUS_OK, - ExternalStatus.STATUS_OK) - self.assertEqual(opt_status.source_type, ExternalStatus.OPTIONAL) - - def test_container_simple_verbose(self): - """Verify that verbose status matches non-verbose. - """ - cloned_repo_dir = self.clone_test_repo(CONTAINER_REPO) - self._generator.create_config() - self._generator.create_section(SIMPLE_REPO, TAG_SECTION, - tag='tag1') - self._generator.write_config(cloned_repo_dir) - - # after checkout, all externals should be 'clean'. - tree = self.execute_checkout_with_status(cloned_repo_dir, - self.checkout_args) - self._check_sync_clean(tree[self._external_path(TAG_SECTION)], - ExternalStatus.STATUS_OK, - ExternalStatus.STATUS_OK) - - # 'Verbose' status should tell the same story. - tree = self.execute_checkout_in_dir(cloned_repo_dir, - self.verbose_args) - self._check_sync_clean(tree[self._external_path(TAG_SECTION)], - ExternalStatus.STATUS_OK, - ExternalStatus.STATUS_OK) - - def test_container_simple_dirty(self): - """Verify that a container with a new tracked file is marked dirty. - """ - cloned_repo_dir = self.clone_test_repo(CONTAINER_REPO) - self._generator.create_config() - self._generator.create_section(SIMPLE_REPO, TAG_SECTION, - tag='tag1') - self._generator.write_config(cloned_repo_dir) - - # checkout, should start out clean. - tree = self.execute_checkout_with_status(cloned_repo_dir, self.checkout_args) - self._check_sync_clean(tree[self._external_path(TAG_SECTION)], - ExternalStatus.STATUS_OK, - ExternalStatus.STATUS_OK) - - # add a tracked file to the simp_tag external, should be dirty. - RepoUtils.add_file_to_repo(cloned_repo_dir, - 'externals/{0}/tmp.txt'.format(TAG_SECTION), - tracked=True) - tree = self.execute_checkout_in_dir(cloned_repo_dir, self.status_args) - self._check_sync_clean(tree[self._external_path(TAG_SECTION)], - ExternalStatus.STATUS_OK, - ExternalStatus.DIRTY) - - # Re-checkout; simp_tag should still be dirty. - tree = self.execute_checkout_with_status(cloned_repo_dir, - self.checkout_args) - self._check_sync_clean(tree[self._external_path(TAG_SECTION)], - ExternalStatus.STATUS_OK, - ExternalStatus.DIRTY) - - def test_container_simple_untracked(self): - """Verify that a container with simple subrepos and a untracked files - is not considered 'dirty' and will attempt an update. - - """ - cloned_repo_dir = self.clone_test_repo(CONTAINER_REPO) - self._generator.create_config() - self._generator.create_section(SIMPLE_REPO, TAG_SECTION, - tag='tag1') - self._generator.write_config(cloned_repo_dir) - - # checkout, should start out clean. 
- tree = self.execute_checkout_with_status(cloned_repo_dir, - self.checkout_args) - self._check_sync_clean(tree[self._external_path(TAG_SECTION)], - ExternalStatus.STATUS_OK, - ExternalStatus.STATUS_OK) - - # add an untracked file to the simp_tag external, should stay clean. - RepoUtils.add_file_to_repo(cloned_repo_dir, - 'externals/{0}/tmp.txt'.format(TAG_SECTION), - tracked=False) - tree = self.execute_checkout_in_dir(cloned_repo_dir, self.status_args) - self._check_sync_clean(tree[self._external_path(TAG_SECTION)], - ExternalStatus.STATUS_OK, - ExternalStatus.STATUS_OK) - - # After checkout, the external should still be 'clean'. - tree = self.execute_checkout_with_status(cloned_repo_dir, - self.checkout_args) - self._check_sync_clean(tree[self._external_path(TAG_SECTION)], - ExternalStatus.STATUS_OK, - ExternalStatus.STATUS_OK) - - def test_container_simple_detached_sync(self): - """Verify that a container with simple subrepos generates the correct - out of sync status when making commits from a detached head - state. - - For more info about 'detached head' state: https://www.cloudbees.com/blog/git-detached-head - """ - cloned_repo_dir = self.clone_test_repo(CONTAINER_REPO) - self._generator.create_config() - self._generator.create_section(SIMPLE_REPO, TAG_SECTION, - tag='tag1') - - self._generator.create_section(SIMPLE_REPO, BRANCH_SECTION, - branch=REMOTE_BRANCH_FEATURE2) - - self._generator.create_section(SIMPLE_REPO, 'simp_hash', - ref_hash='60b1cc1a38d63') - - self._generator.write_config(cloned_repo_dir) - - # externals start out 'empty' aka not checked out. - tree = self.execute_checkout_in_dir(cloned_repo_dir, self.status_args) - self._check_sync_clean(tree[self._external_path(TAG_SECTION)], - ExternalStatus.EMPTY, - ExternalStatus.DEFAULT) - self._check_sync_clean(tree[self._external_path(BRANCH_SECTION)], - ExternalStatus.EMPTY, - ExternalStatus.DEFAULT) - self._check_sync_clean(tree[self._external_path(HASH_SECTION)], - ExternalStatus.EMPTY, - ExternalStatus.DEFAULT) - - # checkout - self.execute_checkout_in_dir(cloned_repo_dir, self.checkout_args) - - # Commit on top of the tag and hash (creating the detached head state in those two - # externals' repos) - # The branch commit does not create the detached head state, but here for completeness. - RepoUtils.create_commit(cloned_repo_dir, TAG_SECTION) - RepoUtils.create_commit(cloned_repo_dir, HASH_SECTION) - RepoUtils.create_commit(cloned_repo_dir, BRANCH_SECTION) - - # sync status of all three should be 'modified' (uncommitted changes) - # clean status is 'ok' (matches externals version) - tree = self.execute_checkout_in_dir(cloned_repo_dir, self.status_args) - self._check_sync_clean(tree[self._external_path(TAG_SECTION)], - ExternalStatus.MODEL_MODIFIED, - ExternalStatus.STATUS_OK) - self._check_sync_clean(tree[self._external_path(BRANCH_SECTION)], - ExternalStatus.MODEL_MODIFIED, - ExternalStatus.STATUS_OK) - self._check_sync_clean(tree[self._external_path(HASH_SECTION)], - ExternalStatus.MODEL_MODIFIED, - ExternalStatus.STATUS_OK) - - # after checkout, all externals should be totally clean (no uncommitted changes, - # and matches externals version). 
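The detached-head wording above refers to HEAD pointing directly at a commit rather than at a branch, which is what checking out a tag or a bare hash produces. A minimal sketch of detecting that state (an illustration, not the manic implementation):

    import subprocess

    def is_detached_head(repo_dir):
        """'git symbolic-ref' succeeds only when HEAD points at a branch."""
        result = subprocess.run(['git', 'symbolic-ref', '--quiet', 'HEAD'],
                                cwd=repo_dir, stdout=subprocess.PIPE)
        return result.returncode != 0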
- tree = self.execute_checkout_with_status(cloned_repo_dir, self.checkout_args) - self._check_sync_clean(tree[self._external_path(TAG_SECTION)], - ExternalStatus.STATUS_OK, - ExternalStatus.STATUS_OK) - self._check_sync_clean(tree[self._external_path(BRANCH_SECTION)], - ExternalStatus.STATUS_OK, - ExternalStatus.STATUS_OK) - self._check_sync_clean(tree[self._external_path(HASH_SECTION)], - ExternalStatus.STATUS_OK, - ExternalStatus.STATUS_OK) - - def test_container_remote_branch(self): - """Verify that a container with remote branch change works - - """ - cloned_repo_dir = self.clone_test_repo(CONTAINER_REPO) - self._generator.create_config() - self._generator.create_section(SIMPLE_REPO, BRANCH_SECTION, - branch=REMOTE_BRANCH_FEATURE2) - self._generator.write_config(cloned_repo_dir) - - # initial checkout - self.execute_checkout_in_dir(cloned_repo_dir, self.checkout_args) - - # update the branch external to point to a different remote with the same branch, - # then simp_branch should be out of sync - self._generator.write_with_git_branch(cloned_repo_dir, - name=BRANCH_SECTION, - branch=REMOTE_BRANCH_FEATURE2, - new_remote_repo_path=SIMPLE_FORK_REPO) - tree = self.execute_checkout_in_dir(cloned_repo_dir, self.status_args) - self._check_sync_clean(tree[self._external_path(BRANCH_SECTION)], - ExternalStatus.MODEL_MODIFIED, - ExternalStatus.STATUS_OK) - - # checkout new externals, now simp_branch should be clean. - tree = self.execute_checkout_with_status(cloned_repo_dir, self.checkout_args) - self._check_sync_clean(tree[self._external_path(BRANCH_SECTION)], - ExternalStatus.STATUS_OK, - ExternalStatus.STATUS_OK) - - def test_container_remote_tag_same_branch(self): - """Verify that a container with remote tag change works. The new tag - should not be in the original repo, only the new remote - fork. The new tag is automatically fetched because it is on - the branch. - - """ - cloned_repo_dir = self.clone_test_repo(CONTAINER_REPO) - self._generator.create_config() - self._generator.create_section(SIMPLE_REPO, BRANCH_SECTION, - branch=REMOTE_BRANCH_FEATURE2) - self._generator.write_config(cloned_repo_dir) - - # initial checkout - self.execute_checkout_in_dir(cloned_repo_dir, self.checkout_args) - - # update the config file to point to a different remote with - # the new tag replacing the old branch. Tag MUST NOT be in the original - # repo! status of simp_branch should then be out of sync - self._generator.write_with_tag_and_remote_repo(cloned_repo_dir, BRANCH_SECTION, - tag='forked-feature-v1', - new_remote_repo_path=SIMPLE_FORK_REPO) - tree = self.execute_checkout_in_dir(cloned_repo_dir, - self.status_args) - self._check_sync_clean(tree[self._external_path(BRANCH_SECTION)], - ExternalStatus.MODEL_MODIFIED, - ExternalStatus.STATUS_OK) - - # checkout new externals, then should be synced. - tree = self.execute_checkout_with_status(cloned_repo_dir, - self.checkout_args) - self._check_sync_clean(tree[self._external_path(BRANCH_SECTION)], - ExternalStatus.STATUS_OK, - ExternalStatus.STATUS_OK) - - def test_container_remote_tag_fetch_all(self): - """Verify that a container with remote tag change works. The new tag - should not be in the original repo, only the new remote - fork. It should also not be on a branch that will be fetched, - and therefore not fetched by default with 'git fetch'. 
It will - only be retrieved by 'git fetch --tags' - """ - cloned_repo_dir = self.clone_test_repo(CONTAINER_REPO) - self._generator.create_config() - self._generator.create_section(SIMPLE_REPO, BRANCH_SECTION, - branch=REMOTE_BRANCH_FEATURE2) - self._generator.write_config(cloned_repo_dir) - - # initial checkout - self.execute_checkout_in_dir(cloned_repo_dir, self.checkout_args) - - # update the config file to point to a different remote with - # the new tag instead of the old branch. Tag MUST NOT be in the original - # repo! status of simp_branch should then be out of sync. - self._generator.write_with_tag_and_remote_repo(cloned_repo_dir, BRANCH_SECTION, - tag='abandoned-feature', - new_remote_repo_path=SIMPLE_FORK_REPO) - tree = self.execute_checkout_in_dir(cloned_repo_dir, self.status_args) - self._check_sync_clean(tree[self._external_path(BRANCH_SECTION)], - ExternalStatus.MODEL_MODIFIED, - ExternalStatus.STATUS_OK) - - # checkout new externals, should be clean again. - tree = self.execute_checkout_with_status(cloned_repo_dir, - self.checkout_args) - self._check_sync_clean(tree[self._external_path(BRANCH_SECTION)], - ExternalStatus.STATUS_OK, - ExternalStatus.STATUS_OK) - - def test_container_preserve_dot(self): - """Verify that after inital checkout, modifying an external git repo - url to '.' and the current branch will leave it unchanged. - - """ - cloned_repo_dir = self.clone_test_repo(CONTAINER_REPO) - self._generator.create_config() - self._generator.create_section(SIMPLE_REPO, BRANCH_SECTION, - branch=REMOTE_BRANCH_FEATURE2) - self._generator.write_config(cloned_repo_dir) - - # initial checkout - self.execute_checkout_in_dir(cloned_repo_dir, self.checkout_args) - - # update the config file to point to a different remote with - # the same branch. - self._generator.write_with_git_branch(cloned_repo_dir, name=BRANCH_SECTION, - branch=REMOTE_BRANCH_FEATURE2, - new_remote_repo_path=SIMPLE_FORK_REPO) - # after checkout, should be clean again. - tree = self.execute_checkout_with_status(cloned_repo_dir, self.checkout_args) - self._check_sync_clean(tree[self._external_path(BRANCH_SECTION)], - ExternalStatus.STATUS_OK, - ExternalStatus.STATUS_OK) - - # update branch to point to a new branch that only exists in - # the local fork - RepoUtils.create_branch(cloned_repo_dir, external_name=BRANCH_SECTION, - branch='private-feature', with_commit=True) - self._generator.write_with_git_branch(cloned_repo_dir, name=BRANCH_SECTION, - branch='private-feature', - new_remote_repo_path=SIMPLE_LOCAL_ONLY_NAME) - # after checkout, should be clean again. - tree = self.execute_checkout_with_status(cloned_repo_dir, self.checkout_args) - self._check_sync_clean(tree[self._external_path(BRANCH_SECTION)], - ExternalStatus.STATUS_OK, - ExternalStatus.STATUS_OK) - - def test_container_mixed_subrepo(self): - """Verify container with mixed subrepo. - - The mixed subrepo has a sub-externals file with different - sub-externals on different branches. - - """ - cloned_repo_dir = self.clone_test_repo(CONTAINER_REPO) - - self._generator.create_config() - self._generator.create_section(MIXED_REPO, 'mixed_req', - branch='master', sub_externals=CFG_SUB_NAME) - self._generator.write_config(cloned_repo_dir) - - # The subrepo has a repo_url that uses this environment variable. - # It'll be cleared in tearDown(). - os.environ[MIXED_CONT_EXT_ROOT_ENV_VAR] = self._bare_root - debug_env = MIXED_CONT_EXT_ROOT_ENV_VAR + '=' + self._bare_root - - # inital checkout: all requireds are clean, and optional is empty. 
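On the fetch behavior the fetch-all test above depends on: a plain 'git fetch' only retrieves tags reachable from the fetched branch heads, while '--tags' asks for every tag on the remote. A minimal helper sketch (the remote name and wrapper are assumptions for illustration):

    import subprocess

    def fetch_with_all_tags(repo_dir, remote='origin'):
        """Fetch the remote branches and, with --tags, every tag as well."""
        subprocess.check_call(['git', 'fetch', remote, '--tags'], cwd=repo_dir)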
- tree = self.execute_checkout_with_status(cloned_repo_dir, - self.checkout_args, - debug_env=debug_env) - mixed_req_path = self._external_path('mixed_req') - self._check_sync_clean(tree[mixed_req_path], - ExternalStatus.STATUS_OK, - ExternalStatus.STATUS_OK) - sub_ext_base_path = "{0}/{1}/{2}".format(EXTERNALS_PATH, 'mixed_req', SUB_EXTERNALS_PATH) - # The already-checked-in subexternals file has a 'simp_branch' section - self._check_sync_clean(tree[self._external_path('simp_branch', base_path=sub_ext_base_path)], - ExternalStatus.STATUS_OK, - ExternalStatus.STATUS_OK) - - # update the mixed-use external to point to different branch - # status should become out of sync for mixed_req, but sub-externals - # are still in sync - self._generator.write_with_git_branch(cloned_repo_dir, name='mixed_req', - branch='new-feature', - new_remote_repo_path=MIXED_REPO) - tree = self.execute_checkout_in_dir(cloned_repo_dir, self.status_args, - debug_env=debug_env) - self._check_sync_clean(tree[mixed_req_path], - ExternalStatus.MODEL_MODIFIED, - ExternalStatus.STATUS_OK) - self._check_sync_clean(tree[self._external_path('simp_branch', base_path=sub_ext_base_path)], - ExternalStatus.STATUS_OK, - ExternalStatus.STATUS_OK) - - # run the checkout. Now the mixed use external and its sub-externals should be clean. - tree = self.execute_checkout_with_status(cloned_repo_dir, self.checkout_args, - debug_env=debug_env) - self._check_sync_clean(tree[mixed_req_path], - ExternalStatus.STATUS_OK, - ExternalStatus.STATUS_OK) - self._check_sync_clean(tree[self._external_path('simp_branch', base_path=sub_ext_base_path)], - ExternalStatus.STATUS_OK, - ExternalStatus.STATUS_OK) - - def test_container_component(self): - """Verify that optional component checkout works - """ - cloned_repo_dir = self.clone_test_repo(CONTAINER_REPO) - - # create the top level externals file - self._generator.create_config() - # Optional external, by tag. - self._generator.create_section(SIMPLE_REPO, 'simp_opt', - tag='tag1', required=False) - - # Required external, by branch. - self._generator.create_section(SIMPLE_REPO, BRANCH_SECTION, - branch=REMOTE_BRANCH_FEATURE2) - - # Required external, by hash. - self._generator.create_section(SIMPLE_REPO, HASH_SECTION, - ref_hash='60b1cc1a38d63') - self._generator.write_config(cloned_repo_dir) - - # inital checkout, first try a nonexistent component argument noref - checkout_args = ['simp_opt', 'noref'] - checkout_args.extend(self.checkout_args) - - with self.assertRaises(RuntimeError): - self.execute_checkout_in_dir(cloned_repo_dir, checkout_args) - - # Now explicitly check out one optional component.. - # Explicitly listed component (opt) should be present, the other two not. - checkout_args = ['simp_opt'] - checkout_args.extend(self.checkout_args) - tree = self.execute_checkout_with_status(cloned_repo_dir, - checkout_args) - self._check_sync_clean(tree[self._external_path('simp_opt')], - ExternalStatus.STATUS_OK, - ExternalStatus.STATUS_OK) - self._check_sync_clean(tree[self._external_path(BRANCH_SECTION)], - ExternalStatus.EMPTY, - ExternalStatus.DEFAULT) - self._check_sync_clean(tree[self._external_path(HASH_SECTION)], - ExternalStatus.EMPTY, - ExternalStatus.DEFAULT) - - # Check out a second component, this one required. - # Explicitly listed component (branch) should be present, the still-unlisted one (tag) not. 
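A sketch of the component-selection behavior the test above exercises: an unknown name raises a RuntimeError, and only the explicitly listed externals are acted on. The helper name and dictionary shape here are hypothetical, not manic's actual API:

    def select_components(externals, requested):
        """Filter an externals description down to the requested components."""
        unknown = [name for name in requested if name not in externals]
        if unknown:
            raise RuntimeError('unknown component(s): {0}'.format(', '.join(unknown)))
        return {name: externals[name] for name in requested}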
- checkout_args.append(BRANCH_SECTION) - tree = self.execute_checkout_with_status(cloned_repo_dir, - checkout_args) - self._check_sync_clean(tree[self._external_path('simp_opt')], - ExternalStatus.STATUS_OK, - ExternalStatus.STATUS_OK) - self._check_sync_clean(tree[self._external_path(BRANCH_SECTION)], - ExternalStatus.STATUS_OK, - ExternalStatus.STATUS_OK) - self._check_sync_clean(tree[self._external_path(HASH_SECTION)], - ExternalStatus.EMPTY, - ExternalStatus.DEFAULT) - - - def test_container_exclude_component(self): - """Verify that exclude component checkout works - """ - cloned_repo_dir = self.clone_test_repo(CONTAINER_REPO) - self._generator.create_config() - self._generator.create_section(SIMPLE_REPO, TAG_SECTION, - tag='tag1') - - self._generator.create_section(SIMPLE_REPO, BRANCH_SECTION, - branch=REMOTE_BRANCH_FEATURE2) - - self._generator.create_section(SIMPLE_REPO, 'simp_hash', - ref_hash='60b1cc1a38d63') - - self._generator.write_config(cloned_repo_dir) - - # inital checkout should result in all externals being clean except excluded TAG_SECTION. - checkout_args = ['--exclude', TAG_SECTION] - checkout_args.extend(self.checkout_args) - tree = self.execute_checkout_with_status(cloned_repo_dir, checkout_args) - self._check_sync_clean(tree[self._external_path(TAG_SECTION)], - ExternalStatus.EMPTY, - ExternalStatus.DEFAULT) - self._check_sync_clean(tree[self._external_path(BRANCH_SECTION)], - ExternalStatus.STATUS_OK, - ExternalStatus.STATUS_OK) - self._check_sync_clean(tree[self._external_path(HASH_SECTION)], - ExternalStatus.STATUS_OK, - ExternalStatus.STATUS_OK) - - def test_subexternal(self): - """Verify that an externals file can be brought in as a reference. - - """ - cloned_repo_dir = self.clone_test_repo(MIXED_REPO) - - self._generator.create_config() - self._generator.create_section_reference_to_subexternal('mixed_base') - self._generator.write_config(cloned_repo_dir) - - # The subrepo has a repo_url that uses this environment variable. - # It'll be cleared in tearDown(). - os.environ[MIXED_CONT_EXT_ROOT_ENV_VAR] = self._bare_root - debug_env = MIXED_CONT_EXT_ROOT_ENV_VAR + '=' + self._bare_root - - # After checkout, confirm required's are clean and the referenced - # subexternal's contents are also clean. - tree = self.execute_checkout_with_status(cloned_repo_dir, - self.checkout_args, - debug_env=debug_env) - - self._check_sync_clean( - tree[self._external_path(BRANCH_SECTION, base_path=SUB_EXTERNALS_PATH)], - ExternalStatus.STATUS_OK, - ExternalStatus.STATUS_OK) - - def test_container_sparse(self): - """Verify that 'full' container with simple subrepo - can run a sparse checkout and generate the correct initial status. - - """ - cloned_repo_dir = self.clone_test_repo(CONTAINER_REPO) - - # Create a file to list filenames to checkout. - sparse_filename = 'sparse_checkout' - with open(os.path.join(cloned_repo_dir, sparse_filename), 'w') as sfile: - sfile.write(README_NAME) - - self._generator.create_config() - self._generator.create_section(SIMPLE_REPO, TAG_SECTION, - tag='tag2') - - # Same tag as above, but with a sparse file too. - sparse_relpath = '../../' + sparse_filename - self._generator.create_section(SIMPLE_REPO, 'simp_sparse', - tag='tag2', sparse=sparse_relpath) - - self._generator.write_config(cloned_repo_dir) - - # inital checkout, confirm required's are clean. 
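The sparse option used above builds on git's sparse-checkout mechanism: enable core.sparseCheckout, copy the pattern file into .git/info/sparse-checkout, and re-read the tree so only the listed paths appear in the work tree. A minimal sketch of that classic sequence (manic's actual handling may differ in detail):

    import os
    import subprocess

    def apply_sparse_checkout(repo_dir, pattern_file):
        """Limit the work tree to the paths listed in pattern_file."""
        subprocess.check_call(['git', 'config', 'core.sparseCheckout', 'true'],
                              cwd=repo_dir)
        dest = os.path.join(repo_dir, '.git', 'info', 'sparse-checkout')
        with open(pattern_file) as src, open(dest, 'w') as dst:
            dst.write(src.read())
        subprocess.check_call(['git', 'read-tree', '-mu', 'HEAD'], cwd=repo_dir)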
- tree = self.execute_checkout_with_status(cloned_repo_dir, - self.checkout_args) - self._check_sync_clean(tree[self._external_path(TAG_SECTION)], - ExternalStatus.STATUS_OK, - ExternalStatus.STATUS_OK) - self._check_sync_clean(tree[self._external_path('simp_sparse')], - ExternalStatus.STATUS_OK, - ExternalStatus.STATUS_OK) - - # Check existence of some files - full set in TAG_SECTION, and sparse set - # in 'simp_sparse'. - subrepo_path = os.path.join('externals', TAG_SECTION) - self._check_file_exists(cloned_repo_dir, - os.path.join(subrepo_path, README_NAME)) - self._check_file_exists(cloned_repo_dir, os.path.join(subrepo_path, - 'simple_subdir', - 'subdir_file.txt')) - subrepo_path = os.path.join('externals', 'simp_sparse') - self._check_file_exists(cloned_repo_dir, - os.path.join(subrepo_path, README_NAME)) - self._check_file_absent(cloned_repo_dir, os.path.join(subrepo_path, - 'simple_subdir', - 'subdir_file.txt')) - - -class TestSysCheckoutSVN(BaseTestSysCheckout): - """Run systems level tests of checkout_externals accessing svn repositories - - SVN tests - these tests use the svn repository interface. Since - they require an active network connection, they are significantly - slower than the git tests. But svn testing is critical. So try to - design the tests to only test svn repository functionality - (checkout, switch) and leave generic testing of functionality like - 'optional' to the fast git tests. - - Example timing as of 2017-11: - - * All other git and unit tests combined take between 4-5 seconds - - * Just checking if svn is available for a single test takes 2 seconds. - - * The single svn test typically takes between 10 and 25 seconds - (depending on the network)! - - NOTE(bja, 2017-11) To enable CI testing we can't use a real remote - repository that restricts access and it seems inappropriate to hit - a random open source repo. For now we are just hitting one of our - own github repos using the github svn server interface. This - should be "good enough" for basic checkout and swich - functionality. But if additional svn functionality is required, a - better solution will be necessary. I think eventually we want to - create a small local svn repository on the fly (doesn't require an - svn server or network connection!) and use it for testing. - - """ - - @staticmethod - def _svn_branch_name(): - return './{0}/svn_branch'.format(EXTERNALS_PATH) - - @staticmethod - def _svn_tag_name(): - return './{0}/svn_tag'.format(EXTERNALS_PATH) - - def _check_tag_branch_svn_tag_clean(self, tree): - self._check_sync_clean(tree[self._external_path(TAG_SECTION)], - ExternalStatus.STATUS_OK, - ExternalStatus.STATUS_OK) - self._check_sync_clean(tree[self._svn_branch_name()], - ExternalStatus.STATUS_OK, - ExternalStatus.STATUS_OK) - self._check_sync_clean(tree[self._svn_tag_name()], - ExternalStatus.STATUS_OK, - ExternalStatus.STATUS_OK) - - @staticmethod - def _have_svn_access(): - """Check if we have svn access so we can enable tests that use svn. - - """ - have_svn = False - cmd = ['svn', 'ls', SVN_TEST_REPO, ] - try: - execute_subprocess(cmd) - have_svn = True - except BaseException: - pass - return have_svn - - def _skip_if_no_svn_access(self): - """Function decorator to disable svn tests when svn isn't available - """ - have_svn = self._have_svn_access() - if not have_svn: - raise unittest.SkipTest("No svn access") - - def test_container_simple_svn(self): - """Verify that a container repo can pull in an svn branch and svn tag. 
- - """ - self._skip_if_no_svn_access() - # create repo - cloned_repo_dir = self.clone_test_repo(CONTAINER_REPO) - - self._generator.create_config() - # Git repo. - self._generator.create_section(SIMPLE_REPO, TAG_SECTION, tag='tag1') - - # Svn repos. - self._generator.create_svn_external('svn_branch', branch='trunk') - self._generator.create_svn_external('svn_tag', tag='tags/cesm2.0.beta07') - - self._generator.write_config(cloned_repo_dir) - - # checkout, make sure all sections are clean. - tree = self.execute_checkout_with_status(cloned_repo_dir, - self.checkout_args) - self._check_tag_branch_svn_tag_clean(tree) - - # update description file to make the tag into a branch and - # trigger a switch - self._generator.write_with_svn_branch(cloned_repo_dir, 'svn_tag', - 'trunk') - - # checkout, again the results should be clean. - tree = self.execute_checkout_with_status(cloned_repo_dir, - self.checkout_args) - self._check_tag_branch_svn_tag_clean(tree) - - # add an untracked file to the repo - tracked = False - RepoUtils.add_file_to_repo(cloned_repo_dir, - 'externals/svn_branch/tmp.txt', tracked) - - # run a no-op checkout. - self.execute_checkout_in_dir(cloned_repo_dir, self.checkout_args) - - # update description file to make the branch into a tag and - # trigger a modified sync status - self._generator.write_with_svn_branch(cloned_repo_dir, 'svn_tag', - 'tags/cesm2.0.beta07') - - self.execute_checkout_in_dir(cloned_repo_dir,self.checkout_args) - - # verify status is still clean and unmodified, last - # checkout modified the working dir state. - tree = self.execute_checkout_in_dir(cloned_repo_dir, - self.verbose_args) - self._check_tag_branch_svn_tag_clean(tree) - -class TestSubrepoCheckout(BaseTestSysCheckout): - # Need to store information at setUp time for checking - # pylint: disable=too-many-instance-attributes - """Run tests to ensure proper handling of repos with submodules. - - By default, submodules in git repositories are checked out. A git - repository checked out as a submodule is treated as if it was - listed in an external with the same properties as in the source - .gitmodules file. - """ - - def setUp(self): - """Setup for all submodule checkout tests - Create a repo with two submodule repositories. 
- """ - - # Run the basic setup - super().setUp() - # create test repo - # We need to do this here (rather than have a static repo) because - # git submodules do not allow for variables in .gitmodules files - self._test_repo_name = 'test_repo_with_submodules' - self._bare_branch_name = 'subrepo_branch' - self._config_branch_name = 'subrepo_config_branch' - self._container_extern_name = 'externals_container.cfg' - self._my_test_dir = os.path.join(module_tmp_root_dir, self._test_id) - self._repo_dir = os.path.join(self._my_test_dir, self._test_repo_name) - self._checkout_dir = 'repo_with_submodules' - check_dir = self.clone_test_repo(CONTAINER_REPO, - dest_dir_in=self._repo_dir) - self.assertTrue(self._repo_dir == check_dir) - # Add the submodules - cwd = os.getcwd() - fork_repo_dir = os.path.join(self._bare_root, SIMPLE_FORK_REPO) - simple_repo_dir = os.path.join(self._bare_root, SIMPLE_REPO) - self._simple_ext_fork_name = os.path.splitext(SIMPLE_FORK_REPO)[0] - self._simple_ext_name = os.path.join('sourc', - os.path.splitext(SIMPLE_REPO)[0]) - os.chdir(self._repo_dir) - # Add a branch with a subrepo - cmd = ['git', 'branch', self._bare_branch_name, 'master'] - execute_subprocess(cmd) - cmd = ['git', 'checkout', self._bare_branch_name] - execute_subprocess(cmd) - cmd = ['git', 'submodule', 'add', fork_repo_dir] - execute_subprocess(cmd) - cmd = ['git', 'commit', '-am', "'Added simple-ext-fork as a submodule'"] - execute_subprocess(cmd) - # Save the fork repo hash for comparison - os.chdir(self._simple_ext_fork_name) - self._fork_hash_check = self.get_git_hash() - os.chdir(self._repo_dir) - # Now, create a branch to test from_sbmodule - cmd = ['git', 'branch', - self._config_branch_name, self._bare_branch_name] - execute_subprocess(cmd) - cmd = ['git', 'checkout', self._config_branch_name] - execute_subprocess(cmd) - cmd = ['git', 'submodule', 'add', '--name', SIMPLE_REPO, - simple_repo_dir, self._simple_ext_name] - execute_subprocess(cmd) - # Checkout feature2 - os.chdir(self._simple_ext_name) - cmd = ['git', 'branch', 'feature2', 'origin/feature2'] - execute_subprocess(cmd) - cmd = ['git', 'checkout', 'feature2'] - execute_subprocess(cmd) - # Save the fork repo hash for comparison - self._simple_hash_check = self.get_git_hash() - os.chdir(self._repo_dir) - self.write_externals_config(filename=self._container_extern_name, - dest_dir=self._repo_dir, from_submodule=True) - cmd = ['git', 'add', self._container_extern_name] - execute_subprocess(cmd) - cmd = ['git', 'commit', '-am', "'Added simple-ext as a submodule'"] - execute_subprocess(cmd) - # Reset to master - cmd = ['git', 'checkout', 'master'] - execute_subprocess(cmd) - os.chdir(cwd) - - @staticmethod - def get_git_hash(revision="HEAD"): - """Return the hash for """ - cmd = ['git', 'rev-parse', revision] - git_out = execute_subprocess(cmd, output_to_caller=True) - return git_out.strip() - - def write_externals_config(self, name='', dest_dir=None, - filename=CFG_NAME, - branch_name=None, sub_externals=None, - from_submodule=False): - # pylint: disable=too-many-arguments - """Create a container externals file with only simple externals. 
- - """ - self._generator.create_config() - - if dest_dir is None: - dest_dir = self._my_test_dir - - if from_submodule: - self._generator.create_section(SIMPLE_FORK_REPO, - self._simple_ext_fork_name, - from_submodule=True) - self._generator.create_section(SIMPLE_REPO, - self._simple_ext_name, - branch='feature3', path='', - from_submodule=False) - else: - if branch_name is None: - branch_name = 'master' - - self._generator.create_section(self._test_repo_name, - self._checkout_dir, - branch=branch_name, - path=name, sub_externals=sub_externals, - repo_path_abs=self._repo_dir) - - self._generator.write_config(dest_dir, filename=filename) - - def idempotence_check(self, checkout_dir): - """Verify that calling checkout_externals and - checkout_externals --status does not cause errors""" - cwd = os.getcwd() - os.chdir(checkout_dir) - self.execute_checkout_in_dir(self._my_test_dir, - self.checkout_args) - self.execute_checkout_in_dir(self._my_test_dir, - self.status_args) - os.chdir(cwd) - - def test_submodule_checkout_bare(self): - """Verify that a git repo with submodule is properly checked out - This test if for where there is no 'externals' keyword in the - parent repo. - Correct behavior is that the submodule is checked out using - normal git submodule behavior. - """ - simple_ext_fork_tag = "(tag1)" - simple_ext_fork_status = " " - self.write_externals_config(branch_name=self._bare_branch_name) - self.execute_checkout_in_dir(self._my_test_dir, - self.checkout_args) - cwd = os.getcwd() - checkout_dir = os.path.join(self._my_test_dir, self._checkout_dir) - fork_file = os.path.join(checkout_dir, - self._simple_ext_fork_name, "readme.txt") - self.assertTrue(os.path.exists(fork_file)) - - submods = git_submodule_status(checkout_dir) - print('checking status of', checkout_dir, ':', submods) - self.assertEqual(len(submods.keys()), 1) - self.assertTrue(self._simple_ext_fork_name in submods) - submod = submods[self._simple_ext_fork_name] - self.assertTrue('hash' in submod) - self.assertEqual(submod['hash'], self._fork_hash_check) - self.assertTrue('status' in submod) - self.assertEqual(submod['status'], simple_ext_fork_status) - self.assertTrue('tag' in submod) - self.assertEqual(submod['tag'], simple_ext_fork_tag) - self.idempotence_check(checkout_dir) - - def test_submodule_checkout_none(self): - """Verify that a git repo with submodule is properly checked out - This test is for when 'externals=None' is in parent repo's - externals cfg file. - Correct behavior is the submodle is not checked out. - """ - self.write_externals_config(branch_name=self._bare_branch_name, - sub_externals="none") - self.execute_checkout_in_dir(self._my_test_dir, - self.checkout_args) - cwd = os.getcwd() - checkout_dir = os.path.join(self._my_test_dir, self._checkout_dir) - fork_file = os.path.join(checkout_dir, - self._simple_ext_fork_name, "readme.txt") - self.assertFalse(os.path.exists(fork_file)) - os.chdir(cwd) - self.idempotence_check(checkout_dir) - - def test_submodule_checkout_config(self): # pylint: disable=too-many-locals - """Verify that a git repo with submodule is properly checked out - This test if for when the 'from_submodule' keyword is used in the - parent repo. - Correct behavior is that the submodule is checked out using - normal git submodule behavior. 
- """ - tag_check = None # Not checked out as submodule - status_check = "-" # Not checked out as submodule - self.write_externals_config(branch_name=self._config_branch_name, - sub_externals=self._container_extern_name) - self.execute_checkout_in_dir(self._my_test_dir, - self.checkout_args) - cwd = os.getcwd() - checkout_dir = os.path.join(self._my_test_dir, self._checkout_dir) - fork_file = os.path.join(checkout_dir, - self._simple_ext_fork_name, "readme.txt") - self.assertTrue(os.path.exists(fork_file)) - os.chdir(checkout_dir) - # Check submodule status - submods = git_submodule_status(checkout_dir) - self.assertEqual(len(submods.keys()), 2) - self.assertTrue(self._simple_ext_fork_name in submods) - submod = submods[self._simple_ext_fork_name] - self.assertTrue('hash' in submod) - self.assertEqual(submod['hash'], self._fork_hash_check) - self.assertTrue('status' in submod) - self.assertEqual(submod['status'], status_check) - self.assertTrue('tag' in submod) - self.assertEqual(submod['tag'], tag_check) - self.assertTrue(self._simple_ext_name in submods) - submod = submods[self._simple_ext_name] - self.assertTrue('hash' in submod) - self.assertEqual(submod['hash'], self._simple_hash_check) - self.assertTrue('status' in submod) - self.assertEqual(submod['status'], status_check) - self.assertTrue('tag' in submod) - self.assertEqual(submod['tag'], tag_check) - # Check fork repo status - os.chdir(self._simple_ext_fork_name) - self.assertEqual(self.get_git_hash(), self._fork_hash_check) - os.chdir(checkout_dir) - os.chdir(self._simple_ext_name) - hash_check = self.get_git_hash('origin/feature3') - self.assertEqual(self.get_git_hash(), hash_check) - os.chdir(cwd) - self.idempotence_check(checkout_dir) - -class TestSysCheckoutErrors(BaseTestSysCheckout): - """Run systems level tests of error conditions in checkout_externals - - Error conditions - these tests are designed to trigger specific - error conditions and ensure that they are being handled as - runtime errors (and hopefully usefull error messages) instead of - the default internal message that won't mean anything to the - user, e.g. key error, called process error, etc. - - These are not 'expected failures'. They are pass when a - RuntimeError is raised, fail if any other error is raised (or no - error is raised). - - """ - - # NOTE(bja, 2017-11) pylint complains about long method names, but - # it is hard to differentiate tests without making them more - # cryptic. - # pylint: disable=invalid-name - - def test_error_unknown_protocol(self): - """Verify that a runtime error is raised when the user specified repo - protocol is not known. - - """ - # create repo - cloned_repo_dir = self.clone_test_repo(CONTAINER_REPO) - self._generator.create_config() - self._generator.create_section(SIMPLE_REPO, BRANCH_SECTION, - branch=REMOTE_BRANCH_FEATURE2) - self._generator.write_config(cloned_repo_dir) - - # update the config file to point to a different remote with - # the tag instead of branch. Tag MUST NOT be in the original - # repo! - self._generator.write_with_protocol(cloned_repo_dir, BRANCH_SECTION, - 'this-protocol-does-not-exist') - - with self.assertRaises(RuntimeError): - self.execute_checkout_in_dir(cloned_repo_dir, self.checkout_args) - - def test_error_switch_protocol(self): - """Verify that a runtime error is raised when the user switches - protocols, git to svn. - - TODO(bja, 2017-11) This correctly results in an error, but it - isn't a helpful error message. 
- - """ - # create repo - cloned_repo_dir = self.clone_test_repo(CONTAINER_REPO) - self._generator.create_config() - self._generator.create_section(SIMPLE_REPO, BRANCH_SECTION, - branch=REMOTE_BRANCH_FEATURE2) - self._generator.write_config(cloned_repo_dir) - - # update the config file to point to a different remote with - # the tag instead of branch. Tag MUST NOT be in the original - # repo! - self._generator.write_with_protocol(cloned_repo_dir, BRANCH_SECTION, 'svn') - with self.assertRaises(RuntimeError): - self.execute_checkout_in_dir(cloned_repo_dir, self.checkout_args) - - def test_error_unknown_tag(self): - """Verify that a runtime error is raised when the user specified tag - does not exist. - - """ - # create repo - cloned_repo_dir = self.clone_test_repo(CONTAINER_REPO) - self._generator.create_config() - self._generator.create_section(SIMPLE_REPO, BRANCH_SECTION, - branch=REMOTE_BRANCH_FEATURE2) - self._generator.write_config(cloned_repo_dir) - - # update the config file to point to a different remote with - # the tag instead of branch. Tag MUST NOT be in the original - # repo! - self._generator.write_with_tag_and_remote_repo(cloned_repo_dir, BRANCH_SECTION, - tag='this-tag-does-not-exist', - new_remote_repo_path=SIMPLE_REPO) - - with self.assertRaises(RuntimeError): - self.execute_checkout_in_dir(cloned_repo_dir, self.checkout_args) - - def test_error_overspecify_tag_branch(self): - """Verify that a runtime error is raised when the user specified both - tag and a branch - - """ - # create repo - cloned_repo_dir = self.clone_test_repo(CONTAINER_REPO) - self._generator.create_config() - self._generator.create_section(SIMPLE_REPO, BRANCH_SECTION, - branch=REMOTE_BRANCH_FEATURE2) - self._generator.write_config(cloned_repo_dir) - - # update the config file to point to a different remote with - # the tag instead of branch. Tag MUST NOT be in the original - # repo! - self._generator.write_with_tag_and_remote_repo(cloned_repo_dir, BRANCH_SECTION, - tag='this-tag-does-not-exist', - new_remote_repo_path=SIMPLE_REPO, - remove_branch=False) - - with self.assertRaises(RuntimeError): - self.execute_checkout_in_dir(cloned_repo_dir, self.checkout_args) - - def test_error_underspecify_tag_branch(self): - """Verify that a runtime error is raised when the user specified - neither a tag or a branch - - """ - # create repo - cloned_repo_dir = self.clone_test_repo(CONTAINER_REPO) - self._generator.create_config() - self._generator.create_section(SIMPLE_REPO, BRANCH_SECTION, - branch=REMOTE_BRANCH_FEATURE2) - self._generator.write_config(cloned_repo_dir) - - # update the config file to point to a different remote with - # the tag instead of branch. Tag MUST NOT be in the original - # repo! - self._generator.write_without_branch_tag(cloned_repo_dir, BRANCH_SECTION) - - with self.assertRaises(RuntimeError): - self.execute_checkout_in_dir(cloned_repo_dir, self.checkout_args) - - def test_error_missing_url(self): - """Verify that a runtime error is raised when the user specified - neither a tag or a branch - - """ - # create repo - cloned_repo_dir = self.clone_test_repo(CONTAINER_REPO) - self._generator.create_config() - self._generator.create_section(SIMPLE_REPO, BRANCH_SECTION, - branch=REMOTE_BRANCH_FEATURE2) - self._generator.write_config(cloned_repo_dir) - - # update the config file to point to a different remote with - # the tag instead of branch. Tag MUST NOT be in the original - # repo! 
- self._generator.write_without_repo_url(cloned_repo_dir, - BRANCH_SECTION) - - with self.assertRaises(RuntimeError): - self.execute_checkout_in_dir(cloned_repo_dir, self.checkout_args) - - -if __name__ == '__main__': - unittest.main() diff --git a/manage_externals/test/test_sys_repository_git.py b/manage_externals/test/test_sys_repository_git.py deleted file mode 100644 index 7e5fb5020d..0000000000 --- a/manage_externals/test/test_sys_repository_git.py +++ /dev/null @@ -1,238 +0,0 @@ -#!/usr/bin/env python3 - -"""Tests of some of the functionality in repository_git.py that actually -interacts with git repositories. - -We're calling these "system" tests because we expect them to be a lot -slower than most of the unit tests. - -""" - -from __future__ import absolute_import -from __future__ import unicode_literals -from __future__ import print_function - -import os -import shutil -import tempfile -import unittest - -from manic.repository_git import GitRepository -from manic.externals_description import ExternalsDescription -from manic.externals_description import ExternalsDescriptionDict -from manic.utils import execute_subprocess - -# NOTE(wjs, 2018-04-09) I find a mix of camel case and underscores to be -# more readable for unit test names, so I'm disabling pylint's naming -# convention check -# pylint: disable=C0103 - -# Allow access to protected members -# pylint: disable=W0212 - - -class GitTestCase(unittest.TestCase): - """Adds some git-specific unit test functionality on top of TestCase""" - - def assertIsHash(self, maybe_hash): - """Assert that the string given by maybe_hash really does look - like a git hash. - """ - - # Ensure it is non-empty - self.assertTrue(maybe_hash, msg="maybe_hash is empty") - - # Ensure it has a single string - self.assertEqual(1, len(maybe_hash.split()), - msg="maybe_hash has multiple strings: {}".format(maybe_hash)) - - # Ensure that the only characters in the string are ones allowed - # in hashes - allowed_chars_set = set('0123456789abcdef') - self.assertTrue(set(maybe_hash) <= allowed_chars_set, - msg="maybe_hash has non-hash characters: {}".format(maybe_hash)) - - -class TestGitTestCase(GitTestCase): - """Tests GitTestCase""" - - def test_assertIsHash_true(self): - """Ensure that assertIsHash passes for something that looks - like a hash""" - self.assertIsHash('abc123') - - def test_assertIsHash_empty(self): - """Ensure that assertIsHash raises an AssertionError for an - empty string""" - with self.assertRaises(AssertionError): - self.assertIsHash('') - - def test_assertIsHash_multipleStrings(self): - """Ensure that assertIsHash raises an AssertionError when - given multiple strings""" - with self.assertRaises(AssertionError): - self.assertIsHash('abc123 def456') - - def test_assertIsHash_badChar(self): - """Ensure that assertIsHash raises an AssertionError when given a - string that has a character that doesn't belong in a hash - """ - with self.assertRaises(AssertionError): - self.assertIsHash('abc123g') - - -class TestGitRepositoryGitCommands(GitTestCase): - """Test some git commands in RepositoryGit - - It's silly that we need to create a repository in order to test - these git commands. Much or all of the git functionality that is - currently in repository_git.py should eventually be moved to a - separate module that is solely responsible for wrapping git - commands; that would allow us to test it independently of this - repository class. 
- """ - - # ======================================================================== - # Test helper functions - # ======================================================================== - - def setUp(self): - # directory we want to return to after the test system and - # checkout_externals are done cd'ing all over the place. - self._return_dir = os.getcwd() - - self._tmpdir = tempfile.mkdtemp() - os.chdir(self._tmpdir) - - self._name = 'component' - rdata = {ExternalsDescription.PROTOCOL: 'git', - ExternalsDescription.REPO_URL: - '/path/to/local/repo', - ExternalsDescription.TAG: - 'tag1', - } - - data = {self._name: - { - ExternalsDescription.REQUIRED: False, - ExternalsDescription.PATH: 'junk', - ExternalsDescription.EXTERNALS: '', - ExternalsDescription.REPO: rdata, - }, - } - model = ExternalsDescriptionDict(data) - repo = model[self._name][ExternalsDescription.REPO] - self._repo = GitRepository('test', repo) - - def tearDown(self): - # return to our common starting point - os.chdir(self._return_dir) - - shutil.rmtree(self._tmpdir, ignore_errors=True) - - @staticmethod - def make_cwd_git_repo(): - """Turn the current directory into an empty git repository""" - execute_subprocess(['git', 'init']) - - @staticmethod - def add_cwd_git_commit(): - """Add a git commit in the current directory""" - with open('README', 'a') as myfile: - myfile.write('more info') - execute_subprocess(['git', 'add', 'README']) - execute_subprocess(['git', 'commit', '-m', 'my commit message']) - - @staticmethod - def checkout_cwd_git_branch(branchname): - """Checkout a new branch in the current directory""" - execute_subprocess(['git', 'checkout', '-b', branchname]) - - @staticmethod - def make_cwd_git_tag(tagname): - """Make a lightweight tag at the current commit""" - execute_subprocess(['git', 'tag', '-m', 'making a tag', tagname]) - - @staticmethod - def checkout_cwd_ref(refname): - """Checkout the given refname in the current directory""" - execute_subprocess(['git', 'checkout', refname]) - - # ======================================================================== - # Begin actual tests - # ======================================================================== - - def test_currentHash_returnsHash(self): - """Ensure that the _git_current_hash function returns a hash""" - self.make_cwd_git_repo() - self.add_cwd_git_commit() - hash_found, myhash = self._repo._git_current_hash(os.getcwd()) - self.assertTrue(hash_found) - self.assertIsHash(myhash) - - def test_currentHash_outsideGitRepo(self): - """Ensure that the _git_current_hash function returns False when - outside a git repository""" - hash_found, myhash = self._repo._git_current_hash(os.getcwd()) - self.assertFalse(hash_found) - self.assertEqual('', myhash) - - def test_currentBranch_onBranch(self): - """Ensure that the _git_current_branch function returns the name - of the branch""" - self.make_cwd_git_repo() - self.add_cwd_git_commit() - self.checkout_cwd_git_branch('foo') - branch_found, mybranch = self._repo._git_current_branch(os.getcwd()) - self.assertTrue(branch_found) - self.assertEqual('foo', mybranch) - - def test_currentBranch_notOnBranch(self): - """Ensure that the _git_current_branch function returns False - when not on a branch""" - self.make_cwd_git_repo() - self.add_cwd_git_commit() - self.make_cwd_git_tag('mytag') - self.checkout_cwd_ref('mytag') - branch_found, mybranch = self._repo._git_current_branch(os.getcwd()) - self.assertFalse(branch_found) - self.assertEqual('', mybranch) - - def test_currentBranch_outsideGitRepo(self): - 
"""Ensure that the _git_current_branch function returns False - when outside a git repository""" - branch_found, mybranch = self._repo._git_current_branch(os.getcwd()) - self.assertFalse(branch_found) - self.assertEqual('', mybranch) - - def test_currentTag_onTag(self): - """Ensure that the _git_current_tag function returns the name of - the tag""" - self.make_cwd_git_repo() - self.add_cwd_git_commit() - self.make_cwd_git_tag('some_tag') - tag_found, mytag = self._repo._git_current_tag(os.getcwd()) - self.assertTrue(tag_found) - self.assertEqual('some_tag', mytag) - - def test_currentTag_notOnTag(self): - """Ensure tha the _git_current_tag function returns False when - not on a tag""" - self.make_cwd_git_repo() - self.add_cwd_git_commit() - self.make_cwd_git_tag('some_tag') - self.add_cwd_git_commit() - tag_found, mytag = self._repo._git_current_tag(os.getcwd()) - self.assertFalse(tag_found) - self.assertEqual('', mytag) - - def test_currentTag_outsideGitRepo(self): - """Ensure that the _git_current_tag function returns False when - outside a git repository""" - tag_found, mytag = self._repo._git_current_tag(os.getcwd()) - self.assertFalse(tag_found) - self.assertEqual('', mytag) - - -if __name__ == '__main__': - unittest.main() diff --git a/manage_externals/test/test_unit_externals_description.py b/manage_externals/test/test_unit_externals_description.py deleted file mode 100644 index 30e5288499..0000000000 --- a/manage_externals/test/test_unit_externals_description.py +++ /dev/null @@ -1,478 +0,0 @@ -#!/usr/bin/env python3 - -"""Unit test driver for checkout_externals - -Note: this script assume the path to the checkout_externals.py module is -already in the python path. - -""" - -from __future__ import absolute_import -from __future__ import unicode_literals -from __future__ import print_function - -import os -import os.path -import shutil -import unittest - -try: - # python2 - from ConfigParser import SafeConfigParser as config_parser - - def config_string_cleaner(text): - """convert strings into unicode - """ - return text.decode('utf-8') -except ImportError: - # python3 - from configparser import ConfigParser as config_parser - - def config_string_cleaner(text): - """Python3 already uses unicode strings, so just return the string - without modification. - - """ - return text - -from manic.externals_description import DESCRIPTION_SECTION, VERSION_ITEM -from manic.externals_description import ExternalsDescription -from manic.externals_description import ExternalsDescriptionDict -from manic.externals_description import ExternalsDescriptionConfigV1 -from manic.externals_description import get_cfg_schema_version -from manic.externals_description import read_externals_description_file -from manic.externals_description import create_externals_description - -from manic.global_constants import EMPTY_STR - - -class TestCfgSchemaVersion(unittest.TestCase): - """Test that schema identification for the externals description - returns the correct results. - - """ - - def setUp(self): - """Reusable config object - """ - self._config = config_parser() - self._config.add_section('section1') - self._config.set('section1', 'keword', 'value') - - self._config.add_section(DESCRIPTION_SECTION) - - def test_schema_version_valid(self): - """Test that schema identification returns the correct version for a - valid tag. 
- - """ - version_str = '2.1.3' - self._config.set(DESCRIPTION_SECTION, VERSION_ITEM, version_str) - major, minor, patch = get_cfg_schema_version(self._config) - expected_major = 2 - expected_minor = 1 - expected_patch = 3 - self.assertEqual(expected_major, major) - self.assertEqual(expected_minor, minor) - self.assertEqual(expected_patch, patch) - - def test_schema_section_missing(self): - """Test that an error is returned if the schema section is missing - from the input file. - - """ - self._config.remove_section(DESCRIPTION_SECTION) - with self.assertRaises(RuntimeError): - get_cfg_schema_version(self._config) - - def test_schema_version_missing(self): - """Test that a externals description file without a version raises a - runtime error. - - """ - # Note: the default setup method shouldn't include a version - # keyword, but remove it just to be future proof.... - self._config.remove_option(DESCRIPTION_SECTION, VERSION_ITEM) - with self.assertRaises(RuntimeError): - get_cfg_schema_version(self._config) - - def test_schema_version_not_int(self): - """Test that a externals description file a version that doesn't - decompose to integer major, minor and patch versions raises - runtime error. - - """ - self._config.set(DESCRIPTION_SECTION, VERSION_ITEM, 'unknown') - with self.assertRaises(RuntimeError): - get_cfg_schema_version(self._config) - - -class TestModelDescritionConfigV1(unittest.TestCase): - """Test that parsing config/ini fileproduces a correct dictionary - for the externals description. - - """ - # pylint: disable=R0902 - - def setUp(self): - """Boiler plate construction of string containing xml for multiple components. - """ - self._comp1_name = 'comp1' - self._comp1_path = 'path/to/comp1' - self._comp1_protocol = 'svn' - self._comp1_url = 'https://svn.somewhere.com/path/of/comp1' - self._comp1_tag = 'a_nice_tag_v1' - self._comp1_is_required = 'True' - self._comp1_externals = '' - - self._comp2_name = 'comp2' - self._comp2_path = 'path/to/comp2' - self._comp2_protocol = 'git' - self._comp2_url = '/local/clone/of/comp2' - self._comp2_branch = 'a_very_nice_branch' - self._comp2_is_required = 'False' - self._comp2_externals = 'path/to/comp2.cfg' - - def _setup_comp1(self, config): - """Boiler plate construction of xml string for componet 1 - """ - config.add_section(self._comp1_name) - config.set(self._comp1_name, 'local_path', self._comp1_path) - config.set(self._comp1_name, 'protocol', self._comp1_protocol) - config.set(self._comp1_name, 'repo_url', self._comp1_url) - config.set(self._comp1_name, 'tag', self._comp1_tag) - config.set(self._comp1_name, 'required', self._comp1_is_required) - - def _setup_comp2(self, config): - """Boiler plate construction of xml string for componet 2 - """ - config.add_section(self._comp2_name) - config.set(self._comp2_name, 'local_path', self._comp2_path) - config.set(self._comp2_name, 'protocol', self._comp2_protocol) - config.set(self._comp2_name, 'repo_url', self._comp2_url) - config.set(self._comp2_name, 'branch', self._comp2_branch) - config.set(self._comp2_name, 'required', self._comp2_is_required) - config.set(self._comp2_name, 'externals', self._comp2_externals) - - @staticmethod - def _setup_externals_description(config): - """Add the required exernals description section - """ - - config.add_section(DESCRIPTION_SECTION) - config.set(DESCRIPTION_SECTION, VERSION_ITEM, '1.0.1') - - def _check_comp1(self, model): - """Test that component one was constructed correctly. 
- """ - self.assertTrue(self._comp1_name in model) - comp1 = model[self._comp1_name] - self.assertEqual(comp1[ExternalsDescription.PATH], self._comp1_path) - self.assertTrue(comp1[ExternalsDescription.REQUIRED]) - repo = comp1[ExternalsDescription.REPO] - self.assertEqual(repo[ExternalsDescription.PROTOCOL], - self._comp1_protocol) - self.assertEqual(repo[ExternalsDescription.REPO_URL], self._comp1_url) - self.assertEqual(repo[ExternalsDescription.TAG], self._comp1_tag) - self.assertEqual(EMPTY_STR, comp1[ExternalsDescription.EXTERNALS]) - - def _check_comp2(self, model): - """Test that component two was constucted correctly. - """ - self.assertTrue(self._comp2_name in model) - comp2 = model[self._comp2_name] - self.assertEqual(comp2[ExternalsDescription.PATH], self._comp2_path) - self.assertFalse(comp2[ExternalsDescription.REQUIRED]) - repo = comp2[ExternalsDescription.REPO] - self.assertEqual(repo[ExternalsDescription.PROTOCOL], - self._comp2_protocol) - self.assertEqual(repo[ExternalsDescription.REPO_URL], self._comp2_url) - self.assertEqual(repo[ExternalsDescription.BRANCH], self._comp2_branch) - self.assertEqual(self._comp2_externals, - comp2[ExternalsDescription.EXTERNALS]) - - def test_one_tag_required(self): - """Test that a component source with a tag is correctly parsed. - """ - config = config_parser() - self._setup_comp1(config) - self._setup_externals_description(config) - model = ExternalsDescriptionConfigV1(config) - print(model) - self._check_comp1(model) - - def test_one_branch_externals(self): - """Test that a component source with a branch is correctly parsed. - """ - config = config_parser() - self._setup_comp2(config) - self._setup_externals_description(config) - model = ExternalsDescriptionConfigV1(config) - print(model) - self._check_comp2(model) - - def test_two_sources(self): - """Test that multiple component sources are correctly parsed. - """ - config = config_parser() - self._setup_comp1(config) - self._setup_comp2(config) - self._setup_externals_description(config) - model = ExternalsDescriptionConfigV1(config) - print(model) - self._check_comp1(model) - self._check_comp2(model) - - def test_cfg_v1_reject_unknown_item(self): - """Test that a v1 description object will reject unknown items - """ - config = config_parser() - self._setup_comp1(config) - self._setup_externals_description(config) - config.set(self._comp1_name, 'junk', 'foobar') - with self.assertRaises(RuntimeError): - ExternalsDescriptionConfigV1(config) - - def test_cfg_v1_reject_v2(self): - """Test that a v1 description object won't try to parse a v2 file. - """ - config = config_parser() - self._setup_comp1(config) - self._setup_externals_description(config) - config.set(DESCRIPTION_SECTION, VERSION_ITEM, '2.0.1') - with self.assertRaises(RuntimeError): - ExternalsDescriptionConfigV1(config) - - def test_cfg_v1_reject_v1_too_new(self): - """Test that a v1 description object won't try to parse a v2 file. 
- """ - config = config_parser() - self._setup_comp1(config) - self._setup_externals_description(config) - config.set(DESCRIPTION_SECTION, VERSION_ITEM, '1.100.0') - with self.assertRaises(RuntimeError): - ExternalsDescriptionConfigV1(config) - - -class TestReadExternalsDescription(unittest.TestCase): - """Test the application logic of read_externals_description_file - """ - TMP_FAKE_DIR = 'fake' - - def setUp(self): - """Setup directory for tests - """ - if not os.path.exists(self.TMP_FAKE_DIR): - os.makedirs(self.TMP_FAKE_DIR) - - def tearDown(self): - """Cleanup tmp stuff on the file system - """ - if os.path.exists(self.TMP_FAKE_DIR): - shutil.rmtree(self.TMP_FAKE_DIR) - - def test_no_file_error(self): - """Test that a runtime error is raised when the file does not exist - - """ - root_dir = os.getcwd() - filename = 'this-file-should-not-exist' - with self.assertRaises(RuntimeError): - read_externals_description_file(root_dir, filename) - - def test_no_dir_error(self): - """Test that a runtime error is raised when the file does not exist - - """ - root_dir = '/path/to/some/repo' - filename = 'externals.cfg' - with self.assertRaises(RuntimeError): - read_externals_description_file(root_dir, filename) - - def test_no_invalid_error(self): - """Test that a runtime error is raised when the file format is invalid - - """ - root_dir = os.getcwd() - filename = 'externals.cfg' - file_path = os.path.join(root_dir, filename) - file_path = os.path.abspath(file_path) - contents = """ - -invalid file format -""" - with open(file_path, 'w') as fhandle: - fhandle.write(contents) - with self.assertRaises(RuntimeError): - read_externals_description_file(root_dir, filename) - os.remove(file_path) - - -class TestCreateExternalsDescription(unittest.TestCase): - """Test the application logic of creat_externals_description - """ - - def setUp(self): - """Create config object used as basis for all tests - """ - self._config = config_parser() - self._gmconfig = config_parser() - self.setup_config() - - def setup_config(self): - """Boiler plate construction of xml string for componet 1 - """ - # Create a standard externals config with a single external - name = 'test' - self._config.add_section(name) - self._config.set(name, ExternalsDescription.PATH, 'externals') - self._config.set(name, ExternalsDescription.PROTOCOL, 'git') - self._config.set(name, ExternalsDescription.REPO_URL, '/path/to/repo') - self._config.set(name, ExternalsDescription.TAG, 'test_tag') - self._config.set(name, ExternalsDescription.REQUIRED, 'True') - - self._config.add_section(DESCRIPTION_SECTION) - self._config.set(DESCRIPTION_SECTION, VERSION_ITEM, '1.0.0') - - # Create a .gitmodules test - name = 'submodule "gitmodules_test"' - self._gmconfig.add_section(name) - self._gmconfig.set(name, "path", 'externals/test') - self._gmconfig.set(name, "url", '/path/to/repo') - # NOTE(goldy, 2019-03) Should test other possible keywords such as - # fetchRecurseSubmodules, ignore, and shallow - - @staticmethod - def setup_dict_config(): - """Create the full container dictionary with simple and mixed use - externals - - """ - rdatat = {ExternalsDescription.PROTOCOL: 'git', - ExternalsDescription.REPO_URL: 'simple-ext.git', - ExternalsDescription.TAG: 'tag1'} - rdatab = {ExternalsDescription.PROTOCOL: 'git', - ExternalsDescription.REPO_URL: 'simple-ext.git', - ExternalsDescription.BRANCH: 'feature2'} - rdatam = {ExternalsDescription.PROTOCOL: 'git', - ExternalsDescription.REPO_URL: 'mixed-cont-ext.git', - ExternalsDescription.BRANCH: 'master'} - desc = 
{'simp_tag': {ExternalsDescription.REQUIRED: True, - ExternalsDescription.PATH: 'simp_tag', - ExternalsDescription.EXTERNALS: EMPTY_STR, - ExternalsDescription.REPO: rdatat}, - 'simp_branch' : {ExternalsDescription.REQUIRED: True, - ExternalsDescription.PATH: 'simp_branch', - ExternalsDescription.EXTERNALS: EMPTY_STR, - ExternalsDescription.REPO: rdatab}, - 'simp_opt': {ExternalsDescription.REQUIRED: False, - ExternalsDescription.PATH: 'simp_opt', - ExternalsDescription.EXTERNALS: EMPTY_STR, - ExternalsDescription.REPO: rdatat}, - 'mixed_req': {ExternalsDescription.REQUIRED: True, - ExternalsDescription.PATH: 'mixed_req', - ExternalsDescription.EXTERNALS: 'sub-ext.cfg', - ExternalsDescription.REPO: rdatam}} - - return desc - - def test_cfg_v1_ok(self): - """Test that a correct cfg v1 object is created by create_externals_description - - """ - self._config.set(DESCRIPTION_SECTION, VERSION_ITEM, '1.0.3') - ext = create_externals_description(self._config, model_format='cfg') - self.assertIsInstance(ext, ExternalsDescriptionConfigV1) - - def test_cfg_v1_unknown_version(self): - """Test that a config file with unknown schema version is rejected by - create_externals_description. - - """ - self._config.set(DESCRIPTION_SECTION, VERSION_ITEM, '100.0.3') - with self.assertRaises(RuntimeError): - create_externals_description(self._config, model_format='cfg') - - def test_dict(self): - """Test that a correct cfg v1 object is created by create_externals_description - - """ - rdata = {ExternalsDescription.PROTOCOL: 'git', - ExternalsDescription.REPO_URL: '/path/to/repo', - ExternalsDescription.TAG: 'tagv1', - } - - desc = { - 'test': { - ExternalsDescription.REQUIRED: False, - ExternalsDescription.PATH: '../fake', - ExternalsDescription.EXTERNALS: EMPTY_STR, - ExternalsDescription.REPO: rdata, }, - } - - ext = create_externals_description(desc, model_format='dict') - self.assertIsInstance(ext, ExternalsDescriptionDict) - - def test_cfg_component_dict(self): - """Verify that create_externals_description works with a dictionary - """ - # create the top level externals file - desc = self.setup_dict_config() - # Check external with all repos - external = create_externals_description(desc, model_format='dict') - self.assertIsInstance(external, ExternalsDescriptionDict) - self.assertTrue('simp_tag' in external) - self.assertTrue('simp_branch' in external) - self.assertTrue('simp_opt' in external) - self.assertTrue('mixed_req' in external) - - def test_cfg_exclude_component_dict(self): - """Verify that exclude component checkout works with a dictionary - """ - # create the top level externals file - desc = self.setup_dict_config() - # Test an excluded repo - external = create_externals_description(desc, model_format='dict', - exclude=['simp_tag', - 'simp_opt']) - self.assertIsInstance(external, ExternalsDescriptionDict) - self.assertFalse('simp_tag' in external) - self.assertTrue('simp_branch' in external) - self.assertFalse('simp_opt' in external) - self.assertTrue('mixed_req' in external) - - def test_cfg_opt_component_dict(self): - """Verify that exclude component checkout works with a dictionary - """ - # create the top level externals file - desc = self.setup_dict_config() - # Test an excluded repo - external = create_externals_description(desc, model_format='dict', - components=['simp_tag', - 'simp_opt']) - self.assertIsInstance(external, ExternalsDescriptionDict) - self.assertTrue('simp_tag' in external) - self.assertFalse('simp_branch' in external) - self.assertTrue('simp_opt' in external) - 
self.assertFalse('mixed_req' in external) - - def test_cfg_unknown_version(self): - """Test that a runtime error is raised when an unknown file version is - received - - """ - self._config.set(DESCRIPTION_SECTION, VERSION_ITEM, '123.456.789') - with self.assertRaises(RuntimeError): - create_externals_description(self._config, model_format='cfg') - - def test_cfg_unknown_format(self): - """Test that a runtime error is raised when an unknown format string is - received - - """ - with self.assertRaises(RuntimeError): - create_externals_description(self._config, model_format='unknown') - - -if __name__ == '__main__': - unittest.main() diff --git a/manage_externals/test/test_unit_externals_status.py b/manage_externals/test/test_unit_externals_status.py deleted file mode 100644 index f019514e9e..0000000000 --- a/manage_externals/test/test_unit_externals_status.py +++ /dev/null @@ -1,299 +0,0 @@ -#!/usr/bin/env python3 - -"""Unit test driver for the manic external status reporting module. - -Note: this script assumes the path to the manic package is already in -the python path. - -""" - -from __future__ import absolute_import -from __future__ import unicode_literals -from __future__ import print_function - -import unittest - -from manic.externals_status import ExternalStatus - - -class TestStatusObject(unittest.TestCase): - """Verify that the Status object behaves as expected. - """ - - def test_exists_empty_all(self): - """If the repository sync-state is empty (doesn't exist), and there is no - clean state, then it is considered not to exist. - - """ - stat = ExternalStatus() - stat.sync_state = ExternalStatus.EMPTY - stat.clean_state = ExternalStatus.DEFAULT - exists = stat.exists() - self.assertFalse(exists) - - stat.clean_state = ExternalStatus.EMPTY - exists = stat.exists() - self.assertFalse(exists) - - stat.clean_state = ExternalStatus.UNKNOWN - exists = stat.exists() - self.assertFalse(exists) - - # this state represents an internal logic error in how the - # repo status was determined. - stat.clean_state = ExternalStatus.STATUS_OK - exists = stat.exists() - self.assertTrue(exists) - - # this state represents an internal logic error in how the - # repo status was determined. - stat.clean_state = ExternalStatus.DIRTY - exists = stat.exists() - self.assertTrue(exists) - - def test_exists_default_all(self): - """If the repository sync-state is default, then it is considered to exist - regardless of clean state. - - """ - stat = ExternalStatus() - stat.sync_state = ExternalStatus.DEFAULT - stat.clean_state = ExternalStatus.DEFAULT - exists = stat.exists() - self.assertTrue(exists) - - stat.clean_state = ExternalStatus.EMPTY - exists = stat.exists() - self.assertTrue(exists) - - stat.clean_state = ExternalStatus.UNKNOWN - exists = stat.exists() - self.assertTrue(exists) - - stat.clean_state = ExternalStatus.STATUS_OK - exists = stat.exists() - self.assertTrue(exists) - - stat.clean_state = ExternalStatus.DIRTY - exists = stat.exists() - self.assertTrue(exists) - - def test_exists_unknown_all(self): - """If the repository sync-state is unknown, then it is considered to exist - regardless of clean state. 
- - """ - stat = ExternalStatus() - stat.sync_state = ExternalStatus.UNKNOWN - stat.clean_state = ExternalStatus.DEFAULT - exists = stat.exists() - self.assertTrue(exists) - - stat.clean_state = ExternalStatus.EMPTY - exists = stat.exists() - self.assertTrue(exists) - - stat.clean_state = ExternalStatus.UNKNOWN - exists = stat.exists() - self.assertTrue(exists) - - stat.clean_state = ExternalStatus.STATUS_OK - exists = stat.exists() - self.assertTrue(exists) - - stat.clean_state = ExternalStatus.DIRTY - exists = stat.exists() - self.assertTrue(exists) - - def test_exists_modified_all(self): - """If the repository sync-state is modified, then it is considered to exist - regardless of clean state. - - """ - stat = ExternalStatus() - stat.sync_state = ExternalStatus.MODEL_MODIFIED - stat.clean_state = ExternalStatus.DEFAULT - exists = stat.exists() - self.assertTrue(exists) - - stat.clean_state = ExternalStatus.EMPTY - exists = stat.exists() - self.assertTrue(exists) - - stat.clean_state = ExternalStatus.UNKNOWN - exists = stat.exists() - self.assertTrue(exists) - - stat.clean_state = ExternalStatus.STATUS_OK - exists = stat.exists() - self.assertTrue(exists) - - stat.clean_state = ExternalStatus.DIRTY - exists = stat.exists() - self.assertTrue(exists) - - def test_exists_ok_all(self): - """If the repository sync-state is ok, then it is considered to exist - regardless of clean state. - - """ - stat = ExternalStatus() - stat.sync_state = ExternalStatus.STATUS_OK - stat.clean_state = ExternalStatus.DEFAULT - exists = stat.exists() - self.assertTrue(exists) - - stat.clean_state = ExternalStatus.EMPTY - exists = stat.exists() - self.assertTrue(exists) - - stat.clean_state = ExternalStatus.UNKNOWN - exists = stat.exists() - self.assertTrue(exists) - - stat.clean_state = ExternalStatus.STATUS_OK - exists = stat.exists() - self.assertTrue(exists) - - stat.clean_state = ExternalStatus.DIRTY - exists = stat.exists() - self.assertTrue(exists) - - def test_update_ok_all(self): - """If the repository in-sync is ok, then it is safe to - update only if clean state is ok - - """ - stat = ExternalStatus() - stat.sync_state = ExternalStatus.STATUS_OK - stat.clean_state = ExternalStatus.DEFAULT - safe_to_update = stat.safe_to_update() - self.assertFalse(safe_to_update) - - stat.clean_state = ExternalStatus.EMPTY - safe_to_update = stat.safe_to_update() - self.assertFalse(safe_to_update) - - stat.clean_state = ExternalStatus.UNKNOWN - safe_to_update = stat.safe_to_update() - self.assertFalse(safe_to_update) - - stat.clean_state = ExternalStatus.STATUS_OK - safe_to_update = stat.safe_to_update() - self.assertTrue(safe_to_update) - - stat.clean_state = ExternalStatus.DIRTY - safe_to_update = stat.safe_to_update() - self.assertFalse(safe_to_update) - - def test_update_modified_all(self): - """If the repository in-sync is modified, then it is safe to - update only if clean state is ok - - """ - stat = ExternalStatus() - stat.sync_state = ExternalStatus.MODEL_MODIFIED - stat.clean_state = ExternalStatus.DEFAULT - safe_to_update = stat.safe_to_update() - self.assertFalse(safe_to_update) - - stat.clean_state = ExternalStatus.EMPTY - safe_to_update = stat.safe_to_update() - self.assertFalse(safe_to_update) - - stat.clean_state = ExternalStatus.UNKNOWN - safe_to_update = stat.safe_to_update() - self.assertFalse(safe_to_update) - - stat.clean_state = ExternalStatus.STATUS_OK - safe_to_update = stat.safe_to_update() - self.assertTrue(safe_to_update) - - stat.clean_state = ExternalStatus.DIRTY - safe_to_update = 
stat.safe_to_update() - self.assertFalse(safe_to_update) - - def test_update_unknown_all(self): - """If the repository in-sync is unknown, then it is not safe to - update, regardless of the clean state. - - """ - stat = ExternalStatus() - stat.sync_state = ExternalStatus.UNKNOWN - stat.clean_state = ExternalStatus.DEFAULT - safe_to_update = stat.safe_to_update() - self.assertFalse(safe_to_update) - - stat.clean_state = ExternalStatus.EMPTY - safe_to_update = stat.safe_to_update() - self.assertFalse(safe_to_update) - - stat.clean_state = ExternalStatus.UNKNOWN - safe_to_update = stat.safe_to_update() - self.assertFalse(safe_to_update) - - stat.clean_state = ExternalStatus.STATUS_OK - safe_to_update = stat.safe_to_update() - self.assertFalse(safe_to_update) - - stat.clean_state = ExternalStatus.DIRTY - safe_to_update = stat.safe_to_update() - self.assertFalse(safe_to_update) - - def test_update_default_all(self): - """If the repository in-sync is default, then it is not safe to - update, regardless of the clean state. - - """ - stat = ExternalStatus() - stat.sync_state = ExternalStatus.UNKNOWN - stat.clean_state = ExternalStatus.DEFAULT - safe_to_update = stat.safe_to_update() - self.assertFalse(safe_to_update) - - stat.clean_state = ExternalStatus.EMPTY - safe_to_update = stat.safe_to_update() - self.assertFalse(safe_to_update) - - stat.clean_state = ExternalStatus.UNKNOWN - safe_to_update = stat.safe_to_update() - self.assertFalse(safe_to_update) - - stat.clean_state = ExternalStatus.STATUS_OK - safe_to_update = stat.safe_to_update() - self.assertFalse(safe_to_update) - - stat.clean_state = ExternalStatus.DIRTY - safe_to_update = stat.safe_to_update() - self.assertFalse(safe_to_update) - - def test_update_empty_all(self): - """If the repository in-sync is empty, then it is not safe to - update, regardless of the clean state. - - """ - stat = ExternalStatus() - stat.sync_state = ExternalStatus.UNKNOWN - stat.clean_state = ExternalStatus.DEFAULT - safe_to_update = stat.safe_to_update() - self.assertFalse(safe_to_update) - - stat.clean_state = ExternalStatus.EMPTY - safe_to_update = stat.safe_to_update() - self.assertFalse(safe_to_update) - - stat.clean_state = ExternalStatus.UNKNOWN - safe_to_update = stat.safe_to_update() - self.assertFalse(safe_to_update) - - stat.clean_state = ExternalStatus.STATUS_OK - safe_to_update = stat.safe_to_update() - self.assertFalse(safe_to_update) - - stat.clean_state = ExternalStatus.DIRTY - safe_to_update = stat.safe_to_update() - self.assertFalse(safe_to_update) - - -if __name__ == '__main__': - unittest.main() diff --git a/manage_externals/test/test_unit_repository.py b/manage_externals/test/test_unit_repository.py deleted file mode 100644 index 1b93861834..0000000000 --- a/manage_externals/test/test_unit_repository.py +++ /dev/null @@ -1,208 +0,0 @@ -#!/usr/bin/env python3 - -"""Unit test driver for checkout_externals - -Note: this script assume the path to the checkout_externals.py module is -already in the python path. 
- -""" - -from __future__ import absolute_import -from __future__ import unicode_literals -from __future__ import print_function - -import unittest - -from manic.repository_factory import create_repository -from manic.repository_git import GitRepository -from manic.repository_svn import SvnRepository -from manic.repository import Repository -from manic.externals_description import ExternalsDescription -from manic.global_constants import EMPTY_STR - - -class TestCreateRepositoryDict(unittest.TestCase): - """Test the create_repository functionality to ensure it returns the - propper type of repository and errors for unknown repository - types. - - """ - - def setUp(self): - """Common data needed for all tests in this class - """ - self._name = 'test_name' - self._repo = {ExternalsDescription.PROTOCOL: None, - ExternalsDescription.REPO_URL: 'junk_root', - ExternalsDescription.TAG: 'junk_tag', - ExternalsDescription.BRANCH: EMPTY_STR, - ExternalsDescription.HASH: EMPTY_STR, - ExternalsDescription.SPARSE: EMPTY_STR, } - - def test_create_repo_git(self): - """Verify that several possible names for the 'git' protocol - create git repository objects. - - """ - protocols = ['git', 'GIT', 'Git', ] - for protocol in protocols: - self._repo[ExternalsDescription.PROTOCOL] = protocol - repo = create_repository(self._name, self._repo) - self.assertIsInstance(repo, GitRepository) - - def test_create_repo_svn(self): - """Verify that several possible names for the 'svn' protocol - create svn repository objects. - """ - protocols = ['svn', 'SVN', 'Svn', ] - for protocol in protocols: - self._repo[ExternalsDescription.PROTOCOL] = protocol - repo = create_repository(self._name, self._repo) - self.assertIsInstance(repo, SvnRepository) - - def test_create_repo_externals_only(self): - """Verify that an externals only repo returns None. - """ - protocols = ['externals_only', ] - for protocol in protocols: - self._repo[ExternalsDescription.PROTOCOL] = protocol - repo = create_repository(self._name, self._repo) - self.assertEqual(None, repo) - - def test_create_repo_unsupported(self): - """Verify that an unsupported protocol generates a runtime error. - """ - protocols = ['not_a_supported_protocol', ] - for protocol in protocols: - self._repo[ExternalsDescription.PROTOCOL] = protocol - with self.assertRaises(RuntimeError): - create_repository(self._name, self._repo) - - -class TestRepository(unittest.TestCase): - """Test the externals description processing used to create the Repository - base class shared by protocol specific repository classes. 
- - """ - - def test_tag(self): - """Test creation of a repository object with a tag - """ - name = 'test_repo' - protocol = 'test_protocol' - url = 'test_url' - tag = 'test_tag' - repo_info = {ExternalsDescription.PROTOCOL: protocol, - ExternalsDescription.REPO_URL: url, - ExternalsDescription.TAG: tag, - ExternalsDescription.BRANCH: EMPTY_STR, - ExternalsDescription.HASH: EMPTY_STR, - ExternalsDescription.SPARSE: EMPTY_STR, } - repo = Repository(name, repo_info) - print(repo.__dict__) - self.assertEqual(repo.tag(), tag) - self.assertEqual(repo.url(), url) - - def test_branch(self): - """Test creation of a repository object with a branch - """ - name = 'test_repo' - protocol = 'test_protocol' - url = 'test_url' - branch = 'test_branch' - repo_info = {ExternalsDescription.PROTOCOL: protocol, - ExternalsDescription.REPO_URL: url, - ExternalsDescription.BRANCH: branch, - ExternalsDescription.TAG: EMPTY_STR, - ExternalsDescription.HASH: EMPTY_STR, - ExternalsDescription.SPARSE: EMPTY_STR, } - repo = Repository(name, repo_info) - print(repo.__dict__) - self.assertEqual(repo.branch(), branch) - self.assertEqual(repo.url(), url) - - def test_hash(self): - """Test creation of a repository object with a hash - """ - name = 'test_repo' - protocol = 'test_protocol' - url = 'test_url' - ref = 'deadc0de' - sparse = EMPTY_STR - repo_info = {ExternalsDescription.PROTOCOL: protocol, - ExternalsDescription.REPO_URL: url, - ExternalsDescription.BRANCH: EMPTY_STR, - ExternalsDescription.TAG: EMPTY_STR, - ExternalsDescription.HASH: ref, - ExternalsDescription.SPARSE: sparse, } - repo = Repository(name, repo_info) - print(repo.__dict__) - self.assertEqual(repo.hash(), ref) - self.assertEqual(repo.url(), url) - - def test_tag_branch(self): - """Test creation of a repository object with a tag and branch raises a - runtimer error. - - """ - name = 'test_repo' - protocol = 'test_protocol' - url = 'test_url' - branch = 'test_branch' - tag = 'test_tag' - ref = EMPTY_STR - sparse = EMPTY_STR - repo_info = {ExternalsDescription.PROTOCOL: protocol, - ExternalsDescription.REPO_URL: url, - ExternalsDescription.BRANCH: branch, - ExternalsDescription.TAG: tag, - ExternalsDescription.HASH: ref, - ExternalsDescription.SPARSE: sparse, } - with self.assertRaises(RuntimeError): - Repository(name, repo_info) - - def test_tag_branch_hash(self): - """Test creation of a repository object with a tag, branch and hash raises a - runtimer error. - - """ - name = 'test_repo' - protocol = 'test_protocol' - url = 'test_url' - branch = 'test_branch' - tag = 'test_tag' - ref = 'deadc0de' - sparse = EMPTY_STR - repo_info = {ExternalsDescription.PROTOCOL: protocol, - ExternalsDescription.REPO_URL: url, - ExternalsDescription.BRANCH: branch, - ExternalsDescription.TAG: tag, - ExternalsDescription.HASH: ref, - ExternalsDescription.SPARSE: sparse, } - with self.assertRaises(RuntimeError): - Repository(name, repo_info) - - def test_no_tag_no_branch(self): - """Test creation of a repository object without a tag or branch raises a - runtimer error. 
- - """ - name = 'test_repo' - protocol = 'test_protocol' - url = 'test_url' - branch = EMPTY_STR - tag = EMPTY_STR - ref = EMPTY_STR - sparse = EMPTY_STR - repo_info = {ExternalsDescription.PROTOCOL: protocol, - ExternalsDescription.REPO_URL: url, - ExternalsDescription.BRANCH: branch, - ExternalsDescription.TAG: tag, - ExternalsDescription.HASH: ref, - ExternalsDescription.SPARSE: sparse, } - with self.assertRaises(RuntimeError): - Repository(name, repo_info) - - -if __name__ == '__main__': - unittest.main() diff --git a/manage_externals/test/test_unit_repository_git.py b/manage_externals/test/test_unit_repository_git.py deleted file mode 100644 index 1c01098acf..0000000000 --- a/manage_externals/test/test_unit_repository_git.py +++ /dev/null @@ -1,811 +0,0 @@ -#!/usr/bin/env python3 - -"""Unit test driver for checkout_externals - -Note: this script assume the path to the checkout_externals.py module is -already in the python path. - -""" -# pylint: disable=too-many-lines,protected-access - -from __future__ import absolute_import -from __future__ import unicode_literals -from __future__ import print_function - -import os -import shutil -import unittest - -from manic.repository_git import GitRepository -from manic.externals_status import ExternalStatus -from manic.externals_description import ExternalsDescription -from manic.externals_description import ExternalsDescriptionDict -from manic.global_constants import EMPTY_STR - -# NOTE(bja, 2017-11) order is important here. origin should be a -# subset of other to trap errors on processing remotes! -GIT_REMOTE_OUTPUT_ORIGIN_UPSTREAM = ''' -upstream /path/to/other/repo (fetch) -upstream /path/to/other/repo (push) -other /path/to/local/repo2 (fetch) -other /path/to/local/repo2 (push) -origin /path/to/local/repo (fetch) -origin /path/to/local/repo (push) -''' - - -class TestGitRepositoryCurrentRef(unittest.TestCase): - """test the current_ref command on a git repository - """ - - def setUp(self): - self._name = 'component' - rdata = {ExternalsDescription.PROTOCOL: 'git', - ExternalsDescription.REPO_URL: - '/path/to/local/repo', - ExternalsDescription.TAG: - 'tag1', - } - - data = {self._name: - { - ExternalsDescription.REQUIRED: False, - ExternalsDescription.PATH: 'junk', - ExternalsDescription.EXTERNALS: EMPTY_STR, - ExternalsDescription.REPO: rdata, - }, - } - - model = ExternalsDescriptionDict(data) - repo = model[self._name][ExternalsDescription.REPO] - self._repo = GitRepository('test', repo) - - # - # mock methods replacing git system calls - # - @staticmethod - def _git_current_branch(branch_found, branch_name): - """Return a function that takes the place of - repo._git_current_branch, which returns the given output.""" - def my_git_current_branch(dirname): - """mock function that can take the place of repo._git_current_branch""" - return branch_found, branch_name - return my_git_current_branch - - @staticmethod - def _git_current_tag(tag_found, tag_name): - """Return a function that takes the place of - repo._git_current_tag, which returns the given output.""" - def my_git_current_tag(dirname): - """mock function that can take the place of repo._git_current_tag""" - return tag_found, tag_name - return my_git_current_tag - - @staticmethod - def _git_current_hash(hash_found, hash_name): - """Return a function that takes the place of - repo._git_current_hash, which returns the given output.""" - def my_git_current_hash(dirname): - """mock function that can take the place of repo._git_current_hash""" - return hash_found, hash_name - return 
my_git_current_hash - - # ------------------------------------------------------------------------ - # Begin tests - # ------------------------------------------------------------------------ - - def test_ref_branch(self): - """Test that we correctly identify we are on a branch - """ - self._repo._git_current_branch = self._git_current_branch( - True, 'feature3') - self._repo._git_current_tag = self._git_current_tag(True, 'foo_tag') - self._repo._git_current_hash = self._git_current_hash(True, 'abc123') - expected = 'foo_tag (branch feature3)' - result = self._repo._current_ref(os.getcwd()) - self.assertEqual(result, expected) - - def test_ref_detached_tag(self): - """Test that we correctly identify that the ref is detached at a tag - """ - self._repo._git_current_branch = self._git_current_branch(False, '') - self._repo._git_current_tag = self._git_current_tag(True, 'foo_tag') - self._repo._git_current_hash = self._git_current_hash(True, 'abc123') - expected = 'foo_tag' - result = self._repo._current_ref(os.getcwd()) - self.assertEqual(result, expected) - - def test_ref_detached_hash(self): - """Test that we can identify ref is detached at a hash - - """ - self._repo._git_current_branch = self._git_current_branch(False, '') - self._repo._git_current_tag = self._git_current_tag(False, '') - self._repo._git_current_hash = self._git_current_hash(True, 'abc123') - expected = 'abc123' - result = self._repo._current_ref(os.getcwd()) - self.assertEqual(result, expected) - - def test_ref_none(self): - """Test that we correctly identify that we're not in a git repo. - """ - self._repo._git_current_branch = self._git_current_branch(False, '') - self._repo._git_current_tag = self._git_current_tag(False, '') - self._repo._git_current_hash = self._git_current_hash(False, '') - result = self._repo._current_ref(os.getcwd()) - self.assertEqual(result, EMPTY_STR) - - -class TestGitRepositoryCheckSync(unittest.TestCase): - """Test whether the GitRepository _check_sync_logic functionality is - correct. - - Note: there are a lot of combinations of state: - - - external description - tag, branch - - - working copy - - doesn't exist (not checked out) - - exists, no git info - incorrect protocol, e.g. svn, or tarball? - - exists, git info - - as expected: - - different from expected: - - detached tag, - - detached hash, - - detached branch (compare remote and branch), - - tracking branch (compare remote and branch), - - same remote - - different remote - - untracked branch - - Test list: - - doesn't exist - - exists no git info - - - num_external * (working copy expected + num_working copy different) - - total tests = 16 - - """ - - # NOTE(bja, 2017-11) pylint complains about long method names, but - # it is hard to differentiate tests without making them more - # cryptic. Also complains about too many public methods, but it - # doesn't really make sense to break this up. 
- # pylint: disable=invalid-name,too-many-public-methods - - TMP_FAKE_DIR = 'fake' - TMP_FAKE_GIT_DIR = os.path.join(TMP_FAKE_DIR, '.git') - - def setUp(self): - """Setup reusable git repository object - """ - self._name = 'component' - rdata = {ExternalsDescription.PROTOCOL: 'git', - ExternalsDescription.REPO_URL: - '/path/to/local/repo', - ExternalsDescription.TAG: 'tag1', - } - - data = {self._name: - { - ExternalsDescription.REQUIRED: False, - ExternalsDescription.PATH: self.TMP_FAKE_DIR, - ExternalsDescription.EXTERNALS: EMPTY_STR, - ExternalsDescription.REPO: rdata, - }, - } - - model = ExternalsDescriptionDict(data) - repo = model[self._name][ExternalsDescription.REPO] - self._repo = GitRepository('test', repo) - # The unit tests here don't care about the result of - # _current_ref, but we replace it here so that we don't need to - # worry about calling a possibly slow and possibly - # error-producing command (since _current_ref calls various git - # functions): - self._repo._current_ref = self._current_ref_empty - self._create_tmp_git_dir() - - # We have to override this class method rather than the self._repo - # instance method because it is called via - # GitRepository._remote_name_for_url, which is itself a @classmethod - # calls cls._git_remote_verbose(). - self._orignal_git_remote_verbose = GitRepository._git_remote_verbose - GitRepository._git_remote_verbose = self._git_remote_origin_upstream - def tearDown(self): - """Cleanup tmp stuff on the file system - """ - self._remove_tmp_git_dir() - - GitRepository._git_remote_verbose = self._orignal_git_remote_verbose - - def _create_tmp_git_dir(self): - """Create a temporary fake git directory for testing purposes. - """ - if not os.path.exists(self.TMP_FAKE_GIT_DIR): - os.makedirs(self.TMP_FAKE_GIT_DIR) - - def _remove_tmp_git_dir(self): - """Remove the temporary fake git directory - """ - if os.path.exists(self.TMP_FAKE_DIR): - shutil.rmtree(self.TMP_FAKE_DIR) - - # - # mock methods replacing git system calls - # - @staticmethod - def _current_ref_empty(dirname): - """Return an empty string. - - Drop-in for GitRepository._current_ref - """ - return EMPTY_STR - - @staticmethod - def _git_remote_origin_upstream(dirname): - """Return an info string that is a checkout hash. - - Drop-in for GitRepository._git_remote_verbose. - """ - return GIT_REMOTE_OUTPUT_ORIGIN_UPSTREAM - - @staticmethod - def _git_current_hash(myhash): - """Return a function that takes the place of repo._git_current_hash, - which returns the given hash - """ - def my_git_current_hash(dirname): - """mock function that can take the place of repo._git_current_hash""" - return 0, myhash - return my_git_current_hash - - def _git_revparse_commit(self, expected_ref, mystatus, myhash): - """Return a function that takes the place of - repo._git_revparse_commit, which returns a tuple: - (mystatus, myhash). 
- - Expects the passed-in ref to equal expected_ref - - status = 0 implies success, non-zero implies failure - """ - def my_git_revparse_commit(ref, dirname): - """mock function that can take the place of repo._git_revparse_commit""" - self.assertEqual(expected_ref, ref) - return mystatus, myhash - return my_git_revparse_commit - - # ---------------------------------------------------------------- - # - # Tests where working copy doesn't exist or is invalid - # - # ---------------------------------------------------------------- - def test_sync_dir_not_exist(self): - """Test that a directory that doesn't exist returns an error status - - Note: the Repository classes should be prevented from ever - working on an empty directory by the _Source object. - - """ - stat = ExternalStatus() - self._repo._check_sync(stat, 'invalid_directory_name') - self.assertEqual(stat.sync_state, ExternalStatus.STATUS_ERROR) - # check_dir should only modify the sync_state, not clean_state - self.assertEqual(stat.clean_state, ExternalStatus.DEFAULT) - - def test_sync_dir_exist_no_git_info(self): - """Test that a non-existent git repo returns an unknown status - """ - stat = ExternalStatus() - self._repo._tag = 'tag1' - self._repo._git_current_hash = self._git_current_hash('') - self._repo._git_revparse_commit = self._git_revparse_commit( - 'tag1', 1, '') - self._repo._check_sync(stat, self.TMP_FAKE_DIR) - self.assertEqual(stat.sync_state, ExternalStatus.UNKNOWN) - # check_sync should only modify the sync_state, not clean_state - self.assertEqual(stat.clean_state, ExternalStatus.DEFAULT) - - # ------------------------------------------------------------------------ - # - # Tests where version in configuration file is not a valid reference - # - # ------------------------------------------------------------------------ - - def test_sync_invalid_reference(self): - """Test that an invalid reference returns out-of-sync - """ - stat = ExternalStatus() - self._repo._tag = 'tag1' - self._repo._git_current_hash = self._git_current_hash('abc123') - self._repo._git_revparse_commit = self._git_revparse_commit( - 'tag1', 1, '') - self._repo._check_sync_logic(stat, self.TMP_FAKE_DIR) - self.assertEqual(stat.sync_state, ExternalStatus.MODEL_MODIFIED) - # check_sync should only modify the sync_state, not clean_state - self.assertEqual(stat.clean_state, ExternalStatus.DEFAULT) - - # ---------------------------------------------------------------- - # - # Tests where external description specifies a tag - # - # ---------------------------------------------------------------- - def test_sync_tag_on_same_hash(self): - """Test expect tag on same hash --> status ok - - """ - stat = ExternalStatus() - self._repo._tag = 'tag1' - self._repo._git_current_hash = self._git_current_hash('abc123') - self._repo._git_revparse_commit = self._git_revparse_commit( - 'tag1', 0, 'abc123') - self._repo._check_sync_logic(stat, self.TMP_FAKE_DIR) - self.assertEqual(stat.sync_state, ExternalStatus.STATUS_OK) - # check_sync should only modify the sync_state, not clean_state - self.assertEqual(stat.clean_state, ExternalStatus.DEFAULT) - - def test_sync_tag_on_different_hash(self): - """Test expect tag on a different hash --> status modified - - """ - stat = ExternalStatus() - self._repo._tag = 'tag1' - self._repo._git_current_hash = self._git_current_hash('def456') - self._repo._git_revparse_commit = self._git_revparse_commit( - 'tag1', 0, 'abc123') - self._repo._check_sync_logic(stat, self.TMP_FAKE_DIR) - self.assertEqual(stat.sync_state, 
ExternalStatus.MODEL_MODIFIED) - # check_sync should only modify the sync_state, not clean_state - self.assertEqual(stat.clean_state, ExternalStatus.DEFAULT) - - # ---------------------------------------------------------------- - # - # Tests where external description specifies a hash - # - # ---------------------------------------------------------------- - def test_sync_hash_on_same_hash(self): - """Test expect hash on same hash --> status ok - - """ - stat = ExternalStatus() - self._repo._tag = '' - self._repo._hash = 'abc' - self._repo._git_current_hash = self._git_current_hash('abc123') - self._repo._git_revparse_commit = self._git_revparse_commit( - 'abc', 0, 'abc123') - self._repo._check_sync_logic(stat, self.TMP_FAKE_DIR) - self.assertEqual(stat.sync_state, ExternalStatus.STATUS_OK) - # check_sync should only modify the sync_state, not clean_state - self.assertEqual(stat.clean_state, ExternalStatus.DEFAULT) - - def test_sync_hash_on_different_hash(self): - """Test expect hash on a different hash --> status modified - - """ - stat = ExternalStatus() - self._repo._tag = '' - self._repo._hash = 'abc' - self._repo._git_current_hash = self._git_current_hash('def456') - self._repo._git_revparse_commit = self._git_revparse_commit( - 'abc', 0, 'abc123') - self._repo._check_sync_logic(stat, self.TMP_FAKE_DIR) - self.assertEqual(stat.sync_state, ExternalStatus.MODEL_MODIFIED) - # check_sync should only modify the sync_state, not clean_state - self.assertEqual(stat.clean_state, ExternalStatus.DEFAULT) - - # ---------------------------------------------------------------- - # - # Tests where external description specifies a branch - # - # ---------------------------------------------------------------- - def test_sync_branch_on_same_hash(self): - """Test expect branch on same hash --> status ok - - """ - stat = ExternalStatus() - self._repo._branch = 'feature-2' - self._repo._tag = '' - self._repo._git_current_hash = self._git_current_hash('abc123') - self._repo._git_revparse_commit = ( - self._git_revparse_commit('origin/feature-2', 0, 'abc123')) - self._repo._check_sync_logic(stat, self.TMP_FAKE_DIR) - self.assertEqual(stat.sync_state, ExternalStatus.STATUS_OK) - # check_sync should only modify the sync_state, not clean_state - self.assertEqual(stat.clean_state, ExternalStatus.DEFAULT) - - def test_sync_branch_on_diff_hash(self): - """Test expect branch on diff hash --> status modified - - """ - stat = ExternalStatus() - self._repo._branch = 'feature-2' - self._repo._tag = '' - self._repo._git_current_hash = self._git_current_hash('abc123') - self._repo._git_revparse_commit = ( - self._git_revparse_commit('origin/feature-2', 0, 'def456')) - self._repo._check_sync_logic(stat, self.TMP_FAKE_DIR) - self.assertEqual(stat.sync_state, ExternalStatus.MODEL_MODIFIED) - # check_sync should only modify the sync_state, not clean_state - self.assertEqual(stat.clean_state, ExternalStatus.DEFAULT) - - def test_sync_branch_diff_remote(self): - """Test _remote_name_for_url with a different remote - - """ - stat = ExternalStatus() - self._repo._branch = 'feature-2' - self._repo._tag = '' - self._repo._url = '/path/to/other/repo' - self._repo._git_current_hash = self._git_current_hash('abc123') - self._repo._git_revparse_commit = ( - self._git_revparse_commit('upstream/feature-2', 0, 'def456')) - self._repo._check_sync_logic(stat, self.TMP_FAKE_DIR) - # The test passes if _git_revparse_commit is called with the - # expected argument - - def test_sync_branch_diff_remote2(self): - """Test _remote_name_for_url 
with a different remote - - """ - stat = ExternalStatus() - self._repo._branch = 'feature-2' - self._repo._tag = '' - self._repo._url = '/path/to/local/repo2' - self._repo._git_current_hash = self._git_current_hash('abc123') - self._repo._git_revparse_commit = ( - self._git_revparse_commit('other/feature-2', 0, 'def789')) - self._repo._check_sync_logic(stat, self.TMP_FAKE_DIR) - # The test passes if _git_revparse_commit is called with the - # expected argument - - def test_sync_branch_on_unknown_remote(self): - """Test expect branch, but remote is unknown --> status modified - - """ - stat = ExternalStatus() - self._repo._branch = 'feature-2' - self._repo._tag = '' - self._repo._url = '/path/to/unknown/repo' - self._repo._git_current_hash = self._git_current_hash('abc123') - self._repo._git_revparse_commit = ( - self._git_revparse_commit('unknown_remote/feature-2', 1, '')) - self._repo._check_sync_logic(stat, self.TMP_FAKE_DIR) - self.assertEqual(stat.sync_state, ExternalStatus.MODEL_MODIFIED) - # check_sync should only modify the sync_state, not clean_state - self.assertEqual(stat.clean_state, ExternalStatus.DEFAULT) - - def test_sync_branch_on_untracked_local(self): - """Test expect branch, on untracked branch in local repo --> status ok - - Setting the externals description to '.' indicates that the - user only wants to consider the current local repo state - without fetching from remotes. This is required to preserve - the current branch of a repository during an update. - - """ - stat = ExternalStatus() - self._repo._branch = 'feature3' - self._repo._tag = '' - self._repo._url = '.' - self._repo._git_current_hash = self._git_current_hash('abc123') - self._repo._git_revparse_commit = ( - self._git_revparse_commit('feature3', 0, 'abc123')) - self._repo._check_sync_logic(stat, self.TMP_FAKE_DIR) - self.assertEqual(stat.sync_state, ExternalStatus.STATUS_OK) - # check_sync should only modify the sync_state, not clean_state - self.assertEqual(stat.clean_state, ExternalStatus.DEFAULT) - - -class TestGitStatusPorcelain(unittest.TestCase): - """Test parsing of output from git status --porcelain=v1 -z - """ - # pylint: disable=C0103 - GIT_STATUS_PORCELAIN_V1_ALL = ( - r' D INSTALL\0MM Makefile\0M README.md\0R cmakelists.txt\0' - r'CMakeLists.txt\0D commit-message-template.txt\0A stuff.txt\0' - r'?? junk.txt') - - GIT_STATUS_PORCELAIN_CLEAN = r'' - - def test_porcelain_status_dirty(self): - """Verify that git status output is considered dirty when there are - listed files. - - """ - git_output = self.GIT_STATUS_PORCELAIN_V1_ALL - is_dirty = GitRepository._status_v1z_is_dirty(git_output) - self.assertTrue(is_dirty) - - def test_porcelain_status_clean(self): - """Verify that git status output is considered clean when there are no - listed files. 
- - """ - git_output = self.GIT_STATUS_PORCELAIN_CLEAN - is_dirty = GitRepository._status_v1z_is_dirty(git_output) - self.assertFalse(is_dirty) - - -class TestGitCreateRemoteName(unittest.TestCase): - """Test the create_remote_name method on the GitRepository class - """ - - def setUp(self): - """Common infrastructure for testing _create_remote_name - """ - self._rdata = {ExternalsDescription.PROTOCOL: 'git', - ExternalsDescription.REPO_URL: - 'empty', - ExternalsDescription.TAG: - 'very_useful_tag', - ExternalsDescription.BRANCH: EMPTY_STR, - ExternalsDescription.HASH: EMPTY_STR, - ExternalsDescription.SPARSE: EMPTY_STR, } - self._repo = GitRepository('test', self._rdata) - - def test_remote_git_proto(self): - """Test remote with git protocol - """ - self._repo._url = 'git@git.github.com:very_nice_org/useful_repo' - remote_name = self._repo._create_remote_name() - self.assertEqual(remote_name, 'very_nice_org_useful_repo') - - def test_remote_https_proto(self): - """Test remote with git protocol - """ - self._repo._url = 'https://www.github.com/very_nice_org/useful_repo' - remote_name = self._repo._create_remote_name() - self.assertEqual(remote_name, 'very_nice_org_useful_repo') - - def test_remote_local_abs(self): - """Test remote with git protocol - """ - self._repo._url = '/path/to/local/repositories/useful_repo' - remote_name = self._repo._create_remote_name() - self.assertEqual(remote_name, 'repositories_useful_repo') - - def test_remote_local_rel(self): - """Test remote with git protocol - """ - os.environ['TEST_VAR'] = '/my/path/to/repos' - self._repo._url = '${TEST_VAR}/../../useful_repo' - remote_name = self._repo._create_remote_name() - self.assertEqual(remote_name, 'path_useful_repo') - del os.environ['TEST_VAR'] - - -class TestVerifyTag(unittest.TestCase): - """Test logic verifying that a tag exists and is unique - - """ - - def setUp(self): - """Setup reusable git repository object - """ - self._name = 'component' - rdata = {ExternalsDescription.PROTOCOL: 'git', - ExternalsDescription.REPO_URL: - '/path/to/local/repo', - ExternalsDescription.TAG: 'tag1', - } - - data = {self._name: - { - ExternalsDescription.REQUIRED: False, - ExternalsDescription.PATH: 'tmp', - ExternalsDescription.EXTERNALS: EMPTY_STR, - ExternalsDescription.REPO: rdata, - }, - } - - model = ExternalsDescriptionDict(data) - repo = model[self._name][ExternalsDescription.REPO] - self._repo = GitRepository('test', repo) - - @staticmethod - def _shell_true(*args, **kwargs): - return 0 - - @staticmethod - def _shell_false(*args, **kwargs): - return 1 - - @staticmethod - def _mock_revparse_commit(ref, dirname): - _ = ref - return (TestValidRef._shell_true, '97ebc0e0deadc0de') - - @staticmethod - def _mock_revparse_commit_false(ref, dirname): - _ = ref - return (TestValidRef._shell_false, '97ebc0e0deadc0de') - - def test_tag_not_tag_branch_commit(self): - """Verify a non-tag returns false - """ - self._repo._git_showref_tag = self._shell_false - self._repo._git_showref_branch = self._shell_false - self._repo._git_lsremote_branch = self._shell_false - self._repo._git_revparse_commit = self._mock_revparse_commit_false - self._repo._tag = 'something' - remote_name = 'origin' - received, _ = self._repo._is_unique_tag(self._repo._tag, remote_name, - os.getcwd()) - self.assertFalse(received) - - def test_tag_not_tag(self): - """Verify a non-tag, untracked remote returns false - """ - self._repo._git_showref_tag = self._shell_false - self._repo._git_showref_branch = self._shell_true - self._repo._git_lsremote_branch = 
self._shell_true - self._repo._git_revparse_commit = self._mock_revparse_commit_false - self._repo._tag = 'tag1' - remote_name = 'origin' - received, _ = self._repo._is_unique_tag(self._repo._tag, remote_name, - os.getcwd()) - self.assertFalse(received) - - def test_tag_indeterminant(self): - """Verify an indeterminant tag/branch returns false - """ - self._repo._git_showref_tag = self._shell_true - self._repo._git_showref_branch = self._shell_true - self._repo._git_lsremote_branch = self._shell_true - self._repo._git_revparse_commit = self._mock_revparse_commit - self._repo._tag = 'something' - remote_name = 'origin' - received, _ = self._repo._is_unique_tag(self._repo._tag, remote_name, - os.getcwd()) - self.assertFalse(received) - - def test_tag_is_unique(self): - """Verify a unique tag match returns true - """ - self._repo._git_showref_tag = self._shell_true - self._repo._git_showref_branch = self._shell_false - self._repo._git_lsremote_branch = self._shell_false - self._repo._git_revparse_commit = self._mock_revparse_commit - self._repo._tag = 'tag1' - remote_name = 'origin' - received, _ = self._repo._is_unique_tag(self._repo._tag, remote_name, - os.getcwd()) - self.assertTrue(received) - - def test_tag_is_not_hash(self): - """Verify a commit hash is not classified as a tag - """ - self._repo._git_showref_tag = self._shell_false - self._repo._git_showref_branch = self._shell_false - self._repo._git_lsremote_branch = self._shell_false - self._repo._git_revparse_commit = self._mock_revparse_commit - self._repo._tag = '97ebc0e0' - remote_name = 'origin' - received, _ = self._repo._is_unique_tag(self._repo._tag, remote_name, - os.getcwd()) - self.assertFalse(received) - - def test_hash_is_commit(self): - """Verify a commit hash is not classified as a tag - """ - self._repo._git_showref_tag = self._shell_false - self._repo._git_showref_branch = self._shell_false - self._repo._git_lsremote_branch = self._shell_false - self._repo._git_revparse_commit = self._mock_revparse_commit - self._repo._tag = '97ebc0e0' - remote_name = 'origin' - received, _ = self._repo._is_unique_tag(self._repo._tag, remote_name, - os.getcwd()) - self.assertFalse(received) - - -class TestValidRef(unittest.TestCase): - """Test logic verifying that a reference is a valid tag, branch or sha1 - - """ - - def setUp(self): - """Setup reusable git repository object - """ - self._name = 'component' - rdata = {ExternalsDescription.PROTOCOL: 'git', - ExternalsDescription.REPO_URL: - '/path/to/local/repo', - ExternalsDescription.TAG: 'tag1', - } - - data = {self._name: - { - ExternalsDescription.REQUIRED: False, - ExternalsDescription.PATH: 'tmp', - ExternalsDescription.EXTERNALS: EMPTY_STR, - ExternalsDescription.REPO: rdata, - }, - } - - model = ExternalsDescriptionDict(data) - repo = model[self._name][ExternalsDescription.REPO] - self._repo = GitRepository('test', repo) - - @staticmethod - def _shell_true(url, remote=None): - _ = url - _ = remote - return 0 - - @staticmethod - def _shell_false(url, remote=None): - _ = url - _ = remote - return 1 - - @staticmethod - def _mock_revparse_commit_false(ref, dirname): - _ = ref - return (TestValidRef._shell_false, '') - - @staticmethod - def _mock_revparse_commit_true(ref, dirname): - _ = ref - _ = dirname - return (TestValidRef._shell_true, '') - - def test_valid_ref_is_invalid(self): - """Verify an invalid reference raises an exception - """ - self._repo._git_showref_tag = self._shell_false - self._repo._git_showref_branch = self._shell_false - self._repo._git_lsremote_branch = 
self._shell_false - self._repo._git_revparse_commit = self._mock_revparse_commit_false - self._repo._tag = 'invalid_ref' - with self.assertRaises(RuntimeError): - self._repo._check_for_valid_ref(self._repo._tag, - remote_name=None, - dirname=os.getcwd()) - - def test_valid_tag(self): - """Verify a valid tag return true - """ - self._repo._git_showref_tag = self._shell_true - self._repo._git_showref_branch = self._shell_false - self._repo._git_lsremote_branch = self._shell_false - self._repo._git_revparse_commit = self._mock_revparse_commit_true - self._repo._tag = 'tag1' - received = self._repo._check_for_valid_ref(self._repo._tag, - remote_name=None, - dirname=os.getcwd()) - self.assertTrue(received) - - def test_valid_branch(self): - """Verify a valid tag return true - """ - self._repo._git_showref_tag = self._shell_false - self._repo._git_showref_branch = self._shell_true - self._repo._git_lsremote_branch = self._shell_false - self._repo._git_revparse_commit = self._mock_revparse_commit_true - self._repo._tag = 'tag1' - received = self._repo._check_for_valid_ref(self._repo._tag, - remote_name=None, - dirname=os.getcwd()) - self.assertTrue(received) - - def test_valid_hash(self): - """Verify a valid hash return true - """ - def _mock_revparse_commit_true(ref, dirname): - _ = ref - return (0, '56cc0b539426eb26810af9e') - - self._repo._git_showref_tag = self._shell_false - self._repo._git_showref_branch = self._shell_false - self._repo._git_lsremote_branch = self._shell_false - self._repo._git_revparse_commit = _mock_revparse_commit_true - self._repo._hash = '56cc0b5394' - received = self._repo._check_for_valid_ref(self._repo._hash, - remote_name=None, - dirname=os.getcwd()) - self.assertTrue(received) - - -if __name__ == '__main__': - unittest.main() diff --git a/manage_externals/test/test_unit_repository_svn.py b/manage_externals/test/test_unit_repository_svn.py deleted file mode 100755 index d9309df7f6..0000000000 --- a/manage_externals/test/test_unit_repository_svn.py +++ /dev/null @@ -1,501 +0,0 @@ -#!/usr/bin/env python3 - -"""Unit test driver for checkout_externals - -Note: this script assume the path to the checkout_externals.py module is -already in the python path. 
- -""" - -from __future__ import absolute_import -from __future__ import unicode_literals -from __future__ import print_function - -import unittest - -from manic.repository_svn import SvnRepository -from manic.externals_status import ExternalStatus -from manic.externals_description import ExternalsDescription -from manic.externals_description import ExternalsDescriptionDict -from manic.global_constants import EMPTY_STR - -# pylint: disable=W0212 - -SVN_INFO_MOSART = """Path: components/mosart -Working Copy Root Path: /Users/andreb/projects/ncar/git-conversion/clm-dev-experimental/components/mosart -URL: https://svn-ccsm-models.cgd.ucar.edu/mosart/trunk_tags/mosart1_0_26 -Relative URL: ^/mosart/trunk_tags/mosart1_0_26 -Repository Root: https://svn-ccsm-models.cgd.ucar.edu -Repository UUID: fe37f545-8307-0410-aea5-b40df96820b5 -Revision: 86711 -Node Kind: directory -Schedule: normal -Last Changed Author: erik -Last Changed Rev: 86031 -Last Changed Date: 2017-07-07 12:28:10 -0600 (Fri, 07 Jul 2017) -""" -SVN_INFO_CISM = """ -Path: components/cism -Working Copy Root Path: /Users/andreb/projects/ncar/git-conversion/clm-dev-experimental/components/cism -URL: https://svn-ccsm-models.cgd.ucar.edu/glc/trunk_tags/cism2_1_37 -Relative URL: ^/glc/trunk_tags/cism2_1_37 -Repository Root: https://svn-ccsm-models.cgd.ucar.edu -Repository UUID: fe37f545-8307-0410-aea5-b40df96820b5 -Revision: 86711 -Node Kind: directory -Schedule: normal -Last Changed Author: sacks -Last Changed Rev: 85704 -Last Changed Date: 2017-06-15 05:59:28 -0600 (Thu, 15 Jun 2017) -""" - - -class TestSvnRepositoryCheckURL(unittest.TestCase): - """Verify that the svn_check_url function is working as expected. - """ - - def setUp(self): - """Setup reusable svn repository object - """ - self._name = 'component' - rdata = {ExternalsDescription.PROTOCOL: 'svn', - ExternalsDescription.REPO_URL: - 'https://svn-ccsm-models.cgd.ucar.edu', - ExternalsDescription.TAG: - 'mosart/trunk_tags/mosart1_0_26', - } - - data = {self._name: - { - ExternalsDescription.REQUIRED: False, - ExternalsDescription.PATH: 'junk', - ExternalsDescription.EXTERNALS: '', - ExternalsDescription.REPO: rdata, - }, - } - - model = ExternalsDescriptionDict(data) - repo = model[self._name][ExternalsDescription.REPO] - self._repo = SvnRepository('test', repo) - - def test_check_url_same(self): - """Test that we correctly identify that the correct URL. - """ - svn_output = SVN_INFO_MOSART - expected_url = self._repo.url() - result, current_version = \ - self._repo._check_url(svn_output, expected_url) - self.assertEqual(result, ExternalStatus.STATUS_OK) - self.assertEqual(current_version, 'mosart/trunk_tags/mosart1_0_26') - - def test_check_url_different(self): - """Test that we correctly reject an incorrect URL. - """ - svn_output = SVN_INFO_CISM - expected_url = self._repo.url() - result, current_version = \ - self._repo._check_url(svn_output, expected_url) - self.assertEqual(result, ExternalStatus.MODEL_MODIFIED) - self.assertEqual(current_version, 'glc/trunk_tags/cism2_1_37') - - def test_check_url_none(self): - """Test that we can handle an empty string for output, e.g. not an svn - repo. - - """ - svn_output = EMPTY_STR - expected_url = self._repo.url() - result, current_version = \ - self._repo._check_url(svn_output, expected_url) - self.assertEqual(result, ExternalStatus.UNKNOWN) - self.assertEqual(current_version, '') - - -class TestSvnRepositoryCheckSync(unittest.TestCase): - """Test whether the SvnRepository svn_check_sync functionality is - correct. 
- - """ - - def setUp(self): - """Setup reusable svn repository object - """ - self._name = "component" - rdata = {ExternalsDescription.PROTOCOL: 'svn', - ExternalsDescription.REPO_URL: - 'https://svn-ccsm-models.cgd.ucar.edu/', - ExternalsDescription.TAG: - 'mosart/trunk_tags/mosart1_0_26', - } - - data = {self._name: - { - ExternalsDescription.REQUIRED: False, - ExternalsDescription.PATH: 'junk', - ExternalsDescription.EXTERNALS: EMPTY_STR, - ExternalsDescription.REPO: rdata, - }, - } - - model = ExternalsDescriptionDict(data) - repo = model[self._name][ExternalsDescription.REPO] - self._repo = SvnRepository('test', repo) - - @staticmethod - def _svn_info_empty(*_): - """Return an empty info string. Simulates svn info failing. - """ - return '' - - @staticmethod - def _svn_info_synced(*_): - """Return an info sting that is synced with the setUp data - """ - return SVN_INFO_MOSART - - @staticmethod - def _svn_info_modified(*_): - """Return and info string that is modified from the setUp data - """ - return SVN_INFO_CISM - - def test_repo_dir_not_exist(self): - """Test that a directory that doesn't exist returns an error status - - Note: the Repository classes should be prevented from ever - working on an empty directory by the _Source object. - - """ - stat = ExternalStatus() - self._repo._check_sync(stat, 'junk') - self.assertEqual(stat.sync_state, ExternalStatus.STATUS_ERROR) - # check_dir should only modify the sync_state, not clean_state - self.assertEqual(stat.clean_state, ExternalStatus.DEFAULT) - - def test_repo_dir_exist_no_svn_info(self): - """Test that an empty info string returns an unknown status - """ - stat = ExternalStatus() - # Now we over-ride the _svn_info method on the repo to return - # a known value without requiring access to svn. - self._repo._svn_info = self._svn_info_empty - self._repo._check_sync(stat, '.') - self.assertEqual(stat.sync_state, ExternalStatus.UNKNOWN) - # check_dir should only modify the sync_state, not clean_state - self.assertEqual(stat.clean_state, ExternalStatus.DEFAULT) - - def test_repo_dir_synced(self): - """Test that a valid info string that is synced to the repo in the - externals description returns an ok status. - - """ - stat = ExternalStatus() - # Now we over-ride the _svn_info method on the repo to return - # a known value without requiring access to svn. - self._repo._svn_info = self._svn_info_synced - self._repo._check_sync(stat, '.') - self.assertEqual(stat.sync_state, ExternalStatus.STATUS_OK) - # check_dir should only modify the sync_state, not clean_state - self.assertEqual(stat.clean_state, ExternalStatus.DEFAULT) - - def test_repo_dir_modified(self): - """Test that a valid svn info string that is out of sync with the - externals description returns a modified status. - - """ - stat = ExternalStatus() - # Now we over-ride the _svn_info method on the repo to return - # a known value without requiring access to svn. 
- self._repo._svn_info = self._svn_info_modified - self._repo._check_sync(stat, '.') - self.assertEqual(stat.sync_state, ExternalStatus.MODEL_MODIFIED) - # check_dir should only modify the sync_state, not clean_state - self.assertEqual(stat.clean_state, ExternalStatus.DEFAULT) - - -class TestSVNStatusXML(unittest.TestCase): - """Test parsing of svn status xml output - """ - SVN_STATUS_XML_DIRTY_ALL = ''' - - - - - -sacks -2017-06-15T11:59:00.355419Z - - - - - - -sacks -2013-02-07T16:17:56.412878Z - - - - - - -sacks -2017-05-01T16:48:27.893741Z - - - - - - - - - - - - - - - - -''' - - SVN_STATUS_XML_DIRTY_MISSING = ''' - - - - - -sacks -2017-06-15T11:59:00.355419Z - - - - - - - - -''' - - SVN_STATUS_XML_DIRTY_MODIFIED = ''' - - - - - -sacks -2013-02-07T16:17:56.412878Z - - - - - - - - -''' - - SVN_STATUS_XML_DIRTY_DELETED = ''' - - - - - -sacks -2017-05-01T16:48:27.893741Z - - - - - - - - -''' - - SVN_STATUS_XML_DIRTY_UNVERSION = ''' - - - - - - - - - - - -''' - - SVN_STATUS_XML_DIRTY_ADDED = ''' - - - - - - - - - - - -''' - - SVN_STATUS_XML_CLEAN = ''' - - - - - - - - - - - -''' - - def test_xml_status_dirty_missing(self): - """Verify that svn status output is consindered dirty when there is a - missing file. - - """ - svn_output = self.SVN_STATUS_XML_DIRTY_MISSING - is_dirty = SvnRepository.xml_status_is_dirty( - svn_output) - self.assertTrue(is_dirty) - - def test_xml_status_dirty_modified(self): - """Verify that svn status output is consindered dirty when there is a - modified file. - """ - svn_output = self.SVN_STATUS_XML_DIRTY_MODIFIED - is_dirty = SvnRepository.xml_status_is_dirty( - svn_output) - self.assertTrue(is_dirty) - - def test_xml_status_dirty_deleted(self): - """Verify that svn status output is consindered dirty when there is a - deleted file. - """ - svn_output = self.SVN_STATUS_XML_DIRTY_DELETED - is_dirty = SvnRepository.xml_status_is_dirty( - svn_output) - self.assertTrue(is_dirty) - - def test_xml_status_dirty_unversion(self): - """Verify that svn status output ignores unversioned files when making - the clean/dirty decision. - - """ - svn_output = self.SVN_STATUS_XML_DIRTY_UNVERSION - is_dirty = SvnRepository.xml_status_is_dirty( - svn_output) - self.assertFalse(is_dirty) - - def test_xml_status_dirty_added(self): - """Verify that svn status output is consindered dirty when there is a - added file. - """ - svn_output = self.SVN_STATUS_XML_DIRTY_ADDED - is_dirty = SvnRepository.xml_status_is_dirty( - svn_output) - self.assertTrue(is_dirty) - - def test_xml_status_dirty_all(self): - """Verify that svn status output is consindered dirty when there are - multiple dirty files.. - - """ - svn_output = self.SVN_STATUS_XML_DIRTY_ALL - is_dirty = SvnRepository.xml_status_is_dirty( - svn_output) - self.assertTrue(is_dirty) - - def test_xml_status_dirty_clean(self): - """Verify that svn status output is consindered clean when there are - no 'dirty' files. This means accepting untracked and externals. 
- - """ - svn_output = self.SVN_STATUS_XML_CLEAN - is_dirty = SvnRepository.xml_status_is_dirty( - svn_output) - self.assertFalse(is_dirty) - - -if __name__ == '__main__': - unittest.main() diff --git a/manage_externals/test/test_unit_utils.py b/manage_externals/test/test_unit_utils.py deleted file mode 100644 index 80e1636649..0000000000 --- a/manage_externals/test/test_unit_utils.py +++ /dev/null @@ -1,350 +0,0 @@ -#!/usr/bin/env python3 - -"""Unit test driver for checkout_externals - -Note: this script assume the path to the checkout_externals.py module is -already in the python path. - -""" - -from __future__ import absolute_import -from __future__ import unicode_literals -from __future__ import print_function - -import os -import unittest - -from manic.utils import last_n_lines, indent_string -from manic.utils import str_to_bool, execute_subprocess -from manic.utils import is_remote_url, split_remote_url, expand_local_url - - -class TestExecuteSubprocess(unittest.TestCase): - """Test the application logic of execute_subprocess wrapper - """ - - def test_exesub_return_stat_err(self): - """Test that execute_subprocess returns a status code when caller - requests and the executed subprocess fails. - - """ - cmd = ['false'] - status = execute_subprocess(cmd, status_to_caller=True) - self.assertEqual(status, 1) - - def test_exesub_return_stat_ok(self): - """Test that execute_subprocess returns a status code when caller - requests and the executed subprocess succeeds. - - """ - cmd = ['true'] - status = execute_subprocess(cmd, status_to_caller=True) - self.assertEqual(status, 0) - - def test_exesub_except_stat_err(self): - """Test that execute_subprocess raises an exception on error when - caller doesn't request return code - - """ - cmd = ['false'] - with self.assertRaises(RuntimeError): - execute_subprocess(cmd, status_to_caller=False) - - -class TestLastNLines(unittest.TestCase): - """Test the last_n_lines function. - - """ - - def test_last_n_lines_short(self): - """With a message with <= n lines, result of last_n_lines should - just be the original message. - - """ - mystr = """three -line -string -""" - - mystr_truncated = last_n_lines( - mystr, 3, truncation_message='[truncated]') - self.assertEqual(mystr, mystr_truncated) - - def test_last_n_lines_long(self): - """With a message with > n lines, result of last_n_lines should - be a truncated string. - - """ - mystr = """a -big -five -line -string -""" - expected = """[truncated] -five -line -string -""" - - mystr_truncated = last_n_lines( - mystr, 3, truncation_message='[truncated]') - self.assertEqual(expected, mystr_truncated) - - -class TestIndentStr(unittest.TestCase): - """Test the indent_string function. - - """ - - def test_indent_string_singleline(self): - """Test the indent_string function with a single-line string - - """ - mystr = 'foo' - result = indent_string(mystr, 4) - expected = ' foo' - self.assertEqual(expected, result) - - def test_indent_string_multiline(self): - """Test the indent_string function with a multi-line string - - """ - mystr = """hello -hi -goodbye -""" - result = indent_string(mystr, 2) - expected = """ hello - hi - goodbye -""" - self.assertEqual(expected, result) - - -class TestStrToBool(unittest.TestCase): - """Test the string to boolean conversion routine. - - """ - - def test_case_insensitive_true(self): - """Verify that case insensitive variants of 'true' returns the True - boolean. 
- - """ - values = ['true', 'TRUE', 'True', 'tRuE', 't', 'T', ] - for value in values: - received = str_to_bool(value) - self.assertTrue(received) - - def test_case_insensitive_false(self): - """Verify that case insensitive variants of 'false' returns the False - boolean. - - """ - values = ['false', 'FALSE', 'False', 'fAlSe', 'f', 'F', ] - for value in values: - received = str_to_bool(value) - self.assertFalse(received) - - def test_invalid_str_error(self): - """Verify that a non-true/false string generates a runtime error. - """ - values = ['not_true_or_false', 'A', '1', '0', - 'false_is_not_true', 'true_is_not_false'] - for value in values: - with self.assertRaises(RuntimeError): - str_to_bool(value) - - -class TestIsRemoteURL(unittest.TestCase): - """Crude url checking to determine if a url is local or remote. - - """ - - def test_url_remote_git(self): - """verify that a remote git url is identified. - """ - url = 'git@somewhere' - is_remote = is_remote_url(url) - self.assertTrue(is_remote) - - def test_url_remote_ssh(self): - """verify that a remote ssh url is identified. - """ - url = 'ssh://user@somewhere' - is_remote = is_remote_url(url) - self.assertTrue(is_remote) - - def test_url_remote_http(self): - """verify that a remote http url is identified. - """ - url = 'http://somewhere' - is_remote = is_remote_url(url) - self.assertTrue(is_remote) - - def test_url_remote_https(self): - """verify that a remote https url is identified. - """ - url = 'https://somewhere' - is_remote = is_remote_url(url) - self.assertTrue(is_remote) - - def test_url_local_user(self): - """verify that a local path with '~/path/to/repo' gets rejected - - """ - url = '~/path/to/repo' - is_remote = is_remote_url(url) - self.assertFalse(is_remote) - - def test_url_local_var_curly(self): - """verify that a local path with env var '${HOME}' gets rejected - """ - url = '${HOME}/path/to/repo' - is_remote = is_remote_url(url) - self.assertFalse(is_remote) - - def test_url_local_var(self): - """verify that a local path with an env var '$HOME' gets rejected - """ - url = '$HOME/path/to/repo' - is_remote = is_remote_url(url) - self.assertFalse(is_remote) - - def test_url_local_abs(self): - """verify that a local abs path gets rejected - """ - url = '/path/to/repo' - is_remote = is_remote_url(url) - self.assertFalse(is_remote) - - def test_url_local_rel(self): - """verify that a local relative path gets rejected - """ - url = '../../path/to/repo' - is_remote = is_remote_url(url) - self.assertFalse(is_remote) - - -class TestSplitRemoteURL(unittest.TestCase): - """Crude url checking to determine if a url is local or remote. - - """ - - def test_url_remote_git(self): - """verify that a remote git url is identified. - """ - url = 'git@somewhere.com:org/repo' - received = split_remote_url(url) - self.assertEqual(received, "org/repo") - - def test_url_remote_ssh(self): - """verify that a remote ssh url is identified. - """ - url = 'ssh://user@somewhere.com/path/to/repo' - received = split_remote_url(url) - self.assertEqual(received, 'somewhere.com/path/to/repo') - - def test_url_remote_http(self): - """verify that a remote http url is identified. - """ - url = 'http://somewhere.org/path/to/repo' - received = split_remote_url(url) - self.assertEqual(received, 'somewhere.org/path/to/repo') - - def test_url_remote_https(self): - """verify that a remote http url is identified. 
- """ - url = 'http://somewhere.gov/path/to/repo' - received = split_remote_url(url) - self.assertEqual(received, 'somewhere.gov/path/to/repo') - - def test_url_local_url_unchanged(self): - """verify that a local path is unchanged - - """ - url = '/path/to/repo' - received = split_remote_url(url) - self.assertEqual(received, url) - - -class TestExpandLocalURL(unittest.TestCase): - """Crude url checking to determine if a url is local or remote. - - Remote should be unmodified. - - Local, should perform user and variable expansion. - - """ - - def test_url_local_user1(self): - """verify that a local path with '~/path/to/repo' gets expanded to an - absolute path. - - NOTE(bja, 2017-11) we can't test for something like: - '~user/path/to/repo' because the user has to be in the local - machine password directory and we don't know a user name that - is valid on every system....? - - """ - field = 'test' - url = '~/path/to/repo' - received = expand_local_url(url, field) - print(received) - self.assertTrue(os.path.isabs(received)) - - def test_url_local_expand_curly(self): - """verify that a local path with '${HOME}' gets expanded to an absolute path. - """ - field = 'test' - url = '${HOME}/path/to/repo' - received = expand_local_url(url, field) - self.assertTrue(os.path.isabs(received)) - - def test_url_local_expand_var(self): - """verify that a local path with '$HOME' gets expanded to an absolute path. - """ - field = 'test' - url = '$HOME/path/to/repo' - received = expand_local_url(url, field) - self.assertTrue(os.path.isabs(received)) - - def test_url_local_env_missing(self): - """verify that a local path with env var that is missing gets left as-is - - """ - field = 'test' - url = '$TMP_VAR/path/to/repo' - received = expand_local_url(url, field) - print(received) - self.assertEqual(received, url) - - def test_url_local_expand_env(self): - """verify that a local path with another env var gets expanded to an - absolute path. - - """ - field = 'test' - os.environ['TMP_VAR'] = '/some/absolute' - url = '$TMP_VAR/path/to/repo' - received = expand_local_url(url, field) - del os.environ['TMP_VAR'] - print(received) - self.assertTrue(os.path.isabs(received)) - self.assertEqual(received, '/some/absolute/path/to/repo') - - def test_url_local_normalize_rel(self): - """verify that a local path with another env var gets expanded to an - absolute path. 
- - """ - field = 'test' - url = '/this/is/a/long/../path/to/a/repo' - received = expand_local_url(url, field) - print(received) - self.assertEqual(received, '/this/is/a/path/to/a/repo') - - -if __name__ == '__main__': - unittest.main() From b463da08ee3c5d40fe59150ec74db795e12a06e8 Mon Sep 17 00:00:00 2001 From: Jim Edwards Date: Wed, 29 May 2024 09:51:50 -0600 Subject: [PATCH 135/161] using clm branch --- .gitmodules | 4 ++-- bin/git-fleximod | 8 ++++++++ components/clm | 2 +- 3 files changed, 11 insertions(+), 3 deletions(-) create mode 100755 bin/git-fleximod diff --git a/.gitmodules b/.gitmodules index 4b2a94beb3..37fba58bd5 100644 --- a/.gitmodules +++ b/.gitmodules @@ -185,8 +185,8 @@ fxDONOTUSEurl = https://github.com/NCAR/ParallelIO [submodule "clm"] path = components/clm -url = https://github.com/ESCOMP/CTSM -fxtag = ctsm5.2.007 +url = https://github.com/jedwards4b/CTSM +fxtag = ctsm5.2.007/candidate fxrequired = ToplevelRequired fxDONOTUSEurl = https://github.com/ESCOMP/CTSM diff --git a/bin/git-fleximod b/bin/git-fleximod new file mode 100755 index 0000000000..f69ede1c22 --- /dev/null +++ b/bin/git-fleximod @@ -0,0 +1,8 @@ +#!/usr/bin/env python3 +import sys +import os +sys.path.insert(0,os.path.abspath(os.path.join(os.path.dirname(__file__),"..",".lib","git-fleximod"))) +from git_fleximod.git_fleximod import main + +if __name__ == '__main__': + sys.exit(main()) diff --git a/components/clm b/components/clm index 6aebaad42f..5cdf117265 160000 --- a/components/clm +++ b/components/clm @@ -1 +1 @@ -Subproject commit 6aebaad42f1db6d8be2c84205adce60a884cc254 +Subproject commit 5cdf117265bb457ed64af741be0caed593a3a82b From 585ff448d06721b777663294ec666bdcb22c5178 Mon Sep 17 00:00:00 2001 From: Jim Edwards Date: Wed, 29 May 2024 09:55:44 -0600 Subject: [PATCH 136/161] Squashed '.lib/git-fleximod/' content from commit 94121295 git-subtree-dir: .lib/git-fleximod git-subtree-split: 94121295e57a1d6e713b8e3ba84261d9e9cfebe6 --- .github/workflows/pre-commit | 13 + .github/workflows/pytest.yaml | 77 ++++ .pre-commit-config.yaml | 18 + License | 20 + README.md | 110 ++++++ doc/Makefile | 20 + doc/conf.py | 26 ++ doc/index.rst | 24 ++ doc/make.bat | 35 ++ escomp_install | 25 ++ git_fleximod/__init__.py | 0 git_fleximod/cli.py | 119 ++++++ git_fleximod/git_fleximod.py | 601 +++++++++++++++++++++++++++++ git_fleximod/gitinterface.py | 79 ++++ git_fleximod/gitmodules.py | 97 +++++ git_fleximod/lstripreader.py | 43 +++ git_fleximod/metoflexi.py | 236 ++++++++++++ git_fleximod/utils.py | 365 ++++++++++++++++++ poetry.lock | 693 ++++++++++++++++++++++++++++++++++ pyproject.toml | 41 ++ tbump.toml | 43 +++ tests/__init__.py | 3 + tests/conftest.py | 138 +++++++ tests/test_a_import.py | 8 + tests/test_b_update.py | 26 ++ tests/test_c_required.py | 30 ++ tests/test_d_complex.py | 67 ++++ 27 files changed, 2957 insertions(+) create mode 100644 .github/workflows/pre-commit create mode 100644 .github/workflows/pytest.yaml create mode 100644 .pre-commit-config.yaml create mode 100644 License create mode 100644 README.md create mode 100644 doc/Makefile create mode 100644 doc/conf.py create mode 100644 doc/index.rst create mode 100644 doc/make.bat create mode 100644 escomp_install create mode 100644 git_fleximod/__init__.py create mode 100644 git_fleximod/cli.py create mode 100755 git_fleximod/git_fleximod.py create mode 100644 git_fleximod/gitinterface.py create mode 100644 git_fleximod/gitmodules.py create mode 100644 git_fleximod/lstripreader.py create mode 100755 git_fleximod/metoflexi.py create mode 
100644 git_fleximod/utils.py create mode 100644 poetry.lock create mode 100644 pyproject.toml create mode 100644 tbump.toml create mode 100644 tests/__init__.py create mode 100644 tests/conftest.py create mode 100644 tests/test_a_import.py create mode 100644 tests/test_b_update.py create mode 100644 tests/test_c_required.py create mode 100644 tests/test_d_complex.py diff --git a/.github/workflows/pre-commit b/.github/workflows/pre-commit new file mode 100644 index 0000000000..1a6ad0082a --- /dev/null +++ b/.github/workflows/pre-commit @@ -0,0 +1,13 @@ +name: pre-commit +on: + pull_request: + push: + branches: [main] + +jobs: + pre-commit: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + - uses: actions/setup-python@v3 + - uses: pre-commit/action@v3.0.0 diff --git a/.github/workflows/pytest.yaml b/.github/workflows/pytest.yaml new file mode 100644 index 0000000000..0868dd9a33 --- /dev/null +++ b/.github/workflows/pytest.yaml @@ -0,0 +1,77 @@ +# Run this job on pushes to `main`, and for pull requests. If you don't specify +# `branches: [main], then this actions runs _twice_ on pull requests, which is +# annoying. + +on: + push: + branches: [main] + pull_request: + branches: [main] + +jobs: + test: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + + # If you wanted to use multiple Python versions, you'd have specify a matrix in the job and + # reference the matrixe python version here. + - uses: actions/setup-python@v5 + with: + python-version: '3.9' + + # Cache the installation of Poetry itself, e.g. the next step. This prevents the workflow + # from installing Poetry every time, which can be slow. Note the use of the Poetry version + # number in the cache key, and the "-0" suffix: this allows you to invalidate the cache + # manually if/when you want to upgrade Poetry, or if something goes wrong. This could be + # mildly cleaner by using an environment variable, but I don't really care. + - name: cache poetry install + uses: actions/cache@v4 + with: + path: ~/.local + key: poetry-1.7.1 + + # Install Poetry. You could do this manually, or there are several actions that do this. + # `snok/install-poetry` seems to be minimal yet complete, and really just calls out to + # Poetry's default install script, which feels correct. I pin the Poetry version here + # because Poetry does occasionally change APIs between versions and I don't want my + # actions to break if it does. + # + # The key configuration value here is `virtualenvs-in-project: true`: this creates the + # venv as a `.venv` in your testing directory, which allows the next step to easily + # cache it. + - uses: snok/install-poetry@v1 + with: + version: 1.7.1 + virtualenvs-create: true + virtualenvs-in-project: true + + # Cache your dependencies (i.e. all the stuff in your `pyproject.toml`). Note the cache + # key: if you're using multiple Python versions, or multiple OSes, you'd need to include + # them in the cache key. I'm not, so it can be simple and just depend on the poetry.lock. + - name: cache deps + id: cache-deps + uses: actions/cache@v4 + with: + path: .venv + key: pydeps-${{ hashFiles('**/poetry.lock') }} + + # Install dependencies. `--no-root` means "install all dependencies but not the project + # itself", which is what you want to avoid caching _your_ code. The `if` statement + # ensures this only runs on a cache miss. + - run: poetry install --no-interaction --no-root + if: steps.cache-deps.outputs.cache-hit != 'true' + + # Now install _your_ project. 
This isn't necessary for many types of projects -- particularly + # things like Django apps don't need this. But it's a good idea since it fully-exercises the + # pyproject.toml and makes that if you add things like console-scripts at some point that + # they'll be installed and working. + - run: poetry install --no-interaction + + # And finally run tests. I'm using pytest and all my pytest config is in my `pyproject.toml` + # so this line is super-simple. But it could be as complex as you need. + - run: | + git config --global user.name "${GITHUB_ACTOR}" + git config --global user.email "${GITHUB_ACTOR_ID}+${GITHUB_ACTOR}@users.noreply.github.com" + poetry run pytest + diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 0000000000..2f6089da72 --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,18 @@ +exclude: ^utils/.*$ + +repos: + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.0.1 + hooks: + - id: end-of-file-fixer + - id: trailing-whitespace + - repo: https://github.com/psf/black + rev: 22.3.0 + hooks: + - id: black + - repo: https://github.com/PyCQA/pylint + rev: v2.11.1 + hooks: + - id: pylint + args: + - --disable=I,C,R,logging-not-lazy,wildcard-import,unused-wildcard-import,fixme,broad-except,bare-except,eval-used,exec-used,global-statement,logging-format-interpolation,no-name-in-module,arguments-renamed,unspecified-encoding,protected-access,import-error,no-member diff --git a/License b/License new file mode 100644 index 0000000000..2c6fe768c2 --- /dev/null +++ b/License @@ -0,0 +1,20 @@ +Copyright 2024 National Center for Atmospheric Sciences (NCAR) + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +“Softwareâ€), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED “AS ISâ€, WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/README.md b/README.md new file mode 100644 index 0000000000..d1ef632f28 --- /dev/null +++ b/README.md @@ -0,0 +1,110 @@ +# git-fleximod + +Flexible, Enhanced Submodule Management for Git + +## Overview + +Git-fleximod is a Python-based tool that extends Git's submodule and sparse checkout capabilities, offering additional features for managing submodules in a more flexible and efficient way. + +## Installation + +#TODO Install using pip: +# pip install git-fleximod + If you choose to locate git-fleximod in your path you can access it via command: git fleximod + +## Usage + + Basic Usage: + git fleximod [options] + Available Commands: + status: Display the status of submodules. + update: Update submodules to the tag indicated in .gitmodules variable fxtag. 
+ test: Make sure that fxtags and submodule hashes are consistant, + make sure that official urls (as defined by fxDONOTUSEurl) are set + make sure that fxtags are defined for all submodules + Additional Options: + See git fleximod --help for more details. + +## Supported .gitmodules Variables + + fxtag: Specify a specific tag or branch to checkout for a submodule. + fxrequired: Mark a submodule's checkout behavior, with allowed values: + - ToplevelRequired: Top-level and required (checked out only when this is the Toplevel module). + - ToplevelOptional: Top-level and optional (checked out with --optional flag if this is the Toplevel module). + - AlwaysRequired: Always required (always checked out). + - AlwaysOptional: Always optional (checked out with --optional flag). + fxsparse: Enable sparse checkout for a submodule, pointing to a file containing sparse checkout paths. + fxDONOTUSEurl: This is the url used in the test subcommand to assure that protected branches do not point to forks + **NOTE** the fxDONOTUSEurl variable is only used to identify the official project repository and should not be + changed by users. Use the url variable to change to a fork if desired. + +## Sparse Checkouts + + To enable sparse checkout for a submodule, set the fxsparse variable + in the .gitmodules file to the path of a file containing the desired + sparse checkout paths. Git-fleximod will automatically configure + sparse checkout based on this file when applicable commands are run. + See [git-sparse-checkout](https://git-scm.com/docs/git-sparse-checkout#_internalsfull_pattern_set) + for details on the format of this file. + +## Tests + + The git fleximod test action is designed to be used by, for example, github workflows + to assure that protected branches are consistant with respect to submodule hashes and fleximod fxtags + +## Examples + +Here are some common usage examples: + +Update all submodules, including optional ones: +```bash + git fleximod update --optional +``` + +Updating a specific submodule to the fxtag indicated in .gitmodules: + +```bash + git fleximod update submodule-name +``` +Example .gitmodules entry: +```ini, toml + [submodule "cosp2"] + path = src/physics/cosp2/src + url = https://github.com/CFMIP/COSPv2.0 + fxsparse = ../.cosp_sparse_checkout + fxrequired = AlwaysRequired + fxtag = v2.1.4cesm +``` +Explanation: + +This entry indicates that the submodule named cosp2 at tag v2.1.4cesm +should be checked out into the directory src/physics/cosp2/src +relative to the .gitmodules directory. It should be checked out from +the URL https://github.com/CFMIP/COSPv2.0 and use sparse checkout as +described in the file ../.cosp_sparse_checkout relative to the path +directory. It should be checked out anytime this .gitmodules entry is +read. + +Additional example: +```ini, toml + [submodule "cime"] + path = cime + url = https://github.com/jedwards4b/cime + fxrequired = ToplevelRequired + fxtag = cime6.0.198_rme01 +``` + +Explanation: + +This entry indicates that the submodule cime should be checked out +into a directory named cime at tag cime6.0.198_rme01 from the URL +https://github.com/jedwards4b/cime. This should only be done if +the .gitmodules file is at the top level of the repository clone. + +## Contributing + +We welcome contributions! Please see the CONTRIBUTING.md file for guidelines. + +## License + +Git-fleximod is released under the MIT License. 
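The four `fxrequired` values listed above combine two independent questions: whether this clone is the toplevel repository and whether `--optional` was passed. The sketch below restates that selection rule in isolation as a reading aid; it is not code from this patch, and the function name `is_checked_out_here` is invented for illustration.

```python
# Minimal restatement of the fxrequired selection rule described in the README above.
REQUIRED_VALUES = ("ToplevelRequired", "ToplevelOptional",
                   "AlwaysRequired", "AlwaysOptional")

def is_checked_out_here(fxrequired, at_toplevel, optional_flag):
    """Return True if a submodule with this fxrequired value should be checked
    out, given whether this clone is the toplevel repository and whether the
    --optional flag was supplied."""
    assert fxrequired in REQUIRED_VALUES
    if fxrequired.startswith("Toplevel") and not at_toplevel:
        return False   # Toplevel* entries only apply to the toplevel clone
    if fxrequired.endswith("Optional") and not optional_flag:
        return False   # *Optional entries need the --optional flag
    return True

print(is_checked_out_here("ToplevelRequired", at_toplevel=True,  optional_flag=False))  # True
print(is_checked_out_here("AlwaysOptional",   at_toplevel=False, optional_flag=False))  # False
```

In the patch itself, explicitly naming a component on the command line also enables the optional entries; see commandline_arguments later in this commit.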
diff --git a/doc/Makefile b/doc/Makefile new file mode 100644 index 0000000000..d4bb2cbb9e --- /dev/null +++ b/doc/Makefile @@ -0,0 +1,20 @@ +# Minimal makefile for Sphinx documentation +# + +# You can set these variables from the command line, and also +# from the environment for the first two. +SPHINXOPTS ?= +SPHINXBUILD ?= sphinx-build +SOURCEDIR = . +BUILDDIR = _build + +# Put it first so that "make" without argument is like "make help". +help: + @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) + +.PHONY: help Makefile + +# Catch-all target: route all unknown targets to Sphinx using the new +# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). +%: Makefile + @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) diff --git a/doc/conf.py b/doc/conf.py new file mode 100644 index 0000000000..423099eec9 --- /dev/null +++ b/doc/conf.py @@ -0,0 +1,26 @@ +# Configuration file for the Sphinx documentation builder. +# +# For the full list of built-in configuration values, see the documentation: +# https://www.sphinx-doc.org/en/master/usage/configuration.html + +# -- Project information ----------------------------------------------------- +# https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information + +project = "git-fleximod" +author = "Jim Edwards " +release = "0.4.0" + +# -- General configuration --------------------------------------------------- +# https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration + +extensions = ["sphinx_argparse_cli"] + +templates_path = ["_templates"] +exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"] + + +# -- Options for HTML output ------------------------------------------------- +# https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-output + +html_theme = "alabaster" +html_static_path = ["_static"] diff --git a/doc/index.rst b/doc/index.rst new file mode 100644 index 0000000000..0f9c1a7f7e --- /dev/null +++ b/doc/index.rst @@ -0,0 +1,24 @@ +.. git-fleximod documentation master file, created by + sphinx-quickstart on Sat Feb 3 12:02:22 2024. + You can adapt this file completely to your liking, but it should at least + contain the root `toctree` directive. + +Welcome to git-fleximod's documentation! +======================================== + +.. toctree:: + :maxdepth: 2 + :caption: Contents: +.. module:: sphinxcontrib.autoprogram +.. sphinx_argparse_cli:: + :module: git_fleximod.cli + :func: get_parser + :prog: git-fleximod + + +Indices and tables +================== + +* :ref:`genindex` +* :ref:`modindex` +* :ref:`search` diff --git a/doc/make.bat b/doc/make.bat new file mode 100644 index 0000000000..32bb24529f --- /dev/null +++ b/doc/make.bat @@ -0,0 +1,35 @@ +@ECHO OFF + +pushd %~dp0 + +REM Command file for Sphinx documentation + +if "%SPHINXBUILD%" == "" ( + set SPHINXBUILD=sphinx-build +) +set SOURCEDIR=. +set BUILDDIR=_build + +%SPHINXBUILD% >NUL 2>NUL +if errorlevel 9009 ( + echo. + echo.The 'sphinx-build' command was not found. Make sure you have Sphinx + echo.installed, then set the SPHINXBUILD environment variable to point + echo.to the full path of the 'sphinx-build' executable. Alternatively you + echo.may add the Sphinx directory to PATH. + echo. 
+ echo.If you don't have Sphinx installed, grab it from + echo.https://www.sphinx-doc.org/ + exit /b 1 +) + +if "%1" == "" goto help + +%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% +goto end + +:help +%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% + +:end +popd diff --git a/escomp_install b/escomp_install new file mode 100644 index 0000000000..ae782e72a4 --- /dev/null +++ b/escomp_install @@ -0,0 +1,25 @@ +#!/usr/bin/env python +# updates git-fleximod in an ESCOMP model +# this script should be run from the model root directory, it expects +# git-fleximod to already be installed with the script in bin +# and the classes in lib/python/site-packages +import sys +import shutil +import os + +from glob import iglob + +fleximod_root = sys.argv[1] +fleximod_path = os.path.join(fleximod_root,"src","git-fleximod") +if os.path.isfile(fleximod_path): + with open(fleximod_path,"r") as f: + fleximod = f.readlines() + with open(os.path.join(".","bin","git-fleximod"),"w") as f: + for line in fleximod: + f.write(line) + if "import argparse" in line: + f.write('\nsys.path.append(os.path.join(os.path.dirname(__file__),"..","lib","python","site-packages"))\n\n') + + for file in iglob(os.path.join(fleximod_root, "src", "fleximod", "*.py")): + shutil.copy(file, + os.path.join("lib","python","site-packages","fleximod",os.path.basename(file))) diff --git a/git_fleximod/__init__.py b/git_fleximod/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/git_fleximod/cli.py b/git_fleximod/cli.py new file mode 100644 index 0000000000..1fb959dad0 --- /dev/null +++ b/git_fleximod/cli.py @@ -0,0 +1,119 @@ +from pathlib import Path +import argparse + +__version__ = "0.7.4" + +def find_root_dir(filename=".git"): + d = Path.cwd() + root = Path(d.root) + while d != root: + attempt = d / filename + if attempt.is_dir(): + return attempt + d = d.parent + return None + + +def get_parser(): + description = """ + %(prog)s manages checking out groups of gitsubmodules with addtional support for Earth System Models + """ + parser = argparse.ArgumentParser( + description=description, formatter_class=argparse.RawDescriptionHelpFormatter + ) + + # + # user options + # + choices = ["update", "status", "test"] + parser.add_argument( + "action", + choices=choices, + default="update", + help=f"Subcommand of git-fleximod, choices are {choices[:-1]}", + ) + + parser.add_argument( + "components", + nargs="*", + help="Specific component(s) to checkout. By default, " + "all required submodules are checked out.", + ) + + parser.add_argument( + "-C", + "--path", + default=find_root_dir(), + help="Toplevel repository directory. Defaults to top git directory relative to current.", + ) + + parser.add_argument( + "-g", + "--gitmodules", + nargs="?", + default=".gitmodules", + help="The submodule description filename. " "Default: %(default)s.", + ) + + parser.add_argument( + "-x", + "--exclude", + nargs="*", + help="Component(s) listed in the gitmodules file which should be ignored.", + ) + parser.add_argument( + "-f", + "--force", + action="store_true", + default=False, + help="Override cautions and update or checkout over locally modified repository.", + ) + + parser.add_argument( + "-o", + "--optional", + action="store_true", + default=False, + help="By default only the required submodules " + "are checked out. 
This flag will also checkout the " + "optional submodules relative to the toplevel directory.", + ) + + parser.add_argument( + "-v", + "--verbose", + action="count", + default=0, + help="Output additional information to " + "the screen and log file. This flag can be " + "used up to two times, increasing the " + "verbosity level each time.", + ) + + parser.add_argument( + "-V", + "--version", + action="version", + version=f"%(prog)s {__version__}", + help="Print version and exit.", + ) + + # + # developer options + # + parser.add_argument( + "--backtrace", + action="store_true", + help="DEVELOPER: show exception backtraces as extra " "debugging output", + ) + + parser.add_argument( + "-d", + "--debug", + action="store_true", + default=False, + help="DEVELOPER: output additional debugging " + "information to the screen and log file.", + ) + + return parser diff --git a/git_fleximod/git_fleximod.py b/git_fleximod/git_fleximod.py new file mode 100755 index 0000000000..103cc82a50 --- /dev/null +++ b/git_fleximod/git_fleximod.py @@ -0,0 +1,601 @@ +#!/usr/bin/env python +import sys + +MIN_PYTHON = (3, 7) +if sys.version_info < MIN_PYTHON: + sys.exit("Python %s.%s or later is required." % MIN_PYTHON) + +import os +import shutil +import logging +import textwrap +from git_fleximod import utils +from git_fleximod import cli +from git_fleximod.gitinterface import GitInterface +from git_fleximod.gitmodules import GitModules +from configparser import NoOptionError + +# logger variable is global +logger = None + + +def fxrequired_allowed_values(): + return ["ToplevelRequired", "ToplevelOptional", "AlwaysRequired", "AlwaysOptional"] + + +def commandline_arguments(args=None): + parser = cli.get_parser() + + if args: + options = parser.parse_args(args) + else: + options = parser.parse_args() + + # explicitly listing a component overrides the optional flag + if options.optional or options.components: + fxrequired = [ + "ToplevelRequired", + "ToplevelOptional", + "AlwaysRequired", + "AlwaysOptional", + ] + else: + fxrequired = ["ToplevelRequired", "AlwaysRequired"] + + action = options.action + if not action: + action = "update" + handlers = [logging.StreamHandler()] + + if options.debug: + try: + open("fleximod.log", "w") + except PermissionError: + sys.exit("ABORT: Could not write file fleximod.log") + level = logging.DEBUG + handlers.append(logging.FileHandler("fleximod.log")) + elif options.verbose: + level = logging.INFO + else: + level = logging.WARNING + # Configure the root logger + logging.basicConfig( + level=level, format="%(name)s - %(levelname)s - %(message)s", handlers=handlers + ) + + if hasattr(options, "version"): + exit() + + return ( + options.path, + options.gitmodules, + fxrequired, + options.components, + options.exclude, + options.force, + action, + ) + + +def submodule_sparse_checkout(root_dir, name, url, path, sparsefile, tag="master"): + """ + This function performs a sparse checkout of a git submodule. It does so by first creating the .git/info/sparse-checkout fileq + in the submodule and then checking out the desired tag. If the submodule is already checked out, it will not be checked out again. + Creating the sparse-checkout file first prevents the entire submodule from being checked out and then removed. This is important + because the submodule may have a large number of files and checking out the entire submodule and then removing it would be time + and disk space consuming. + + Parameters: + root_dir (str): The root directory for the git operation. 
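As a quick orientation to the parser defined in cli.py above, here is how it behaves when driven programmatically. This assumes the git_fleximod package introduced by this commit is importable (installed or on sys.path); the excluded submodule name is only an example.

```python
from git_fleximod import cli

parser = cli.get_parser()
# Equivalent to: git fleximod update --optional -vv --exclude cosp2
opts = parser.parse_args(["update", "--optional", "-vv", "--exclude", "cosp2"])

print(opts.action)      # "update"
print(opts.optional)    # True
print(opts.verbose)     # 2 -- each -v increments the count
print(opts.exclude)     # ["cosp2"]
print(opts.components)  # [] -- no specific components requested
```

commandline_arguments() above then turns opts.optional and opts.components into the fxrequired filter, and the -v/-d flags into the logging level, before the rest of the tool runs.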
+ name (str): The name of the submodule. + url (str): The URL of the submodule. + path (str): The path to the submodule. + sparsefile (str): The sparse file for the submodule. + tag (str, optional): The tag to checkout. Defaults to "master". + + Returns: + None + """ + logger.info("Called sparse_checkout for {}".format(name)) + rgit = GitInterface(root_dir, logger) + superroot = rgit.git_operation("rev-parse", "--show-superproject-working-tree") + if superroot: + gitroot = superroot.strip() + else: + gitroot = root_dir.strip() + assert os.path.isdir(os.path.join(gitroot, ".git")) + # first create the module directory + if not os.path.isdir(os.path.join(root_dir, path)): + os.makedirs(os.path.join(root_dir, path)) + + # initialize a new git repo and set the sparse checkout flag + sprep_repo = os.path.join(root_dir, path) + sprepo_git = GitInterface(sprep_repo, logger) + if os.path.exists(os.path.join(sprep_repo, ".git")): + try: + logger.info("Submodule {} found".format(name)) + chk = sprepo_git.config_get_value("core", "sparseCheckout") + if chk == "true": + logger.info("Sparse submodule {} already checked out".format(name)) + return + except NoOptionError: + logger.debug("Sparse submodule {} not present".format(name)) + except Exception as e: + utils.fatal_error("Unexpected error {} occured.".format(e)) + + sprepo_git.config_set_value("core", "sparseCheckout", "true") + + # set the repository remote + + logger.info("Setting remote origin in {}/{}".format(root_dir, path)) + status = sprepo_git.git_operation("remote", "-v") + if url not in status: + sprepo_git.git_operation("remote", "add", "origin", url) + + topgit = os.path.join(gitroot, ".git") + + if gitroot != root_dir and os.path.isfile(os.path.join(root_dir, ".git")): + with open(os.path.join(root_dir, ".git")) as f: + gitpath = os.path.relpath( + os.path.join(root_dir, f.read().split()[1]), + start=os.path.join(root_dir, path), + ) + topgit = os.path.join(gitpath, "modules") + else: + topgit = os.path.relpath( + os.path.join(root_dir, ".git", "modules"), + start=os.path.join(root_dir, path), + ) + + with utils.pushd(sprep_repo): + if not os.path.isdir(topgit): + os.makedirs(topgit) + topgit += os.sep + name + + if os.path.isdir(os.path.join(root_dir, path, ".git")): + with utils.pushd(sprep_repo): + shutil.move(".git", topgit) + with open(".git", "w") as f: + f.write("gitdir: " + os.path.relpath(topgit)) + # assert(os.path.isdir(os.path.relpath(topgit, start=sprep_repo))) + gitsparse = os.path.abspath(os.path.join(topgit, "info", "sparse-checkout")) + if os.path.isfile(gitsparse): + logger.warning( + "submodule {} is already initialized {}".format(name, topgit) + ) + return + + with utils.pushd(sprep_repo): + shutil.copy(sparsefile, gitsparse) + + # Finally checkout the repo + sprepo_git.git_operation("fetch", "origin", "--tags") + sprepo_git.git_operation("checkout", tag) + + print(f"Successfully checked out {name:>20} at {tag}") + rgit.config_set_value(f'submodule "{name}"', "active", "true") + rgit.config_set_value(f'submodule "{name}"', "url", url) + + +def single_submodule_checkout( + root, name, path, url=None, tag=None, force=False, optional=False +): + """ + This function checks out a single git submodule. + + Parameters: + root (str): The root directory for the git operation. + name (str): The name of the submodule. + path (str): The path to the submodule. + url (str, optional): The URL of the submodule. Defaults to None. + tag (str, optional): The tag to checkout. Defaults to None. 
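Stripped of the .git-directory relocation details, submodule_sparse_checkout above amounts to a short sequence of ordinary git commands, with the sparse-checkout patterns installed before anything is fetched. The standalone sketch below uses subprocess directly rather than the GitInterface wrapper, and the commented example values are taken from the README entry earlier in this commit; it is a simplification, not the function's exact logic.

```python
import os
import shutil
import subprocess

def sparse_checkout_sketch(workdir, url, sparsefile, tag):
    """Simplified order of operations behind a sparse submodule checkout:
    enable sparse checkout and install the patterns *before* the first
    checkout so the full tree is never written to disk."""
    def run(*cmd):
        subprocess.check_call(["git", "-C", workdir] + list(cmd))

    os.makedirs(workdir, exist_ok=True)
    run("init")
    run("config", "core.sparseCheckout", "true")
    run("remote", "add", "origin", url)
    # Patterns go into .git/info/sparse-checkout before fetch/checkout.
    shutil.copy(sparsefile, os.path.join(workdir, ".git", "info", "sparse-checkout"))
    run("fetch", "origin", "--tags")
    run("checkout", tag)

# Example values (from the cosp2 README example; adjust to your layout):
# sparse_checkout_sketch("src/physics/cosp2/src",
#                        "https://github.com/CFMIP/COSPv2.0",
#                        ".cosp_sparse_checkout", "v2.1.4cesm")
```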
+ force (bool, optional): If set to True, forces the checkout operation. Defaults to False. + optional (bool, optional): If set to True, the submodule is considered optional. Defaults to False. + + Returns: + None + """ + # function implementation... + git = GitInterface(root, logger) + repodir = os.path.join(root, path) + logger.info("Checkout {} into {}/{}".format(name, root, path)) + # if url is provided update to the new url + tmpurl = None + repo_exists = False + if os.path.exists(os.path.join(repodir, ".git")): + logger.info("Submodule {} already checked out".format(name)) + repo_exists = True + # Look for a .gitmodules file in the newly checkedout repo + if not repo_exists and url: + # ssh urls cause problems for those who dont have git accounts with ssh keys defined + # but cime has one since e3sm prefers ssh to https, because the .gitmodules file was + # opened with a GitModules object we don't need to worry about restoring the file here + # it will be done by the GitModules class + if url.startswith("git@"): + tmpurl = url + url = url.replace("git@github.com:", "https://github.com/") + git.git_operation("clone", url, path) + smgit = GitInterface(repodir, logger) + if not tag: + tag = smgit.git_operation("describe", "--tags", "--always").rstrip() + smgit.git_operation("checkout", tag) + # Now need to move the .git dir to the submodule location + rootdotgit = os.path.join(root, ".git") + if os.path.isfile(rootdotgit): + with open(rootdotgit) as f: + line = f.readline() + if line.startswith("gitdir: "): + rootdotgit = line[8:].rstrip() + + newpath = os.path.abspath(os.path.join(root, rootdotgit, "modules", name)) + if os.path.exists(newpath): + shutil.rmtree(os.path.join(repodir, ".git")) + else: + shutil.move(os.path.join(repodir, ".git"), newpath) + + with open(os.path.join(repodir, ".git"), "w") as f: + f.write("gitdir: " + os.path.relpath(newpath, start=repodir)) + + if not os.path.exists(repodir): + parent = os.path.dirname(repodir) + if not os.path.isdir(parent): + os.makedirs(parent) + git.git_operation("submodule", "add", "--name", name, "--", url, path) + + if not repo_exists or not tmpurl: + git.git_operation("submodule", "update", "--init", "--", path) + + if os.path.exists(os.path.join(repodir, ".gitmodules")): + # recursively handle this checkout + print(f"Recursively checking out submodules of {name}") + gitmodules = GitModules(logger, confpath=repodir) + requiredlist = ["AlwaysRequired"] + if optional: + requiredlist.append("AlwaysOptional") + submodules_checkout(gitmodules, repodir, requiredlist, force=force) + if not os.path.exists(os.path.join(repodir, ".git")): + utils.fatal_error( + f"Failed to checkout {name} {repo_exists} {tmpurl} {repodir} {path}" + ) + + if tmpurl: + print(git.git_operation("restore", ".gitmodules")) + + return + + +def submodules_status(gitmodules, root_dir, toplevel=False): + testfails = 0 + localmods = 0 + needsupdate = 0 + for name in gitmodules.sections(): + path = gitmodules.get(name, "path") + tag = gitmodules.get(name, "fxtag") + required = gitmodules.get(name, "fxrequired") + level = required and "Toplevel" in required + if not path: + utils.fatal_error("No path found in .gitmodules for {}".format(name)) + newpath = os.path.join(root_dir, path) + logger.debug("newpath is {}".format(newpath)) + if not os.path.exists(os.path.join(newpath, ".git")): + rootgit = GitInterface(root_dir, logger) + # submodule commands use path, not name + url = gitmodules.get(name, "url") + url = url.replace("git@github.com:", "https://github.com/") + tags 
= rootgit.git_operation("ls-remote", "--tags", url) + atag = None + needsupdate += 1 + if not toplevel and level: + continue + for htag in tags.split("\n"): + if tag and tag in htag: + atag = (htag.split()[1])[10:] + break + if tag and tag == atag: + print(f"e {name:>20} not checked out, aligned at tag {tag}") + elif tag: + ahash = rootgit.git_operation( + "submodule", "status", "{}".format(path) + ).rstrip() + ahash = ahash[1 : len(tag) + 1] + if tag == ahash: + print(f"e {name:>20} not checked out, aligned at hash {ahash}") + else: + print( + f"e {name:>20} not checked out, out of sync at tag {atag}, expected tag is {tag}" + ) + testfails += 1 + else: + print(f"e {name:>20} has no fxtag defined in .gitmodules") + testfails += 1 + else: + with utils.pushd(newpath): + git = GitInterface(newpath, logger) + atag = git.git_operation("describe", "--tags", "--always").rstrip() + ahash = git.git_operation("status").partition("\n")[0].split()[-1] + if tag and atag == tag: + print(f" {name:>20} at tag {tag}") + elif tag and ahash[: len(tag)] == tag: + print(f" {name:>20} at hash {ahash}") + elif atag == ahash: + print(f" {name:>20} at hash {ahash}") + elif tag: + print( + f"s {name:>20} {atag} {ahash} is out of sync with .gitmodules {tag}" + ) + testfails += 1 + needsupdate += 1 + else: + print( + f"e {name:>20} has no fxtag defined in .gitmodules, module at {atag}" + ) + testfails += 1 + + status = git.git_operation("status", "--ignore-submodules") + if "nothing to commit" not in status: + localmods = localmods + 1 + print("M" + textwrap.indent(status, " ")) + + return testfails, localmods, needsupdate + + +def submodules_update(gitmodules, root_dir, requiredlist, force): + _, localmods, needsupdate = submodules_status(gitmodules, root_dir) + + if localmods and not force: + local_mods_output() + return + if needsupdate == 0: + return + + for name in gitmodules.sections(): + fxtag = gitmodules.get(name, "fxtag") + path = gitmodules.get(name, "path") + url = gitmodules.get(name, "url") + logger.info( + "name={} path={} url={} fxtag={} requiredlist={}".format( + name, os.path.join(root_dir, path), url, fxtag, requiredlist + ) + ) + # if not os.path.exists(os.path.join(root_dir,path, ".git")): + fxrequired = gitmodules.get(name, "fxrequired") + assert fxrequired in fxrequired_allowed_values() + rgit = GitInterface(root_dir, logger) + superroot = rgit.git_operation("rev-parse", "--show-superproject-working-tree") + + fxsparse = gitmodules.get(name, "fxsparse") + + if ( + fxrequired + and (superroot and "Toplevel" in fxrequired) + or fxrequired not in requiredlist + ): + if "ToplevelOptional" == fxrequired: + print("Skipping optional component {}".format(name)) + continue + if fxsparse: + logger.debug( + "Callng submodule_sparse_checkout({}, {}, {}, {}, {}, {}".format( + root_dir, name, url, path, fxsparse, fxtag + ) + ) + submodule_sparse_checkout(root_dir, name, url, path, fxsparse, tag=fxtag) + else: + logger.info( + "Calling submodule_checkout({},{},{},{})".format( + root_dir, name, path, url + ) + ) + + single_submodule_checkout( + root_dir, + name, + path, + url=url, + tag=fxtag, + force=force, + optional=("AlwaysOptional" in requiredlist), + ) + + if os.path.exists(os.path.join(path, ".git")): + submoddir = os.path.join(root_dir, path) + with utils.pushd(submoddir): + git = GitInterface(submoddir, logger) + # first make sure the url is correct + upstream = git.git_operation("ls-remote", "--get-url").rstrip() + newremote = "origin" + if upstream != url: + # TODO - this needs to be a unique name + 
remotes = git.git_operation("remote", "-v") + if url in remotes: + for line in remotes: + if url in line and "fetch" in line: + newremote = line.split()[0] + break + else: + i = 0 + while newremote in remotes: + i = i + 1 + newremote = f"newremote.{i:02d}" + git.git_operation("remote", "add", newremote, url) + + tags = git.git_operation("tag", "-l") + if fxtag and fxtag not in tags: + git.git_operation("fetch", newremote, "--tags") + atag = git.git_operation("describe", "--tags", "--always").rstrip() + if fxtag and fxtag != atag: + try: + git.git_operation("checkout", fxtag) + print(f"{name:>20} updated to {fxtag}") + except Exception as error: + print(error) + elif not fxtag: + print(f"No fxtag found for submodule {name:>20}") + else: + print(f"{name:>20} up to date.") + + +def local_mods_output(): + text = """\ + The submodules labeled with 'M' above are not in a clean state. + The following are options for how to proceed: + (1) Go into each submodule which is not in a clean state and issue a 'git status' + Either revert or commit your changes so that the submodule is in a clean state. + (2) use the --force option to git-fleximod + (3) you can name the particular submodules to update using the git-fleximod command line + (4) As a last resort you can remove the submodule (via 'rm -fr [directory]') + then rerun git-fleximod update. +""" + print(text) + + +# checkout is done by update if required so this function may be depricated +def submodules_checkout(gitmodules, root_dir, requiredlist, force=False): + """ + This function checks out all git submodules based on the provided parameters. + + Parameters: + gitmodules (ConfigParser): The gitmodules configuration. + root_dir (str): The root directory for the git operation. + requiredlist (list): The list of required modules. + force (bool, optional): If set to True, forces the checkout operation. Defaults to False. + + Returns: + None + """ + # function implementation... + print("") + _, localmods, needsupdate = submodules_status(gitmodules, root_dir) + if localmods and not force: + local_mods_output() + return + if not needsupdate: + return + for name in gitmodules.sections(): + fxrequired = gitmodules.get(name, "fxrequired") + fxsparse = gitmodules.get(name, "fxsparse") + fxtag = gitmodules.get(name, "fxtag") + path = gitmodules.get(name, "path") + url = gitmodules.get(name, "url") + if fxrequired and fxrequired not in requiredlist: + if "Optional" in fxrequired: + print("Skipping optional component {}".format(name)) + continue + + if fxsparse: + logger.debug( + "Callng submodule_sparse_checkout({}, {}, {}, {}, {}, {}".format( + root_dir, name, url, path, fxsparse, fxtag + ) + ) + submodule_sparse_checkout(root_dir, name, url, path, fxsparse, tag=fxtag) + else: + logger.debug( + "Calling submodule_checkout({},{},{})".format(root_dir, name, path) + ) + single_submodule_checkout( + root_dir, + name, + path, + url=url, + tag=fxtag, + force=force, + optional="AlwaysOptional" in requiredlist, + ) + + +def submodules_test(gitmodules, root_dir): + """ + This function tests the git submodules based on the provided parameters. + + It first checks that fxtags are present and in sync with submodule hashes. + Then it ensures that urls are consistent with fxurls (not forks and not ssh) + and that sparse checkout files exist. + + Parameters: + gitmodules (ConfigParser): The gitmodules configuration. + root_dir (str): The root directory for the git operation. + + Returns: + int: The number of test failures. 
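The update loop above ultimately reduces to one comparison: the fxtag recorded in .gitmodules versus what `git describe --tags --always` reports inside the submodule working tree (submodules_status additionally accepts a matching commit-hash prefix). Below is a standalone restatement of that check using subprocess instead of the GitInterface wrapper; the commented path and tag are only examples.

```python
import subprocess

def needs_update(submodule_dir, fxtag):
    """Return True if the working tree at submodule_dir is not sitting at fxtag.
    Restates the atag/fxtag comparison made by submodules_status/submodules_update."""
    atag = subprocess.check_output(
        ["git", "-C", submodule_dir, "describe", "--tags", "--always"],
        universal_newlines=True).strip()
    # fxtag may be a tag name or a (possibly abbreviated) commit hash.
    return not (atag == fxtag or atag.startswith(fxtag))

# Example (values from the README's cime entry, purely illustrative):
# if needs_update("cime", "cime6.0.198_rme01"):
#     print("checkout required")
```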
+ """ + # First check that fxtags are present and in sync with submodule hashes + testfails, localmods, needsupdate = submodules_status(gitmodules, root_dir) + print("") + # Then make sure that urls are consistant with fxurls (not forks and not ssh) + # and that sparse checkout files exist + for name in gitmodules.sections(): + url = gitmodules.get(name, "url") + fxurl = gitmodules.get(name, "fxDONOTMODIFYurl") + fxsparse = gitmodules.get(name, "fxsparse") + path = gitmodules.get(name, "path") + fxurl = fxurl[:-4] if fxurl.endswith(".git") else fxurl + url = url[:-4] if url.endswith(".git") else url + if not fxurl or url.lower() != fxurl.lower(): + print(f"{name:>20} url {url} not in sync with required {fxurl}") + testfails += 1 + if fxsparse and not os.path.isfile(os.path.join(root_dir, path, fxsparse)): + print(f"{name:>20} sparse checkout file {fxsparse} not found") + testfails += 1 + return testfails + localmods + needsupdate + + +def main(): + ( + root_dir, + file_name, + fxrequired, + includelist, + excludelist, + force, + action, + ) = commandline_arguments() + # Get a logger for the package + global logger + logger = logging.getLogger(__name__) + + logger.info("action is {}".format(action)) + + if not os.path.isfile(os.path.join(root_dir, file_name)): + file_path = utils.find_upwards(root_dir, file_name) + + if file_path is None: + utils.fatal_error( + "No {} found in {} or any of it's parents".format(file_name, root_dir) + ) + + root_dir = os.path.dirname(file_path) + logger.info( + "root_dir is {} includelist={} excludelist={}".format( + root_dir, includelist, excludelist + ) + ) + gitmodules = GitModules( + logger, + confpath=root_dir, + conffile=file_name, + includelist=includelist, + excludelist=excludelist, + ) + if not gitmodules.sections(): + sys.exit("No submodule components found") + retval = 0 + if action == "update": + submodules_update(gitmodules, root_dir, fxrequired, force) + elif action == "status": + tfails, lmods, updates = submodules_status(gitmodules, root_dir, toplevel=True) + if tfails + lmods + updates > 0: + print( + f" testfails = {tfails}, local mods = {lmods}, needs updates {updates}\n" + ) + if lmods > 0: + local_mods_output() + elif action == "test": + retval = submodules_test(gitmodules, root_dir) + else: + utils.fatal_error(f"unrecognized action request {action}") + return retval + + +if __name__ == "__main__": + sys.exit(main()) diff --git a/git_fleximod/gitinterface.py b/git_fleximod/gitinterface.py new file mode 100644 index 0000000000..93ae38ecde --- /dev/null +++ b/git_fleximod/gitinterface.py @@ -0,0 +1,79 @@ +import os +import sys +from . 
import utils +from pathlib import Path + +class GitInterface: + def __init__(self, repo_path, logger): + logger.debug("Initialize GitInterface for {}".format(repo_path)) + if isinstance(repo_path, str): + self.repo_path = Path(repo_path).resolve() + elif isinstance(repo_path, Path): + self.repo_path = repo_path.resolve() + else: + raise TypeError("repo_path must be a str or Path object") + self.logger = logger + try: + import git + + self._use_module = True + try: + self.repo = git.Repo(str(self.repo_path)) # Initialize GitPython repo + except git.exc.InvalidGitRepositoryError: + self.git = git + self._init_git_repo() + msg = "Using GitPython interface to git" + except ImportError: + self._use_module = False + if not (self.repo_path / ".git").exists(): + self._init_git_repo() + msg = "Using shell interface to git" + self.logger.info(msg) + + def _git_command(self, operation, *args): + self.logger.info(operation) + if self._use_module and operation != "submodule": + try: + return getattr(self.repo.git, operation)(*args) + except Exception as e: + sys.exit(e) + else: + return ["git", "-C", str(self.repo_path), operation] + list(args) + + def _init_git_repo(self): + if self._use_module: + self.repo = self.git.Repo.init(str(self.repo_path)) + else: + command = ("git", "-C", str(self.repo_path), "init") + utils.execute_subprocess(command) + + # pylint: disable=unused-argument + def git_operation(self, operation, *args, **kwargs): + command = self._git_command(operation, *args) + self.logger.info(command) + if isinstance(command, list): + try: + return utils.execute_subprocess(command, output_to_caller=True) + except Exception as e: + sys.exit(e) + else: + return command + + def config_get_value(self, section, name): + if self._use_module: + config = self.repo.config_reader() + return config.get_value(section, name) + else: + cmd = ("git", "-C", str(self.repo_path), "config", "--get", f"{section}.{name}") + output = utils.execute_subprocess(cmd, output_to_caller=True) + return output.strip() + + def config_set_value(self, section, name, value): + if self._use_module: + with self.repo.config_writer() as writer: + writer.set_value(section, name, value) + writer.release() # Ensure changes are saved + else: + cmd = ("git", "-C", str(self.repo_path), "config", f"{section}.{name}", value) + self.logger.info(cmd) + utils.execute_subprocess(cmd, output_to_caller=True) diff --git a/git_fleximod/gitmodules.py b/git_fleximod/gitmodules.py new file mode 100644 index 0000000000..68c82d066f --- /dev/null +++ b/git_fleximod/gitmodules.py @@ -0,0 +1,97 @@ +import shutil +from pathlib import Path +from configparser import RawConfigParser, ConfigParser +from .lstripreader import LstripReader + + +class GitModules(RawConfigParser): + def __init__( + self, + logger, + confpath=Path.cwd(), + conffile=".gitmodules", + includelist=None, + excludelist=None, + ): + """ + confpath: Path to the directory containing the .gitmodules file (defaults to the current working directory). + conffile: Name of the configuration file (defaults to .gitmodules). + includelist: Optional list of submodules to include. + excludelist: Optional list of submodules to exclude. 
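GitInterface above is how the rest of the package talks to git: it prefers the GitPython module when it can be imported and otherwise falls back to shelling out, with the same call signatures either way. A short usage sketch against an existing clone (the repository path "." is arbitrary), assuming the package is importable:

```python
import logging
from git_fleximod.gitinterface import GitInterface

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger("demo")

# Point this at an existing clone; behaviour is the same whether GitPython
# is installed (module path) or not (subprocess fallback).
git = GitInterface(".", logger)
print(git.git_operation("status", "--short"))
print(git.git_operation("describe", "--tags", "--always"))
```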
+ """ + self.logger = logger + self.logger.debug( + "Creating a GitModules object {} {} {} {}".format( + confpath, conffile, includelist, excludelist + ) + ) + super().__init__() + self.conf_file = (Path(confpath) / Path(conffile)) + if self.conf_file.exists(): + self.read_file(LstripReader(str(self.conf_file)), source=conffile) + self.includelist = includelist + self.excludelist = excludelist + self.isdirty = False + + def reload(self): + self.clear() + if self.conf_file.exists(): + self.read_file(LstripReader(str(self.conf_file)), source=self.conf_file) + + + def set(self, name, option, value): + """ + Sets a configuration value for a specific submodule: + Ensures the appropriate section exists for the submodule. + Calls the parent class's set method to store the value. + """ + self.isdirty = True + self.logger.debug("set called {} {} {}".format(name, option, value)) + section = f'submodule "{name}"' + if not self.has_section(section): + self.add_section(section) + super().set(section, option, str(value)) + + # pylint: disable=redefined-builtin, arguments-differ + def get(self, name, option, raw=False, vars=None, fallback=None): + """ + Retrieves a configuration value for a specific submodule: + Uses the parent class's get method to access the value. + Handles potential errors if the section or option doesn't exist. + """ + self.logger.debug("get called {} {}".format(name, option)) + section = f'submodule "{name}"' + try: + return ConfigParser.get( + self, section, option, raw=raw, vars=vars, fallback=fallback + ) + except ConfigParser.NoOptionError: + return None + + def save(self): + if self.isdirty: + self.logger.info("Writing {}".format(self.conf_file)) + with open(self.conf_file, "w") as fd: + self.write(fd) + self.isdirty = False + + def __del__(self): + self.save() + + def sections(self): + """Strip the submodule part out of section and just use the name""" + self.logger.debug("calling GitModules sections iterator") + names = [] + for section in ConfigParser.sections(self): + name = section[11:-1] + if self.includelist and name not in self.includelist: + continue + if self.excludelist and name in self.excludelist: + continue + names.append(name) + return names + + def items(self, name, raw=False, vars=None): + self.logger.debug("calling GitModules items for {}".format(name)) + section = f'submodule "{name}"' + return ConfigParser.items(section, raw=raw, vars=vars) diff --git a/git_fleximod/lstripreader.py b/git_fleximod/lstripreader.py new file mode 100644 index 0000000000..01d5580ee8 --- /dev/null +++ b/git_fleximod/lstripreader.py @@ -0,0 +1,43 @@ +class LstripReader(object): + "LstripReader formats .gitmodules files to be acceptable for configparser" + + def __init__(self, filename): + with open(filename, "r") as infile: + lines = infile.readlines() + self._lines = list() + self._num_lines = len(lines) + self._index = 0 + for line in lines: + self._lines.append(line.lstrip()) + + def readlines(self): + """Return all the lines from this object's file""" + return self._lines + + def readline(self, size=-1): + """Format and return the next line or raise StopIteration""" + try: + line = self.next() + except StopIteration: + line = "" + + if (size > 0) and (len(line) < size): + return line[0:size] + + return line + + def __iter__(self): + """Begin an iteration""" + self._index = 0 + return self + + def next(self): + """Return the next line or raise StopIteration""" + if self._index >= self._num_lines: + raise StopIteration + + self._index = self._index + 1 + return 
self._lines[self._index - 1] + + def __next__(self): + return self.next() diff --git a/git_fleximod/metoflexi.py b/git_fleximod/metoflexi.py new file mode 100755 index 0000000000..cc347db2dd --- /dev/null +++ b/git_fleximod/metoflexi.py @@ -0,0 +1,236 @@ +#!/usr/bin/env python +from configparser import ConfigParser +import sys +import shutil +from pathlib import Path +import argparse +import logging +from git_fleximod.gitinterface import GitInterface +from git_fleximod.gitmodules import GitModules +from git_fleximod import utils + +logger = None + +def find_root_dir(filename=".git"): + d = Path.cwd() + root = Path(d.root) + while d != root: + attempt = d / filename + if attempt.is_dir(): + return d + d = d.parent + return None + + +def get_parser(): + description = """ + %(prog)s manages checking out groups of gitsubmodules with addtional support for Earth System Models + """ + parser = argparse.ArgumentParser( + description=description, formatter_class=argparse.RawDescriptionHelpFormatter + ) + + parser.add_argument('-e', '--externals', nargs='?', + default='Externals.cfg', + help='The externals description filename. ' + 'Default: %(default)s.') + + parser.add_argument( + "-C", + "--path", + default=find_root_dir(), + help="Toplevel repository directory. Defaults to top git directory relative to current.", + ) + + parser.add_argument( + "-g", + "--gitmodules", + nargs="?", + default=".gitmodules", + help="The submodule description filename. " "Default: %(default)s.", + ) + parser.add_argument( + "-v", + "--verbose", + action="count", + default=0, + help="Output additional information to " + "the screen and log file. This flag can be " + "used up to two times, increasing the " + "verbosity level each time.", + ) + parser.add_argument( + "-d", + "--debug", + action="store_true", + default=False, + help="DEVELOPER: output additional debugging " + "information to the screen and log file.", + ) + + return parser + +def commandline_arguments(args=None): + parser = get_parser() + + options = parser.parse_args(args) + handlers = [logging.StreamHandler()] + + if options.debug: + try: + open("fleximod.log", "w") + except PermissionError: + sys.exit("ABORT: Could not write file fleximod.log") + level = logging.DEBUG + handlers.append(logging.FileHandler("fleximod.log")) + elif options.verbose: + level = logging.INFO + else: + level = logging.WARNING + # Configure the root logger + logging.basicConfig( + level=level, format="%(name)s - %(levelname)s - %(message)s", handlers=handlers + ) + + return( + options.path, + options.gitmodules, + options.externals + ) + +class ExternalRepoTranslator: + """ + Translates external repositories configured in an INI-style externals file. + """ + + def __init__(self, rootpath, gitmodules, externals): + self.rootpath = rootpath + if gitmodules: + self.gitmodules = GitModules(logger, confpath=rootpath) + self.externals = (rootpath / Path(externals)).resolve() + print(f"Translating {self.externals}") + self.git = GitInterface(rootpath, logger) + +# def __del__(self): +# if (self.rootpath / "save.gitignore"): + + + def translate_single_repo(self, section, tag, url, path, efile, hash_, sparse, protocol): + """ + Translates a single repository based on configuration details. + + Args: + rootpath (str): Root path of the main repository. + gitmodules (str): Path to the .gitmodules file. + tag (str): The tag to use for the external repository. + url (str): The URL of the external repository. 
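Both git-fleximod and this translator read .gitmodules through the GitModules class shown earlier; the LstripReader it wraps around the file exists because the leading tabs in a normal .gitmodules would otherwise trip up configparser's indentation handling. A minimal read-only usage sketch (run from the top of a clone; assumes the package is importable):

```python
import logging
from git_fleximod.gitmodules import GitModules

logging.basicConfig(level=logging.WARNING)
logger = logging.getLogger("demo")

# Reads ./.gitmodules by default; section names come back without the
# `submodule "..."` wrapper, and absent keys return None.
gitmodules = GitModules(logger, confpath=".")
for name in gitmodules.sections():
    print(name,
          gitmodules.get(name, "path"),
          gitmodules.get(name, "fxtag"),
          gitmodules.get(name, "fxrequired"))
```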
+ path (str): The relative path within the main repository for the external repository. + efile (str): The external file or file containing submodules. + hash_ (str): The commit hash to checkout (if applicable). + sparse (str): Boolean indicating whether to use sparse checkout (if applicable). + protocol (str): The protocol to use (e.g., 'git', 'http'). + """ + assert protocol != "svn", "SVN protocol is not currently supported" + print(f"Translating repository {section}") + if efile: + file_path = Path(path) / Path(efile) + newroot = (self.rootpath / file_path).parent.resolve() + if not newroot.exists(): + newroot.mkdir(parents=True) + logger.info("Newroot is {}".format(newroot)) + newt = ExternalRepoTranslator(newroot, ".gitmodules", efile) + newt.translate_repo() + if protocol == "externals_only": + if tag: + self.gitmodules.set(section, "fxtag", tag) + if hash_: + self.gitmodules.set(section, "fxtag", hash_) + + self.gitmodules.set(section, "fxDONOTUSEurl", url) + if sparse: + self.gitmodules.set(section, "fxsparse", sparse) + self.gitmodules.set(section, "fxrequired", "ToplevelRequired") + else: + newpath = (self.rootpath / Path(path)) + if newpath.exists(): + shutil.rmtree(newpath) + logger.info("Creating directory {}".format(newpath)) + newpath.mkdir(parents=True) + if tag: + logger.info("cloning {}".format(section)) + try: + self.git.git_operation("clone", "-b", tag, "--depth", "1", url, path) + except: + self.git.git_operation("clone", url, path) + with utils.pushd(newpath): + ngit = GitInterface(newpath, logger) + ngit.git_operation("checkout", tag) + if hash_: + self.git.git_operation("clone", url, path) + git = GitInterface(newpath, logger) + git.git_operation("fetch", "origin") + git.git_operation("checkout", hash_) + if sparse: + print("setting as sparse submodule {}".format(section)) + sparsefile = (newpath / Path(sparse)) + newfile = (newpath / ".git" / "info" / "sparse-checkout") + print(f"sparsefile {sparsefile} newfile {newfile}") + shutil.copy(sparsefile, newfile) + + logger.info("adding submodule {}".format(section)) + self.gitmodules.save() + self.git.git_operation("submodule", "add", "-f", "--name", section, url, path) + self.git.git_operation("submodule","absorbgitdirs") + self.gitmodules.reload() + if tag: + self.gitmodules.set(section, "fxtag", tag) + if hash_: + self.gitmodules.set(section, "fxtag", hash_) + + self.gitmodules.set(section, "fxDONOTUSEurl", url) + if sparse: + self.gitmodules.set(section, "fxsparse", sparse) + self.gitmodules.set(section, "fxrequired", "ToplevelRequired") + + + def translate_repo(self): + """ + Translates external repositories defined within an external file. + + Args: + rootpath (str): Root path of the main repository. + gitmodules (str): Path to the .gitmodules file. + external_file (str): The path to the external file containing repository definitions. 
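For context on what translate_repo consumes: Externals.cfg is the INI-style description file used by the manage_externals tooling being removed earlier in this series, and the translator maps each section's keys onto the fx* .gitmodules variables. The entry below is hypothetical, and the sketch only demonstrates which keys the translator queries (each falls back to None when absent).

```python
from configparser import ConfigParser
from io import StringIO

# Hypothetical Externals.cfg entry in the manage_externals format.
EXAMPLE = """\
[cosp2]
tag = v2.1.4cesm
protocol = git
repo_url = https://github.com/CFMIP/COSPv2.0
local_path = src/physics/cosp2/src
sparse = ../.cosp_sparse_checkout
required = True

[externals_description]
schema_version = 1.0.0
"""

econfig = ConfigParser()
econfig.read_file(StringIO(EXAMPLE))

for section in econfig.sections():
    if section == "externals_description":
        continue  # metadata section, not a repository entry
    # The same keys translate_repo() queries, with the same None fallback.
    for key in ("tag", "repo_url", "local_path", "externals", "hash", "sparse", "protocol"):
        print(section, key, econfig.get(section, key, fallback=None))
```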
+ """ + econfig = ConfigParser() + econfig.read((self.rootpath / Path(self.externals))) + + for section in econfig.sections(): + if section == "externals_description": + logger.info("skipping section {}".format(section)) + return + logger.info("Translating section {}".format(section)) + tag = econfig.get(section, "tag", raw=False, fallback=None) + url = econfig.get(section, "repo_url", raw=False, fallback=None) + path = econfig.get(section, "local_path", raw=False, fallback=None) + efile = econfig.get(section, "externals", raw=False, fallback=None) + hash_ = econfig.get(section, "hash", raw=False, fallback=None) + sparse = econfig.get(section, "sparse", raw=False, fallback=None) + protocol = econfig.get(section, "protocol", raw=False, fallback=None) + + self.translate_single_repo(section, tag, url, path, efile, hash_, sparse, protocol) + + + +def _main(): + rootpath, gitmodules, externals = commandline_arguments() + global logger + logger = logging.getLogger(__name__) + with utils.pushd(rootpath): + t = ExternalRepoTranslator(Path(rootpath), gitmodules, externals) + logger.info("Translating {}".format(rootpath)) + t.translate_repo() + + +if __name__ == "__main__": + sys.exit(_main()) diff --git a/git_fleximod/utils.py b/git_fleximod/utils.py new file mode 100644 index 0000000000..7cc1de38cc --- /dev/null +++ b/git_fleximod/utils.py @@ -0,0 +1,365 @@ +#!/usr/bin/env python3 +""" +Common public utilities for manic package + +""" + +import logging +import os +import subprocess +import sys +from threading import Timer +from pathlib import Path + +LOCAL_PATH_INDICATOR = "." +# --------------------------------------------------------------------- +# +# functions to massage text for output and other useful utilities +# +# --------------------------------------------------------------------- +from contextlib import contextmanager + + +@contextmanager +def pushd(new_dir): + """context for chdir. usage: with pushd(new_dir)""" + previous_dir = os.getcwd() + os.chdir(new_dir) + try: + yield + finally: + os.chdir(previous_dir) + + +def log_process_output(output): + """Log each line of process output at debug level so it can be + filtered if necessary. By default, output is a single string, and + logging.debug(output) will only put log info heading on the first + line. This makes it hard to filter with grep. + + """ + output = output.split("\n") + for line in output: + logging.debug(line) + + +def printlog(msg, **kwargs): + """Wrapper script around print to ensure that everything printed to + the screen also gets logged. + + """ + logging.info(msg) + if kwargs: + print(msg, **kwargs) + else: + print(msg) + sys.stdout.flush() + + +def find_upwards(root_dir, filename): + """Find a file in root dir or any of it's parents""" + d = Path(root_dir) + root = Path(d.root) + while d != root: + attempt = d / filename + if attempt.exists(): + return attempt + d = d.parent + return None + + +def last_n_lines(the_string, n_lines, truncation_message=None): + """Returns the last n lines of the given string + + Args: + the_string: str + n_lines: int + truncation_message: str, optional + + Returns a string containing the last n lines of the_string + + If truncation_message is provided, the returned string begins with + the given message if and only if the string is greater than n lines + to begin with. 
+ """ + + lines = the_string.splitlines(True) + if len(lines) <= n_lines: + return_val = the_string + else: + lines_subset = lines[-n_lines:] + str_truncated = "".join(lines_subset) + if truncation_message: + str_truncated = truncation_message + "\n" + str_truncated + return_val = str_truncated + + return return_val + + +def indent_string(the_string, indent_level): + """Indents the given string by a given number of spaces + + Args: + the_string: str + indent_level: int + + Returns a new string that is the same as the_string, except that + each line is indented by 'indent_level' spaces. + + In python3, this can be done with textwrap.indent. + """ + + lines = the_string.splitlines(True) + padding = " " * indent_level + lines_indented = [padding + line for line in lines] + return "".join(lines_indented) + + +# --------------------------------------------------------------------- +# +# error handling +# +# --------------------------------------------------------------------- + + +def fatal_error(message): + """ + Error output function + """ + logging.error(message) + raise RuntimeError("{0}ERROR: {1}".format(os.linesep, message)) + + +# --------------------------------------------------------------------- +# +# Data conversion / manipulation +# +# --------------------------------------------------------------------- +def str_to_bool(bool_str): + """Convert a sting representation of as boolean into a true boolean. + + Conversion should be case insensitive. + """ + value = None + str_lower = bool_str.lower() + if str_lower in ("true", "t"): + value = True + elif str_lower in ("false", "f"): + value = False + if value is None: + msg = ( + 'ERROR: invalid boolean string value "{0}". ' + 'Must be "true" or "false"'.format(bool_str) + ) + fatal_error(msg) + return value + + +REMOTE_PREFIXES = ["http://", "https://", "ssh://", "git@"] + + +def is_remote_url(url): + """check if the user provided a local file path instead of a + remote. If so, it must be expanded to an absolute + path. + + """ + remote_url = False + for prefix in REMOTE_PREFIXES: + if url.startswith(prefix): + remote_url = True + return remote_url + + +def split_remote_url(url): + """check if the user provided a local file path or a + remote. If remote, try to strip off protocol info. + + """ + remote_url = is_remote_url(url) + if not remote_url: + return url + + for prefix in REMOTE_PREFIXES: + url = url.replace(prefix, "") + + if "@" in url: + url = url.split("@")[1] + + if ":" in url: + url = url.split(":")[1] + + return url + + +def expand_local_url(url, field): + """check if the user provided a local file path instead of a + remote. If so, it must be expanded to an absolute + path. + + Note: local paths of LOCAL_PATH_INDICATOR have special meaning and + represent local copy only, don't work with the remotes. + + """ + remote_url = is_remote_url(url) + if not remote_url: + if url.strip() == LOCAL_PATH_INDICATOR: + pass + else: + url = os.path.expandvars(url) + url = os.path.expanduser(url) + if not os.path.isabs(url): + msg = ( + 'WARNING: Externals description for "{0}" contains a ' + "url that is not remote and does not expand to an " + "absolute path. 
Version control operations may " + "fail.\n\nurl={1}".format(field, url) + ) + printlog(msg) + else: + url = os.path.normpath(url) + return url + + +# --------------------------------------------------------------------- +# +# subprocess +# +# --------------------------------------------------------------------- + +# Give the user a helpful message if we detect that a command seems to +# be hanging. +_HANGING_SEC = 300 + + +def _hanging_msg(working_directory, command): + print( + """ + +Command '{command}' +from directory {working_directory} +has taken {hanging_sec} seconds. It may be hanging. + +The command will continue to run, but you may want to abort +manage_externals with ^C and investigate. A possible cause of hangs is +when svn or git require authentication to access a private +repository. On some systems, svn and git requests for authentication +information will not be displayed to the user. In this case, the program +will appear to hang. Ensure you can run svn and git manually and access +all repositories without entering your authentication information. + +""".format( + command=command, + working_directory=working_directory, + hanging_sec=_HANGING_SEC, + ) + ) + + +def execute_subprocess(commands, status_to_caller=False, output_to_caller=False): + """Wrapper around subprocess.check_output to handle common + exceptions. + + check_output runs a command with arguments and waits + for it to complete. + + check_output raises an exception on a nonzero return code. if + status_to_caller is true, execute_subprocess returns the subprocess + return code, otherwise execute_subprocess treats non-zero return + status as an error and raises an exception. + + """ + cwd = os.getcwd() + msg = "In directory: {0}\nexecute_subprocess running command:".format(cwd) + logging.info(msg) + commands_str = " ".join(str(element) for element in commands) + logging.info(commands_str) + return_to_caller = status_to_caller or output_to_caller + status = -1 + output = "" + hanging_timer = Timer( + _HANGING_SEC, + _hanging_msg, + kwargs={"working_directory": cwd, "command": commands_str}, + ) + hanging_timer.start() + try: + output = subprocess.check_output( + commands, stderr=subprocess.STDOUT, universal_newlines=True + ) + log_process_output(output) + status = 0 + except OSError as error: + msg = failed_command_msg( + "Command execution failed. Does the executable exist?", commands + ) + logging.error(error) + fatal_error(msg) + except ValueError as error: + msg = failed_command_msg( + "DEV_ERROR: Invalid arguments trying to run subprocess", commands + ) + logging.error(error) + fatal_error(msg) + except subprocess.CalledProcessError as error: + # Only report the error if we are NOT returning to the + # caller. If we are returning to the caller, then it may be a + # simple status check. If returning, it is the callers + # responsibility determine if an error occurred and handle it + # appropriately. 
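+        # Illustrative summary of the calling contract (mirrors the flag
+        # handling at the end of this function):
+        #   execute_subprocess(commands)                          -> None; raises on any failure
+        #   execute_subprocess(commands, status_to_caller=True)   -> int return code (no raise on nonzero exit)
+        #   execute_subprocess(commands, output_to_caller=True)   -> captured output text
+        #   both flags True                                       -> (status, output) tuple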
+ if not return_to_caller: + msg_context = ( + "Process did not run successfully; " + "returned status {0}".format(error.returncode) + ) + msg = failed_command_msg(msg_context, commands, output=error.output) + logging.error(error) + logging.error(msg) + log_process_output(error.output) + fatal_error(msg) + status = error.returncode + finally: + hanging_timer.cancel() + + if status_to_caller and output_to_caller: + ret_value = (status, output) + elif status_to_caller: + ret_value = status + elif output_to_caller: + ret_value = output + else: + ret_value = None + + return ret_value + + +def failed_command_msg(msg_context, command, output=None): + """Template for consistent error messages from subprocess calls. + + If 'output' is given, it should provide the output from the failed + command + """ + + if output: + output_truncated = last_n_lines( + output, 20, truncation_message="[... Output truncated for brevity ...]" + ) + errmsg = ( + "Failed with output:\n" + indent_string(output_truncated, 4) + "\nERROR: " + ) + else: + errmsg = "" + + command_str = " ".join(command) + errmsg += """In directory + {cwd} +{context}: + {command} +""".format( + cwd=os.getcwd(), context=msg_context, command=command_str + ) + + if output: + errmsg += "See above for output from failed command.\n" + + return errmsg diff --git a/poetry.lock b/poetry.lock new file mode 100644 index 0000000000..b59ed3942c --- /dev/null +++ b/poetry.lock @@ -0,0 +1,693 @@ +# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. + +[[package]] +name = "alabaster" +version = "0.7.13" +description = "A configurable sidebar-enabled Sphinx theme" +optional = false +python-versions = ">=3.6" +files = [ + {file = "alabaster-0.7.13-py3-none-any.whl", hash = "sha256:1ee19aca801bbabb5ba3f5f258e4422dfa86f82f3e9cefb0859b283cdd7f62a3"}, + {file = "alabaster-0.7.13.tar.gz", hash = "sha256:a27a4a084d5e690e16e01e03ad2b2e552c61a65469419b907243193de1a84ae2"}, +] + +[[package]] +name = "babel" +version = "2.14.0" +description = "Internationalization utilities" +optional = false +python-versions = ">=3.7" +files = [ + {file = "Babel-2.14.0-py3-none-any.whl", hash = "sha256:efb1a25b7118e67ce3a259bed20545c29cb68be8ad2c784c83689981b7a57287"}, + {file = "Babel-2.14.0.tar.gz", hash = "sha256:6919867db036398ba21eb5c7a0f6b28ab8cbc3ae7a73a44ebe34ae74a4e7d363"}, +] + +[package.dependencies] +pytz = {version = ">=2015.7", markers = "python_version < \"3.9\""} + +[package.extras] +dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"] + +[[package]] +name = "certifi" +version = "2024.2.2" +description = "Python package for providing Mozilla's CA Bundle." +optional = false +python-versions = ">=3.6" +files = [ + {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, + {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, +] + +[[package]] +name = "charset-normalizer" +version = "3.3.2" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
+optional = false +python-versions = ">=3.7.0" +files = [ + {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, + {file = 
"charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, + {file = 
"charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = 
"sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, + {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, +] + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "docutils" +version = "0.19" +description = "Docutils -- Python Documentation Utilities" +optional = false +python-versions = ">=3.7" +files = [ + {file = "docutils-0.19-py3-none-any.whl", hash = "sha256:5e1de4d849fee02c63b040a4a3fd567f4ab104defd8a5511fbbc24a8a017efbc"}, + {file = "docutils-0.19.tar.gz", hash = "sha256:33995a6753c30b7f577febfc2c50411fec6aac7f7ffeb7c4cfe5991072dcf9e6"}, +] + +[[package]] +name = "exceptiongroup" +version = "1.2.0" +description = "Backport of PEP 654 (exception groups)" +optional = false +python-versions = ">=3.7" +files = [ + {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"}, + {file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"}, +] + +[package.extras] +test = ["pytest (>=6)"] + +[[package]] +name = "fsspec" +version = "2023.12.2" +description = "File-system specification" +optional = false +python-versions = ">=3.8" +files = [ + {file = "fsspec-2023.12.2-py3-none-any.whl", hash = "sha256:d800d87f72189a745fa3d6b033b9dc4a34ad069f60ca60b943a63599f5501960"}, + {file = "fsspec-2023.12.2.tar.gz", hash = "sha256:8548d39e8810b59c38014934f6b31e57f40c1b20f911f4cc2b85389c7e9bf0cb"}, +] + +[package.extras] +abfs = ["adlfs"] +adl = ["adlfs"] +arrow = ["pyarrow (>=1)"] +dask = ["dask", "distributed"] +devel = ["pytest", "pytest-cov"] +dropbox = ["dropbox", "dropboxdrivefs", "requests"] +full = ["adlfs", "aiohttp (!=4.0.0a0,!=4.0.0a1)", "dask", "distributed", "dropbox", "dropboxdrivefs", "fusepy", "gcsfs", "libarchive-c", "ocifs", "panel", "paramiko", "pyarrow (>=1)", "pygit2", "requests", "s3fs", "smbprotocol", "tqdm"] +fuse = ["fusepy"] +gcs = ["gcsfs"] +git = ["pygit2"] +github = ["requests"] +gs = ["gcsfs"] +gui = ["panel"] +hdfs = ["pyarrow (>=1)"] +http = ["aiohttp (!=4.0.0a0,!=4.0.0a1)", "requests"] +libarchive = ["libarchive-c"] +oci = ["ocifs"] +s3 = ["s3fs"] +sftp = ["paramiko"] +smb = ["smbprotocol"] +ssh = ["paramiko"] +tqdm = ["tqdm"] + +[[package]] +name = "gitdb" +version = "4.0.11" +description = "Git Object Database" +optional = false +python-versions = ">=3.7" +files = [ + {file = "gitdb-4.0.11-py3-none-any.whl", hash = 
"sha256:81a3407ddd2ee8df444cbacea00e2d038e40150acfa3001696fe0dcf1d3adfa4"}, + {file = "gitdb-4.0.11.tar.gz", hash = "sha256:bf5421126136d6d0af55bc1e7c1af1c397a34f5b7bd79e776cd3e89785c2b04b"}, +] + +[package.dependencies] +smmap = ">=3.0.1,<6" + +[[package]] +name = "gitpython" +version = "3.1.41" +description = "GitPython is a Python library used to interact with Git repositories" +optional = false +python-versions = ">=3.7" +files = [ + {file = "GitPython-3.1.41-py3-none-any.whl", hash = "sha256:c36b6634d069b3f719610175020a9aed919421c87552185b085e04fbbdb10b7c"}, + {file = "GitPython-3.1.41.tar.gz", hash = "sha256:ed66e624884f76df22c8e16066d567aaa5a37d5b5fa19db2c6df6f7156db9048"}, +] + +[package.dependencies] +gitdb = ">=4.0.1,<5" + +[package.extras] +test = ["black", "coverage[toml]", "ddt (>=1.1.1,!=1.4.3)", "mock", "mypy", "pre-commit", "pytest (>=7.3.1)", "pytest-cov", "pytest-instafail", "pytest-mock", "pytest-sugar", "sumtypes"] + +[[package]] +name = "idna" +version = "3.6" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.5" +files = [ + {file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"}, + {file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"}, +] + +[[package]] +name = "imagesize" +version = "1.4.1" +description = "Getting image size from png/jpeg/jpeg2000/gif file" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "imagesize-1.4.1-py2.py3-none-any.whl", hash = "sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b"}, + {file = "imagesize-1.4.1.tar.gz", hash = "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a"}, +] + +[[package]] +name = "importlib-metadata" +version = "7.0.1" +description = "Read metadata from Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "importlib_metadata-7.0.1-py3-none-any.whl", hash = "sha256:4805911c3a4ec7c3966410053e9ec6a1fecd629117df5adee56dfc9432a1081e"}, + {file = "importlib_metadata-7.0.1.tar.gz", hash = "sha256:f238736bb06590ae52ac1fab06a3a9ef1d8dce2b7a35b5ab329371d6c8f5d2cc"}, +] + +[package.dependencies] +zipp = ">=0.5" + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] +perf = ["ipython"] +testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)", "pytest-ruff"] + +[[package]] +name = "iniconfig" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.7" +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] + +[[package]] +name = "jinja2" +version = "3.1.3" +description = "A very fast and expressive template engine." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "Jinja2-3.1.3-py3-none-any.whl", hash = "sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa"}, + {file = "Jinja2-3.1.3.tar.gz", hash = "sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90"}, +] + +[package.dependencies] +MarkupSafe = ">=2.0" + +[package.extras] +i18n = ["Babel (>=2.7)"] + +[[package]] +name = "markupsafe" +version = "2.1.5" +description = "Safely add untrusted strings to HTML/XML markup." +optional = false +python-versions = ">=3.7" +files = [ + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = 
"sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, + {file = 
"MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, + {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, +] + +[[package]] +name = "packaging" +version = "23.2" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.7" +files = [ + {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, + {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, +] + +[[package]] +name = "pluggy" +version = "1.4.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pluggy-1.4.0-py3-none-any.whl", hash = 
"sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, + {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "pyfakefs" +version = "5.3.5" +description = "pyfakefs implements a fake file system that mocks the Python file system modules." +optional = false +python-versions = ">=3.7" +files = [ + {file = "pyfakefs-5.3.5-py3-none-any.whl", hash = "sha256:751015c1de94e1390128c82b48cdedc3f088bbdbe4bc713c79d02a27f0f61e69"}, + {file = "pyfakefs-5.3.5.tar.gz", hash = "sha256:7cdc500b35a214cb7a614e1940543acc6650e69a94ac76e30f33c9373bd9cf90"}, +] + +[[package]] +name = "pygments" +version = "2.17.2" +description = "Pygments is a syntax highlighting package written in Python." +optional = false +python-versions = ">=3.7" +files = [ + {file = "pygments-2.17.2-py3-none-any.whl", hash = "sha256:b27c2826c47d0f3219f29554824c30c5e8945175d888647acd804ddd04af846c"}, + {file = "pygments-2.17.2.tar.gz", hash = "sha256:da46cec9fd2de5be3a8a784f434e4c4ab670b4ff54d605c4c2717e9d49c4c367"}, +] + +[package.extras] +plugins = ["importlib-metadata"] +windows-terminal = ["colorama (>=0.4.6)"] + +[[package]] +name = "pytest" +version = "8.0.0" +description = "pytest: simple powerful testing with Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pytest-8.0.0-py3-none-any.whl", hash = "sha256:50fb9cbe836c3f20f0dfa99c565201fb75dc54c8d76373cd1bde06b06657bdb6"}, + {file = "pytest-8.0.0.tar.gz", hash = "sha256:249b1b0864530ba251b7438274c4d251c58d868edaaec8762893ad4a0d71c36c"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} +exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=1.3.0,<2.0" +tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} + +[package.extras] +testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] + +[[package]] +name = "pytz" +version = "2024.1" +description = "World timezone definitions, modern and historical" +optional = false +python-versions = "*" +files = [ + {file = "pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"}, + {file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"}, +] + +[[package]] +name = "requests" +version = "2.31.0" +description = "Python HTTP for Humans." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, + {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "smmap" +version = "5.0.1" +description = "A pure Python implementation of a sliding window memory map manager" +optional = false +python-versions = ">=3.7" +files = [ + {file = "smmap-5.0.1-py3-none-any.whl", hash = "sha256:e6d8668fa5f93e706934a62d7b4db19c8d9eb8cf2adbb75ef1b675aa332b69da"}, + {file = "smmap-5.0.1.tar.gz", hash = "sha256:dceeb6c0028fdb6734471eb07c0cd2aae706ccaecab45965ee83f11c8d3b1f62"}, +] + +[[package]] +name = "snowballstemmer" +version = "2.2.0" +description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." +optional = false +python-versions = "*" +files = [ + {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"}, + {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, +] + +[[package]] +name = "sphinx" +version = "5.3.0" +description = "Python documentation generator" +optional = false +python-versions = ">=3.6" +files = [ + {file = "Sphinx-5.3.0.tar.gz", hash = "sha256:51026de0a9ff9fc13c05d74913ad66047e104f56a129ff73e174eb5c3ee794b5"}, + {file = "sphinx-5.3.0-py3-none-any.whl", hash = "sha256:060ca5c9f7ba57a08a1219e547b269fadf125ae25b06b9fa7f66768efb652d6d"}, +] + +[package.dependencies] +alabaster = ">=0.7,<0.8" +babel = ">=2.9" +colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} +docutils = ">=0.14,<0.20" +imagesize = ">=1.3" +importlib-metadata = {version = ">=4.8", markers = "python_version < \"3.10\""} +Jinja2 = ">=3.0" +packaging = ">=21.0" +Pygments = ">=2.12" +requests = ">=2.5.0" +snowballstemmer = ">=2.0" +sphinxcontrib-applehelp = "*" +sphinxcontrib-devhelp = "*" +sphinxcontrib-htmlhelp = ">=2.0.0" +sphinxcontrib-jsmath = "*" +sphinxcontrib-qthelp = "*" +sphinxcontrib-serializinghtml = ">=1.1.5" + +[package.extras] +docs = ["sphinxcontrib-websupport"] +lint = ["docutils-stubs", "flake8 (>=3.5.0)", "flake8-bugbear", "flake8-comprehensions", "flake8-simplify", "isort", "mypy (>=0.981)", "sphinx-lint", "types-requests", "types-typed-ast"] +test = ["cython", "html5lib", "pytest (>=4.6)", "typed_ast"] + +[[package]] +name = "sphinxcontrib-applehelp" +version = "1.0.4" +description = "sphinxcontrib-applehelp is a Sphinx extension which outputs Apple help books" +optional = false +python-versions = ">=3.8" +files = [ + {file = "sphinxcontrib-applehelp-1.0.4.tar.gz", hash = "sha256:828f867945bbe39817c210a1abfd1bc4895c8b73fcaade56d45357a348a07d7e"}, + {file = "sphinxcontrib_applehelp-1.0.4-py3-none-any.whl", hash = "sha256:29d341f67fb0f6f586b23ad80e072c8e6ad0b48417db2bde114a4c9746feb228"}, +] + +[package.extras] +lint = ["docutils-stubs", "flake8", "mypy"] +test = ["pytest"] + +[[package]] +name = "sphinxcontrib-devhelp" +version = "1.0.2" +description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp document." 
+optional = false +python-versions = ">=3.5" +files = [ + {file = "sphinxcontrib-devhelp-1.0.2.tar.gz", hash = "sha256:ff7f1afa7b9642e7060379360a67e9c41e8f3121f2ce9164266f61b9f4b338e4"}, + {file = "sphinxcontrib_devhelp-1.0.2-py2.py3-none-any.whl", hash = "sha256:8165223f9a335cc1af7ffe1ed31d2871f325254c0423bc0c4c7cd1c1e4734a2e"}, +] + +[package.extras] +lint = ["docutils-stubs", "flake8", "mypy"] +test = ["pytest"] + +[[package]] +name = "sphinxcontrib-htmlhelp" +version = "2.0.1" +description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files" +optional = false +python-versions = ">=3.8" +files = [ + {file = "sphinxcontrib-htmlhelp-2.0.1.tar.gz", hash = "sha256:0cbdd302815330058422b98a113195c9249825d681e18f11e8b1f78a2f11efff"}, + {file = "sphinxcontrib_htmlhelp-2.0.1-py3-none-any.whl", hash = "sha256:c38cb46dccf316c79de6e5515e1770414b797162b23cd3d06e67020e1d2a6903"}, +] + +[package.extras] +lint = ["docutils-stubs", "flake8", "mypy"] +test = ["html5lib", "pytest"] + +[[package]] +name = "sphinxcontrib-jsmath" +version = "1.0.1" +description = "A sphinx extension which renders display math in HTML via JavaScript" +optional = false +python-versions = ">=3.5" +files = [ + {file = "sphinxcontrib-jsmath-1.0.1.tar.gz", hash = "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8"}, + {file = "sphinxcontrib_jsmath-1.0.1-py2.py3-none-any.whl", hash = "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178"}, +] + +[package.extras] +test = ["flake8", "mypy", "pytest"] + +[[package]] +name = "sphinxcontrib-qthelp" +version = "1.0.3" +description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp document." +optional = false +python-versions = ">=3.5" +files = [ + {file = "sphinxcontrib-qthelp-1.0.3.tar.gz", hash = "sha256:4c33767ee058b70dba89a6fc5c1892c0d57a54be67ddd3e7875a18d14cba5a72"}, + {file = "sphinxcontrib_qthelp-1.0.3-py2.py3-none-any.whl", hash = "sha256:bd9fc24bcb748a8d51fd4ecaade681350aa63009a347a8c14e637895444dfab6"}, +] + +[package.extras] +lint = ["docutils-stubs", "flake8", "mypy"] +test = ["pytest"] + +[[package]] +name = "sphinxcontrib-serializinghtml" +version = "1.1.5" +description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)." +optional = false +python-versions = ">=3.5" +files = [ + {file = "sphinxcontrib-serializinghtml-1.1.5.tar.gz", hash = "sha256:aa5f6de5dfdf809ef505c4895e51ef5c9eac17d0f287933eb49ec495280b6952"}, + {file = "sphinxcontrib_serializinghtml-1.1.5-py2.py3-none-any.whl", hash = "sha256:352a9a00ae864471d3a7ead8d7d79f5fc0b57e8b3f95e9867eb9eb28999b92fd"}, +] + +[package.extras] +lint = ["docutils-stubs", "flake8", "mypy"] +test = ["pytest"] + +[[package]] +name = "tomli" +version = "2.0.1" +description = "A lil' TOML parser" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, + {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, +] + +[[package]] +name = "urllib3" +version = "2.2.0" +description = "HTTP library with thread-safe connection pooling, file post, and more." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "urllib3-2.2.0-py3-none-any.whl", hash = "sha256:ce3711610ddce217e6d113a2732fafad960a03fd0318c91faa79481e35c11224"}, + {file = "urllib3-2.2.0.tar.gz", hash = "sha256:051d961ad0c62a94e50ecf1af379c3aba230c66c710493493560c0c223c49f20"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +h2 = ["h2 (>=4,<5)"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] + +[[package]] +name = "wheel" +version = "0.42.0" +description = "A built-package format for Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "wheel-0.42.0-py3-none-any.whl", hash = "sha256:177f9c9b0d45c47873b619f5b650346d632cdc35fb5e4d25058e09c9e581433d"}, + {file = "wheel-0.42.0.tar.gz", hash = "sha256:c45be39f7882c9d34243236f2d63cbd58039e360f85d0913425fbd7ceea617a8"}, +] + +[package.extras] +test = ["pytest (>=6.0.0)", "setuptools (>=65)"] + +[[package]] +name = "zipp" +version = "3.17.0" +description = "Backport of pathlib-compatible object wrapper for zip files" +optional = false +python-versions = ">=3.8" +files = [ + {file = "zipp-3.17.0-py3-none-any.whl", hash = "sha256:0e923e726174922dce09c53c59ad483ff7bbb8e572e00c7f7c46b88556409f31"}, + {file = "zipp-3.17.0.tar.gz", hash = "sha256:84e64a1c28cf7e91ed2078bb8cc8c259cb19b76942096c8d7b84947690cabaf0"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy (>=0.9.1)", "pytest-ruff"] + +[metadata] +lock-version = "2.0" +python-versions = "^3.8" +content-hash = "25ee2ae1d74abedde3a6637a60d4a3095ea5cf9731960875741bbc2ba84a475d" diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000000..2484552e4f --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,41 @@ +[tool.poetry] +name = "git-fleximod" +version = "0.7.4" +description = "Extended support for git-submodule and git-sparse-checkout" +authors = ["Jim Edwards "] +maintainers = ["Jim Edwards "] +license = "MIT" +readme = "README.md" +homepage = "https://github.com/jedwards4b/git-fleximod" +keywords = ["git", "submodule", "sparse-checkout"] +packages = [ +{ include = "git_fleximod"}, +{ include = "doc"}, +] + +[tool.poetry.scripts] +git-fleximod = "git_fleximod.git_fleximod:main" +me2flexi = "git_fleximod.metoflexi:_main" +fsspec = "fsspec.fuse:main" + +[tool.poetry.dependencies] +python = "^3.8" +GitPython = "^3.1.0" +sphinx = "^5.0.0" +fsspec = "^2023.12.2" +wheel = "^0.42.0" +pytest = "^8.0.0" +pyfakefs = "^5.3.5" + +[tool.poetry.urls] +"Bug Tracker" = "https://github.com/jedwards4b/git-fleximod/issues" + +[tool.pytest.ini_options] +markers = [ + "skip_after_first: only run on first iteration" +] + +[build-system] +requires = ["poetry-core"] +build-backend = "poetry.core.masonry.api" + diff --git a/tbump.toml b/tbump.toml new file mode 100644 index 0000000000..d4b8eaee11 --- /dev/null +++ b/tbump.toml @@ -0,0 +1,43 @@ +# Uncomment this if your project is hosted on GitHub: +github_url = "https://github.com/jedwards4b/git-fleximod/" + +[version] +current = "0.7.4" + +# Example of a semver regexp. +# Make sure this matches current_version before +# using tbump +regex = ''' + (?P\d+) + \. + (?P\d+) + \. 
+ (?P\d+) + ''' + +[git] +message_template = "Bump to {new_version}" +tag_template = "v{new_version}" + +# For each file to patch, add a [[file]] config +# section containing the path of the file, relative to the +# tbump.toml location. +[[file]] +src = "git_fleximod/cli.py" + +[[file]] +src = "pyproject.toml" + +# You can specify a list of commands to +# run after the files have been patched +# and before the git commit is made + +# [[before_commit]] +# name = "check changelog" +# cmd = "grep -q {new_version} Changelog.rst" + +# Or run some commands after the git tag and the branch +# have been pushed: +# [[after_push]] +# name = "publish" +# cmd = "./publish.sh" diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 0000000000..4d4c66c78e --- /dev/null +++ b/tests/__init__.py @@ -0,0 +1,3 @@ +import sys, os + +sys.path.append(os.path.join(os.path.dirname(__file__), os.path.pardir, "src")) diff --git a/tests/conftest.py b/tests/conftest.py new file mode 100644 index 0000000000..942a0efb97 --- /dev/null +++ b/tests/conftest.py @@ -0,0 +1,138 @@ +import pytest +from git_fleximod.gitinterface import GitInterface +import os +import subprocess +import logging +from pathlib import Path + +@pytest.fixture(scope='session') +def logger(): + logging.basicConfig( + level=logging.INFO, format="%(name)s - %(levelname)s - %(message)s", handlers=[logging.StreamHandler()] + ) + logger = logging.getLogger(__name__) + return logger + +all_repos=[ + {"subrepo_path": "modules/test", + "submodule_name": "test_submodule", + "status1" : "test_submodule MPIserial_2.5.0-3-gd82ce7c is out of sync with .gitmodules MPIserial_2.4.0", + "status2" : "test_submodule at tag MPIserial_2.4.0", + "status3" : "test_submodule at tag MPIserial_2.4.0", + "status4" : "test_submodule at tag MPIserial_2.4.0", + "gitmodules_content" : """ + [submodule "test_submodule"] + path = modules/test + url = https://github.com/ESMCI/mpi-serial.git + fxtag = MPIserial_2.4.0 + fxDONOTUSEurl = https://github.com/ESMCI/mpi-serial.git + fxrequired = ToplevelRequired +"""}, + {"subrepo_path": "modules/test_optional", + "submodule_name": "test_optional", + "status1" : "test_optional MPIserial_2.5.0-3-gd82ce7c is out of sync with .gitmodules MPIserial_2.4.0", + "status2" : "test_optional at tag MPIserial_2.4.0", + "status3" : "test_optional not checked out, aligned at tag MPIserial_2.4.0", + "status4" : "test_optional at tag MPIserial_2.4.0", + "gitmodules_content": """ + [submodule "test_optional"] + path = modules/test_optional + url = https://github.com/ESMCI/mpi-serial.git + fxtag = MPIserial_2.4.0 + fxDONOTUSEurl = https://github.com/ESMCI/mpi-serial.git + fxrequired = ToplevelOptional +"""}, + {"subrepo_path": "modules/test_alwaysoptional", + "submodule_name": "test_alwaysoptional", + "status1" : "test_alwaysoptional MPIserial_2.3.0 is out of sync with .gitmodules e5cf35c", + "status2" : "test_alwaysoptional at hash e5cf35c", + "status3" : "test_alwaysoptional not checked out, out of sync at tag MPIserial_2.3.0", + "status4" : "test_alwaysoptional at hash e5cf35c", + "gitmodules_content": """ + [submodule "test_alwaysoptional"] + path = modules/test_alwaysoptional + url = https://github.com/ESMCI/mpi-serial.git + fxtag = e5cf35c + fxDONOTUSEurl = https://github.com/ESMCI/mpi-serial.git + fxrequired = AlwaysOptional +"""}, + {"subrepo_path": "modules/test_sparse", + "submodule_name": "test_sparse", + "status1" : "test_sparse at tag MPIserial_2.5.0", + "status2" : "test_sparse at tag MPIserial_2.5.0", + "status3" : 
"test_sparse at tag MPIserial_2.5.0", + "status4" : "test_sparse at tag MPIserial_2.5.0", + "gitmodules_content": """ + [submodule "test_sparse"] + path = modules/test_sparse + url = https://github.com/ESMCI/mpi-serial.git + fxtag = MPIserial_2.5.0 + fxDONOTUSEurl = https://github.com/ESMCI/mpi-serial.git + fxrequired = AlwaysRequired + fxsparse = ../.sparse_file_list +"""}, +] +@pytest.fixture(params=all_repos) + +def shared_repos(request): + return request.param + +@pytest.fixture +def get_all_repos(): + return all_repos + +def write_sparse_checkout_file(fp): + sparse_content = """m4 +""" + fp.write_text(sparse_content) + +@pytest.fixture +def test_repo(shared_repos, tmp_path, logger): + subrepo_path = shared_repos["subrepo_path"] + submodule_name = shared_repos["submodule_name"] + test_dir = tmp_path / "testrepo" + test_dir.mkdir() + str_path = str(test_dir) + gitp = GitInterface(str_path, logger) + assert test_dir.joinpath(".git").is_dir() + (test_dir / "modules").mkdir() + if "sparse" in submodule_name: + (test_dir / subrepo_path).mkdir() + # Add the sparse checkout file + write_sparse_checkout_file(test_dir / "modules" / ".sparse_file_list") + gitp.git_operation("add","modules/.sparse_file_list") + else: + gitp = GitInterface(str(test_dir), logger) + gitp.git_operation("submodule", "add", "--depth","1","--name", submodule_name, "https://github.com/ESMCI/mpi-serial.git", subrepo_path) + assert test_dir.joinpath(".gitmodules").is_file() + gitp.git_operation("add",subrepo_path) + gitp.git_operation("commit","-a","-m","\"add submod\"") + test_dir2 = tmp_path / "testrepo2" + gitp.git_operation("clone",test_dir,test_dir2) + return test_dir2 + + +@pytest.fixture +def complex_repo(tmp_path, logger): + test_dir = tmp_path / "testcomplex" + test_dir.mkdir() + str_path = str(test_dir) + gitp = GitInterface(str_path, logger) + gitp.git_operation("remote", "add", "origin", "https://github.com/jedwards4b/fleximod-test2") + gitp.git_operation("fetch", "origin", "main") + gitp.git_operation("checkout", "main") + return test_dir + +@pytest.fixture +def git_fleximod(): + def _run_fleximod(path, args, input=None): + cmd = ["git", "fleximod"] + args.split() + result = subprocess.run(cmd, cwd=path, input=input, + stdout=subprocess.PIPE, stderr=subprocess.PIPE, + text=True) + if result.returncode: + print(result.stdout) + print(result.stderr) + return result + return _run_fleximod + diff --git a/tests/test_a_import.py b/tests/test_a_import.py new file mode 100644 index 0000000000..d5ca878de5 --- /dev/null +++ b/tests/test_a_import.py @@ -0,0 +1,8 @@ +# pylint: disable=unused-import +from git_fleximod import cli +from git_fleximod import utils +from git_fleximod.gitinterface import GitInterface +from git_fleximod.gitmodules import GitModules + +def test_import(): + print("here") diff --git a/tests/test_b_update.py b/tests/test_b_update.py new file mode 100644 index 0000000000..159f1cfae0 --- /dev/null +++ b/tests/test_b_update.py @@ -0,0 +1,26 @@ +import pytest +from pathlib import Path + +def test_basic_checkout(git_fleximod, test_repo, shared_repos): + # Prepare a simple .gitmodules + gm = shared_repos['gitmodules_content'] + file_path = (test_repo / ".gitmodules") + repo_name = shared_repos["submodule_name"] + repo_path = shared_repos["subrepo_path"] + + file_path.write_text(gm) + + # Run the command + result = git_fleximod(test_repo, f"update {repo_name}") + + # Assertions + assert result.returncode == 0 + assert Path(test_repo / repo_path).exists() # Did the submodule directory get created? 
+ if "sparse" in repo_name: + assert Path(test_repo / f"{repo_path}/m4").exists() # Did the submodule sparse directory get created? + assert not Path(test_repo / f"{repo_path}/README").exists() # Did only the submodule sparse directory get created? + + status = git_fleximod(test_repo, f"status {repo_name}") + + assert shared_repos["status2"] in status.stdout + diff --git a/tests/test_c_required.py b/tests/test_c_required.py new file mode 100644 index 0000000000..89ab8d294d --- /dev/null +++ b/tests/test_c_required.py @@ -0,0 +1,30 @@ +import pytest +from pathlib import Path + +def test_required(git_fleximod, test_repo, shared_repos): + file_path = (test_repo / ".gitmodules") + gm = shared_repos["gitmodules_content"] + repo_name = shared_repos["submodule_name"] + if file_path.exists(): + with file_path.open("r") as f: + gitmodules_content = f.read() + # add the entry if it does not exist + if repo_name not in gitmodules_content: + file_path.write_text(gitmodules_content+gm) + # or if it is incomplete + elif gm not in gitmodules_content: + file_path.write_text(gm) + else: + file_path.write_text(gm) + result = git_fleximod(test_repo, "update") + assert result.returncode == 0 + status = git_fleximod(test_repo, f"status {repo_name}") + assert shared_repos["status3"] in status.stdout + status = git_fleximod(test_repo, f"update --optional") + assert result.returncode == 0 + status = git_fleximod(test_repo, f"status {repo_name}") + assert shared_repos["status4"] in status.stdout + status = git_fleximod(test_repo, f"update {repo_name}") + assert result.returncode == 0 + status = git_fleximod(test_repo, f"status {repo_name}") + assert shared_repos["status4"] in status.stdout diff --git a/tests/test_d_complex.py b/tests/test_d_complex.py new file mode 100644 index 0000000000..fdce516274 --- /dev/null +++ b/tests/test_d_complex.py @@ -0,0 +1,67 @@ +import pytest +from pathlib import Path +from git_fleximod.gitinterface import GitInterface + +def test_complex_checkout(git_fleximod, complex_repo, logger): + status = git_fleximod(complex_repo, "status") + assert("ToplevelOptional not checked out, aligned at tag v5.3.2" in status.stdout) + assert("ToplevelRequired not checked out, aligned at tag MPIserial_2.5.0" in status.stdout) + assert("AlwaysRequired not checked out, aligned at tag MPIserial_2.4.0" in status.stdout) + assert("Complex not checked out, aligned at tag testtag01" in status.stdout) + assert("AlwaysOptional not checked out, aligned at tag MPIserial_2.3.0" in status.stdout) + + # This should checkout and update test_submodule and complex_sub + result = git_fleximod(complex_repo, "update") + assert result.returncode == 0 + + status = git_fleximod(complex_repo, "status") + assert("ToplevelOptional not checked out, aligned at tag v5.3.2" in status.stdout) + assert("ToplevelRequired at tag MPIserial_2.5.0" in status.stdout) + assert("AlwaysRequired at tag MPIserial_2.4.0" in status.stdout) + assert("Complex at tag testtag01" in status.stdout) + + # now check the complex_sub + root = (complex_repo / "modules" / "complex") + assert(not (root / "libraries" / "gptl" / ".git").exists()) + assert(not (root / "libraries" / "mpi-serial" / ".git").exists()) + assert((root / "modules" / "mpi-serial" / ".git").exists()) + assert(not (root / "modules" / "mpi-serial2" / ".git").exists()) + assert((root / "modules" / "mpi-sparse" / ".git").exists()) + assert((root / "modules" / "mpi-sparse" / "m4").exists()) + assert(not (root / "modules" / "mpi-sparse" / "README").exists()) + + # update a single optional 
submodule + + result = git_fleximod(complex_repo, "update ToplevelOptional") + assert result.returncode == 0 + + status = git_fleximod(complex_repo, "status") + assert("ToplevelOptional at tag v5.3.2" in status.stdout) + assert("ToplevelRequired at tag MPIserial_2.5.0" in status.stdout) + assert("AlwaysRequired at tag MPIserial_2.4.0" in status.stdout) + assert("Complex at tag testtag01" in status.stdout) + assert("AlwaysOptional not checked out, aligned at tag MPIserial_2.3.0" in status.stdout) + + + # Finally update optional + result = git_fleximod(complex_repo, "update --optional") + assert result.returncode == 0 + + status = git_fleximod(complex_repo, "status") + assert("ToplevelOptional at tag v5.3.2" in status.stdout) + assert("ToplevelRequired at tag MPIserial_2.5.0" in status.stdout) + assert("AlwaysRequired at tag MPIserial_2.4.0" in status.stdout) + assert("Complex at tag testtag01" in status.stdout) + assert("AlwaysOptional at tag MPIserial_2.3.0" in status.stdout) + + # now check the complex_sub + root = (complex_repo / "modules" / "complex" ) + assert(not (root / "libraries" / "gptl" / ".git").exists()) + assert(not (root / "libraries" / "mpi-serial" / ".git").exists()) + assert((root / "modules" / "mpi-serial" / ".git").exists()) + assert((root / "modules" / "mpi-serial2" / ".git").exists()) + assert((root / "modules" / "mpi-sparse" / ".git").exists()) + assert((root / "modules" / "mpi-sparse" / "m4").exists()) + assert(not (root / "modules" / "mpi-sparse" / "README").exists()) + + From 9c46501e49265ec9b4049a502399b46c659ee197 Mon Sep 17 00:00:00 2001 From: Jim Edwards Date: Wed, 29 May 2024 15:09:36 -0600 Subject: [PATCH 137/161] update hemco external --- .gitmodules | 2 +- components/mizuRoute | 2 +- src/dynamics/fv3 | 2 +- src/hemco | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.gitmodules b/.gitmodules index 37fba58bd5..de0d660878 100644 --- a/.gitmodules +++ b/.gitmodules @@ -57,7 +57,7 @@ [submodule "hemco"] path = src/hemco url = https://github.com/ESCOMP/HEMCO_CESM.git - fxtag = hemco-cesm1_2_1_hemco3_6_3_cesm + fxtag = hemco-cesm1_2_1_hemco3_6_3_cesm_rme01 fxrequired = AlwaysRequired fxDONOTUSEurl = https://github.com/ESCOMP/HEMCO_CESM.git diff --git a/components/mizuRoute b/components/mizuRoute index 81c720c7ee..c55bd1ab74 160000 --- a/components/mizuRoute +++ b/components/mizuRoute @@ -1 +1 @@ -Subproject commit 81c720c7ee51f9c69f2934f696078c42f4493565 +Subproject commit c55bd1ab746734ea77a00606dde895ab034edf1a diff --git a/src/dynamics/fv3 b/src/dynamics/fv3 index 38b889fcff..e838751297 160000 --- a/src/dynamics/fv3 +++ b/src/dynamics/fv3 @@ -1 +1 @@ -Subproject commit 38b889fcfff1e316ef5b4d4d02ecc47c801b8669 +Subproject commit e838751297b31845d0456b3a2d99fceff4ef5c60 diff --git a/src/hemco b/src/hemco index 83127485c1..3a6d999ab0 160000 --- a/src/hemco +++ b/src/hemco @@ -1 +1 @@ -Subproject commit 83127485c149846ba79d5472ae131acd7bda0544 +Subproject commit 3a6d999ab0dbee9f03ab6b9a13dd3b6d9670eb54 From 4aa073c3482ad30b6952834ccc5eb99778f88c09 Mon Sep 17 00:00:00 2001 From: Jim Edwards Date: Fri, 31 May 2024 09:56:18 -0600 Subject: [PATCH 138/161] update find_root_dir to work with git worktree --- git_fleximod/cli.py | 24 +++++++++++++++++------- git_fleximod/git_fleximod.py | 10 +++++++--- 2 files changed, 24 insertions(+), 10 deletions(-) diff --git a/git_fleximod/cli.py b/git_fleximod/cli.py index 1fb959dad0..3eb90870dc 100644 --- a/git_fleximod/cli.py +++ b/git_fleximod/cli.py @@ -1,17 +1,27 @@ from pathlib import Path import argparse 
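+# In a linked git worktree ".git" is a plain file rather than a directory, so the
+# root search below looks for the highest directory containing ".gitmodules" and
+# calls utils.fatal_error() when none is found.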
+from git_fleximod import utils __version__ = "0.7.4" -def find_root_dir(filename=".git"): +def find_root_dir(filename=".gitmodules"): + """ finds the highest directory in tree + which contains a file called filename """ d = Path.cwd() root = Path(d.root) - while d != root: - attempt = d / filename - if attempt.is_dir(): - return attempt - d = d.parent - return None + dirlist = [] + dl = d + while dl != root: + dirlist.append(dl) + dl = dl.parent + dirlist.append(root) + dirlist.reverse() + + for dl in dirlist: + attempt = dl / filename + if attempt.is_file(): + return dl + utils.fatal_error("No .gitmodules found in directory tree") def get_parser(): diff --git a/git_fleximod/git_fleximod.py b/git_fleximod/git_fleximod.py index 103cc82a50..f080513a52 100755 --- a/git_fleximod/git_fleximod.py +++ b/git_fleximod/git_fleximod.py @@ -312,7 +312,11 @@ def submodules_status(gitmodules, root_dir, toplevel=False): with utils.pushd(newpath): git = GitInterface(newpath, logger) atag = git.git_operation("describe", "--tags", "--always").rstrip() - ahash = git.git_operation("status").partition("\n")[0].split()[-1] + part = git.git_operation("status").partition("\n")[0] + # fake hash to initialize + ahash = "xxxx" + if part: + ahash = part.split()[-1] if tag and atag == tag: print(f" {name:>20} at tag {tag}") elif tag and ahash[: len(tag)] == tag: @@ -554,8 +558,8 @@ def main(): global logger logger = logging.getLogger(__name__) - logger.info("action is {}".format(action)) - + logger.info("action is {} root_dir={} file_name={}".format(action, root_dir, file_name)) + if not os.path.isfile(os.path.join(root_dir, file_name)): file_path = utils.find_upwards(root_dir, file_name) From 1fddaac99d83852380738c44ca33f615898123ca Mon Sep 17 00:00:00 2001 From: Jim Edwards Date: Fri, 31 May 2024 10:09:31 -0600 Subject: [PATCH 139/161] return str not Path --- License | 2 +- README.md | 2 -- git_fleximod/cli.py | 2 +- 3 files changed, 2 insertions(+), 4 deletions(-) diff --git a/License b/License index 2c6fe768c2..88bc22515e 100644 --- a/License +++ b/License @@ -1,4 +1,4 @@ -Copyright 2024 National Center for Atmospheric Sciences (NCAR) +Copyright 2024 NSF National Center for Atmospheric Sciences (NCAR) Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the diff --git a/README.md b/README.md index d1ef632f28..53917da400 100644 --- a/README.md +++ b/README.md @@ -8,8 +8,6 @@ Git-fleximod is a Python-based tool that extends Git's submodule and sparse chec ## Installation -#TODO Install using pip: -# pip install git-fleximod If you choose to locate git-fleximod in your path you can access it via command: git fleximod ## Usage diff --git a/git_fleximod/cli.py b/git_fleximod/cli.py index 3eb90870dc..d24d07c59c 100644 --- a/git_fleximod/cli.py +++ b/git_fleximod/cli.py @@ -20,7 +20,7 @@ def find_root_dir(filename=".gitmodules"): for dl in dirlist: attempt = dl / filename if attempt.is_file(): - return dl + return str(dl) utils.fatal_error("No .gitmodules found in directory tree") From a354b0528228ac89ce103b3b42b2a203fe495ba5 Mon Sep 17 00:00:00 2001 From: Jim Edwards Date: Fri, 31 May 2024 10:14:11 -0600 Subject: [PATCH 140/161] Bump to 0.7.5 --- git_fleximod/cli.py | 2 +- pyproject.toml | 2 +- tbump.toml | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/git_fleximod/cli.py b/git_fleximod/cli.py index d24d07c59c..4c3fb1a8f9 100644 --- a/git_fleximod/cli.py +++ b/git_fleximod/cli.py @@ -2,7 +2,7 @@ import argparse from 
git_fleximod import utils -__version__ = "0.7.4" +__version__ = "0.7.5" def find_root_dir(filename=".gitmodules"): """ finds the highest directory in tree diff --git a/pyproject.toml b/pyproject.toml index 2484552e4f..ac1684ea0f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "git-fleximod" -version = "0.7.4" +version = "0.7.5" description = "Extended support for git-submodule and git-sparse-checkout" authors = ["Jim Edwards "] maintainers = ["Jim Edwards "] diff --git a/tbump.toml b/tbump.toml index d4b8eaee11..e644fc4d24 100644 --- a/tbump.toml +++ b/tbump.toml @@ -2,7 +2,7 @@ github_url = "https://github.com/jedwards4b/git-fleximod/" [version] -current = "0.7.4" +current = "0.7.5" # Example of a semver regexp. # Make sure this matches current_version before From e9b20b57db64c0854dd0dcbff5618f44bbe2d589 Mon Sep 17 00:00:00 2001 From: Jim Edwards Date: Fri, 31 May 2024 10:25:10 -0600 Subject: [PATCH 141/161] Squashed '.lib/git-fleximod/' changes from 94121295..a354b052 a354b052 Bump to 0.7.5 71273de6 Merge pull request #37 from ESMCI/fix/git_workflow 1fddaac9 return str not Path 4aa073c3 update find_root_dir to work with git worktree git-subtree-dir: .lib/git-fleximod git-subtree-split: a354b0528228ac89ce103b3b42b2a203fe495ba5 --- License | 2 +- README.md | 2 -- git_fleximod/cli.py | 26 ++++++++++++++++++-------- git_fleximod/git_fleximod.py | 10 +++++++--- pyproject.toml | 2 +- tbump.toml | 2 +- 6 files changed, 28 insertions(+), 16 deletions(-) diff --git a/License b/License index 2c6fe768c2..88bc22515e 100644 --- a/License +++ b/License @@ -1,4 +1,4 @@ -Copyright 2024 National Center for Atmospheric Sciences (NCAR) +Copyright 2024 NSF National Center for Atmospheric Sciences (NCAR) Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the diff --git a/README.md b/README.md index d1ef632f28..53917da400 100644 --- a/README.md +++ b/README.md @@ -8,8 +8,6 @@ Git-fleximod is a Python-based tool that extends Git's submodule and sparse chec ## Installation -#TODO Install using pip: -# pip install git-fleximod If you choose to locate git-fleximod in your path you can access it via command: git fleximod ## Usage diff --git a/git_fleximod/cli.py b/git_fleximod/cli.py index 1fb959dad0..4c3fb1a8f9 100644 --- a/git_fleximod/cli.py +++ b/git_fleximod/cli.py @@ -1,17 +1,27 @@ from pathlib import Path import argparse +from git_fleximod import utils -__version__ = "0.7.4" +__version__ = "0.7.5" -def find_root_dir(filename=".git"): +def find_root_dir(filename=".gitmodules"): + """ finds the highest directory in tree + which contains a file called filename """ d = Path.cwd() root = Path(d.root) - while d != root: - attempt = d / filename - if attempt.is_dir(): - return attempt - d = d.parent - return None + dirlist = [] + dl = d + while dl != root: + dirlist.append(dl) + dl = dl.parent + dirlist.append(root) + dirlist.reverse() + + for dl in dirlist: + attempt = dl / filename + if attempt.is_file(): + return str(dl) + utils.fatal_error("No .gitmodules found in directory tree") def get_parser(): diff --git a/git_fleximod/git_fleximod.py b/git_fleximod/git_fleximod.py index 103cc82a50..f080513a52 100755 --- a/git_fleximod/git_fleximod.py +++ b/git_fleximod/git_fleximod.py @@ -312,7 +312,11 @@ def submodules_status(gitmodules, root_dir, toplevel=False): with utils.pushd(newpath): git = GitInterface(newpath, logger) atag = git.git_operation("describe", "--tags", "--always").rstrip() - ahash = 
git.git_operation("status").partition("\n")[0].split()[-1] + part = git.git_operation("status").partition("\n")[0] + # fake hash to initialize + ahash = "xxxx" + if part: + ahash = part.split()[-1] if tag and atag == tag: print(f" {name:>20} at tag {tag}") elif tag and ahash[: len(tag)] == tag: @@ -554,8 +558,8 @@ def main(): global logger logger = logging.getLogger(__name__) - logger.info("action is {}".format(action)) - + logger.info("action is {} root_dir={} file_name={}".format(action, root_dir, file_name)) + if not os.path.isfile(os.path.join(root_dir, file_name)): file_path = utils.find_upwards(root_dir, file_name) diff --git a/pyproject.toml b/pyproject.toml index 2484552e4f..ac1684ea0f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "git-fleximod" -version = "0.7.4" +version = "0.7.5" description = "Extended support for git-submodule and git-sparse-checkout" authors = ["Jim Edwards "] maintainers = ["Jim Edwards "] diff --git a/tbump.toml b/tbump.toml index d4b8eaee11..e644fc4d24 100644 --- a/tbump.toml +++ b/tbump.toml @@ -2,7 +2,7 @@ github_url = "https://github.com/jedwards4b/git-fleximod/" [version] -current = "0.7.4" +current = "0.7.5" # Example of a semver regexp. # Make sure this matches current_version before From 0da3e6394140b520ef973d1b8f786187c6a751b2 Mon Sep 17 00:00:00 2001 From: Jim Edwards Date: Fri, 31 May 2024 10:29:20 -0600 Subject: [PATCH 142/161] update fv3 interface --- .gitmodules | 2 +- src/dynamics/fv3 | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.gitmodules b/.gitmodules index 37fba58bd5..51710c0156 100644 --- a/.gitmodules +++ b/.gitmodules @@ -44,7 +44,7 @@ path = src/dynamics/fv3 url = https://github.com/ESCOMP/CAM_FV3_interface.git fxrequired = AlwaysRequired - fxtag = fv3int_022824 + fxtag = fv3int_053124 fxDONOTUSEurl = https://github.com/ESCOMP/CAM_FV3_interface.git [submodule "geoschem"] diff --git a/src/dynamics/fv3 b/src/dynamics/fv3 index 38b889fcff..df3550b0f6 160000 --- a/src/dynamics/fv3 +++ b/src/dynamics/fv3 @@ -1 +1 @@ -Subproject commit 38b889fcfff1e316ef5b4d4d02ecc47c801b8669 +Subproject commit df3550b0f6a835778f32ccc8c6291942e0413f62 From 6f1f86b428082e31da2bc6f6f50593a1c160052a Mon Sep 17 00:00:00 2001 From: Jim Edwards Date: Fri, 31 May 2024 10:31:50 -0600 Subject: [PATCH 143/161] update externals --- components/mizuRoute | 2 +- src/hemco | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/components/mizuRoute b/components/mizuRoute index c55bd1ab74..81c720c7ee 160000 --- a/components/mizuRoute +++ b/components/mizuRoute @@ -1 +1 @@ -Subproject commit c55bd1ab746734ea77a00606dde895ab034edf1a +Subproject commit 81c720c7ee51f9c69f2934f696078c42f4493565 diff --git a/src/hemco b/src/hemco index 3a6d999ab0..83127485c1 160000 --- a/src/hemco +++ b/src/hemco @@ -1 +1 @@ -Subproject commit 3a6d999ab0dbee9f03ab6b9a13dd3b6d9670eb54 +Subproject commit 83127485c149846ba79d5472ae131acd7bda0544 From 87a14836dfbaf467c1595b34fadcad3de9a590d9 Mon Sep 17 00:00:00 2001 From: Jim Edwards Date: Fri, 31 May 2024 10:33:50 -0600 Subject: [PATCH 144/161] update ctsm external --- components/clm | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/components/clm b/components/clm index 5cdf117265..a7f62dab98 160000 --- a/components/clm +++ b/components/clm @@ -1 +1 @@ -Subproject commit 5cdf117265bb457ed64af741be0caed593a3a82b +Subproject commit a7f62dab9830cf0fc6246ce75afff3a3412f1aab From 00b7b38174360d20b9c2bdeeb8d09f1ac0656fca Mon Sep 17 00:00:00 2001 From: Jim Edwards 
Date: Sat, 1 Jun 2024 11:41:49 -0600 Subject: [PATCH 145/161] some fixes when modifying url --- git_fleximod/git_fleximod.py | 49 ++++++++++++++++++++---------------- git_fleximod/gitmodules.py | 2 +- 2 files changed, 28 insertions(+), 23 deletions(-) diff --git a/git_fleximod/git_fleximod.py b/git_fleximod/git_fleximod.py index f080513a52..a003c0da8c 100755 --- a/git_fleximod/git_fleximod.py +++ b/git_fleximod/git_fleximod.py @@ -263,6 +263,21 @@ def single_submodule_checkout( return +def add_remote(git, url): + remotes = git.git_operation("remote", "-v") + newremote = "newremote.00" + if url in remotes: + for line in remotes: + if url in line and "fetch" in line: + newremote = line.split()[0] + break + else: + i = 0 + while "newremote" in remotes: + i = i + 1 + newremote = f"newremote.{i:02d}" + git.git_operation("remote", "add", newremote, url) + return newremote def submodules_status(gitmodules, root_dir, toplevel=False): testfails = 0 @@ -271,6 +286,7 @@ def submodules_status(gitmodules, root_dir, toplevel=False): for name in gitmodules.sections(): path = gitmodules.get(name, "path") tag = gitmodules.get(name, "fxtag") + url = gitmodules.get(name, "url") required = gitmodules.get(name, "fxrequired") level = required and "Toplevel" in required if not path: @@ -280,7 +296,6 @@ def submodules_status(gitmodules, root_dir, toplevel=False): if not os.path.exists(os.path.join(newpath, ".git")): rootgit = GitInterface(root_dir, logger) # submodule commands use path, not name - url = gitmodules.get(name, "url") url = url.replace("git@github.com:", "https://github.com/") tags = rootgit.git_operation("ls-remote", "--tags", url) atag = None @@ -312,11 +327,11 @@ def submodules_status(gitmodules, root_dir, toplevel=False): with utils.pushd(newpath): git = GitInterface(newpath, logger) atag = git.git_operation("describe", "--tags", "--always").rstrip() - part = git.git_operation("status").partition("\n")[0] - # fake hash to initialize - ahash = "xxxx" - if part: - ahash = part.split()[-1] + ahash = git.git_operation("rev-list", "HEAD").partition("\n")[0] + rurl = git.git_operation("ls-remote","--get-url").rstrip() + if rurl != url: + remote = add_remote(git, url) + git.git_operation("fetch", remote) if tag and atag == tag: print(f" {name:>20} at tag {tag}") elif tag and ahash[: len(tag)] == tag: @@ -335,7 +350,7 @@ def submodules_status(gitmodules, root_dir, toplevel=False): ) testfails += 1 - status = git.git_operation("status", "--ignore-submodules") + status = git.git_operation("status", "--ignore-submodules", "-uno") if "nothing to commit" not in status: localmods = localmods + 1 print("M" + textwrap.indent(status, " ")) @@ -357,11 +372,11 @@ def submodules_update(gitmodules, root_dir, requiredlist, force): path = gitmodules.get(name, "path") url = gitmodules.get(name, "url") logger.info( - "name={} path={} url={} fxtag={} requiredlist={}".format( + "name={} path={} url={} fxtag={} requiredlist={} ".format( name, os.path.join(root_dir, path), url, fxtag, requiredlist ) ) - # if not os.path.exists(os.path.join(root_dir,path, ".git")): + fxrequired = gitmodules.get(name, "fxrequired") assert fxrequired in fxrequired_allowed_values() rgit = GitInterface(root_dir, logger) @@ -409,19 +424,7 @@ def submodules_update(gitmodules, root_dir, requiredlist, force): upstream = git.git_operation("ls-remote", "--get-url").rstrip() newremote = "origin" if upstream != url: - # TODO - this needs to be a unique name - remotes = git.git_operation("remote", "-v") - if url in remotes: - for line in remotes: - if url 
in line and "fetch" in line: - newremote = line.split()[0] - break - else: - i = 0 - while newremote in remotes: - i = i + 1 - newremote = f"newremote.{i:02d}" - git.git_operation("remote", "add", newremote, url) + add_remote(git, url) tags = git.git_operation("tag", "-l") if fxtag and fxtag not in tags: @@ -439,6 +442,8 @@ def submodules_update(gitmodules, root_dir, requiredlist, force): print(f"{name:>20} up to date.") + + def local_mods_output(): text = """\ The submodules labeled with 'M' above are not in a clean state. diff --git a/git_fleximod/gitmodules.py b/git_fleximod/gitmodules.py index 68c82d066f..7e4e05394a 100644 --- a/git_fleximod/gitmodules.py +++ b/git_fleximod/gitmodules.py @@ -59,7 +59,7 @@ def get(self, name, option, raw=False, vars=None, fallback=None): Uses the parent class's get method to access the value. Handles potential errors if the section or option doesn't exist. """ - self.logger.debug("get called {} {}".format(name, option)) + self.logger.debug("git get called {} {}".format(name, option)) section = f'submodule "{name}"' try: return ConfigParser.get( From a34070be02d10071b544be3839f6c9ed79662665 Mon Sep 17 00:00:00 2001 From: Jim Edwards Date: Sat, 1 Jun 2024 11:46:24 -0600 Subject: [PATCH 146/161] Bump to 0.7.6 --- git_fleximod/cli.py | 2 +- pyproject.toml | 2 +- tbump.toml | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/git_fleximod/cli.py b/git_fleximod/cli.py index 4c3fb1a8f9..7d09abd83b 100644 --- a/git_fleximod/cli.py +++ b/git_fleximod/cli.py @@ -2,7 +2,7 @@ import argparse from git_fleximod import utils -__version__ = "0.7.5" +__version__ = "0.7.6" def find_root_dir(filename=".gitmodules"): """ finds the highest directory in tree diff --git a/pyproject.toml b/pyproject.toml index ac1684ea0f..3c70c6c9c7 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "git-fleximod" -version = "0.7.5" +version = "0.7.6" description = "Extended support for git-submodule and git-sparse-checkout" authors = ["Jim Edwards "] maintainers = ["Jim Edwards "] diff --git a/tbump.toml b/tbump.toml index e644fc4d24..3854682df4 100644 --- a/tbump.toml +++ b/tbump.toml @@ -2,7 +2,7 @@ github_url = "https://github.com/jedwards4b/git-fleximod/" [version] -current = "0.7.5" +current = "0.7.6" # Example of a semver regexp. 
# Make sure this matches current_version before From 5ceadc9e923b337e206e03d73a35b040d908e75a Mon Sep 17 00:00:00 2001 From: Jim Edwards Date: Sat, 1 Jun 2024 12:01:40 -0600 Subject: [PATCH 147/161] remove git-fleximod --- .../git-fleximod/.github/workflows/pre-commit | 13 - .../.github/workflows/pytest.yaml | 77 -- .lib/git-fleximod/.pre-commit-config.yaml | 18 - .lib/git-fleximod/License | 20 - .lib/git-fleximod/README.md | 108 --- .lib/git-fleximod/doc/Makefile | 20 - .lib/git-fleximod/doc/conf.py | 26 - .lib/git-fleximod/doc/index.rst | 24 - .lib/git-fleximod/doc/make.bat | 35 - .lib/git-fleximod/escomp_install | 25 - .lib/git-fleximod/git_fleximod/__init__.py | 0 .lib/git-fleximod/git_fleximod/cli.py | 129 ---- .../git-fleximod/git_fleximod/git_fleximod.py | 605 --------------- .../git-fleximod/git_fleximod/gitinterface.py | 79 -- .lib/git-fleximod/git_fleximod/gitmodules.py | 97 --- .../git-fleximod/git_fleximod/lstripreader.py | 43 -- .lib/git-fleximod/git_fleximod/metoflexi.py | 236 ------ .lib/git-fleximod/git_fleximod/utils.py | 365 --------- .lib/git-fleximod/poetry.lock | 693 ------------------ .lib/git-fleximod/pyproject.toml | 41 -- .lib/git-fleximod/tbump.toml | 43 -- .lib/git-fleximod/tests/__init__.py | 3 - .lib/git-fleximod/tests/conftest.py | 138 ---- .lib/git-fleximod/tests/test_a_import.py | 8 - .lib/git-fleximod/tests/test_b_update.py | 26 - .lib/git-fleximod/tests/test_c_required.py | 30 - .lib/git-fleximod/tests/test_d_complex.py | 67 -- 27 files changed, 2969 deletions(-) delete mode 100644 .lib/git-fleximod/.github/workflows/pre-commit delete mode 100644 .lib/git-fleximod/.github/workflows/pytest.yaml delete mode 100644 .lib/git-fleximod/.pre-commit-config.yaml delete mode 100644 .lib/git-fleximod/License delete mode 100644 .lib/git-fleximod/README.md delete mode 100644 .lib/git-fleximod/doc/Makefile delete mode 100644 .lib/git-fleximod/doc/conf.py delete mode 100644 .lib/git-fleximod/doc/index.rst delete mode 100644 .lib/git-fleximod/doc/make.bat delete mode 100644 .lib/git-fleximod/escomp_install delete mode 100644 .lib/git-fleximod/git_fleximod/__init__.py delete mode 100644 .lib/git-fleximod/git_fleximod/cli.py delete mode 100755 .lib/git-fleximod/git_fleximod/git_fleximod.py delete mode 100644 .lib/git-fleximod/git_fleximod/gitinterface.py delete mode 100644 .lib/git-fleximod/git_fleximod/gitmodules.py delete mode 100644 .lib/git-fleximod/git_fleximod/lstripreader.py delete mode 100755 .lib/git-fleximod/git_fleximod/metoflexi.py delete mode 100644 .lib/git-fleximod/git_fleximod/utils.py delete mode 100644 .lib/git-fleximod/poetry.lock delete mode 100644 .lib/git-fleximod/pyproject.toml delete mode 100644 .lib/git-fleximod/tbump.toml delete mode 100644 .lib/git-fleximod/tests/__init__.py delete mode 100644 .lib/git-fleximod/tests/conftest.py delete mode 100644 .lib/git-fleximod/tests/test_a_import.py delete mode 100644 .lib/git-fleximod/tests/test_b_update.py delete mode 100644 .lib/git-fleximod/tests/test_c_required.py delete mode 100644 .lib/git-fleximod/tests/test_d_complex.py diff --git a/.lib/git-fleximod/.github/workflows/pre-commit b/.lib/git-fleximod/.github/workflows/pre-commit deleted file mode 100644 index 1a6ad0082a..0000000000 --- a/.lib/git-fleximod/.github/workflows/pre-commit +++ /dev/null @@ -1,13 +0,0 @@ -name: pre-commit -on: - pull_request: - push: - branches: [main] - -jobs: - pre-commit: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v3 - - uses: actions/setup-python@v3 - - uses: pre-commit/action@v3.0.0 diff --git 
a/.lib/git-fleximod/.github/workflows/pytest.yaml b/.lib/git-fleximod/.github/workflows/pytest.yaml deleted file mode 100644 index 0868dd9a33..0000000000 --- a/.lib/git-fleximod/.github/workflows/pytest.yaml +++ /dev/null @@ -1,77 +0,0 @@ -# Run this job on pushes to `main`, and for pull requests. If you don't specify -# `branches: [main], then this actions runs _twice_ on pull requests, which is -# annoying. - -on: - push: - branches: [main] - pull_request: - branches: [main] - -jobs: - test: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - - # If you wanted to use multiple Python versions, you'd have specify a matrix in the job and - # reference the matrixe python version here. - - uses: actions/setup-python@v5 - with: - python-version: '3.9' - - # Cache the installation of Poetry itself, e.g. the next step. This prevents the workflow - # from installing Poetry every time, which can be slow. Note the use of the Poetry version - # number in the cache key, and the "-0" suffix: this allows you to invalidate the cache - # manually if/when you want to upgrade Poetry, or if something goes wrong. This could be - # mildly cleaner by using an environment variable, but I don't really care. - - name: cache poetry install - uses: actions/cache@v4 - with: - path: ~/.local - key: poetry-1.7.1 - - # Install Poetry. You could do this manually, or there are several actions that do this. - # `snok/install-poetry` seems to be minimal yet complete, and really just calls out to - # Poetry's default install script, which feels correct. I pin the Poetry version here - # because Poetry does occasionally change APIs between versions and I don't want my - # actions to break if it does. - # - # The key configuration value here is `virtualenvs-in-project: true`: this creates the - # venv as a `.venv` in your testing directory, which allows the next step to easily - # cache it. - - uses: snok/install-poetry@v1 - with: - version: 1.7.1 - virtualenvs-create: true - virtualenvs-in-project: true - - # Cache your dependencies (i.e. all the stuff in your `pyproject.toml`). Note the cache - # key: if you're using multiple Python versions, or multiple OSes, you'd need to include - # them in the cache key. I'm not, so it can be simple and just depend on the poetry.lock. - - name: cache deps - id: cache-deps - uses: actions/cache@v4 - with: - path: .venv - key: pydeps-${{ hashFiles('**/poetry.lock') }} - - # Install dependencies. `--no-root` means "install all dependencies but not the project - # itself", which is what you want to avoid caching _your_ code. The `if` statement - # ensures this only runs on a cache miss. - - run: poetry install --no-interaction --no-root - if: steps.cache-deps.outputs.cache-hit != 'true' - - # Now install _your_ project. This isn't necessary for many types of projects -- particularly - # things like Django apps don't need this. But it's a good idea since it fully-exercises the - # pyproject.toml and makes that if you add things like console-scripts at some point that - # they'll be installed and working. - - run: poetry install --no-interaction - - # And finally run tests. I'm using pytest and all my pytest config is in my `pyproject.toml` - # so this line is super-simple. But it could be as complex as you need. 
- - run: | - git config --global user.name "${GITHUB_ACTOR}" - git config --global user.email "${GITHUB_ACTOR_ID}+${GITHUB_ACTOR}@users.noreply.github.com" - poetry run pytest - diff --git a/.lib/git-fleximod/.pre-commit-config.yaml b/.lib/git-fleximod/.pre-commit-config.yaml deleted file mode 100644 index 2f6089da72..0000000000 --- a/.lib/git-fleximod/.pre-commit-config.yaml +++ /dev/null @@ -1,18 +0,0 @@ -exclude: ^utils/.*$ - -repos: - - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.0.1 - hooks: - - id: end-of-file-fixer - - id: trailing-whitespace - - repo: https://github.com/psf/black - rev: 22.3.0 - hooks: - - id: black - - repo: https://github.com/PyCQA/pylint - rev: v2.11.1 - hooks: - - id: pylint - args: - - --disable=I,C,R,logging-not-lazy,wildcard-import,unused-wildcard-import,fixme,broad-except,bare-except,eval-used,exec-used,global-statement,logging-format-interpolation,no-name-in-module,arguments-renamed,unspecified-encoding,protected-access,import-error,no-member diff --git a/.lib/git-fleximod/License b/.lib/git-fleximod/License deleted file mode 100644 index 88bc22515e..0000000000 --- a/.lib/git-fleximod/License +++ /dev/null @@ -1,20 +0,0 @@ -Copyright 2024 NSF National Center for Atmospheric Sciences (NCAR) - -Permission is hereby granted, free of charge, to any person obtaining -a copy of this software and associated documentation files (the -“Softwareâ€), to deal in the Software without restriction, including -without limitation the rights to use, copy, modify, merge, publish, -distribute, sublicense, and/or sell copies of the Software, and to -permit persons to whom the Software is furnished to do so, subject to -the following conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED “AS ISâ€, WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/.lib/git-fleximod/README.md b/.lib/git-fleximod/README.md deleted file mode 100644 index 53917da400..0000000000 --- a/.lib/git-fleximod/README.md +++ /dev/null @@ -1,108 +0,0 @@ -# git-fleximod - -Flexible, Enhanced Submodule Management for Git - -## Overview - -Git-fleximod is a Python-based tool that extends Git's submodule and sparse checkout capabilities, offering additional features for managing submodules in a more flexible and efficient way. - -## Installation - - If you choose to locate git-fleximod in your path you can access it via command: git fleximod - -## Usage - - Basic Usage: - git fleximod [options] - Available Commands: - status: Display the status of submodules. - update: Update submodules to the tag indicated in .gitmodules variable fxtag. - test: Make sure that fxtags and submodule hashes are consistant, - make sure that official urls (as defined by fxDONOTUSEurl) are set - make sure that fxtags are defined for all submodules - Additional Options: - See git fleximod --help for more details. - -## Supported .gitmodules Variables - - fxtag: Specify a specific tag or branch to checkout for a submodule. 
- fxrequired: Mark a submodule's checkout behavior, with allowed values: - - ToplevelRequired: Top-level and required (checked out only when this is the Toplevel module). - - ToplevelOptional: Top-level and optional (checked out with --optional flag if this is the Toplevel module). - - AlwaysRequired: Always required (always checked out). - - AlwaysOptional: Always optional (checked out with --optional flag). - fxsparse: Enable sparse checkout for a submodule, pointing to a file containing sparse checkout paths. - fxDONOTUSEurl: This is the url used in the test subcommand to assure that protected branches do not point to forks - **NOTE** the fxDONOTUSEurl variable is only used to identify the official project repository and should not be - changed by users. Use the url variable to change to a fork if desired. - -## Sparse Checkouts - - To enable sparse checkout for a submodule, set the fxsparse variable - in the .gitmodules file to the path of a file containing the desired - sparse checkout paths. Git-fleximod will automatically configure - sparse checkout based on this file when applicable commands are run. - See [git-sparse-checkout](https://git-scm.com/docs/git-sparse-checkout#_internalsfull_pattern_set) - for details on the format of this file. - -## Tests - - The git fleximod test action is designed to be used by, for example, github workflows - to assure that protected branches are consistant with respect to submodule hashes and fleximod fxtags - -## Examples - -Here are some common usage examples: - -Update all submodules, including optional ones: -```bash - git fleximod update --optional -``` - -Updating a specific submodule to the fxtag indicated in .gitmodules: - -```bash - git fleximod update submodule-name -``` -Example .gitmodules entry: -```ini, toml - [submodule "cosp2"] - path = src/physics/cosp2/src - url = https://github.com/CFMIP/COSPv2.0 - fxsparse = ../.cosp_sparse_checkout - fxrequired = AlwaysRequired - fxtag = v2.1.4cesm -``` -Explanation: - -This entry indicates that the submodule named cosp2 at tag v2.1.4cesm -should be checked out into the directory src/physics/cosp2/src -relative to the .gitmodules directory. It should be checked out from -the URL https://github.com/CFMIP/COSPv2.0 and use sparse checkout as -described in the file ../.cosp_sparse_checkout relative to the path -directory. It should be checked out anytime this .gitmodules entry is -read. - -Additional example: -```ini, toml - [submodule "cime"] - path = cime - url = https://github.com/jedwards4b/cime - fxrequired = ToplevelRequired - fxtag = cime6.0.198_rme01 -``` - -Explanation: - -This entry indicates that the submodule cime should be checked out -into a directory named cime at tag cime6.0.198_rme01 from the URL -https://github.com/jedwards4b/cime. This should only be done if -the .gitmodules file is at the top level of the repository clone. - -## Contributing - -We welcome contributions! Please see the CONTRIBUTING.md file for guidelines. - -## License - -Git-fleximod is released under the MIT License. diff --git a/.lib/git-fleximod/doc/Makefile b/.lib/git-fleximod/doc/Makefile deleted file mode 100644 index d4bb2cbb9e..0000000000 --- a/.lib/git-fleximod/doc/Makefile +++ /dev/null @@ -1,20 +0,0 @@ -# Minimal makefile for Sphinx documentation -# - -# You can set these variables from the command line, and also -# from the environment for the first two. -SPHINXOPTS ?= -SPHINXBUILD ?= sphinx-build -SOURCEDIR = . 
-BUILDDIR = _build - -# Put it first so that "make" without argument is like "make help". -help: - @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) - -.PHONY: help Makefile - -# Catch-all target: route all unknown targets to Sphinx using the new -# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). -%: Makefile - @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) diff --git a/.lib/git-fleximod/doc/conf.py b/.lib/git-fleximod/doc/conf.py deleted file mode 100644 index 423099eec9..0000000000 --- a/.lib/git-fleximod/doc/conf.py +++ /dev/null @@ -1,26 +0,0 @@ -# Configuration file for the Sphinx documentation builder. -# -# For the full list of built-in configuration values, see the documentation: -# https://www.sphinx-doc.org/en/master/usage/configuration.html - -# -- Project information ----------------------------------------------------- -# https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information - -project = "git-fleximod" -author = "Jim Edwards " -release = "0.4.0" - -# -- General configuration --------------------------------------------------- -# https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration - -extensions = ["sphinx_argparse_cli"] - -templates_path = ["_templates"] -exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"] - - -# -- Options for HTML output ------------------------------------------------- -# https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-output - -html_theme = "alabaster" -html_static_path = ["_static"] diff --git a/.lib/git-fleximod/doc/index.rst b/.lib/git-fleximod/doc/index.rst deleted file mode 100644 index 0f9c1a7f7e..0000000000 --- a/.lib/git-fleximod/doc/index.rst +++ /dev/null @@ -1,24 +0,0 @@ -.. git-fleximod documentation master file, created by - sphinx-quickstart on Sat Feb 3 12:02:22 2024. - You can adapt this file completely to your liking, but it should at least - contain the root `toctree` directive. - -Welcome to git-fleximod's documentation! -======================================== - -.. toctree:: - :maxdepth: 2 - :caption: Contents: -.. module:: sphinxcontrib.autoprogram -.. sphinx_argparse_cli:: - :module: git_fleximod.cli - :func: get_parser - :prog: git-fleximod - - -Indices and tables -================== - -* :ref:`genindex` -* :ref:`modindex` -* :ref:`search` diff --git a/.lib/git-fleximod/doc/make.bat b/.lib/git-fleximod/doc/make.bat deleted file mode 100644 index 32bb24529f..0000000000 --- a/.lib/git-fleximod/doc/make.bat +++ /dev/null @@ -1,35 +0,0 @@ -@ECHO OFF - -pushd %~dp0 - -REM Command file for Sphinx documentation - -if "%SPHINXBUILD%" == "" ( - set SPHINXBUILD=sphinx-build -) -set SOURCEDIR=. -set BUILDDIR=_build - -%SPHINXBUILD% >NUL 2>NUL -if errorlevel 9009 ( - echo. - echo.The 'sphinx-build' command was not found. Make sure you have Sphinx - echo.installed, then set the SPHINXBUILD environment variable to point - echo.to the full path of the 'sphinx-build' executable. Alternatively you - echo.may add the Sphinx directory to PATH. - echo. 
- echo.If you don't have Sphinx installed, grab it from - echo.https://www.sphinx-doc.org/ - exit /b 1 -) - -if "%1" == "" goto help - -%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% -goto end - -:help -%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% - -:end -popd diff --git a/.lib/git-fleximod/escomp_install b/.lib/git-fleximod/escomp_install deleted file mode 100644 index ae782e72a4..0000000000 --- a/.lib/git-fleximod/escomp_install +++ /dev/null @@ -1,25 +0,0 @@ -#!/usr/bin/env python -# updates git-fleximod in an ESCOMP model -# this script should be run from the model root directory, it expects -# git-fleximod to already be installed with the script in bin -# and the classes in lib/python/site-packages -import sys -import shutil -import os - -from glob import iglob - -fleximod_root = sys.argv[1] -fleximod_path = os.path.join(fleximod_root,"src","git-fleximod") -if os.path.isfile(fleximod_path): - with open(fleximod_path,"r") as f: - fleximod = f.readlines() - with open(os.path.join(".","bin","git-fleximod"),"w") as f: - for line in fleximod: - f.write(line) - if "import argparse" in line: - f.write('\nsys.path.append(os.path.join(os.path.dirname(__file__),"..","lib","python","site-packages"))\n\n') - - for file in iglob(os.path.join(fleximod_root, "src", "fleximod", "*.py")): - shutil.copy(file, - os.path.join("lib","python","site-packages","fleximod",os.path.basename(file))) diff --git a/.lib/git-fleximod/git_fleximod/__init__.py b/.lib/git-fleximod/git_fleximod/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/.lib/git-fleximod/git_fleximod/cli.py b/.lib/git-fleximod/git_fleximod/cli.py deleted file mode 100644 index 4c3fb1a8f9..0000000000 --- a/.lib/git-fleximod/git_fleximod/cli.py +++ /dev/null @@ -1,129 +0,0 @@ -from pathlib import Path -import argparse -from git_fleximod import utils - -__version__ = "0.7.5" - -def find_root_dir(filename=".gitmodules"): - """ finds the highest directory in tree - which contains a file called filename """ - d = Path.cwd() - root = Path(d.root) - dirlist = [] - dl = d - while dl != root: - dirlist.append(dl) - dl = dl.parent - dirlist.append(root) - dirlist.reverse() - - for dl in dirlist: - attempt = dl / filename - if attempt.is_file(): - return str(dl) - utils.fatal_error("No .gitmodules found in directory tree") - - -def get_parser(): - description = """ - %(prog)s manages checking out groups of gitsubmodules with addtional support for Earth System Models - """ - parser = argparse.ArgumentParser( - description=description, formatter_class=argparse.RawDescriptionHelpFormatter - ) - - # - # user options - # - choices = ["update", "status", "test"] - parser.add_argument( - "action", - choices=choices, - default="update", - help=f"Subcommand of git-fleximod, choices are {choices[:-1]}", - ) - - parser.add_argument( - "components", - nargs="*", - help="Specific component(s) to checkout. By default, " - "all required submodules are checked out.", - ) - - parser.add_argument( - "-C", - "--path", - default=find_root_dir(), - help="Toplevel repository directory. Defaults to top git directory relative to current.", - ) - - parser.add_argument( - "-g", - "--gitmodules", - nargs="?", - default=".gitmodules", - help="The submodule description filename. 
" "Default: %(default)s.", - ) - - parser.add_argument( - "-x", - "--exclude", - nargs="*", - help="Component(s) listed in the gitmodules file which should be ignored.", - ) - parser.add_argument( - "-f", - "--force", - action="store_true", - default=False, - help="Override cautions and update or checkout over locally modified repository.", - ) - - parser.add_argument( - "-o", - "--optional", - action="store_true", - default=False, - help="By default only the required submodules " - "are checked out. This flag will also checkout the " - "optional submodules relative to the toplevel directory.", - ) - - parser.add_argument( - "-v", - "--verbose", - action="count", - default=0, - help="Output additional information to " - "the screen and log file. This flag can be " - "used up to two times, increasing the " - "verbosity level each time.", - ) - - parser.add_argument( - "-V", - "--version", - action="version", - version=f"%(prog)s {__version__}", - help="Print version and exit.", - ) - - # - # developer options - # - parser.add_argument( - "--backtrace", - action="store_true", - help="DEVELOPER: show exception backtraces as extra " "debugging output", - ) - - parser.add_argument( - "-d", - "--debug", - action="store_true", - default=False, - help="DEVELOPER: output additional debugging " - "information to the screen and log file.", - ) - - return parser diff --git a/.lib/git-fleximod/git_fleximod/git_fleximod.py b/.lib/git-fleximod/git_fleximod/git_fleximod.py deleted file mode 100755 index f080513a52..0000000000 --- a/.lib/git-fleximod/git_fleximod/git_fleximod.py +++ /dev/null @@ -1,605 +0,0 @@ -#!/usr/bin/env python -import sys - -MIN_PYTHON = (3, 7) -if sys.version_info < MIN_PYTHON: - sys.exit("Python %s.%s or later is required." % MIN_PYTHON) - -import os -import shutil -import logging -import textwrap -from git_fleximod import utils -from git_fleximod import cli -from git_fleximod.gitinterface import GitInterface -from git_fleximod.gitmodules import GitModules -from configparser import NoOptionError - -# logger variable is global -logger = None - - -def fxrequired_allowed_values(): - return ["ToplevelRequired", "ToplevelOptional", "AlwaysRequired", "AlwaysOptional"] - - -def commandline_arguments(args=None): - parser = cli.get_parser() - - if args: - options = parser.parse_args(args) - else: - options = parser.parse_args() - - # explicitly listing a component overrides the optional flag - if options.optional or options.components: - fxrequired = [ - "ToplevelRequired", - "ToplevelOptional", - "AlwaysRequired", - "AlwaysOptional", - ] - else: - fxrequired = ["ToplevelRequired", "AlwaysRequired"] - - action = options.action - if not action: - action = "update" - handlers = [logging.StreamHandler()] - - if options.debug: - try: - open("fleximod.log", "w") - except PermissionError: - sys.exit("ABORT: Could not write file fleximod.log") - level = logging.DEBUG - handlers.append(logging.FileHandler("fleximod.log")) - elif options.verbose: - level = logging.INFO - else: - level = logging.WARNING - # Configure the root logger - logging.basicConfig( - level=level, format="%(name)s - %(levelname)s - %(message)s", handlers=handlers - ) - - if hasattr(options, "version"): - exit() - - return ( - options.path, - options.gitmodules, - fxrequired, - options.components, - options.exclude, - options.force, - action, - ) - - -def submodule_sparse_checkout(root_dir, name, url, path, sparsefile, tag="master"): - """ - This function performs a sparse checkout of a git submodule. 
It does so by first creating the .git/info/sparse-checkout fileq - in the submodule and then checking out the desired tag. If the submodule is already checked out, it will not be checked out again. - Creating the sparse-checkout file first prevents the entire submodule from being checked out and then removed. This is important - because the submodule may have a large number of files and checking out the entire submodule and then removing it would be time - and disk space consuming. - - Parameters: - root_dir (str): The root directory for the git operation. - name (str): The name of the submodule. - url (str): The URL of the submodule. - path (str): The path to the submodule. - sparsefile (str): The sparse file for the submodule. - tag (str, optional): The tag to checkout. Defaults to "master". - - Returns: - None - """ - logger.info("Called sparse_checkout for {}".format(name)) - rgit = GitInterface(root_dir, logger) - superroot = rgit.git_operation("rev-parse", "--show-superproject-working-tree") - if superroot: - gitroot = superroot.strip() - else: - gitroot = root_dir.strip() - assert os.path.isdir(os.path.join(gitroot, ".git")) - # first create the module directory - if not os.path.isdir(os.path.join(root_dir, path)): - os.makedirs(os.path.join(root_dir, path)) - - # initialize a new git repo and set the sparse checkout flag - sprep_repo = os.path.join(root_dir, path) - sprepo_git = GitInterface(sprep_repo, logger) - if os.path.exists(os.path.join(sprep_repo, ".git")): - try: - logger.info("Submodule {} found".format(name)) - chk = sprepo_git.config_get_value("core", "sparseCheckout") - if chk == "true": - logger.info("Sparse submodule {} already checked out".format(name)) - return - except NoOptionError: - logger.debug("Sparse submodule {} not present".format(name)) - except Exception as e: - utils.fatal_error("Unexpected error {} occured.".format(e)) - - sprepo_git.config_set_value("core", "sparseCheckout", "true") - - # set the repository remote - - logger.info("Setting remote origin in {}/{}".format(root_dir, path)) - status = sprepo_git.git_operation("remote", "-v") - if url not in status: - sprepo_git.git_operation("remote", "add", "origin", url) - - topgit = os.path.join(gitroot, ".git") - - if gitroot != root_dir and os.path.isfile(os.path.join(root_dir, ".git")): - with open(os.path.join(root_dir, ".git")) as f: - gitpath = os.path.relpath( - os.path.join(root_dir, f.read().split()[1]), - start=os.path.join(root_dir, path), - ) - topgit = os.path.join(gitpath, "modules") - else: - topgit = os.path.relpath( - os.path.join(root_dir, ".git", "modules"), - start=os.path.join(root_dir, path), - ) - - with utils.pushd(sprep_repo): - if not os.path.isdir(topgit): - os.makedirs(topgit) - topgit += os.sep + name - - if os.path.isdir(os.path.join(root_dir, path, ".git")): - with utils.pushd(sprep_repo): - shutil.move(".git", topgit) - with open(".git", "w") as f: - f.write("gitdir: " + os.path.relpath(topgit)) - # assert(os.path.isdir(os.path.relpath(topgit, start=sprep_repo))) - gitsparse = os.path.abspath(os.path.join(topgit, "info", "sparse-checkout")) - if os.path.isfile(gitsparse): - logger.warning( - "submodule {} is already initialized {}".format(name, topgit) - ) - return - - with utils.pushd(sprep_repo): - shutil.copy(sparsefile, gitsparse) - - # Finally checkout the repo - sprepo_git.git_operation("fetch", "origin", "--tags") - sprepo_git.git_operation("checkout", tag) - - print(f"Successfully checked out {name:>20} at {tag}") - rgit.config_set_value(f'submodule "{name}"', 
"active", "true") - rgit.config_set_value(f'submodule "{name}"', "url", url) - - -def single_submodule_checkout( - root, name, path, url=None, tag=None, force=False, optional=False -): - """ - This function checks out a single git submodule. - - Parameters: - root (str): The root directory for the git operation. - name (str): The name of the submodule. - path (str): The path to the submodule. - url (str, optional): The URL of the submodule. Defaults to None. - tag (str, optional): The tag to checkout. Defaults to None. - force (bool, optional): If set to True, forces the checkout operation. Defaults to False. - optional (bool, optional): If set to True, the submodule is considered optional. Defaults to False. - - Returns: - None - """ - # function implementation... - git = GitInterface(root, logger) - repodir = os.path.join(root, path) - logger.info("Checkout {} into {}/{}".format(name, root, path)) - # if url is provided update to the new url - tmpurl = None - repo_exists = False - if os.path.exists(os.path.join(repodir, ".git")): - logger.info("Submodule {} already checked out".format(name)) - repo_exists = True - # Look for a .gitmodules file in the newly checkedout repo - if not repo_exists and url: - # ssh urls cause problems for those who dont have git accounts with ssh keys defined - # but cime has one since e3sm prefers ssh to https, because the .gitmodules file was - # opened with a GitModules object we don't need to worry about restoring the file here - # it will be done by the GitModules class - if url.startswith("git@"): - tmpurl = url - url = url.replace("git@github.com:", "https://github.com/") - git.git_operation("clone", url, path) - smgit = GitInterface(repodir, logger) - if not tag: - tag = smgit.git_operation("describe", "--tags", "--always").rstrip() - smgit.git_operation("checkout", tag) - # Now need to move the .git dir to the submodule location - rootdotgit = os.path.join(root, ".git") - if os.path.isfile(rootdotgit): - with open(rootdotgit) as f: - line = f.readline() - if line.startswith("gitdir: "): - rootdotgit = line[8:].rstrip() - - newpath = os.path.abspath(os.path.join(root, rootdotgit, "modules", name)) - if os.path.exists(newpath): - shutil.rmtree(os.path.join(repodir, ".git")) - else: - shutil.move(os.path.join(repodir, ".git"), newpath) - - with open(os.path.join(repodir, ".git"), "w") as f: - f.write("gitdir: " + os.path.relpath(newpath, start=repodir)) - - if not os.path.exists(repodir): - parent = os.path.dirname(repodir) - if not os.path.isdir(parent): - os.makedirs(parent) - git.git_operation("submodule", "add", "--name", name, "--", url, path) - - if not repo_exists or not tmpurl: - git.git_operation("submodule", "update", "--init", "--", path) - - if os.path.exists(os.path.join(repodir, ".gitmodules")): - # recursively handle this checkout - print(f"Recursively checking out submodules of {name}") - gitmodules = GitModules(logger, confpath=repodir) - requiredlist = ["AlwaysRequired"] - if optional: - requiredlist.append("AlwaysOptional") - submodules_checkout(gitmodules, repodir, requiredlist, force=force) - if not os.path.exists(os.path.join(repodir, ".git")): - utils.fatal_error( - f"Failed to checkout {name} {repo_exists} {tmpurl} {repodir} {path}" - ) - - if tmpurl: - print(git.git_operation("restore", ".gitmodules")) - - return - - -def submodules_status(gitmodules, root_dir, toplevel=False): - testfails = 0 - localmods = 0 - needsupdate = 0 - for name in gitmodules.sections(): - path = gitmodules.get(name, "path") - tag = gitmodules.get(name, 
"fxtag") - required = gitmodules.get(name, "fxrequired") - level = required and "Toplevel" in required - if not path: - utils.fatal_error("No path found in .gitmodules for {}".format(name)) - newpath = os.path.join(root_dir, path) - logger.debug("newpath is {}".format(newpath)) - if not os.path.exists(os.path.join(newpath, ".git")): - rootgit = GitInterface(root_dir, logger) - # submodule commands use path, not name - url = gitmodules.get(name, "url") - url = url.replace("git@github.com:", "https://github.com/") - tags = rootgit.git_operation("ls-remote", "--tags", url) - atag = None - needsupdate += 1 - if not toplevel and level: - continue - for htag in tags.split("\n"): - if tag and tag in htag: - atag = (htag.split()[1])[10:] - break - if tag and tag == atag: - print(f"e {name:>20} not checked out, aligned at tag {tag}") - elif tag: - ahash = rootgit.git_operation( - "submodule", "status", "{}".format(path) - ).rstrip() - ahash = ahash[1 : len(tag) + 1] - if tag == ahash: - print(f"e {name:>20} not checked out, aligned at hash {ahash}") - else: - print( - f"e {name:>20} not checked out, out of sync at tag {atag}, expected tag is {tag}" - ) - testfails += 1 - else: - print(f"e {name:>20} has no fxtag defined in .gitmodules") - testfails += 1 - else: - with utils.pushd(newpath): - git = GitInterface(newpath, logger) - atag = git.git_operation("describe", "--tags", "--always").rstrip() - part = git.git_operation("status").partition("\n")[0] - # fake hash to initialize - ahash = "xxxx" - if part: - ahash = part.split()[-1] - if tag and atag == tag: - print(f" {name:>20} at tag {tag}") - elif tag and ahash[: len(tag)] == tag: - print(f" {name:>20} at hash {ahash}") - elif atag == ahash: - print(f" {name:>20} at hash {ahash}") - elif tag: - print( - f"s {name:>20} {atag} {ahash} is out of sync with .gitmodules {tag}" - ) - testfails += 1 - needsupdate += 1 - else: - print( - f"e {name:>20} has no fxtag defined in .gitmodules, module at {atag}" - ) - testfails += 1 - - status = git.git_operation("status", "--ignore-submodules") - if "nothing to commit" not in status: - localmods = localmods + 1 - print("M" + textwrap.indent(status, " ")) - - return testfails, localmods, needsupdate - - -def submodules_update(gitmodules, root_dir, requiredlist, force): - _, localmods, needsupdate = submodules_status(gitmodules, root_dir) - - if localmods and not force: - local_mods_output() - return - if needsupdate == 0: - return - - for name in gitmodules.sections(): - fxtag = gitmodules.get(name, "fxtag") - path = gitmodules.get(name, "path") - url = gitmodules.get(name, "url") - logger.info( - "name={} path={} url={} fxtag={} requiredlist={}".format( - name, os.path.join(root_dir, path), url, fxtag, requiredlist - ) - ) - # if not os.path.exists(os.path.join(root_dir,path, ".git")): - fxrequired = gitmodules.get(name, "fxrequired") - assert fxrequired in fxrequired_allowed_values() - rgit = GitInterface(root_dir, logger) - superroot = rgit.git_operation("rev-parse", "--show-superproject-working-tree") - - fxsparse = gitmodules.get(name, "fxsparse") - - if ( - fxrequired - and (superroot and "Toplevel" in fxrequired) - or fxrequired not in requiredlist - ): - if "ToplevelOptional" == fxrequired: - print("Skipping optional component {}".format(name)) - continue - if fxsparse: - logger.debug( - "Callng submodule_sparse_checkout({}, {}, {}, {}, {}, {}".format( - root_dir, name, url, path, fxsparse, fxtag - ) - ) - submodule_sparse_checkout(root_dir, name, url, path, fxsparse, tag=fxtag) - else: - logger.info( 
- "Calling submodule_checkout({},{},{},{})".format( - root_dir, name, path, url - ) - ) - - single_submodule_checkout( - root_dir, - name, - path, - url=url, - tag=fxtag, - force=force, - optional=("AlwaysOptional" in requiredlist), - ) - - if os.path.exists(os.path.join(path, ".git")): - submoddir = os.path.join(root_dir, path) - with utils.pushd(submoddir): - git = GitInterface(submoddir, logger) - # first make sure the url is correct - upstream = git.git_operation("ls-remote", "--get-url").rstrip() - newremote = "origin" - if upstream != url: - # TODO - this needs to be a unique name - remotes = git.git_operation("remote", "-v") - if url in remotes: - for line in remotes: - if url in line and "fetch" in line: - newremote = line.split()[0] - break - else: - i = 0 - while newremote in remotes: - i = i + 1 - newremote = f"newremote.{i:02d}" - git.git_operation("remote", "add", newremote, url) - - tags = git.git_operation("tag", "-l") - if fxtag and fxtag not in tags: - git.git_operation("fetch", newremote, "--tags") - atag = git.git_operation("describe", "--tags", "--always").rstrip() - if fxtag and fxtag != atag: - try: - git.git_operation("checkout", fxtag) - print(f"{name:>20} updated to {fxtag}") - except Exception as error: - print(error) - elif not fxtag: - print(f"No fxtag found for submodule {name:>20}") - else: - print(f"{name:>20} up to date.") - - -def local_mods_output(): - text = """\ - The submodules labeled with 'M' above are not in a clean state. - The following are options for how to proceed: - (1) Go into each submodule which is not in a clean state and issue a 'git status' - Either revert or commit your changes so that the submodule is in a clean state. - (2) use the --force option to git-fleximod - (3) you can name the particular submodules to update using the git-fleximod command line - (4) As a last resort you can remove the submodule (via 'rm -fr [directory]') - then rerun git-fleximod update. -""" - print(text) - - -# checkout is done by update if required so this function may be depricated -def submodules_checkout(gitmodules, root_dir, requiredlist, force=False): - """ - This function checks out all git submodules based on the provided parameters. - - Parameters: - gitmodules (ConfigParser): The gitmodules configuration. - root_dir (str): The root directory for the git operation. - requiredlist (list): The list of required modules. - force (bool, optional): If set to True, forces the checkout operation. Defaults to False. - - Returns: - None - """ - # function implementation... 
- print("") - _, localmods, needsupdate = submodules_status(gitmodules, root_dir) - if localmods and not force: - local_mods_output() - return - if not needsupdate: - return - for name in gitmodules.sections(): - fxrequired = gitmodules.get(name, "fxrequired") - fxsparse = gitmodules.get(name, "fxsparse") - fxtag = gitmodules.get(name, "fxtag") - path = gitmodules.get(name, "path") - url = gitmodules.get(name, "url") - if fxrequired and fxrequired not in requiredlist: - if "Optional" in fxrequired: - print("Skipping optional component {}".format(name)) - continue - - if fxsparse: - logger.debug( - "Callng submodule_sparse_checkout({}, {}, {}, {}, {}, {}".format( - root_dir, name, url, path, fxsparse, fxtag - ) - ) - submodule_sparse_checkout(root_dir, name, url, path, fxsparse, tag=fxtag) - else: - logger.debug( - "Calling submodule_checkout({},{},{})".format(root_dir, name, path) - ) - single_submodule_checkout( - root_dir, - name, - path, - url=url, - tag=fxtag, - force=force, - optional="AlwaysOptional" in requiredlist, - ) - - -def submodules_test(gitmodules, root_dir): - """ - This function tests the git submodules based on the provided parameters. - - It first checks that fxtags are present and in sync with submodule hashes. - Then it ensures that urls are consistent with fxurls (not forks and not ssh) - and that sparse checkout files exist. - - Parameters: - gitmodules (ConfigParser): The gitmodules configuration. - root_dir (str): The root directory for the git operation. - - Returns: - int: The number of test failures. - """ - # First check that fxtags are present and in sync with submodule hashes - testfails, localmods, needsupdate = submodules_status(gitmodules, root_dir) - print("") - # Then make sure that urls are consistant with fxurls (not forks and not ssh) - # and that sparse checkout files exist - for name in gitmodules.sections(): - url = gitmodules.get(name, "url") - fxurl = gitmodules.get(name, "fxDONOTMODIFYurl") - fxsparse = gitmodules.get(name, "fxsparse") - path = gitmodules.get(name, "path") - fxurl = fxurl[:-4] if fxurl.endswith(".git") else fxurl - url = url[:-4] if url.endswith(".git") else url - if not fxurl or url.lower() != fxurl.lower(): - print(f"{name:>20} url {url} not in sync with required {fxurl}") - testfails += 1 - if fxsparse and not os.path.isfile(os.path.join(root_dir, path, fxsparse)): - print(f"{name:>20} sparse checkout file {fxsparse} not found") - testfails += 1 - return testfails + localmods + needsupdate - - -def main(): - ( - root_dir, - file_name, - fxrequired, - includelist, - excludelist, - force, - action, - ) = commandline_arguments() - # Get a logger for the package - global logger - logger = logging.getLogger(__name__) - - logger.info("action is {} root_dir={} file_name={}".format(action, root_dir, file_name)) - - if not os.path.isfile(os.path.join(root_dir, file_name)): - file_path = utils.find_upwards(root_dir, file_name) - - if file_path is None: - utils.fatal_error( - "No {} found in {} or any of it's parents".format(file_name, root_dir) - ) - - root_dir = os.path.dirname(file_path) - logger.info( - "root_dir is {} includelist={} excludelist={}".format( - root_dir, includelist, excludelist - ) - ) - gitmodules = GitModules( - logger, - confpath=root_dir, - conffile=file_name, - includelist=includelist, - excludelist=excludelist, - ) - if not gitmodules.sections(): - sys.exit("No submodule components found") - retval = 0 - if action == "update": - submodules_update(gitmodules, root_dir, fxrequired, force) - elif action == 
"status": - tfails, lmods, updates = submodules_status(gitmodules, root_dir, toplevel=True) - if tfails + lmods + updates > 0: - print( - f" testfails = {tfails}, local mods = {lmods}, needs updates {updates}\n" - ) - if lmods > 0: - local_mods_output() - elif action == "test": - retval = submodules_test(gitmodules, root_dir) - else: - utils.fatal_error(f"unrecognized action request {action}") - return retval - - -if __name__ == "__main__": - sys.exit(main()) diff --git a/.lib/git-fleximod/git_fleximod/gitinterface.py b/.lib/git-fleximod/git_fleximod/gitinterface.py deleted file mode 100644 index 93ae38ecde..0000000000 --- a/.lib/git-fleximod/git_fleximod/gitinterface.py +++ /dev/null @@ -1,79 +0,0 @@ -import os -import sys -from . import utils -from pathlib import Path - -class GitInterface: - def __init__(self, repo_path, logger): - logger.debug("Initialize GitInterface for {}".format(repo_path)) - if isinstance(repo_path, str): - self.repo_path = Path(repo_path).resolve() - elif isinstance(repo_path, Path): - self.repo_path = repo_path.resolve() - else: - raise TypeError("repo_path must be a str or Path object") - self.logger = logger - try: - import git - - self._use_module = True - try: - self.repo = git.Repo(str(self.repo_path)) # Initialize GitPython repo - except git.exc.InvalidGitRepositoryError: - self.git = git - self._init_git_repo() - msg = "Using GitPython interface to git" - except ImportError: - self._use_module = False - if not (self.repo_path / ".git").exists(): - self._init_git_repo() - msg = "Using shell interface to git" - self.logger.info(msg) - - def _git_command(self, operation, *args): - self.logger.info(operation) - if self._use_module and operation != "submodule": - try: - return getattr(self.repo.git, operation)(*args) - except Exception as e: - sys.exit(e) - else: - return ["git", "-C", str(self.repo_path), operation] + list(args) - - def _init_git_repo(self): - if self._use_module: - self.repo = self.git.Repo.init(str(self.repo_path)) - else: - command = ("git", "-C", str(self.repo_path), "init") - utils.execute_subprocess(command) - - # pylint: disable=unused-argument - def git_operation(self, operation, *args, **kwargs): - command = self._git_command(operation, *args) - self.logger.info(command) - if isinstance(command, list): - try: - return utils.execute_subprocess(command, output_to_caller=True) - except Exception as e: - sys.exit(e) - else: - return command - - def config_get_value(self, section, name): - if self._use_module: - config = self.repo.config_reader() - return config.get_value(section, name) - else: - cmd = ("git", "-C", str(self.repo_path), "config", "--get", f"{section}.{name}") - output = utils.execute_subprocess(cmd, output_to_caller=True) - return output.strip() - - def config_set_value(self, section, name, value): - if self._use_module: - with self.repo.config_writer() as writer: - writer.set_value(section, name, value) - writer.release() # Ensure changes are saved - else: - cmd = ("git", "-C", str(self.repo_path), "config", f"{section}.{name}", value) - self.logger.info(cmd) - utils.execute_subprocess(cmd, output_to_caller=True) diff --git a/.lib/git-fleximod/git_fleximod/gitmodules.py b/.lib/git-fleximod/git_fleximod/gitmodules.py deleted file mode 100644 index 68c82d066f..0000000000 --- a/.lib/git-fleximod/git_fleximod/gitmodules.py +++ /dev/null @@ -1,97 +0,0 @@ -import shutil -from pathlib import Path -from configparser import RawConfigParser, ConfigParser -from .lstripreader import LstripReader - - -class 
GitModules(RawConfigParser): - def __init__( - self, - logger, - confpath=Path.cwd(), - conffile=".gitmodules", - includelist=None, - excludelist=None, - ): - """ - confpath: Path to the directory containing the .gitmodules file (defaults to the current working directory). - conffile: Name of the configuration file (defaults to .gitmodules). - includelist: Optional list of submodules to include. - excludelist: Optional list of submodules to exclude. - """ - self.logger = logger - self.logger.debug( - "Creating a GitModules object {} {} {} {}".format( - confpath, conffile, includelist, excludelist - ) - ) - super().__init__() - self.conf_file = (Path(confpath) / Path(conffile)) - if self.conf_file.exists(): - self.read_file(LstripReader(str(self.conf_file)), source=conffile) - self.includelist = includelist - self.excludelist = excludelist - self.isdirty = False - - def reload(self): - self.clear() - if self.conf_file.exists(): - self.read_file(LstripReader(str(self.conf_file)), source=self.conf_file) - - - def set(self, name, option, value): - """ - Sets a configuration value for a specific submodule: - Ensures the appropriate section exists for the submodule. - Calls the parent class's set method to store the value. - """ - self.isdirty = True - self.logger.debug("set called {} {} {}".format(name, option, value)) - section = f'submodule "{name}"' - if not self.has_section(section): - self.add_section(section) - super().set(section, option, str(value)) - - # pylint: disable=redefined-builtin, arguments-differ - def get(self, name, option, raw=False, vars=None, fallback=None): - """ - Retrieves a configuration value for a specific submodule: - Uses the parent class's get method to access the value. - Handles potential errors if the section or option doesn't exist. 
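A short usage sketch of this wrapper; the option values written below are illustrative only.

    import logging

    gm = GitModules(logging.getLogger(__name__), confpath=".")
    for name in gm.sections():        # names come back without the 'submodule "..."' wrapper
        tag = gm.get(name, "fxtag")   # returns None when the option is missing
        if tag is None:
            gm.set(name, "fxrequired", "ToplevelRequired")  # illustrative value
    gm.save()                         # rewrites .gitmodules only if something was changed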
- """ - self.logger.debug("get called {} {}".format(name, option)) - section = f'submodule "{name}"' - try: - return ConfigParser.get( - self, section, option, raw=raw, vars=vars, fallback=fallback - ) - except ConfigParser.NoOptionError: - return None - - def save(self): - if self.isdirty: - self.logger.info("Writing {}".format(self.conf_file)) - with open(self.conf_file, "w") as fd: - self.write(fd) - self.isdirty = False - - def __del__(self): - self.save() - - def sections(self): - """Strip the submodule part out of section and just use the name""" - self.logger.debug("calling GitModules sections iterator") - names = [] - for section in ConfigParser.sections(self): - name = section[11:-1] - if self.includelist and name not in self.includelist: - continue - if self.excludelist and name in self.excludelist: - continue - names.append(name) - return names - - def items(self, name, raw=False, vars=None): - self.logger.debug("calling GitModules items for {}".format(name)) - section = f'submodule "{name}"' - return ConfigParser.items(section, raw=raw, vars=vars) diff --git a/.lib/git-fleximod/git_fleximod/lstripreader.py b/.lib/git-fleximod/git_fleximod/lstripreader.py deleted file mode 100644 index 01d5580ee8..0000000000 --- a/.lib/git-fleximod/git_fleximod/lstripreader.py +++ /dev/null @@ -1,43 +0,0 @@ -class LstripReader(object): - "LstripReader formats .gitmodules files to be acceptable for configparser" - - def __init__(self, filename): - with open(filename, "r") as infile: - lines = infile.readlines() - self._lines = list() - self._num_lines = len(lines) - self._index = 0 - for line in lines: - self._lines.append(line.lstrip()) - - def readlines(self): - """Return all the lines from this object's file""" - return self._lines - - def readline(self, size=-1): - """Format and return the next line or raise StopIteration""" - try: - line = self.next() - except StopIteration: - line = "" - - if (size > 0) and (len(line) < size): - return line[0:size] - - return line - - def __iter__(self): - """Begin an iteration""" - self._index = 0 - return self - - def next(self): - """Return the next line or raise StopIteration""" - if self._index >= self._num_lines: - raise StopIteration - - self._index = self._index + 1 - return self._lines[self._index - 1] - - def __next__(self): - return self.next() diff --git a/.lib/git-fleximod/git_fleximod/metoflexi.py b/.lib/git-fleximod/git_fleximod/metoflexi.py deleted file mode 100755 index cc347db2dd..0000000000 --- a/.lib/git-fleximod/git_fleximod/metoflexi.py +++ /dev/null @@ -1,236 +0,0 @@ -#!/usr/bin/env python -from configparser import ConfigParser -import sys -import shutil -from pathlib import Path -import argparse -import logging -from git_fleximod.gitinterface import GitInterface -from git_fleximod.gitmodules import GitModules -from git_fleximod import utils - -logger = None - -def find_root_dir(filename=".git"): - d = Path.cwd() - root = Path(d.root) - while d != root: - attempt = d / filename - if attempt.is_dir(): - return d - d = d.parent - return None - - -def get_parser(): - description = """ - %(prog)s manages checking out groups of gitsubmodules with addtional support for Earth System Models - """ - parser = argparse.ArgumentParser( - description=description, formatter_class=argparse.RawDescriptionHelpFormatter - ) - - parser.add_argument('-e', '--externals', nargs='?', - default='Externals.cfg', - help='The externals description filename. 
' - 'Default: %(default)s.') - - parser.add_argument( - "-C", - "--path", - default=find_root_dir(), - help="Toplevel repository directory. Defaults to top git directory relative to current.", - ) - - parser.add_argument( - "-g", - "--gitmodules", - nargs="?", - default=".gitmodules", - help="The submodule description filename. " "Default: %(default)s.", - ) - parser.add_argument( - "-v", - "--verbose", - action="count", - default=0, - help="Output additional information to " - "the screen and log file. This flag can be " - "used up to two times, increasing the " - "verbosity level each time.", - ) - parser.add_argument( - "-d", - "--debug", - action="store_true", - default=False, - help="DEVELOPER: output additional debugging " - "information to the screen and log file.", - ) - - return parser - -def commandline_arguments(args=None): - parser = get_parser() - - options = parser.parse_args(args) - handlers = [logging.StreamHandler()] - - if options.debug: - try: - open("fleximod.log", "w") - except PermissionError: - sys.exit("ABORT: Could not write file fleximod.log") - level = logging.DEBUG - handlers.append(logging.FileHandler("fleximod.log")) - elif options.verbose: - level = logging.INFO - else: - level = logging.WARNING - # Configure the root logger - logging.basicConfig( - level=level, format="%(name)s - %(levelname)s - %(message)s", handlers=handlers - ) - - return( - options.path, - options.gitmodules, - options.externals - ) - -class ExternalRepoTranslator: - """ - Translates external repositories configured in an INI-style externals file. - """ - - def __init__(self, rootpath, gitmodules, externals): - self.rootpath = rootpath - if gitmodules: - self.gitmodules = GitModules(logger, confpath=rootpath) - self.externals = (rootpath / Path(externals)).resolve() - print(f"Translating {self.externals}") - self.git = GitInterface(rootpath, logger) - -# def __del__(self): -# if (self.rootpath / "save.gitignore"): - - - def translate_single_repo(self, section, tag, url, path, efile, hash_, sparse, protocol): - """ - Translates a single repository based on configuration details. - - Args: - rootpath (str): Root path of the main repository. - gitmodules (str): Path to the .gitmodules file. - tag (str): The tag to use for the external repository. - url (str): The URL of the external repository. - path (str): The relative path within the main repository for the external repository. - efile (str): The external file or file containing submodules. - hash_ (str): The commit hash to checkout (if applicable). - sparse (str): Boolean indicating whether to use sparse checkout (if applicable). - protocol (str): The protocol to use (e.g., 'git', 'http'). 
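A hedged usage sketch of this method; the section name, tag, URL, and path below are purely hypothetical, and the translator performs real clone/submodule operations when run.

    import logging
    from pathlib import Path
    from git_fleximod import metoflexi

    metoflexi.logger = logging.getLogger(__name__)  # the module expects this global to be set
    t = metoflexi.ExternalRepoTranslator(Path("."), ".gitmodules", "Externals.cfg")
    # section, tag, url, path, efile, hash_, sparse, protocol -- all values hypothetical
    t.translate_single_repo("somerepo", "v1.0", "https://github.com/example/somerepo",
                            "components/somerepo", None, None, None, "git")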
- """ - assert protocol != "svn", "SVN protocol is not currently supported" - print(f"Translating repository {section}") - if efile: - file_path = Path(path) / Path(efile) - newroot = (self.rootpath / file_path).parent.resolve() - if not newroot.exists(): - newroot.mkdir(parents=True) - logger.info("Newroot is {}".format(newroot)) - newt = ExternalRepoTranslator(newroot, ".gitmodules", efile) - newt.translate_repo() - if protocol == "externals_only": - if tag: - self.gitmodules.set(section, "fxtag", tag) - if hash_: - self.gitmodules.set(section, "fxtag", hash_) - - self.gitmodules.set(section, "fxDONOTUSEurl", url) - if sparse: - self.gitmodules.set(section, "fxsparse", sparse) - self.gitmodules.set(section, "fxrequired", "ToplevelRequired") - else: - newpath = (self.rootpath / Path(path)) - if newpath.exists(): - shutil.rmtree(newpath) - logger.info("Creating directory {}".format(newpath)) - newpath.mkdir(parents=True) - if tag: - logger.info("cloning {}".format(section)) - try: - self.git.git_operation("clone", "-b", tag, "--depth", "1", url, path) - except: - self.git.git_operation("clone", url, path) - with utils.pushd(newpath): - ngit = GitInterface(newpath, logger) - ngit.git_operation("checkout", tag) - if hash_: - self.git.git_operation("clone", url, path) - git = GitInterface(newpath, logger) - git.git_operation("fetch", "origin") - git.git_operation("checkout", hash_) - if sparse: - print("setting as sparse submodule {}".format(section)) - sparsefile = (newpath / Path(sparse)) - newfile = (newpath / ".git" / "info" / "sparse-checkout") - print(f"sparsefile {sparsefile} newfile {newfile}") - shutil.copy(sparsefile, newfile) - - logger.info("adding submodule {}".format(section)) - self.gitmodules.save() - self.git.git_operation("submodule", "add", "-f", "--name", section, url, path) - self.git.git_operation("submodule","absorbgitdirs") - self.gitmodules.reload() - if tag: - self.gitmodules.set(section, "fxtag", tag) - if hash_: - self.gitmodules.set(section, "fxtag", hash_) - - self.gitmodules.set(section, "fxDONOTUSEurl", url) - if sparse: - self.gitmodules.set(section, "fxsparse", sparse) - self.gitmodules.set(section, "fxrequired", "ToplevelRequired") - - - def translate_repo(self): - """ - Translates external repositories defined within an external file. - - Args: - rootpath (str): Root path of the main repository. - gitmodules (str): Path to the .gitmodules file. - external_file (str): The path to the external file containing repository definitions. 
- """ - econfig = ConfigParser() - econfig.read((self.rootpath / Path(self.externals))) - - for section in econfig.sections(): - if section == "externals_description": - logger.info("skipping section {}".format(section)) - return - logger.info("Translating section {}".format(section)) - tag = econfig.get(section, "tag", raw=False, fallback=None) - url = econfig.get(section, "repo_url", raw=False, fallback=None) - path = econfig.get(section, "local_path", raw=False, fallback=None) - efile = econfig.get(section, "externals", raw=False, fallback=None) - hash_ = econfig.get(section, "hash", raw=False, fallback=None) - sparse = econfig.get(section, "sparse", raw=False, fallback=None) - protocol = econfig.get(section, "protocol", raw=False, fallback=None) - - self.translate_single_repo(section, tag, url, path, efile, hash_, sparse, protocol) - - - -def _main(): - rootpath, gitmodules, externals = commandline_arguments() - global logger - logger = logging.getLogger(__name__) - with utils.pushd(rootpath): - t = ExternalRepoTranslator(Path(rootpath), gitmodules, externals) - logger.info("Translating {}".format(rootpath)) - t.translate_repo() - - -if __name__ == "__main__": - sys.exit(_main()) diff --git a/.lib/git-fleximod/git_fleximod/utils.py b/.lib/git-fleximod/git_fleximod/utils.py deleted file mode 100644 index 7cc1de38cc..0000000000 --- a/.lib/git-fleximod/git_fleximod/utils.py +++ /dev/null @@ -1,365 +0,0 @@ -#!/usr/bin/env python3 -""" -Common public utilities for manic package - -""" - -import logging -import os -import subprocess -import sys -from threading import Timer -from pathlib import Path - -LOCAL_PATH_INDICATOR = "." -# --------------------------------------------------------------------- -# -# functions to massage text for output and other useful utilities -# -# --------------------------------------------------------------------- -from contextlib import contextmanager - - -@contextmanager -def pushd(new_dir): - """context for chdir. usage: with pushd(new_dir)""" - previous_dir = os.getcwd() - os.chdir(new_dir) - try: - yield - finally: - os.chdir(previous_dir) - - -def log_process_output(output): - """Log each line of process output at debug level so it can be - filtered if necessary. By default, output is a single string, and - logging.debug(output) will only put log info heading on the first - line. This makes it hard to filter with grep. - - """ - output = output.split("\n") - for line in output: - logging.debug(line) - - -def printlog(msg, **kwargs): - """Wrapper script around print to ensure that everything printed to - the screen also gets logged. - - """ - logging.info(msg) - if kwargs: - print(msg, **kwargs) - else: - print(msg) - sys.stdout.flush() - - -def find_upwards(root_dir, filename): - """Find a file in root dir or any of it's parents""" - d = Path(root_dir) - root = Path(d.root) - while d != root: - attempt = d / filename - if attempt.exists(): - return attempt - d = d.parent - return None - - -def last_n_lines(the_string, n_lines, truncation_message=None): - """Returns the last n lines of the given string - - Args: - the_string: str - n_lines: int - truncation_message: str, optional - - Returns a string containing the last n lines of the_string - - If truncation_message is provided, the returned string begins with - the given message if and only if the string is greater than n lines - to begin with. 
- """ - - lines = the_string.splitlines(True) - if len(lines) <= n_lines: - return_val = the_string - else: - lines_subset = lines[-n_lines:] - str_truncated = "".join(lines_subset) - if truncation_message: - str_truncated = truncation_message + "\n" + str_truncated - return_val = str_truncated - - return return_val - - -def indent_string(the_string, indent_level): - """Indents the given string by a given number of spaces - - Args: - the_string: str - indent_level: int - - Returns a new string that is the same as the_string, except that - each line is indented by 'indent_level' spaces. - - In python3, this can be done with textwrap.indent. - """ - - lines = the_string.splitlines(True) - padding = " " * indent_level - lines_indented = [padding + line for line in lines] - return "".join(lines_indented) - - -# --------------------------------------------------------------------- -# -# error handling -# -# --------------------------------------------------------------------- - - -def fatal_error(message): - """ - Error output function - """ - logging.error(message) - raise RuntimeError("{0}ERROR: {1}".format(os.linesep, message)) - - -# --------------------------------------------------------------------- -# -# Data conversion / manipulation -# -# --------------------------------------------------------------------- -def str_to_bool(bool_str): - """Convert a sting representation of as boolean into a true boolean. - - Conversion should be case insensitive. - """ - value = None - str_lower = bool_str.lower() - if str_lower in ("true", "t"): - value = True - elif str_lower in ("false", "f"): - value = False - if value is None: - msg = ( - 'ERROR: invalid boolean string value "{0}". ' - 'Must be "true" or "false"'.format(bool_str) - ) - fatal_error(msg) - return value - - -REMOTE_PREFIXES = ["http://", "https://", "ssh://", "git@"] - - -def is_remote_url(url): - """check if the user provided a local file path instead of a - remote. If so, it must be expanded to an absolute - path. - - """ - remote_url = False - for prefix in REMOTE_PREFIXES: - if url.startswith(prefix): - remote_url = True - return remote_url - - -def split_remote_url(url): - """check if the user provided a local file path or a - remote. If remote, try to strip off protocol info. - - """ - remote_url = is_remote_url(url) - if not remote_url: - return url - - for prefix in REMOTE_PREFIXES: - url = url.replace(prefix, "") - - if "@" in url: - url = url.split("@")[1] - - if ":" in url: - url = url.split(":")[1] - - return url - - -def expand_local_url(url, field): - """check if the user provided a local file path instead of a - remote. If so, it must be expanded to an absolute - path. - - Note: local paths of LOCAL_PATH_INDICATOR have special meaning and - represent local copy only, don't work with the remotes. - - """ - remote_url = is_remote_url(url) - if not remote_url: - if url.strip() == LOCAL_PATH_INDICATOR: - pass - else: - url = os.path.expandvars(url) - url = os.path.expanduser(url) - if not os.path.isabs(url): - msg = ( - 'WARNING: Externals description for "{0}" contains a ' - "url that is not remote and does not expand to an " - "absolute path. 
Version control operations may " - "fail.\n\nurl={1}".format(field, url) - ) - printlog(msg) - else: - url = os.path.normpath(url) - return url - - -# --------------------------------------------------------------------- -# -# subprocess -# -# --------------------------------------------------------------------- - -# Give the user a helpful message if we detect that a command seems to -# be hanging. -_HANGING_SEC = 300 - - -def _hanging_msg(working_directory, command): - print( - """ - -Command '{command}' -from directory {working_directory} -has taken {hanging_sec} seconds. It may be hanging. - -The command will continue to run, but you may want to abort -manage_externals with ^C and investigate. A possible cause of hangs is -when svn or git require authentication to access a private -repository. On some systems, svn and git requests for authentication -information will not be displayed to the user. In this case, the program -will appear to hang. Ensure you can run svn and git manually and access -all repositories without entering your authentication information. - -""".format( - command=command, - working_directory=working_directory, - hanging_sec=_HANGING_SEC, - ) - ) - - -def execute_subprocess(commands, status_to_caller=False, output_to_caller=False): - """Wrapper around subprocess.check_output to handle common - exceptions. - - check_output runs a command with arguments and waits - for it to complete. - - check_output raises an exception on a nonzero return code. if - status_to_caller is true, execute_subprocess returns the subprocess - return code, otherwise execute_subprocess treats non-zero return - status as an error and raises an exception. - - """ - cwd = os.getcwd() - msg = "In directory: {0}\nexecute_subprocess running command:".format(cwd) - logging.info(msg) - commands_str = " ".join(str(element) for element in commands) - logging.info(commands_str) - return_to_caller = status_to_caller or output_to_caller - status = -1 - output = "" - hanging_timer = Timer( - _HANGING_SEC, - _hanging_msg, - kwargs={"working_directory": cwd, "command": commands_str}, - ) - hanging_timer.start() - try: - output = subprocess.check_output( - commands, stderr=subprocess.STDOUT, universal_newlines=True - ) - log_process_output(output) - status = 0 - except OSError as error: - msg = failed_command_msg( - "Command execution failed. Does the executable exist?", commands - ) - logging.error(error) - fatal_error(msg) - except ValueError as error: - msg = failed_command_msg( - "DEV_ERROR: Invalid arguments trying to run subprocess", commands - ) - logging.error(error) - fatal_error(msg) - except subprocess.CalledProcessError as error: - # Only report the error if we are NOT returning to the - # caller. If we are returning to the caller, then it may be a - # simple status check. If returning, it is the callers - # responsibility determine if an error occurred and handle it - # appropriately. 
- if not return_to_caller: - msg_context = ( - "Process did not run successfully; " - "returned status {0}".format(error.returncode) - ) - msg = failed_command_msg(msg_context, commands, output=error.output) - logging.error(error) - logging.error(msg) - log_process_output(error.output) - fatal_error(msg) - status = error.returncode - finally: - hanging_timer.cancel() - - if status_to_caller and output_to_caller: - ret_value = (status, output) - elif status_to_caller: - ret_value = status - elif output_to_caller: - ret_value = output - else: - ret_value = None - - return ret_value - - -def failed_command_msg(msg_context, command, output=None): - """Template for consistent error messages from subprocess calls. - - If 'output' is given, it should provide the output from the failed - command - """ - - if output: - output_truncated = last_n_lines( - output, 20, truncation_message="[... Output truncated for brevity ...]" - ) - errmsg = ( - "Failed with output:\n" + indent_string(output_truncated, 4) + "\nERROR: " - ) - else: - errmsg = "" - - command_str = " ".join(command) - errmsg += """In directory - {cwd} -{context}: - {command} -""".format( - cwd=os.getcwd(), context=msg_context, command=command_str - ) - - if output: - errmsg += "See above for output from failed command.\n" - - return errmsg diff --git a/.lib/git-fleximod/poetry.lock b/.lib/git-fleximod/poetry.lock deleted file mode 100644 index b59ed3942c..0000000000 --- a/.lib/git-fleximod/poetry.lock +++ /dev/null @@ -1,693 +0,0 @@ -# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. - -[[package]] -name = "alabaster" -version = "0.7.13" -description = "A configurable sidebar-enabled Sphinx theme" -optional = false -python-versions = ">=3.6" -files = [ - {file = "alabaster-0.7.13-py3-none-any.whl", hash = "sha256:1ee19aca801bbabb5ba3f5f258e4422dfa86f82f3e9cefb0859b283cdd7f62a3"}, - {file = "alabaster-0.7.13.tar.gz", hash = "sha256:a27a4a084d5e690e16e01e03ad2b2e552c61a65469419b907243193de1a84ae2"}, -] - -[[package]] -name = "babel" -version = "2.14.0" -description = "Internationalization utilities" -optional = false -python-versions = ">=3.7" -files = [ - {file = "Babel-2.14.0-py3-none-any.whl", hash = "sha256:efb1a25b7118e67ce3a259bed20545c29cb68be8ad2c784c83689981b7a57287"}, - {file = "Babel-2.14.0.tar.gz", hash = "sha256:6919867db036398ba21eb5c7a0f6b28ab8cbc3ae7a73a44ebe34ae74a4e7d363"}, -] - -[package.dependencies] -pytz = {version = ">=2015.7", markers = "python_version < \"3.9\""} - -[package.extras] -dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"] - -[[package]] -name = "certifi" -version = "2024.2.2" -description = "Python package for providing Mozilla's CA Bundle." -optional = false -python-versions = ">=3.6" -files = [ - {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, - {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, -] - -[[package]] -name = "charset-normalizer" -version = "3.3.2" -description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
-optional = false -python-versions = ">=3.7.0" -files = [ - {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, - {file = 
"charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, - {file = 
"charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = 
"sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, - {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, -] - -[[package]] -name = "colorama" -version = "0.4.6" -description = "Cross-platform colored terminal text." -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" -files = [ - {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, - {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, -] - -[[package]] -name = "docutils" -version = "0.19" -description = "Docutils -- Python Documentation Utilities" -optional = false -python-versions = ">=3.7" -files = [ - {file = "docutils-0.19-py3-none-any.whl", hash = "sha256:5e1de4d849fee02c63b040a4a3fd567f4ab104defd8a5511fbbc24a8a017efbc"}, - {file = "docutils-0.19.tar.gz", hash = "sha256:33995a6753c30b7f577febfc2c50411fec6aac7f7ffeb7c4cfe5991072dcf9e6"}, -] - -[[package]] -name = "exceptiongroup" -version = "1.2.0" -description = "Backport of PEP 654 (exception groups)" -optional = false -python-versions = ">=3.7" -files = [ - {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"}, - {file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"}, -] - -[package.extras] -test = ["pytest (>=6)"] - -[[package]] -name = "fsspec" -version = "2023.12.2" -description = "File-system specification" -optional = false -python-versions = ">=3.8" -files = [ - {file = "fsspec-2023.12.2-py3-none-any.whl", hash = "sha256:d800d87f72189a745fa3d6b033b9dc4a34ad069f60ca60b943a63599f5501960"}, - {file = "fsspec-2023.12.2.tar.gz", hash = "sha256:8548d39e8810b59c38014934f6b31e57f40c1b20f911f4cc2b85389c7e9bf0cb"}, -] - -[package.extras] -abfs = ["adlfs"] -adl = ["adlfs"] -arrow = ["pyarrow (>=1)"] -dask = ["dask", "distributed"] -devel = ["pytest", "pytest-cov"] -dropbox = ["dropbox", "dropboxdrivefs", "requests"] -full = ["adlfs", "aiohttp (!=4.0.0a0,!=4.0.0a1)", "dask", "distributed", "dropbox", "dropboxdrivefs", "fusepy", "gcsfs", "libarchive-c", "ocifs", "panel", "paramiko", "pyarrow (>=1)", "pygit2", "requests", "s3fs", "smbprotocol", "tqdm"] -fuse = ["fusepy"] -gcs = ["gcsfs"] -git = ["pygit2"] -github = ["requests"] -gs = ["gcsfs"] -gui = ["panel"] -hdfs = ["pyarrow (>=1)"] -http = ["aiohttp (!=4.0.0a0,!=4.0.0a1)", "requests"] -libarchive = ["libarchive-c"] -oci = ["ocifs"] -s3 = ["s3fs"] -sftp = ["paramiko"] -smb = ["smbprotocol"] -ssh = ["paramiko"] -tqdm = ["tqdm"] - -[[package]] -name = "gitdb" -version = "4.0.11" -description = "Git Object Database" -optional = false -python-versions = ">=3.7" -files = [ - {file = "gitdb-4.0.11-py3-none-any.whl", hash = 
"sha256:81a3407ddd2ee8df444cbacea00e2d038e40150acfa3001696fe0dcf1d3adfa4"}, - {file = "gitdb-4.0.11.tar.gz", hash = "sha256:bf5421126136d6d0af55bc1e7c1af1c397a34f5b7bd79e776cd3e89785c2b04b"}, -] - -[package.dependencies] -smmap = ">=3.0.1,<6" - -[[package]] -name = "gitpython" -version = "3.1.41" -description = "GitPython is a Python library used to interact with Git repositories" -optional = false -python-versions = ">=3.7" -files = [ - {file = "GitPython-3.1.41-py3-none-any.whl", hash = "sha256:c36b6634d069b3f719610175020a9aed919421c87552185b085e04fbbdb10b7c"}, - {file = "GitPython-3.1.41.tar.gz", hash = "sha256:ed66e624884f76df22c8e16066d567aaa5a37d5b5fa19db2c6df6f7156db9048"}, -] - -[package.dependencies] -gitdb = ">=4.0.1,<5" - -[package.extras] -test = ["black", "coverage[toml]", "ddt (>=1.1.1,!=1.4.3)", "mock", "mypy", "pre-commit", "pytest (>=7.3.1)", "pytest-cov", "pytest-instafail", "pytest-mock", "pytest-sugar", "sumtypes"] - -[[package]] -name = "idna" -version = "3.6" -description = "Internationalized Domain Names in Applications (IDNA)" -optional = false -python-versions = ">=3.5" -files = [ - {file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"}, - {file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"}, -] - -[[package]] -name = "imagesize" -version = "1.4.1" -description = "Getting image size from png/jpeg/jpeg2000/gif file" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -files = [ - {file = "imagesize-1.4.1-py2.py3-none-any.whl", hash = "sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b"}, - {file = "imagesize-1.4.1.tar.gz", hash = "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a"}, -] - -[[package]] -name = "importlib-metadata" -version = "7.0.1" -description = "Read metadata from Python packages" -optional = false -python-versions = ">=3.8" -files = [ - {file = "importlib_metadata-7.0.1-py3-none-any.whl", hash = "sha256:4805911c3a4ec7c3966410053e9ec6a1fecd629117df5adee56dfc9432a1081e"}, - {file = "importlib_metadata-7.0.1.tar.gz", hash = "sha256:f238736bb06590ae52ac1fab06a3a9ef1d8dce2b7a35b5ab329371d6c8f5d2cc"}, -] - -[package.dependencies] -zipp = ">=0.5" - -[package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] -perf = ["ipython"] -testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)", "pytest-ruff"] - -[[package]] -name = "iniconfig" -version = "2.0.0" -description = "brain-dead simple config-ini parsing" -optional = false -python-versions = ">=3.7" -files = [ - {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, - {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, -] - -[[package]] -name = "jinja2" -version = "3.1.3" -description = "A very fast and expressive template engine." 
-optional = false -python-versions = ">=3.7" -files = [ - {file = "Jinja2-3.1.3-py3-none-any.whl", hash = "sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa"}, - {file = "Jinja2-3.1.3.tar.gz", hash = "sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90"}, -] - -[package.dependencies] -MarkupSafe = ">=2.0" - -[package.extras] -i18n = ["Babel (>=2.7)"] - -[[package]] -name = "markupsafe" -version = "2.1.5" -description = "Safely add untrusted strings to HTML/XML markup." -optional = false -python-versions = ">=3.7" -files = [ - {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = 
"sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, - {file = 
"MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, - {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, -] - -[[package]] -name = "packaging" -version = "23.2" -description = "Core utilities for Python packages" -optional = false -python-versions = ">=3.7" -files = [ - {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, - {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, -] - -[[package]] -name = "pluggy" -version = "1.4.0" -description = "plugin and hook calling mechanisms for python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pluggy-1.4.0-py3-none-any.whl", hash = 
"sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, - {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, -] - -[package.extras] -dev = ["pre-commit", "tox"] -testing = ["pytest", "pytest-benchmark"] - -[[package]] -name = "pyfakefs" -version = "5.3.5" -description = "pyfakefs implements a fake file system that mocks the Python file system modules." -optional = false -python-versions = ">=3.7" -files = [ - {file = "pyfakefs-5.3.5-py3-none-any.whl", hash = "sha256:751015c1de94e1390128c82b48cdedc3f088bbdbe4bc713c79d02a27f0f61e69"}, - {file = "pyfakefs-5.3.5.tar.gz", hash = "sha256:7cdc500b35a214cb7a614e1940543acc6650e69a94ac76e30f33c9373bd9cf90"}, -] - -[[package]] -name = "pygments" -version = "2.17.2" -description = "Pygments is a syntax highlighting package written in Python." -optional = false -python-versions = ">=3.7" -files = [ - {file = "pygments-2.17.2-py3-none-any.whl", hash = "sha256:b27c2826c47d0f3219f29554824c30c5e8945175d888647acd804ddd04af846c"}, - {file = "pygments-2.17.2.tar.gz", hash = "sha256:da46cec9fd2de5be3a8a784f434e4c4ab670b4ff54d605c4c2717e9d49c4c367"}, -] - -[package.extras] -plugins = ["importlib-metadata"] -windows-terminal = ["colorama (>=0.4.6)"] - -[[package]] -name = "pytest" -version = "8.0.0" -description = "pytest: simple powerful testing with Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pytest-8.0.0-py3-none-any.whl", hash = "sha256:50fb9cbe836c3f20f0dfa99c565201fb75dc54c8d76373cd1bde06b06657bdb6"}, - {file = "pytest-8.0.0.tar.gz", hash = "sha256:249b1b0864530ba251b7438274c4d251c58d868edaaec8762893ad4a0d71c36c"}, -] - -[package.dependencies] -colorama = {version = "*", markers = "sys_platform == \"win32\""} -exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} -iniconfig = "*" -packaging = "*" -pluggy = ">=1.3.0,<2.0" -tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} - -[package.extras] -testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] - -[[package]] -name = "pytz" -version = "2024.1" -description = "World timezone definitions, modern and historical" -optional = false -python-versions = "*" -files = [ - {file = "pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"}, - {file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"}, -] - -[[package]] -name = "requests" -version = "2.31.0" -description = "Python HTTP for Humans." 
-optional = false -python-versions = ">=3.7" -files = [ - {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, - {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, -] - -[package.dependencies] -certifi = ">=2017.4.17" -charset-normalizer = ">=2,<4" -idna = ">=2.5,<4" -urllib3 = ">=1.21.1,<3" - -[package.extras] -socks = ["PySocks (>=1.5.6,!=1.5.7)"] -use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] - -[[package]] -name = "smmap" -version = "5.0.1" -description = "A pure Python implementation of a sliding window memory map manager" -optional = false -python-versions = ">=3.7" -files = [ - {file = "smmap-5.0.1-py3-none-any.whl", hash = "sha256:e6d8668fa5f93e706934a62d7b4db19c8d9eb8cf2adbb75ef1b675aa332b69da"}, - {file = "smmap-5.0.1.tar.gz", hash = "sha256:dceeb6c0028fdb6734471eb07c0cd2aae706ccaecab45965ee83f11c8d3b1f62"}, -] - -[[package]] -name = "snowballstemmer" -version = "2.2.0" -description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." -optional = false -python-versions = "*" -files = [ - {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"}, - {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, -] - -[[package]] -name = "sphinx" -version = "5.3.0" -description = "Python documentation generator" -optional = false -python-versions = ">=3.6" -files = [ - {file = "Sphinx-5.3.0.tar.gz", hash = "sha256:51026de0a9ff9fc13c05d74913ad66047e104f56a129ff73e174eb5c3ee794b5"}, - {file = "sphinx-5.3.0-py3-none-any.whl", hash = "sha256:060ca5c9f7ba57a08a1219e547b269fadf125ae25b06b9fa7f66768efb652d6d"}, -] - -[package.dependencies] -alabaster = ">=0.7,<0.8" -babel = ">=2.9" -colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} -docutils = ">=0.14,<0.20" -imagesize = ">=1.3" -importlib-metadata = {version = ">=4.8", markers = "python_version < \"3.10\""} -Jinja2 = ">=3.0" -packaging = ">=21.0" -Pygments = ">=2.12" -requests = ">=2.5.0" -snowballstemmer = ">=2.0" -sphinxcontrib-applehelp = "*" -sphinxcontrib-devhelp = "*" -sphinxcontrib-htmlhelp = ">=2.0.0" -sphinxcontrib-jsmath = "*" -sphinxcontrib-qthelp = "*" -sphinxcontrib-serializinghtml = ">=1.1.5" - -[package.extras] -docs = ["sphinxcontrib-websupport"] -lint = ["docutils-stubs", "flake8 (>=3.5.0)", "flake8-bugbear", "flake8-comprehensions", "flake8-simplify", "isort", "mypy (>=0.981)", "sphinx-lint", "types-requests", "types-typed-ast"] -test = ["cython", "html5lib", "pytest (>=4.6)", "typed_ast"] - -[[package]] -name = "sphinxcontrib-applehelp" -version = "1.0.4" -description = "sphinxcontrib-applehelp is a Sphinx extension which outputs Apple help books" -optional = false -python-versions = ">=3.8" -files = [ - {file = "sphinxcontrib-applehelp-1.0.4.tar.gz", hash = "sha256:828f867945bbe39817c210a1abfd1bc4895c8b73fcaade56d45357a348a07d7e"}, - {file = "sphinxcontrib_applehelp-1.0.4-py3-none-any.whl", hash = "sha256:29d341f67fb0f6f586b23ad80e072c8e6ad0b48417db2bde114a4c9746feb228"}, -] - -[package.extras] -lint = ["docutils-stubs", "flake8", "mypy"] -test = ["pytest"] - -[[package]] -name = "sphinxcontrib-devhelp" -version = "1.0.2" -description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp document." 
-optional = false -python-versions = ">=3.5" -files = [ - {file = "sphinxcontrib-devhelp-1.0.2.tar.gz", hash = "sha256:ff7f1afa7b9642e7060379360a67e9c41e8f3121f2ce9164266f61b9f4b338e4"}, - {file = "sphinxcontrib_devhelp-1.0.2-py2.py3-none-any.whl", hash = "sha256:8165223f9a335cc1af7ffe1ed31d2871f325254c0423bc0c4c7cd1c1e4734a2e"}, -] - -[package.extras] -lint = ["docutils-stubs", "flake8", "mypy"] -test = ["pytest"] - -[[package]] -name = "sphinxcontrib-htmlhelp" -version = "2.0.1" -description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files" -optional = false -python-versions = ">=3.8" -files = [ - {file = "sphinxcontrib-htmlhelp-2.0.1.tar.gz", hash = "sha256:0cbdd302815330058422b98a113195c9249825d681e18f11e8b1f78a2f11efff"}, - {file = "sphinxcontrib_htmlhelp-2.0.1-py3-none-any.whl", hash = "sha256:c38cb46dccf316c79de6e5515e1770414b797162b23cd3d06e67020e1d2a6903"}, -] - -[package.extras] -lint = ["docutils-stubs", "flake8", "mypy"] -test = ["html5lib", "pytest"] - -[[package]] -name = "sphinxcontrib-jsmath" -version = "1.0.1" -description = "A sphinx extension which renders display math in HTML via JavaScript" -optional = false -python-versions = ">=3.5" -files = [ - {file = "sphinxcontrib-jsmath-1.0.1.tar.gz", hash = "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8"}, - {file = "sphinxcontrib_jsmath-1.0.1-py2.py3-none-any.whl", hash = "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178"}, -] - -[package.extras] -test = ["flake8", "mypy", "pytest"] - -[[package]] -name = "sphinxcontrib-qthelp" -version = "1.0.3" -description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp document." -optional = false -python-versions = ">=3.5" -files = [ - {file = "sphinxcontrib-qthelp-1.0.3.tar.gz", hash = "sha256:4c33767ee058b70dba89a6fc5c1892c0d57a54be67ddd3e7875a18d14cba5a72"}, - {file = "sphinxcontrib_qthelp-1.0.3-py2.py3-none-any.whl", hash = "sha256:bd9fc24bcb748a8d51fd4ecaade681350aa63009a347a8c14e637895444dfab6"}, -] - -[package.extras] -lint = ["docutils-stubs", "flake8", "mypy"] -test = ["pytest"] - -[[package]] -name = "sphinxcontrib-serializinghtml" -version = "1.1.5" -description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)." -optional = false -python-versions = ">=3.5" -files = [ - {file = "sphinxcontrib-serializinghtml-1.1.5.tar.gz", hash = "sha256:aa5f6de5dfdf809ef505c4895e51ef5c9eac17d0f287933eb49ec495280b6952"}, - {file = "sphinxcontrib_serializinghtml-1.1.5-py2.py3-none-any.whl", hash = "sha256:352a9a00ae864471d3a7ead8d7d79f5fc0b57e8b3f95e9867eb9eb28999b92fd"}, -] - -[package.extras] -lint = ["docutils-stubs", "flake8", "mypy"] -test = ["pytest"] - -[[package]] -name = "tomli" -version = "2.0.1" -description = "A lil' TOML parser" -optional = false -python-versions = ">=3.7" -files = [ - {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, - {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, -] - -[[package]] -name = "urllib3" -version = "2.2.0" -description = "HTTP library with thread-safe connection pooling, file post, and more." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "urllib3-2.2.0-py3-none-any.whl", hash = "sha256:ce3711610ddce217e6d113a2732fafad960a03fd0318c91faa79481e35c11224"}, - {file = "urllib3-2.2.0.tar.gz", hash = "sha256:051d961ad0c62a94e50ecf1af379c3aba230c66c710493493560c0c223c49f20"}, -] - -[package.extras] -brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] -h2 = ["h2 (>=4,<5)"] -socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] -zstd = ["zstandard (>=0.18.0)"] - -[[package]] -name = "wheel" -version = "0.42.0" -description = "A built-package format for Python" -optional = false -python-versions = ">=3.7" -files = [ - {file = "wheel-0.42.0-py3-none-any.whl", hash = "sha256:177f9c9b0d45c47873b619f5b650346d632cdc35fb5e4d25058e09c9e581433d"}, - {file = "wheel-0.42.0.tar.gz", hash = "sha256:c45be39f7882c9d34243236f2d63cbd58039e360f85d0913425fbd7ceea617a8"}, -] - -[package.extras] -test = ["pytest (>=6.0.0)", "setuptools (>=65)"] - -[[package]] -name = "zipp" -version = "3.17.0" -description = "Backport of pathlib-compatible object wrapper for zip files" -optional = false -python-versions = ">=3.8" -files = [ - {file = "zipp-3.17.0-py3-none-any.whl", hash = "sha256:0e923e726174922dce09c53c59ad483ff7bbb8e572e00c7f7c46b88556409f31"}, - {file = "zipp-3.17.0.tar.gz", hash = "sha256:84e64a1c28cf7e91ed2078bb8cc8c259cb19b76942096c8d7b84947690cabaf0"}, -] - -[package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] -testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy (>=0.9.1)", "pytest-ruff"] - -[metadata] -lock-version = "2.0" -python-versions = "^3.8" -content-hash = "25ee2ae1d74abedde3a6637a60d4a3095ea5cf9731960875741bbc2ba84a475d" diff --git a/.lib/git-fleximod/pyproject.toml b/.lib/git-fleximod/pyproject.toml deleted file mode 100644 index ac1684ea0f..0000000000 --- a/.lib/git-fleximod/pyproject.toml +++ /dev/null @@ -1,41 +0,0 @@ -[tool.poetry] -name = "git-fleximod" -version = "0.7.5" -description = "Extended support for git-submodule and git-sparse-checkout" -authors = ["Jim Edwards "] -maintainers = ["Jim Edwards "] -license = "MIT" -readme = "README.md" -homepage = "https://github.com/jedwards4b/git-fleximod" -keywords = ["git", "submodule", "sparse-checkout"] -packages = [ -{ include = "git_fleximod"}, -{ include = "doc"}, -] - -[tool.poetry.scripts] -git-fleximod = "git_fleximod.git_fleximod:main" -me2flexi = "git_fleximod.metoflexi:_main" -fsspec = "fsspec.fuse:main" - -[tool.poetry.dependencies] -python = "^3.8" -GitPython = "^3.1.0" -sphinx = "^5.0.0" -fsspec = "^2023.12.2" -wheel = "^0.42.0" -pytest = "^8.0.0" -pyfakefs = "^5.3.5" - -[tool.poetry.urls] -"Bug Tracker" = "https://github.com/jedwards4b/git-fleximod/issues" - -[tool.pytest.ini_options] -markers = [ - "skip_after_first: only run on first iteration" -] - -[build-system] -requires = ["poetry-core"] -build-backend = "poetry.core.masonry.api" - diff --git a/.lib/git-fleximod/tbump.toml b/.lib/git-fleximod/tbump.toml deleted file mode 100644 index e644fc4d24..0000000000 --- a/.lib/git-fleximod/tbump.toml +++ /dev/null @@ -1,43 +0,0 @@ -# Uncomment this if your project is hosted on GitHub: -github_url = "https://github.com/jedwards4b/git-fleximod/" - -[version] -current = "0.7.5" - -# Example of a semver regexp. 
-# Make sure this matches current_version before -# using tbump -regex = ''' - (?P\d+) - \. - (?P\d+) - \. - (?P\d+) - ''' - -[git] -message_template = "Bump to {new_version}" -tag_template = "v{new_version}" - -# For each file to patch, add a [[file]] config -# section containing the path of the file, relative to the -# tbump.toml location. -[[file]] -src = "git_fleximod/cli.py" - -[[file]] -src = "pyproject.toml" - -# You can specify a list of commands to -# run after the files have been patched -# and before the git commit is made - -# [[before_commit]] -# name = "check changelog" -# cmd = "grep -q {new_version} Changelog.rst" - -# Or run some commands after the git tag and the branch -# have been pushed: -# [[after_push]] -# name = "publish" -# cmd = "./publish.sh" diff --git a/.lib/git-fleximod/tests/__init__.py b/.lib/git-fleximod/tests/__init__.py deleted file mode 100644 index 4d4c66c78e..0000000000 --- a/.lib/git-fleximod/tests/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -import sys, os - -sys.path.append(os.path.join(os.path.dirname(__file__), os.path.pardir, "src")) diff --git a/.lib/git-fleximod/tests/conftest.py b/.lib/git-fleximod/tests/conftest.py deleted file mode 100644 index 942a0efb97..0000000000 --- a/.lib/git-fleximod/tests/conftest.py +++ /dev/null @@ -1,138 +0,0 @@ -import pytest -from git_fleximod.gitinterface import GitInterface -import os -import subprocess -import logging -from pathlib import Path - -@pytest.fixture(scope='session') -def logger(): - logging.basicConfig( - level=logging.INFO, format="%(name)s - %(levelname)s - %(message)s", handlers=[logging.StreamHandler()] - ) - logger = logging.getLogger(__name__) - return logger - -all_repos=[ - {"subrepo_path": "modules/test", - "submodule_name": "test_submodule", - "status1" : "test_submodule MPIserial_2.5.0-3-gd82ce7c is out of sync with .gitmodules MPIserial_2.4.0", - "status2" : "test_submodule at tag MPIserial_2.4.0", - "status3" : "test_submodule at tag MPIserial_2.4.0", - "status4" : "test_submodule at tag MPIserial_2.4.0", - "gitmodules_content" : """ - [submodule "test_submodule"] - path = modules/test - url = https://github.com/ESMCI/mpi-serial.git - fxtag = MPIserial_2.4.0 - fxDONOTUSEurl = https://github.com/ESMCI/mpi-serial.git - fxrequired = ToplevelRequired -"""}, - {"subrepo_path": "modules/test_optional", - "submodule_name": "test_optional", - "status1" : "test_optional MPIserial_2.5.0-3-gd82ce7c is out of sync with .gitmodules MPIserial_2.4.0", - "status2" : "test_optional at tag MPIserial_2.4.0", - "status3" : "test_optional not checked out, aligned at tag MPIserial_2.4.0", - "status4" : "test_optional at tag MPIserial_2.4.0", - "gitmodules_content": """ - [submodule "test_optional"] - path = modules/test_optional - url = https://github.com/ESMCI/mpi-serial.git - fxtag = MPIserial_2.4.0 - fxDONOTUSEurl = https://github.com/ESMCI/mpi-serial.git - fxrequired = ToplevelOptional -"""}, - {"subrepo_path": "modules/test_alwaysoptional", - "submodule_name": "test_alwaysoptional", - "status1" : "test_alwaysoptional MPIserial_2.3.0 is out of sync with .gitmodules e5cf35c", - "status2" : "test_alwaysoptional at hash e5cf35c", - "status3" : "test_alwaysoptional not checked out, out of sync at tag MPIserial_2.3.0", - "status4" : "test_alwaysoptional at hash e5cf35c", - "gitmodules_content": """ - [submodule "test_alwaysoptional"] - path = modules/test_alwaysoptional - url = https://github.com/ESMCI/mpi-serial.git - fxtag = e5cf35c - fxDONOTUSEurl = https://github.com/ESMCI/mpi-serial.git - fxrequired = 
AlwaysOptional -"""}, - {"subrepo_path": "modules/test_sparse", - "submodule_name": "test_sparse", - "status1" : "test_sparse at tag MPIserial_2.5.0", - "status2" : "test_sparse at tag MPIserial_2.5.0", - "status3" : "test_sparse at tag MPIserial_2.5.0", - "status4" : "test_sparse at tag MPIserial_2.5.0", - "gitmodules_content": """ - [submodule "test_sparse"] - path = modules/test_sparse - url = https://github.com/ESMCI/mpi-serial.git - fxtag = MPIserial_2.5.0 - fxDONOTUSEurl = https://github.com/ESMCI/mpi-serial.git - fxrequired = AlwaysRequired - fxsparse = ../.sparse_file_list -"""}, -] -@pytest.fixture(params=all_repos) - -def shared_repos(request): - return request.param - -@pytest.fixture -def get_all_repos(): - return all_repos - -def write_sparse_checkout_file(fp): - sparse_content = """m4 -""" - fp.write_text(sparse_content) - -@pytest.fixture -def test_repo(shared_repos, tmp_path, logger): - subrepo_path = shared_repos["subrepo_path"] - submodule_name = shared_repos["submodule_name"] - test_dir = tmp_path / "testrepo" - test_dir.mkdir() - str_path = str(test_dir) - gitp = GitInterface(str_path, logger) - assert test_dir.joinpath(".git").is_dir() - (test_dir / "modules").mkdir() - if "sparse" in submodule_name: - (test_dir / subrepo_path).mkdir() - # Add the sparse checkout file - write_sparse_checkout_file(test_dir / "modules" / ".sparse_file_list") - gitp.git_operation("add","modules/.sparse_file_list") - else: - gitp = GitInterface(str(test_dir), logger) - gitp.git_operation("submodule", "add", "--depth","1","--name", submodule_name, "https://github.com/ESMCI/mpi-serial.git", subrepo_path) - assert test_dir.joinpath(".gitmodules").is_file() - gitp.git_operation("add",subrepo_path) - gitp.git_operation("commit","-a","-m","\"add submod\"") - test_dir2 = tmp_path / "testrepo2" - gitp.git_operation("clone",test_dir,test_dir2) - return test_dir2 - - -@pytest.fixture -def complex_repo(tmp_path, logger): - test_dir = tmp_path / "testcomplex" - test_dir.mkdir() - str_path = str(test_dir) - gitp = GitInterface(str_path, logger) - gitp.git_operation("remote", "add", "origin", "https://github.com/jedwards4b/fleximod-test2") - gitp.git_operation("fetch", "origin", "main") - gitp.git_operation("checkout", "main") - return test_dir - -@pytest.fixture -def git_fleximod(): - def _run_fleximod(path, args, input=None): - cmd = ["git", "fleximod"] + args.split() - result = subprocess.run(cmd, cwd=path, input=input, - stdout=subprocess.PIPE, stderr=subprocess.PIPE, - text=True) - if result.returncode: - print(result.stdout) - print(result.stderr) - return result - return _run_fleximod - diff --git a/.lib/git-fleximod/tests/test_a_import.py b/.lib/git-fleximod/tests/test_a_import.py deleted file mode 100644 index d5ca878de5..0000000000 --- a/.lib/git-fleximod/tests/test_a_import.py +++ /dev/null @@ -1,8 +0,0 @@ -# pylint: disable=unused-import -from git_fleximod import cli -from git_fleximod import utils -from git_fleximod.gitinterface import GitInterface -from git_fleximod.gitmodules import GitModules - -def test_import(): - print("here") diff --git a/.lib/git-fleximod/tests/test_b_update.py b/.lib/git-fleximod/tests/test_b_update.py deleted file mode 100644 index 159f1cfae0..0000000000 --- a/.lib/git-fleximod/tests/test_b_update.py +++ /dev/null @@ -1,26 +0,0 @@ -import pytest -from pathlib import Path - -def test_basic_checkout(git_fleximod, test_repo, shared_repos): - # Prepare a simple .gitmodules - gm = shared_repos['gitmodules_content'] - file_path = (test_repo / ".gitmodules") - repo_name 
= shared_repos["submodule_name"] - repo_path = shared_repos["subrepo_path"] - - file_path.write_text(gm) - - # Run the command - result = git_fleximod(test_repo, f"update {repo_name}") - - # Assertions - assert result.returncode == 0 - assert Path(test_repo / repo_path).exists() # Did the submodule directory get created? - if "sparse" in repo_name: - assert Path(test_repo / f"{repo_path}/m4").exists() # Did the submodule sparse directory get created? - assert not Path(test_repo / f"{repo_path}/README").exists() # Did only the submodule sparse directory get created? - - status = git_fleximod(test_repo, f"status {repo_name}") - - assert shared_repos["status2"] in status.stdout - diff --git a/.lib/git-fleximod/tests/test_c_required.py b/.lib/git-fleximod/tests/test_c_required.py deleted file mode 100644 index 89ab8d294d..0000000000 --- a/.lib/git-fleximod/tests/test_c_required.py +++ /dev/null @@ -1,30 +0,0 @@ -import pytest -from pathlib import Path - -def test_required(git_fleximod, test_repo, shared_repos): - file_path = (test_repo / ".gitmodules") - gm = shared_repos["gitmodules_content"] - repo_name = shared_repos["submodule_name"] - if file_path.exists(): - with file_path.open("r") as f: - gitmodules_content = f.read() - # add the entry if it does not exist - if repo_name not in gitmodules_content: - file_path.write_text(gitmodules_content+gm) - # or if it is incomplete - elif gm not in gitmodules_content: - file_path.write_text(gm) - else: - file_path.write_text(gm) - result = git_fleximod(test_repo, "update") - assert result.returncode == 0 - status = git_fleximod(test_repo, f"status {repo_name}") - assert shared_repos["status3"] in status.stdout - status = git_fleximod(test_repo, f"update --optional") - assert result.returncode == 0 - status = git_fleximod(test_repo, f"status {repo_name}") - assert shared_repos["status4"] in status.stdout - status = git_fleximod(test_repo, f"update {repo_name}") - assert result.returncode == 0 - status = git_fleximod(test_repo, f"status {repo_name}") - assert shared_repos["status4"] in status.stdout diff --git a/.lib/git-fleximod/tests/test_d_complex.py b/.lib/git-fleximod/tests/test_d_complex.py deleted file mode 100644 index fdce516274..0000000000 --- a/.lib/git-fleximod/tests/test_d_complex.py +++ /dev/null @@ -1,67 +0,0 @@ -import pytest -from pathlib import Path -from git_fleximod.gitinterface import GitInterface - -def test_complex_checkout(git_fleximod, complex_repo, logger): - status = git_fleximod(complex_repo, "status") - assert("ToplevelOptional not checked out, aligned at tag v5.3.2" in status.stdout) - assert("ToplevelRequired not checked out, aligned at tag MPIserial_2.5.0" in status.stdout) - assert("AlwaysRequired not checked out, aligned at tag MPIserial_2.4.0" in status.stdout) - assert("Complex not checked out, aligned at tag testtag01" in status.stdout) - assert("AlwaysOptional not checked out, aligned at tag MPIserial_2.3.0" in status.stdout) - - # This should checkout and update test_submodule and complex_sub - result = git_fleximod(complex_repo, "update") - assert result.returncode == 0 - - status = git_fleximod(complex_repo, "status") - assert("ToplevelOptional not checked out, aligned at tag v5.3.2" in status.stdout) - assert("ToplevelRequired at tag MPIserial_2.5.0" in status.stdout) - assert("AlwaysRequired at tag MPIserial_2.4.0" in status.stdout) - assert("Complex at tag testtag01" in status.stdout) - - # now check the complex_sub - root = (complex_repo / "modules" / "complex") - assert(not (root / "libraries" / "gptl" / 
".git").exists()) - assert(not (root / "libraries" / "mpi-serial" / ".git").exists()) - assert((root / "modules" / "mpi-serial" / ".git").exists()) - assert(not (root / "modules" / "mpi-serial2" / ".git").exists()) - assert((root / "modules" / "mpi-sparse" / ".git").exists()) - assert((root / "modules" / "mpi-sparse" / "m4").exists()) - assert(not (root / "modules" / "mpi-sparse" / "README").exists()) - - # update a single optional submodule - - result = git_fleximod(complex_repo, "update ToplevelOptional") - assert result.returncode == 0 - - status = git_fleximod(complex_repo, "status") - assert("ToplevelOptional at tag v5.3.2" in status.stdout) - assert("ToplevelRequired at tag MPIserial_2.5.0" in status.stdout) - assert("AlwaysRequired at tag MPIserial_2.4.0" in status.stdout) - assert("Complex at tag testtag01" in status.stdout) - assert("AlwaysOptional not checked out, aligned at tag MPIserial_2.3.0" in status.stdout) - - - # Finally update optional - result = git_fleximod(complex_repo, "update --optional") - assert result.returncode == 0 - - status = git_fleximod(complex_repo, "status") - assert("ToplevelOptional at tag v5.3.2" in status.stdout) - assert("ToplevelRequired at tag MPIserial_2.5.0" in status.stdout) - assert("AlwaysRequired at tag MPIserial_2.4.0" in status.stdout) - assert("Complex at tag testtag01" in status.stdout) - assert("AlwaysOptional at tag MPIserial_2.3.0" in status.stdout) - - # now check the complex_sub - root = (complex_repo / "modules" / "complex" ) - assert(not (root / "libraries" / "gptl" / ".git").exists()) - assert(not (root / "libraries" / "mpi-serial" / ".git").exists()) - assert((root / "modules" / "mpi-serial" / ".git").exists()) - assert((root / "modules" / "mpi-serial2" / ".git").exists()) - assert((root / "modules" / "mpi-sparse" / ".git").exists()) - assert((root / "modules" / "mpi-sparse" / "m4").exists()) - assert(not (root / "modules" / "mpi-sparse" / "README").exists()) - - From 253ba00e6d2747d5dc9bd0ba043b8df58c55ec00 Mon Sep 17 00:00:00 2001 From: Jim Edwards Date: Sat, 1 Jun 2024 14:59:33 -0600 Subject: [PATCH 148/161] update externals --- .gitmodules | 4 ++-- components/clm | 2 +- src/hemco | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.gitmodules b/.gitmodules index 2299a58c9d..62364e0354 100644 --- a/.gitmodules +++ b/.gitmodules @@ -185,8 +185,8 @@ fxDONOTUSEurl = https://github.com/NCAR/ParallelIO [submodule "clm"] path = components/clm -url = https://github.com/jedwards4b/CTSM -fxtag = ctsm5.2.007/candidate +url = https://github.com/ESCOMP/CTSM +fxtag = ctsm5.2.007 fxrequired = ToplevelRequired fxDONOTUSEurl = https://github.com/ESCOMP/CTSM diff --git a/components/clm b/components/clm index a7f62dab98..a9433779f0 160000 --- a/components/clm +++ b/components/clm @@ -1 +1 @@ -Subproject commit a7f62dab9830cf0fc6246ce75afff3a3412f1aab +Subproject commit a9433779f0ae499d60ad118d2ec331628f0eaaa8 diff --git a/src/hemco b/src/hemco index 83127485c1..3a6d999ab0 160000 --- a/src/hemco +++ b/src/hemco @@ -1 +1 @@ -Subproject commit 83127485c149846ba79d5472ae131acd7bda0544 +Subproject commit 3a6d999ab0dbee9f03ab6b9a13dd3b6d9670eb54 From 2c96bae4d59c6b7048448a266942a154a37dcaa3 Mon Sep 17 00:00:00 2001 From: Jim Edwards Date: Mon, 3 Jun 2024 07:36:17 -0600 Subject: [PATCH 149/161] update hemco tag --- .gitmodules | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.gitmodules b/.gitmodules index 62364e0354..cffb557ee8 100644 --- a/.gitmodules +++ b/.gitmodules @@ -57,7 +57,7 @@ [submodule "hemco"] path = 
src/hemco
     url = https://github.com/ESCOMP/HEMCO_CESM.git
-    fxtag = hemco-cesm1_2_1_hemco3_6_3_cesm_rme01
+    fxtag = hemco-cesm1_2_1_hemco3_6_3_cesm_rme
     fxrequired = AlwaysRequired
     fxDONOTUSEurl = https://github.com/ESCOMP/HEMCO_CESM.git

From 4ae9171f03b1da9cb0be7c90707b2be92cf1363d Mon Sep 17 00:00:00 2001
From: Cheryl Craig
Date: Mon, 3 Jun 2024 13:40:31 -0600
Subject: [PATCH 150/161] Update README.md with directions about git-fleximod

---
 README.md | 12 ++++++++++++
 1 file changed, 12 insertions(+)

diff --git a/README.md b/README.md
index e03fb36018..d03fd2f311 100644
--- a/README.md
+++ b/README.md
@@ -2,6 +2,18 @@
 ## NOTE: This is **unsupported** development code and is subject to the [CESM developer's agreement](http://www.cgd.ucar.edu/cseg/development-code.html).
 
+-----------
+
+To checkout externals:
+  bin/git-fleximod update
+
+The externals are stored in:
+  .gitmodules
+
+.gitmodules can be modified. Then run "bin/git-fleximod update" to get the updated externals
+
+------------
+
 ### CAM Documentation - https://ncar.github.io/CAM/doc/build/html/index.html
 ### CAM6 namelist settings - http://www.cesm.ucar.edu/models/cesm2/settings/current/cam_nml.html

From 9473a8137c37bd6a5ea487bc139c8a25d808abc0 Mon Sep 17 00:00:00 2001
From: Cheryl Craig
Date: Mon, 3 Jun 2024 14:06:24 -0600
Subject: [PATCH 151/161] Add details to README.md and add script for manage_externals/checkout_externals

---
 README.md                           | 2 ++
 manage_externals/checkout_externals | 3 +++
 2 files changed, 5 insertions(+)
 create mode 100755 manage_externals/checkout_externals

diff --git a/README.md b/README.md
index d03fd2f311..a6aa6fee8c 100644
--- a/README.md
+++ b/README.md
@@ -12,6 +12,8 @@ The externals are stored in:
 
 .gitmodules can be modified. Then run "bin/git-fleximod update" to get the updated externals
 
+Details about git-fleximod and the variables in the .gitmodules file can be found at: .lib/git-fleximod/README.md
+
 ------------
 
 ### CAM Documentation - https://ncar.github.io/CAM/doc/build/html/index.html
diff --git a/manage_externals/checkout_externals b/manage_externals/checkout_externals
new file mode 100755
index 0000000000..ac6b718ee0
--- /dev/null
+++ b/manage_externals/checkout_externals
@@ -0,0 +1,3 @@
+echo "Error: manage_externals/checkout_externals is no longer supported"
+echo "       It has been replaced by bin/git-fleximod"
+echo "       Please refer to the README.md file in the home directory of a CAM checkout for more information"
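The README directions added in the two patches above boil down to a short workflow. A minimal sketch, run from the top-level directory of a CAM checkout; the update and status subcommands are the ones exercised by the git-fleximod tests later in this series:

    # fetch every external listed in .gitmodules
    bin/git-fleximod update

    # report which externals are checked out and whether each matches its fxtag
    bin/git-fleximod status

    # after editing an fxtag in .gitmodules, re-run update to move that external
    bin/git-fleximod update

This is the workflow the follow-up patch points former manage_externals users toward.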
From 87708b631dac23ff9f586fd7e6bc4f7c3bf6f66a Mon Sep 17 00:00:00 2001
From: Cheryl Craig
Date: Mon, 3 Jun 2024 15:12:26 -0600
Subject: [PATCH 152/161] Update TGIT test to check for .gitmodules

---
 test/system/TGIT.sh | 20 ++++++++------------
 1 file changed, 8 insertions(+), 12 deletions(-)

diff --git a/test/system/TGIT.sh b/test/system/TGIT.sh
index db04179217..e6d6557030 100755
--- a/test/system/TGIT.sh
+++ b/test/system/TGIT.sh
@@ -1,6 +1,6 @@
 #!/bin/sh
 # Test for bad git repo
-# Ensures that the top-level CAM directory 
+# Ensures that the top-level CAM directory
 # has ".git" directory and ".gitignore" file,
 # and no other git files or directories.
 
@@ -9,7 +9,7 @@
 # 2: Missing ".git" directory
 # 3: Missing ".gitignore" file
 # 4: Missing ".github" directory
-# 5: More than three ".git*" files or directories
+# 5: Missing ".gitmodules" file
 # 6: Error from running an external command
 
 # Utility to check return code.
@@ -66,7 +66,7 @@ The ".gitignore" file is missing from the CAM git repo. Was this repo cloned, c
 modified incorrectly? If so then copy the .gitignore file from a standard CAM git repo.
 EOF
     rc=3
   fi
 
   # Check for ".github" directory:
   if [ ! -d "${cam_top_dir}/.github" ]; then
@@ -77,15 +77,11 @@ EOF
     rc=4
   fi
 
-  # Check if there are more ".git*" files or directories than just ".git", ".gitignore",
-  # and ".github":
-  git_file_num=$(find "${cam_top_dir}" -maxdepth 1 -name '.git*' | wc -l)
-
-  check_code "$?" "Problem running 'find' command for multi-git file check."
-
-  if [ "${git_file_num}" -gt 3 ]; then
+  # Check for ".gitmodules" file:
+  if [ ! -f "${cam_top_dir}/.gitmodules" ]; then
     cat <

Date: Wed, 5 Jun 2024 09:11:47 -0600
Subject: [PATCH 153/161] Update chem_proc tag with one with .gitignore in it

---
 .gitmodules | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/.gitmodules b/.gitmodules
index cffb557ee8..77e9c2fc56 100644
--- a/.gitmodules
+++ b/.gitmodules
@@ -2,7 +2,7 @@
     path = chem_proc
     url = https://github.com/ESCOMP/CHEM_PREPROCESSOR.git
     fxrequired = AlwaysRequired
-    fxtag = chem_proc5_0_05
+    fxtag = chem_proc5_0_06
     fxDONOTUSEurl = https://github.com/ESCOMP/CHEM_PREPROCESSOR.git
 
 [submodule "carma"]
@@ -46,7 +46,7 @@
     fxrequired = AlwaysRequired
     fxtag = fv3int_053124
     fxDONOTUSEurl = https://github.com/ESCOMP/CAM_FV3_interface.git
-    
+
 [submodule "geoschem"]
     path = src/chemistry/geoschem/geoschem_src
     url = https://github.com/geoschem/geos-chem.git
@@ -95,7 +95,7 @@
     path = src/physics/clubb
     url = https://github.com/larson-group/clubb_release
     fxrequired = AlwaysRequired
-    fxsparse = ../.clubb_sparse_checkout 
+    fxsparse = ../.clubb_sparse_checkout
    fxtag = clubb_4ncar_20231115_5406350
     fxDONOTUSEurl = https://github.com/larson-group/clubb_release
 
@@ -126,7 +126,7 @@
     url = https://github.com/ESCOMP/mizuRoute
     fxtag = cesm-coupling.n02_v2.1.2
     fxrequired = ToplevelRequired
     fxDONOTUSEurl = https://github.com/ESCOMP/mizuRoute
-    
+
 [submodule "ccs_config"]
     path = ccs_config
     url = https://github.com/ESMCI/ccs_config_cesm.git
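Every .gitmodules change in this series (patches 148, 149, 153, and later 161) follows the same pattern: a git submodule entry extended with fleximod-specific keys. A hypothetical entry for illustration only -- the name, path, and tag below are invented, and the key meanings are as used by the entries above and by the git-fleximod tests:

    [submodule "example_external"]
        path = src/example_external
        url = https://github.com/EXAMPLE-ORG/example_external.git
        fxtag = example_tag_1_0
        fxrequired = AlwaysRequired
        fxsparse = ../.example_sparse_checkout
        fxDONOTUSEurl = https://github.com/EXAMPLE-ORG/example_external.git

path and url are the ordinary submodule settings; fxtag is the tag (or hash) that bin/git-fleximod update checks out; fxrequired is one of AlwaysRequired, ToplevelRequired, ToplevelOptional, or AlwaysOptional; fxsparse, when present, names a sparse-checkout file list; fxDONOTUSEurl mirrors the upstream URL and, as the name suggests, is not intended to be edited by hand.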
From c304487aff29425777e08fc5e009320b50282149 Mon Sep 17 00:00:00 2001
From: Jim Edwards
Date: Wed, 5 Jun 2024 11:36:26 -0600
Subject: [PATCH 154/161] update complex test

---
 git_fleximod/cli.py          | 2 +-
 git_fleximod/git_fleximod.py | 8 +++++---
 tests/test_d_complex.py      | 8 ++++----
 3 files changed, 10 insertions(+), 8 deletions(-)

diff --git a/git_fleximod/cli.py b/git_fleximod/cli.py
index 7d09abd83b..d4498b84f9 100644
--- a/git_fleximod/cli.py
+++ b/git_fleximod/cli.py
@@ -21,7 +21,7 @@ def find_root_dir(filename=".gitmodules"):
         attempt = dl / filename
         if attempt.is_file():
             return str(dl)
-    utils.fatal_error("No .gitmodules found in directory tree")
+    return None
 
 
 def get_parser():
diff --git a/git_fleximod/git_fleximod.py b/git_fleximod/git_fleximod.py
index a003c0da8c..48a9b69f65 100755
--- a/git_fleximod/git_fleximod.py
+++ b/git_fleximod/git_fleximod.py
@@ -565,10 +565,12 @@ def main():
     logger.info("action is {} root_dir={} file_name={}".format(action, root_dir, file_name))
 
-    if not os.path.isfile(os.path.join(root_dir, file_name)):
-        file_path = utils.find_upwards(root_dir, file_name)
+    if not root_dir or not os.path.isfile(os.path.join(root_dir, file_name)):
+        if root_dir:
+            file_path = utils.find_upwards(root_dir, file_name)
 
-        if file_path is None:
+        if root_dir is None or file_path is None:
+            root_dir = "."
             utils.fatal_error(
                 "No {} found in {} or any of it's parents".format(file_name, root_dir)
             )
diff --git a/tests/test_d_complex.py b/tests/test_d_complex.py
index fdce516274..62889f2cba 100644
--- a/tests/test_d_complex.py
+++ b/tests/test_d_complex.py
@@ -7,7 +7,7 @@ def test_complex_checkout(git_fleximod, complex_repo, logger):
     assert("ToplevelOptional not checked out, aligned at tag v5.3.2" in status.stdout)
     assert("ToplevelRequired not checked out, aligned at tag MPIserial_2.5.0" in status.stdout)
     assert("AlwaysRequired not checked out, aligned at tag MPIserial_2.4.0" in status.stdout)
-    assert("Complex not checked out, aligned at tag testtag01" in status.stdout)
+    assert("Complex not checked out, aligned at tag testtag02" in status.stdout)
     assert("AlwaysOptional not checked out, aligned at tag MPIserial_2.3.0" in status.stdout)
 
     # This should checkout and update test_submodule and complex_sub
@@ -18,7 +18,7 @@ def test_complex_checkout(git_fleximod, complex_repo, logger):
     assert("ToplevelOptional not checked out, aligned at tag v5.3.2" in status.stdout)
     assert("ToplevelRequired at tag MPIserial_2.5.0" in status.stdout)
     assert("AlwaysRequired at tag MPIserial_2.4.0" in status.stdout)
-    assert("Complex at tag testtag01" in status.stdout)
+    assert("Complex at tag testtag02" in status.stdout)
 
     # now check the complex_sub
     root = (complex_repo / "modules" / "complex")
@@ -39,7 +39,7 @@ def test_complex_checkout(git_fleximod, complex_repo, logger):
     assert("ToplevelOptional at tag v5.3.2" in status.stdout)
     assert("ToplevelRequired at tag MPIserial_2.5.0" in status.stdout)
     assert("AlwaysRequired at tag MPIserial_2.4.0" in status.stdout)
-    assert("Complex at tag testtag01" in status.stdout)
+    assert("Complex at tag testtag02" in status.stdout)
     assert("AlwaysOptional not checked out, aligned at tag MPIserial_2.3.0" in status.stdout)
 
 
@@ -51,7 +51,7 @@ def test_complex_checkout(git_fleximod, complex_repo, logger):
     assert("ToplevelOptional at tag v5.3.2" in status.stdout)
     assert("ToplevelRequired at tag MPIserial_2.5.0" in status.stdout)
     assert("AlwaysRequired at tag MPIserial_2.4.0" in status.stdout)
-    assert("Complex at tag testtag01" in status.stdout)
+    assert("Complex at tag testtag02" in status.stdout)
     assert("AlwaysOptional at tag MPIserial_2.3.0" in status.stdout)
 
     # now check the complex_sub

From 8ca8b13358f476c9df671de8a260f97f0972f9d7 Mon Sep 17 00:00:00 2001
From: Cheryl Craig
Date: Wed, 5 Jun 2024 12:16:20 -0600
Subject: [PATCH 155/161] update chem_proc external

---
 chem_proc | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/chem_proc b/chem_proc
index ed4ba1b27e..f923081508 160000
--- a/chem_proc
+++ b/chem_proc
@@ -1 +1 @@
-Subproject commit ed4ba1b27e40c8d5af6657ec49a3e3cc8e6f8b5f
+Subproject commit f923081508f4264e61fcef2753a9898e55d1598e
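Patch 154 above changes find_root_dir to return None instead of aborting inside the helper, and moves the fatal error (with root_dir falling back to ".") into main. What the lookup amounts to is an upward search for a .gitmodules file; a shell sketch of that behavior, for illustration only (the real implementation is the Python shown in the diff):

    # walk from the current directory toward /, remembering the highest
    # directory that contains a .gitmodules file
    dir=$(pwd)
    found=
    while [ "$dir" != "/" ]; do
        [ -f "$dir/.gitmodules" ] && found=$dir
        dir=$(dirname "$dir")
    done
    if [ -n "$found" ]; then
        echo "$found"
    else
        echo "No .gitmodules found in . or any of its parents" >&2
        exit 1
    fi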
From d95f5d61ba381fbef6274eae5547e08d09e1a10d Mon Sep 17 00:00:00 2001
From: Jim Edwards
Date: Wed, 5 Jun 2024 14:08:19 -0600
Subject: [PATCH 156/161] fix status issue

---
 git_fleximod/git_fleximod.py | 11 +++++++++--
 1 file changed, 9 insertions(+), 2 deletions(-)

diff --git a/git_fleximod/git_fleximod.py b/git_fleximod/git_fleximod.py
index 48a9b69f65..a847c9fc6b 100755
--- a/git_fleximod/git_fleximod.py
+++ b/git_fleximod/git_fleximod.py
@@ -298,15 +298,22 @@ def submodules_status(gitmodules, root_dir, toplevel=False):
         # submodule commands use path, not name
         url = url.replace("git@github.com:", "https://github.com/")
         tags = rootgit.git_operation("ls-remote", "--tags", url)
+        ahash = rootgit.git_operation("submodule","status",newpath).split()[0][1:]
+        hhash = None
         atag = None
         needsupdate += 1
         if not toplevel and level:
             continue
         for htag in tags.split("\n"):
-            if tag and tag in htag:
+            if htag.endswith('^{}'):
+                htag = htag[:-3]
+            if not atag and ahash in htag:
                 atag = (htag.split()[1])[10:]
+            if not hhash and htag.endswith(tag):
+                hhash = htag.split()[0]
+            if hhash and atag:
                 break
-        if tag and tag == atag:
+        if tag and (ahash == hhash or atag == tag):
             print(f"e {name:>20} not checked out, aligned at tag {tag}")
         elif tag:
             ahash = rootgit.git_operation(

From 0593149d3c106509daa1fbb235fe5b747e26c9a5 Mon Sep 17 00:00:00 2001
From: Jim Edwards
Date: Wed, 5 Jun 2024 14:17:46 -0600
Subject: [PATCH 157/161] one more improvement

---
 git_fleximod/git_fleximod.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/git_fleximod/git_fleximod.py b/git_fleximod/git_fleximod.py
index a847c9fc6b..b0a5a33070 100755
--- a/git_fleximod/git_fleximod.py
+++ b/git_fleximod/git_fleximod.py
@@ -309,7 +309,7 @@ def submodules_status(gitmodules, root_dir, toplevel=False):
                 htag = htag[:-3]
             if not atag and ahash in htag:
                 atag = (htag.split()[1])[10:]
-            if not hhash and htag.endswith(tag):
+            if tag and not hhash and htag.endswith(tag):
                 hhash = htag.split()[0]
             if hhash and atag:
                 break
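The status fix in the two patches above works by comparing the hash currently recorded for a submodule against the tags advertised by the upstream repository. The same check can be reproduced by hand; a sketch using the chem_proc external from this series (URL and tag taken from the .gitmodules entry above):

    url=https://github.com/ESCOMP/CHEM_PREPROCESSOR.git
    tag=chem_proc5_0_06

    # list the matching tag refs; an annotated tag shows up twice, once with a
    # trailing "^{}" peeled suffix -- which is why the Python strips htag[:-3]
    git ls-remote --tags "$url" | grep "refs/tags/$tag"

    # inside a CAM checkout, the hash git-fleximod compares against comes from
    git submodule status chem_proc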
From 3ff935b835d1793e0840f00230e323efc52fba76 Mon Sep 17 00:00:00 2001
From: Jim Edwards
Date: Wed, 5 Jun 2024 14:47:54 -0600
Subject: [PATCH 158/161] update tests

---
 git_fleximod/git_fleximod.py | 7 +++++--
 tests/conftest.py            | 4 ++--
 tests/test_d_complex.py      | 5 ++---
 3 files changed, 9 insertions(+), 7 deletions(-)

diff --git a/git_fleximod/git_fleximod.py b/git_fleximod/git_fleximod.py
index b0a5a33070..ca5f90622d 100755
--- a/git_fleximod/git_fleximod.py
+++ b/git_fleximod/git_fleximod.py
@@ -298,7 +298,10 @@ def submodules_status(gitmodules, root_dir, toplevel=False):
         # submodule commands use path, not name
         url = url.replace("git@github.com:", "https://github.com/")
         tags = rootgit.git_operation("ls-remote", "--tags", url)
-        ahash = rootgit.git_operation("submodule","status",newpath).split()[0][1:]
+        result = rootgit.git_operation("submodule","status",newpath).split()
+        ahash = None
+        if result:
+            ahash = result[0][1:]
         hhash = None
         atag = None
         needsupdate += 1
@@ -307,7 +310,7 @@ def submodules_status(gitmodules, root_dir, toplevel=False):
         for htag in tags.split("\n"):
             if htag.endswith('^{}'):
                 htag = htag[:-3]
-            if not atag and ahash in htag:
+            if ahash and not atag and ahash in htag:
                 atag = (htag.split()[1])[10:]
             if tag and not hhash and htag.endswith(tag):
                 hhash = htag.split()[0]
diff --git a/tests/conftest.py b/tests/conftest.py
index 942a0efb97..1cc008eb1d 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -32,7 +32,7 @@ def logger():
      "submodule_name": "test_optional",
      "status1" : "test_optional MPIserial_2.5.0-3-gd82ce7c is out of sync with .gitmodules MPIserial_2.4.0",
      "status2" : "test_optional at tag MPIserial_2.4.0",
-     "status3" : "test_optional not checked out, aligned at tag MPIserial_2.4.0",
+     "status3" : "test_optional not checked out, out of sync at tag None, expected tag is MPIserial_2.4.0",
      "status4" : "test_optional at tag MPIserial_2.4.0",
      "gitmodules_content": """
     [submodule "test_optional"]
@@ -46,7 +46,7 @@ def logger():
      "submodule_name": "test_alwaysoptional",
      "status1" : "test_alwaysoptional MPIserial_2.3.0 is out of sync with .gitmodules e5cf35c",
      "status2" : "test_alwaysoptional at hash e5cf35c",
-     "status3" : "test_alwaysoptional not checked out, out of sync at tag MPIserial_2.3.0",
+     "status3" : "out of sync at tag None, expected tag is e5cf35c",
      "status4" : "test_alwaysoptional at hash e5cf35c",
      "gitmodules_content": """
     [submodule "test_alwaysoptional"]
diff --git a/tests/test_d_complex.py b/tests/test_d_complex.py
index 62889f2cba..edde7d816d 100644
--- a/tests/test_d_complex.py
+++ b/tests/test_d_complex.py
@@ -8,7 +8,7 @@ def test_complex_checkout(git_fleximod, complex_repo, logger):
     assert("ToplevelRequired not checked out, aligned at tag MPIserial_2.5.0" in status.stdout)
     assert("AlwaysRequired not checked out, aligned at tag MPIserial_2.4.0" in status.stdout)
     assert("Complex not checked out, aligned at tag testtag02" in status.stdout)
-    assert("AlwaysOptional not checked out, aligned at tag MPIserial_2.3.0" in status.stdout)
+    assert("AlwaysOptional not checked out, out of sync at tag None, expected tag is MPIserial_2.3.0" in status.stdout)
 
     # This should checkout and update test_submodule and complex_sub
     result = git_fleximod(complex_repo, "update")
@@ -40,8 +40,7 @@ def test_complex_checkout(git_fleximod, complex_repo, logger):
     assert("ToplevelRequired at tag MPIserial_2.5.0" in status.stdout)
     assert("AlwaysRequired at tag MPIserial_2.4.0" in status.stdout)
     assert("Complex at tag testtag02" in status.stdout)
-    assert("AlwaysOptional not checked out, aligned at tag MPIserial_2.3.0" in status.stdout)
-
+    assert("AlwaysOptional not checked out, out of sync at tag None, expected tag is MPIserial_2.3.0" in status.stdout)
 
     # Finally update optional
     result = git_fleximod(complex_repo, "update --optional")
     assert result.returncode == 0

From 87640adf43cdd3c30acd258185f128e638cb41ac Mon Sep 17 00:00:00 2001
From: Jim Edwards
Date: Thu, 6 Jun 2024 13:29:04 -0600
Subject: [PATCH 159/161] Bump to 0.7.7

---
 git_fleximod/cli.py | 2 +-
 pyproject.toml      | 2 +-
 tbump.toml          | 2 +-
 3 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/git_fleximod/cli.py b/git_fleximod/cli.py
index d4498b84f9..bc099fcbc0 100644
--- a/git_fleximod/cli.py
+++ b/git_fleximod/cli.py
@@ -2,7 +2,7 @@ import argparse
 
 from git_fleximod import utils
 
-__version__ = "0.7.6"
+__version__ = "0.7.7"
 
 def find_root_dir(filename=".gitmodules"):
     """ finds the highest directory in tree
diff --git a/pyproject.toml b/pyproject.toml
index 3c70c6c9c7..a316914bf5 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "git-fleximod"
-version = "0.7.6"
+version = "0.7.7"
 description = "Extended support for git-submodule and git-sparse-checkout"
 authors = ["Jim Edwards "]
 maintainers = ["Jim Edwards "]
diff --git a/tbump.toml b/tbump.toml
index 3854682df4..c22637ccda 100644
--- a/tbump.toml
+++ b/tbump.toml
@@ -2,7 +2,7 @@ github_url = "https://github.com/jedwards4b/git-fleximod/"
 
 [version]
-current = "0.7.6"
+current = "0.7.7"
 
 # Example of a semver regexp.
 # Make sure this matches current_version before
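The version bump above touches three files that must stay in step; the [[file]] sections of tbump.toml (visible in the copy of tbump.toml deleted earlier in this series) are what point tbump at git_fleximod/cli.py and pyproject.toml. A quick, purely illustrative consistency check after such a bump, run from the git-fleximod source tree:

    grep -n '0\.7\.7' git_fleximod/cli.py pyproject.toml tbump.toml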
# Make sure this matches current_version before From 74cfa9d7db42c6356505ac98dfc45817a473f92b Mon Sep 17 00:00:00 2001 From: Cheryl Craig Date: Fri, 7 Jun 2024 12:06:23 -0600 Subject: [PATCH 160/161] update ChangeLog for cam6_3_162 --- doc/ChangeLog | 130 ++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 130 insertions(+) diff --git a/doc/ChangeLog b/doc/ChangeLog index 3a5c46527d..9e627f133c 100644 --- a/doc/ChangeLog +++ b/doc/ChangeLog @@ -1,6 +1,136 @@ =============================================================== +Tag name: cam6_3_162 +Originator(s): cacraig, jedwards, nusbaume +Date: June 7, 2024 +One-line Summary: Remove manage externals +Github PR URL: https://github.com/ESCOMP/CAM/pull/1052 + +Purpose of changes (include the issue number and title text for each relevant GitHub issue): + - Removes manage_externals/checkout_externals and replaces with git-fleximod (no GitHub issue) + +Describe any changes made to build system: N/A + +Describe any changes made to the namelist: N/A + +List any changes to the defaults for the boundary datasets: N/A + +Describe any substantial timing or memory changes: N/A + +Code reviewed by: cacraigucar + +List all files eliminated: +D Externals.cfg +D Externals_CAM.cfg +D manage_externals + - Removed manage_externals functionality + +List all files added and what they do: +A .gitmodules +A .lib/git-fleximod + - Add git-fleximod functionality + +A bin/git-fleximod + - The actual git-fleximod executable + +A ccs_config +A chem_proc +A cime +A components/cdeps +A components/cice +A components/cism +A components/clm +A components/cmeps +A components/mizuRoute +A components/mosart +A components/rtm +A libraries/FMS +A libraries/mct +A libraries/parallelio +A share +A src/atmos_phys +A src/chemistry/geoschem/geoschem_src +A src/dynamics/fv3 +A src/dynamics/mpas/dycore +A src/hemco +A src/physics/ali_arms +A src/physics/carma/base +A src/physics/clubb +A src/physics/cosp2/src +A src/physics/pumas +A src/physics/pumas-frozen +A src/physics/rrtmgp/data +A src/physics/rrtmgp/ext + - Added external directories as required by git-submodule, which git-fleximod is built upon + +List all existing files that have been modified, and describe the changes: + +M .gitignore + - Removed unneeded gitignore lines + +M README.md + - Updated README to reflect new git-fleximod + +M manage_externals/checkout_externals + - Script which tells folks where to find git-fleximod (when they were used to running manage_externals) + +M test/system/TGIT.sh + - modified test to include a check for require .gitmodules file + +If there were any failures reported from running test_driver.sh on any test +platform, and checkin with these failures has been OK'd by the gatekeeper, +then copy the lines from the td.*.status files for the failed tests to the +appropriate machine below. All failed tests must be justified. 
+ +Most tests had the following namelist failures: + ----- +Comparison failed between '/glade/derecho/scratch/cacraig/aux_cam_20240606135848/SMS_Ln9.f19_f19_mg17.FHIST.derecho_intel.cam-outfrq9s_nochem.GC.aux_cam_20240606135848/CaseDocs/nuopc.runconfig' with '/glade/campaign/cesm/community/amwg/cam_baselines/cam6_3_161/SMS_Ln9.f19_f19_mg17.FHIST.derecho_intel.cam-outfrq9s_nochem/CaseDocs/nuopc.runconfig' +DRIVER_attributes->PELAYOUT_attributes->ALLCOMP_attributes: +ocn2glc_levels as key not in /glade/derecho/scratch/cacraig/aux_cam_20240606135848/SMS_Ln9.f19_f19_mg17.FHIST.derecho_intel.cam-outfrq9s_nochem.GC.aux_cam_20240606135848/CaseDocs/nuopc.runconfig + ----- + +derecho/intel/aux_cam: + + SMS_Lh12.f09_f09_mg17.FCSD_HCO.derecho_intel.cam-outfrq3h (Overall: DIFF) details: + ERP_Ln9.f09_f09_mg17.FCSD_HCO.derecho_intel.cam-outfrq9s (Overall: FAIL) details: + - pre-existing failure due to HEMCO not having reproducible results + + ERP_Lh12.f19_f19_mg17.FW4madSD.derecho_intel.cam-outfrq3h (Overall: DIFF) details: + FAIL ERP_Lh12.f19_f19_mg17.FW4madSD.derecho_intel.cam-outfrq3h BASELINE /glade/campaign/cesm/community/amwg/cam_baselines/cam6_3_161: ERROR BFAIL some baseline files were missing + - This test did not run in previous tag due to CTSM tag failure. Is now running, but no baseline to compare against + + ERP_Ln9.ne30_ne30_mg17.FCnudged.derecho_intel.cam-outfrq9s (Overall: FAIL) details: + FAIL ERP_Ln9.ne30_ne30_mg17.FCnudged.derecho_intel.cam-outfrq9s SHAREDLIB_BUILD failed to initialize + SMS_D_Ln9.ne16_ne16_mg17.FX2000.derecho_intel.cam-outfrq9s (Overall: PEND) details: + PEND SMS_D_Ln9.ne16_ne16_mg17.FX2000.derecho_intel.cam-outfrq9s SHAREDLIB_BUILD + SMS_D_Ln9_P1280x1.ne0ARCTICne30x4_ne0ARCTICne30x4_mt12.FHIST.derecho_intel.cam-outfrq9s (Overall: PEND) details: + PEND SMS_D_Ln9_P1280x1.ne0ARCTICne30x4_ne0ARCTICne30x4_mt12.FHIST.derecho_intel.cam-outfrq9s SHAREDLIB_BUILD + SMS_D_Ln9_P1280x1.ne0CONUSne30x8_ne0CONUSne30x8_mt12.FCHIST.derecho_intel.cam-outfrq9s (Overall: PEND) details: + PEND SMS_D_Ln9_P1280x1.ne0CONUSne30x8_ne0CONUSne30x8_mt12.FCHIST.derecho_intel.cam-outfrq9s SHAREDLIB_BUILD + - These transient grids are no longer supported by CTSM - will update tests in future PR + + SMS_D_Ln9.T42_T42.FSCAM.derecho_intel.cam-outfrq9s (Overall: FAIL) details: + FAIL SMS_D_Ln9.T42_T42.FSCAM.derecho_intel.cam-outfrq9s RUN time=43 + - Bug in med.F90 - Will go away when CICE external is updated post git-fleximod + + SMS_Lm13.f10_f10_mg37.F2000climo.derecho_intel.cam-outfrq1m (Overall: DIFF) details: + FAIL SMS_Lm13.f10_f10_mg37.F2000climo.derecho_intel.cam-outfrq1m BASELINE +/glade/campaign/cesm/community/amwg/cam_baselines/cam6_3_161: FIELDLIST field lists differ (otherwise bit-for-bit) + - cice history file has attributes that changed with this run + +izumi/nag/aux_cam: + DAE.f45_f45_mg37.FHS94.izumi_nag.cam-dae (Overall: FAIL) details: + - pre-existing failure + +izumi/gnu/aux_cam: + SMS_P48x1_D_Ln9.f19_f19_mg17.FW4madSD.izumi_gnu.cam-outfrq9s (Overall: DIFF) details: + FAIL SMS_P48x1_D_Ln9.f19_f19_mg17.FW4madSD.izumi_gnu.cam-outfrq9s BASELINE /fs/cgd/csm/models/atm/cam/pretag_bl/cam6_3_161_gnu: ERROR BFAIL some baseline files were missing + - This test did not run in previous tag due to CTSM tag failure. 
Is now running, but no baseline to compare against + +=============================================================== +=============================================================== + Tag name: cam6_3_161 Originator(s): cacraig Date: May 16, 2024 From d1e19d4ef7d72b34cdf20c834147abc7690d6ff4 Mon Sep 17 00:00:00 2001 From: "G. Dylan Dickerson" Date: Tue, 20 Aug 2024 17:14:12 -0600 Subject: [PATCH 161/161] Keep v7.3 MPAS-A for EarthWorks This is to ensure there's no conflicts with other MPAS components (esp. in the framework) and to allow for easier integration when MPAS-A OpenACC is added again. --- .gitmodules | 2 +- src/dynamics/mpas/dycore | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.gitmodules b/.gitmodules index 77e9c2fc56..a0cb5ea697 100644 --- a/.gitmodules +++ b/.gitmodules @@ -80,7 +80,7 @@ url = https://github.com/MPAS-Dev/MPAS-Model.git fxrequired = AlwaysRequired fxsparse = ../.mpas_sparse_checkout - fxtag = b8c33daa + fxtag = ff76a231 fxDONOTUSEurl = https://github.com/MPAS-Dev/MPAS-Model.git [submodule "cosp2"] diff --git a/src/dynamics/mpas/dycore b/src/dynamics/mpas/dycore index b8c33daa4b..ff76a231dd 160000 --- a/src/dynamics/mpas/dycore +++ b/src/dynamics/mpas/dycore @@ -1 +1 @@ -Subproject commit b8c33daa4b124f116b0ba1b1386968c4c15b42e4 +Subproject commit ff76a231ddf6bfd3bdb878afb02821c70ba1a856
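
The final patch above (PATCH 161/161) only moves the MPAS-A pin: .gitmodules records the new fxtag (ff76a231) and the gitlink at src/dynamics/mpas/dycore is advanced to the matching commit. A minimal sketch of how an existing checkout might pick the change up, assuming the bin/git-fleximod wrapper added in cam6_3_162 and the update subcommand / status output exercised by the tests earlier in this series:

    #!/bin/bash
    # Sketch only: refresh a working copy after the MPAS-A fxtag bump.
    # Assumes bin/git-fleximod sits at the top of the clone (added in cam6_3_162)
    # and that the dycore lives at src/dynamics/mpas/dycore as in the diff above.
    set -e

    # Report submodules that are out of sync with the fxtag recorded in .gitmodules
    ./bin/git-fleximod status

    # Check out required submodules at their recorded fxtag;
    # "update --optional" would also fetch the AlwaysOptional-style entries
    ./bin/git-fleximod update

    # Confirm the dycore now points at the new pin
    git -C src/dynamics/mpas/dycore log --oneline -1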
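
The 0.7.7 bump (PATCH 159/161) edits the same version string in three files: git_fleximod/cli.py, pyproject.toml and tbump.toml. With tbump configured as in the tbump.toml shown, a single command is expected to keep them in lockstep; a sketch, assuming tbump is installed and its [[file]] entries cover all three files (those entries are outside the hunk shown), with 0.7.8 used purely as an illustrative next version:

    # Sketch only: drive a future bump with tbump instead of editing by hand.
    # 0.7.8 is a hypothetical next version, not one that exists in this series.
    pip install tbump
    tbump 0.7.8 --dry-run   # preview which files would be rewritten
    tbump 0.7.8             # rewrite the version strings; by default tbump also commits and tags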