From d6ebf8d7275380b7cb35361b15b37625f3b4c96d Mon Sep 17 00:00:00 2001
From: Abel Legese <73869888+Abellegese@users.noreply.github.com>
Date: Tue, 3 Dec 2024 13:22:06 +0300
Subject: [PATCH] CLI test integration (#1383)

* Update config.yml - fix Circle CI pipeline

* Playground Implementation

* Support CLI batching, and more

* Update CLI test

* A few fixes on the playground test

* Playground with all commands at #1368

* Session Parallelization on CLI test playground

* Session Parallelization on CLI test playground with pytest dependency

* Rich lib integration pyproject config

* Make the serializer use the fields from the header (#1406)

---------

Co-authored-by: Dhanshree Arora
---
 .github/workflows/tests_and_cleanup.yml    | 155 +++++++++----
 conftest.py                                |  48 +++++
 pyproject.toml                             |   4 +-
 test/playground/__init__.py                |   0
 test/playground/commands.py                | 255 ++++++++++++++++++++
 test/playground/config.yml                 |  21 ++
 test/playground/files/sample_error_log.txt |   3 +
 test/playground/noxfile.py                 | 157 +++++++++++++
 test/playground/rules.py                   | 157 +++++++++++++
 test/playground/shared.py                  |   3 +
 test/playground/utils.py                   | 111 ++++++++++
 11 files changed, 874 insertions(+), 40 deletions(-)
 create mode 100644 conftest.py
 create mode 100644 test/playground/__init__.py
 create mode 100644 test/playground/commands.py
 create mode 100644 test/playground/config.yml
 create mode 100644 test/playground/files/sample_error_log.txt
 create mode 100644 test/playground/noxfile.py
 create mode 100644 test/playground/rules.py
 create mode 100644 test/playground/shared.py
 create mode 100644 test/playground/utils.py

diff --git a/.github/workflows/tests_and_cleanup.yml b/.github/workflows/tests_and_cleanup.yml
index 6e193f48d..9d3cb815f 100644
--- a/.github/workflows/tests_and_cleanup.yml
+++ b/.github/workflows/tests_and_cleanup.yml
@@ -2,9 +2,9 @@ name: Ersilia tests, installation checks, and cleanup of model request template
 
 on:
   pull_request:
-    branches: [ master ]
+    branches: [master]
   push:
-    branches: [ master ]
+    branches: [master]
   workflow_dispatch:
 
 jobs:
@@ -15,19 +15,19 @@ jobs:
     runs-on: ubuntu-latest
 
     steps:
-    - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
-    - name: Set up Python ${{ matrix.python-version }}
-      uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3
-      with:
-        python-version: ${{ matrix.python-version }}
-    - name: Install Ersilia
-      run: |
-        python -m pip install git+https://github.com/ersilia-os/bentoml-ersilia.git
-        python -m pip install -e .
-    - name: Test help command
-      run: |
-        ersilia --help
-        ersilia --version
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
+      - name: Set up Python ${{ matrix.python-version }}
+        uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3
+        with:
+          python-version: ${{ matrix.python-version }}
+      - name: Install Ersilia
+        run: |
+          python -m pip install git+https://github.com/ersilia-os/bentoml-ersilia.git
+          python -m pip install -e .
+      - name: Test help command
+        run: |
+          ersilia --help
+          ersilia --version
 
   test-docker:
     strategy:
       matrix:
         python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"]
     runs-on: ubuntu-latest
     steps:
-    - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
-    - name: Set up Python ${{ matrix.python-version }}
-      uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3
-      with:
-        python-version: ${{ matrix.python-version }}
-    - name: Install Ersilia without pre-installing BentoML
-      run: |
-        python -m pip install -e .
-    - name: Test help command again
-      run: |
-        ersilia --help
-        ersilia --version
-    - name: Fetch molecular weight model from DockerHub
-      run: |
-        ersilia -v fetch molecular-weight --from_dockerhub
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
+      - name: Set up Python ${{ matrix.python-version }}
+        uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3
+        with:
+          python-version: ${{ matrix.python-version }}
+      - name: Install Ersilia without pre-installing BentoML
+        run: |
+          python -m pip install -e .
+      - name: Test help command again
+        run: |
+          ersilia --help
+          ersilia --version
+      - name: Fetch molecular weight model from DockerHub
+        run: |
+          ersilia -v fetch molecular-weight --from_dockerhub
 
   test-os:
     strategy:
@@ -91,10 +91,9 @@ jobs:
       - name: Source conda
         run: source $CONDA/etc/profile.d/conda.sh
-
+
       - name: Set Python to 3.10.10
-        run:
-          conda install -y python=3.10.10
+        run: conda install -y python=3.10.10
 
       - name: Install dependencies
         run: |
@@ -115,9 +114,89 @@ jobs:
       - name: Run pytest
         run: |
           source activate
-          pip install pytest pytest-benchmark pytest-asyncio
+          pip install pytest pytest-benchmark pytest-asyncio nox rich
           pytest
 
+  run-cli-test-single:
+    needs: run-pytest
+    runs-on: ubuntu-22.04
+    strategy:
+      matrix:
+        session:
+          - setup
+          - test_from_github
+          - test_from_dockerhub
+          - test_auto_fetcher_decider
+          - test_conventional_run
+    steps:
+      - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # pin@v3.5.3
+
+      - name: Add conda to system path
+        run: echo $CONDA/bin >> $GITHUB_PATH
+
+      - name: Source conda
+        run: source $CONDA/etc/profile.d/conda.sh
+
+      - name: Set Python to 3.10.10
+        run: conda install -y python=3.10.10
+
+      - name: Install dependencies
+        run: |
+          source activate
+          conda init
+          conda install git-lfs -c conda-forge
+          git-lfs install
+          conda install gh -c conda-forge
+
+      - name: Install ersilia
+        run: |
+          source activate
+          python --version
+          echo "After conda init"
+          conda init
+          python -m pip install -e .[test]
+
+      - name: Run CLI Test Default
+        run: |
+          source activate
+          nox -f test/playground/noxfile.py -s ${{ matrix.session }}
+
+  run-cli-test-multiple:
+    needs: run-pytest
+    runs-on: ubuntu-22.04
+    steps:
+      - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # pin@v3.5.3
+
+      - name: Add conda to system path
+        run: echo $CONDA/bin >> $GITHUB_PATH
+
+      - name: Source conda
+        run: source $CONDA/etc/profile.d/conda.sh
+
+      - name: Set Python to 3.10.10
+        run: conda install -y python=3.10.10
+
+      - name: Install dependencies
+        run: |
+          source activate
+          conda init
+          conda install git-lfs -c conda-forge
+          git-lfs install
+          conda install gh -c conda-forge
+
+      - name: Install ersilia
+        run: |
+          source activate
+          python --version
+          echo "After conda init"
+          conda init
+          python -m pip install -e .[test]
+
+      - name: Run CLI Test Multiple
+        run: |
+          source activate
+          nox -f test/playground/noxfile.py -s setup test_fetch_multiple_models test_serve_multiple_models
+
   test-colab-notebook:
     runs-on: ubuntu-22.04
     steps:
@@ -129,8 +208,7 @@ jobs:
       - name: Source conda
         run: source $CONDA/etc/profile.d/conda.sh
 
       - name: Set Python to 3.10.10
-        run:
-          conda install -y python=3.10.10
+        run: conda install -y python=3.10.10
 
       - name: Install dependencies
         run: |
@@ -160,7 +238,6 @@ jobs:
     runs-on: ubuntu-22.04
     steps:
       - uses: actions/checkout@v4.2.2
-
       - name: Path Filter
        id: filter
         uses: dorny/paths-filter@v3
@@ -173,7 +250,7 @@ jobs:
         if: steps.filter.outputs.tag-file == 'true'
         uses: actions/setup-python@v5
         with:
-          python-version: '3.10'
+          python-version: "3.10"
 
       - name: Install dependencies
         if: steps.filter.outputs.tag-file == 'true'
@@ -197,4 +274,4 @@ jobs:
           repository: "ersilia-os/ersilia"
           github_token: ${{ secrets.GITHUB_TOKEN }}
           force: true
-          branch: "master"
\ No newline at end of file
+          branch: "master"
diff --git a/conftest.py b/conftest.py
new file mode 100644
index 000000000..9417daa98
--- /dev/null
+++ b/conftest.py
@@ -0,0 +1,48 @@
+from test.playground.shared import results
+from rich.table import Table
+from rich.console import Console
+from rich.text import Text
+from rich import box
+from rich.panel import Panel
+
+
+def pytest_terminal_summary(terminalreporter, exitstatus, config):
+    console = Console()
+
+    docker_status = Text("✔", style="green") if any(result["activate_docker"] for result in results) else Text("✘", style="red")
+    runner = results[0]["runner"] if results else "N/A"
+    cli_type = results[0]["cli_type"] if results else "N/A"
+
+    header_panel = Panel.fit(
+        f"Docker Status: {docker_status}\nRunner: {runner}\nCLI Type: {cli_type}",
+        title="Execution Summary",
+        border_style="bold",
+    )
+    console.print(header_panel)
+
+    table = Table(title="Command Execution Summary", box=box.SQUARE)
+    table.add_column("Command", width=50)
+    table.add_column("Description", width=15)
+    table.add_column("Time Taken", width=15)
+    table.add_column("Max Memory", width=15)
+    table.add_column("Status", width=20)
+    table.add_column("Checkups", width=30)
+
+    for result in results:
+        formatted_checkups = []
+        for check in result["checkups"]:
+            if check["status"]:
+                formatted_checkups.append(Text("✔", style="green") + f" {check['name']}")
+            else:
+                formatted_checkups.append(Text("✘", style="red") + f" {check['name']}")
+        checkups_text = "\n".join(str(checkup) for checkup in formatted_checkups)
+
+        table.add_row(
+            result["command"],
+            result["description"],
+            result["time_taken"],
+            result["max_memory"],
+            result["status"],
+            checkups_text,
+        )
+
+    console.print(table)
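For reference, each entry that test/playground/commands.py (added further down in this patch) appends to the shared `results` list carries exactly the fields the hook above renders. An illustrative sketch of one entry; the values are made up, only the keys come from the code:

    {
        "command": "ersilia -v fetch eos3b5e --from_github",
        "description": "fetch",
        "time_taken": "1.25 min",
        "max_memory": "210.50 MB",
        "status": Text("PASSED", style="green"),
        "checkups": [{"name": "Folder exists at ~/eos/repository/eos3b5e", "status": True}],
        "activate_docker": True,
        "runner": "single",
        "cli_type": "all",
    }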
diff --git a/pyproject.toml b/pyproject.toml
index b1896fa66..d13f198a3 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -62,12 +62,14 @@ fuzzywuzzy = { version = "^0.18.0", optional = true }
 sphinx = { version = ">=6.0.0", optional = true } # for minimum version and support for Python 3.10
 jinja2 = { version = "^3.1.2", optional = true }
 scipy = { version = "<=1.10.0", optional = true }
+nox = { version = "*", optional = true }
+rich = { version = "*", optional = true }
 
 [tool.poetry.extras]
 # Instead of using poetry dependency groups, we use extras to make it pip installable
 lake = ["isaura"]
 docs = ["sphinx", "jinja2"]
-test = ["pytest", "pytest-asyncio", "pytest-benchmark", "fuzzywuzzy", "scipy"]
+test = ["pytest", "pytest-asyncio", "pytest-benchmark", "nox", "rich", "fuzzywuzzy", "scipy"]
 #all = [lake, docs, test]
 
 [tool.poetry.scripts]
diff --git a/test/playground/__init__.py b/test/playground/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/test/playground/commands.py b/test/playground/commands.py
new file mode 100644
index 000000000..5cb58eef9
--- /dev/null
+++ b/test/playground/commands.py
@@ -0,0 +1,255 @@
+import pytest
+import subprocess
+import time
+import psutil
+import yaml
+from rich.text import Text
+from pathlib import Path
+from .shared import results
+from .rules import get_rule
+from .utils import (
+    create_compound_input_csv,
+    get_command_names,
+    get_commands,
+    handle_error_logging,
+    save_as_json,
+)
+
+config = yaml.safe_load(Path("config.yml").read_text())
+delete_model = config.get("delete_model", False)
+activate_docker = config.get("activate_docker", False)
+runner = config.get("runner", "single")
+cli_type = config.get("cli_type", "all")
+output_file = config.get("output_file")
+input_file = config.get("input_file")
+redirect = config.get("output_redirection", False)
+
+if runner == "single":
+    model_ids = [config["model_id"]]
+else:
+    model_ids = config["model_ids"]
+
+base_path = Path.home() / "eos"
+max_runtime_minutes = config.get("max_runtime_minutes", None)
+from_github = "--from_github" in config.get("fetch_flags", "")
+from_dockerhub = "--from_dockerhub" in config.get("fetch_flags", "")
+
+
+def execute_command(command, description="", dest_path=None, repo_path=None):
+    # generate the example compound input CSV consumed by the run command
+    create_compound_input_csv(config.get("input_file"))
+    # bring the Docker daemon into the state the config asks for
+    docker_activated = False
+    if config and config.get("activate_docker"):
+        docker_status = subprocess.run(["systemctl", "is-active", "--quiet", "docker"])
+        if docker_status.returncode != 0:
+            subprocess.run(["systemctl", "start", "docker"], check=True)
+        docker_activated = True
+    else:
+        subprocess.run(["systemctl", "stop", "docker"], check=True)
+
+    start_time, max_memory, success, result, checkups = time.time(), 0, False, "", []
+
+    proc = psutil.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+
+    try:
+        # poll the process, sampling its peak resident memory while it runs
+        while proc.poll() is None:
+            max_memory = max(max_memory, proc.memory_info().rss / (1024 * 1024))
+            time.sleep(0.1)
+
+        success = proc.returncode == 0
+        stdout, stderr = proc.communicate()
+
+        if success:
+            result = stdout.decode()
+        else:
+            result = stderr.decode()
+
+    except Exception as e:
+        proc.kill()
+        result = str(e)
+
+        if config.get("log_error", False):
+            handle_error_logging(command, description, result, config)
+
+        pytest.fail(f"{description} '{' '.join(command)}' failed with error: {result}")
+
+    if description == "run" and success and config.get("output_redirection"):
+        save_as_json(result, output_file, remove_list=[input_file])
+
+    checkups = apply_rules(command, description, dest_path, repo_path, config)
+
+    rules_success = all(check["status"] for check in checkups)
+    overall_success = success and rules_success
+
+    if not overall_success and config.get("log_error", False):
+        handle_error_logging(command, description, result, config, checkups)
+
+    status_text = Text("PASSED", style="green") if overall_success else Text("FAILED", style="red")
+
+    results.append(
+        {
+            "command": " ".join(command),
".join(command), + "description": description, + "time_taken": f"{(time.time() - start_time) / 60:.2f} min", + "max_memory": f"{max_memory:.2f} MB", + "status": status_text, + "checkups": checkups, + "activate_docker": docker_activated, + "runner": config.get("runner"), + "cli_type": config.get("cli_type"), + } + ) + + return overall_success, (time.time() - start_time) / 60 + +def apply_rules( + command, + description, + dest_path, + repo_path, + config + ): + checkups = [] + try: + if description == "fetch": + if from_github: + checkups.append( + get_rule( + "folder_exists", + folder_path=repo_path, + expected_status=True, + ) + ) + if from_dockerhub: + checkups.append( + get_rule( + "folder_exists", + folder_path=dest_path, + expected_status=True, + ) + ) + checkups.append( + get_rule( + "dockerhub_status", + dest_path=dest_path, + expected_status=True, + ) + ) + elif description == "run": + checkups.append( + get_rule( + "file_exists", + file_path=output_file, + expected_status=True, + ) + ) + checkups.append( + get_rule( + "file_content_check", + file_path=output_file, + expected_status="not null", + ) + ) + elif description == "serve": + if from_dockerhub: + checkups.append( + get_rule( + "dockerhub_status", + dest_path=dest_path, + expected_status=True, + ) + ) + except Exception as rule_error: + handle_error_logging( + command, + description, + rule_error, + config, + checkups + ) + pytest.fail(f"Rule exception occurred: {rule_error}") + + return checkups + + +@pytest.fixture(scope="module", autouse=True) +def delete_model_command(): + """Deletes models if delete_model is True.""" + if delete_model: + for model_id in model_ids: + dest_path = base_path / "dest" / model_id + repo_path = base_path / "repository" / model_id + delete_command = ["ersilia", "-v", "delete", model_id] + success, _ = execute_command( + delete_command, + description="delete", + dest_path=dest_path, + repo_path=repo_path, + ) + assert success, f"Delete command failed for model ID {model_id}" + assert ( + not dest_path.exists() + ), f"Destination folder for {model_id} still exists after delete" + + +@pytest.mark.parametrize( + "model_id", + model_ids if runner == "multiple" else [config.get("model_id")] +) +@pytest.mark.parametrize("command_name", get_command_names(model_ids[0], cli_type, config)) +def test_command(model_id, command_name): + commands = get_commands(model_id, config) + command = commands[command_name] + dest_path = base_path / "dest" / model_id + repo_path = base_path / "repository" / model_id + + success, time_taken = execute_command( + command, + description=command_name, + dest_path=dest_path, + repo_path=repo_path, + ) + + assert success, f"Command '{command_name}' failed for model ID {model_id}" + + if command_name == "run" and max_runtime_minutes is not None: + assert ( + time_taken <= max_runtime_minutes + ), f"Command '{command_name}' for model ID {model_id} exceeded max runtime of {max_runtime_minutes} minutes" diff --git a/test/playground/config.yml b/test/playground/config.yml new file mode 100644 index 000000000..5a8689b8d --- /dev/null +++ b/test/playground/config.yml @@ -0,0 +1,21 @@ +activate_docker: true +cli_type: all +delete_model: true +fetch_flags: --from_github +input_file: files/input.csv +log_error: true +log_path: files/error_log +max_runtime_minutes: 10 +model_id: eos3b5e +model_ids: +- eos3b5e +- eos4e40 +- eos9gg2 +output_file: files/result.csv +output_redirection: false +overwrite_ersilia_repo: false +python_version: 3.10.10 +run_flags: '' +runner: single +serve_flags: 
diff --git a/test/playground/config.yml b/test/playground/config.yml
new file mode 100644
index 000000000..5a8689b8d
--- /dev/null
+++ b/test/playground/config.yml
@@ -0,0 +1,21 @@
+activate_docker: true
+cli_type: all
+delete_model: true
+fetch_flags: --from_github
+input_file: files/input.csv
+log_error: true
+log_path: files/error_log
+max_runtime_minutes: 10
+model_id: eos3b5e
+model_ids:
+- eos3b5e
+- eos4e40
+- eos9gg2
+output_file: files/result.csv
+output_redirection: false
+overwrite_ersilia_repo: false
+python_version: 3.10.10
+run_flags: ''
+runner: single
+serve_flags: ''
+use_existing_env: true
diff --git a/test/playground/files/sample_error_log.txt b/test/playground/files/sample_error_log.txt
new file mode 100644
index 000000000..27469a799
--- /dev/null
+++ b/test/playground/files/sample_error_log.txt
@@ -0,0 +1,3 @@
+Command: ersilia -v serve eos3b5e
+Description: serve
+Error: Expectation failed for DockerHubStatusRule: Expected DockerHub status to be True, but it was False.
diff --git a/test/playground/noxfile.py b/test/playground/noxfile.py
new file mode 100644
index 000000000..1916e0d3a
--- /dev/null
+++ b/test/playground/noxfile.py
@@ -0,0 +1,157 @@
+import nox
+import yaml
+import shutil
+from pathlib import Path
+from ersilia.utils.logging import logger
+
+ORIGINAL_DIR = Path.cwd()
+config_path = Path("config.yml")
+config = yaml.safe_load(config_path.read_text())
+REPO_URL = "https://github.com/ersilia-os/ersilia.git"
+REPO_DIR = Path("ersilia")
+
+
+def update_yaml_values(new_values: dict):
+    existing_config = yaml.safe_load(config_path.read_text())
+    existing_config.update(new_values)
+    config_path.write_text(yaml.dump(existing_config))
+
+
+def get_python_version():
+    return config.get("python_version", "3.10.10")
+
+
+def install_dependencies(session):
+    session.install(
+        "pytest",
+        "pytest-asyncio",
+        "pytest-xdist",
+        "psutil",
+        "PyYAML",
+        "rich",
+    )
+
+
+def setup_ersilia(session):
+    if REPO_DIR.exists() and config.get("overwrite_ersilia_repo", False):
+        logger.info(f"Overwriting existing repository directory: {REPO_DIR}")
+        shutil.rmtree(REPO_DIR)
+
+    if not REPO_DIR.exists():
+        session.run("git", "clone", REPO_URL, external=True)
+    else:
+        logger.info(f"Using existing repository directory: {REPO_DIR}")
+
+    session.chdir(REPO_DIR)
+    session.install("-e", ".")
+    session.chdir(ORIGINAL_DIR)
+
+
+@nox.session(venv_backend="conda", python=get_python_version())
+def setup(session):
+    install_dependencies(session)
+    setup_ersilia(session)
+
+
+@nox.session(venv_backend="conda", python=get_python_version())
+def test_from_github(session):
+    install_dependencies(session)
+    logger.info(f'CLI test for model: {config.get("model_id")} and {config.get("fetch_flags")}')
+    session.run("pytest", "commands.py", "-v", silent=False)
+
+
+@nox.session(venv_backend="conda", python=get_python_version())
+def test_from_dockerhub(session):
+    install_dependencies(session)
+    update_yaml_values({"fetch_flags": "--from_dockerhub"})
+    logger.info(f'CLI test for model: {config.get("model_id")} and --from_dockerhub')
+    session.run("pytest", "commands.py", "-v", silent=False)
+
+
+@nox.session(venv_backend="conda", python=get_python_version())
+def test_auto_fetcher_decider(session):
+    install_dependencies(session)
+    update_yaml_values({"fetch_flags": ""})
+    logger.info(f'CLI test for model: {config.get("model_id")} and auto fetcher decider')
+    session.run("pytest", "commands.py", "-v", silent=False)
+
+
+@nox.session(venv_backend="conda", python=get_python_version())
+def test_fetch_multiple_models(session):
+    install_dependencies(session)
+    update_yaml_values({"runner": "multiple", "cli_type": "fetch", "fetch_flags": "--from_dockerhub"})
+    logger.info("Fetching and Serving Multiple Models: Fetching")
+    session.run("pytest", "commands.py", "-v", silent=False)
+
+
"multiple", "cli_type": "serve", "delete_model": False} + ) + logger.info( + f"Fetching and Serving Multiple Models: Serving" + ) + session.run("pytest", "commands.py", "-v", silent=False) + + +@nox.session( + venv_backend="conda", python=get_python_version() +) +def test_conventional_run(session): + """Run pytest for standard and conventional run.""" + install_dependencies(session) + update_yaml_values( + { + "runner": "single", + "cli_type": "all", + "fetch_flags": "--from_dockerhub", + "output_file": "files/output_eos9gg2_0.json", + "output_redirection": "true", + "delete_model": True + } + ) + logger.info( + f"Standard and Conventional Run: Conventional" + ) + session.run("pytest", "commands.py", "-v", silent=False) diff --git a/test/playground/rules.py b/test/playground/rules.py new file mode 100644 index 000000000..f749163fa --- /dev/null +++ b/test/playground/rules.py @@ -0,0 +1,157 @@ +import json +import csv +from pathlib import Path + +RULE_REGISTRY = {} + + +class CommandRule: + def check(self, *args, **kwargs): + raise NotImplementedError( + "Each rule must implement a check method." + ) + + +def register_rule(name): + def decorator(cls): + RULE_REGISTRY[name] = cls + return cls + + return decorator + + +@register_rule("folder_exists") +class FolderExistsRule(CommandRule): + def __init__(self): + pass + + def check(self, folder_path, expected_status): + actual_status = Path(folder_path).exists() and any( + Path(folder_path).iterdir() + ) + if actual_status != expected_status: + raise AssertionError( + f"Expectation failed for FolderExistsRule: " + f"Expected folder to {'exist' if expected_status else 'not exist'}, " + f"but it {'exists' if actual_status else 'does not exist'}." + ) + return { + "name": f"Folder exists at {folder_path}", + "status": actual_status, + } + + +@register_rule("file_exists") +class FileExistsRule(CommandRule): + def __init__(self): + pass + + def check(self, file_path, expected_status): + actual_status = Path(file_path).exists() + if actual_status != expected_status: + raise AssertionError( + f"Expectation failed for FileExistsRule: " + f"Expected file to {'exist' if expected_status else 'not exist'}, " + f"but it {'exists' if actual_status else 'does not exist'}." + ) + return { + "name": f"File exists at {file_path}", + "status": actual_status, + } + + +@register_rule("dockerhub_status") +class DockerHubStatusRule(CommandRule): + def __init__(self): + pass + + def check(self, expected_status, dest_path): + dockerhub_file = Path(dest_path) / "from_dockerhub.json" + if dockerhub_file.exists(): + with open(dockerhub_file, "r") as f: + content = f.read() + actual_status = ( + f'"docker_hub": {str(expected_status).lower()}' + in content + ) + else: + actual_status = False + + if actual_status != expected_status: + raise AssertionError( + f"Expectation failed for DockerHubStatusRule: " + f"Expected DockerHub status to be {expected_status}, but it was {actual_status}." + ) + return { + "name": f"DockerHub status is {actual_status}", + "status": actual_status, + } + + +@register_rule("file_content_check") +class FileContentCheckRule(CommandRule): + def __init__(self): + pass + + def check(self, file_path, expected_status): + if not Path(file_path).exists(): + raise FileNotFoundError( + f"File {file_path} does not exist." + ) + + file_extension = Path(file_path).suffix.lower() + if file_extension not in [".json", ".csv"]: + raise ValueError( + f"Unsupported file type: {file_extension}. Only JSON and CSV are supported." 
+ ) + + if file_extension == ".json": + actual_status = self._check_json_content(file_path) + elif file_extension == ".csv": + actual_status = self._check_csv_content(file_path) + else: + raise ValueError( + f"Unexpected error occurred with file extension: {file_extension}" + ) + + if actual_status != expected_status: + raise AssertionError( + f"Expectation failed for FileContentCheckRule: " + f"Expected file content to be '{expected_status}', " + f"but it was '{actual_status}'." + ) + + return { + "name": f"File content check at {file_path}", + "status": actual_status, + } + + def _check_json_content(self, file_path): + """Checks the content of a JSON file.""" + with open(file_path, "r") as f: + try: + content = json.load(f) + return "not null" if content else "null" + except json.JSONDecodeError as e: + raise ValueError( + f"Invalid JSON content in file {file_path}: {e}" + ) + + def _check_csv_content(self, file_path): + """Checks the content of a CSV file.""" + with open(file_path, "r") as f: + reader = csv.reader(f) + try: + rows = list(reader) + return "not null" if len(rows) > 1 else "null" + except csv.Error as e: + raise ValueError( + f"Invalid CSV content in file {file_path}: {e}" + ) + + +def get_rule(rule_name, *args, **kwargs): + rule_class = RULE_REGISTRY.get(rule_name) + if not rule_class: + raise ValueError(f"Rule '{rule_name}' is not registered.") + return rule_class().check(*args, **kwargs) diff --git a/test/playground/shared.py b/test/playground/shared.py new file mode 100644 index 000000000..c196b394b --- /dev/null +++ b/test/playground/shared.py @@ -0,0 +1,3 @@ +# To store cross module data for +# Pytest print hook +results = [] \ No newline at end of file diff --git a/test/playground/utils.py b/test/playground/utils.py new file mode 100644 index 000000000..409a3beb4 --- /dev/null +++ b/test/playground/utils.py @@ -0,0 +1,111 @@ +import csv +import json +import re +from datetime import datetime + + +def create_compound_input_csv(csv_path): + input_data = [ + "COc1ccc2c(NC(=O)Nc3cccc(C(F)(F)F)n3)ccnc2c1", + "O=C(O)c1ccccc1NC(=O)N1CCC(c2ccccc2C(F)(F)F)CC1", + "O=C(O)c1cc(C(=O)O)c(C(=O)N(Cc2cccc(Oc3ccccc3)c2)[C@H]2CCCc3ccccc32)cc1C(=O)O", + "Cc1ccc(N2CCN(Cc3nc4ccccc4[nH]3)CC2)cc1C", + "Cc1cccc(NC(=O)CN2CCC(c3ccccn3)CC2)c1", + "Clc1cccc(-c2nnnn2Cc2cccnc2)c1Cl", + "CNC(=O)Nc1ccc2c(c1)CC[C@@]21OC(=O)N(CC(=O)N(Cc2ccc(F)cc2)[C@@H](C)C(F)(F)F)C1=O", + "Cc1[nH]nc2ccc(-c3cncc(OC[C@@H](N)Cc4ccccc4)c3)cc12", + "NCCCCCCCCCCNS(=O)(=O)c1cccc2c(Cl)cccc12", + ] + + with open(csv_path, mode="w", newline="") as file: + writer = csv.writer(file) + writer.writerow(["Input"]) + for line in input_data: + writer.writerow([line]) + + +def save_as_json(result, output_file, remove_list=None): + try: + if remove_list: + for item in remove_list: + result = result.replace(item, "") + + stripped, formatted = re.split(r"\}\s*\{", result.strip()), [] + + for i, line in enumerate(stripped): + if i == 0: + line = line + "}" + elif i == len(stripped) - 1: + line = "{" + line + else: + line = "{" + line + "}" + formatted.append(line) + + _data = [] + for obj in formatted: + try: + _data.append(json.loads(obj)) + except json.JSONDecodeError as e: + print(f"Skipping invalid JSON object: {obj}. 
diff --git a/test/playground/utils.py b/test/playground/utils.py
new file mode 100644
index 000000000..409a3beb4
--- /dev/null
+++ b/test/playground/utils.py
@@ -0,0 +1,111 @@
+import csv
+import json
+import re
+from datetime import datetime
+
+
+def create_compound_input_csv(csv_path):
+    input_data = [
+        "COc1ccc2c(NC(=O)Nc3cccc(C(F)(F)F)n3)ccnc2c1",
+        "O=C(O)c1ccccc1NC(=O)N1CCC(c2ccccc2C(F)(F)F)CC1",
+        "O=C(O)c1cc(C(=O)O)c(C(=O)N(Cc2cccc(Oc3ccccc3)c2)[C@H]2CCCc3ccccc32)cc1C(=O)O",
+        "Cc1ccc(N2CCN(Cc3nc4ccccc4[nH]3)CC2)cc1C",
+        "Cc1cccc(NC(=O)CN2CCC(c3ccccn3)CC2)c1",
+        "Clc1cccc(-c2nnnn2Cc2cccnc2)c1Cl",
+        "CNC(=O)Nc1ccc2c(c1)CC[C@@]21OC(=O)N(CC(=O)N(Cc2ccc(F)cc2)[C@@H](C)C(F)(F)F)C1=O",
+        "Cc1[nH]nc2ccc(-c3cncc(OC[C@@H](N)Cc4ccccc4)c3)cc12",
+        "NCCCCCCCCCCNS(=O)(=O)c1cccc2c(Cl)cccc12",
+    ]
+
+    with open(csv_path, mode="w", newline="") as file:
+        writer = csv.writer(file)
+        writer.writerow(["Input"])
+        for line in input_data:
+            writer.writerow([line])
+
+
+def save_as_json(result, output_file, remove_list=None):
+    try:
+        if remove_list:
+            for item in remove_list:
+                result = result.replace(item, "")
+
+        # stdout may contain several JSON objects back to back; split on the
+        # "}{" boundaries and restore the braces the split consumed
+        stripped, formatted = re.split(r"\}\s*\{", result.strip()), []
+
+        if len(stripped) == 1:
+            # a single object keeps its braces, nothing to restore
+            formatted.append(stripped[0])
+        else:
+            for i, line in enumerate(stripped):
+                if i == 0:
+                    line = line + "}"
+                elif i == len(stripped) - 1:
+                    line = "{" + line
+                else:
+                    line = "{" + line + "}"
+                formatted.append(line)
+
+        _data = []
+        for obj in formatted:
+            try:
+                _data.append(json.loads(obj))
+            except json.JSONDecodeError as e:
+                print(f"Skipping invalid JSON object: {obj}. Error: {e}")
+                continue
+
+        with open(output_file, "w") as f:
+            json.dump(_data, f, indent=4)
+
+    except Exception as e:
+        raise ValueError(f"Error processing result: {e}")
+
+
+def get_commands(model_id, config):
+    return {
+        "fetch": ["ersilia", "-v", "fetch", model_id]
+        + (config.get("fetch_flags", "").split() if config.get("fetch_flags") else []),
+        "serve": ["ersilia", "-v", "serve", model_id]
+        + (config.get("serve_flags", "").split() if config.get("serve_flags") else []),
+        "run": ["ersilia", "run", "-i", config["input_file"]]
+        + (
+            ["-o", config["output_file"]]
+            if config.get("output_file") and not config.get("output_redirection")
+            else []
+        )
+        + (config.get("run_flags", "").split() if config.get("run_flags") else []),
+        "close": ["ersilia", "close"],
+    }
+
+
+def get_command_names(model_id, cli_type, config):
+    return list(get_commands(model_id, config).keys()) if cli_type == "all" else [cli_type]
+
+
+def handle_error_logging(command, description, result, config, checkups=None):
+    if config.get("log_error", False):
+        log_path = f"{config.get('log_path')}_{datetime.now().strftime('%Y%m%d_%H%M%S')}.txt"
+        with open(log_path, "w") as file:
+            file.write(f"Command: {' '.join(command)}\n")
+            file.write(f"Description: {description}\n")
+            file.write(f"Error: {result}\n")
+            if checkups:
+                for check in checkups:
+                    file.write(f"Check '{check['name']}': {check['status']}\n")
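The CI jobs above can be reproduced locally; a sketch, assuming conda is available and the commands are run from the repository root:

    python -m pip install -e .[test]
    nox -f test/playground/noxfile.py -s setup test_from_github
    nox -f test/playground/noxfile.py -s setup test_fetch_multiple_models test_serve_multiple_models

New checkups can also be added without touching commands.py by following the registry pattern in rules.py; a minimal, hypothetical sketch (the rule name and check logic here are illustrative, not part of this patch):

    @register_rule("file_not_empty")
    class FileNotEmptyRule(CommandRule):
        def check(self, file_path, expected_status):
            # a non-empty file counts as True
            actual_status = Path(file_path).stat().st_size > 0
            if actual_status != expected_status:
                raise AssertionError(f"Expectation failed for FileNotEmptyRule on {file_path}.")
            return {"name": f"File not empty at {file_path}", "status": actual_status}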