diff --git a/.github/workflows/single-binary.yml b/.github/workflows/single-binary.yml
index 0f64525860..1e31d703e4 100644
--- a/.github/workflows/single-binary.yml
+++ b/.github/workflows/single-binary.yml
@@ -197,6 +197,9 @@ jobs:
                  --version ${{ env.FLYTESNACKS_VERSION }} \
                  flytesnacks/$line;
          done < flytesnacks/flyte_tests.txt
+      - name: Install Pytest
+        run: |
+          pip install pytest
       - name: End2End
         run: |
           make end2end_execute
diff --git a/boilerplate/flyte/end2end/Makefile b/boilerplate/flyte/end2end/Makefile
index 98ee63ae7a..983b6e22d9 100644
--- a/boilerplate/flyte/end2end/Makefile
+++ b/boilerplate/flyte/end2end/Makefile
@@ -7,8 +7,12 @@
 end2end_execute: export FLYTESNACKS_PRIORITIES ?= P0
 end2end_execute: export FLYTESNACKS_VERSION ?= $(shell curl --silent "https://api.github.com/repos/flyteorg/flytesnacks/releases/latest" | jq -r .tag_name)
 end2end_execute:
-	./boilerplate/flyte/end2end/end2end.sh ./boilerplate/flyte/end2end/functional-test-config.yaml --return_non_zero_on_failure
-
+	pytest ./boilerplate/flyte/end2end/test_run.py \
+	--flytesnacks_release_tag=$(FLYTESNACKS_VERSION) \
+	--priorities=$(FLYTESNACKS_PRIORITIES) \
+	--config_file=./boilerplate/flyte/end2end/functional-test-config.yaml \
+	--return_non_zero_on_failure
+
 .PHONY: k8s_integration_execute
 k8s_integration_execute:
 	echo "pass"
diff --git a/boilerplate/flyte/end2end/conftest.py b/boilerplate/flyte/end2end/conftest.py
new file mode 100644
index 0000000000..d77fad05d9
--- /dev/null
+++ b/boilerplate/flyte/end2end/conftest.py
@@ -0,0 +1,47 @@
+import pytest
+
+def pytest_addoption(parser):
+    parser.addoption("--flytesnacks_release_tag", required=True)
+    parser.addoption("--priorities", required=True)
+    parser.addoption("--config_file", required=True)
+    parser.addoption(
+        "--return_non_zero_on_failure",
+        action="store_true",
+        default=False,
+        help="Return a non-zero exit status if any workflow fails",
+    )
+    parser.addoption(
+        "--terminate_workflow_on_failure",
+        action="store_true",
+        default=False,
+        help="Abort failing workflows upon exit",
+    )
+    parser.addoption(
+        "--test_project_name",
+        default="flytesnacks",
+        help="Name of project to run functional tests on"
+    )
+    parser.addoption(
+        "--test_project_domain",
+        default="development",
+        help="Name of domain in project to run functional tests on"
+    )
+    parser.addoption(
+        "--cluster_pool_name",
+        required=False,
+        type=str,
+        default=None,
+    )
+
+@pytest.fixture
+def setup_flytesnacks_env(pytestconfig):
+    return {
+        "flytesnacks_release_tag": pytestconfig.getoption("--flytesnacks_release_tag"),
+        "priorities": pytestconfig.getoption("--priorities"),
+        "config_file": pytestconfig.getoption("--config_file"),
+        "return_non_zero_on_failure": pytestconfig.getoption("--return_non_zero_on_failure"),
+        "terminate_workflow_on_failure": pytestconfig.getoption("--terminate_workflow_on_failure"),
+        "test_project_name": pytestconfig.getoption("--test_project_name"),
+        "test_project_domain": pytestconfig.getoption("--test_project_domain"),
+        "cluster_pool_name": pytestconfig.getoption("--cluster_pool_name"),
+    }
diff --git a/boilerplate/flyte/end2end/end2end.sh b/boilerplate/flyte/end2end/end2end.sh
deleted file mode 100755
index 5dd825c1a0..0000000000
--- a/boilerplate/flyte/end2end/end2end.sh
+++ /dev/null
@@ -1,12 +0,0 @@
-#!/usr/bin/env bash
-
-# WARNING: THIS FILE IS MANAGED IN THE 'BOILERPLATE' REPO AND COPIED TO OTHER REPOSITORIES.
-# ONLY EDIT THIS FILE FROM WITHIN THE 'FLYTEORG/BOILERPLATE' REPOSITORY:
-#
-# TO OPT OUT OF UPDATES, SEE https://github.com/flyteorg/boilerplate/blob/master/Readme.rst
-set -eu
-
-CONFIG_FILE=$1; shift
-EXTRA_FLAGS=( "$@" )
-
-python ./boilerplate/flyte/end2end/run-tests.py $FLYTESNACKS_VERSION $FLYTESNACKS_PRIORITIES $CONFIG_FILE ${EXTRA_FLAGS[@]}
diff --git a/boilerplate/flyte/end2end/run-tests.py b/boilerplate/flyte/end2end/test_run.py
similarity index 76%
rename from boilerplate/flyte/end2end/run-tests.py
rename to boilerplate/flyte/end2end/test_run.py
index f6558247a6..b300ee974a 100644
--- a/boilerplate/flyte/end2end/run-tests.py
+++ b/boilerplate/flyte/end2end/test_run.py
@@ -5,7 +5,7 @@
 import traceback
 from typing import Dict, List, Optional
 
-import click
+import pytest
 import requests
 from flytekit.configuration import Config
 from flytekit.models.core.execution import WorkflowExecutionPhase
@@ -15,7 +15,6 @@
 WAIT_TIME = 10
 MAX_ATTEMPTS = 200
 
-
 def execute_workflow(
     remote: FlyteRemote,
     version,
@@ -27,7 +26,6 @@
     wf = remote.fetch_workflow(name=workflow_name, version=version)
     return remote.execute(wf, inputs=inputs, wait=False, cluster_pool=cluster_pool_name)
 
-
 def executions_finished(
     executions_by_wfgroup: Dict[str, List[FlyteWorkflowExecution]]
 ) -> bool:
@@ -36,7 +34,6 @@
             return False
     return True
 
-
 def sync_executions(
     remote: FlyteRemote, executions_by_wfgroup: Dict[str, List[FlyteWorkflowExecution]]
 ):
@@ -50,13 +47,11 @@
         print("GOT TO THE EXCEPT")
         print("COUNT THIS!")
 
-
 def report_executions(executions_by_wfgroup: Dict[str, List[FlyteWorkflowExecution]]):
     for executions in executions_by_wfgroup.values():
         for execution in executions:
             print(execution)
 
-
 def schedule_workflow_groups(
     tag: str,
     workflow_groups: List[str],
@@ -65,10 +60,6 @@
     parsed_manifest: List[dict],
     cluster_pool_name: Optional[str] = None,
 ) -> Dict[str, bool]:
-    """
-    Schedule workflows executions for all workflow groups and return True if all executions succeed, otherwise
-    return False.
-    """
     executions_by_wfgroup = {}
     # Schedule executions for each workflow group,
     for wf_group in workflow_groups:
@@ -120,7 +111,6 @@
         results[wf_group] = len(non_succeeded_executions) == 0
     return results
 
-
 def valid(workflow_group, parsed_manifest):
     """
     Return True if a workflow group is contained in parsed_manifest,
@@ -128,22 +118,25 @@
     """
     return workflow_group in set(wf_group["name"] for wf_group in parsed_manifest)
 
+def test_run(setup_flytesnacks_env):
+
+    env = setup_flytesnacks_env
+
+    flytesnacks_release_tag = env["flytesnacks_release_tag"]
+    priorities = env["priorities"]
+    config_file_path = env["config_file"]
+    terminate_workflow_on_failure = env["terminate_workflow_on_failure"]
+    test_project_name = env["test_project_name"]
+    test_project_domain = env["test_project_domain"]
+    cluster_pool_name = env["cluster_pool_name"]
+    return_non_zero_on_failure = env["return_non_zero_on_failure"]
 
-def run(
-    flytesnacks_release_tag: str,
-    priorities: List[str],
-    config_file_path,
-    terminate_workflow_on_failure: bool,
-    test_project_name: str,
-    test_project_domain: str,
-    cluster_pool_name: Optional[str] = None,
-) -> List[Dict[str, str]]:
     remote = FlyteRemote(
         Config.auto(config_file=config_file_path),
         test_project_name,
         test_project_domain,
     )
-
+
     # For a given release tag and priority, this function filters the workflow groups from the flytesnacks
     # manifest file. For example, for the release tag "v0.2.224" and the priority "P0" it returns [ "core" ].
     manifest_url = (
@@ -210,75 +203,15 @@
             "color": background_color,
         }
         results.append(result)
-    return results
-
-
-@click.command()
-@click.argument("flytesnacks_release_tag")
-@click.argument("priorities")
-@click.argument("config_file")
-@click.option(
-    "--return_non_zero_on_failure",
-    default=False,
-    is_flag=True,
-    help="Return a non-zero exit status if any workflow fails",
-)
-@click.option(
-    "--terminate_workflow_on_failure",
-    default=False,
-    is_flag=True,
-    help="Abort failing workflows upon exit",
-)
-@click.option(
-    "--test_project_name",
-    default="flytesnacks",
-    type=str,
-    is_flag=False,
-    help="Name of project to run functional tests on",
-)
-@click.option(
-    "--test_project_domain",
-    default="development",
-    type=str,
-    is_flag=False,
-    help="Name of domain in project to run functional tests on",
-)
-@click.argument(
-    "cluster_pool_name",
-    required=False,
-    type=str,
-    default=None,
-)
-def cli(
-    flytesnacks_release_tag,
-    priorities,
-    config_file,
-    return_non_zero_on_failure,
-    terminate_workflow_on_failure,
-    test_project_name,
-    test_project_domain,
-    cluster_pool_name,
-):
-    print(f"return_non_zero_on_failure={return_non_zero_on_failure}")
-    results = run(
-        flytesnacks_release_tag,
-        priorities,
-        config_file,
-        terminate_workflow_on_failure,
-        test_project_name,
-        test_project_domain,
-        cluster_pool_name,
-    )
-
     # Write a json object in its own line describing the result of this run to stdout
     print(f"Result of run:\n{json.dumps(results)}")
 
-    # Return a non-zero exit code if core fails
     if return_non_zero_on_failure:
-        for result in results:
-            if result["status"] not in ("passing", "coming soon"):
-                sys.exit(1)
-
-
-if __name__ == "__main__":
-    cli()
+        fail_results = [result for result in results if result["status"] not in ("passing", "coming soon")]
+        if fail_results:
+            fail_msgs = [
+                f"Workflow '{r['label']}' failed with status '{r['status']}'" for r in fail_results
+            ]
+            pytest.fail("\n".join(fail_msgs))
+
+    assert results == [{"label": "core", "status": "passing", "color": "green"}]