From 72766b4dc94506b924025281241cd40d83a4b2fb Mon Sep 17 00:00:00 2001 From: Andrei Neagu <5694077+GitHK@users.noreply.github.com> Date: Fri, 8 Nov 2024 09:10:13 +0100 Subject: [PATCH 01/22] =?UTF-8?q?=E2=99=BB=EF=B8=8F=20tracing=20can=20be?= =?UTF-8?q?=20disabled=20on=20all=20services=20(#6682)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Andrei Neagu --- .env-devel | 18 +++++++++++++++++- services/api-server/tests/unit/conftest.py | 1 + services/autoscaling/tests/unit/conftest.py | 9 +-------- .../test_modules_auto_scaling_computational.py | 1 - services/catalog/tests/unit/conftest.py | 5 ++++- .../clusters-keeper/tests/unit/conftest.py | 1 + .../datcore-adapter/tests/unit/conftest.py | 15 ++++++++++++--- services/director-v2/tests/conftest.py | 1 + services/docker-compose.yml | 11 +++++++++++ services/dynamic-scheduler/tests/conftest.py | 5 ++++- services/invitations/tests/unit/conftest.py | 1 + services/payments/tests/conftest.py | 1 + services/storage/tests/conftest.py | 18 ++++++++++-------- 13 files changed, 64 insertions(+), 23 deletions(-) diff --git a/.env-devel b/.env-devel index 978062f428b..8f979751926 100644 --- a/.env-devel +++ b/.env-devel @@ -21,6 +21,7 @@ AGENT_VOLUMES_CLEANUP_S3_SECRET_KEY=12345678 API_SERVER_DEV_FEATURES_ENABLED=0 API_SERVER_LOGLEVEL=INFO API_SERVER_PROFILING=1 +API_SERVER_TRACING={} TRAEFIK_API_SERVER_INFLIGHTREQ_AMOUNT=25 AUTOSCALING_DASK=null @@ -33,6 +34,7 @@ AUTOSCALING_LOGLEVEL=INFO AUTOSCALING_NODES_MONITORING=null AUTOSCALING_POLL_INTERVAL=10 AUTOSCALING_SSM_ACCESS=null +AUTOSCALING_TRACING={} AWS_S3_CLI_S3=null @@ -44,6 +46,7 @@ CATALOG_PORT=8000 CATALOG_PROFILING=1 CATALOG_SERVICES_DEFAULT_RESOURCES='{"CPU": {"limit": 0.1, "reservation": 0.1}, "RAM": {"limit": 2147483648, "reservation": 2147483648}}' CATALOG_SERVICES_DEFAULT_SPECIFICATIONS='{}' +CATALOG_TRACING={} CLUSTERS_KEEPER_COMPUTATIONAL_BACKEND_DEFAULT_CLUSTER_AUTH='{"type":"tls","tls_ca_file":"/home/scu/.dask/dask-crt.pem","tls_client_cert":"/home/scu/.dask/dask-crt.pem","tls_client_key":"/home/scu/.dask/dask-key.pem"}' CLUSTERS_KEEPER_COMPUTATIONAL_BACKEND_DOCKER_IMAGE_TAG=master-github-latest @@ -57,6 +60,7 @@ CLUSTERS_KEEPER_MAX_MISSED_HEARTBEATS_BEFORE_CLUSTER_TERMINATION=5 CLUSTERS_KEEPER_PRIMARY_EC2_INSTANCES=null CLUSTERS_KEEPER_TASK_INTERVAL=30 CLUSTERS_KEEPER_WORKERS_EC2_INSTANCES=null +CLUSTERS_KEEPER_TRACING={} DASK_SCHEDULER_HOST=dask-scheduler DASK_SCHEDULER_PORT=8786 @@ -70,6 +74,7 @@ DIRECTOR_HOST=director DIRECTOR_PORT=8080 DIRECTOR_REGISTRY_CACHING_TTL=900 DIRECTOR_REGISTRY_CACHING=True +DIRECTOR_TRACING={} EFS_USER_ID=8006 EFS_USER_NAME=efs @@ -79,6 +84,10 @@ EFS_DNS_NAME=fs-xxx.efs.us-east-1.amazonaws.com EFS_MOUNTED_PATH=/tmp/efs EFS_PROJECT_SPECIFIC_DATA_DIRECTORY=project-specific-data EFS_ONLY_ENABLED_FOR_USERIDS=[] +EFS_GUARDIAN_TRACING={} + +# DATCORE_ADAPTER +DATCORE_ADAPTER_TRACING={} # DIRECTOR_V2 ---- COMPUTATIONAL_BACKEND_DEFAULT_CLUSTER_AUTH='{"type":"tls","tls_ca_file":"/home/scu/.dask/dask-crt.pem","tls_client_cert":"/home/scu/.dask/dask-crt.pem","tls_client_key":"/home/scu/.dask/dask-key.pem"}' @@ -105,10 +114,13 @@ DYNAMIC_SIDECAR_LOG_LEVEL=DEBUG DYNAMIC_SIDECAR_PROMETHEUS_MONITORING_NETWORKS=[] DYNAMIC_SIDECAR_PROMETHEUS_SERVICE_LABELS={} DYNAMIC_SIDECAR_API_SAVE_RESTORE_STATE_TIMEOUT=3600 -# DIRECTOR_V2 ---- +DIRECTOR_V2_TRACING={} + +# DYNAMIC_SCHEDULER ---- DYNAMIC_SCHEDULER_LOGLEVEL=DEBUG DYNAMIC_SCHEDULER_PROFILING=1 DYNAMIC_SCHEDULER_STOP_SERVICE_TIMEOUT=PT1H 
+DYNAMIC_SCHEDULER_TRACING={} FUNCTION_SERVICES_AUTHORS='{"UN": {"name": "Unknown", "email": "unknown@osparc.io", "affiliation": "unknown"}}' @@ -122,6 +134,7 @@ INVITATIONS_PORT=8000 INVITATIONS_SECRET_KEY='REPLACE_ME_with_result__Fernet_generate_key=' INVITATIONS_SWAGGER_API_DOC_ENABLED=1 INVITATIONS_USERNAME=admin +INVITATIONS_TRACING={} LOG_FORMAT_LOCAL_DEV_ENABLED=1 LOG_FILTER_MAPPING='{}' @@ -146,6 +159,7 @@ PAYMENTS_STRIPE_API_SECRET='REPLACE_ME_with_api_secret' PAYMENTS_STRIPE_URL=https://api.stripe.com PAYMENTS_SWAGGER_API_DOC_ENABLED=1 PAYMENTS_USERNAME=admin +PAYMENTS_TRACING={} POSTGRES_DB=simcoredb POSTGRES_ENDPOINT=postgres:5432 @@ -185,6 +199,7 @@ RESOURCE_USAGE_TRACKER_MISSED_HEARTBEAT_CHECK_ENABLED=1 RESOURCE_USAGE_TRACKER_MISSED_HEARTBEAT_COUNTER_FAIL=6 RESOURCE_USAGE_TRACKER_MISSED_HEARTBEAT_INTERVAL_SEC=300 RESOURCE_USAGE_TRACKER_S3=null +RESOURCE_USAGE_TRACKER_TRACING={} # NOTE: 172.17.0.1 is the docker0 interface, which redirect from inside a container onto the host network interface. R_CLONE_OPTION_BUFFER_SIZE=0M @@ -218,6 +233,7 @@ STORAGE_HOST=storage STORAGE_LOGLEVEL=INFO STORAGE_PORT=8080 STORAGE_PROFILING=1 +STORAGE_TRACING={} # STORAGE ---- SWARM_STACK_NAME=master-simcore diff --git a/services/api-server/tests/unit/conftest.py b/services/api-server/tests/unit/conftest.py index e8324bcc0b7..d37481cccbf 100644 --- a/services/api-server/tests/unit/conftest.py +++ b/services/api-server/tests/unit/conftest.py @@ -62,6 +62,7 @@ def app_environment( "WEBSERVER_HOST": "webserver", "API_SERVER_POSTGRES": "null", "API_SERVER_RABBITMQ": "null", + "API_SERVER_TRACING": "null", "LOG_LEVEL": "debug", "SC_BOOT_MODE": "production", "API_SERVER_HEALTH_CHECK_TASK_PERIOD_SECONDS": "3", diff --git a/services/autoscaling/tests/unit/conftest.py b/services/autoscaling/tests/unit/conftest.py index 9876a3a1c20..243ccbc429e 100644 --- a/services/autoscaling/tests/unit/conftest.py +++ b/services/autoscaling/tests/unit/conftest.py @@ -226,7 +226,7 @@ def app_environment( "AUTOSCALING_EC2_SECRET_ACCESS_KEY": faker.pystr(), "AUTOSCALING_EC2_INSTANCES": "{}", "AUTOSCALING_SSM_ACCESS": "{}", - "AUTOSCALING_TRACING": "{}", + "AUTOSCALING_TRACING": "null", "SSM_ACCESS_KEY_ID": faker.pystr(), "SSM_SECRET_ACCESS_KEY": faker.pystr(), "EC2_INSTANCES_KEY_NAME": faker.pystr(), @@ -367,13 +367,6 @@ def disabled_ec2(app_environment: EnvVarsDict, monkeypatch: pytest.MonkeyPatch) monkeypatch.setenv("AUTOSCALING_EC2_ACCESS", "null") -@pytest.fixture -def disabled_opentelemetry( - app_environment: EnvVarsDict, monkeypatch: pytest.MonkeyPatch -) -> None: - monkeypatch.setenv("AUTOSCALING_TRACING", "null") - - @pytest.fixture def disabled_ssm(app_environment: EnvVarsDict, monkeypatch: pytest.MonkeyPatch) -> None: monkeypatch.setenv("AUTOSCALING_SSM_ACCESS", "null") diff --git a/services/autoscaling/tests/unit/test_modules_auto_scaling_computational.py b/services/autoscaling/tests/unit/test_modules_auto_scaling_computational.py index 372546149c1..5811b43b2f0 100644 --- a/services/autoscaling/tests/unit/test_modules_auto_scaling_computational.py +++ b/services/autoscaling/tests/unit/test_modules_auto_scaling_computational.py @@ -79,7 +79,6 @@ def minimal_configuration( local_dask_scheduler_server_envs: EnvVarsDict, mocked_ec2_instances_envs: EnvVarsDict, disabled_rabbitmq: None, - disabled_opentelemetry: None, disable_dynamic_service_background_task: None, disable_buffers_pool_background_task: None, mocked_redis_server: None, diff --git a/services/catalog/tests/unit/conftest.py 
b/services/catalog/tests/unit/conftest.py index 6069514b085..68dfeb604bb 100644 --- a/services/catalog/tests/unit/conftest.py +++ b/services/catalog/tests/unit/conftest.py @@ -86,7 +86,10 @@ def app_environment( """ return setenvs_from_dict( monkeypatch, - {**docker_compose_service_environment_dict}, + { + **docker_compose_service_environment_dict, + "CATALOG_TRACING": "null", + }, ) diff --git a/services/clusters-keeper/tests/unit/conftest.py b/services/clusters-keeper/tests/unit/conftest.py index 43805123c30..ef528f0cfab 100644 --- a/services/clusters-keeper/tests/unit/conftest.py +++ b/services/clusters-keeper/tests/unit/conftest.py @@ -119,6 +119,7 @@ def app_environment( envs = setenvs_from_dict( monkeypatch, { + "CLUSTERS_KEEPER_TRACING": "null", "CLUSTERS_KEEPER_EC2_ACCESS": "{}", "CLUSTERS_KEEPER_EC2_ACCESS_KEY_ID": faker.pystr(), "CLUSTERS_KEEPER_EC2_SECRET_ACCESS_KEY": faker.pystr(), diff --git a/services/datcore-adapter/tests/unit/conftest.py b/services/datcore-adapter/tests/unit/conftest.py index e4fa08204f5..f950caf619a 100644 --- a/services/datcore-adapter/tests/unit/conftest.py +++ b/services/datcore-adapter/tests/unit/conftest.py @@ -15,6 +15,7 @@ from asgi_lifespan import LifespanManager from fastapi.applications import FastAPI from pytest_mock import MockFixture +from pytest_simcore.helpers.monkeypatch_envs import EnvVarsDict, setenvs_from_dict from simcore_service_datcore_adapter.modules.pennsieve import ( PennsieveAuthorizationHeaders, ) @@ -22,6 +23,7 @@ from starlette.testclient import TestClient pytest_plugins = [ + "pytest_simcore.environment_configs", "pytest_simcore.repository_paths", "pytest_simcore.pytest_global_environs", ] @@ -74,9 +76,16 @@ def client(minimal_app: FastAPI) -> TestClient: @pytest.fixture -def app_envs(monkeypatch: pytest.MonkeyPatch): - # disable tracing as together with LifespanManager, it does not remove itself nicely - ... 
+def app_envs( + mock_env_devel_environment: EnvVarsDict, monkeypatch: pytest.MonkeyPatch +) -> EnvVarsDict: + return setenvs_from_dict( + monkeypatch, + { + **mock_env_devel_environment, + "DATCORE_ADAPTER_TRACING": "null", + }, + ) @pytest.fixture() diff --git a/services/director-v2/tests/conftest.py b/services/director-v2/tests/conftest.py index db64158d6d5..4e415254486 100644 --- a/services/director-v2/tests/conftest.py +++ b/services/director-v2/tests/conftest.py @@ -190,6 +190,7 @@ def mock_env( "SIMCORE_SERVICES_NETWORK_NAME": "test_network_name", "SWARM_STACK_NAME": "pytest-simcore", "TRAEFIK_SIMCORE_ZONE": "test_traefik_zone", + "DIRECTOR_V2_TRACING": "null", }, ) diff --git a/services/docker-compose.yml b/services/docker-compose.yml index 45e843ad712..35dd3782609 100644 --- a/services/docker-compose.yml +++ b/services/docker-compose.yml @@ -42,6 +42,7 @@ services: WEBSERVER_HOST: ${WB_API_WEBSERVER_HOST} WEBSERVER_PORT: ${WB_API_WEBSERVER_PORT} WEBSERVER_SESSION_SECRET_KEY: ${WEBSERVER_SESSION_SECRET_KEY} + API_SERVER_TRACING: ${API_SERVER_TRACING} TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT: ${TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT} TRACING_OPENTELEMETRY_COLLECTOR_PORT: ${TRACING_OPENTELEMETRY_COLLECTOR_PORT} @@ -127,6 +128,7 @@ services: REGISTRY_URL: ${REGISTRY_URL} REGISTRY_SSL: ${REGISTRY_SSL} REGISTRY_AUTH: ${REGISTRY_AUTH} + AUTOSCALING_TRACING: ${AUTOSCALING_TRACING} TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT: ${TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT} TRACING_OPENTELEMETRY_COLLECTOR_PORT: ${TRACING_OPENTELEMETRY_COLLECTOR_PORT} volumes: @@ -168,6 +170,7 @@ services: RABBIT_PORT: ${RABBIT_PORT} RABBIT_SECURE: ${RABBIT_SECURE} RABBIT_USER: ${RABBIT_USER} + CATALOG_TRACING: ${CATALOG_TRACING} TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT: ${TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT} TRACING_OPENTELEMETRY_COLLECTOR_PORT: ${TRACING_OPENTELEMETRY_COLLECTOR_PORT} networks: @@ -236,6 +239,7 @@ services: WORKERS_EC2_INSTANCES_SECURITY_GROUP_IDS: ${WORKERS_EC2_INSTANCES_SECURITY_GROUP_IDS} WORKERS_EC2_INSTANCES_SUBNET_ID: ${WORKERS_EC2_INSTANCES_SUBNET_ID} WORKERS_EC2_INSTANCES_CUSTOM_TAGS: ${WORKERS_EC2_INSTANCES_CUSTOM_TAGS} + CLUSTERS_KEEPER_TRACING: ${CLUSTERS_KEEPER_TRACING} TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT: ${TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT} TRACING_OPENTELEMETRY_COLLECTOR_PORT: ${TRACING_OPENTELEMETRY_COLLECTOR_PORT} secrets: *dask_tls_secrets @@ -278,6 +282,7 @@ services: SIMCORE_SERVICES_NETWORK_NAME: interactive_services_subnet STORAGE_ENDPOINT: ${STORAGE_ENDPOINT} SWARM_STACK_NAME: ${SWARM_STACK_NAME:-simcore} + DIRECTOR_TRACING: ${DIRECTOR_TRACING} TRACING_OPENTELEMETRY_COLLECTOR_SAMPLING_PERCENTAGE: ${TRACING_OPENTELEMETRY_COLLECTOR_SAMPLING_PERCENTAGE} TRACING_OPENTELEMETRY_COLLECTOR_EXPORTER_ENDPOINT: ${TRACING_OPENTELEMETRY_COLLECTOR_EXPORTER_ENDPOINT} TRAEFIK_SIMCORE_ZONE: ${TRAEFIK_SIMCORE_ZONE:-internal_simcore_stack} @@ -383,6 +388,7 @@ services: SIMCORE_SERVICES_NETWORK_NAME: ${SIMCORE_SERVICES_NETWORK_NAME} SWARM_STACK_NAME: ${SWARM_STACK_NAME} TRAEFIK_SIMCORE_ZONE: ${TRAEFIK_SIMCORE_ZONE} + DIRECTOR_V2_TRACING: ${DIRECTOR_V2_TRACING} TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT: ${TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT} TRACING_OPENTELEMETRY_COLLECTOR_PORT: ${TRACING_OPENTELEMETRY_COLLECTOR_PORT} @@ -434,6 +440,7 @@ services: EFS_MOUNTED_PATH: ${EFS_MOUNTED_PATH} EFS_ONLY_ENABLED_FOR_USERIDS: ${EFS_ONLY_ENABLED_FOR_USERIDS} EFS_PROJECT_SPECIFIC_DATA_DIRECTORY: ${EFS_PROJECT_SPECIFIC_DATA_DIRECTORY} + EFS_GUARDIAN_TRACING: ${EFS_GUARDIAN_TRACING} 
TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT: ${TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT} TRACING_OPENTELEMETRY_COLLECTOR_PORT: ${TRACING_OPENTELEMETRY_COLLECTOR_PORT} invitations: @@ -452,6 +459,7 @@ services: INVITATIONS_USERNAME: ${INVITATIONS_USERNAME} LOG_FORMAT_LOCAL_DEV_ENABLED: ${LOG_FORMAT_LOCAL_DEV_ENABLED} LOG_FILTER_MAPPING : ${LOG_FILTER_MAPPING} + INVITATIONS_TRACING: ${INVITATIONS_TRACING} TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT: ${TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT} TRACING_OPENTELEMETRY_COLLECTOR_PORT: ${TRACING_OPENTELEMETRY_COLLECTOR_PORT} payments: @@ -496,6 +504,7 @@ services: SMTP_PORT: ${SMTP_PORT} SMTP_PROTOCOL: ${SMTP_PROTOCOL} SMTP_USERNAME: ${SMTP_USERNAME} + PAYMENTS_TRACING: ${PAYMENTS_TRACING} TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT: ${TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT} TRACING_OPENTELEMETRY_COLLECTOR_PORT: ${TRACING_OPENTELEMETRY_COLLECTOR_PORT} @@ -532,6 +541,7 @@ services: RESOURCE_USAGE_TRACKER_MISSED_HEARTBEAT_INTERVAL_SEC: ${RESOURCE_USAGE_TRACKER_MISSED_HEARTBEAT_INTERVAL_SEC} RESOURCE_USAGE_TRACKER_MISSED_HEARTBEAT_COUNTER_FAIL: ${RESOURCE_USAGE_TRACKER_MISSED_HEARTBEAT_COUNTER_FAIL} RESOURCE_USAGE_TRACKER_S3: ${RESOURCE_USAGE_TRACKER_S3} + RESOURCE_USAGE_TRACKER_TRACING: ${RESOURCE_USAGE_TRACKER_TRACING} TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT: ${TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT} TRACING_OPENTELEMETRY_COLLECTOR_PORT: ${TRACING_OPENTELEMETRY_COLLECTOR_PORT} RESOURCE_USAGE_TRACKER_PORT: ${RESOURCE_USAGE_TRACKER_PORT} @@ -560,6 +570,7 @@ services: DYNAMIC_SCHEDULER_LOGLEVEL: ${DYNAMIC_SCHEDULER_LOGLEVEL} DYNAMIC_SCHEDULER_STOP_SERVICE_TIMEOUT: ${DYNAMIC_SCHEDULER_STOP_SERVICE_TIMEOUT} DYNAMIC_SCHEDULER_PROFILING: ${DYNAMIC_SCHEDULER_PROFILING} + DYNAMIC_SCHEDULER_TRACING: ${DYNAMIC_SCHEDULER_TRACING} TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT: ${TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT} TRACING_OPENTELEMETRY_COLLECTOR_PORT: ${TRACING_OPENTELEMETRY_COLLECTOR_PORT} static-webserver: diff --git a/services/dynamic-scheduler/tests/conftest.py b/services/dynamic-scheduler/tests/conftest.py index 2cb14086b2a..8b672b0408e 100644 --- a/services/dynamic-scheduler/tests/conftest.py +++ b/services/dynamic-scheduler/tests/conftest.py @@ -74,7 +74,10 @@ def app_environment( ) -> EnvVarsDict: return setenvs_from_dict( monkeypatch, - {**docker_compose_service_dynamic_scheduler_env_vars}, + { + **docker_compose_service_dynamic_scheduler_env_vars, + "DYNAMIC_SCHEDULER_TRACING": "null", + }, ) diff --git a/services/invitations/tests/unit/conftest.py b/services/invitations/tests/unit/conftest.py index 1b6ea4ee6e9..5d952daf83b 100644 --- a/services/invitations/tests/unit/conftest.py +++ b/services/invitations/tests/unit/conftest.py @@ -75,6 +75,7 @@ def app_environment( "INVITATIONS_DEFAULT_PRODUCT": default_product, "INVITATIONS_USERNAME": fake_user_name, "INVITATIONS_PASSWORD": fake_password, + "INVITATIONS_TRACING": "null", }, ) diff --git a/services/payments/tests/conftest.py b/services/payments/tests/conftest.py index 042ac85f968..921b8405d99 100644 --- a/services/payments/tests/conftest.py +++ b/services/payments/tests/conftest.py @@ -83,6 +83,7 @@ def app_environment( "PAYMENTS_ACCESS_TOKEN_SECRET_KEY": secret_key, "PAYMENTS_USERNAME": faker.user_name(), "PAYMENTS_PASSWORD": faker.password(), + "PAYMENTS_TRACING": "null", }, ) diff --git a/services/storage/tests/conftest.py b/services/storage/tests/conftest.py index 1b3f634446c..e83cf9ad8eb 100644 --- a/services/storage/tests/conftest.py +++ b/services/storage/tests/conftest.py @@ -168,19 +168,21 @@ 
def app_settings( datcore_adapter_service_mock: aioresponses.aioresponses, monkeypatch: pytest.MonkeyPatch, ) -> Settings: + s3_settings_dict = {} if external_envfile_dict: s3_settings = S3Settings.create_from_envs(**external_envfile_dict) if s3_settings.S3_ENDPOINT is None: monkeypatch.delenv("S3_ENDPOINT") - setenvs_from_dict( - monkeypatch, - s3_settings.dict(exclude={"S3_ENDPOINT"}), - ) + s3_settings_dict = s3_settings.dict(exclude={"S3_ENDPOINT"}) else: - setenvs_from_dict( - monkeypatch, - s3_settings.dict(), - ) + s3_settings_dict = s3_settings.dict() + setenvs_from_dict( + monkeypatch, + { + **s3_settings_dict, + "STORAGE_TRACING": "null", + }, + ) test_app_settings = Settings.create_from_envs() print(f"{test_app_settings.json(indent=2)=}") return test_app_settings From 299c1ac3149aad80ed8899a0d350a3164f670e4f Mon Sep 17 00:00:00 2001 From: Pedro Crespo-Valero <32402063+pcrespov@users.noreply.github.com> Date: Fri, 8 Nov 2024 11:43:00 +0100 Subject: [PATCH 02/22] =?UTF-8?q?=E2=9C=A8=20Trash=20folders=20(#6642)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- api/specs/web-server/_folders.py | 8 +- api/specs/web-server/_projects_crud.py | 6 +- api/specs/web-server/_trash.py | 41 +++- .../api_schemas_webserver/folders.py | 1 + .../api_schemas_webserver/folders_v2.py | 1 + .../src/models_library/folders.py | 4 + .../src/models_library/projects.py | 1 + ...58751a_project_and_folder_trash_columns.py | 73 ++++++ .../models/folders_v2.py | 16 ++ .../models/projects.py | 13 +- .../helpers/webserver_projects.py | 2 +- services/web/server/VERSION | 2 +- services/web/server/setup.cfg | 2 +- .../api/v0/openapi.yaml | 142 ++++++++--- .../src/simcore_service_webserver/errors.py | 5 +- .../exceptions_handlers.py | 90 +++++++ .../folders/_exceptions_handlers.py | 74 ++++++ .../folders/_folders_api.py | 8 +- .../folders/_folders_db.py | 133 +++++++++-- .../folders/_folders_handlers.py | 126 ++-------- .../folders/_models.py | 85 +++++++ .../folders/_trash_api.py | 177 ++++++++++++++ .../folders/_trash_handlers.py | 66 ++++++ .../folders/plugin.py | 3 +- .../projects/_crud_api_read.py | 4 +- .../projects/_trash_api.py | 8 +- .../projects/_trash_handlers.py | 71 ++---- .../simcore_service_webserver/projects/db.py | 25 +- .../projects/models.py | 4 +- .../workspaces/_workspaces_api.py | 4 +- .../unit/isolated/test_exceptions_handlers.py | 117 ++++++++++ .../02/test_projects_cancellations.py | 3 +- .../02/test_projects_crud_handlers.py | 38 +-- .../02/test_projects_states_handlers.py | 2 +- .../tests/unit/with_dbs/03/test_project_db.py | 1 + .../tests/unit/with_dbs/03/test_trash.py | 221 ++++++++++++++++++ .../unit/with_dbs/04/folders/test_folders.py | 201 +++++++++------- .../test_studies_dispatcher_studies_access.py | 3 +- .../sleepers_project_template_sql.csv | 4 +- 39 files changed, 1421 insertions(+), 364 deletions(-) create mode 100644 packages/postgres-database/src/simcore_postgres_database/migration/versions/5ad02358751a_project_and_folder_trash_columns.py create mode 100644 services/web/server/src/simcore_service_webserver/exceptions_handlers.py create mode 100644 services/web/server/src/simcore_service_webserver/folders/_exceptions_handlers.py create mode 100644 services/web/server/src/simcore_service_webserver/folders/_models.py create mode 100644 services/web/server/src/simcore_service_webserver/folders/_trash_api.py create mode 100644 services/web/server/src/simcore_service_webserver/folders/_trash_handlers.py create mode 100644 
services/web/server/tests/unit/isolated/test_exceptions_handlers.py

diff --git a/api/specs/web-server/_folders.py b/api/specs/web-server/_folders.py
index ee529da655c..90f1ad3beb1 100644
--- a/api/specs/web-server/_folders.py
+++ b/api/specs/web-server/_folders.py
@@ -21,7 +21,7 @@
 from models_library.workspaces import WorkspaceID
 from pydantic import Json
 from simcore_service_webserver._meta import API_VTAG
-from simcore_service_webserver.folders._folders_handlers import FoldersPathParams
+from simcore_service_webserver.folders._models import FolderFilters, FoldersPathParams
 
 router = APIRouter(
     prefix=f"/{API_VTAG}",
@@ -30,8 +30,6 @@
     ],
 )
 
-### Folders
-
 
 @router.post(
     "/folders",
@@ -57,6 +55,10 @@ async def list_folders(
             example='{"field": "name", "direction": "desc"}',
         ),
     ] = '{"field": "modified_at", "direction": "desc"}',
+    filters: Annotated[
+        Json | None,
+        Query(description=FolderFilters.schema_json(indent=1)),
+    ] = None,
 ):
     ...
 
diff --git a/api/specs/web-server/_projects_crud.py b/api/specs/web-server/_projects_crud.py
index aad8fa82760..4c560464eb8 100644
--- a/api/specs/web-server/_projects_crud.py
+++ b/api/specs/web-server/_projects_crud.py
@@ -32,6 +32,7 @@
 from simcore_service_webserver.projects._common_models import ProjectPathParams
 from simcore_service_webserver.projects._crud_handlers import ProjectCreateParams
 from simcore_service_webserver.projects._crud_handlers_models import (
+    ProjectFilters,
     ProjectListFullSearchParams,
     ProjectListParams,
 )
@@ -83,7 +84,10 @@ async def list_projects(
             example='{"field": "last_change_date", "direction": "desc"}',
         ),
     ] = '{"field": "last_change_date", "direction": "desc"}',
-    filters: Annotated[Json | None, Query()] = None,
+    filters: Annotated[
+        Json | None,
+        Query(description=ProjectFilters.schema_json(indent=1)),
+    ] = None,
 ):
     ...
 
diff --git a/api/specs/web-server/_trash.py b/api/specs/web-server/_trash.py
index cdde2b8c32f..cdd883f7cf3 100644
--- a/api/specs/web-server/_trash.py
+++ b/api/specs/web-server/_trash.py
@@ -9,10 +9,14 @@
 
 from fastapi import APIRouter, Depends, status
 from simcore_service_webserver._meta import API_VTAG
-from simcore_service_webserver.projects._trash_handlers import (
-    ProjectPathParams,
+from simcore_service_webserver.folders._models import (
+    FoldersPathParams,
     RemoveQueryParams,
 )
+from simcore_service_webserver.projects._trash_handlers import ProjectPathParams
+from simcore_service_webserver.projects._trash_handlers import (
+    RemoveQueryParams as RemoveQueryParams_duplicated,
+)
 
 router = APIRouter(
     prefix=f"/{API_VTAG}",
@@ -59,3 +63,36 @@ def untrash_project(
     _p: Annotated[ProjectPathParams, Depends()],
 ):
     ...
+
+
+_extra_tags = ["folders"]
+
+
+@router.post(
+    "/folders/{folder_id}:trash",
+    tags=_extra_tags,
+    status_code=status.HTTP_204_NO_CONTENT,
+    responses={
+        status.HTTP_404_NOT_FOUND: {"description": "No such folder"},
+        status.HTTP_409_CONFLICT: {
+            "description": "One or more projects are in use and cannot be trashed"
+        },
+        status.HTTP_503_SERVICE_UNAVAILABLE: {"description": "Trash service error"},
+    },
+)
+def trash_folder(
+    _p: Annotated[FoldersPathParams, Depends()],
+    _q: Annotated[RemoveQueryParams_duplicated, Depends()],
+):
+    ...
+
+
+@router.post(
+    "/folders/{folder_id}:untrash",
+    tags=_extra_tags,
+    status_code=status.HTTP_204_NO_CONTENT,
+)
+def untrash_folder(
+    _p: Annotated[FoldersPathParams, Depends()],
+):
+    ...
diff --git a/packages/models-library/src/models_library/api_schemas_webserver/folders.py b/packages/models-library/src/models_library/api_schemas_webserver/folders.py index e971b1f8c73..f8a235109a4 100644 --- a/packages/models-library/src/models_library/api_schemas_webserver/folders.py +++ b/packages/models-library/src/models_library/api_schemas_webserver/folders.py @@ -18,6 +18,7 @@ class FolderGet(OutputSchema): description: str created_at: datetime modified_at: datetime + trashed_at: datetime | None owner: GroupID my_access_rights: AccessRights access_rights: dict[GroupID, AccessRights] diff --git a/packages/models-library/src/models_library/api_schemas_webserver/folders_v2.py b/packages/models-library/src/models_library/api_schemas_webserver/folders_v2.py index 4398f1377f7..4ba77e0e7c3 100644 --- a/packages/models-library/src/models_library/api_schemas_webserver/folders_v2.py +++ b/packages/models-library/src/models_library/api_schemas_webserver/folders_v2.py @@ -18,6 +18,7 @@ class FolderGet(OutputSchema): name: str created_at: datetime modified_at: datetime + trashed_at: datetime | None owner: GroupID workspace_id: WorkspaceID | None my_access_rights: AccessRights diff --git a/packages/models-library/src/models_library/folders.py b/packages/models-library/src/models_library/folders.py index 73262e1e647..4d73618750c 100644 --- a/packages/models-library/src/models_library/folders.py +++ b/packages/models-library/src/models_library/folders.py @@ -29,6 +29,10 @@ class FolderDB(BaseModel): ..., description="Timestamp of last modification", ) + trashed_at: datetime | None = Field( + ..., + ) + user_id: UserID | None workspace_id: WorkspaceID | None diff --git a/packages/models-library/src/models_library/projects.py b/packages/models-library/src/models_library/projects.py index af2d99dc003..d59f9b30ad3 100644 --- a/packages/models-library/src/models_library/projects.py +++ b/packages/models-library/src/models_library/projects.py @@ -190,6 +190,7 @@ class Project(BaseProjectModel): default=None, alias="trashedAt", ) + trashed_explicitly: bool = Field(default=False, alias="trashedExplicitly") class Config: description = "Document that stores metadata, pipeline and UI setup of a study" diff --git a/packages/postgres-database/src/simcore_postgres_database/migration/versions/5ad02358751a_project_and_folder_trash_columns.py b/packages/postgres-database/src/simcore_postgres_database/migration/versions/5ad02358751a_project_and_folder_trash_columns.py new file mode 100644 index 00000000000..2cd8adb00f0 --- /dev/null +++ b/packages/postgres-database/src/simcore_postgres_database/migration/versions/5ad02358751a_project_and_folder_trash_columns.py @@ -0,0 +1,73 @@ +"""project and folder trash columns + +Revision ID: 5ad02358751a +Revises: fce5d231e16d +Create Date: 2024-11-07 17:14:01.094583+00:00 + +""" +import sqlalchemy as sa +from alembic import op +from sqlalchemy.dialects import postgresql + +# revision identifiers, used by Alembic. +revision = "5ad02358751a" +down_revision = "fce5d231e16d" +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! 
###
+    op.add_column(
+        "folders_v2",
+        sa.Column(
+            "trashed_at",
+            sa.DateTime(timezone=True),
+            nullable=True,
+            comment="The date and time when the folder was marked as trashed. Null if the folder has not been trashed [default].",
+        ),
+    )
+    op.add_column(
+        "folders_v2",
+        sa.Column(
+            "trashed_explicitly",
+            sa.Boolean(),
+            server_default=sa.text("false"),
+            nullable=False,
+            comment="Indicates whether the folder was explicitly trashed by the user (true) or inherited its trashed status from a parent (false) [default].",
+        ),
+    )
+    op.add_column(
+        "projects",
+        sa.Column(
+            "trashed_explicitly",
+            sa.Boolean(),
+            server_default=sa.text("false"),
+            nullable=False,
+            comment="Indicates whether the project was explicitly trashed by the user (true) or inherited its trashed status from a parent (false) [default].",
+        ),
+    )
+    op.alter_column(
+        "projects",
+        "trashed_at",
+        existing_type=postgresql.TIMESTAMP(timezone=True),
+        comment="The date and time when the project was marked as trashed. Null if the project has not been trashed [default].",
+        existing_nullable=True,
+    )
+    # ### end Alembic commands ###
+
+
+def downgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.alter_column(
+        "projects",
+        "trashed_at",
+        existing_type=postgresql.TIMESTAMP(timezone=True),
+        comment=None,
+        existing_comment="The date and time when the project was marked as trashed. Null if the project has not been trashed [default].",
+        existing_nullable=True,
+    )
+    op.drop_column("projects", "trashed_explicitly")
+    op.drop_column("folders_v2", "trashed_explicitly")
+    op.drop_column("folders_v2", "trashed_at")
+    # ### end Alembic commands ###
diff --git a/packages/postgres-database/src/simcore_postgres_database/models/folders_v2.py b/packages/postgres-database/src/simcore_postgres_database/models/folders_v2.py
index b1393bf5367..fcad0ada76c 100644
--- a/packages/postgres-database/src/simcore_postgres_database/models/folders_v2.py
+++ b/packages/postgres-database/src/simcore_postgres_database/models/folders_v2.py
@@ -1,4 +1,5 @@
 import sqlalchemy as sa
+from sqlalchemy.sql import expression
 
 from ._common import column_created_datetime, column_modified_datetime
 from .base import metadata
@@ -74,4 +75,19 @@
     ),
     column_created_datetime(timezone=True),
     column_modified_datetime(timezone=True),
+    sa.Column(
+        "trashed_at",
+        sa.DateTime(timezone=True),
+        nullable=True,
+        comment="The date and time when the folder was marked as trashed. "
+ "Null if the folder has not been trashed [default].", + ), + sa.Column( + "trashed_explicitly", + sa.Boolean, + nullable=False, + server_default=expression.false(), + comment="Indicates whether the folder was explicitly trashed by the user (true)" + " or inherited its trashed status from a parent (false) [default].", + ), ) diff --git a/packages/postgres-database/src/simcore_postgres_database/models/projects.py b/packages/postgres-database/src/simcore_postgres_database/models/projects.py index 629113f06dc..778d2b80eb5 100644 --- a/packages/postgres-database/src/simcore_postgres_database/models/projects.py +++ b/packages/postgres-database/src/simcore_postgres_database/models/projects.py @@ -5,7 +5,7 @@ import sqlalchemy as sa from sqlalchemy.dialects.postgresql import ARRAY, JSONB -from sqlalchemy.sql import func +from sqlalchemy.sql import expression, func from .base import metadata @@ -145,7 +145,16 @@ class ProjectType(enum.Enum): "trashed_at", sa.DateTime(timezone=True), nullable=True, - doc="Timestamp indicating when the project was marked as trashed, or null otherwise.", + comment="The date and time when the project was marked as trashed. " + "Null if the project has not been trashed [default].", + ), + sa.Column( + "trashed_explicitly", + sa.Boolean, + nullable=False, + server_default=expression.false(), + comment="Indicates whether the project was explicitly trashed by the user (true)" + " or inherited its trashed status from a parent (false) [default].", ), sa.Column( "workspace_id", diff --git a/packages/pytest-simcore/src/pytest_simcore/helpers/webserver_projects.py b/packages/pytest-simcore/src/pytest_simcore/helpers/webserver_projects.py index b6687e22239..37c9733fd3a 100644 --- a/packages/pytest-simcore/src/pytest_simcore/helpers/webserver_projects.py +++ b/packages/pytest-simcore/src/pytest_simcore/helpers/webserver_projects.py @@ -186,5 +186,5 @@ async def assert_get_same_project( data, error = await assert_status(resp, expected) if not error: - assert data == project + assert data == {k: project[k] for k in data} return data diff --git a/services/web/server/VERSION b/services/web/server/VERSION index a8ab6c9666a..bcce5d06b8a 100644 --- a/services/web/server/VERSION +++ b/services/web/server/VERSION @@ -1 +1 @@ -0.44.0 +0.45.0 diff --git a/services/web/server/setup.cfg b/services/web/server/setup.cfg index ab412830c97..2b54478220b 100644 --- a/services/web/server/setup.cfg +++ b/services/web/server/setup.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 0.44.0 +current_version = 0.45.0 commit = True message = services/webserver api version: {current_version} → {new_version} tag = False diff --git a/services/web/server/src/simcore_service_webserver/api/v0/openapi.yaml b/services/web/server/src/simcore_service_webserver/api/v0/openapi.yaml index a49c71acf17..dafb3f8fb08 100644 --- a/services/web/server/src/simcore_service_webserver/api/v0/openapi.yaml +++ b/services/web/server/src/simcore_service_webserver/api/v0/openapi.yaml @@ -2,7 +2,7 @@ openapi: 3.0.2 info: title: simcore-service-webserver description: Main service with an interface (http-API & websockets) to the web front-end - version: 0.44.0 + version: 0.45.0 servers: - url: '' description: webserver @@ -2626,6 +2626,27 @@ paths: example: '{"field": "name", "direction": "desc"}' name: order_by in: query + - description: "{\n \"title\": \"FolderFilters\",\n \"description\": \"Encoded\ + \ as JSON. 
Each available filter can have its own logic (should be well\ + \ documented)\\nInspired by Docker API https://docs.docker.com/engine/api/v1.43/#tag/Container/operation/ContainerList.\"\ + ,\n \"type\": \"object\",\n \"properties\": {\n \"trashed\": {\n \"title\"\ + : \"Trashed\",\n \"description\": \"Set to true to list trashed, false\ + \ to list non-trashed (default), None to list all\",\n \"default\": false,\n\ + \ \"type\": \"boolean\"\n }\n }\n}" + required: false + schema: + title: Filters + type: string + description: "{\n \"title\": \"FolderFilters\",\n \"description\": \"Encoded\ + \ as JSON. Each available filter can have its own logic (should be well\ + \ documented)\\nInspired by Docker API https://docs.docker.com/engine/api/v1.43/#tag/Container/operation/ContainerList.\"\ + ,\n \"type\": \"object\",\n \"properties\": {\n \"trashed\": {\n \"\ + title\": \"Trashed\",\n \"description\": \"Set to true to list trashed,\ + \ false to list non-trashed (default), None to list all\",\n \"default\"\ + : false,\n \"type\": \"boolean\"\n }\n }\n}" + format: json-string + name: filters + in: query - required: false schema: title: Limit @@ -3056,10 +3077,24 @@ paths: example: '{"field": "last_change_date", "direction": "desc"}' name: order_by in: query - - required: false + - description: "{\n \"title\": \"ProjectFilters\",\n \"description\": \"Encoded\ + \ as JSON. Each available filter can have its own logic (should be well\ + \ documented)\\nInspired by Docker API https://docs.docker.com/engine/api/v1.43/#tag/Container/operation/ContainerList.\"\ + ,\n \"type\": \"object\",\n \"properties\": {\n \"trashed\": {\n \"title\"\ + : \"Trashed\",\n \"description\": \"Set to true to list trashed, false\ + \ to list non-trashed (default), None to list all\",\n \"default\": false,\n\ + \ \"type\": \"boolean\"\n }\n }\n}" + required: false schema: title: Filters type: string + description: "{\n \"title\": \"ProjectFilters\",\n \"description\": \"Encoded\ + \ as JSON. 
Each available filter can have its own logic (should be well\ + \ documented)\\nInspired by Docker API https://docs.docker.com/engine/api/v1.43/#tag/Container/operation/ContainerList.\"\ + ,\n \"type\": \"object\",\n \"properties\": {\n \"trashed\": {\n \"\ + title\": \"Trashed\",\n \"description\": \"Set to true to list trashed,\ + \ false to list non-trashed (default), None to list all\",\n \"default\"\ + : false,\n \"type\": \"boolean\"\n }\n }\n}" format: json-string name: filters in: query @@ -4328,7 +4363,7 @@ paths: '403': description: ProjectInvalidRightsError '404': - description: ProjectNotFoundError, UserDefaultWalletNotFoundError + description: UserDefaultWalletNotFoundError, ProjectNotFoundError '409': description: ProjectTooManyProjectOpenedError '422': @@ -5410,6 +5445,57 @@ paths: responses: '204': description: Successful Response + /v0/folders/{folder_id}:trash: + post: + tags: + - trash + - folders + summary: Trash Folder + operationId: trash_folder + parameters: + - required: true + schema: + title: Folder Id + exclusiveMinimum: true + type: integer + minimum: 0 + name: folder_id + in: path + - required: false + schema: + title: Force + type: boolean + default: false + name: force + in: query + responses: + '204': + description: Successful Response + '404': + description: Not such a folder + '409': + description: One or more projects is in use and cannot be trashed + '503': + description: Trash service error + /v0/folders/{folder_id}:untrash: + post: + tags: + - trash + - folders + summary: Untrash Folder + operationId: untrash_folder + parameters: + - required: true + schema: + title: Folder Id + exclusiveMinimum: true + type: integer + minimum: 0 + name: folder_id + in: path + responses: + '204': + description: Successful Response /v0/repos/projects: get: tags: @@ -8427,6 +8513,10 @@ components: title: Modifiedat type: string format: date-time + trashedAt: + title: Trashedat + type: string + format: date-time owner: title: Owner exclusiveMinimum: true @@ -12583,28 +12673,25 @@ components: type: string - type: string default: UNDEFINED - id: - title: Id - type: string - read: - title: Read - type: boolean resource_id: - title: Resource ID + title: Resource Id anyOf: - enum: - - "" + - '' type: string - type: string - default: "" + default: '' user_from_id: - title: User ID of the one creating it - anyOf: - - enum: - - None - type: integer - - type: integer - default: None + title: User From Id + exclusiveMinimum: true + type: integer + minimum: 0 + id: + title: Id + type: string + read: + title: Read + type: boolean UserNotificationCreate: title: UserNotificationCreate required: @@ -12645,21 +12732,18 @@ components: - type: string default: UNDEFINED resource_id: - title: Resource ID + title: Resource Id anyOf: - enum: - - "" + - '' type: string - type: string - default: "" + default: '' user_from_id: - title: User ID of the one creating it - anyOf: - - enum: - - None - type: integer - - type: integer - default: None + title: User From Id + exclusiveMinimum: true + type: integer + minimum: 0 UserNotificationPatch: title: UserNotificationPatch required: diff --git a/services/web/server/src/simcore_service_webserver/errors.py b/services/web/server/src/simcore_service_webserver/errors.py index 173699f5888..bc041cc5840 100644 --- a/services/web/server/src/simcore_service_webserver/errors.py +++ b/services/web/server/src/simcore_service_webserver/errors.py @@ -1,8 +1,5 @@ -from typing import Any - from models_library.errors_classes import OsparcErrorMixin class 
WebServerBaseError(OsparcErrorMixin, Exception):
-    def __init__(self, **ctx: Any) -> None:
-        super().__init__(**ctx)
+    msg_template = "Error in web-server service"
diff --git a/services/web/server/src/simcore_service_webserver/exceptions_handlers.py b/services/web/server/src/simcore_service_webserver/exceptions_handlers.py
new file mode 100644
index 00000000000..7e1ae0bd3e0
--- /dev/null
+++ b/services/web/server/src/simcore_service_webserver/exceptions_handlers.py
@@ -0,0 +1,90 @@
+import functools
+import logging
+from collections.abc import Iterable
+from typing import NamedTuple, TypeAlias
+
+from aiohttp import web
+from servicelib.aiohttp.typing_extension import Handler
+from servicelib.aiohttp.web_exceptions_extension import get_http_error_class_or_none
+from servicelib.logging_errors import create_troubleshotting_log_kwargs
+from servicelib.status_codes_utils import is_5xx_server_error
+
+_logger = logging.getLogger(__name__)
+
+
+class HttpErrorInfo(NamedTuple):
+    status_code: int
+    msg_template: str
+
+
+ExceptionToHttpErrorMap: TypeAlias = dict[type[BaseException], HttpErrorInfo]
+
+
+class _DefaultDict(dict):
+    def __missing__(self, key):
+        return f"'{key}=?'"
+
+
+def _sort_exceptions_by_specificity(
+    exceptions: Iterable[type[BaseException]], *, concrete_first: bool = True
+) -> list[type[BaseException]]:
+    return sorted(
+        exceptions,
+        key=lambda exc: sum(issubclass(e, exc) for e in exceptions if e is not exc),
+        reverse=not concrete_first,
+    )
+
+
+def create_exception_handlers_decorator(
+    exceptions_catch: type[BaseException] | tuple[type[BaseException], ...],
+    exc_to_status_map: ExceptionToHttpErrorMap,
+):
+    mapped_classes: tuple[type[BaseException], ...] = tuple(
+        _sort_exceptions_by_specificity(exc_to_status_map.keys())
+    )
+
+    assert all(  # nosec
+        issubclass(cls, exceptions_catch) for cls in mapped_classes
+    ), f"Every {mapped_classes=} must inherit from one or more of {exceptions_catch=}"
+
+    def _decorator(handler: Handler):
+        @functools.wraps(handler)
+        async def _wrapper(request: web.Request) -> web.StreamResponse:
+            try:
+                return await handler(request)
+
+            except exceptions_catch as exc:
+                if exc_cls := next(
+                    (cls for cls in mapped_classes if isinstance(exc, cls)), None
+                ):
+                    http_error_info = exc_to_status_map[exc_cls]
+
+                    # safe formatting, i.e.
does not raise + user_msg = http_error_info.msg_template.format_map( + _DefaultDict(getattr(exc, "__dict__", {})) + ) + + http_error_cls = get_http_error_class_or_none( + http_error_info.status_code + ) + assert http_error_cls # nosec + + if is_5xx_server_error(http_error_info.status_code): + _logger.exception( + **create_troubleshotting_log_kwargs( + user_msg, + error=exc, + error_context={ + "request": request, + "request.remote": f"{request.remote}", + "request.method": f"{request.method}", + "request.path": f"{request.path}", + }, + ) + ) + raise http_error_cls(reason=user_msg) from exc + raise # reraise + + return _wrapper + + return _decorator diff --git a/services/web/server/src/simcore_service_webserver/folders/_exceptions_handlers.py b/services/web/server/src/simcore_service_webserver/folders/_exceptions_handlers.py new file mode 100644 index 00000000000..4f83b5e1872 --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/folders/_exceptions_handlers.py @@ -0,0 +1,74 @@ +import logging + +from servicelib.aiohttp import status + +from ..exceptions_handlers import ( + ExceptionToHttpErrorMap, + HttpErrorInfo, + create_exception_handlers_decorator, +) +from ..projects.exceptions import ( + BaseProjectError, + ProjectRunningConflictError, + ProjectStoppingError, +) +from ..workspaces.errors import ( + WorkspaceAccessForbiddenError, + WorkspaceFolderInconsistencyError, + WorkspaceNotFoundError, + WorkspacesValueError, +) +from .errors import ( + FolderAccessForbiddenError, + FolderNotFoundError, + FoldersValueError, + FolderValueNotPermittedError, +) + +_logger = logging.getLogger(__name__) + + +_TO_HTTP_ERROR_MAP: ExceptionToHttpErrorMap = { + FolderNotFoundError: HttpErrorInfo( + status.HTTP_404_NOT_FOUND, + "Folder was not found", + ), + WorkspaceNotFoundError: HttpErrorInfo( + status.HTTP_404_NOT_FOUND, + "Workspace was not found", + ), + FolderAccessForbiddenError: HttpErrorInfo( + status.HTTP_403_FORBIDDEN, + "Does not have access to this folder", + ), + WorkspaceAccessForbiddenError: HttpErrorInfo( + status.HTTP_403_FORBIDDEN, + "Does not have access to this workspace", + ), + WorkspaceFolderInconsistencyError: HttpErrorInfo( + status.HTTP_403_FORBIDDEN, + "This folder does not exist in this workspace", + ), + FolderValueNotPermittedError: HttpErrorInfo( + status.HTTP_409_CONFLICT, + "Provided folder value is not permitted: {reason}", + ), + FoldersValueError: HttpErrorInfo( + status.HTTP_409_CONFLICT, + "Invalid folder value set: {reason}", + ), + ProjectRunningConflictError: HttpErrorInfo( + status.HTTP_409_CONFLICT, + "One or more studies in this folder are in use and cannot be trashed. Please stop all services first and try again", + ), + ProjectStoppingError: HttpErrorInfo( + status.HTTP_503_SERVICE_UNAVAILABLE, + "Something went wrong while stopping services before trashing. 
Aborting trash.", + ), +} + + +handle_plugin_requests_exceptions = create_exception_handlers_decorator( + exceptions_catch=(BaseProjectError, FoldersValueError, WorkspacesValueError), + exc_to_status_map=_TO_HTTP_ERROR_MAP, +) diff --git a/services/web/server/src/simcore_service_webserver/folders/_folders_api.py b/services/web/server/src/simcore_service_webserver/folders/_folders_api.py index a6de1ce842d..0344124abb6 100644 --- a/services/web/server/src/simcore_service_webserver/folders/_folders_api.py +++ b/services/web/server/src/simcore_service_webserver/folders/_folders_api.py @@ -19,7 +19,7 @@ from ..folders.errors import FolderValueNotPermittedError from ..projects.projects_api import submit_delete_project_task from ..users.api import get_user -from ..workspaces._workspaces_api import check_user_workspace_access +from ..workspaces.api import check_user_workspace_access from ..workspaces.errors import ( WorkspaceAccessForbiddenError, WorkspaceFolderInconsistencyError, @@ -92,6 +92,7 @@ async def create_folder( name=folder_db.name, created_at=folder_db.created, modified_at=folder_db.modified, + trashed_at=folder_db.trashed_at, owner=folder_db.created_by_gid, workspace_id=workspace_id, my_access_rights=user_folder_access_rights, @@ -134,6 +135,7 @@ async def get_folder( name=folder_db.name, created_at=folder_db.created, modified_at=folder_db.modified, + trashed_at=folder_db.trashed_at, owner=folder_db.created_by_gid, workspace_id=folder_db.workspace_id, my_access_rights=user_folder_access_rights, @@ -146,6 +148,7 @@ async def list_folders( product_name: ProductName, folder_id: FolderID | None, workspace_id: WorkspaceID | None, + trashed: bool | None, offset: NonNegativeInt, limit: int, order_by: OrderBy, @@ -180,6 +183,7 @@ async def list_folders( user_id=user_id if workspace_is_private else None, workspace_id=workspace_id, product_name=product_name, + trashed=trashed, offset=offset, limit=limit, order_by=order_by, @@ -192,6 +196,7 @@ async def list_folders( name=folder.name, created_at=folder.created, modified_at=folder.modified, + trashed_at=folder.trashed_at, owner=folder.created_by_gid, workspace_id=folder.workspace_id, my_access_rights=user_folder_access_rights, @@ -268,6 +273,7 @@ async def update_folder( name=folder_db.name, created_at=folder_db.created, modified_at=folder_db.modified, + trashed_at=folder_db.trashed_at, owner=folder_db.created_by_gid, workspace_id=folder_db.workspace_id, my_access_rights=user_folder_access_rights, diff --git a/services/web/server/src/simcore_service_webserver/folders/_folders_db.py b/services/web/server/src/simcore_service_webserver/folders/_folders_db.py index 5c1dcf4d47f..0ee44c17199 100644 --- a/services/web/server/src/simcore_service_webserver/folders/_folders_db.py +++ b/services/web/server/src/simcore_service_webserver/folders/_folders_db.py @@ -5,7 +5,8 @@ """ import logging -from typing import cast +from datetime import datetime +from typing import Any, Final, cast from aiohttp import web from models_library.folders import FolderDB, FolderID @@ -28,6 +29,17 @@ _logger = logging.getLogger(__name__) +class UnSet: + ... 
+ + +_unset: Final = UnSet() + + +def as_dict_exclude_unset(**params) -> dict[str, Any]: + return {k: v for k, v in params.items() if not isinstance(v, UnSet)} + + _SELECTION_ARGS = ( folders_v2.c.folder_id, folders_v2.c.name, @@ -35,6 +47,7 @@ folders_v2.c.created_by_gid, folders_v2.c.created, folders_v2.c.modified, + folders_v2.c.trashed_at, folders_v2.c.user_id, folders_v2.c.workspace_id, ) @@ -80,14 +93,16 @@ async def list_( user_id: UserID | None, workspace_id: WorkspaceID | None, product_name: ProductName, + trashed: bool | None, offset: NonNegativeInt, limit: int, order_by: OrderBy, ) -> tuple[int, list[FolderDB]]: """ content_of_folder_id - Used to filter in which folder we want to list folders. None means root folder. + trashed - If set to true, it returns folders **explicitly** trashed, if false then non-trashed folders. """ - assert not ( + assert not ( # nosec user_id is not None and workspace_id is not None ), "Both user_id and workspace_id cannot be provided at the same time. Please provide only one." @@ -106,6 +121,16 @@ async def list_( assert workspace_id # nosec base_query = base_query.where(folders_v2.c.workspace_id == workspace_id) + if trashed is not None: + base_query = base_query.where( + ( + (folders_v2.c.trashed_at.is_not(None)) + & (folders_v2.c.trashed_explicitly.is_(True)) + ) + if trashed + else folders_v2.c.trashed_at.is_(None) + ) + # Select total count from base_query subquery = base_query.subquery() count_query = select(func.count()).select_from(subquery) @@ -188,34 +213,91 @@ async def get_for_user_or_workspace( return FolderDB.from_orm(row) -async def update( +async def _update_impl( app: web.Application, - *, - folder_id: FolderID, - name: str, - parent_folder_id: FolderID | None, + folders_id_or_ids: FolderID | set[FolderID], product_name: ProductName, + # updatable columns + name: str | UnSet = _unset, + parent_folder_id: FolderID | None | UnSet = _unset, + trashed_at: datetime | None | UnSet = _unset, + trashed_explicitly: bool | UnSet = _unset, ) -> FolderDB: + """ + Batch/single patch of folder/s + """ + # NOTE: exclude unset can also be done using a pydantic model and dict(exclude_unset=True) + updated = as_dict_exclude_unset( + name=name, + parent_folder_id=parent_folder_id, + trashed_at=trashed_at, + trashed_explicitly=trashed_explicitly, + ) + + query = ( + (folders_v2.update().values(modified=func.now(), **updated)) + .where(folders_v2.c.product_name == product_name) + .returning(*_SELECTION_ARGS) + ) + + if isinstance(folders_id_or_ids, set): + # batch-update + query = query.where(folders_v2.c.folder_id.in_(list(folders_id_or_ids))) + else: + # single-update + query = query.where(folders_v2.c.folder_id == folders_id_or_ids) + async with get_database_engine(app).acquire() as conn: - result = await conn.execute( - folders_v2.update() - .values( - name=name, - parent_folder_id=parent_folder_id, - modified=func.now(), - ) - .where( - (folders_v2.c.folder_id == folder_id) - & (folders_v2.c.product_name == product_name) - ) - .returning(*_SELECTION_ARGS) - ) + result = await conn.execute(query) row = await result.first() if row is None: - raise FolderNotFoundError(reason=f"Folder {folder_id} not found.") + raise FolderNotFoundError(reason=f"Folder {folders_id_or_ids} not found.") return FolderDB.from_orm(row) +async def update_batch( + app: web.Application, + *folder_id: FolderID, + product_name: ProductName, + # updatable columns + name: str | UnSet = _unset, + parent_folder_id: FolderID | None | UnSet = _unset, + trashed_at: datetime | None | 
UnSet = _unset, + trashed_explicitly: bool | UnSet = _unset, +) -> FolderDB: + return await _update_impl( + app=app, + folders_id_or_ids=set(folder_id), + product_name=product_name, + name=name, + parent_folder_id=parent_folder_id, + trashed_at=trashed_at, + trashed_explicitly=trashed_explicitly, + ) + + +async def update( + app: web.Application, + *, + folder_id: FolderID, + product_name: ProductName, + # updatable columns + name: str | UnSet = _unset, + parent_folder_id: FolderID | None | UnSet = _unset, + trashed_at: datetime | None | UnSet = _unset, + trashed_explicitly: bool | UnSet = _unset, +) -> FolderDB: + return await _update_impl( + app=app, + folders_id_or_ids=folder_id, + product_name=product_name, + name=name, + parent_folder_id=parent_folder_id, + trashed_at=trashed_at, + trashed_explicitly=trashed_explicitly, + ) + + async def delete_recursively( app: web.Application, *, @@ -231,6 +313,7 @@ async def delete_recursively( & (folders_v2.c.product_name == product_name) ) folder_hierarchy_cte = base_query.cte(name="folder_hierarchy", recursive=True) + # Step 2: Define the recursive case folder_alias = aliased(folders_v2) recursive_query = select( @@ -241,8 +324,10 @@ async def delete_recursively( folder_alias.c.parent_folder_id == folder_hierarchy_cte.c.folder_id, ) ) + # Step 3: Combine base and recursive cases into a CTE folder_hierarchy_cte = folder_hierarchy_cte.union_all(recursive_query) + # Step 4: Execute the query to get all descendants final_query = select(folder_hierarchy_cte) result = await conn.execute(final_query) @@ -320,8 +405,7 @@ async def get_projects_recursively_only_if_user_is_owner( result = await conn.execute(query) rows = await result.fetchall() or [] - results = [ProjectID(row[0]) for row in rows] - return results + return [ProjectID(row[0]) for row in rows] async def get_folders_recursively( @@ -339,6 +423,7 @@ async def get_folders_recursively( & (folders_v2.c.product_name == product_name) ) folder_hierarchy_cte = base_query.cte(name="folder_hierarchy", recursive=True) + # Step 2: Define the recursive case folder_alias = aliased(folders_v2) recursive_query = select( @@ -349,8 +434,10 @@ async def get_folders_recursively( folder_alias.c.parent_folder_id == folder_hierarchy_cte.c.folder_id, ) ) + # Step 3: Combine base and recursive cases into a CTE folder_hierarchy_cte = folder_hierarchy_cte.union_all(recursive_query) + # Step 4: Execute the query to get all descendants final_query = select(folder_hierarchy_cte) result = await conn.execute(final_query) diff --git a/services/web/server/src/simcore_service_webserver/folders/_folders_handlers.py b/services/web/server/src/simcore_service_webserver/folders/_folders_handlers.py index f331c98da4a..e4fffd82fc6 100644 --- a/services/web/server/src/simcore_service_webserver/folders/_folders_handlers.py +++ b/services/web/server/src/simcore_service_webserver/folders/_folders_handlers.py @@ -1,4 +1,3 @@ -import functools import logging from aiohttp import web @@ -8,135 +7,42 @@ FolderGetPage, PutFolderBodyParams, ) -from models_library.basic_types import IDStr -from models_library.folders import FolderID -from models_library.rest_ordering import OrderBy, OrderDirection -from models_library.rest_pagination import Page, PageQueryParameters +from models_library.rest_ordering import OrderBy +from models_library.rest_pagination import Page from models_library.rest_pagination_utils import paginate_data -from models_library.users import UserID -from models_library.utils.common_validators import 
null_or_none_str_to_none_validator -from models_library.workspaces import WorkspaceID -from pydantic import Extra, Field, Json, parse_obj_as, validator +from pydantic import parse_obj_as from servicelib.aiohttp import status from servicelib.aiohttp.requests_validation import ( - RequestParams, - StrictRequestParams, parse_request_body_as, parse_request_path_parameters_as, parse_request_query_parameters_as, ) -from servicelib.aiohttp.typing_extension import Handler from servicelib.mimetype_constants import MIMETYPE_APPLICATION_JSON -from servicelib.request_keys import RQT_USERID_KEY from servicelib.rest_constants import RESPONSE_MODEL_POLICY -from .._constants import RQ_PRODUCT_KEY from .._meta import API_VTAG as VTAG from ..login.decorators import login_required from ..security.decorators import permission_required from ..utils_aiohttp import envelope_json_response -from ..workspaces.errors import ( - WorkspaceAccessForbiddenError, - WorkspaceFolderInconsistencyError, - WorkspaceNotFoundError, -) from . import _folders_api -from .errors import ( - FolderAccessForbiddenError, - FolderNotFoundError, - FoldersValueError, - FolderValueNotPermittedError, +from ._exceptions_handlers import handle_plugin_requests_exceptions +from ._models import ( + FolderFilters, + FolderListWithJsonStrQueryParams, + FoldersPathParams, + FoldersRequestContext, ) _logger = logging.getLogger(__name__) -def handle_folders_exceptions(handler: Handler): - @functools.wraps(handler) - async def wrapper(request: web.Request) -> web.StreamResponse: - try: - return await handler(request) - - except (FolderNotFoundError, WorkspaceNotFoundError) as exc: - raise web.HTTPNotFound(reason=f"{exc}") from exc - - except ( - FolderAccessForbiddenError, - WorkspaceAccessForbiddenError, - WorkspaceFolderInconsistencyError, - ) as exc: - raise web.HTTPForbidden(reason=f"{exc}") from exc - - except (FolderValueNotPermittedError, FoldersValueError) as exc: - raise web.HTTPBadRequest(reason=f"{exc}") from exc - - return wrapper - - -# -# folders COLLECTION ------------------------- -# - routes = web.RouteTableDef() -class FoldersRequestContext(RequestParams): - user_id: UserID = Field(..., alias=RQT_USERID_KEY) # type: ignore[literal-required] - product_name: str = Field(..., alias=RQ_PRODUCT_KEY) # type: ignore[literal-required] - - -class FoldersPathParams(StrictRequestParams): - folder_id: FolderID - - -class FolderListWithJsonStrQueryParams(PageQueryParameters): - # pylint: disable=unsubscriptable-object - order_by: Json[OrderBy] = Field( - default=OrderBy(field=IDStr("modified"), direction=OrderDirection.DESC), - description="Order by field (modified_at|name|description) and direction (asc|desc). The default sorting order is ascending.", - example='{"field": "name", "direction": "desc"}', - alias="order_by", - ) - folder_id: FolderID | None = Field( - default=None, - description="List the subfolders of this folder. By default, list the subfolders of the root directory (Folder ID is None).", - ) - workspace_id: WorkspaceID | None = Field( - default=None, - description="List folders in specific workspace. 
By default, list in the user private workspace", - ) - - @validator("order_by", check_fields=False) - @classmethod - def validate_order_by_field(cls, v): - if v.field not in { - "modified_at", - "name", - "description", - }: - msg = f"We do not support ordering by provided field {v.field}" - raise ValueError(msg) - if v.field == "modified_at": - v.field = "modified" - return v - - class Config: - extra = Extra.forbid - - # validators - _null_or_none_str_to_none_validator = validator( - "folder_id", allow_reuse=True, pre=True - )(null_or_none_str_to_none_validator) - - _null_or_none_str_to_none_validator2 = validator( - "workspace_id", allow_reuse=True, pre=True - )(null_or_none_str_to_none_validator) - - @routes.post(f"/{VTAG}/folders", name="create_folder") @login_required @permission_required("folder.create") -@handle_folders_exceptions +@handle_plugin_requests_exceptions async def create_folder(request: web.Request): req_ctx = FoldersRequestContext.parse_obj(request) body_params = await parse_request_body_as(CreateFolderBodyParams, request) @@ -156,19 +62,23 @@ async def create_folder(request: web.Request): @routes.get(f"/{VTAG}/folders", name="list_folders") @login_required @permission_required("folder.read") -@handle_folders_exceptions +@handle_plugin_requests_exceptions async def list_folders(request: web.Request): req_ctx = FoldersRequestContext.parse_obj(request) query_params: FolderListWithJsonStrQueryParams = parse_request_query_parameters_as( FolderListWithJsonStrQueryParams, request ) + if not query_params.filters: + query_params.filters = FolderFilters() + folders: FolderGetPage = await _folders_api.list_folders( app=request.app, user_id=req_ctx.user_id, product_name=req_ctx.product_name, folder_id=query_params.folder_id, workspace_id=query_params.workspace_id, + trashed=query_params.filters.trashed, offset=query_params.offset, limit=query_params.limit, order_by=parse_obj_as(OrderBy, query_params.order_by), @@ -192,7 +102,7 @@ async def list_folders(request: web.Request): @routes.get(f"/{VTAG}/folders/{{folder_id}}", name="get_folder") @login_required @permission_required("folder.read") -@handle_folders_exceptions +@handle_plugin_requests_exceptions async def get_folder(request: web.Request): req_ctx = FoldersRequestContext.parse_obj(request) path_params = parse_request_path_parameters_as(FoldersPathParams, request) @@ -213,7 +123,7 @@ async def get_folder(request: web.Request): ) @login_required @permission_required("folder.update") -@handle_folders_exceptions +@handle_plugin_requests_exceptions async def replace_folder(request: web.Request): req_ctx = FoldersRequestContext.parse_obj(request) path_params = parse_request_path_parameters_as(FoldersPathParams, request) @@ -236,7 +146,7 @@ async def replace_folder(request: web.Request): ) @login_required @permission_required("folder.delete") -@handle_folders_exceptions +@handle_plugin_requests_exceptions async def delete_folder_group(request: web.Request): req_ctx = FoldersRequestContext.parse_obj(request) path_params = parse_request_path_parameters_as(FoldersPathParams, request) diff --git a/services/web/server/src/simcore_service_webserver/folders/_models.py b/services/web/server/src/simcore_service_webserver/folders/_models.py new file mode 100644 index 00000000000..fb337b5b199 --- /dev/null +++ b/services/web/server/src/simcore_service_webserver/folders/_models.py @@ -0,0 +1,85 @@ +import logging + +from models_library.basic_types import IDStr +from models_library.folders import FolderID +from models_library.rest_filters 
diff --git a/services/web/server/src/simcore_service_webserver/folders/_models.py b/services/web/server/src/simcore_service_webserver/folders/_models.py
new file mode 100644
index 00000000000..fb337b5b199
--- /dev/null
+++ b/services/web/server/src/simcore_service_webserver/folders/_models.py
@@ -0,0 +1,85 @@
+import logging
+
+from models_library.basic_types import IDStr
+from models_library.folders import FolderID
+from models_library.rest_filters import Filters, FiltersQueryParameters
+from models_library.rest_ordering import OrderBy, OrderDirection
+from models_library.rest_pagination import PageQueryParameters
+from models_library.users import UserID
+from models_library.utils.common_validators import null_or_none_str_to_none_validator
+from models_library.workspaces import WorkspaceID
+from pydantic import BaseModel, Extra, Field, Json, validator
+from servicelib.aiohttp.requests_validation import RequestParams, StrictRequestParams
+from servicelib.request_keys import RQT_USERID_KEY
+
+from .._constants import RQ_PRODUCT_KEY
+
+_logger = logging.getLogger(__name__)
+
+
+class FoldersRequestContext(RequestParams):
+    user_id: UserID = Field(..., alias=RQT_USERID_KEY)  # type: ignore[literal-required]
+    product_name: str = Field(..., alias=RQ_PRODUCT_KEY)  # type: ignore[literal-required]
+
+
+class FoldersPathParams(StrictRequestParams):
+    folder_id: FolderID
+
+
+class FolderFilters(Filters):
+    trashed: bool | None = Field(
+        default=False,
+        description="Set to true to list trashed, false to list non-trashed (default), None to list all",
+    )
+
+
+class FolderListWithJsonStrQueryParams(
+    PageQueryParameters, FiltersQueryParameters[FolderFilters]
+):
+    # pylint: disable=unsubscriptable-object
+    order_by: Json[OrderBy] = Field(
+        default=OrderBy(field=IDStr("modified"), direction=OrderDirection.DESC),
+        description="Order by field (modified_at|name|description) and direction (asc|desc). The default sorting order is ascending.",
+        example='{"field": "name", "direction": "desc"}',
+        alias="order_by",
+    )
+    folder_id: FolderID | None = Field(
+        default=None,
+        description="List the subfolders of this folder. By default, list the subfolders of the root directory (Folder ID is None).",
+    )
+    workspace_id: WorkspaceID | None = Field(
+        default=None,
+        description="List folders in specific workspace. By default, list in the user private workspace",
+    )
+
+    @validator("order_by", check_fields=False)
+    @classmethod
+    def _validate_order_by_field(cls, v):
+        if v.field not in {
+            "modified_at",
+            "name",
+            "description",
+        }:
+            msg = f"We do not support ordering by provided field {v.field}"
+            raise ValueError(msg)
+        if v.field == "modified_at":
+            v.field = "modified"
+        return v
+
+    class Config:
+        extra = Extra.forbid
+
+    # validators
+    _null_or_none_str_to_none_validator = validator(
+        "folder_id", allow_reuse=True, pre=True
+    )(null_or_none_str_to_none_validator)
+
+    _null_or_none_str_to_none_validator2 = validator(
+        "workspace_id", allow_reuse=True, pre=True
+    )(null_or_none_str_to_none_validator)
+
+
+class RemoveQueryParams(BaseModel):
+    force: bool = Field(
+        default=False, description="Force removal (even if resource is active)"
+    )
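The FiltersQueryParameters[FolderFilters] mixin above lets the raw query string carry a JSON document. A self-contained pydantic v1 sketch of that round-trip, using hypothetical stand-in models rather than the real servicelib/models_library classes:

    from pydantic import BaseModel, Json

    class SketchFilters(BaseModel):  # stand-in for FolderFilters
        trashed: bool | None = False

    class SketchQueryParams(BaseModel):  # stand-in for FiltersQueryParameters
        filters: Json[SketchFilters] | None = None  # parsed from the raw query string

    params = SketchQueryParams(filters='{"trashed": true}')
    assert params.filters is not None and params.filters.trashed is True
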
diff --git a/services/web/server/src/simcore_service_webserver/folders/_trash_api.py b/services/web/server/src/simcore_service_webserver/folders/_trash_api.py
new file mode 100644
index 00000000000..1cad0415161
--- /dev/null
+++ b/services/web/server/src/simcore_service_webserver/folders/_trash_api.py
@@ -0,0 +1,177 @@
+import logging
+from datetime import datetime
+
+import arrow
+from aiohttp import web
+from models_library.folders import FolderID
+from models_library.products import ProductName
+from models_library.projects import ProjectID
+from models_library.users import UserID
+
+from ..projects._trash_api import trash_project, untrash_project
+from ..workspaces.api import check_user_workspace_access
+from . import _folders_db
+
+_logger = logging.getLogger(__name__)
+
+
+async def _check_exists_and_access(
+    app: web.Application,
+    *,
+    product_name: ProductName,
+    user_id: UserID,
+    folder_id: FolderID,
+) -> bool:
+    # exists?
+    # check whether this folder exists
+    # otherwise raise not-found error
+    folder_db = await _folders_db.get(
+        app, folder_id=folder_id, product_name=product_name
+    )
+
+    # can?
+    # check whether user in product has enough permissions to delete this folder
+    # otherwise raise forbidden error
+    workspace_is_private = True
+    if folder_db.workspace_id:
+        await check_user_workspace_access(
+            app,
+            user_id=user_id,
+            workspace_id=folder_db.workspace_id,
+            product_name=product_name,
+            permission="delete",
+        )
+        workspace_is_private = False
+
+    await _folders_db.get_for_user_or_workspace(
+        app,
+        folder_id=folder_id,
+        product_name=product_name,
+        user_id=user_id if workspace_is_private else None,
+        workspace_id=folder_db.workspace_id,
+    )
+    return workspace_is_private
+
+
+async def _folders_db_update(
+    app: web.Application,
+    *,
+    product_name: ProductName,
+    folder_id: FolderID,
+    trashed_at: datetime | None,
+):
+    # EXPLICIT un/trash
+    await _folders_db.update(
+        app,
+        folder_id=folder_id,
+        product_name=product_name,
+        trashed_at=trashed_at,
+        trashed_explicitly=trashed_at is not None,
+    )
+
+    # IMPLICIT un/trash
+    child_folders: set[FolderID] = {
+        f
+        for f in await _folders_db.get_folders_recursively(
+            app, folder_id=folder_id, product_name=product_name
+        )
+        if f != folder_id
+    }
+
+    if child_folders:
+        await _folders_db.update_batch(
+            app,
+            *child_folders,
+            product_name=product_name,
+            trashed_at=trashed_at,
+            trashed_explicitly=False,
+        )
+
+
+async def trash_folder(
+    app: web.Application,
+    *,
+    product_name: ProductName,
+    user_id: UserID,
+    folder_id: FolderID,
+    force_stop_first: bool,
+):
+
+    workspace_is_private = await _check_exists_and_access(
+        app, product_name=product_name, user_id=user_id, folder_id=folder_id
+    )
+
+    # Trash
+    trashed_at = arrow.utcnow().datetime
+
+    _logger.debug(
+        "TODO: use a unit of work for all folders and projects, and fail if force_stop_first=%s is False",
+        force_stop_first,
+    )
+
+    # 1. Trash folder and children
+    await _folders_db_update(
+        app,
+        folder_id=folder_id,
+        product_name=product_name,
+        trashed_at=trashed_at,
+    )
+
+    # 2. Trash all child projects that the user owns
+    child_projects: list[
+        ProjectID
+    ] = await _folders_db.get_projects_recursively_only_if_user_is_owner(
+        app,
+        folder_id=folder_id,
+        private_workspace_user_id_or_none=user_id if workspace_is_private else None,
+        user_id=user_id,
+        product_name=product_name,
+    )
+
+    for project_id in child_projects:
+        await trash_project(
+            app,
+            product_name=product_name,
+            user_id=user_id,
+            project_id=project_id,
+            force_stop_first=force_stop_first,
+            explicit=False,
+        )
+
+
+async def untrash_folder(
+    app: web.Application,
+    *,
+    product_name: ProductName,
+    user_id: UserID,
+    folder_id: FolderID,
+):
+    workspace_is_private = await _check_exists_and_access(
+        app, product_name=product_name, user_id=user_id, folder_id=folder_id
+    )
+
+    # UNtrash
+
+    # 1. UNtrash folder and children
+    await _folders_db_update(
+        app,
+        folder_id=folder_id,
+        product_name=product_name,
+        trashed_at=None,
+    )
+
+    # 2. UNtrash all child projects that the user owns
+    child_projects: list[
+        ProjectID
+    ] = await _folders_db.get_projects_recursively_only_if_user_is_owner(
+        app,
+        folder_id=folder_id,
+        private_workspace_user_id_or_none=user_id if workspace_is_private else None,
+        user_id=user_id,
+        product_name=product_name,
+    )
+
+    for project_id in child_projects:
+        await untrash_project(
+            app, product_name=product_name, user_id=user_id, project_id=project_id
+        )
diff --git a/services/web/server/src/simcore_service_webserver/folders/_trash_handlers.py b/services/web/server/src/simcore_service_webserver/folders/_trash_handlers.py
new file mode 100644
index 00000000000..55b53fcd4ee
--- /dev/null
+++ b/services/web/server/src/simcore_service_webserver/folders/_trash_handlers.py
@@ -0,0 +1,66 @@
+import logging
+
+from aiohttp import web
+from servicelib.aiohttp import status
+from servicelib.aiohttp.requests_validation import (
+    parse_request_path_parameters_as,
+    parse_request_query_parameters_as,
+)
+
+from .._meta import API_VTAG as VTAG
+from ..application_settings_utils import requires_dev_feature_enabled
+from ..login.decorators import get_user_id, login_required
+from ..products.api import get_product_name
+from ..security.decorators import permission_required
+from . import _trash_api
+from ._exceptions_handlers import handle_plugin_requests_exceptions
+from ._models import FoldersPathParams, RemoveQueryParams
+
+_logger = logging.getLogger(__name__)
+
+
+routes = web.RouteTableDef()
+
+
+@routes.post(f"/{VTAG}/folders/{{folder_id}}:trash", name="trash_folder")
+@requires_dev_feature_enabled
+@login_required
+@permission_required("folder.delete")
+@handle_plugin_requests_exceptions
+async def trash_folder(request: web.Request):
+    user_id = get_user_id(request)
+    product_name = get_product_name(request)
+    path_params = parse_request_path_parameters_as(FoldersPathParams, request)
+    query_params: RemoveQueryParams = parse_request_query_parameters_as(
+        RemoveQueryParams, request
+    )
+
+    await _trash_api.trash_folder(
+        request.app,
+        product_name=product_name,
+        user_id=user_id,
+        folder_id=path_params.folder_id,
+        force_stop_first=query_params.force,
+    )
+
+    return web.json_response(status=status.HTTP_204_NO_CONTENT)
+
+
+@routes.post(f"/{VTAG}/folders/{{folder_id}}:untrash", name="untrash_folder")
+@requires_dev_feature_enabled
+@login_required
+@permission_required("folder.delete")
+@handle_plugin_requests_exceptions
+async def untrash_folder(request: web.Request):
+    user_id = get_user_id(request)
+    product_name = get_product_name(request)
+    path_params = parse_request_path_parameters_as(FoldersPathParams, request)
+
+    await _trash_api.untrash_folder(
+        request.app,
+        product_name=product_name,
+        user_id=user_id,
+        folder_id=path_params.folder_id,
+    )
+
+    return web.json_response(status=status.HTTP_204_NO_CONTENT)
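Together with _trash_api above, these handlers expose trash/untrash verbs on folders. A sketch of the resulting REST calls, as the tests later in this patch exercise them (assumes an authenticated aiohttp session created with a base_url so relative paths resolve; 204 is the status the handlers return):

    import aiohttp

    async def trash_then_untrash(session: aiohttp.ClientSession, folder_id: int) -> None:
        # per RemoveQueryParams, force requests removal even if the resource is active
        resp = await session.post(f"/v0/folders/{folder_id}:trash", params={"force": "true"})
        assert resp.status == 204

        resp = await session.post(f"/v0/folders/{folder_id}:untrash")
        assert resp.status == 204
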
diff --git a/services/web/server/src/simcore_service_webserver/folders/plugin.py b/services/web/server/src/simcore_service_webserver/folders/plugin.py
index bfc0fafb351..8ddef03ec1f 100644
--- a/services/web/server/src/simcore_service_webserver/folders/plugin.py
+++ b/services/web/server/src/simcore_service_webserver/folders/plugin.py
@@ -7,7 +7,7 @@
 from servicelib.aiohttp.application_keys import APP_SETTINGS_KEY
 from servicelib.aiohttp.application_setup import ModuleCategory, app_module_setup

-from . import _folders_handlers
+from . import _folders_handlers, _trash_handlers

 _logger = logging.getLogger(__name__)

@@ -24,3 +24,4 @@ def setup_folders(app: web.Application):

     # routes
     app.router.add_routes(_folders_handlers.routes)
+    app.router.add_routes(_trash_handlers.routes)
diff --git a/services/web/server/src/simcore_service_webserver/projects/_crud_api_read.py b/services/web/server/src/simcore_service_webserver/projects/_crud_api_read.py
index f6c98c6e08e..f8b6aee4ff9 100644
--- a/services/web/server/src/simcore_service_webserver/projects/_crud_api_read.py
+++ b/services/web/server/src/simcore_service_webserver/projects/_crud_api_read.py
@@ -115,8 +115,8 @@ async def list_projects(  # pylint: disable=too-many-arguments
         # attrs
         filter_by_project_type=ProjectTypeAPI.to_project_type_db(project_type),
         filter_by_services=user_available_services,
-        trashed=trashed,
-        hidden=show_hidden,
+        filter_trashed=trashed,
+        filter_hidden=show_hidden,
         # composed attrs
         search=search,
         # pagination
diff --git a/services/web/server/src/simcore_service_webserver/projects/_trash_api.py b/services/web/server/src/simcore_service_webserver/projects/_trash_api.py
index 6469375c853..d3bc6092aaf 100644
--- a/services/web/server/src/simcore_service_webserver/projects/_trash_api.py
+++ b/services/web/server/src/simcore_service_webserver/projects/_trash_api.py
@@ -69,6 +69,7 @@ async def trash_project(
     user_id: UserID,
     project_id: ProjectID,
     force_stop_first: bool,
+    explicit: bool,
 ):
     """

@@ -113,13 +114,14 @@ async def _schedule():
             product_name=product_name,
         )

-    # mark as trash
     await projects_api.patch_project(
         app,
         user_id=user_id,
         product_name=product_name,
         project_uuid=project_id,
-        project_patch=ProjectPatchExtended(trashed_at=arrow.utcnow().datetime),
+        project_patch=ProjectPatchExtended(
+            trashed_at=arrow.utcnow().datetime, trashed_explicitly=explicit
+        ),
     )


@@ -136,5 +138,5 @@ async def untrash_project(
         user_id=user_id,
         product_name=product_name,
         project_uuid=project_id,
-        project_patch=ProjectPatchExtended(trashed_at=None),
+        project_patch=ProjectPatchExtended(trashed_at=None, trashed_explicitly=False),
     )
diff --git a/services/web/server/src/simcore_service_webserver/projects/_trash_handlers.py b/services/web/server/src/simcore_service_webserver/projects/_trash_handlers.py
index 2995488c562..4593779e735 100644
--- a/services/web/server/src/simcore_service_webserver/projects/_trash_handlers.py
+++ b/services/web/server/src/simcore_service_webserver/projects/_trash_handlers.py
@@ -1,6 +1,4 @@
-import functools
 import logging
-from typing import NamedTuple

 from aiohttp import web
 from servicelib.aiohttp import status
@@ -8,13 +6,14 @@
     parse_request_path_parameters_as,
     parse_request_query_parameters_as,
 )
-from servicelib.aiohttp.typing_extension import Handler
-from servicelib.aiohttp.web_exceptions_extension import get_http_error_class_or_none
-from servicelib.logging_errors import create_troubleshotting_log_kwargs
-from servicelib.status_codes_utils import is_5xx_server_error

 from .._meta import API_VTAG as VTAG
 from ..application_settings_utils import requires_dev_feature_enabled
+from ..exceptions_handlers import (
+    ExceptionToHttpErrorMap,
+    HttpErrorInfo,
+    create_exception_handlers_decorator,
+)
 from ..login.decorators import get_user_id, login_required
 from ..products.api import get_product_name
 from ..projects._common_models import ProjectPathParams
@@ -34,12 +33,7 @@
 #

-class HttpErrorInfo(NamedTuple):
-    status_code: int
-    msg_template: str
-
-
-_TO_HTTP_ERROR_MAP: dict[type[Exception], HttpErrorInfo] = {
+_TO_HTTP_ERROR_MAP: ExceptionToHttpErrorMap = {
     ProjectRunningConflictError: HttpErrorInfo(
         status.HTTP_409_CONFLICT,
         "Current study is in use and cannot be trashed [project_id={project_uuid}]. Please stop all services first and try again",
     ),
@@ -51,49 +45,9 @@ class HttpErrorInfo(NamedTuple):
 }

-class _DefaultDict(dict):
-    def __missing__(self, key):
-        return f"'{key}=?'"
-
-
-def _handle_request_exceptions(handler: Handler):
-    @functools.wraps(handler)
-    async def _wrapper(request: web.Request) -> web.StreamResponse:
-        try:
-            return await handler(request)
-
-        except ProjectTrashError as exc:
-            for exc_cls, http_error_info in _TO_HTTP_ERROR_MAP.items():
-                if isinstance(exc, exc_cls):
-
-                    # safe formatting, i.e. does not raise
-                    user_msg = http_error_info.msg_template.format_map(
-                        _DefaultDict(getattr(exc, "__dict__", {}))
-                    )
-
-                    http_error_cls = get_http_error_class_or_none(
-                        http_error_info.status_code
-                    )
-                    assert http_error_cls  # nosec
-
-                    if is_5xx_server_error(http_error_info.status_code):
-                        _logger.exception(
-                            **create_troubleshotting_log_kwargs(
-                                user_msg,
-                                error=exc,
-                                error_context={
-                                    "request": request,
-                                    "request.remote": f"{request.remote}",
-                                    "request.method": f"{request.method}",
-                                    "request.path": f"{request.path}",
-                                },
-                            )
-                        )
-                    raise http_error_cls(reason=user_msg) from exc
-            raise
-
-    return _wrapper
-
+_handle_exceptions = create_exception_handlers_decorator(
+    exceptions_catch=ProjectTrashError, exc_to_status_map=_TO_HTTP_ERROR_MAP
+)

 #
 # ROUTES
@@ -106,7 +60,7 @@ async def _wrapper(request: web.Request) -> web.StreamResponse:
 @requires_dev_feature_enabled
 @login_required
 @permission_required("project.delete")
-@_handle_request_exceptions
+@_handle_exceptions
 async def empty_trash(request: web.Request):
     user_id = get_user_id(request)
     product_name = get_product_name(request)
@@ -122,7 +76,7 @@ async def empty_trash(request: web.Request):
 @requires_dev_feature_enabled
 @login_required
 @permission_required("project.delete")
-@_handle_request_exceptions
+@_handle_exceptions
 async def trash_project(request: web.Request):
     user_id = get_user_id(request)
     product_name = get_product_name(request)
@@ -137,6 +91,7 @@ async def trash_project(request: web.Request):
         user_id=user_id,
         project_id=path_params.project_id,
         force_stop_first=query_params.force,
+        explicit=True,
     )

     return web.json_response(status=status.HTTP_204_NO_CONTENT)
@@ -146,7 +101,7 @@ async def trash_project(request: web.Request):
 @requires_dev_feature_enabled
 @login_required
 @permission_required("project.delete")
-@_handle_request_exceptions
+@_handle_exceptions
 async def untrash_project(request: web.Request):
     user_id = get_user_id(request)
     product_name = get_product_name(request)
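The refactoring above replaces the hand-rolled _handle_request_exceptions wrapper with the reusable create_exception_handlers_decorator from the new shared exceptions_handlers module. A condensed sketch of the pattern only, not the real implementation (which, as the removed code shows, also does safe message formatting and troubleshooting logs for 5XX errors); the status-to-class map here is a simplification:

    import functools

    from aiohttp import web

    _STATUS_TO_HTTP_ERROR = {409: web.HTTPConflict, 503: web.HTTPServiceUnavailable}

    def sketch_exception_handlers_decorator(exceptions_catch, exc_to_status_map):
        """Maps domain exceptions to aiohttp HTTP errors (simplified sketch)."""

        def _decorator(handler):
            @functools.wraps(handler)
            async def _wrapper(request: web.Request) -> web.StreamResponse:
                try:
                    return await handler(request)
                except exceptions_catch as exc:
                    for exc_cls, (status_code, msg_template) in exc_to_status_map.items():
                        if isinstance(exc, exc_cls):
                            http_error_cls = _STATUS_TO_HTTP_ERROR[status_code]
                            # NOTE: naive formatting; the real code formats safely
                            raise http_error_cls(
                                reason=msg_template.format_map(vars(exc))
                            ) from exc
                    raise  # unmapped: bubbles up to the error middleware

            return _wrapper

        return _decorator
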
diff --git a/services/web/server/src/simcore_service_webserver/projects/db.py b/services/web/server/src/simcore_service_webserver/projects/db.py
index 6cbe059dfb7..5e0c216f77e 100644
--- a/services/web/server/src/simcore_service_webserver/projects/db.py
+++ b/services/web/server/src/simcore_service_webserver/projects/db.py
@@ -362,9 +362,9 @@ async def list_projects(  # pylint: disable=too-many-arguments
         search: str | None = None,
         filter_by_project_type: ProjectType | None = None,
         filter_by_services: list[dict] | None = None,
-        published: bool | None = False,
-        hidden: bool | None = False,
-        trashed: bool | None = False,
+        filter_published: bool | None = False,
+        filter_hidden: bool | None = False,
+        filter_trashed: bool | None = False,
         # pagination
         offset: int | None = 0,
         limit: int | None = None,
@@ -442,16 +442,21 @@ async def list_projects(  # pylint: disable=too-many-arguments
                 projects.c.type == filter_by_project_type.value
             )

-        if hidden is not None:
-            attributes_filters.append(projects.c.hidden.is_(hidden))
+        if filter_hidden is not None:
+            attributes_filters.append(projects.c.hidden.is_(filter_hidden))

-        if published is not None:
-            attributes_filters.append(projects.c.published.is_(published))
+        if filter_published is not None:
+            attributes_filters.append(projects.c.published.is_(filter_published))

-        if trashed is not None:
+        if filter_trashed is not None:
             attributes_filters.append(
-                projects.c.trashed_at.is_not(None)
-                if trashed
+                # marked explicitly as trashed
+                (
+                    projects.c.trashed_at.is_not(None)
+                    & projects.c.trashed_explicitly.is_(True)
+                )
+                if filter_trashed
+                # not marked as trashed
                 else projects.c.trashed_at.is_(None)
             )
         query = query.where(sa.and_(*attributes_filters))
diff --git a/services/web/server/src/simcore_service_webserver/projects/models.py b/services/web/server/src/simcore_service_webserver/projects/models.py
index 37961a9aff4..d3457fb52b0 100644
--- a/services/web/server/src/simcore_service_webserver/projects/models.py
+++ b/services/web/server/src/simcore_service_webserver/projects/models.py
@@ -53,6 +53,7 @@ class ProjectDB(BaseModel):
     hidden: bool
     workspace_id: WorkspaceID | None
     trashed_at: datetime | None
+    trashed_explicitly: bool = False

     class Config:
         orm_mode = True
@@ -101,7 +102,8 @@ class Config:

 class ProjectPatchExtended(ProjectPatch):
     # Only used internally
-    trashed_at: datetime | None = None
+    trashed_at: datetime | None
+    trashed_explicitly: bool

     class Config:
         allow_population_by_field_name = True
diff --git a/services/web/server/src/simcore_service_webserver/workspaces/_workspaces_api.py b/services/web/server/src/simcore_service_webserver/workspaces/_workspaces_api.py
index 256b50de114..a645037f5a4 100644
--- a/services/web/server/src/simcore_service_webserver/workspaces/_workspaces_api.py
+++ b/services/web/server/src/simcore_service_webserver/workspaces/_workspaces_api.py
@@ -12,11 +12,11 @@
 from models_library.users import UserID
 from models_library.workspaces import UserWorkspaceAccessRightsDB, WorkspaceID
 from pydantic import NonNegativeInt
-from simcore_service_webserver.projects._db_utils import PermissionStr
-from simcore_service_webserver.workspaces.errors import WorkspaceAccessForbiddenError

+from ..projects._db_utils import PermissionStr
 from ..users.api import get_user
 from . import _workspaces_db as db
+from .errors import WorkspaceAccessForbiddenError

 _logger = logging.getLogger(__name__)
diff --git a/services/web/server/tests/unit/isolated/test_exceptions_handlers.py b/services/web/server/tests/unit/isolated/test_exceptions_handlers.py
new file mode 100644
index 00000000000..27cde72283b
--- /dev/null
+++ b/services/web/server/tests/unit/isolated/test_exceptions_handlers.py
@@ -0,0 +1,117 @@
+# pylint: disable=protected-access
+# pylint: disable=redefined-outer-name
+# pylint: disable=too-many-arguments
+# pylint: disable=too-many-statements
+# pylint: disable=unused-argument
+# pylint: disable=unused-variable
+
+
+import logging
+
+import pytest
+from aiohttp import web
+from aiohttp.test_utils import make_mocked_request
+from servicelib.aiohttp import status
+from simcore_service_webserver.errors import WebServerBaseError
+from simcore_service_webserver.exceptions_handlers import (
+    HttpErrorInfo,
+    _sort_exceptions_by_specificity,
+    create_exception_handlers_decorator,
+)
+
+
+class BasePluginError(WebServerBaseError):
+    ...
+
+
+class OneError(BasePluginError):
+    ...
+
+
+class OtherError(BasePluginError):
+    ...
+
+
+def test_sort_concrete_first():
+    assert _sort_exceptions_by_specificity([Exception, BasePluginError]) == [
+        BasePluginError,
+        Exception,
+    ]
+
+    assert _sort_exceptions_by_specificity(
+        [Exception, BasePluginError], concrete_first=False
+    ) == [
+        Exception,
+        BasePluginError,
+    ]
+
+
+def test_sort_exceptions_by_specificity():
+
+    got_exceptions_cls = _sort_exceptions_by_specificity(
+        [
+            Exception,
+            OtherError,
+            OneError,
+            BasePluginError,
+            ValueError,
+            ArithmeticError,
+            ZeroDivisionError,
+        ]
+    )
+
+    for from_, exc in enumerate(got_exceptions_cls, start=1):
+        for exc_after in got_exceptions_cls[from_:]:
+            assert not issubclass(exc_after, exc), f"{got_exceptions_cls=}"
+
+
+async def test_exception_handlers_decorator(
+    caplog: pytest.LogCaptureFixture,
+):
+
+    _handle_exceptions = create_exception_handlers_decorator(
+        exceptions_catch=BasePluginError,
+        exc_to_status_map={
+            OneError: HttpErrorInfo(
+                status_code=status.HTTP_503_SERVICE_UNAVAILABLE,
+                msg_template="This is one error for front-end",
+            )
+        },
+    )
+
+    @_handle_exceptions
+    async def _rest_handler(request: web.Request) -> web.Response:
+        if request.query.get("raise") == "OneError":
+            raise OneError
+        if request.query.get("raise") == "ArithmeticError":
+            raise ArithmeticError
+
+        return web.Response(reason="all good")
+
+    with caplog.at_level(logging.ERROR):
+
+        # emulates a successful call
+        resp = await _rest_handler(make_mocked_request("GET", "/foo"))
+        assert resp.status == status.HTTP_200_OK
+        assert resp.reason == "all good"
+
+        assert not caplog.records
+
+        # this will be passed through and caught by the outermost error middleware
+        with pytest.raises(ArithmeticError):
+            await _rest_handler(
+                make_mocked_request("GET", "/foo?raise=ArithmeticError")
+            )
+
+        assert not caplog.records
+
+        # this is a 5XX: it is converted to a response but also logged as an error
+        with pytest.raises(web.HTTPException) as exc_info:
+            await _rest_handler(make_mocked_request("GET", "/foo?raise=OneError"))

+        resp = exc_info.value
+        assert resp.status == status.HTTP_503_SERVICE_UNAVAILABLE
+        assert "front-end" in resp.reason
+
+        assert caplog.records
+        assert caplog.records[0].levelno == logging.ERROR
diff --git a/services/web/server/tests/unit/with_dbs/02/test_projects_cancellations.py b/services/web/server/tests/unit/with_dbs/02/test_projects_cancellations.py
index 6c841fa8650..aa79512a12a 100644
--- a/services/web/server/tests/unit/with_dbs/02/test_projects_cancellations.py
+++ b/services/web/server/tests/unit/with_dbs/02/test_projects_cancellations.py
@@ -4,7 +4,8 @@
 # pylint: disable=unused-variable

 import asyncio
-from typing import Any, Awaitable, Callable
+from collections.abc import Awaitable, Callable
+from typing import Any
 from urllib.parse import urlparse

 import pytest
diff --git a/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers.py b/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers.py
index bf69984d6af..8904cead4bf 100644
--- a/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers.py
+++ b/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers.py
@@ -165,7 +165,7 @@ async def _assert_get_same_project(
     project_permalink = data.pop("permalink", None)
     folder_id = data.pop("folderId", None)

-    assert data == project
+    assert data == {k: project[k] for k in data}

     if project_state:
         assert parse_obj_as(ProjectState, project_state)
@@ -201,22 +201,24 @@ async def test_list_projects(
     assert len(data) == 2

     # template project
-    project_state = data[0].pop("state")
-    project_permalink = data[0].pop("permalink")
-    folder_id = data[0].pop("folderId")
+    got = data[0]
+    project_state = got.pop("state")
+    project_permalink = got.pop("permalink")
+    folder_id = got.pop("folderId")

-    assert data[0] == template_project
+    assert got == {k: template_project[k] for k in got}
     assert not ProjectState(
         **project_state
     ).locked.value, "Templates are not locked"
     assert parse_obj_as(ProjectPermalink, project_permalink)

     # standard project
-    project_state = data[1].pop("state")
-    project_permalink = data[1].pop("permalink", None)
-    folder_id = data[1].pop("folderId")
+    got = data[1]
+    project_state = got.pop("state")
+    project_permalink = got.pop("permalink", None)
+    folder_id = got.pop("folderId")

-    assert data[1] == user_project
+    assert got == {k: user_project[k] for k in got}
     assert ProjectState(**project_state)
     assert project_permalink is None
     assert folder_id is None
@@ -227,11 +229,12 @@ async def test_list_projects(
     assert len(data) == 1

     # standard project
-    project_state = data[0].pop("state")
-    project_permalink = data[0].pop("permalink", None)
-    folder_id = data[0].pop("folderId")
+    got = data[0]
+    project_state = got.pop("state")
+    project_permalink = got.pop("permalink", None)
+    folder_id = got.pop("folderId")

-    assert data[0] == user_project
+    assert got == {k: user_project[k] for k in got}
     assert not ProjectState(
         **project_state
     ).locked.value, "Single user does not lock"
@@ -244,11 +247,12 @@ async def test_list_projects(
     assert len(data) == 1

     # template project
-    project_state = data[0].pop("state")
-    project_permalink = data[0].pop("permalink")
-    folder_id = data[0].pop("folderId")
+    got = data[0]
+    project_state = got.pop("state")
+    project_permalink = got.pop("permalink")
+    folder_id = got.pop("folderId")

-    assert data[0] == template_project
+    assert got == {k: template_project[k] for k in got}
     assert not ProjectState(
         **project_state
     ).locked.value, "Templates are not locked"
diff --git a/services/web/server/tests/unit/with_dbs/02/test_projects_states_handlers.py b/services/web/server/tests/unit/with_dbs/02/test_projects_states_handlers.py
index 02285ebb0d5..3514d3b2475 100644
--- a/services/web/server/tests/unit/with_dbs/02/test_projects_states_handlers.py
+++ b/services/web/server/tests/unit/with_dbs/02/test_projects_states_handlers.py
@@ -930,7 +930,7 @@ async def test_get_active_project(
         data_last_change_date = data.pop("lastChangeDate")
         assert user_project_last_change_date < data_last_change_date

-        assert data == user_project
+        assert data == {k: user_project[k] for k in data}
     else:
         mocked_notifications_plugin["subscribe"].assert_not_called()
diff --git a/services/web/server/tests/unit/with_dbs/03/test_project_db.py b/services/web/server/tests/unit/with_dbs/03/test_project_db.py
index 89a67734b60..fadfe561267 100644
--- a/services/web/server/tests/unit/with_dbs/03/test_project_db.py
+++ b/services/web/server/tests/unit/with_dbs/03/test_project_db.py
@@ -98,6 +98,7 @@ def _assert_added_project(
         "lastChangeDate",
         "accessRights",  # NOTE: access rights were moved away from the projects table
         "trashedAt",
+        "trashedExplicitly",
     ]
     assert {k: v for k, v in expected_prj.items() if k in _DIFFERENT_KEYS} != {
         k: v for k, v in added_prj.items() if k in _DIFFERENT_KEYS
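A consequence of the db.py filter above: an item is listed in the trash only when it was trashed explicitly, so children trashed as a side effect of trashing their parent stay out of the top-level trash listing. The same predicate restated in plain Python, for illustration only:

    from datetime import datetime

    def is_listed_in_trash(trashed_at: datetime | None, trashed_explicitly: bool) -> bool:
        return trashed_at is not None and trashed_explicitly

    assert is_listed_in_trash(datetime(2024, 11, 8), True)
    assert not is_listed_in_trash(datetime(2024, 11, 8), False)  # implicitly trashed child
    assert not is_listed_in_trash(None, False)  # not trashed at all
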
diff --git a/services/web/server/tests/unit/with_dbs/03/test_trash.py b/services/web/server/tests/unit/with_dbs/03/test_trash.py
index 5a760d5f9fd..7d6c701c522 100644
--- a/services/web/server/tests/unit/with_dbs/03/test_trash.py
+++ b/services/web/server/tests/unit/with_dbs/03/test_trash.py
@@ -14,6 +14,7 @@
 import pytest
 from aiohttp.test_utils import TestClient
 from aioresponses import aioresponses
+from models_library.api_schemas_webserver.folders_v2 import FolderGet
 from models_library.api_schemas_webserver.projects import ProjectGet, ProjectListItem
 from models_library.rest_pagination import Page
 from pytest_mock import MockerFixture
@@ -24,6 +25,7 @@
 from servicelib.aiohttp import status
 from simcore_service_webserver.db.models import UserRole
 from simcore_service_webserver.projects.models import ProjectDict
+from yarl import URL


 @pytest.fixture
@@ -174,3 +176,222 @@ async def test_trash_projects(  # noqa: PLR0915
     await asyncio.sleep(0.1)
     mock_stop_pipeline.assert_awaited()
     mock_remove_dynamic_services.assert_awaited()
+
+
+@pytest.mark.acceptance_test(
+    "For https://github.com/ITISFoundation/osparc-simcore/pull/6642"
+)
+async def test_trash_single_folder(client: TestClient, logged_user: UserInfoDict):
+    assert client.app
+
+    # CREATE a folder
+    resp = await client.post("/v0/folders", json={"name": "My first folder"})
+    data, _ = await assert_status(resp, status.HTTP_201_CREATED)
+    folder = FolderGet.parse_obj(data)
+
+    # ---------------------------------------------------------------------
+
+    # LIST NOT trashed
+    resp = await client.get("/v0/folders")
+    await assert_status(resp, status.HTTP_200_OK)
+
+    page = Page[FolderGet].parse_obj(await resp.json())
+    assert page.meta.total == 1
+
+    assert page.data[0] == folder
+
+    # LIST trashed
+    resp = await client.get("/v0/folders", params={"filters": '{"trashed": true}'})
+    await assert_status(resp, status.HTTP_200_OK)
+
+    page = Page[FolderGet].parse_obj(await resp.json())
+    assert page.meta.total == 0
+
+    # TRASH
+    assert client.app.router["trash_folder"].url_for(folder_id="folder_id") == URL(
+        "/v0/folders/folder_id:trash"
+    )
+
+    trashing_at = arrow.utcnow().datetime
+    resp = await client.post(f"/v0/folders/{folder.folder_id}:trash")
+    await assert_status(
+        resp,
+        status.HTTP_204_NO_CONTENT,
+    )
+
+    # GET
+    resp = await client.get(f"/v0/folders/{folder.folder_id}")
+    data, _ = await assert_status(resp, status.HTTP_200_OK)
+    got = FolderGet.parse_obj(data)
+    assert got.folder_id == folder.folder_id
+
+    assert got.trashed_at
+    assert trashing_at < got.trashed_at
+    assert got.trashed_at < arrow.utcnow().datetime
+
+    # LIST trashed
+    resp = await client.get("/v0/folders", params={"filters": '{"trashed": true}'})
+    await assert_status(resp, status.HTTP_200_OK)
+
+    page = Page[FolderGet].parse_obj(await resp.json())
+
+    assert page.meta.total == 1
+    assert page.data[0].folder_id == folder.folder_id
+
+    # UNTRASH
+    assert client.app.router["untrash_folder"].url_for(folder_id="folder_id") == URL(
+        "/v0/folders/folder_id:untrash"
+    )
+
+    resp = await client.post(f"/v0/folders/{folder.folder_id}:untrash")
+    data, _ = await assert_status(resp, status.HTTP_204_NO_CONTENT)
+
+    # GET
+    resp = await client.get(f"/v0/folders/{folder.folder_id}")
+    data, _ = await assert_status(resp, status.HTTP_200_OK)
+    got = FolderGet.parse_obj(data)
+
+    assert got.folder_id == folder.folder_id
+    assert got.trashed_at is None
+
+
+@pytest.mark.acceptance_test(
+    "For https://github.com/ITISFoundation/osparc-simcore/pull/6642"
+)
+async def test_trash_folder_with_content(
+    client: TestClient,
+    logged_user: UserInfoDict,
+    user_project: ProjectDict,
+    mocked_catalog: None,
+    mocked_director_v2: None,
+):
+    assert client.app
+    project_uuid = UUID(user_project["uuid"])
+
+    # CREATE a folder
+    resp = await client.post("/v0/folders", json={"name": "My first folder"})
+    data, _ = await assert_status(resp, status.HTTP_201_CREATED)
+    folder = FolderGet.parse_obj(data)
+
+    # CREATE a SUB-folder
+    resp = await client.post(
+        "/v0/folders",
+        json={"name": "My subfolder 1", "parentFolderId": folder.folder_id},
+    )
+    data, _ = await assert_status(resp, status.HTTP_201_CREATED)
+    subfolder = FolderGet.parse_obj(data)
+
+    # MOVE project to SUB-folder
+    resp = await client.put(
+        f"/v0/projects/{project_uuid}/folders/{subfolder.folder_id}"
+    )
+    await assert_status(resp, status.HTTP_204_NO_CONTENT)
+
+    # CHECK created
+    resp = await client.get("/v0/folders")
+    await assert_status(resp, status.HTTP_200_OK)
+    page = Page[FolderGet].parse_obj(await resp.json())
+    assert page.meta.total == 1
+    assert page.data[0] == folder
+
+    resp = await client.get("/v0/folders", params={"folder_id": f"{folder.folder_id}"})
+    await assert_status(resp, status.HTTP_200_OK)
+    page = Page[FolderGet].parse_obj(await resp.json())
+    assert page.meta.total == 1
+    assert page.data[0] == subfolder
+
+    resp = await client.get(
+        "/v0/projects", params={"folder_id": f"{subfolder.folder_id}"}
+    )
+    await assert_status(resp, status.HTTP_200_OK)
+    page = Page[ProjectListItem].parse_obj(await resp.json())
+    assert page.meta.total == 1
+    assert page.data[0].uuid == project_uuid
+    assert page.data[0].folder_id == subfolder.folder_id
+
+    # ---------------------------------------------------------------------
+
+    # TRASH folder
+    resp = await client.post(f"/v0/folders/{folder.folder_id}:trash")
+    await assert_status(resp, status.HTTP_204_NO_CONTENT)
+
+    # ONLY folder listed in trash. The rest is not listed anymore!
+    resp = await client.get("/v0/folders", params={"filters": '{"trashed": true}'})
+    await assert_status(resp, status.HTTP_200_OK)
+    page = Page[FolderGet].parse_obj(await resp.json())
+    assert page.meta.total == 1
+    assert page.data[0].folder_id == folder.folder_id
+
+    resp = await client.get(
+        "/v0/folders",
+        params={"filters": '{"trashed": true}', "folder_id": f"{folder.folder_id}"},
+    )
+    await assert_status(resp, status.HTTP_200_OK)
+    page = Page[FolderGet].parse_obj(await resp.json())
+    assert page.meta.total == 0
+
+    resp = await client.get(
+        "/v0/projects",
+        params={"filters": '{"trashed": true}', "folder_id": f"{subfolder.folder_id}"},
+    )
+    await assert_status(resp, status.HTTP_200_OK)
+    page = Page[ProjectListItem].parse_obj(await resp.json())
+    assert page.meta.total == 0
+
+    # CHECK marked as trashed
+    resp = await client.get(f"/v0/folders/{folder.folder_id}")
+    data, _ = await assert_status(resp, status.HTTP_200_OK)
+    got = FolderGet.parse_obj(data)
+    assert got.trashed_at is not None
+
+    resp = await client.get(f"/v0/folders/{subfolder.folder_id}")
+    data, _ = await assert_status(resp, status.HTTP_200_OK)
+    got = FolderGet.parse_obj(data)
+    assert got.trashed_at is not None
+
+    resp = await client.get(f"/v0/projects/{project_uuid}")
+    data, _ = await assert_status(resp, status.HTTP_200_OK)
+    got = ProjectGet.parse_obj(data)
+    assert got.trashed_at is not None
+
+    # UNTRASH folder
+    resp = await client.post(f"/v0/folders/{folder.folder_id}:untrash")
+    await assert_status(resp, status.HTTP_204_NO_CONTENT)
+
+    # NO folders listed in trash.
+    resp = await client.get("/v0/folders", params={"filters": '{"trashed": true}'})
+    await assert_status(resp, status.HTTP_200_OK)
+    page = Page[FolderGet].parse_obj(await resp.json())
+    assert page.meta.total == 0
+
+    resp = await client.get(
+        "/v0/folders",
+        params={"filters": '{"trashed": true}', "folder_id": f"{folder.folder_id}"},
+    )
+    await assert_status(resp, status.HTTP_200_OK)
+    page = Page[FolderGet].parse_obj(await resp.json())
+    assert page.meta.total == 0
+
+    resp = await client.get(
+        "/v0/projects",
+        params={"filters": '{"trashed": true}', "folder_id": f"{subfolder.folder_id}"},
+    )
+    await assert_status(resp, status.HTTP_200_OK)
+    page = Page[ProjectListItem].parse_obj(await resp.json())
+    assert page.meta.total == 0
+
+    # CHECK no longer marked as trashed
+    resp = await client.get(f"/v0/folders/{folder.folder_id}")
+    data, _ = await assert_status(resp, status.HTTP_200_OK)
+    got = FolderGet.parse_obj(data)
+    assert got.trashed_at is None
+
+    resp = await client.get(f"/v0/folders/{subfolder.folder_id}")
+    data, _ = await assert_status(resp, status.HTTP_200_OK)
+    got = FolderGet.parse_obj(data)
+    assert got.trashed_at is None
+
+    resp = await client.get(f"/v0/projects/{project_uuid}")
+    data, _ = await assert_status(resp, status.HTTP_200_OK)
+    got = ProjectGet.parse_obj(data)
+    assert got.trashed_at is None
diff --git a/services/web/server/tests/unit/with_dbs/04/folders/test_folders.py b/services/web/server/tests/unit/with_dbs/04/folders/test_folders.py
index 345e3875628..d2df6efb6e7 100644
--- a/services/web/server/tests/unit/with_dbs/04/folders/test_folders.py
+++ b/services/web/server/tests/unit/with_dbs/04/folders/test_folders.py
@@ -1,16 +1,18 @@
-import asyncio
-
 # pylint: disable=redefined-outer-name
 # pylint: disable=unused-argument
 # pylint: disable=unused-variable
 # pylint: disable=too-many-arguments
 # pylint: disable=too-many-statements
+
+
+import asyncio
 from http import HTTPStatus
 from unittest import mock

 import pytest
 from aiohttp.test_utils import TestClient
 from models_library.api_schemas_webserver.folders_v2 import FolderGet
+from pydantic import parse_obj_as
 from pytest_mock import MockerFixture
 from pytest_simcore.helpers.assert_checks import assert_status
 from pytest_simcore.helpers.webserver_login import LoggedUser, UserInfoDict
@@ -21,7 +23,10 @@
 from servicelib.aiohttp import status
 from servicelib.aiohttp.application_keys import APP_FIRE_AND_FORGET_TASKS_KEY
 from simcore_service_webserver.db.models import UserRole
-from simcore_service_webserver.projects._groups_db import update_or_insert_project_group
+from simcore_service_webserver.projects._groups_db import (
+    GroupID,
+    update_or_insert_project_group,
+)
 from simcore_service_webserver.projects.models import ProjectDict


@@ -35,7 +40,7 @@ async def test_folders_user_role_permissions(
     assert client.app
     url = client.app.router["list_folders"].url_for()
-    resp = await client.get(url.path)
+    resp = await client.get(f"{url}")
     await assert_status(resp, expected.ok)


@@ -50,68 +55,66 @@ async def test_folders_full_workflow(
     # list user folders
     url = client.app.router["list_folders"].url_for()
-    resp = await client.get(url.path)
+    resp = await client.get(f"{url}")
     data, _ = await assert_status(resp, status.HTTP_200_OK)
     assert data == []

     # create a new folder
     url = client.app.router["create_folder"].url_for()
-    resp = await client.post(url.path, json={"name": "My first folder"})
-    added_folder, _ = await assert_status(resp, status.HTTP_201_CREATED)
-    assert FolderGet.parse_obj(added_folder)
+    resp = await client.post(f"{url}", json={"name": "My first folder"})
client.post(f"{url}", json={"name": "My first folder"}) + data, _ = await assert_status(resp, status.HTTP_201_CREATED) + added_folder = FolderGet.parse_obj(data) # list user folders url = client.app.router["list_folders"].url_for() - resp = await client.get(url.path) + resp = await client.get(f"{url}") data, _, meta, links = await assert_status( resp, status.HTTP_200_OK, include_meta=True, include_links=True ) assert len(data) == 1 - assert data[0]["folderId"] == added_folder["folderId"] - assert data[0]["name"] == "My first folder" + assert data[0]["folderId"] == added_folder.folder_id + assert data[0]["name"] == added_folder.name assert meta["count"] == 1 assert links # get a user folder - url = client.app.router["get_folder"].url_for( - folder_id=f"{added_folder['folderId']}" - ) - resp = await client.get(url) + url = client.app.router["get_folder"].url_for(folder_id=f"{added_folder.folder_id}") + resp = await client.get(f"{url}") data, _ = await assert_status(resp, status.HTTP_200_OK) - assert FolderGet.parse_obj(data) - assert data["folderId"] == added_folder["folderId"] - assert data["name"] == "My first folder" + got_folder = FolderGet.parse_obj(data) + assert got_folder.folder_id == added_folder.folder_id + assert got_folder.name == added_folder.name # update a folder url = client.app.router["replace_folder"].url_for( - folder_id=f"{added_folder['folderId']}" + folder_id=f"{added_folder.folder_id}" ) resp = await client.put( - url.path, - json={ - "name": "My Second folder", - }, + f"{url}", + json={"name": "My Second folder"}, ) data, _ = await assert_status(resp, status.HTTP_200_OK) - assert FolderGet.parse_obj(data) + updated_folder = FolderGet.parse_obj(data) + assert updated_folder.folder_id == got_folder.folder_id + assert updated_folder.name != got_folder.name # list user folders url = client.app.router["list_folders"].url_for() - resp = await client.get(url.path) + resp = await client.get(f"{url}") data, _ = await assert_status(resp, status.HTTP_200_OK) assert len(data) == 1 assert data[0]["name"] == "My Second folder" # delete a folder url = client.app.router["delete_folder"].url_for( - folder_id=f"{added_folder['folderId']}" + folder_id=f"{added_folder.folder_id}" ) - resp = await client.delete(url.path) + resp = await client.delete(f"{url}") data, _ = await assert_status(resp, status.HTTP_204_NO_CONTENT) # list user folders url = client.app.router["list_folders"].url_for() - resp = await client.get(url.path) + resp = await client.get(f"{url}") data, _ = await assert_status(resp, status.HTTP_200_OK) assert data == [] @@ -127,19 +130,19 @@ async def test_sub_folders_full_workflow( # list user folders url = client.app.router["list_folders"].url_for() - resp = await client.get(url.path) + resp = await client.get(f"{url}") data, _ = await assert_status(resp, status.HTTP_200_OK) assert data == [] # create a new folder url = client.app.router["create_folder"].url_for() - resp = await client.post(url.path, json={"name": "My first folder"}) + resp = await client.post(f"{url}", json={"name": "My first folder"}) root_folder, _ = await assert_status(resp, status.HTTP_201_CREATED) # create a subfolder folder url = client.app.router["create_folder"].url_for() resp = await client.post( - url.path, + f"{url}", json={ "name": "My subfolder", "parentFolderId": root_folder["folderId"], @@ -149,22 +152,25 @@ async def test_sub_folders_full_workflow( # list user root folders url = client.app.router["list_folders"].url_for() - resp = await client.get(url.path) + resp = await 
client.get(f"{url}") data, _ = await assert_status(resp, status.HTTP_200_OK) assert len(data) == 1 assert data[0]["name"] == "My first folder" # list user specific folder - base_url = client.app.router["list_folders"].url_for() - url = base_url.with_query({"folder_id": f"{subfolder_folder['folderId']}"}) - resp = await client.get(url) + url = ( + client.app.router["list_folders"] + .url_for() + .with_query({"folder_id": f"{subfolder_folder['folderId']}"}) + ) + resp = await client.get(f"{url}") data, _ = await assert_status(resp, status.HTTP_200_OK) assert len(data) == 0 # create a sub sub folder url = client.app.router["create_folder"].url_for() resp = await client.post( - url.path, + f"{url}", json={ "name": "My sub sub folder", "parentFolderId": subfolder_folder["folderId"], @@ -173,9 +179,12 @@ async def test_sub_folders_full_workflow( subsubfolder_folder, _ = await assert_status(resp, status.HTTP_201_CREATED) # list user subfolder folders - base_url = client.app.router["list_folders"].url_for() - url = base_url.with_query({"folder_id": f"{subfolder_folder['folderId']}"}) - resp = await client.get(url) + url = ( + client.app.router["list_folders"] + .url_for() + .with_query({"folder_id": f"{subfolder_folder['folderId']}"}) + ) + resp = await client.get(f"{url}") data, _ = await assert_status(resp, status.HTTP_200_OK) assert len(data) == 1 assert data[0]["name"] == "My sub sub folder" @@ -186,20 +195,20 @@ async def test_sub_folders_full_workflow( folder_id=f"{subfolder_folder['folderId']}", ) resp = await client.put( - url.path, + f"{url}", json={ "name": "My Updated Folder", "parentFolderId": f"{subsubfolder_folder['folderId']}", }, ) - await assert_status(resp, status.HTTP_400_BAD_REQUEST) + await assert_status(resp, status.HTTP_409_CONFLICT) # move sub sub folder to root folder url = client.app.router["replace_folder"].url_for( folder_id=f"{subsubfolder_folder['folderId']}" ) resp = await client.put( - url.path, + f"{url}", json={ "name": "My Updated Folder", "parentFolderId": None, @@ -209,9 +218,8 @@ async def test_sub_folders_full_workflow( assert FolderGet.parse_obj(data) # list user root folders - base_url = client.app.router["list_folders"].url_for() - url = base_url.with_query({"folder_id": "null"}) - resp = await client.get(url) + url = client.app.router["list_folders"].url_for().with_query({"folder_id": "null"}) + resp = await client.get(f"{url}") data, _ = await assert_status(resp, status.HTTP_200_OK) assert len(data) == 2 @@ -227,20 +235,20 @@ async def test_project_folder_movement_full_workflow( # create a new folder url = client.app.router["create_folder"].url_for() - resp = await client.post(url.path, json={"name": "My first folder"}) + resp = await client.post(f"{url}", json={"name": "My first folder"}) root_folder, _ = await assert_status(resp, status.HTTP_201_CREATED) # add project to the folder url = client.app.router["replace_project_folder"].url_for( folder_id=f"{root_folder['folderId']}", project_id=f"{user_project['uuid']}" ) - resp = await client.put(url.path) + resp = await client.put(f"{url}") await assert_status(resp, status.HTTP_204_NO_CONTENT) # create a sub folder url = client.app.router["create_folder"].url_for() resp = await client.post( - url.path, + f"{url}", json={ "name": "My sub folder", "parentFolderId": root_folder["folderId"], @@ -252,14 +260,14 @@ async def test_project_folder_movement_full_workflow( url = client.app.router["replace_project_folder"].url_for( folder_id=f"{sub_folder['folderId']}", project_id=f"{user_project['uuid']}" ) - resp = 
await client.put(url.path) + resp = await client.put(f"{url}") await assert_status(resp, status.HTTP_204_NO_CONTENT) # move project to the root directory url = client.app.router["replace_project_folder"].url_for( folder_id="null", project_id=f"{user_project['uuid']}" ) - resp = await client.put(url.path) + resp = await client.put(f"{url}") await assert_status(resp, status.HTTP_204_NO_CONTENT) @@ -284,7 +292,7 @@ async def test_project_listing_inside_of_private_folder( # create a new folder url = client.app.router["create_folder"].url_for() - resp = await client.post(url.path, json={"name": "My first folder"}) + resp = await client.post(f"{url}", json={"name": "My first folder"}) original_user_folder, _ = await assert_status(resp, status.HTTP_201_CREATED) # add project to the folder @@ -292,13 +300,16 @@ async def test_project_listing_inside_of_private_folder( folder_id=f"{original_user_folder['folderId']}", project_id=f"{user_project['uuid']}", ) - resp = await client.put(url.path) + resp = await client.put(f"{url}") await assert_status(resp, status.HTTP_204_NO_CONTENT) # list project in user private folder - base_url = client.app.router["list_projects"].url_for() - url = base_url.with_query({"folder_id": f"{original_user_folder['folderId']}"}) - resp = await client.get(url) + url = ( + client.app.router["list_projects"] + .url_for() + .with_query({"folder_id": f"{original_user_folder['folderId']}"}) + ) + resp = await client.get(f"{url}") data, _ = await assert_status(resp, status.HTTP_200_OK) assert len(data) == 1 assert data[0]["uuid"] == user_project["uuid"] @@ -308,29 +319,32 @@ async def test_project_listing_inside_of_private_folder( # Create new user async with LoggedUser(client) as new_logged_user: # Try to list folder that user doesn't have access to - base_url = client.app.router["list_projects"].url_for() - url = base_url.with_query({"folder_id": f"{original_user_folder['folderId']}"}) - resp = await client.get(url) - _, errors = await assert_status( - resp, - status.HTTP_403_FORBIDDEN, + url = ( + client.app.router["list_projects"] + .url_for() + .with_query({"folder_id": f"{original_user_folder['folderId']}"}) ) + resp = await client.get(f"{url}") + _, errors = await assert_status(resp, status.HTTP_403_FORBIDDEN) assert errors # Now we will share the project with the new user await update_or_insert_project_group( client.app, project_id=user_project["uuid"], - group_id=new_logged_user["primary_gid"], + group_id=parse_obj_as(GroupID, new_logged_user["primary_gid"]), read=True, write=True, delete=False, ) # list new user root folder - base_url = client.app.router["list_projects"].url_for() - url = base_url.with_query({"folder_id": "null"}) - resp = await client.get(url) + url = ( + client.app.router["list_projects"] + .url_for() + .with_query({"folder_id": "null"}) + ) + resp = await client.get(f"{url}") data, _ = await assert_status(resp, status.HTTP_200_OK) assert len(data) == 1 assert data[0]["uuid"] == user_project["uuid"] @@ -339,7 +353,7 @@ async def test_project_listing_inside_of_private_folder( # create a new folder url = client.app.router["create_folder"].url_for() - resp = await client.post(url.path, json={"name": "New user folder"}) + resp = await client.post(f"{url}", json={"name": "New user folder"}) new_user_folder, _ = await assert_status(resp, status.HTTP_201_CREATED) # add project to the folder @@ -347,13 +361,16 @@ async def test_project_listing_inside_of_private_folder( folder_id=f"{new_user_folder['folderId']}", project_id=f"{user_project['uuid']}", ) - resp = 
await client.put(url.path) + resp = await client.put(f"{url}") await assert_status(resp, status.HTTP_204_NO_CONTENT) # list new user specific folder - base_url = client.app.router["list_projects"].url_for() - url = base_url.with_query({"folder_id": f"{new_user_folder['folderId']}"}) - resp = await client.get(url) + url = ( + client.app.router["list_projects"] + .url_for() + .with_query({"folder_id": f"{new_user_folder['folderId']}"}) + ) + resp = await client.get(f"{url}") data, _ = await assert_status(resp, status.HTTP_200_OK) assert len(data) == 1 assert data[0]["uuid"] == user_project["uuid"] @@ -394,14 +411,14 @@ async def test_folders_deletion( # create a new folder url = client.app.router["create_folder"].url_for() - resp = await client.post(url.path, json={"name": "My first folder"}) + resp = await client.post(f"{url}", json={"name": "My first folder"}) root_folder, _ = await assert_status(resp, status.HTTP_201_CREATED) assert FolderGet.parse_obj(root_folder) # create a subfolder folder url = client.app.router["create_folder"].url_for() resp = await client.post( - url.path, + f"{url}", json={ "name": "My subfolder 1", "parentFolderId": root_folder["folderId"], @@ -412,7 +429,7 @@ async def test_folders_deletion( # create a subfolder folder url = client.app.router["create_folder"].url_for() resp = await client.post( - url.path, + f"{url}", json={ "name": "My subfolder 2", "parentFolderId": root_folder["folderId"], @@ -425,13 +442,13 @@ async def test_folders_deletion( folder_id=f"{subfolder_2['folderId']}", project_id=f"{user_project['uuid']}", ) - resp = await client.put(url.path) + resp = await client.put(f"{url}") await assert_status(resp, status.HTTP_204_NO_CONTENT) # create a sub sub folder folder url = client.app.router["create_folder"].url_for() resp = await client.post( - url.path, + f"{url}", json={ "name": "My sub sub folder", "parentFolderId": subfolder_1["folderId"], @@ -441,21 +458,24 @@ async def test_folders_deletion( # list user folders url = client.app.router["list_folders"].url_for() - resp = await client.get(url.path) + resp = await client.get(f"{url}") data, _ = await assert_status(resp, status.HTTP_200_OK) assert len(data) == 1 # list subfolder projects - base_url = client.app.router["list_projects"].url_for() - url = base_url.with_query({"folder_id": f"{subfolder_2['folderId']}"}) - resp = await client.get(url) + url = ( + client.app.router["list_projects"] + .url_for() + .with_query({"folder_id": f"{subfolder_2['folderId']}"}) + ) + resp = await client.get(f"{url}") data, _ = await assert_status(resp, status.HTTP_200_OK) assert len(data) == 1 assert data[0]["uuid"] == user_project["uuid"] # list root projects - base_url = client.app.router["list_projects"].url_for() - resp = await client.get(base_url) + url = client.app.router["list_projects"].url_for() + resp = await client.get(f"{url}") data, _ = await assert_status(resp, status.HTTP_200_OK) assert len(data) == 0 @@ -463,24 +483,27 @@ async def test_folders_deletion( url = client.app.router["delete_folder"].url_for( folder_id=f"{subfolder_1['folderId']}" ) - resp = await client.delete(url.path) + resp = await client.delete(f"{url}") await assert_status(resp, status.HTTP_204_NO_CONTENT) # delete a root folder url = client.app.router["delete_folder"].url_for( folder_id=f"{root_folder['folderId']}" ) - resp = await client.delete(url.path) + resp = await client.delete(f"{url}") await assert_status(resp, status.HTTP_204_NO_CONTENT) - fire_and_forget_tasks = client.app[APP_FIRE_AND_FORGET_TASKS_KEY] - t: 
-    t: asyncio.Task = list(fire_and_forget_tasks)[0]
-    assert t.get_name().startswith("fire_and_forget_task_delete_project_task_")
-    await t
+    fire_and_forget_task: asyncio.Task = next(
+        iter(client.app[APP_FIRE_AND_FORGET_TASKS_KEY])
+    )
+    assert fire_and_forget_task.get_name().startswith(
+        "fire_and_forget_task_delete_project_task_"
+    )
+    await fire_and_forget_task
     assert len(client.app[APP_FIRE_AND_FORGET_TASKS_KEY]) == 0

     # list root projects (The project should have been deleted)
-    base_url = client.app.router["list_projects"].url_for()
-    resp = await client.get(base_url)
+    url = client.app.router["list_projects"].url_for()
+    resp = await client.get(f"{url}")
     data, _ = await assert_status(resp, status.HTTP_200_OK)
     assert len(data) == 0
diff --git a/services/web/server/tests/unit/with_dbs/04/studies_dispatcher/test_studies_dispatcher_studies_access.py b/services/web/server/tests/unit/with_dbs/04/studies_dispatcher/test_studies_dispatcher_studies_access.py
index 330913490ae..3cb82c2bf20 100644
--- a/services/web/server/tests/unit/with_dbs/04/studies_dispatcher/test_studies_dispatcher_studies_access.py
+++ b/services/web/server/tests/unit/with_dbs/04/studies_dispatcher/test_studies_dispatcher_studies_access.py
@@ -62,10 +62,11 @@ def _assert_same_projects(got: dict, expected: dict):
         "workbench",
         "accessRights",
         "ui",
+        "trashedExplicitly",
     }
     for key in expected:
         if key not in exclude:
-            assert got[key] == expected[key], "Failed in %s" % key
+            assert got[key] == expected[key], f"Failed in {key}"


 def _is_user_authenticated(session: ClientSession) -> bool:
diff --git a/tests/e2e/tutorials/sleepers_project_template_sql.csv b/tests/e2e/tutorials/sleepers_project_template_sql.csv
index 29c16a6f416..6dbcd7d2a26 100644
--- a/tests/e2e/tutorials/sleepers_project_template_sql.csv
+++ b/tests/e2e/tutorials/sleepers_project_template_sql.csv
@@ -1,2 +1,2 @@
-id,type,uuid,name,description,thumbnail,prj_owner,creation_date,last_change_date,workbench,published,access_rights,dev,ui,classifiers,quality,hidden,workspace_id,trashed_at
-10,TEMPLATE,ed6c2f58-dc16-445d-bb97-e989e2611603,Sleepers,5 sleepers interconnected,"",,2019-06-06 14:34:19.631,2019-06-06 14:34:28.647,"{""027e3ff9-3119-45dd-b8a2-2e31661a7385"": {""key"": ""simcore/services/comp/itis/sleeper"", ""version"": ""1.0.0"", ""label"": ""sleeper 0"", ""inputs"": {""in_2"": 2}, ""inputAccess"": {""in_1"": ""Invisible"", ""in_2"": ""ReadOnly""}, ""inputNodes"": [], ""outputs"": {}, ""progress"": 0, ""thumbnail"": """", ""position"": {""x"": 50, ""y"": 300}}, ""562aaea9-95ff-46f3-8e84-db8f3c9e3a39"": {""key"": ""simcore/services/comp/itis/sleeper"", ""version"": ""1.0.0"", ""label"": ""sleeper 1"", ""inputs"": {""in_1"": {""nodeUuid"": ""027e3ff9-3119-45dd-b8a2-2e31661a7385"", ""output"": ""out_1""}, ""in_2"": 2}, ""inputNodes"": [""027e3ff9-3119-45dd-b8a2-2e31661a7385""], ""outputs"": {}, ""progress"": 0, ""thumbnail"": """", ""position"": {""x"": 300, ""y"": 200}}, ""bf405067-d168-44ba-b6dc-bb3e08542f92"": {""key"": ""simcore/services/comp/itis/sleeper"", ""version"": ""1.0.0"", ""label"": ""sleeper 2"", ""inputs"": {""in_1"": {""nodeUuid"": ""562aaea9-95ff-46f3-8e84-db8f3c9e3a39"", ""output"": ""out_1""}, ""in_2"": {""nodeUuid"": ""562aaea9-95ff-46f3-8e84-db8f3c9e3a39"", ""output"": ""out_2""}}, ""inputNodes"": [""562aaea9-95ff-46f3-8e84-db8f3c9e3a39""], ""outputs"": {}, ""progress"": 0, ""thumbnail"": """", ""position"": {""x"": 550, ""y"": 200}}, ""de2578c5-431e-5065-a079-a5a0476e3c10"": {""key"": ""simcore/services/comp/itis/sleeper"", ""version"": ""1.0.0"", ""label"": ""sleeper 3"", ""inputs"": {""in_2"": {""nodeUuid"": ""027e3ff9-3119-45dd-b8a2-2e31661a7385"", ""output"": ""out_2""}}, ""inputNodes"": [""027e3ff9-3119-45dd-b8a2-2e31661a7385""], ""outputs"": {}, ""progress"": 0, ""thumbnail"": """", ""position"": {""x"": 420, ""y"": 400}}, ""de2578c5-431e-559d-aa19-dc9293e10e4c"": {""key"": ""simcore/services/comp/itis/sleeper"", ""version"": ""1.0.0"", ""label"": ""sleeper 4"", ""inputs"": {""in_1"": {""nodeUuid"": ""bf405067-d168-44ba-b6dc-bb3e08542f92"", ""output"": ""out_1""}, ""in_2"": {""nodeUuid"": ""de2578c5-431e-5065-a079-a5a0476e3c10"", ""output"": ""out_2""}}, ""inputNodes"": [""bf405067-d168-44ba-b6dc-bb3e08542f92"", ""de2578c5-431e-5065-a079-a5a0476e3c10""], ""outputs"": {}, ""progress"": 0, ""thumbnail"": """", ""position"": {""x"": 800, ""y"": 300}}}",true,"{""1"": {""read"":true, ""write"":false, ""delete"":false}}", "{}", "{}", "{}", "{}",false,,
+id,type,uuid,name,description,thumbnail,prj_owner,creation_date,last_change_date,workbench,published,access_rights,dev,ui,classifiers,quality,hidden,workspace_id,trashed_at,trashed_explicitly
+10,TEMPLATE,ed6c2f58-dc16-445d-bb97-e989e2611603,Sleepers,5 sleepers interconnected,"",,2019-06-06 14:34:19.631,2019-06-06 14:34:28.647,"{""027e3ff9-3119-45dd-b8a2-2e31661a7385"": {""key"": ""simcore/services/comp/itis/sleeper"", ""version"": ""1.0.0"", ""label"": ""sleeper 0"", ""inputs"": {""in_2"": 2}, ""inputAccess"": {""in_1"": ""Invisible"", ""in_2"": ""ReadOnly""}, ""inputNodes"": [], ""outputs"": {}, ""progress"": 0, ""thumbnail"": """", ""position"": {""x"": 50, ""y"": 300}}, ""562aaea9-95ff-46f3-8e84-db8f3c9e3a39"": {""key"": ""simcore/services/comp/itis/sleeper"", ""version"": ""1.0.0"", ""label"": ""sleeper 1"", ""inputs"": {""in_1"": {""nodeUuid"": ""027e3ff9-3119-45dd-b8a2-2e31661a7385"", ""output"": ""out_1""}, ""in_2"": 2}, ""inputNodes"": [""027e3ff9-3119-45dd-b8a2-2e31661a7385""], ""outputs"": {}, ""progress"": 0, ""thumbnail"": """", ""position"": {""x"": 300, ""y"": 200}}, ""bf405067-d168-44ba-b6dc-bb3e08542f92"": {""key"": ""simcore/services/comp/itis/sleeper"", ""version"": ""1.0.0"", ""label"": ""sleeper 2"", ""inputs"": {""in_1"": {""nodeUuid"": ""562aaea9-95ff-46f3-8e84-db8f3c9e3a39"", ""output"": ""out_1""}, ""in_2"": {""nodeUuid"": ""562aaea9-95ff-46f3-8e84-db8f3c9e3a39"", ""output"": ""out_2""}}, ""inputNodes"": [""562aaea9-95ff-46f3-8e84-db8f3c9e3a39""], ""outputs"": {}, ""progress"": 0, ""thumbnail"": """", ""position"": {""x"": 550, ""y"": 200}}, ""de2578c5-431e-5065-a079-a5a0476e3c10"": {""key"": ""simcore/services/comp/itis/sleeper"", ""version"": ""1.0.0"", ""label"": ""sleeper 3"", ""inputs"": {""in_2"": {""nodeUuid"": ""027e3ff9-3119-45dd-b8a2-2e31661a7385"", ""output"": ""out_2""}}, ""inputNodes"": [""027e3ff9-3119-45dd-b8a2-2e31661a7385""], ""outputs"": {}, ""progress"": 0, ""thumbnail"": """", ""position"": {""x"": 420, ""y"": 400}}, ""de2578c5-431e-559d-aa19-dc9293e10e4c"": {""key"": ""simcore/services/comp/itis/sleeper"", ""version"": ""1.0.0"", ""label"": ""sleeper 4"", ""inputs"": {""in_1"": {""nodeUuid"": ""bf405067-d168-44ba-b6dc-bb3e08542f92"", ""output"": ""out_1""}, ""in_2"": {""nodeUuid"": ""de2578c5-431e-5065-a079-a5a0476e3c10"", ""output"": ""out_2""}}, ""inputNodes"": [""bf405067-d168-44ba-b6dc-bb3e08542f92"", ""de2578c5-431e-5065-a079-a5a0476e3c10""], ""outputs"": {}, ""progress"": 0, ""thumbnail"": """", ""position"": {""x"": 800, ""y"": 300}}}",true,"{""1"": {""read"":true, ""write"":false, ""delete"":false}}", "{}", "{}", "{}", "{}",false,,,false
"{}",false,,,false From 1564d50e3e8566fbf876ea2f6c49f2929f233fcf Mon Sep 17 00:00:00 2001 From: Odei Maiz <33152403+odeimaiz@users.noreply.github.com> Date: Fri, 8 Nov 2024 13:21:17 +0100 Subject: [PATCH 03/22] =?UTF-8?q?=F0=9F=8E=A8=20[Frontend]=20Enhance:=20sy?= =?UTF-8?q?ncing=20tree=20(#6687)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../class/osparc/dashboard/MoveResourceTo.js | 9 +- .../dashboard/WorkspacesAndFoldersTree.js | 30 ++++- .../class/osparc/data/model/IframeHandler.js | 14 +- .../desktop/organizations/MembersList.js | 3 +- .../organizations/OrganizationsList.js | 5 +- .../class/osparc/editor/OrganizationEditor.js | 5 + .../source/class/osparc/info/ServiceLarge.js | 126 +++++++++++------- .../class/osparc/metadata/ServicesInStudy.js | 7 +- .../osparc/node/slideshow/BaseNodeView.js | 10 +- .../class/osparc/service/ServiceListItem.js | 7 +- .../source/class/osparc/widget/NodesTree.js | 8 +- .../class/osparc/workbench/ServiceCatalog.js | 7 +- .../class/osparc/workbench/WorkbenchUI.js | 10 +- 13 files changed, 144 insertions(+), 97 deletions(-) diff --git a/services/static-webserver/client/source/class/osparc/dashboard/MoveResourceTo.js b/services/static-webserver/client/source/class/osparc/dashboard/MoveResourceTo.js index 11a744ba9ea..cd9a98d1d6f 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/MoveResourceTo.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/MoveResourceTo.js @@ -39,7 +39,14 @@ qx.Class.define("osparc.dashboard.MoveResourceTo", { const item = selection.getItem(0); this.__selectedWorkspaceId = item.getWorkspaceId(); this.__selectedFolderId = item.getFolderId(); - moveButton.setEnabled(this.__currentWorkspaceId !== this.__selectedWorkspaceId || this.__currentFolderId !== this.__selectedFolderId); + if (this.__selectedWorkspaceId === -1) { + // "Shared Workspaces" + moveButton.setEnabled(false); + } else { + // In principle, valid location + // disable if it's the current location + moveButton.setEnabled(this.__currentWorkspaceId !== this.__selectedWorkspaceId || this.__currentFolderId !== this.__selectedFolderId); + } } }, this); moveButton.addListener("execute", () => { diff --git a/services/static-webserver/client/source/class/osparc/dashboard/WorkspacesAndFoldersTree.js b/services/static-webserver/client/source/class/osparc/dashboard/WorkspacesAndFoldersTree.js index 93f1125049e..c65318bfcd3 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/WorkspacesAndFoldersTree.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/WorkspacesAndFoldersTree.js @@ -74,7 +74,7 @@ qx.Class.define("osparc.dashboard.WorkspacesAndFoldersTree", { osparc.store.Workspaces.getInstance().addListener("workspaceRemoved", e => { const workspace = e.getData(); - this.__removeWorkspace(workspace); + this.__workspaceRemoved(workspace); }, this); this.getSelection().addListener("change", () => { @@ -227,11 +227,21 @@ qx.Class.define("osparc.dashboard.WorkspacesAndFoldersTree", { this.__populateFolder(workspaceModel, workspace.getWorkspaceId(), null); }, - __removeWorkspace: function(workspace) { + __workspaceRemoved: function(workspace) { + // remove it from the tree const sharedWorkspaceModel = this.__getModel(-1, null); const idx = sharedWorkspaceModel.getChildren().toArray().findIndex(w => workspace.getWorkspaceId() === w.getWorkspaceId()); if (idx > -1) { - sharedWorkspaceModel.getChildren().toArray().splice(idx, 1); + 
sharedWorkspaceModel.getChildren().removeAt(idx); + } + + // remove it from the cached models + const modelFound = this.__getModel(workspace.getWorkspaceId(), null); + if (modelFound) { + const index = this.__models.indexOf(modelFound); + if (index > -1) { // only splice array when item is found + this.__models.splice(index, 1); // 2nd parameter means remove one item only + } } }, @@ -283,7 +293,19 @@ qx.Class.define("osparc.dashboard.WorkspacesAndFoldersTree", { if (parentModel) { const idx = parentModel.getChildren().toArray().findIndex(c => folder.getWorkspaceId() === c.getWorkspaceId() && folder.getFolderId() === c.getFolderId()); if (idx > -1) { - parentModel.getChildren().toArray().splice(idx, 1); + parentModel.getChildren().removeAt(idx); + } + } + + if (oldParentFolderId === undefined) { + // it was removed, not moved + // remove it from the cached models + const modelFound = this.__getModel(folder.getWorkspaceId(), folder.getParentFolderId()); + if (modelFound) { + const index = this.__models.indexOf(modelFound); + if (index > -1) { // only splice array when item is found + this.__models.splice(index, 1); // 2nd parameter means remove one item only + } } } }, diff --git a/services/static-webserver/client/source/class/osparc/data/model/IframeHandler.js b/services/static-webserver/client/source/class/osparc/data/model/IframeHandler.js index 9620c80daf1..fa037642af4 100644 --- a/services/static-webserver/client/source/class/osparc/data/model/IframeHandler.js +++ b/services/static-webserver/client/source/class/osparc/data/model/IframeHandler.js @@ -84,7 +84,9 @@ qx.Class.define("osparc.data.model.IframeHandler", { this.__unresponsiveRetries = 5; this.__nodeState(); - this.getIFrame().resetSource(); + if (this.getIFrame()) { + this.getIFrame().resetSource(); + } }, __initIFrame: function() { @@ -365,7 +367,9 @@ qx.Class.define("osparc.data.model.IframeHandler", { // will switch to the loading page node.resetServiceUrl(); - this.getIFrame().resetSource(); + if (this.getIFrame()) { + this.getIFrame().resetSource(); + } this.fireEvent("iframeChanged"); } }, @@ -396,8 +400,10 @@ qx.Class.define("osparc.data.model.IframeHandler", { const status = node.getStatus().getInteractive(); // it might have been stopped if (["running", "ready"].includes(status)) { - this.getIFrame().resetSource(); - this.getIFrame().setSource(node.getServiceUrl()); + if (this.getIFrame()) { + this.getIFrame().resetSource(); + this.getIFrame().setSource(node.getServiceUrl()); + } // fire event to force switching to iframe's content: // it is required in those cases where the native 'load' event isn't triggered (voila) diff --git a/services/static-webserver/client/source/class/osparc/desktop/organizations/MembersList.js b/services/static-webserver/client/source/class/osparc/desktop/organizations/MembersList.js index e5cb935cdf8..eb694304233 100644 --- a/services/static-webserver/client/source/class/osparc/desktop/organizations/MembersList.js +++ b/services/static-webserver/client/source/class/osparc/desktop/organizations/MembersList.js @@ -358,7 +358,8 @@ qx.Class.define("osparc.desktop.organizations.MembersList", { } }) .catch(err => { - osparc.FlashMessenger.getInstance().logAs(this.tr("Something went wrong adding the user"), "ERROR"); + const errorMessage = err["message"] || this.tr("Something went wrong adding the user"); + osparc.FlashMessenger.getInstance().logAs(errorMessage, "ERROR"); console.error(err); }); }, diff --git 
a/services/static-webserver/client/source/class/osparc/desktop/organizations/OrganizationsList.js b/services/static-webserver/client/source/class/osparc/desktop/organizations/OrganizationsList.js index 705e943ef5a..740f54211fa 100644 --- a/services/static-webserver/client/source/class/osparc/desktop/organizations/OrganizationsList.js +++ b/services/static-webserver/client/source/class/osparc/desktop/organizations/OrganizationsList.js @@ -101,7 +101,7 @@ qx.Class.define("osparc.desktop.organizations.OrganizationsList", { createOrgBtn.addListener("execute", function() { const newOrg = true; const orgEditor = new osparc.editor.OrganizationEditor(newOrg); - const title = this.tr("Organization Details Editor"); + const title = this.tr("New Organization"); const win = osparc.ui.window.Window.popUpInWindow(orgEditor, title, 400, 250); orgEditor.addListener("createOrg", () => { this.__createOrganization(win, orgEditor.getChildControl("create"), orgEditor); @@ -298,7 +298,8 @@ qx.Class.define("osparc.desktop.organizations.OrganizationsList", { }); }) .catch(err => { - osparc.FlashMessenger.getInstance().logAs(this.tr("Something went wrong creating ") + name, "ERROR"); + const errorMessage = err["message"] || this.tr("Something went wrong creating ") + name; + osparc.FlashMessenger.getInstance().logAs(errorMessage, "ERROR"); button.setFetching(false); console.error(err); }) diff --git a/services/static-webserver/client/source/class/osparc/editor/OrganizationEditor.js b/services/static-webserver/client/source/class/osparc/editor/OrganizationEditor.js index b817d11a1d0..f4be5233d2f 100644 --- a/services/static-webserver/client/source/class/osparc/editor/OrganizationEditor.js +++ b/services/static-webserver/client/source/class/osparc/editor/OrganizationEditor.js @@ -30,6 +30,11 @@ qx.Class.define("osparc.editor.OrganizationEditor", { this.getChildControl("description"); this.getChildControl("thumbnail"); newOrg ? 
this.getChildControl("create") : this.getChildControl("save"); + + this.addListener("appear", () => { + title.focus(); + title.activate(); + }); }, properties: { diff --git a/services/static-webserver/client/source/class/osparc/info/ServiceLarge.js b/services/static-webserver/client/source/class/osparc/info/ServiceLarge.js index a95b78be639..217c13e58e9 100644 --- a/services/static-webserver/client/source/class/osparc/info/ServiceLarge.js +++ b/services/static-webserver/client/source/class/osparc/info/ServiceLarge.js @@ -20,14 +20,14 @@ qx.Class.define("osparc.info.ServiceLarge", { extend: osparc.info.CardLarge, /** - * @param serviceData {Object} Serialized Service Object + * @param metadata {Object} Serialized Service Object * @param instance {Object} instance related data * @param openOptions {Boolean} open edit options in new window or fire event */ - construct: function(serviceData, instance = null, openOptions = true) { + construct: function(metadata, instance = null, openOptions = true) { this.base(arguments); - this.setService(serviceData); + this.setService(metadata); if (instance) { if ("nodeId" in instance) { @@ -79,6 +79,19 @@ qx.Class.define("osparc.info.ServiceLarge", { } }, + statics: { + popUpInWindow: function(serviceLarge) { + const metadata = serviceLarge.getService(); + const versionDisplay = osparc.service.Utils.extractVersionDisplay(metadata); + const title = `${metadata["name"]} ${versionDisplay}`; + const width = osparc.info.CardLarge.WIDTH; + const height = osparc.info.CardLarge.HEIGHT; + osparc.ui.window.Window.popUpInWindow(serviceLarge, title, width, height).set({ + maxHeight: height + }); + }, + }, + members: { _rebuildLayout: function() { this._removeAll(); @@ -90,72 +103,85 @@ qx.Class.define("osparc.info.ServiceLarge", { vBox.add(deprecated); } - const title = this.__createTitle(); - const titleLayout = this.__createViewWithEdit(title, this.__openTitleEditor); - - const extraInfo = this.__extraInfo(); - const extraInfoLayout = this.__createExtraInfo(extraInfo); - - const bounds = this.getBounds(); - const offset = 30; - const maxThumbnailHeight = extraInfo.length*20; - let widgetWidth = bounds ? 
bounds.width - offset : 500 - offset; - let thumbnailWidth = widgetWidth - 2 * osparc.info.CardLarge.PADDING - osparc.info.CardLarge.EXTRA_INFO_WIDTH; - thumbnailWidth = Math.min(thumbnailWidth - 20, osparc.info.CardLarge.THUMBNAIL_MAX_WIDTH); - const thumbnail = this.__createThumbnail(thumbnailWidth, maxThumbnailHeight); - const thumbnailLayout = this.__createViewWithEdit(thumbnail, this.__openThumbnailEditor); - thumbnailLayout.getLayout().set({ - alignX: "center" - }); - - const infoAndThumbnail = new qx.ui.container.Composite(new qx.ui.layout.HBox(3).set({ - alignX: "center" - })); - infoAndThumbnail.add(extraInfoLayout); - infoAndThumbnail.add(thumbnailLayout, { - flex: 1 - }); - - let descriptionUi = null; - if (osparc.service.Utils.canIWrite(this.getService()["accessRights"])) { - descriptionUi = this.__createDescriptionUi(); - } - const description = this.__createDescription(); const editInTitle = this.__createViewWithEdit(description.getChildren()[0], this.__openDescriptionEditor); description.addAt(editInTitle, 0); - let resources = null; - if (!osparc.desktop.credits.Utils.areWalletsEnabled()) { - resources = this.__createResources(); - } - const copyMetadataButton = new qx.ui.form.Button(this.tr("Copy Raw metadata"), "@FontAwesome5Solid/copy/12").set({ allowGrowX: false }); copyMetadataButton.addListener("execute", () => osparc.utils.Utils.copyTextToClipboard(osparc.utils.Utils.prettifyJson(this.getService())), this); if ( this.getService()["descriptionUi"] && !osparc.service.Utils.canIWrite(this.getService()["accessRights"]) && description.getChildren().length > 1 ) { - // Show description only - vBox.add(description.getChildren()[1]); + // Besides the description, also show the copy-Id buttons + const buttonsLayout = new qx.ui.container.Composite(new qx.ui.layout.HBox(10)); + if (this.getNodeId()) { + const studyAlias = osparc.product.Utils.getStudyAlias({firstUpperCase: true}); + const copyStudyIdButton = new qx.ui.form.Button(this.tr(`Copy ${studyAlias} Id`), "@FontAwesome5Solid/copy/12").set({ + toolTipText: qx.locale.Manager.tr("Copy to clipboard"), + }); + copyStudyIdButton.addListener("execute", this.__copyStudyIdToClipboard, this); + buttonsLayout.add(copyStudyIdButton); + vBox.add(buttonsLayout); + + const copyNodeIdButton = new qx.ui.form.Button(this.tr("Copy Service Id"), "@FontAwesome5Solid/copy/12").set({ + toolTipText: qx.locale.Manager.tr("Copy to clipboard"), + }); + copyNodeIdButton.addListener("execute", this.__copyNodeIdToClipboard, this); + buttonsLayout.add(copyNodeIdButton); + vBox.add(buttonsLayout); + } + // Also copyMetadataButton if tester if (osparc.data.Permissions.getInstance().isTester()) { - // Also copyMetadataButton if tester - vBox.add(copyMetadataButton); + buttonsLayout.add(copyMetadataButton); + vBox.add(buttonsLayout); } + // Show description only + vBox.add(description.getChildren()[1]); } else { + const title = this.__createTitle(); + const titleLayout = this.__createViewWithEdit(title, this.__openTitleEditor); vBox.add(titleLayout); + + const extraInfo = this.__extraInfo(); + const extraInfoLayout = this.__createExtraInfo(extraInfo); + const bounds = this.getBounds(); + const offset = 30; + const maxThumbnailHeight = extraInfo.length*20; + let widgetWidth = bounds ?
bounds.width - offset : 500 - offset; + let thumbnailWidth = widgetWidth - 2 * osparc.info.CardLarge.PADDING - osparc.info.CardLarge.EXTRA_INFO_WIDTH; + thumbnailWidth = Math.min(thumbnailWidth - 20, osparc.info.CardLarge.THUMBNAIL_MAX_WIDTH); + const thumbnail = this.__createThumbnail(thumbnailWidth, maxThumbnailHeight); + const thumbnailLayout = this.__createViewWithEdit(thumbnail, this.__openThumbnailEditor); + thumbnailLayout.getLayout().set({ + alignX: "center" + }); + const infoAndThumbnail = new qx.ui.container.Composite(new qx.ui.layout.HBox(3).set({ + alignX: "center" + })); + infoAndThumbnail.add(extraInfoLayout); + infoAndThumbnail.add(thumbnailLayout, { + flex: 1 + }); vBox.add(infoAndThumbnail); - if (descriptionUi) { - vBox.add(descriptionUi); + + if (osparc.service.Utils.canIWrite(this.getService()["accessRights"])) { + const descriptionUi = this.__createDescriptionUi(); + if (descriptionUi) { + vBox.add(descriptionUi); + } } vBox.add(description); - if (resources) { - vBox.add(resources); + + if (!osparc.desktop.credits.Utils.areWalletsEnabled()) { + const resources = this.__createResources(); + if (resources) { + vBox.add(resources); + } } vBox.add(copyMetadataButton); } @@ -429,6 +455,10 @@ qx.Class.define("osparc.info.ServiceLarge", { titleEditor.open(); }, + __copyStudyIdToClipboard: function() { + osparc.utils.Utils.copyTextToClipboard(this.getStudyId()); + }, + __copyNodeIdToClipboard: function() { osparc.utils.Utils.copyTextToClipboard(this.getNodeId()); }, diff --git a/services/static-webserver/client/source/class/osparc/metadata/ServicesInStudy.js b/services/static-webserver/client/source/class/osparc/metadata/ServicesInStudy.js index b51dc1c7515..eae2df3f1b9 100644 --- a/services/static-webserver/client/source/class/osparc/metadata/ServicesInStudy.js +++ b/services/static-webserver/client/source/class/osparc/metadata/ServicesInStudy.js @@ -147,12 +147,7 @@ qx.Class.define("osparc.metadata.ServicesInStudy", { studyId: this._studyData["uuid"], label: node["label"] }); - const title = this.tr("Service information"); - const width = osparc.info.CardLarge.WIDTH; - const height = osparc.info.CardLarge.HEIGHT; - osparc.ui.window.Window.popUpInWindow(serviceDetails, title, width, height).set({ - maxHeight: height - }); + osparc.info.ServiceLarge.popUpInWindow(serviceDetails); }, this); this._servicesGrid.add(infoButton, { row: i, diff --git a/services/static-webserver/client/source/class/osparc/node/slideshow/BaseNodeView.js b/services/static-webserver/client/source/class/osparc/node/slideshow/BaseNodeView.js index e7de026cd94..a2ee4daab00 100644 --- a/services/static-webserver/client/source/class/osparc/node/slideshow/BaseNodeView.js +++ b/services/static-webserver/client/source/class/osparc/node/slideshow/BaseNodeView.js @@ -217,17 +217,13 @@ qx.Class.define("osparc.node.slideshow.BaseNodeView", { __openServiceDetails: function() { const node = this.getNode(); - const serviceDetails = new osparc.info.ServiceLarge(node.getMetaData(), { + const metadata = node.getMetaData(); + const serviceDetails = new osparc.info.ServiceLarge(metadata, { nodeId: node.getNodeId(), label: node.getLabel(), studyId: node.getStudy().getUuid() }); - const title = this.tr("Service information"); - const width = osparc.info.CardLarge.WIDTH; - const height = osparc.info.CardLarge.HEIGHT; - osparc.ui.window.Window.popUpInWindow(serviceDetails, title, width, height).set({ - maxHeight: height - }); + osparc.info.ServiceLarge.popUpInWindow(serviceDetails); }, __openInstructions: function() { diff 
--git a/services/static-webserver/client/source/class/osparc/service/ServiceListItem.js b/services/static-webserver/client/source/class/osparc/service/ServiceListItem.js index 959859389ac..c970c2df3a9 100644 --- a/services/static-webserver/client/source/class/osparc/service/ServiceListItem.js +++ b/services/static-webserver/client/source/class/osparc/service/ServiceListItem.js @@ -161,12 +161,7 @@ qx.Class.define("osparc.service.ServiceListItem", { osparc.store.Services.getService(key, version) .then(serviceMetadata => { const serviceDetails = new osparc.info.ServiceLarge(serviceMetadata); - const title = this.tr("Service information"); - const width = osparc.info.CardLarge.WIDTH; - const height = osparc.info.CardLarge.HEIGHT; - osparc.ui.window.Window.popUpInWindow(serviceDetails, title, width, height).set({ - maxHeight: height - }); + osparc.info.ServiceLarge.popUpInWindow(serviceDetails); }); }, diff --git a/services/static-webserver/client/source/class/osparc/widget/NodesTree.js b/services/static-webserver/client/source/class/osparc/widget/NodesTree.js index 0b543b6a158..e88930b09c5 100644 --- a/services/static-webserver/client/source/class/osparc/widget/NodesTree.js +++ b/services/static-webserver/client/source/class/osparc/widget/NodesTree.js @@ -290,15 +290,13 @@ qx.Class.define("osparc.widget.NodesTree", { }); } else { const node = study.getWorkbench().getNode(nodeId); - const serviceDetails = new osparc.info.ServiceLarge(node.getMetaData(), { + const metadata = node.getMetaData(); + const serviceDetails = new osparc.info.ServiceLarge(metadata, { nodeId, label: node.getLabel(), studyId: study.getUuid() }); - const title = this.tr("Service information"); - osparc.ui.window.Window.popUpInWindow(serviceDetails, title, width, height).set({ - maxHeight: height - }); + osparc.info.ServiceLarge.popUpInWindow(serviceDetails); } } }, diff --git a/services/static-webserver/client/source/class/osparc/workbench/ServiceCatalog.js b/services/static-webserver/client/source/class/osparc/workbench/ServiceCatalog.js index faf60dd0034..b9dd0867a4c 100644 --- a/services/static-webserver/client/source/class/osparc/workbench/ServiceCatalog.js +++ b/services/static-webserver/client/source/class/osparc/workbench/ServiceCatalog.js @@ -300,12 +300,7 @@ qx.Class.define("osparc.workbench.ServiceCatalog", { __showServiceDetails: async function() { const serviceMetadata = await this.__getSelectedService(); const serviceDetails = new osparc.info.ServiceLarge(serviceMetadata); - const title = this.tr("Service information"); - const width = osparc.info.CardLarge.WIDTH; - const height = osparc.info.CardLarge.HEIGHT; - osparc.ui.window.Window.popUpInWindow(serviceDetails, title, width, height).set({ - maxHeight: height, - }); + osparc.info.ServiceLarge.popUpInWindow(serviceDetails); }, __onCancel: function() { diff --git a/services/static-webserver/client/source/class/osparc/workbench/WorkbenchUI.js b/services/static-webserver/client/source/class/osparc/workbench/WorkbenchUI.js index 21c55e487d1..504faf3c33f 100644 --- a/services/static-webserver/client/source/class/osparc/workbench/WorkbenchUI.js +++ b/services/static-webserver/client/source/class/osparc/workbench/WorkbenchUI.js @@ -1665,17 +1665,13 @@ qx.Class.define("osparc.workbench.WorkbenchUI", { __openNodeInfo: function(nodeId) { if (nodeId) { const node = this.getStudy().getWorkbench().getNode(nodeId); - const serviceDetails = new osparc.info.ServiceLarge(node.getMetaData(), { + const metadata = node.getMetaData(); + const serviceDetails = new 
osparc.info.ServiceLarge(metadata, { nodeId, label: node.getLabel(), studyId: this.getStudy().getUuid() }); - const title = this.tr("Service information"); - const width = osparc.info.CardLarge.WIDTH; - const height = osparc.info.CardLarge.HEIGHT; - osparc.ui.window.Window.popUpInWindow(serviceDetails, title, width, height).set({ - maxHeight: height - }); + osparc.info.ServiceLarge.popUpInWindow(serviceDetails); } }, From e32787bc1b99c07668a189403e6672f3634138ab Mon Sep 17 00:00:00 2001 From: Sylvain <35365065+sanderegg@users.noreply.github.com> Date: Mon, 11 Nov 2024 08:44:11 +0100 Subject: [PATCH 04/22] =?UTF-8?q?=E2=9C=A8Computational=20backend:=20persi?= =?UTF-8?q?st=20cancellation=20request=20(#6694)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../8bfe65a5e294_add_cancellation_mark.py | 29 +++++ .../models/comp_runs.py | 6 + .../models/comp_runs.py | 32 ++++- .../modules/comp_scheduler/_base_scheduler.py | 26 +++-- .../comp_scheduler/_scheduler_factory.py | 2 +- .../modules/db/repositories/comp_runs.py | 13 ++- services/director-v2/tests/conftest.py | 2 +- .../tests/unit/with_dbs/conftest.py | 47 +++++++- ...t_modules_comp_scheduler_dask_scheduler.py | 110 ++++++++++++++++-- 9 files changed, 240 insertions(+), 27 deletions(-) create mode 100644 packages/postgres-database/src/simcore_postgres_database/migration/versions/8bfe65a5e294_add_cancellation_mark.py diff --git a/packages/postgres-database/src/simcore_postgres_database/migration/versions/8bfe65a5e294_add_cancellation_mark.py b/packages/postgres-database/src/simcore_postgres_database/migration/versions/8bfe65a5e294_add_cancellation_mark.py new file mode 100644 index 00000000000..ecbe20b40e8 --- /dev/null +++ b/packages/postgres-database/src/simcore_postgres_database/migration/versions/8bfe65a5e294_add_cancellation_mark.py @@ -0,0 +1,29 @@ +"""add cancellation mark + +Revision ID: 8bfe65a5e294 +Revises: 5ad02358751a +Create Date: 2024-11-08 14:40:59.266181+00:00 + +""" +import sqlalchemy as sa +from alembic import op + +# revision identifiers, used by Alembic. +revision = "8bfe65a5e294" +down_revision = "5ad02358751a" +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.add_column( + "comp_runs", sa.Column("cancelled", sa.DateTime(timezone=True), nullable=True) + ) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + op.drop_column("comp_runs", "cancelled") + # ### end Alembic commands ### diff --git a/packages/postgres-database/src/simcore_postgres_database/models/comp_runs.py b/packages/postgres-database/src/simcore_postgres_database/models/comp_runs.py index e402a171562..eb84cefaa76 100644 --- a/packages/postgres-database/src/simcore_postgres_database/models/comp_runs.py +++ b/packages/postgres-database/src/simcore_postgres_database/models/comp_runs.py @@ -99,6 +99,12 @@ nullable=True, doc="When the run was finished", ), + sa.Column( + "cancelled", + sa.DateTime(timezone=True), + nullable=True, + doc="If filled, when cancellation was requested", + ), sa.Column("metadata", JSONB, nullable=True, doc="the run optional metadata"), sa.Column( "use_on_demand_clusters", diff --git a/services/director-v2/src/simcore_service_director_v2/models/comp_runs.py b/services/director-v2/src/simcore_service_director_v2/models/comp_runs.py index 1d7800b9788..2af0646c3d3 100644 --- a/services/director-v2/src/simcore_service_director_v2/models/comp_runs.py +++ b/services/director-v2/src/simcore_service_director_v2/models/comp_runs.py @@ -46,6 +46,7 @@ class CompRunsAtDB(BaseModel): modified: datetime.datetime started: datetime.datetime | None ended: datetime.datetime | None + cancelled: datetime.datetime | None metadata: RunMetadataDict = RunMetadataDict() use_on_demand_clusters: bool @@ -72,7 +73,7 @@ def convert_null_to_default_cluster_id(cls, v): @classmethod def ensure_utc(cls, v: datetime.datetime | None) -> datetime.datetime | None: if v is not None and v.tzinfo is None: - v = v.replace(tzinfo=datetime.timezone.utc) + v = v.replace(tzinfo=datetime.UTC) return v @validator("metadata", pre=True) @@ -93,9 +94,22 @@ class Config: "user_id": 132, "cluster_id": 0, "iteration": 42, + "result": "UNKNOWN", + "created": "2021-03-01 13:07:34.19161", + "modified": "2021-03-01 13:07:34.19161", + "cancelled": None, + "use_on_demand_clusters": False, + }, + { + "run_id": 432, + "project_uuid": "65fee9d2-e030-452c-a29c-45d288577ca5", + "user_id": 132, + "cluster_id": None, # this defaults to DEFAULT_CLUSTER_ID + "iteration": 42, "result": "NOT_STARTED", "created": "2021-03-01 13:07:34.19161", "modified": "2021-03-01 13:07:34.19161", + "cancelled": None, "use_on_demand_clusters": False, }, { @@ -109,6 +123,7 @@ class Config: "modified": "2021-03-01 13:07:34.19161", "started": "2021-03-01 8:07:34.19161", "ended": "2021-03-01 13:07:34.10", + "cancelled": None, "metadata": { "node_id_names_map": {}, "product_name": "osparc", @@ -118,5 +133,20 @@ class Config: }, "use_on_demand_clusters": False, }, + { + "run_id": 43243, + "project_uuid": "65fee9d2-e030-452c-a29c-45d288577ca5", + "user_id": 132, + "cluster_id": 123, + "iteration": 12, + "result": "SUCCESS", + "created": "2021-03-01 13:07:34.19161", + "modified": "2021-03-01 13:07:34.19161", + "started": "2021-03-01 8:07:34.19161", + "ended": "2021-03-01 13:07:34.10", + "cancelled": None, + "metadata": None, + "use_on_demand_clusters": False, + }, ] } diff --git a/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_base_scheduler.py b/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_base_scheduler.py index 08396686e43..cae539596d4 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_base_scheduler.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_base_scheduler.py @@ -47,7 +47,7 @@ ) from ...core.settings import ComputationalBackendSettings from
...models.comp_pipelines import CompPipelineAtDB -from ...models.comp_runs import CompRunsAtDB, RunMetadataDict +from ...models.comp_runs import RunMetadataDict from ...models.comp_tasks import CompTaskAtDB from ...utils.comp_scheduler import ( COMPLETED_STATES, @@ -131,7 +131,7 @@ async def _triage_changed_tasks( class ScheduledPipelineParams: cluster_id: ClusterID run_metadata: RunMetadataDict - mark_for_cancellation: bool = False + mark_for_cancellation: datetime.datetime | None use_on_demand_clusters: bool @@ -169,7 +169,7 @@ async def run_new_pipeline( return runs_repo = CompRunsRepository.instance(self.db_engine) - new_run: CompRunsAtDB = await runs_repo.create( + new_run = await runs_repo.create( user_id=user_id, project_id=project_id, cluster_id=cluster_id, @@ -182,6 +182,7 @@ async def run_new_pipeline( cluster_id=cluster_id, run_metadata=new_run.metadata, use_on_demand_clusters=use_on_demand_clusters, + mark_for_cancellation=None, ) await publish_project_log( self.rabbitmq_client, @@ -212,11 +213,18 @@ async def stop_pipeline( selected_iteration = iteration # mark the scheduled pipeline for stopping - self.scheduled_pipelines[ - (user_id, project_id, selected_iteration) - ].mark_for_cancellation = True - # ensure the scheduler starts right away - self._wake_up_scheduler_now() + updated_comp_run = await CompRunsRepository.instance( + self.db_engine + ).mark_for_cancellation( + user_id=user_id, project_id=project_id, iteration=selected_iteration + ) + if updated_comp_run: + assert updated_comp_run.cancelled is not None # nosec + self.scheduled_pipelines[ + (user_id, project_id, selected_iteration) + ].mark_for_cancellation = updated_comp_run.cancelled + # ensure the scheduler starts right away + self._wake_up_scheduler_now() async def schedule_all_pipelines(self) -> None: self.wake_up_event.clear() @@ -343,7 +351,7 @@ def _need_heartbeat(task: CompTaskAtDB) -> bool: if task.last_heartbeat is None: assert task.start # nosec return bool( - (utc_now - task.start.replace(tzinfo=datetime.timezone.utc)) + (utc_now - task.start.replace(tzinfo=datetime.UTC)) > self.service_runtime_heartbeat_interval ) return bool( diff --git a/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_scheduler_factory.py b/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_scheduler_factory.py index 458950e9798..f8b648eaf48 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_scheduler_factory.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_scheduler_factory.py @@ -47,7 +47,7 @@ async def create_from_db(app: FastAPI) -> BaseCompScheduler: r.cluster_id if r.cluster_id is not None else DEFAULT_CLUSTER_ID ), run_metadata=r.metadata, - mark_for_cancellation=False, + mark_for_cancellation=r.cancelled, use_on_demand_clusters=r.use_on_demand_clusters, ) for r in runs diff --git a/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_runs.py b/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_runs.py index 4f9a8e42b53..955b9dd5858 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_runs.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_runs.py @@ -3,6 +3,7 @@ from collections import deque from typing import Any +import arrow import sqlalchemy as sa from aiopg.sa.result import RowProxy from models_library.clusters import DEFAULT_CLUSTER_ID, ClusterID @@ 
-146,10 +147,20 @@ async def set_run_result( ) -> CompRunsAtDB | None: values: dict[str, Any] = {"result": RUNNING_STATE_TO_DB[result_state]} if final_state: - values.update({"ended": datetime.datetime.now(tz=datetime.UTC)}) + values.update({"ended": arrow.utcnow().datetime}) return await self.update( user_id, project_id, iteration, **values, ) + + async def mark_for_cancellation( + self, *, user_id: UserID, project_id: ProjectID, iteration: PositiveInt + ) -> CompRunsAtDB | None: + return await self.update( + user_id, + project_id, + iteration, + cancelled=arrow.utcnow().datetime, + ) diff --git a/services/director-v2/tests/conftest.py b/services/director-v2/tests/conftest.py index 4e415254486..63abe3d0984 100644 --- a/services/director-v2/tests/conftest.py +++ b/services/director-v2/tests/conftest.py @@ -218,7 +218,7 @@ async def initialized_app(mock_env: EnvVarsDict) -> AsyncIterable[FastAPI]: @pytest.fixture() async def async_client(initialized_app: FastAPI) -> AsyncIterable[httpx.AsyncClient]: async with httpx.AsyncClient( - app=initialized_app, + transport=httpx.ASGITransport(app=initialized_app), base_url="http://director-v2.testserver.io", headers={"Content-Type": "application/json"}, ) as client: diff --git a/services/director-v2/tests/unit/with_dbs/conftest.py b/services/director-v2/tests/unit/with_dbs/conftest.py index 8dd5527f00a..516730d4e14 100644 --- a/services/director-v2/tests/unit/with_dbs/conftest.py +++ b/services/director-v2/tests/unit/with_dbs/conftest.py @@ -11,6 +11,7 @@ from typing import Any, cast from uuid import uuid4 +import arrow import pytest import sqlalchemy as sa from _helpers import PublishedProject, RunningProject @@ -318,6 +319,7 @@ async def running_project( ) -> RunningProject: user = registered_user() created_project = await project(user, workbench=fake_workbench_without_outputs) + now_time = arrow.utcnow().datetime return RunningProject( project=created_project, pipeline=pipeline( @@ -329,9 +331,50 @@ async def running_project( project=created_project, state=StateType.RUNNING, progress=0.0, - start=datetime.datetime.now(tz=datetime.UTC), + start=now_time, + ), + runs=runs( + user=user, + project=created_project, + started=now_time, + result=StateType.RUNNING, + ), + ) + + +@pytest.fixture +async def running_project_mark_for_cancellation( + registered_user: Callable[..., dict[str, Any]], + project: Callable[..., Awaitable[ProjectAtDB]], + pipeline: Callable[..., CompPipelineAtDB], + tasks: Callable[..., list[CompTaskAtDB]], + runs: Callable[..., CompRunsAtDB], + fake_workbench_without_outputs: dict[str, Any], + fake_workbench_adjacency: dict[str, Any], +) -> RunningProject: + user = registered_user() + created_project = await project(user, workbench=fake_workbench_without_outputs) + now_time = arrow.utcnow().datetime + return RunningProject( + project=created_project, + pipeline=pipeline( + project_id=f"{created_project.uuid}", + dag_adjacency_list=fake_workbench_adjacency, + ), + tasks=tasks( + user=user, + project=created_project, + state=StateType.RUNNING, + progress=0.0, + start=now_time, + ), + runs=runs( + user=user, + project=created_project, + result=StateType.RUNNING, + started=now_time, + cancelled=now_time + datetime.timedelta(seconds=5), ), - runs=runs(user=user, project=created_project, result=StateType.RUNNING), ) diff --git a/services/director-v2/tests/unit/with_dbs/test_modules_comp_scheduler_dask_scheduler.py b/services/director-v2/tests/unit/with_dbs/test_modules_comp_scheduler_dask_scheduler.py index fbc90204f83..f9e5ff33c4b 100644 
--- a/services/director-v2/tests/unit/with_dbs/test_modules_comp_scheduler_dask_scheduler.py +++ b/services/director-v2/tests/unit/with_dbs/test_modules_comp_scheduler_dask_scheduler.py @@ -103,9 +103,9 @@ def _assert_dask_client_correctly_initialized( ) mocked_dask_client.register_handlers.assert_called_once_with( TaskHandlers( - cast( + cast( # noqa: SLF001 DaskScheduler, scheduler - )._task_progress_change_handler, # noqa: SLF001 + )._task_progress_change_handler, cast(DaskScheduler, scheduler)._task_log_change_handler, # noqa: SLF001 ) ) @@ -280,9 +280,10 @@ def test_scheduler_raises_exception_for_missing_dependencies( settings = AppSettings.create_from_envs() app = init_app(settings) - with pytest.raises(ConfigurationError): - with TestClient(app, raise_server_exceptions=True) as _: - pass + with pytest.raises(ConfigurationError), TestClient( + app, raise_server_exceptions=True + ) as _: + pass async def test_empty_pipeline_is_not_scheduled( @@ -367,7 +368,7 @@ async def test_misconfigured_pipeline_is_not_scheduled( assert u_id == user["id"] assert p_id == sleepers_project.uuid assert it > 0 - assert params.mark_for_cancellation is False + assert params.mark_for_cancellation is None # check the database was properly updated async with aiopg_engine.acquire() as conn: result = await conn.execute( @@ -418,7 +419,7 @@ async def _assert_start_pipeline( assert u_id == published_project.project.prj_owner assert p_id == published_project.project.uuid assert it > 0 - assert params.mark_for_cancellation is False + assert params.mark_for_cancellation is None assert params.run_metadata == run_metadata # check the database is correctly updated, the run is published @@ -1029,11 +1030,9 @@ async def test_task_progress_triggers( parent_project_id=None, ), ) - await cast( + await cast( # noqa: SLF001 DaskScheduler, scheduler - )._task_progress_change_handler( # noqa: SLF001 - progress_event.json() - ) + )._task_progress_change_handler(progress_event.json()) # NOTE: not sure whether it should switch to STARTED.. it would make sense await _assert_comp_tasks_db( aiopg_engine, @@ -1207,7 +1206,7 @@ async def test_handling_scheduling_after_reboot( mocked_clean_task_output_fct: mock.MagicMock, reboot_state: RebootState, ): - """After the dask client is rebooted, or that the director-v2 reboots the scheduler + """After the dask client is rebooted, or the director-v2 reboots, the dv-2 internal scheduler shall continue scheduling correctly. Even though the task might have continued to run in the dask-scheduler.""" @@ -1279,6 +1278,93 @@ async def mocked_get_task_result(_job_id: str) -> TaskOutputData: ) +async def test_handling_cancellation_of_jobs_after_reboot( + with_disabled_scheduler_task: None, + mocked_dask_client: mock.MagicMock, + aiopg_engine: aiopg.sa.engine.Engine, + running_project_mark_for_cancellation: RunningProject, + scheduler: BaseCompScheduler, + mocked_parse_output_data_fct: mock.MagicMock, + mocked_clean_task_output_fct: mock.MagicMock, +): + """A running pipeline was cancelled by a user and the DV-2 was restarted BEFORE + it could actually cancel the task. 
On reboot the DV-2 shall recover + and actually cancel the pipeline properly""" + + # check initial status + await _assert_comp_run_db( + aiopg_engine, running_project_mark_for_cancellation, RunningState.STARTED + ) + await _assert_comp_tasks_db( + aiopg_engine, + running_project_mark_for_cancellation.project.uuid, + [t.node_id for t in running_project_mark_for_cancellation.tasks], + expected_state=RunningState.STARTED, + expected_progress=0, + ) + + # the backend shall report the tasks as running + async def mocked_get_tasks_status(job_ids: list[str]) -> list[DaskClientTaskState]: + return [DaskClientTaskState.PENDING_OR_STARTED for j in job_ids] + + mocked_dask_client.get_tasks_status.side_effect = mocked_get_tasks_status + # Running the scheduler, should actually cancel the run now + await run_comp_scheduler(scheduler) + mocked_dask_client.abort_computation_task.assert_called() + assert mocked_dask_client.abort_computation_task.call_count == len( + [ + t.node_id + for t in running_project_mark_for_cancellation.tasks + if t.node_class == NodeClass.COMPUTATIONAL + ] + ) + # in the DB they are still running, they will be stopped in the next iteration + await _assert_comp_tasks_db( + aiopg_engine, + running_project_mark_for_cancellation.project.uuid, + [ + t.node_id + for t in running_project_mark_for_cancellation.tasks + if t.node_class == NodeClass.COMPUTATIONAL + ], + expected_state=RunningState.STARTED, + expected_progress=0, + ) + await _assert_comp_run_db( + aiopg_engine, running_project_mark_for_cancellation, RunningState.STARTED + ) + + # the backend shall now report the tasks as aborted + async def mocked_get_tasks_status_aborted( + job_ids: list[str], + ) -> list[DaskClientTaskState]: + return [DaskClientTaskState.ABORTED for j in job_ids] + + mocked_dask_client.get_tasks_status.side_effect = mocked_get_tasks_status_aborted + + async def _return_random_task_result(job_id) -> TaskOutputData: + raise TaskCancelledError + + mocked_dask_client.get_task_result.side_effect = _return_random_task_result + await run_comp_scheduler(scheduler) + # now should be stopped + await _assert_comp_tasks_db( + aiopg_engine, + running_project_mark_for_cancellation.project.uuid, + [ + t.node_id + for t in running_project_mark_for_cancellation.tasks + if t.node_class == NodeClass.COMPUTATIONAL + ], + expected_state=RunningState.ABORTED, + expected_progress=1, + ) + await _assert_comp_run_db( + aiopg_engine, running_project_mark_for_cancellation, RunningState.ABORTED + ) + mocked_clean_task_output_fct.assert_called() + + @pytest.fixture def with_fast_service_heartbeat_s(monkeypatch: pytest.MonkeyPatch) -> int: seconds = 1 From ce9d3a590704ee8a5b4ab01dddd9bd59db1f6e0f Mon Sep 17 00:00:00 2001 From: Matus Drobuliak <60785969+matusdrobuliak66@users.noreply.github.com> Date: Mon, 11 Nov 2024 12:41:58 +0100 Subject: [PATCH 05/22] =?UTF-8?q?=E2=99=BB=EF=B8=8F=20refactor=20project?= =?UTF-8?q?=20listing=20DB=20function=20(=F0=9F=9A=A8=20=20We=20no=20longe?= =?UTF-8?q?r=20list=20projects=20that=20do=20not=20have=20a=20product=20as?= =?UTF-8?q?signed)=20(#6692)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../src/models_library/folders.py | 34 +- .../src/models_library/workspaces.py | 34 +- .../projects/_crud_api_read.py | 57 +-- .../simcore_service_webserver/projects/db.py | 454 ++++++++---------- .../02/test_projects_crud_handlers.py | 9 +- 5 files changed, 291 insertions(+), 297 deletions(-) diff --git a/packages/models-library/src/models_library/folders.py 
b/packages/models-library/src/models_library/folders.py index 4d73618750c..485e74b86c8 100644 --- a/packages/models-library/src/models_library/folders.py +++ b/packages/models-library/src/models_library/folders.py @@ -1,13 +1,41 @@ from datetime import datetime +from enum import auto from typing import TypeAlias -from models_library.users import GroupID, UserID -from models_library.workspaces import WorkspaceID -from pydantic import BaseModel, Field, PositiveInt +from pydantic import BaseModel, Field, PositiveInt, validator + +from .users import GroupID, UserID +from .utils.enums import StrAutoEnum +from .workspaces import WorkspaceID FolderID: TypeAlias = PositiveInt +class FolderScope(StrAutoEnum): + ROOT = auto() + SPECIFIC = auto() + ALL = auto() + + +class FolderQuery(BaseModel): + folder_scope: FolderScope + folder_id: PositiveInt | None = None + + @validator("folder_id", pre=True, always=True) + @classmethod + def validate_folder_id(cls, value, values): + scope = values.get("folder_scope") + if scope == FolderScope.SPECIFIC and value is None: + raise ValueError( + "folder_id must be provided when folder_scope is SPECIFIC." + ) + if scope != FolderScope.SPECIFIC and value is not None: + raise ValueError( + "folder_id should be None when folder_scope is not SPECIFIC." + ) + return value + + # # DB # diff --git a/packages/models-library/src/models_library/workspaces.py b/packages/models-library/src/models_library/workspaces.py index c08e02501cb..e5b816623fe 100644 --- a/packages/models-library/src/models_library/workspaces.py +++ b/packages/models-library/src/models_library/workspaces.py @@ -1,13 +1,41 @@ from datetime import datetime +from enum import auto from typing import TypeAlias -from models_library.access_rights import AccessRights -from models_library.users import GroupID -from pydantic import BaseModel, Field, PositiveInt +from pydantic import BaseModel, Field, PositiveInt, validator + +from .access_rights import AccessRights +from .users import GroupID +from .utils.enums import StrAutoEnum WorkspaceID: TypeAlias = PositiveInt +class WorkspaceScope(StrAutoEnum): + PRIVATE = auto() + SHARED = auto() + ALL = auto() + + +class WorkspaceQuery(BaseModel): + workspace_scope: WorkspaceScope + workspace_id: PositiveInt | None = None + + @validator("workspace_id", pre=True, always=True) + @classmethod + def validate_workspace_id(cls, value, values): + scope = values.get("workspace_scope") + if scope == WorkspaceScope.SHARED and value is None: + raise ValueError( + "workspace_id must be provided when workspace_scope is SHARED." + ) + if scope != WorkspaceScope.SHARED and value is not None: + raise ValueError( + "workspace_id should be None when workspace_scope is not SHARED." 
+ ) + return value + + # # DB # diff --git a/services/web/server/src/simcore_service_webserver/projects/_crud_api_read.py b/services/web/server/src/simcore_service_webserver/projects/_crud_api_read.py index f8b6aee4ff9..4d4352d5229 100644 --- a/services/web/server/src/simcore_service_webserver/projects/_crud_api_read.py +++ b/services/web/server/src/simcore_service_webserver/projects/_crud_api_read.py @@ -6,16 +6,16 @@ """ from aiohttp import web -from models_library.access_rights import AccessRights from models_library.api_schemas_webserver._base import OutputSchema from models_library.api_schemas_webserver.projects import ProjectListItem -from models_library.folders import FolderID +from models_library.folders import FolderID, FolderQuery, FolderScope from models_library.projects import ProjectID from models_library.rest_ordering import OrderBy -from models_library.users import GroupID, UserID -from models_library.workspaces import WorkspaceID +from models_library.users import UserID +from models_library.workspaces import WorkspaceID, WorkspaceQuery, WorkspaceScope from pydantic import NonNegativeInt from servicelib.utils import logged_gather +from simcore_postgres_database.models.projects import ProjectType from simcore_postgres_database.webserver_models import ProjectType as ProjectTypeDB from simcore_service_webserver.workspaces._workspaces_api import ( check_user_workspace_access, @@ -23,7 +23,6 @@ from ..catalog.client import get_services_for_user_in_product from ..folders import _folders_db as folders_db -from ..workspaces import _workspaces_db as workspaces_db from . import projects_api from ._permalink_api import update_or_pop_permalink_in_project from .db import ProjectDBAPI @@ -36,7 +35,6 @@ async def _append_fields( user_id: UserID, project: ProjectDict, is_template: bool, - workspace_access_rights: dict[GroupID, AccessRights] | None, model_schema_cls: type[OutputSchema], ): # state @@ -50,12 +48,6 @@ async def _append_fields( # permalink await update_or_pop_permalink_in_project(request, project) - # replace project access rights (if project is in workspace) - if workspace_access_rights: - project["accessRights"] = { - gid: access.dict() for gid, access in workspace_access_rights.items() - } - # validate return model_schema_cls.parse_obj(project).data(exclude_unset=True) @@ -110,15 +102,25 @@ async def list_projects( # pylint: disable=too-many-arguments db_projects, db_project_types, total_number_projects = await db.list_projects( product_name=product_name, user_id=user_id, - workspace_id=workspace_id, - folder_id=folder_id, + workspace_query=( + WorkspaceQuery( + workspace_scope=WorkspaceScope.SHARED, workspace_id=workspace_id + ) + if workspace_id + else WorkspaceQuery(workspace_scope=WorkspaceScope.PRIVATE) + ), + folder_query=( + FolderQuery(folder_scope=FolderScope.SPECIFIC, folder_id=folder_id) + if folder_id + else FolderQuery(folder_scope=FolderScope.ROOT) + ), # attrs filter_by_project_type=ProjectTypeAPI.to_project_type_db(project_type), filter_by_services=user_available_services, filter_trashed=trashed, filter_hidden=show_hidden, # composed attrs - search=search, + filter_by_text=search, # pagination offset=offset, limit=limit, @@ -126,14 +128,6 @@ async def list_projects( # pylint: disable=too-many-arguments order_by=order_by, ) - # If workspace, override project access rights - workspace_access_rights = None - if workspace_id: - workspace_db = await workspaces_db.get_workspace_for_user( - app, user_id=user_id, workspace_id=workspace_id, product_name=product_name - 
) - workspace_access_rights = workspace_db.access_rights - projects: list[ProjectDict] = await logged_gather( *( _append_fields( @@ -141,7 +135,6 @@ async def list_projects( # pylint: disable=too-many-arguments user_id=user_id, project=prj, is_template=prj_type == ProjectTypeDB.TEMPLATE, - workspace_access_rights=workspace_access_rights, model_schema_cls=ProjectListItem, ) for prj, prj_type in zip(db_projects, db_project_types) @@ -170,19 +163,18 @@ async def list_projects_full_search( request.app, user_id, product_name, only_key_versions=True ) - ( - db_projects, - db_project_types, - total_number_projects, - ) = await db.list_projects_full_search( - user_id=user_id, + (db_projects, db_project_types, total_number_projects,) = await db.list_projects( product_name=product_name, + user_id=user_id, + workspace_query=WorkspaceQuery(workspace_scope=WorkspaceScope.ALL), + folder_query=FolderQuery(folder_scope=FolderScope.ALL), filter_by_services=user_available_services, - text=text, + filter_by_text=text, + filter_tag_ids_list=tag_ids_list, + filter_by_project_type=ProjectType.STANDARD, offset=offset, limit=limit, order_by=order_by, - tag_ids_list=tag_ids_list, ) projects: list[ProjectDict] = await logged_gather( @@ -192,7 +184,6 @@ async def list_projects_full_search( user_id=user_id, project=prj, is_template=prj_type == ProjectTypeDB.TEMPLATE, - workspace_access_rights=None, model_schema_cls=ProjectListItem, ) for prj, prj_type in zip(db_projects, db_project_types) diff --git a/services/web/server/src/simcore_service_webserver/projects/db.py b/services/web/server/src/simcore_service_webserver/projects/db.py index 5e0c216f77e..2281b807a71 100644 --- a/services/web/server/src/simcore_service_webserver/projects/db.py +++ b/services/web/server/src/simcore_service_webserver/projects/db.py @@ -16,7 +16,7 @@ from aiopg.sa.connection import SAConnection from aiopg.sa.result import ResultProxy, RowProxy from models_library.basic_types import IDStr -from models_library.folders import FolderID +from models_library.folders import FolderQuery, FolderScope from models_library.products import ProductName from models_library.projects import ProjectID, ProjectIDStr from models_library.projects_comments import CommentID, ProjectsCommentsDB @@ -31,7 +31,7 @@ from models_library.users import UserID from models_library.utils.fastapi_encoders import jsonable_encoder from models_library.wallets import WalletDB, WalletID -from models_library.workspaces import WorkspaceID +from models_library.workspaces import WorkspaceQuery, WorkspaceScope from pydantic import parse_obj_as from pydantic.types import PositiveInt from servicelib.aiohttp.application_keys import APP_AIOPG_ENGINE_KEY @@ -59,7 +59,7 @@ from sqlalchemy import func, literal_column from sqlalchemy.dialects.postgresql import BOOLEAN, INTEGER from sqlalchemy.dialects.postgresql import insert as pg_insert -from sqlalchemy.sql import and_ +from sqlalchemy.sql import ColumnElement, CompoundSelect, Select, and_ from tenacity import TryAgain from tenacity.asyncio import AsyncRetrying from tenacity.retry import retry_if_exception_type @@ -350,21 +350,22 @@ async def upsert_project_linked_product( ).group_by(project_to_groups.c.project_uuid) ).subquery("access_rights_subquery") - async def list_projects( # pylint: disable=too-many-arguments + async def list_projects( # pylint: disable=too-many-arguments,too-many-statements,too-many-branches self, *, - # hierarchy filters - product_name: str, + product_name: ProductName, user_id: PositiveInt, - workspace_id: 
WorkspaceID | None, - folder_id: FolderID | None = None, + # hierarchy filters + workspace_query: WorkspaceQuery, + folder_query: FolderQuery, # attribute filters - search: str | None = None, filter_by_project_type: ProjectType | None = None, filter_by_services: list[dict] | None = None, filter_published: bool | None = False, filter_hidden: bool | None = False, filter_trashed: bool | None = False, + filter_by_text: str | None = None, + filter_tag_ids_list: list[int] | None = None, # pagination offset: int | None = 0, limit: int | None = None, @@ -373,156 +374,9 @@ async def list_projects( # pylint: disable=too-many-arguments field=IDStr("last_change_date"), direction=OrderDirection.DESC ), ) -> tuple[list[dict[str, Any]], list[ProjectType], int]: - """ - If workspace_id is provided, then listing in workspace is considered/preffered - """ - assert ( - order_by.field in projects.columns - ), "Guaranteed by ProjectListWithJsonStrParams" # nosec + if filter_tag_ids_list is None: + filter_tag_ids_list = [] - # helper - private_workspace_user_id_or_none: UserID | None = ( - None if workspace_id else user_id - ) - - async with self.engine.acquire() as conn: - - _join_query = ( - projects.join(projects_to_products, isouter=True) - .join(self.access_rights_subquery, isouter=True) - .join( - projects_to_folders, - ( - (projects_to_folders.c.project_uuid == projects.c.uuid) - & ( - projects_to_folders.c.user_id - == private_workspace_user_id_or_none - ) - ), - isouter=True, - ) - ) - - query = ( - sa.select( - *[ - col - for col in projects.columns - if col.name not in ["access_rights"] - ], - self.access_rights_subquery.c.access_rights, - projects_to_products.c.product_name, - projects_to_folders.c.folder_id, - ) - .select_from(_join_query) - .where( - ( - (projects_to_products.c.product_name == product_name) - # This was added for backward compatibility, including old projects not in the projects_to_products table. 
- | (projects_to_products.c.product_name.is_(None)) - ) - & ( - projects_to_folders.c.folder_id == folder_id - if folder_id - else projects_to_folders.c.folder_id.is_(None) - ) - & ( - projects.c.workspace_id == workspace_id # <-- Shared workspace - if workspace_id - else projects.c.workspace_id.is_(None) # <-- Private workspace - ) - ) - ) - - # attributes filters - # None, true, false = all, attribute, !attribute - attributes_filters = [] - if filter_by_project_type is not None: - attributes_filters.append( - projects.c.type == filter_by_project_type.value - ) - - if filter_hidden is not None: - attributes_filters.append(projects.c.hidden.is_(filter_hidden)) - - if filter_published is not None: - attributes_filters.append(projects.c.published.is_(filter_published)) - - if filter_trashed is not None: - attributes_filters.append( - # marked explicitly as trashed - ( - projects.c.trashed_at.is_not(None) - & projects.c.trashed_explicitly.is_(True) - ) - if filter_trashed - # not marked as trashed - else projects.c.trashed_at.is_(None) - ) - query = query.where(sa.and_(*attributes_filters)) - - if private_workspace_user_id_or_none: - # If Private workspace we check to which projects user has access - user_groups: list[RowProxy] = await self._list_user_groups( - conn, user_id - ) - query = query.where( - (projects.c.prj_owner == user_id) - | sa.text( - f"jsonb_exists_any(access_rights_subquery.access_rights, {assemble_array_groups(user_groups)})" - ) - ) - - if search: - query = query.join( - users, users.c.id == projects.c.prj_owner, isouter=True - ) - query = query.where( - (projects.c.name.ilike(f"%{search}%")) - | (projects.c.description.ilike(f"%{search}%")) - | (projects.c.uuid.ilike(f"%{search}%")) - | (users.c.name.ilike(f"%{search}%")) - ) - - if order_by.direction == OrderDirection.ASC: - query = query.order_by(sa.asc(getattr(projects.c, order_by.field))) - else: - query = query.order_by(sa.desc(getattr(projects.c, order_by.field))) - - # page meta - total_number_of_projects = await conn.scalar( - query.with_only_columns(func.count()).order_by(None) - ) - assert total_number_of_projects is not None # nosec - - # page data - prjs, prj_types = await self._execute_without_permission_check( - conn, - user_id=user_id, - select_projects_query=query.offset(offset).limit(limit), - filter_by_services=filter_by_services, - ) - - return ( - prjs, - prj_types, - total_number_of_projects, - ) - - async def list_projects_full_search( - self, - *, - user_id: PositiveInt, - product_name: ProductName, - filter_by_services: list[dict] | None = None, - text: str | None = None, - offset: int | None = 0, - limit: int | None = None, - tag_ids_list: list[int], - order_by: OrderBy = OrderBy( - field=IDStr("last_change_date"), direction=OrderDirection.DESC - ), - ) -> tuple[list[dict[str, Any]], list[ProjectType], int]: async with self.engine.acquire() as conn: user_groups: list[RowProxy] = await self._list_user_groups(conn, user_id) @@ -552,124 +406,212 @@ async def list_projects_full_search( ).group_by(projects_tags.c.project_id) ).subquery("project_tags_subquery") - private_workspace_query = ( - sa.select( - *[ - col - for col in projects.columns - if col.name not in ["access_rights"] - ], - self.access_rights_subquery.c.access_rights, - projects_to_products.c.product_name, - projects_to_folders.c.folder_id, - sa.func.coalesce( - project_tags_subquery.c.tags, - sa.cast(sa.text("'{}'"), sa.ARRAY(sa.Integer)), - ).label("tags"), + ### + # Private workspace query + ### + + if workspace_query.workspace_scope 
is not WorkspaceScope.SHARED: + assert workspace_query.workspace_scope in ( # nosec + WorkspaceScope.PRIVATE, + WorkspaceScope.ALL, ) - .select_from( - projects.join(self.access_rights_subquery, isouter=True) - .join(projects_to_products) - .join( - projects_to_folders, + + private_workspace_query = ( + sa.select( + *[ + col + for col in projects.columns + if col.name not in ["access_rights"] + ], + self.access_rights_subquery.c.access_rights, + projects_to_products.c.product_name, + projects_to_folders.c.folder_id, + sa.func.coalesce( + project_tags_subquery.c.tags, + sa.cast(sa.text("'{}'"), sa.ARRAY(sa.Integer)), + ).label("tags"), + ) + .select_from( + projects.join(self.access_rights_subquery, isouter=True) + .join(projects_to_products) + .join( + projects_to_folders, + ( + (projects_to_folders.c.project_uuid == projects.c.uuid) + & (projects_to_folders.c.user_id == user_id) + ), + isouter=True, + ) + .join(project_tags_subquery, isouter=True) + ) + .where( ( - (projects_to_folders.c.project_uuid == projects.c.uuid) - & (projects_to_folders.c.user_id == user_id) - ), - isouter=True, + (projects.c.prj_owner == user_id) + | sa.text( + f"jsonb_exists_any(access_rights_subquery.access_rights, {assemble_array_groups(user_groups)})" + ) + ) + & (projects.c.workspace_id.is_(None)) # <-- Private workspace + & (projects_to_products.c.product_name == product_name) ) - .join(project_tags_subquery, isouter=True) ) - .where( - ( - (projects.c.prj_owner == user_id) - | sa.text( - f"jsonb_exists_any(access_rights_subquery.access_rights, {assemble_array_groups(user_groups)})" + if filter_by_text is not None: + private_workspace_query = private_workspace_query.join( + users, users.c.id == projects.c.prj_owner, isouter=True + ) + else: + private_workspace_query = None + + ### + # Shared workspace query + ### + + if workspace_query.workspace_scope is not WorkspaceScope.PRIVATE: + assert workspace_query.workspace_scope in ( + WorkspaceScope.SHARED, + WorkspaceScope.ALL, + ) # nosec + + shared_workspace_query = ( + sa.select( + *[ + col + for col in projects.columns + if col.name not in ["access_rights"] + ], + workspace_access_rights_subquery.c.access_rights, + projects_to_products.c.product_name, + projects_to_folders.c.folder_id, + sa.func.coalesce( + project_tags_subquery.c.tags, + sa.cast(sa.text("'{}'"), sa.ARRAY(sa.Integer)), + ).label("tags"), + ) + .select_from( + projects.join( + workspace_access_rights_subquery, + projects.c.workspace_id + == workspace_access_rights_subquery.c.workspace_id, ) + .join(projects_to_products) + .join( + projects_to_folders, + ( + (projects_to_folders.c.project_uuid == projects.c.uuid) + & (projects_to_folders.c.user_id.is_(None)) + ), + isouter=True, + ) + .join(project_tags_subquery, isouter=True) ) - & (projects.c.workspace_id.is_(None)) - & (projects_to_products.c.product_name == product_name) - & (projects.c.hidden.is_(False)) - & (projects.c.type == ProjectType.STANDARD) - & ( - (projects.c.name.ilike(f"%{text}%")) - | (projects.c.description.ilike(f"%{text}%")) - | (projects.c.uuid.ilike(f"%{text}%")) + .where( + ( + sa.text( + f"jsonb_exists_any(workspace_access_rights_subquery.access_rights, {assemble_array_groups(user_groups)})" + ) + ) + & (projects_to_products.c.product_name == product_name) ) ) - ) - - if tag_ids_list: - private_workspace_query = private_workspace_query.where( - sa.func.coalesce( - project_tags_subquery.c.tags, - sa.cast(sa.text("'{}'"), sa.ARRAY(sa.Integer)), - ).op("@>")(tag_ids_list) - ) + if workspace_query.workspace_scope == 
WorkspaceScope.ALL: + shared_workspace_query = shared_workspace_query.where( + projects.c.workspace_id.is_not( + None + ) # <-- All shared workspaces + ) + if filter_by_text is not None: + shared_workspace_query = shared_workspace_query.join( + users, users.c.id == projects.c.prj_owner, isouter=True + ) - shared_workspace_query = ( - sa.select( - *[ - col - for col in projects.columns - if col.name not in ["access_rights"] - ], - workspace_access_rights_subquery.c.access_rights, - projects_to_products.c.product_name, - projects_to_folders.c.folder_id, - sa.func.coalesce( - project_tags_subquery.c.tags, - sa.cast(sa.text("'{}'"), sa.ARRAY(sa.Integer)), - ).label("tags"), - ) - .select_from( - projects.join( - workspace_access_rights_subquery, + else: + assert ( + workspace_query.workspace_scope == WorkspaceScope.SHARED + ) # nosec + shared_workspace_query = shared_workspace_query.where( projects.c.workspace_id - == workspace_access_rights_subquery.c.workspace_id, - ) - .join(projects_to_products) - .join( - projects_to_folders, - ( - (projects_to_folders.c.project_uuid == projects.c.uuid) - & (projects_to_folders.c.user_id.is_(None)) - ), - isouter=True, + == workspace_query.workspace_id # <-- Specific shared workspace ) - .join(project_tags_subquery, isouter=True) + + else: + shared_workspace_query = None + + ### + # Attributes Filters + ### + + attributes_filters: list[ColumnElement] = [] + if filter_by_project_type is not None: + attributes_filters.append( + projects.c.type == filter_by_project_type.value ) - .where( + + if filter_hidden is not None: + attributes_filters.append(projects.c.hidden.is_(filter_hidden)) + + if filter_published is not None: + attributes_filters.append(projects.c.published.is_(filter_published)) + + if filter_trashed is not None: + attributes_filters.append( + # marked explicitly as trashed ( - sa.text( - f"jsonb_exists_any(workspace_access_rights_subquery.access_rights, {assemble_array_groups(user_groups)})" - ) - ) - & (projects.c.workspace_id.is_not(None)) - & (projects_to_products.c.product_name == product_name) - & (projects.c.hidden.is_(False)) - & (projects.c.type == ProjectType.STANDARD) - & ( - (projects.c.name.ilike(f"%{text}%")) - | (projects.c.description.ilike(f"%{text}%")) - | (projects.c.uuid.ilike(f"%{text}%")) + projects.c.trashed_at.is_not(None) + & projects.c.trashed_explicitly.is_(True) ) + if filter_trashed + # not marked as trashed + else projects.c.trashed_at.is_(None) ) - ) - - if tag_ids_list: - shared_workspace_query = shared_workspace_query.where( + if filter_by_text is not None: + attributes_filters.append( + (projects.c.name.ilike(f"%{filter_by_text}%")) + | (projects.c.description.ilike(f"%{filter_by_text}%")) + | (projects.c.uuid.ilike(f"%{filter_by_text}%")) + | (users.c.name.ilike(f"%{filter_by_text}%")) + ) + if filter_tag_ids_list: + attributes_filters.append( sa.func.coalesce( project_tags_subquery.c.tags, sa.cast(sa.text("'{}'"), sa.ARRAY(sa.Integer)), - ).op("@>")(tag_ids_list) + ).op("@>")(filter_tag_ids_list) + ) + if folder_query.folder_scope is not FolderScope.ALL: + if folder_query.folder_scope == FolderScope.SPECIFIC: + attributes_filters.append( + projects_to_folders.c.folder_id == folder_query.folder_id + ) + else: + assert folder_query.folder_scope == FolderScope.ROOT # nosec + attributes_filters.append(projects_to_folders.c.folder_id.is_(None)) + + ### + # Combined + ### + + combined_query: CompoundSelect | Select | None = None + if ( + private_workspace_query is not None + and shared_workspace_query is not None + 
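+                # NOTE: both sub-queries are built only for WorkspaceScope.ALL,
+                # i.e. when listing across the private AND the shared workspaces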
):
+                combined_query = sa.union_all(
+                    private_workspace_query.where(sa.and_(*attributes_filters)),
+                    shared_workspace_query.where(sa.and_(*attributes_filters)),
+                )
+            elif private_workspace_query is not None:
+                combined_query = private_workspace_query.where(
+                    sa.and_(*attributes_filters)
+                )
+            elif shared_workspace_query is not None:
+                combined_query = shared_workspace_query.where(
+                    sa.and_(*attributes_filters)
                 )

-            combined_query = sa.union_all(
-                private_workspace_query, shared_workspace_query
-            )
-
-            count_query = sa.select(func.count()).select_from(combined_query)
+            if combined_query is None:
+                msg = f"No valid queries were provided to combine. Workspace scope: {workspace_query.workspace_scope}"
+                raise ValueError(msg)
+            count_query = sa.select(func.count()).select_from(combined_query.subquery())
             total_count = await conn.scalar(count_query)

             if order_by.direction == OrderDirection.ASC:
diff --git a/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers.py b/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers.py
index 8904cead4bf..3cda6804797 100644
--- a/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers.py
+++ b/services/web/server/tests/unit/with_dbs/02/test_projects_crud_handlers.py
@@ -364,9 +364,15 @@ async def test_list_projects_with_innaccessible_services(
     data, *_ = await _list_and_assert_projects(
         client, expected, headers=s4l_product_headers
     )
-    assert len(data) == 2
+    # UPDATE (use-case 4): 11.11.2024 - This test used to check backwards compatibility for listing
+    # projects that were not in the projects_to_products table. After the project-listing refactoring,
+    # this is no longer supported. MD double-checked the last_modified_timestamp on projects
+    # that do not have any product assigned: all of them predate 01-11-2022, with the exception of two,
+    # `4b001ad2-8450-11ec-b105-02420a0b02c7` and `d952cbf4-d838-11ec-af92-02420a0bdad4`, which were added to the osparc product.
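+    # Hence, such projects are no longer returned and the listings below are expected to be empty.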
+ assert len(data) == 0 data, *_ = await _list_and_assert_projects(client, expected) - assert len(data) == 2 + assert len(data) == 0 @pytest.mark.parametrize( From 2af7f218f196141a18fef4bf6ef75f0248bd7857 Mon Sep 17 00:00:00 2001 From: Odei Maiz <33152403+odeimaiz@users.noreply.github.com> Date: Mon, 11 Nov 2024 14:28:22 +0100 Subject: [PATCH 06/22] =?UTF-8?q?=F0=9F=8E=A8=20[Frontend]=20UX:=20New=20W?= =?UTF-8?q?orkspace=20and=20New=20Organization=20(#6699)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../osparc/dashboard/ResourceBrowserBase.js | 6 +- .../class/osparc/dashboard/ResourceDetails.js | 2 +- .../class/osparc/dashboard/StudyBrowser.js | 17 +- .../osparc/dashboard/StudyBrowserHeader.js | 6 +- .../osparc/dashboard/WorkspaceButtonItem.js | 2 +- .../osparc/dashboard/WorkspaceButtonNew.js | 29 +-- .../dashboard/WorkspacesAndFoldersTree.js | 2 +- .../organizations/OrganizationDetails.js | 12 +- .../organizations/OrganizationsList.js | 26 ++- .../class/osparc/editor/OrganizationEditor.js | 34 +++- .../class/osparc/editor/WorkspaceEditor.js | 170 +++++++++++------- .../source/class/osparc/store/Workspaces.js | 4 + .../source/class/osparc/study/StudyOptions.js | 69 ++++--- .../client/source/class/osparc/study/Utils.js | 5 +- .../client/source/class/osparc/utils/Utils.js | 7 +- .../resource/osparc/tours/s4l_tours.json | 4 +- 16 files changed, 231 insertions(+), 164 deletions(-) diff --git a/services/static-webserver/client/source/class/osparc/dashboard/ResourceBrowserBase.js b/services/static-webserver/client/source/class/osparc/dashboard/ResourceBrowserBase.js index 9334861f11c..31524310535 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/ResourceBrowserBase.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/ResourceBrowserBase.js @@ -116,7 +116,11 @@ qx.Class.define("osparc.dashboard.ResourceBrowserBase", { }; osparc.data.Resources.fetch("studies", "getWallet", params) .then(wallet => { - if (isStudyCreation || wallet === null || osparc.desktop.credits.Utils.getWallet(wallet["walletId"]) === null) { + if ( + isStudyCreation || + wallet === null || + osparc.desktop.credits.Utils.getWallet(wallet["walletId"]) === null + ) { // pop up study options if the study was just created or if it has no wallet assigned or user has no access to it const resourceSelector = new osparc.study.StudyOptions(studyId); const win = osparc.study.StudyOptions.popUpInWindow(resourceSelector); diff --git a/services/static-webserver/client/source/class/osparc/dashboard/ResourceDetails.js b/services/static-webserver/client/source/class/osparc/dashboard/ResourceDetails.js index a1ae4d742fa..76e9f628829 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/ResourceDetails.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/ResourceDetails.js @@ -364,7 +364,7 @@ qx.Class.define("osparc.dashboard.ResourceDetails", { const resourceData = this.__resourceData; if (osparc.utils.Resources.isStudy(resourceData)) { const id = "Billing"; - const title = this.tr("Billing Settings"); + const title = this.tr("Tier Settings"); const iconSrc = "@FontAwesome5Solid/cogs/22"; const page = this.__billingSettings = new osparc.dashboard.resources.pages.BasePage(title, iconSrc, id); this.__addOpenButton(page); diff --git a/services/static-webserver/client/source/class/osparc/dashboard/StudyBrowser.js b/services/static-webserver/client/source/class/osparc/dashboard/StudyBrowser.js index 
7349d7d46b5..288290b06df 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/StudyBrowser.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/StudyBrowser.js @@ -374,12 +374,11 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { newWorkspaceCard.setCardKey("new-workspace"); newWorkspaceCard.subscribeToFilterGroup("searchBarFilter"); [ - "createWorkspace", - "updateWorkspace" + "workspaceCreated", + "workspaceDeleted", + "workspaceUpdated", ].forEach(e => { - newWorkspaceCard.addListener(e, () => { - this.__reloadWorkspaces(); - }); + newWorkspaceCard.addListener(e, () => this.__reloadWorkspaces()); }); this._resourcesContainer.addNewWorkspaceCard(newWorkspaceCard); }, @@ -1170,7 +1169,8 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { __newStudyBtnClicked: function(button) { button.setValue(false); const minStudyData = osparc.data.model.Study.createMinStudyObject(); - const title = osparc.utils.Utils.getUniqueStudyName(minStudyData.name, this._resourcesList); + const existingNames = this._resourcesList.map(study => study["name"]); + const title = osparc.utils.Utils.getUniqueName(minStudyData.name, existingNames); minStudyData["name"] = title; minStudyData["workspaceId"] = this.getCurrentWorkspaceId(); minStudyData["folderId"] = this.getCurrentFolderId(); @@ -1190,7 +1190,8 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { __newPlanBtnClicked: function(templateData, newStudyName) { // do not override cached template data const templateCopyData = osparc.utils.Utils.deepCloneObject(templateData); - const title = osparc.utils.Utils.getUniqueStudyName(newStudyName, this._resourcesList); + const existingNames = this._resourcesList.map(study => study["name"]); + const title = osparc.utils.Utils.getUniqueName(newStudyName, existingNames); templateCopyData.name = title; this._showLoadingPage(this.tr("Creating ") + (newStudyName || osparc.product.Utils.getStudyAlias())); const contextProps = { @@ -1411,7 +1412,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { }, __getBillingMenuButton: function(card) { - const text = osparc.utils.Utils.capitalize(this.tr("Billing Settings...")); + const text = osparc.utils.Utils.capitalize(this.tr("Tier Settings...")); const studyBillingSettingsButton = new qx.ui.menu.Button(text); studyBillingSettingsButton["billingSettingsButton"] = true; studyBillingSettingsButton.addListener("tap", () => card.openBilling(), this); diff --git a/services/static-webserver/client/source/class/osparc/dashboard/StudyBrowserHeader.js b/services/static-webserver/client/source/class/osparc/dashboard/StudyBrowserHeader.js index 9e2ca51b434..87a6a366b58 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/StudyBrowserHeader.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/StudyBrowserHeader.js @@ -339,10 +339,10 @@ qx.Class.define("osparc.dashboard.StudyBrowserHeader", { __editWorkspace: function() { const workspace = osparc.store.Workspaces.getInstance().getWorkspace(this.getCurrentWorkspaceId()); - const permissionsView = new osparc.editor.WorkspaceEditor(workspace); + const workspaceEditor = new osparc.editor.WorkspaceEditor(workspace); const title = this.tr("Edit Workspace"); - const win = osparc.ui.window.Window.popUpInWindow(permissionsView, title, 300, 200); - permissionsView.addListener("workspaceUpdated", () => { + const win = osparc.ui.window.Window.popUpInWindow(workspaceEditor, title, 300, 150); + workspaceEditor.addListener("workspaceUpdated", () => { win.close(); 
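+        // rebuild the header so it shows the updated workspace details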
this.__buildLayout(); }, this); diff --git a/services/static-webserver/client/source/class/osparc/dashboard/WorkspaceButtonItem.js b/services/static-webserver/client/source/class/osparc/dashboard/WorkspaceButtonItem.js index 5581ec3212b..4d5253410bf 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/WorkspaceButtonItem.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/WorkspaceButtonItem.js @@ -185,7 +185,7 @@ qx.Class.define("osparc.dashboard.WorkspaceButtonItem", { const workspace = this.getWorkspace(); const workspaceEditor = new osparc.editor.WorkspaceEditor(workspace); const title = this.tr("Edit Workspace"); - const win = osparc.ui.window.Window.popUpInWindow(workspaceEditor, title, 300, 200); + const win = osparc.ui.window.Window.popUpInWindow(workspaceEditor, title, 300, 150); workspaceEditor.addListener("workspaceUpdated", () => { win.close(); this.fireDataEvent("workspaceUpdated", workspace.getWorkspaceId()); diff --git a/services/static-webserver/client/source/class/osparc/dashboard/WorkspaceButtonNew.js b/services/static-webserver/client/source/class/osparc/dashboard/WorkspaceButtonNew.js index fc1526b387d..ac87579355e 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/WorkspaceButtonNew.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/WorkspaceButtonNew.js @@ -46,26 +46,29 @@ qx.Class.define("osparc.dashboard.WorkspaceButtonNew", { }, events: { - "createWorkspace": "qx.event.type.Data", - "updateWorkspace": "qx.event.type.Data" + "workspaceCreated": "qx.event.type.Event", + "workspaceDeleted": "qx.event.type.Event", + "workspaceUpdated": "qx.event.type.Event", }, members: { __itemSelected: function(newVal) { if (newVal) { - const workspaceCreator = new osparc.editor.WorkspaceEditor(); + const workspaceEditor = new osparc.editor.WorkspaceEditor(); const title = this.tr("New Workspace"); - const win = osparc.ui.window.Window.popUpInWindow(workspaceCreator, title, 300, 200); - workspaceCreator.addListener("workspaceCreated", e => { - win.close(); - const newWorkspace = e.getData(); - this.fireDataEvent("createWorkspace", newWorkspace.getWorkspaceId(), this); - const permissionsView = new osparc.share.CollaboratorsWorkspace(newWorkspace); - const title2 = qx.locale.Manager.tr("Share Workspace"); - osparc.ui.window.Window.popUpInWindow(permissionsView, title2, 500, 500); - permissionsView.addListener("updateAccessRights", () => this.fireDataEvent("updateWorkspace", newWorkspace.getWorkspaceId()), this); + const win = osparc.ui.window.Window.popUpInWindow(workspaceEditor, title, 500, 500).set({ + modal: true, + clickAwayClose: false, }); - workspaceCreator.addListener("cancel", () => win.close()); + workspaceEditor.addListener("workspaceCreated", () => this.fireEvent("workspaceCreated")); + workspaceEditor.addListener("workspaceDeleted", () => this.fireEvent("workspaceDeleted")); + workspaceEditor.addListener("workspaceUpdated", () => { + win.close(); + this.fireEvent("workspaceUpdated"); + }, this); + workspaceEditor.addListener("updateAccessRights", () => this.fireEvent("workspaceUpdated")); + win.getChildControl("close-button").addListener("tap", () => workspaceEditor.cancel()); + workspaceEditor.addListener("cancel", () => win.close()); } this.setValue(false); } diff --git a/services/static-webserver/client/source/class/osparc/dashboard/WorkspacesAndFoldersTree.js b/services/static-webserver/client/source/class/osparc/dashboard/WorkspacesAndFoldersTree.js index c65318bfcd3..7f35c3ff320 
100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/WorkspacesAndFoldersTree.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/WorkspacesAndFoldersTree.js @@ -300,7 +300,7 @@ qx.Class.define("osparc.dashboard.WorkspacesAndFoldersTree", { if (oldParentFolderId === undefined) { // it was removed, not moved // remove it from the cached models - const modelFound = this.__getModel(folder.getWorkspaceId(), folder.getParentFolderId()); + const modelFound = this.__getModel(folder.getWorkspaceId(), folder.getFolderId()); if (modelFound) { const index = this.__models.indexOf(modelFound); if (index > -1) { // only splice array when item is found diff --git a/services/static-webserver/client/source/class/osparc/desktop/organizations/OrganizationDetails.js b/services/static-webserver/client/source/class/osparc/desktop/organizations/OrganizationDetails.js index 6871348d8a0..c9d0501c0cd 100644 --- a/services/static-webserver/client/source/class/osparc/desktop/organizations/OrganizationDetails.js +++ b/services/static-webserver/client/source/class/osparc/desktop/organizations/OrganizationDetails.js @@ -94,17 +94,9 @@ qx.Class.define("osparc.desktop.organizations.OrganizationDetails", { __openEditOrganization: function() { const org = this.__orgModel; - - const newOrg = false; - const orgEditor = new osparc.editor.OrganizationEditor(newOrg); - org.bind("gid", orgEditor, "gid"); - org.bind("label", orgEditor, "label"); - org.bind("description", orgEditor, "description"); - org.bind("thumbnail", orgEditor, "thumbnail", { - converter: val => val ? val : "" - }); const title = this.tr("Organization Details Editor"); - const win = osparc.ui.window.Window.popUpInWindow(orgEditor, title, 400, 250); + const orgEditor = new osparc.editor.OrganizationEditor(org); + const win = osparc.ui.window.Window.popUpInWindow(orgEditor, title, 400, 200); orgEditor.addListener("updateOrg", () => { this.__updateOrganization(win, orgEditor.getChildControl("save"), orgEditor); }); diff --git a/services/static-webserver/client/source/class/osparc/desktop/organizations/OrganizationsList.js b/services/static-webserver/client/source/class/osparc/desktop/organizations/OrganizationsList.js index 740f54211fa..c2f8656ed83 100644 --- a/services/static-webserver/client/source/class/osparc/desktop/organizations/OrganizationsList.js +++ b/services/static-webserver/client/source/class/osparc/desktop/organizations/OrganizationsList.js @@ -99,10 +99,9 @@ qx.Class.define("osparc.desktop.organizations.OrganizationsList", { allowGrowX: false }); createOrgBtn.addListener("execute", function() { - const newOrg = true; - const orgEditor = new osparc.editor.OrganizationEditor(newOrg); const title = this.tr("New Organization"); - const win = osparc.ui.window.Window.popUpInWindow(orgEditor, title, 400, 250); + const orgEditor = new osparc.editor.OrganizationEditor(); + const win = osparc.ui.window.Window.popUpInWindow(orgEditor, title, 400, 200); orgEditor.addListener("createOrg", () => { this.__createOrganization(win, orgEditor.getChildControl("create"), orgEditor); }); @@ -176,7 +175,7 @@ qx.Class.define("osparc.desktop.organizations.OrganizationsList", { } }, - reloadOrganizations: function() { + reloadOrganizations: function(orgId) { this.__orgsUIList.resetSelection(); const orgsModel = this.__orgsModel; orgsModel.removeAll(); @@ -199,6 +198,9 @@ qx.Class.define("osparc.desktop.organizations.OrganizationsList", { orgsList.sort(this.self().sortOrganizations); orgsList.forEach(org => 
orgsModel.append(qx.data.marshal.Json.createModel(org))); this.setOrganizationsLoaded(true); + if (orgId) { + this.fireDataEvent("organizationSelected", orgId); + } }); }, @@ -208,16 +210,9 @@ qx.Class.define("osparc.desktop.organizations.OrganizationsList", { return; } - const newOrg = false; - const orgEditor = new osparc.editor.OrganizationEditor(newOrg); - org.bind("gid", orgEditor, "gid"); - org.bind("label", orgEditor, "label"); - org.bind("description", orgEditor, "description"); - org.bind("thumbnail", orgEditor, "thumbnail", { - converter: val => val ? val : "" - }); const title = this.tr("Organization Details Editor"); - const win = osparc.ui.window.Window.popUpInWindow(orgEditor, title, 400, 250); + const orgEditor = new osparc.editor.OrganizationEditor(org); + const win = osparc.ui.window.Window.popUpInWindow(orgEditor, title, 400, 200); orgEditor.addListener("updateOrg", () => { this.__updateOrganization(win, orgEditor.getChildControl("save"), orgEditor); }); @@ -287,14 +282,15 @@ qx.Class.define("osparc.desktop.organizations.OrganizationsList", { } }; osparc.data.Resources.fetch("organizations", "post", params) - .then(() => { + .then(org => { osparc.FlashMessenger.getInstance().logAs(name + this.tr(" successfully created")); button.setFetching(false); osparc.store.Store.getInstance().reset("organizations"); // reload "profile", "organizations" are part of the information in this endpoint osparc.data.Resources.getOne("profile", {}, null, false) .then(() => { - this.reloadOrganizations(); + // open it + this.reloadOrganizations(org["gid"]); }); }) .catch(err => { diff --git a/services/static-webserver/client/source/class/osparc/editor/OrganizationEditor.js b/services/static-webserver/client/source/class/osparc/editor/OrganizationEditor.js index f4be5233d2f..b528e760c01 100644 --- a/services/static-webserver/client/source/class/osparc/editor/OrganizationEditor.js +++ b/services/static-webserver/client/source/class/osparc/editor/OrganizationEditor.js @@ -18,7 +18,7 @@ qx.Class.define("osparc.editor.OrganizationEditor", { extend: qx.ui.core.Widget, - construct: function(newOrg = true) { + construct: function(organization) { this.base(arguments); this._setLayout(new qx.ui.layout.VBox(8)); @@ -29,7 +29,27 @@ qx.Class.define("osparc.editor.OrganizationEditor", { manager.add(title); this.getChildControl("description"); this.getChildControl("thumbnail"); - newOrg ? this.getChildControl("create") : this.getChildControl("save"); + organization ? this.getChildControl("save") : this.getChildControl("create"); + + if (organization) { + organization.bind("gid", this, "gid"); + organization.bind("label", this, "label"); + organization.bind("description", this, "description"); + organization.bind("thumbnail", this, "thumbnail", { + converter: val => val ? 
val : "" + }); + } else { + osparc.store.Store.getInstance().getGroupsOrganizations() + .then(orgs => { + const existingNames = orgs.map(org => org["label"]); + const defaultName = osparc.utils.Utils.getUniqueName("New Organization", existingNames) + title.setValue(defaultName); + }) + .catch(err => { + console.error(err); + title.setValue("New Organization"); + }); + } this.addListener("appear", () => { title.focus(); @@ -82,7 +102,7 @@ qx.Class.define("osparc.editor.OrganizationEditor", { font: "text-14", backgroundColor: "background-main", placeholder: this.tr("Title"), - height: 35 + height: 30, }); this.bind("label", control, "value"); control.bind("value", this, "label"); @@ -90,12 +110,10 @@ qx.Class.define("osparc.editor.OrganizationEditor", { break; } case "description": { - control = new qx.ui.form.TextArea().set({ + control = new qx.ui.form.TextField().set({ font: "text-14", placeholder: this.tr("Description"), - autoSize: true, - minHeight: 70, - maxHeight: 140 + height: 30, }); this.bind("description", control, "value"); control.bind("value", this, "description"); @@ -106,7 +124,7 @@ qx.Class.define("osparc.editor.OrganizationEditor", { control = new qx.ui.form.TextField().set({ font: "text-14", placeholder: this.tr("Thumbnail"), - height: 35 + height: 30, }); this.bind("thumbnail", control, "value"); control.bind("value", this, "thumbnail"); diff --git a/services/static-webserver/client/source/class/osparc/editor/WorkspaceEditor.js b/services/static-webserver/client/source/class/osparc/editor/WorkspaceEditor.js index 6b89ee2af78..dab5a9807c3 100644 --- a/services/static-webserver/client/source/class/osparc/editor/WorkspaceEditor.js +++ b/services/static-webserver/client/source/class/osparc/editor/WorkspaceEditor.js @@ -33,20 +33,33 @@ qx.Class.define("osparc.editor.WorkspaceEditor", { manager.add(title); this.getChildControl("description"); this.getChildControl("thumbnail"); - workspace ? 
this.getChildControl("save") : this.getChildControl("create"); + this.getChildControl("cancel"); + this.getChildControl("save"); if (workspace) { - this.__workspaceId = workspace.getWorkspaceId(); - this.set({ - label: workspace.getName(), - description: workspace.getDescription(), - thumbnail: workspace.getThumbnail(), - }); + // editing + this.setWorkspace(workspace); + } else { + // creating + this.__creatingWorkspace = true; + this.__createWorkspace() + .then(newWorkspace => { + this.setWorkspace(newWorkspace); + this.fireDataEvent("workspaceCreated"); + this.getChildControl("sharing"); + }); } this.addListener("appear", this.__onAppear, this); }, properties: { + workspace: { + check: "osparc.data.model.Workspace", + init: null, + nullable: false, + apply: "__applyWorkspace" + }, + label: { check: "String", init: "", @@ -70,13 +83,26 @@ qx.Class.define("osparc.editor.WorkspaceEditor", { }, events: { - "workspaceCreated": "qx.event.type.Data", + "workspaceCreated": "qx.event.type.Event", + "workspaceDeleted": "qx.event.type.Event", "workspaceUpdated": "qx.event.type.Event", + "updateAccessRights": "qx.event.type.Event", "cancel": "qx.event.type.Event" }, + statics: { + POS: { + INTRO: 0, + TITLE: 1, + DESCRIPTION: 2, + THUMBNAIL: 3, + SHARING: 4, + BUTTONS: 5, + } + }, + members: { - __workspaceId: null, + __creatingWorkspace: null, _createChildControlImpl: function(id) { let control; @@ -89,7 +115,7 @@ qx.Class.define("osparc.editor.WorkspaceEditor", { rich: true, wrap: true }); - this._add(control); + this._addAt(control, this.self().POS.INTRO); break; } case "title": { @@ -97,71 +123,64 @@ qx.Class.define("osparc.editor.WorkspaceEditor", { font: "text-14", backgroundColor: "background-main", placeholder: this.tr("Title"), - minHeight: 27 + height: 30, }); this.bind("label", control, "value"); control.bind("value", this, "label"); - this._add(control); + this._addAt(control, this.self().POS.TITLE); break; } case "description": { - control = new qx.ui.form.TextArea().set({ + control = new qx.ui.form.TextField().set({ font: "text-14", placeholder: this.tr("Description"), - autoSize: true, - minHeight: 70, + height: 30, }); this.bind("description", control, "value"); control.bind("value", this, "description"); - this._add(control); + this._addAt(control, this.self().POS.DESCRIPTION); break; } case "thumbnail": { control = new qx.ui.form.TextField().set({ font: "text-14", placeholder: this.tr("Thumbnail"), + height: 30, }); this.bind("thumbnail", control, "value"); control.bind("value", this, "thumbnail"); - this._add(control); + this._addAt(control, this.self().POS.THUMBNAIL); break; } - case "create": { - const buttons = this.getChildControl("buttonsLayout"); - control = new osparc.ui.form.FetchButton(this.tr("Create")).set({ - appearance: "form-button" - }); - control.addListener("execute", () => { - if (this.__validator.validate()) { - this.__createWorkspace(control); - } - }, this); - buttons.addAt(control, 1); + case "sharing": { + control = new osparc.share.CollaboratorsWorkspace(this.getWorkspace()); + control.addListener("updateAccessRights", () => this.fireDataEvent("updateAccessRights", this.getWorkspace().getWorkspaceId()), this); + this._addAt(control, this.self().POS.SHARING); + break; + } + case "buttons-layout": { + control = new qx.ui.container.Composite(new qx.ui.layout.HBox(8).set({ + alignX: "right" + })); + this._addAt(control, this.self().POS.BUTTONS); break; } case "save": { - const buttons = this.getChildControl("buttonsLayout"); + const buttons = 
this.getChildControl("buttons-layout"); control = new osparc.ui.form.FetchButton(this.tr("Save")).set({ appearance: "form-button" }); - control.addListener("execute", () => { - if (this.__validator.validate()) { - this.__editWorkspace(control); - } - }, this); + control.addListener("execute", () => this.__saveWorkspace(control), this); buttons.addAt(control, 1); break; } - case "buttonsLayout": { - control = new qx.ui.container.Composite(new qx.ui.layout.HBox(8).set({ - alignX: "right" - })); - const cancelButton = new qx.ui.form.Button(this.tr("Cancel")).set({ + case "cancel": { + const buttons = this.getChildControl("buttons-layout"); + control = new qx.ui.form.Button(this.tr("Cancel")).set({ appearance: "form-button-text" }); - cancelButton.addListener("execute", () => this.fireEvent("cancel"), this); - control.addAt(cancelButton, 0); - this._add(control); + control.addListener("execute", () => this.cancel(), this); + buttons.addAt(control, 0); break; } } @@ -169,36 +188,55 @@ qx.Class.define("osparc.editor.WorkspaceEditor", { return control || this.base(arguments, id); }, - __createWorkspace: function(createButton) { - createButton.setFetching(true); + __applyWorkspace: function(workspace) { + this.set({ + label: workspace.getName(), + description: workspace.getDescription(), + thumbnail: workspace.getThumbnail(), + }); + }, + + __createWorkspace: function() { + const workspaceStore = osparc.store.Workspaces.getInstance(); + const workspaces = workspaceStore.getWorkspaces(); + const existingNames = workspaces.map(workspace => workspace.getName()); + const defaultName = osparc.utils.Utils.getUniqueName("New Workspace", existingNames) const newWorkspaceData = { - name: this.getLabel(), + name: this.getLabel() || defaultName, description: this.getDescription(), thumbnail: this.getThumbnail(), }; - osparc.store.Workspaces.getInstance().postWorkspace(newWorkspaceData) - .then(newWorkspace => this.fireDataEvent("workspaceCreated", newWorkspace)) - .catch(err => { - console.error(err); - osparc.FlashMessenger.logAs(err.message, "ERROR"); - }) - .finally(() => createButton.setFetching(false)); + return workspaceStore.postWorkspace(newWorkspaceData) }, - __editWorkspace: function(editButton) { - editButton.setFetching(true); - const updateData = { - name: this.getLabel(), - description: this.getDescription(), - thumbnail: this.getThumbnail(), - }; - osparc.store.Workspaces.getInstance().putWorkspace(this.__workspaceId, updateData) - .then(() => this.fireEvent("workspaceUpdated")) - .catch(err => { - console.error(err); - osparc.FlashMessenger.logAs(err.message, "ERROR"); - }) - .finally(() => editButton.setFetching(false)); + __saveWorkspace: function(editButton) { + if (this.__validator.validate()) { + editButton.setFetching(true); + const updateData = { + name: this.getLabel(), + description: this.getDescription(), + thumbnail: this.getThumbnail(), + }; + osparc.store.Workspaces.getInstance().putWorkspace(this.getWorkspace().getWorkspaceId(), updateData) + .then(() => this.fireEvent("workspaceUpdated")) + .catch(err => { + console.error(err); + osparc.FlashMessenger.logAs(err.message, "ERROR"); + }) + .finally(() => editButton.setFetching(false)); + } + }, + + cancel: function() { + if (this.__creatingWorkspace) { + osparc.store.Workspaces.getInstance().deleteWorkspace(this.getWorkspace().getWorkspaceId()) + .then(() => this.fireEvent("workspaceDeleted")) + .catch(err => { + console.error(err); + osparc.FlashMessenger.logAs(err.message, "ERROR"); + }); + } + this.fireEvent("cancel"); }, 
__onAppear: function() { diff --git a/services/static-webserver/client/source/class/osparc/store/Workspaces.js b/services/static-webserver/client/source/class/osparc/store/Workspaces.js index 8d803de0af5..253ac714a1d 100644 --- a/services/static-webserver/client/source/class/osparc/store/Workspaces.js +++ b/services/static-webserver/client/source/class/osparc/store/Workspaces.js @@ -197,6 +197,10 @@ qx.Class.define("osparc.store.Workspaces", { return this.workspacesCached.find(w => w.getWorkspaceId() === workspaceId); }, + getWorkspaces: function() { + return this.workspacesCached; + }, + __addToCache: function(workspace) { const found = this.workspacesCached.find(w => w.getWorkspaceId() === workspace.getWorkspaceId()); if (!found) { diff --git a/services/static-webserver/client/source/class/osparc/study/StudyOptions.js b/services/static-webserver/client/source/class/osparc/study/StudyOptions.js index 54ba001d6d6..9922ec017e3 100644 --- a/services/static-webserver/client/source/class/osparc/study/StudyOptions.js +++ b/services/static-webserver/client/source/class/osparc/study/StudyOptions.js @@ -23,28 +23,17 @@ qx.Class.define("osparc.study.StudyOptions", { this._setLayout(new qx.ui.layout.VBox(15)); - this.__studyId = studyId; - - const params = { - url: { - studyId - } - }; - Promise.all([ - osparc.data.Resources.getOne("studies", params), - osparc.data.Resources.fetch("studies", "getWallet", params) - ]) - .then(values => { - const studyData = values[0]; - this.__studyData = osparc.data.model.Study.deepCloneStudyObject(studyData); - if (values[1] && "walletId" in values[1]) { - this.__projectWalletId = values[1]["walletId"]; - } - this.__buildLayout(); - }); + this.setStudyId(studyId); }, properties: { + studyId: { + check: "String", + init: null, + nullable: false, + apply: "__fetchStudy" + }, + wallet: { check: "osparc.data.model.Wallet", init: null, @@ -93,9 +82,8 @@ qx.Class.define("osparc.study.StudyOptions", { }, members: { - __studyId: null, __studyData: null, - __projectWalletId: null, + __studyWalletId: null, _createChildControlImpl: function(id) { let control; @@ -105,7 +93,7 @@ qx.Class.define("osparc.study.StudyOptions", { this._addAt(control, 0); break; case "title-field": - control = new qx.ui.form.TextField(this.__studyData["name"]).set({ + control = new qx.ui.form.TextField().set({ maxWidth: 220 }); this.getChildControl("title-layout").add(control); @@ -192,6 +180,27 @@ qx.Class.define("osparc.study.StudyOptions", { return control || this.base(arguments, id); }, + __fetchStudy: function(studyId) { + const params = { + url: { + studyId + } + }; + Promise.all([ + osparc.data.Resources.getOne("studies", params), + osparc.data.Resources.fetch("studies", "getWallet", params) + ]) + .then(values => { + const studyData = values[0]; + this.__studyData = osparc.data.model.Study.deepCloneStudyObject(studyData); + + if (values[1] && "walletId" in values[1]) { + this.__studyWalletId = values[1]["walletId"]; + } + this.__buildLayout(); + }); + }, + __applyWallet: function(wallet) { if (wallet) { const walletSelector = this.getChildControl("wallet-selector"); @@ -214,15 +223,16 @@ qx.Class.define("osparc.study.StudyOptions", { __buildTopSummaryLayout: function() { const store = osparc.store.Store.getInstance(); - this._createChildControlImpl("title-label"); const titleField = this.getChildControl("title-field"); + if (this.__studyData) { + titleField.setValue(this.__studyData["name"]); + } titleField.addListener("appear", () => { titleField.focus(); titleField.activate(); }); // 
Wallet Selector - this._createChildControlImpl("wallet-selector-label"); const walletSelector = this.getChildControl("wallet-selector"); const wallets = store.getWallets(); @@ -241,8 +251,8 @@ qx.Class.define("osparc.study.StudyOptions", { } }); const preferredWallet = store.getPreferredWallet(); - if (wallets.find(wallet => wallet.getWalletId() === parseInt(this.__projectWalletId))) { - selectWallet(this.__projectWalletId); + if (wallets.find(wallet => wallet.getWalletId() === parseInt(this.__studyWalletId))) { + selectWallet(this.__studyWalletId); } else if (preferredWallet) { selectWallet(preferredWallet.getWalletId()); } else if (!osparc.desktop.credits.Utils.autoSelectActiveWallet(walletSelector)) { @@ -283,17 +293,18 @@ qx.Class.define("osparc.study.StudyOptions", { // first, update the name if necessary const titleSelection = this.getChildControl("title-field").getValue(); - if (this.__studyData["name"] !== titleSelection) { + if (this.__studyData && this.__studyData["name"] !== titleSelection) { await this.__updateName(this.__studyData, titleSelection); } // second, update the wallet if necessary const store = osparc.store.Store.getInstance(); const walletSelection = this.getChildControl("wallet-selector").getSelection(); - if (walletSelection.length && walletSelection[0]["walletId"]) { + const studyId = this.getStudyId(); + if (studyId && walletSelection.length && walletSelection[0]["walletId"]) { const params = { url: { - "studyId": this.__studyData["uuid"], + studyId, "walletId": walletSelection[0]["walletId"] } }; diff --git a/services/static-webserver/client/source/class/osparc/study/Utils.js b/services/static-webserver/client/source/class/osparc/study/Utils.js index dab2bd53bd8..0240d263e47 100644 --- a/services/static-webserver/client/source/class/osparc/study/Utils.js +++ b/services/static-webserver/client/source/class/osparc/study/Utils.js @@ -116,7 +116,8 @@ qx.Class.define("osparc.study.Utils", { newStudyLabel = metadata["name"]; } if (existingStudies) { - const title = osparc.utils.Utils.getUniqueStudyName(newStudyLabel, existingStudies); + const existingNames = existingStudies.map(study => study["name"]); + const title = osparc.utils.Utils.getUniqueName(newStudyLabel, existingNames); minStudyData["name"] = title; } else { minStudyData["name"] = newStudyLabel; @@ -234,7 +235,7 @@ qx.Class.define("osparc.study.Utils", { // update task osparc.widget.ProgressSequence.updateTaskProgress(existingTask, { value: percent, - progressLabel: percent*100 + "%" + progressLabel: parseFloat((percent*100).toFixed(2)) + "%" }); } else { // new task diff --git a/services/static-webserver/client/source/class/osparc/utils/Utils.js b/services/static-webserver/client/source/class/osparc/utils/Utils.js index 5c751c2ee8f..b095d95eee2 100644 --- a/services/static-webserver/client/source/class/osparc/utils/Utils.js +++ b/services/static-webserver/client/source/class/osparc/utils/Utils.js @@ -277,12 +277,11 @@ qx.Class.define("osparc.utils.Utils", { return reloadButton; }, - getUniqueStudyName: function(preferredName, list) { + getUniqueName: function(preferredName, existingNames) { let title = preferredName; - const existingTitles = list.map(study => study.name); - if (existingTitles.includes(title)) { + if (existingNames.includes(title)) { let cont = 1; - while (existingTitles.includes(`${title} (${cont})`)) { + while (existingNames.includes(`${title} (${cont})`)) { cont++; } title += ` (${cont})`; diff --git a/services/static-webserver/client/source/resource/osparc/tours/s4l_tours.json 
b/services/static-webserver/client/source/resource/osparc/tours/s4l_tours.json index cacb9ffb83d..492544fa598 100644 --- a/services/static-webserver/client/source/resource/osparc/tours/s4l_tours.json +++ b/services/static-webserver/client/source/resource/osparc/tours/s4l_tours.json @@ -7,7 +7,7 @@ "steps": [{ "anchorEl": "osparc-test-id=dashboardTabs", "title": "Dashboard Menu", - "text": "The menu tabs give you quick access to a set of core elements of the platform, namely Projects, Tutorials, Services and Data.", + "text": "The menu tabs give you quick access to a set of core elements of the platform, namely Projects, Tutorials and Services.", "placement": "bottom" }, { "beforeClick": { @@ -28,7 +28,7 @@ "selector": "osparc-test-id=servicesTabBtn" }, "anchorEl": "osparc-test-id=servicesTabBtn", - "text": "Every Project in Sim4Life is composed of at lease one so-called Service.
Services are building blocks for Studies and can provide data/files, visualize results (2D, 3D), implement code in Jupyter notebooks or perform computations to execute simulations within a Project.",
+      "text": "Every Project in Sim4Life is composed of at least one so-called Service.
Services are building blocks for Projects and can provide data/files, visualize results (2D, 3D), implement code in Jupyter notebooks or perform computations to execute simulations within a Project.", "placement": "bottom" }] }, From 8f182d3c07fd261f56f0e915204f4acf50da8504 Mon Sep 17 00:00:00 2001 From: Pedro Crespo-Valero <32402063+pcrespov@users.noreply.github.com> Date: Mon, 11 Nov 2024 18:59:50 +0100 Subject: [PATCH 07/22] =?UTF-8?q?=E2=99=BB=EF=B8=8F=20Migrates=20folders?= =?UTF-8?q?=20and=20workspaces=20repositories=20to=20asyncpg=20(#6688)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../folders/_folders_api.py | 2 +- .../folders/_folders_db.py | 130 +++++++----------- .../folders/_trash_api.py | 70 +++++----- .../workspaces/_groups_db.py | 40 ++++-- .../workspaces/_workspaces_db.py | 54 +++++--- 5 files changed, 145 insertions(+), 151 deletions(-) diff --git a/services/web/server/src/simcore_service_webserver/folders/_folders_api.py b/services/web/server/src/simcore_service_webserver/folders/_folders_api.py index 0344124abb6..043527d2def 100644 --- a/services/web/server/src/simcore_service_webserver/folders/_folders_api.py +++ b/services/web/server/src/simcore_service_webserver/folders/_folders_api.py @@ -262,7 +262,7 @@ async def update_folder( folder_db = await folders_db.update( app, - folder_id=folder_id, + folders_id_or_ids=folder_id, name=name, parent_folder_id=parent_folder_id, product_name=product_name, diff --git a/services/web/server/src/simcore_service_webserver/folders/_folders_db.py b/services/web/server/src/simcore_service_webserver/folders/_folders_db.py index 0ee44c17199..561bcb64c9e 100644 --- a/services/web/server/src/simcore_service_webserver/folders/_folders_db.py +++ b/services/web/server/src/simcore_service_webserver/folders/_folders_db.py @@ -19,11 +19,16 @@ from simcore_postgres_database.models.folders_v2 import folders_v2 from simcore_postgres_database.models.projects import projects from simcore_postgres_database.models.projects_to_folders import projects_to_folders +from simcore_postgres_database.utils_repos import ( + pass_or_acquire_connection, + transaction_context, +) from sqlalchemy import func +from sqlalchemy.ext.asyncio import AsyncConnection from sqlalchemy.orm import aliased from sqlalchemy.sql import asc, desc, select -from ..db.plugin import get_database_engine +from ..db.plugin import get_asyncpg_engine from .errors import FolderAccessForbiddenError, FolderNotFoundError _logger = logging.getLogger(__name__) @@ -55,6 +60,7 @@ def as_dict_exclude_unset(**params) -> dict[str, Any]: async def create( app: web.Application, + connection: AsyncConnection | None = None, *, created_by_gid: GroupID, folder_name: str, @@ -67,8 +73,8 @@ async def create( user_id is not None and workspace_id is not None ), "Both user_id and workspace_id cannot be provided at the same time. Please provide only one." 
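+    # NOTE: `connection` is optional: when the caller passes one, this statement joins
+    # the caller's transaction (unit-of-work); otherwise `transaction_context` acquires
+    # a connection from the asyncpg engine and runs in its own transaction.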
- async with get_database_engine(app).acquire() as conn: - result = await conn.execute( + async with transaction_context(get_asyncpg_engine(app), connection) as conn: + result = await conn.stream( folders_v2.insert() .values( name=folder_name, @@ -88,6 +94,7 @@ async def create( async def list_( app: web.Application, + connection: AsyncConnection | None = None, *, content_of_folder_id: FolderID | None, user_id: UserID | None, @@ -142,18 +149,17 @@ async def list_( list_query = base_query.order_by(desc(getattr(folders_v2.c, order_by.field))) list_query = list_query.offset(offset).limit(limit) - async with get_database_engine(app).acquire() as conn: - count_result = await conn.execute(count_query) - total_count = await count_result.scalar() + async with pass_or_acquire_connection(get_asyncpg_engine(app), connection) as conn: + total_count = await conn.scalar(count_query) - result = await conn.execute(list_query) - rows = await result.fetchall() or [] - results: list[FolderDB] = [FolderDB.from_orm(row) for row in rows] - return cast(int, total_count), results + result = await conn.stream(list_query) + folders: list[FolderDB] = [FolderDB.from_orm(row) async for row in result] + return cast(int, total_count), folders async def get( app: web.Application, + connection: AsyncConnection | None = None, *, folder_id: FolderID, product_name: ProductName, @@ -167,8 +173,8 @@ async def get( ) ) - async with get_database_engine(app).acquire() as conn: - result = await conn.execute(query) + async with pass_or_acquire_connection(get_asyncpg_engine(app), connection) as conn: + result = await conn.stream(query) row = await result.first() if row is None: raise FolderAccessForbiddenError( @@ -179,6 +185,7 @@ async def get( async def get_for_user_or_workspace( app: web.Application, + connection: AsyncConnection | None = None, *, folder_id: FolderID, product_name: ProductName, @@ -203,8 +210,8 @@ async def get_for_user_or_workspace( else: query = query.where(folders_v2.c.workspace_id == workspace_id) - async with get_database_engine(app).acquire() as conn: - result = await conn.execute(query) + async with pass_or_acquire_connection(get_asyncpg_engine(app), connection) as conn: + result = await conn.stream(query) row = await result.first() if row is None: raise FolderAccessForbiddenError( @@ -213,8 +220,10 @@ async def get_for_user_or_workspace( return FolderDB.from_orm(row) -async def _update_impl( +async def update( app: web.Application, + connection: AsyncConnection | None = None, + *, folders_id_or_ids: FolderID | set[FolderID], product_name: ProductName, # updatable columns @@ -247,64 +256,22 @@ async def _update_impl( # single-update query = query.where(folders_v2.c.folder_id == folders_id_or_ids) - async with get_database_engine(app).acquire() as conn: - result = await conn.execute(query) + async with transaction_context(get_asyncpg_engine(app), connection) as conn: + result = await conn.stream(query) row = await result.first() if row is None: raise FolderNotFoundError(reason=f"Folder {folders_id_or_ids} not found.") return FolderDB.from_orm(row) -async def update_batch( - app: web.Application, - *folder_id: FolderID, - product_name: ProductName, - # updatable columns - name: str | UnSet = _unset, - parent_folder_id: FolderID | None | UnSet = _unset, - trashed_at: datetime | None | UnSet = _unset, - trashed_explicitly: bool | UnSet = _unset, -) -> FolderDB: - return await _update_impl( - app=app, - folders_id_or_ids=set(folder_id), - product_name=product_name, - name=name, - 
parent_folder_id=parent_folder_id, - trashed_at=trashed_at, - trashed_explicitly=trashed_explicitly, - ) - - -async def update( - app: web.Application, - *, - folder_id: FolderID, - product_name: ProductName, - # updatable columns - name: str | UnSet = _unset, - parent_folder_id: FolderID | None | UnSet = _unset, - trashed_at: datetime | None | UnSet = _unset, - trashed_explicitly: bool | UnSet = _unset, -) -> FolderDB: - return await _update_impl( - app=app, - folders_id_or_ids=folder_id, - product_name=product_name, - name=name, - parent_folder_id=parent_folder_id, - trashed_at=trashed_at, - trashed_explicitly=trashed_explicitly, - ) - - async def delete_recursively( app: web.Application, + connection: AsyncConnection | None = None, *, folder_id: FolderID, product_name: ProductName, ) -> None: - async with get_database_engine(app).acquire() as conn, conn.begin(): + async with transaction_context(get_asyncpg_engine(app), connection) as conn: # Step 1: Define the base case for the recursive CTE base_query = select( folders_v2.c.folder_id, folders_v2.c.parent_folder_id @@ -330,10 +297,9 @@ async def delete_recursively( # Step 4: Execute the query to get all descendants final_query = select(folder_hierarchy_cte) - result = await conn.execute(final_query) - rows = ( # list of tuples [(folder_id, parent_folder_id), ...] ex. [(1, None), (2, 1)] - await result.fetchall() or [] - ) + result = await conn.stream(final_query) + # list of tuples [(folder_id, parent_folder_id), ...] ex. [(1, None), (2, 1)] + rows = [row async for row in result] # Sort folders so that child folders come first sorted_folders = sorted( @@ -347,6 +313,7 @@ async def delete_recursively( async def get_projects_recursively_only_if_user_is_owner( app: web.Application, + connection: AsyncConnection | None = None, *, folder_id: FolderID, private_workspace_user_id_or_none: UserID | None, @@ -361,7 +328,8 @@ async def get_projects_recursively_only_if_user_is_owner( or the `users_to_groups` table for private workspace projects. """ - async with get_database_engine(app).acquire() as conn, conn.begin(): + async with pass_or_acquire_connection(get_asyncpg_engine(app), connection) as conn: + # Step 1: Define the base case for the recursive CTE base_query = select( folders_v2.c.folder_id, folders_v2.c.parent_folder_id @@ -370,6 +338,7 @@ async def get_projects_recursively_only_if_user_is_owner( & (folders_v2.c.product_name == product_name) ) folder_hierarchy_cte = base_query.cte(name="folder_hierarchy", recursive=True) + # Step 2: Define the recursive case folder_alias = aliased(folders_v2) recursive_query = select( @@ -380,16 +349,15 @@ async def get_projects_recursively_only_if_user_is_owner( folder_alias.c.parent_folder_id == folder_hierarchy_cte.c.folder_id, ) ) + # Step 3: Combine base and recursive cases into a CTE folder_hierarchy_cte = folder_hierarchy_cte.union_all(recursive_query) + # Step 4: Execute the query to get all descendants final_query = select(folder_hierarchy_cte) - result = await conn.execute(final_query) - rows = ( # list of tuples [(folder_id, parent_folder_id), ...] ex. [(1, None), (2, 1)] - await result.fetchall() or [] - ) - - folder_ids = [item[0] for item in rows] + result = await conn.stream(final_query) + # list of tuples [(folder_id, parent_folder_id), ...] ex. 
[(1, None), (2, 1)] + folder_ids = [item[0] async for item in result] query = ( select(projects_to_folders.c.project_uuid) @@ -402,19 +370,19 @@ async def get_projects_recursively_only_if_user_is_owner( if private_workspace_user_id_or_none is not None: query = query.where(projects.c.prj_owner == user_id) - result = await conn.execute(query) - - rows = await result.fetchall() or [] - return [ProjectID(row[0]) for row in rows] + result = await conn.stream(query) + return [ProjectID(row[0]) async for row in result] async def get_folders_recursively( app: web.Application, + connection: AsyncConnection | None = None, *, folder_id: FolderID, product_name: ProductName, ) -> list[FolderID]: - async with get_database_engine(app).acquire() as conn, conn.begin(): + async with pass_or_acquire_connection(get_asyncpg_engine(app), connection) as conn: + # Step 1: Define the base case for the recursive CTE base_query = select( folders_v2.c.folder_id, folders_v2.c.parent_folder_id @@ -440,9 +408,5 @@ async def get_folders_recursively( # Step 4: Execute the query to get all descendants final_query = select(folder_hierarchy_cte) - result = await conn.execute(final_query) - rows = ( # list of tuples [(folder_id, parent_folder_id), ...] ex. [(1, None), (2, 1)] - await result.fetchall() or [] - ) - - return [FolderID(row[0]) for row in rows] + result = await conn.stream(final_query) + return [FolderID(row[0]) async for row in result] diff --git a/services/web/server/src/simcore_service_webserver/folders/_trash_api.py b/services/web/server/src/simcore_service_webserver/folders/_trash_api.py index 1cad0415161..b3e1823369a 100644 --- a/services/web/server/src/simcore_service_webserver/folders/_trash_api.py +++ b/services/web/server/src/simcore_service_webserver/folders/_trash_api.py @@ -7,7 +7,10 @@ from models_library.products import ProductName from models_library.projects import ProjectID from models_library.users import UserID +from simcore_postgres_database.utils_repos import transaction_context +from sqlalchemy.ext.asyncio import AsyncConnection +from ..db.plugin import get_asyncpg_engine from ..projects._trash_api import trash_project, untrash_project from ..workspaces.api import check_user_workspace_access from . import _folders_db @@ -55,6 +58,7 @@ async def _check_exists_and_access( async def _folders_db_update( app: web.Application, + connection: AsyncConnection | None = None, *, product_name: ProductName, folder_id: FolderID, @@ -63,7 +67,8 @@ async def _folders_db_update( # EXPLICIT un/trash await _folders_db.update( app, - folder_id=folder_id, + connection, + folders_id_or_ids=folder_id, product_name=product_name, trashed_at=trashed_at, trashed_explicitly=trashed_at is not None, @@ -73,15 +78,16 @@ async def _folders_db_update( child_folders: set[FolderID] = { f for f in await _folders_db.get_folders_recursively( - app, folder_id=folder_id, product_name=product_name + app, connection, folder_id=folder_id, product_name=product_name ) if f != folder_id } if child_folders: - await _folders_db.update_batch( + await _folders_db.update( app, - *child_folders, + connection, + folders_id_or_ids=child_folders, product_name=product_name, trashed_at=trashed_at, trashed_explicitly=False, @@ -104,40 +110,40 @@ async def trash_folder( # Trash trashed_at = arrow.utcnow().datetime - _logger.debug( - "TODO: Unit of work for all folders and projects and fails if force_stop_first=%s is False", - force_stop_first, - ) - - # 1. 
Trash folder and children - await _folders_db_update( - app, - folder_id=folder_id, - product_name=product_name, - trashed_at=trashed_at, - ) - - # 2. Trash all child projects that I am an owner - child_projects: list[ - ProjectID - ] = await _folders_db.get_projects_recursively_only_if_user_is_owner( - app, - folder_id=folder_id, - private_workspace_user_id_or_none=user_id if workspace_is_private else None, - user_id=user_id, - product_name=product_name, - ) + async with transaction_context(get_asyncpg_engine(app)) as connection: - for project_id in child_projects: - await trash_project( + # 1. Trash folder and children + await _folders_db_update( app, + connection, + folder_id=folder_id, product_name=product_name, + trashed_at=trashed_at, + ) + + # 2. Trash all child projects that I am an owner + child_projects: list[ + ProjectID + ] = await _folders_db.get_projects_recursively_only_if_user_is_owner( + app, + connection, + folder_id=folder_id, + private_workspace_user_id_or_none=user_id if workspace_is_private else None, user_id=user_id, - project_id=project_id, - force_stop_first=force_stop_first, - explicit=False, + product_name=product_name, ) + for project_id in child_projects: + await trash_project( + app, + # NOTE: this needs to be included in the unit-of-work, i.e. connection, + product_name=product_name, + user_id=user_id, + project_id=project_id, + force_stop_first=force_stop_first, + explicit=False, + ) + async def untrash_folder( app: web.Application, diff --git a/services/web/server/src/simcore_service_webserver/workspaces/_groups_db.py b/services/web/server/src/simcore_service_webserver/workspaces/_groups_db.py index daeba51ae80..019ec5530b0 100644 --- a/services/web/server/src/simcore_service_webserver/workspaces/_groups_db.py +++ b/services/web/server/src/simcore_service_webserver/workspaces/_groups_db.py @@ -13,10 +13,15 @@ from simcore_postgres_database.models.workspaces_access_rights import ( workspaces_access_rights, ) +from simcore_postgres_database.utils_repos import ( + pass_or_acquire_connection, + transaction_context, +) from sqlalchemy import func, literal_column +from sqlalchemy.ext.asyncio import AsyncConnection from sqlalchemy.sql import select -from ..db.plugin import get_database_engine +from ..db.plugin import get_asyncpg_engine from .errors import WorkspaceGroupNotFoundError _logger = logging.getLogger(__name__) @@ -41,15 +46,16 @@ class Config: async def create_workspace_group( app: web.Application, + connection: AsyncConnection | None = None, + *, workspace_id: WorkspaceID, group_id: GroupID, - *, read: bool, write: bool, delete: bool, ) -> WorkspaceGroupGetDB: - async with get_database_engine(app).acquire() as conn: - result = await conn.execute( + async with transaction_context(get_asyncpg_engine(app), connection) as conn: + result = await conn.stream( workspaces_access_rights.insert() .values( workspace_id=workspace_id, @@ -68,6 +74,8 @@ async def create_workspace_group( async def list_workspace_groups( app: web.Application, + connection: AsyncConnection | None = None, + *, workspace_id: WorkspaceID, ) -> list[WorkspaceGroupGetDB]: stmt = ( @@ -83,14 +91,15 @@ async def list_workspace_groups( .where(workspaces_access_rights.c.workspace_id == workspace_id) ) - async with get_database_engine(app).acquire() as conn: - result = await conn.execute(stmt) - rows = await result.fetchall() or [] - return [WorkspaceGroupGetDB.from_orm(row) for row in rows] + async with pass_or_acquire_connection(get_asyncpg_engine(app), connection) as conn: + result = await 
conn.stream(stmt) + return [WorkspaceGroupGetDB.from_orm(row) async for row in result] async def get_workspace_group( app: web.Application, + connection: AsyncConnection | None = None, + *, workspace_id: WorkspaceID, group_id: GroupID, ) -> WorkspaceGroupGetDB: @@ -110,8 +119,8 @@ async def get_workspace_group( ) ) - async with get_database_engine(app).acquire() as conn: - result = await conn.execute(stmt) + async with pass_or_acquire_connection(get_asyncpg_engine(app), connection) as conn: + result = await conn.stream(stmt) row = await result.first() if row is None: raise WorkspaceGroupNotFoundError( @@ -122,15 +131,16 @@ async def get_workspace_group( async def update_workspace_group( app: web.Application, + connection: AsyncConnection | None = None, + *, workspace_id: WorkspaceID, group_id: GroupID, - *, read: bool, write: bool, delete: bool, ) -> WorkspaceGroupGetDB: - async with get_database_engine(app).acquire() as conn: - result = await conn.execute( + async with transaction_context(get_asyncpg_engine(app), connection) as conn: + result = await conn.stream( workspaces_access_rights.update() .values( read=read, @@ -153,10 +163,12 @@ async def update_workspace_group( async def delete_workspace_group( app: web.Application, + connection: AsyncConnection | None = None, + *, workspace_id: WorkspaceID, group_id: GroupID, ) -> None: - async with get_database_engine(app).acquire() as conn: + async with transaction_context(get_asyncpg_engine(app), connection) as conn: await conn.execute( workspaces_access_rights.delete().where( (workspaces_access_rights.c.workspace_id == workspace_id) diff --git a/services/web/server/src/simcore_service_webserver/workspaces/_workspaces_db.py b/services/web/server/src/simcore_service_webserver/workspaces/_workspaces_db.py index 23de15c3b19..a959843a969 100644 --- a/services/web/server/src/simcore_service_webserver/workspaces/_workspaces_db.py +++ b/services/web/server/src/simcore_service_webserver/workspaces/_workspaces_db.py @@ -22,11 +22,16 @@ from simcore_postgres_database.models.workspaces_access_rights import ( workspaces_access_rights, ) +from simcore_postgres_database.utils_repos import ( + pass_or_acquire_connection, + transaction_context, +) from sqlalchemy import asc, desc, func from sqlalchemy.dialects.postgresql import BOOLEAN, INTEGER +from sqlalchemy.ext.asyncio import AsyncConnection from sqlalchemy.sql import Subquery, select -from ..db.plugin import get_database_engine +from ..db.plugin import get_asyncpg_engine from .errors import WorkspaceAccessForbiddenError, WorkspaceNotFoundError _logger = logging.getLogger(__name__) @@ -45,14 +50,16 @@ async def create_workspace( app: web.Application, + connection: AsyncConnection | None = None, + *, product_name: ProductName, owner_primary_gid: GroupID, name: str, description: str | None, thumbnail: str | None, ) -> WorkspaceDB: - async with get_database_engine(app).acquire() as conn: - result = await conn.execute( + async with transaction_context(get_asyncpg_engine(app), connection) as conn: + result = await conn.stream( workspaces.insert() .values( name=name, @@ -69,7 +76,7 @@ async def create_workspace( return WorkspaceDB.from_orm(row) -access_rights_subquery = ( +_access_rights_subquery = ( select( workspaces_access_rights.c.workspace_id, func.jsonb_object_agg( @@ -116,6 +123,7 @@ def _create_my_access_rights_subquery(user_id: UserID) -> Subquery: async def list_workspaces_for_user( app: web.Application, + connection: AsyncConnection | None = None, *, user_id: UserID, product_name: ProductName, @@ 
-128,11 +136,11 @@ async def list_workspaces_for_user( base_query = ( select( *_SELECTION_ARGS, - access_rights_subquery.c.access_rights, + _access_rights_subquery.c.access_rights, my_access_rights_subquery.c.my_access_rights, ) .select_from( - workspaces.join(access_rights_subquery).join(my_access_rights_subquery) + workspaces.join(_access_rights_subquery).join(my_access_rights_subquery) ) .where(workspaces.c.product_name == product_name) ) @@ -148,21 +156,21 @@ async def list_workspaces_for_user( list_query = base_query.order_by(desc(getattr(workspaces.c, order_by.field))) list_query = list_query.offset(offset).limit(limit) - async with get_database_engine(app).acquire() as conn: - count_result = await conn.execute(count_query) - total_count = await count_result.scalar() + async with pass_or_acquire_connection(get_asyncpg_engine(app), connection) as conn: + total_count = await conn.scalar(count_query) - result = await conn.execute(list_query) - rows = await result.fetchall() or [] - results: list[UserWorkspaceAccessRightsDB] = [ - UserWorkspaceAccessRightsDB.from_orm(row) for row in rows + result = await conn.stream(list_query) + items: list[UserWorkspaceAccessRightsDB] = [ + UserWorkspaceAccessRightsDB.from_orm(row) async for row in result ] - return cast(int, total_count), results + return cast(int, total_count), items async def get_workspace_for_user( app: web.Application, + connection: AsyncConnection | None = None, + *, user_id: UserID, workspace_id: WorkspaceID, product_name: ProductName, @@ -172,11 +180,11 @@ async def get_workspace_for_user( base_query = ( select( *_SELECTION_ARGS, - access_rights_subquery.c.access_rights, + _access_rights_subquery.c.access_rights, my_access_rights_subquery.c.my_access_rights, ) .select_from( - workspaces.join(access_rights_subquery).join(my_access_rights_subquery) + workspaces.join(_access_rights_subquery).join(my_access_rights_subquery) ) .where( (workspaces.c.workspace_id == workspace_id) @@ -184,8 +192,8 @@ async def get_workspace_for_user( ) ) - async with get_database_engine(app).acquire() as conn: - result = await conn.execute(base_query) + async with pass_or_acquire_connection(get_asyncpg_engine(app), connection) as conn: + result = await conn.stream(base_query) row = await result.first() if row is None: raise WorkspaceAccessForbiddenError( @@ -196,14 +204,16 @@ async def get_workspace_for_user( async def update_workspace( app: web.Application, + connection: AsyncConnection | None = None, + *, workspace_id: WorkspaceID, name: str, description: str | None, thumbnail: str | None, product_name: ProductName, ) -> WorkspaceDB: - async with get_database_engine(app).acquire() as conn: - result = await conn.execute( + async with transaction_context(get_asyncpg_engine(app), connection) as conn: + result = await conn.stream( workspaces.update() .values( name=name, @@ -225,10 +235,12 @@ async def update_workspace( async def delete_workspace( app: web.Application, + connection: AsyncConnection | None = None, + *, workspace_id: WorkspaceID, product_name: ProductName, ) -> None: - async with get_database_engine(app).acquire() as conn: + async with transaction_context(get_asyncpg_engine(app), connection) as conn: await conn.execute( workspaces.delete().where( (workspaces.c.workspace_id == workspace_id) From ec1e84e842cc1f70b3e8751325cca808c305b18c Mon Sep 17 00:00:00 2001 From: Pedro Crespo-Valero <32402063+pcrespov@users.noreply.github.com> Date: Tue, 12 Nov 2024 10:58:07 +0100 Subject: [PATCH 08/22] 
=?UTF-8?q?=F0=9F=94=A8=20Fixes=20e2e:=20tests/perfo?= =?UTF-8?q?mance=20=20(#6707)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- tests/performance/Makefile | 12 ++++- .../locust_files/platform_ping_test.py | 4 +- tests/performance/locust_settings.py | 50 ++++++++++++++++++- 3 files changed, 60 insertions(+), 6 deletions(-) diff --git a/tests/performance/Makefile b/tests/performance/Makefile index ead1e417d39..d41a60d7af8 100644 --- a/tests/performance/Makefile +++ b/tests/performance/Makefile @@ -12,7 +12,8 @@ export ENV_FILE NETWORK_NAME=dashboards_timenet # UTILS -get_my_ip := $(shell (hostname --all-ip-addresses || hostname -i) 2>/dev/null | cut --delimiter=" " --fields=1) +# NOTE: keep short arguments for `cut` so it works in both BusyBox (alpine) AND Ubuntu +get_my_ip := $(shell (hostname --all-ip-addresses || hostname -i) 2>/dev/null | cut -d " " -f 1) # Check that given variables are set and all have non-empty values, # die with an error otherwise. @@ -28,6 +29,7 @@ __check_defined = \ $(error Undefined $1$(if $2, ($2)))) + .PHONY: build build: ## builds distributed osparc locust docker image docker \ @@ -42,6 +44,8 @@ build: ## builds distributed osparc locust docker image push: docker push itisfoundation/locust:$(LOCUST_VERSION) + + .PHONY: down down: ## stops and removes osparc locust containers docker compose --file docker-compose.yml down @@ -55,6 +59,8 @@ test: ## runs osparc locust. Locust and test configuration are specified in ENV_ fi docker compose --file docker-compose.yml up --scale worker=4 --exit-code-from=master + + .PHONY: dashboards-up dashboards-down dashboards-up: ## Create Grafana dashboard for inspecting locust results. See dashboard on localhost:3000 @@ -68,6 +74,8 @@ dashboards-up: ## Create Grafana dashboard for inspecting locust results. See da dashboards-down: @locust-compose down + + .PHONY: install-ci install-dev install-dev: @@ -80,4 +88,4 @@ install-ci: .PHONY: config config: @$(call check_defined, input, please define inputs when calling $@ - e.g. 
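The renamed `MonitoringBasicAuth` above is plain pydantic settings machinery: each field maps to an environment variable, and `default=...` (Ellipsis) marks it as required, so a missing `SC_USER_NAME`/`SC_PASSWORD` aborts the locust run at start-up with a `ValidationError` instead of sending empty credentials. A self-contained sketch, shown with pydantic v2 + pydantic-settings (the repository's pinned versions may differ):

```python
import os

from pydantic import Field
from pydantic_settings import BaseSettings


class MonitoringBasicAuth(BaseSettings):
    SC_USER_NAME: str = Field(default=...)  # default=... means "required"
    SC_PASSWORD: str = Field(default=...)


os.environ["SC_USER_NAME"] = "monitoring"
os.environ["SC_PASSWORD"] = "secret"

auth = MonitoringBasicAuth()
assert (auth.SC_USER_NAME, auth.SC_PASSWORD) == ("monitoring", "secret")
```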
```make $@ input="--help"```) - @uv run locust_settings.py $(input) | tee .env + @uv run locust_settings.py $(input) | tee "${ENV_FILE}" diff --git a/tests/performance/locust_files/platform_ping_test.py b/tests/performance/locust_files/platform_ping_test.py index 61cb0733458..c8839bb8c2b 100644 --- a/tests/performance/locust_files/platform_ping_test.py +++ b/tests/performance/locust_files/platform_ping_test.py @@ -19,7 +19,7 @@ assert locust_plugins # nosec -class LocustAuth(BaseSettings): +class MonitoringBasicAuth(BaseSettings): SC_USER_NAME: str = Field(default=..., examples=[""]) SC_PASSWORD: str = Field(default=..., examples=[""]) @@ -27,7 +27,7 @@ class LocustAuth(BaseSettings): class WebApiUser(FastHttpUser): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) - _auth = LocustAuth() + _auth = MonitoringBasicAuth() self.auth = ( _auth.SC_USER_NAME, _auth.SC_PASSWORD, diff --git a/tests/performance/locust_settings.py b/tests/performance/locust_settings.py index 24f896180fd..48c219871fe 100644 --- a/tests/performance/locust_settings.py +++ b/tests/performance/locust_settings.py @@ -1,10 +1,21 @@ +# /// script +# requires-python = ">=3.11" +# dependencies = [ +# "parse", +# "pydantic", +# "pydantic-settings", +# ] +# /// # pylint: disable=unused-argument # pylint: disable=no-self-use # pylint: disable=no-name-in-module +import importlib.util +import inspect import json from datetime import timedelta from pathlib import Path +from types import ModuleType from typing import Final from parse import Result, parse @@ -26,6 +37,37 @@ assert _LOCUST_FILES_DIR.is_dir() +def _check_load_and_instantiate_settings_classes(file_path: str): + module_name = Path(file_path).stem + spec = importlib.util.spec_from_file_location(module_name, file_path) + if spec is None or spec.loader is None: + msg = f"Invalid {file_path=}" + raise ValueError(msg) + + module: ModuleType = importlib.util.module_from_spec(spec) + + # Execute the module in its own namespace + try: + spec.loader.exec_module(module) + except Exception as e: + msg = f"Failed to load module {module_name} from {file_path}" + raise ValueError(msg) from e + + # Filter subclasses of BaseSettings + settings_classes = [ + obj + for _, obj in inspect.getmembers(module, inspect.isclass) + if issubclass(obj, BaseSettings) and obj is not BaseSettings + ] + + for settings_class in settings_classes: + try: + settings_class() + except Exception as e: + msg = f"Missing env vars for {settings_class.__name__} in {file_path=}: {e}" + raise ValueError(msg) from e + + class LocustSettings(BaseSettings): model_config = SettingsConfigDict(cli_parse_args=True) @@ -44,8 +86,8 @@ class LocustSettings(BaseSettings): LOCUST_RUN_TIME: timedelta LOCUST_SPAWN_RATE: PositiveInt = Field(default=20) - # Options for Timescale + Grafana Dashboards - # SEE https://github.com/SvenskaSpel/locust-plugins/blob/master/locust_plugins/timescale/ + # Timescale: Log and graph results using TimescaleDB and Grafana dashboards + # SEE https://github.com/SvenskaSpel/locust-plugins/tree/master/locust_plugins/dashboards # LOCUST_TIMESCALE: NonNegativeInt = Field( default=1, @@ -87,6 +129,10 @@ def _validate_locust_file(cls, v: Path) -> Path: if not v.is_relative_to(_LOCUST_FILES_DIR): msg = f"{v} must be a test file relative to {_LOCUST_FILES_DIR}" raise ValueError(msg) + + # NOTE: CHECK that all the env-vars are defined for this test + # _check_load_and_instantiate_settings_classes(f"{v}") + return v.relative_to(_TEST_DIR) @field_serializer("LOCUST_RUN_TIME") From 
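The `# /// script` header added to `locust_settings.py` is PEP 723 inline metadata: `uv run locust_settings.py` reads the block, resolves the listed dependencies into an ephemeral environment, and executes the script — which is why the Makefile's `config` target needs no pre-installed virtualenv. A minimal, hypothetical `check_env.py` showing the mechanism:

```python
# /// script
# requires-python = ">=3.11"
# dependencies = ["pydantic-settings"]
# ///
from pydantic_settings import BaseSettings


class ExampleSettings(BaseSettings):
    LOCUST_HOST: str = "http://127.0.0.1"


if __name__ == "__main__":
    # `uv run check_env.py` provisions pydantic-settings on the fly
    print(ExampleSettings().LOCUST_HOST)
```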
f8f67c9609bfa7a701d2f60f421d81a9d6ea1688 Mon Sep 17 00:00:00 2001 From: Pedro Crespo-Valero <32402063+pcrespov@users.noreply.github.com> Date: Tue, 12 Nov 2024 13:49:27 +0100 Subject: [PATCH 09/22] =?UTF-8?q?=E2=9C=85=20Extends=20test=5FEC2=5FINSTAN?= =?UTF-8?q?CES=5FALLOWED=5FTYPES=5Fempty=5Fnot=5Fallowed=20(#6705)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../tests/unit/test_core_settings.py | 34 ++++++++++++++++++- 1 file changed, 33 insertions(+), 1 deletion(-) diff --git a/services/autoscaling/tests/unit/test_core_settings.py b/services/autoscaling/tests/unit/test_core_settings.py index 9315c8fcfd1..e975d944f0b 100644 --- a/services/autoscaling/tests/unit/test_core_settings.py +++ b/services/autoscaling/tests/unit/test_core_settings.py @@ -4,6 +4,7 @@ import datetime import json +import os import pytest from faker import Faker @@ -197,11 +198,42 @@ def test_EC2_INSTANCES_ALLOWED_TYPES_passing_valid_image_tags( # noqa: N802 def test_EC2_INSTANCES_ALLOWED_TYPES_empty_not_allowed( # noqa: N802 app_environment: EnvVarsDict, monkeypatch: pytest.MonkeyPatch ): + assert app_environment["AUTOSCALING_EC2_INSTANCES"] == "{}" monkeypatch.setenv("EC2_INSTANCES_ALLOWED_TYPES", "{}") - with pytest.raises(ValidationError): + # test child settings + with pytest.raises(ValidationError) as err_info: + EC2InstancesSettings.create_from_envs() + + assert err_info.value.errors()[0]["loc"] == ("EC2_INSTANCES_ALLOWED_TYPES",) + + +def test_EC2_INSTANCES_ALLOWED_TYPES_empty_not_allowed_with_main_field_env_var( # noqa: N802 + app_environment: EnvVarsDict, monkeypatch: pytest.MonkeyPatch +): + assert os.environ["AUTOSCALING_EC2_INSTANCES"] == "{}" + monkeypatch.setenv("EC2_INSTANCES_ALLOWED_TYPES", "{}") + + # now as part of AUTOSCALING_EC2_INSTANCES: EC2InstancesSettings | None + with pytest.raises(ValidationError) as exc_before: + ApplicationSettings.create_from_envs(AUTOSCALING_EC2_INSTANCES={}) + + with pytest.raises(ValidationError) as exc_after: ApplicationSettings.create_from_envs() + assert exc_before.value.errors() == exc_after.value.errors() + + +def test_EC2_INSTANCES_ALLOWED_TYPES_empty_not_allowed_without_main_field_env_var( # noqa: N802 + app_environment: EnvVarsDict, monkeypatch: pytest.MonkeyPatch +): + monkeypatch.delenv("AUTOSCALING_EC2_INSTANCES") + monkeypatch.setenv("EC2_INSTANCES_ALLOWED_TYPES", "{}") + + # removing any value for AUTOSCALING_EC2_INSTANCES + settings = ApplicationSettings.create_from_envs() + assert settings.AUTOSCALING_EC2_INSTANCES is None + def test_invalid_instance_names( app_environment: EnvVarsDict, monkeypatch: pytest.MonkeyPatch, faker: Faker From f7e6d5b811129c7749673f2efada9da8163be5bf Mon Sep 17 00:00:00 2001 From: Matus Drobuliak <60785969+matusdrobuliak66@users.noreply.github.com> Date: Tue, 12 Nov 2024 15:30:27 +0100 Subject: [PATCH 10/22] =?UTF-8?q?=E2=99=BB=EF=B8=8F=20refactor=20listing?= =?UTF-8?q?=20folders=20(#6703)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- api/specs/web-server/_folders.py | 21 + .../src/models_library/folders.py | 8 + .../simcore_postgres_database/utils_sql.py | 6 + .../utils_workspaces_sql.py | 30 + .../db_access_layer.py | 9 +- .../api/v0/openapi.yaml | 64 ++ .../folders/_folders_api.py | 711 +++++++++--------- .../folders/_folders_db.py | 148 +++- .../folders/_folders_handlers.py | 41 + .../folders/_models.py | 35 +- .../workspaces/_workspaces_db.py | 38 +- .../04/folders/test_folders__full_search.py | 123 +++ 
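The tests in PATCH 09 pin down how pydantic reports a failing nested settings class: the child field name appears in the error location whether the child is built directly or through the parent field. A condensed sketch with illustrative stand-in classes (pydantic v2 syntax shown; the service itself may pin a different major version):

```python
from pydantic import BaseModel, ValidationError, field_validator


class EC2InstancesSettings(BaseModel):  # stand-in for the real settings class
    EC2_INSTANCES_ALLOWED_TYPES: dict[str, dict]

    @field_validator("EC2_INSTANCES_ALLOWED_TYPES")
    @classmethod
    def _must_not_be_empty(cls, v: dict) -> dict:
        if not v:
            msg = "EC2_INSTANCES_ALLOWED_TYPES cannot be empty"
            raise ValueError(msg)
        return v


class ApplicationSettings(BaseModel):
    AUTOSCALING_EC2_INSTANCES: EC2InstancesSettings | None


try:
    ApplicationSettings(AUTOSCALING_EC2_INSTANCES={"EC2_INSTANCES_ALLOWED_TYPES": {}})
except ValidationError as err:
    # the nested field shows up at the end of the error location,
    # which is what the tests above assert on
    assert err.errors()[0]["loc"][-1] == "EC2_INSTANCES_ALLOWED_TYPES"
```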
...st_workspaces__list_folders_full_search.py | 65 ++ 13 files changed, 879 insertions(+), 420 deletions(-) create mode 100644 packages/postgres-database/src/simcore_postgres_database/utils_sql.py create mode 100644 packages/postgres-database/src/simcore_postgres_database/utils_workspaces_sql.py create mode 100644 services/web/server/tests/unit/with_dbs/04/folders/test_folders__full_search.py create mode 100644 services/web/server/tests/unit/with_dbs/04/workspaces/test_workspaces__list_folders_full_search.py diff --git a/api/specs/web-server/_folders.py b/api/specs/web-server/_folders.py index 90f1ad3beb1..25eecea5cd0 100644 --- a/api/specs/web-server/_folders.py +++ b/api/specs/web-server/_folders.py @@ -63,6 +63,27 @@ async def list_folders( ... +@router.get( + "/folders:search", + response_model=Envelope[list[FolderGet]], +) +async def list_folders_full_search( + params: Annotated[PageQueryParameters, Depends()], + order_by: Annotated[ + Json, + Query( + description="Order by field (modified_at|name|description) and direction (asc|desc). The default sorting order is ascending.", + example='{"field": "name", "direction": "desc"}', + ), + ] = '{"field": "modified_at", "direction": "desc"}', + filters: Annotated[ + Json | None, + Query(description=FolderFilters.schema_json(indent=1)), + ] = None, +): + ... + + @router.get( "/folders/{folder_id}", response_model=Envelope[FolderGet], diff --git a/packages/models-library/src/models_library/folders.py b/packages/models-library/src/models_library/folders.py index 485e74b86c8..1d2b9622943 100644 --- a/packages/models-library/src/models_library/folders.py +++ b/packages/models-library/src/models_library/folders.py @@ -4,6 +4,7 @@ from pydantic import BaseModel, Field, PositiveInt, validator +from .access_rights import AccessRights from .users import GroupID, UserID from .utils.enums import StrAutoEnum from .workspaces import WorkspaceID @@ -66,3 +67,10 @@ class FolderDB(BaseModel): class Config: orm_mode = True + + +class UserFolderAccessRightsDB(FolderDB): + my_access_rights: AccessRights + + class Config: + orm_mode = True diff --git a/packages/postgres-database/src/simcore_postgres_database/utils_sql.py b/packages/postgres-database/src/simcore_postgres_database/utils_sql.py new file mode 100644 index 00000000000..e3d4e1438af --- /dev/null +++ b/packages/postgres-database/src/simcore_postgres_database/utils_sql.py @@ -0,0 +1,6 @@ +def assemble_array_groups(user_group_ids: list[int]) -> str: + return ( + "array[]::text[]" + if len(user_group_ids) == 0 + else f"""array[{', '.join(f"'{group_id}'" for group_id in user_group_ids)}]""" + ) diff --git a/packages/postgres-database/src/simcore_postgres_database/utils_workspaces_sql.py b/packages/postgres-database/src/simcore_postgres_database/utils_workspaces_sql.py new file mode 100644 index 00000000000..05b24d969bd --- /dev/null +++ b/packages/postgres-database/src/simcore_postgres_database/utils_workspaces_sql.py @@ -0,0 +1,30 @@ +from simcore_postgres_database.models.groups import user_to_groups +from simcore_postgres_database.models.workspaces_access_rights import ( + workspaces_access_rights, +) +from sqlalchemy import func +from sqlalchemy.dialects.postgresql import BOOLEAN, INTEGER +from sqlalchemy.sql import Subquery, select + + +def create_my_workspace_access_rights_subquery(user_id: int) -> Subquery: + return ( + select( + workspaces_access_rights.c.workspace_id, + func.json_build_object( + "read", + func.max(workspaces_access_rights.c.read.cast(INTEGER)).cast(BOOLEAN), + "write", + 
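`assemble_array_groups`, factored out above into `utils_sql.py`, renders a list of group ids as a raw Postgres text-array literal for splicing into textual SQL (the string interpolation is not an injection vector here because the ids are server-side integers, not user input). Its behaviour, read directly off the definition:

```python
# expected behaviour of assemble_array_groups as defined above
assert assemble_array_groups([]) == "array[]::text[]"
assert assemble_array_groups([3, 7]) == "array['3', '7']"
```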
func.max(workspaces_access_rights.c.write.cast(INTEGER)).cast(BOOLEAN), + "delete", + func.max(workspaces_access_rights.c.delete.cast(INTEGER)).cast(BOOLEAN), + ).label("my_access_rights"), + ) + .select_from( + workspaces_access_rights.join( + user_to_groups, user_to_groups.c.gid == workspaces_access_rights.c.gid + ) + ) + .where(user_to_groups.c.uid == user_id) + .group_by(workspaces_access_rights.c.workspace_id) + ).subquery("my_workspace_access_rights_subquery") diff --git a/services/storage/src/simcore_service_storage/db_access_layer.py b/services/storage/src/simcore_service_storage/db_access_layer.py index 19452862de5..b77504088f1 100644 --- a/services/storage/src/simcore_service_storage/db_access_layer.py +++ b/services/storage/src/simcore_service_storage/db_access_layer.py @@ -51,6 +51,7 @@ workspaces_access_rights, ) from simcore_postgres_database.storage_models import file_meta_data, user_to_groups +from simcore_postgres_database.utils_sql import assemble_array_groups logger = logging.getLogger(__name__) @@ -117,14 +118,6 @@ def _aggregate_access_rights( return AccessRights.none() -def assemble_array_groups(user_group_ids: list[GroupID]) -> str: - return ( - "array[]::text[]" - if len(user_group_ids) == 0 - else f"""array[{', '.join(f"'{group_id}'" for group_id in user_group_ids)}]""" - ) - - access_rights_subquery = ( sa.select( project_to_groups.c.project_uuid, diff --git a/services/web/server/src/simcore_service_webserver/api/v0/openapi.yaml b/services/web/server/src/simcore_service_webserver/api/v0/openapi.yaml index dafb3f8fb08..40d0841c65a 100644 --- a/services/web/server/src/simcore_service_webserver/api/v0/openapi.yaml +++ b/services/web/server/src/simcore_service_webserver/api/v0/openapi.yaml @@ -2690,6 +2690,70 @@ paths: application/json: schema: $ref: '#/components/schemas/Envelope_FolderGet_' + /v0/folders:search: + get: + tags: + - folders + summary: List Folders Full Search + operationId: list_folders_full_search + parameters: + - description: Order by field (modified_at|name|description) and direction (asc|desc). + The default sorting order is ascending. + required: false + schema: + title: Order By + description: Order by field (modified_at|name|description) and direction + (asc|desc). The default sorting order is ascending. + default: '{"field": "modified_at", "direction": "desc"}' + example: '{"field": "name", "direction": "desc"}' + name: order_by + in: query + - description: "{\n \"title\": \"FolderFilters\",\n \"description\": \"Encoded\ + \ as JSON. Each available filter can have its own logic (should be well\ + \ documented)\\nInspired by Docker API https://docs.docker.com/engine/api/v1.43/#tag/Container/operation/ContainerList.\"\ + ,\n \"type\": \"object\",\n \"properties\": {\n \"trashed\": {\n \"title\"\ + : \"Trashed\",\n \"description\": \"Set to true to list trashed, false\ + \ to list non-trashed (default), None to list all\",\n \"default\": false,\n\ + \ \"type\": \"boolean\"\n }\n }\n}" + required: false + schema: + title: Filters + type: string + description: "{\n \"title\": \"FolderFilters\",\n \"description\": \"Encoded\ + \ as JSON. 
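The `/v0/folders:search` fragment above encodes `order_by` and `filters` as JSON strings inside ordinary query parameters. A hedged client-side sketch — host, port, and auth handling are illustrative, and the response envelope follows the webserver's usual `{"data": ..., "error": ...}` shape:

```python
import json

import httpx


async def search_folders() -> list[dict]:
    # assumes a running webserver and an authenticated session/cookies
    async with httpx.AsyncClient(base_url="http://localhost:9081") as client:
        resp = await client.get(
            "/v0/folders:search",
            params={
                "order_by": json.dumps({"field": "name", "direction": "desc"}),
                "filters": json.dumps({"trashed": False}),
                "limit": 20,
                "offset": 0,
            },
        )
        resp.raise_for_status()
        return resp.json()["data"]
```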
Each available filter can have its own logic (should be well\ + \ documented)\\nInspired by Docker API https://docs.docker.com/engine/api/v1.43/#tag/Container/operation/ContainerList.\"\ + ,\n \"type\": \"object\",\n \"properties\": {\n \"trashed\": {\n \"\ + title\": \"Trashed\",\n \"description\": \"Set to true to list trashed,\ + \ false to list non-trashed (default), None to list all\",\n \"default\"\ + : false,\n \"type\": \"boolean\"\n }\n }\n}" + format: json-string + name: filters + in: query + - required: false + schema: + title: Limit + exclusiveMaximum: true + minimum: 1 + type: integer + default: 20 + maximum: 50 + name: limit + in: query + - required: false + schema: + title: Offset + minimum: 0 + type: integer + default: 0 + name: offset + in: query + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/Envelope_list_models_library.api_schemas_webserver.folders_v2.FolderGet__' /v0/folders/{folder_id}: get: tags: diff --git a/services/web/server/src/simcore_service_webserver/folders/_folders_api.py b/services/web/server/src/simcore_service_webserver/folders/_folders_api.py index 043527d2def..a791a65c715 100644 --- a/services/web/server/src/simcore_service_webserver/folders/_folders_api.py +++ b/services/web/server/src/simcore_service_webserver/folders/_folders_api.py @@ -1,341 +1,370 @@ -# pylint: disable=unused-argument - -import logging - -from aiohttp import web -from models_library.access_rights import AccessRights -from models_library.api_schemas_webserver.folders_v2 import FolderGet, FolderGetPage -from models_library.folders import FolderID -from models_library.products import ProductName -from models_library.projects import ProjectID -from models_library.rest_ordering import OrderBy -from models_library.users import UserID -from models_library.workspaces import WorkspaceID -from pydantic import NonNegativeInt -from servicelib.aiohttp.application_keys import APP_FIRE_AND_FORGET_TASKS_KEY -from servicelib.common_headers import UNDEFINED_DEFAULT_SIMCORE_USER_AGENT_VALUE -from servicelib.utils import fire_and_forget_task - -from ..folders.errors import FolderValueNotPermittedError -from ..projects.projects_api import submit_delete_project_task -from ..users.api import get_user -from ..workspaces.api import check_user_workspace_access -from ..workspaces.errors import ( - WorkspaceAccessForbiddenError, - WorkspaceFolderInconsistencyError, -) -from . 
import _folders_db as folders_db - -_logger = logging.getLogger(__name__) - - -async def create_folder( - app: web.Application, - user_id: UserID, - name: str, - parent_folder_id: FolderID | None, - product_name: ProductName, - workspace_id: WorkspaceID | None, -) -> FolderGet: - user = await get_user(app, user_id=user_id) - - workspace_is_private = True - user_folder_access_rights = AccessRights(read=True, write=True, delete=True) - if workspace_id: - user_workspace_access_rights = await check_user_workspace_access( - app, - user_id=user_id, - workspace_id=workspace_id, - product_name=product_name, - permission="write", - ) - workspace_is_private = False - user_folder_access_rights = user_workspace_access_rights.my_access_rights - - # Check parent_folder_id lives in the workspace - if parent_folder_id: - parent_folder_db = await folders_db.get( - app, folder_id=parent_folder_id, product_name=product_name - ) - if parent_folder_db.workspace_id != workspace_id: - raise WorkspaceFolderInconsistencyError( - folder_id=parent_folder_id, workspace_id=workspace_id - ) - - if parent_folder_id: - # Check user has access to the parent folder - parent_folder_db = await folders_db.get_for_user_or_workspace( - app, - folder_id=parent_folder_id, - product_name=product_name, - user_id=user_id if workspace_is_private else None, - workspace_id=workspace_id, - ) - if workspace_id and parent_folder_db.workspace_id != workspace_id: - # Check parent folder id exists inside the same workspace - raise WorkspaceAccessForbiddenError( - reason=f"Folder {parent_folder_id} does not exists in workspace {workspace_id}." - ) - - folder_db = await folders_db.create( - app, - product_name=product_name, - created_by_gid=user["primary_gid"], - folder_name=name, - parent_folder_id=parent_folder_id, - user_id=user_id if workspace_is_private else None, - workspace_id=workspace_id, - ) - return FolderGet( - folder_id=folder_db.folder_id, - parent_folder_id=folder_db.parent_folder_id, - name=folder_db.name, - created_at=folder_db.created, - modified_at=folder_db.modified, - trashed_at=folder_db.trashed_at, - owner=folder_db.created_by_gid, - workspace_id=workspace_id, - my_access_rights=user_folder_access_rights, - ) - - -async def get_folder( - app: web.Application, - user_id: UserID, - folder_id: FolderID, - product_name: ProductName, -) -> FolderGet: - folder_db = await folders_db.get( - app, folder_id=folder_id, product_name=product_name - ) - - workspace_is_private = True - user_folder_access_rights = AccessRights(read=True, write=True, delete=True) - if folder_db.workspace_id: - user_workspace_access_rights = await check_user_workspace_access( - app, - user_id=user_id, - workspace_id=folder_db.workspace_id, - product_name=product_name, - permission="read", - ) - workspace_is_private = False - user_folder_access_rights = user_workspace_access_rights.my_access_rights - - folder_db = await folders_db.get_for_user_or_workspace( - app, - folder_id=folder_id, - product_name=product_name, - user_id=user_id if workspace_is_private else None, - workspace_id=folder_db.workspace_id, - ) - return FolderGet( - folder_id=folder_db.folder_id, - parent_folder_id=folder_db.parent_folder_id, - name=folder_db.name, - created_at=folder_db.created, - modified_at=folder_db.modified, - trashed_at=folder_db.trashed_at, - owner=folder_db.created_by_gid, - workspace_id=folder_db.workspace_id, - my_access_rights=user_folder_access_rights, - ) - - -async def list_folders( - app: web.Application, - user_id: UserID, - product_name: ProductName, - 
folder_id: FolderID | None, - workspace_id: WorkspaceID | None, - trashed: bool | None, - offset: NonNegativeInt, - limit: int, - order_by: OrderBy, -) -> FolderGetPage: - workspace_is_private = True - user_folder_access_rights = AccessRights(read=True, write=True, delete=True) - - if workspace_id: - user_workspace_access_rights = await check_user_workspace_access( - app, - user_id=user_id, - workspace_id=workspace_id, - product_name=product_name, - permission="read", - ) - workspace_is_private = False - user_folder_access_rights = user_workspace_access_rights.my_access_rights - - if folder_id: - # Check user access to folder - await folders_db.get_for_user_or_workspace( - app, - folder_id=folder_id, - product_name=product_name, - user_id=user_id if workspace_is_private else None, - workspace_id=workspace_id, - ) - - total_count, folders = await folders_db.list_( - app, - content_of_folder_id=folder_id, - user_id=user_id if workspace_is_private else None, - workspace_id=workspace_id, - product_name=product_name, - trashed=trashed, - offset=offset, - limit=limit, - order_by=order_by, - ) - return FolderGetPage( - items=[ - FolderGet( - folder_id=folder.folder_id, - parent_folder_id=folder.parent_folder_id, - name=folder.name, - created_at=folder.created, - modified_at=folder.modified, - trashed_at=folder.trashed_at, - owner=folder.created_by_gid, - workspace_id=folder.workspace_id, - my_access_rights=user_folder_access_rights, - ) - for folder in folders - ], - total=total_count, - ) - - -async def update_folder( - app: web.Application, - user_id: UserID, - folder_id: FolderID, - *, - name: str, - parent_folder_id: FolderID | None, - product_name: ProductName, -) -> FolderGet: - folder_db = await folders_db.get( - app, folder_id=folder_id, product_name=product_name - ) - - workspace_is_private = True - user_folder_access_rights = AccessRights(read=True, write=True, delete=True) - if folder_db.workspace_id: - user_workspace_access_rights = await check_user_workspace_access( - app, - user_id=user_id, - workspace_id=folder_db.workspace_id, - product_name=product_name, - permission="write", - ) - workspace_is_private = False - user_folder_access_rights = user_workspace_access_rights.my_access_rights - - # Check user has access to the folder - await folders_db.get_for_user_or_workspace( - app, - folder_id=folder_id, - product_name=product_name, - user_id=user_id if workspace_is_private else None, - workspace_id=folder_db.workspace_id, - ) - - if folder_db.parent_folder_id != parent_folder_id and parent_folder_id is not None: - # Check user has access to the parent folder - await folders_db.get_for_user_or_workspace( - app, - folder_id=parent_folder_id, - product_name=product_name, - user_id=user_id if workspace_is_private else None, - workspace_id=folder_db.workspace_id, - ) - # Do not allow to move to a child folder id - _child_folders = await folders_db.get_folders_recursively( - app, folder_id=folder_id, product_name=product_name - ) - if parent_folder_id in _child_folders: - raise FolderValueNotPermittedError( - reason="Parent folder id should not be one of children" - ) - - folder_db = await folders_db.update( - app, - folders_id_or_ids=folder_id, - name=name, - parent_folder_id=parent_folder_id, - product_name=product_name, - ) - return FolderGet( - folder_id=folder_db.folder_id, - parent_folder_id=folder_db.parent_folder_id, - name=folder_db.name, - created_at=folder_db.created, - modified_at=folder_db.modified, - trashed_at=folder_db.trashed_at, - owner=folder_db.created_by_gid, - 
workspace_id=folder_db.workspace_id, - my_access_rights=user_folder_access_rights, - ) - - -async def delete_folder( - app: web.Application, - user_id: UserID, - folder_id: FolderID, - product_name: ProductName, -) -> None: - folder_db = await folders_db.get( - app, folder_id=folder_id, product_name=product_name - ) - - workspace_is_private = True - if folder_db.workspace_id: - await check_user_workspace_access( - app, - user_id=user_id, - workspace_id=folder_db.workspace_id, - product_name=product_name, - permission="delete", - ) - workspace_is_private = False - - # Check user has access to the folder - await folders_db.get_for_user_or_workspace( - app, - folder_id=folder_id, - product_name=product_name, - user_id=user_id if workspace_is_private else None, - workspace_id=folder_db.workspace_id, - ) - - # 1. Delete folder content - # 1.1 Delete all child projects that I am an owner - project_id_list: list[ - ProjectID - ] = await folders_db.get_projects_recursively_only_if_user_is_owner( - app, - folder_id=folder_id, - private_workspace_user_id_or_none=user_id if workspace_is_private else None, - user_id=user_id, - product_name=product_name, - ) - - # fire and forget task for project deletion - for project_id in project_id_list: - fire_and_forget_task( - submit_delete_project_task( - app, - project_uuid=project_id, - user_id=user_id, - simcore_user_agent=UNDEFINED_DEFAULT_SIMCORE_USER_AGENT_VALUE, - ), - task_suffix_name=f"delete_project_task_{project_id}", - fire_and_forget_tasks_collection=app[APP_FIRE_AND_FORGET_TASKS_KEY], - ) - - # 1.2 Delete all child folders - await folders_db.delete_recursively( - app, folder_id=folder_id, product_name=product_name - ) +# pylint: disable=unused-argument + +import logging + +from aiohttp import web +from models_library.access_rights import AccessRights +from models_library.api_schemas_webserver.folders_v2 import FolderGet, FolderGetPage +from models_library.folders import FolderID, FolderQuery, FolderScope +from models_library.products import ProductName +from models_library.projects import ProjectID +from models_library.rest_ordering import OrderBy +from models_library.users import UserID +from models_library.workspaces import WorkspaceID, WorkspaceQuery, WorkspaceScope +from pydantic import NonNegativeInt +from servicelib.aiohttp.application_keys import APP_FIRE_AND_FORGET_TASKS_KEY +from servicelib.common_headers import UNDEFINED_DEFAULT_SIMCORE_USER_AGENT_VALUE +from servicelib.utils import fire_and_forget_task + +from ..folders.errors import FolderValueNotPermittedError +from ..projects.projects_api import submit_delete_project_task +from ..users.api import get_user +from ..workspaces.api import check_user_workspace_access +from ..workspaces.errors import ( + WorkspaceAccessForbiddenError, + WorkspaceFolderInconsistencyError, +) +from . 
import _folders_db as folders_db + +_logger = logging.getLogger(__name__) + + +async def create_folder( + app: web.Application, + user_id: UserID, + name: str, + parent_folder_id: FolderID | None, + product_name: ProductName, + workspace_id: WorkspaceID | None, +) -> FolderGet: + user = await get_user(app, user_id=user_id) + + workspace_is_private = True + user_folder_access_rights = AccessRights(read=True, write=True, delete=True) + if workspace_id: + user_workspace_access_rights = await check_user_workspace_access( + app, + user_id=user_id, + workspace_id=workspace_id, + product_name=product_name, + permission="write", + ) + workspace_is_private = False + user_folder_access_rights = user_workspace_access_rights.my_access_rights + + # Check parent_folder_id lives in the workspace + if parent_folder_id: + parent_folder_db = await folders_db.get( + app, folder_id=parent_folder_id, product_name=product_name + ) + if parent_folder_db.workspace_id != workspace_id: + raise WorkspaceFolderInconsistencyError( + folder_id=parent_folder_id, workspace_id=workspace_id + ) + + if parent_folder_id: + # Check user has access to the parent folder + parent_folder_db = await folders_db.get_for_user_or_workspace( + app, + folder_id=parent_folder_id, + product_name=product_name, + user_id=user_id if workspace_is_private else None, + workspace_id=workspace_id, + ) + if workspace_id and parent_folder_db.workspace_id != workspace_id: + # Check parent folder id exists inside the same workspace + raise WorkspaceAccessForbiddenError( + reason=f"Folder {parent_folder_id} does not exists in workspace {workspace_id}." + ) + + folder_db = await folders_db.create( + app, + product_name=product_name, + created_by_gid=user["primary_gid"], + folder_name=name, + parent_folder_id=parent_folder_id, + user_id=user_id if workspace_is_private else None, + workspace_id=workspace_id, + ) + return FolderGet( + folder_id=folder_db.folder_id, + parent_folder_id=folder_db.parent_folder_id, + name=folder_db.name, + created_at=folder_db.created, + modified_at=folder_db.modified, + trashed_at=folder_db.trashed_at, + owner=folder_db.created_by_gid, + workspace_id=workspace_id, + my_access_rights=user_folder_access_rights, + ) + + +async def get_folder( + app: web.Application, + user_id: UserID, + folder_id: FolderID, + product_name: ProductName, +) -> FolderGet: + folder_db = await folders_db.get( + app, folder_id=folder_id, product_name=product_name + ) + + workspace_is_private = True + user_folder_access_rights = AccessRights(read=True, write=True, delete=True) + if folder_db.workspace_id: + user_workspace_access_rights = await check_user_workspace_access( + app, + user_id=user_id, + workspace_id=folder_db.workspace_id, + product_name=product_name, + permission="read", + ) + workspace_is_private = False + user_folder_access_rights = user_workspace_access_rights.my_access_rights + + folder_db = await folders_db.get_for_user_or_workspace( + app, + folder_id=folder_id, + product_name=product_name, + user_id=user_id if workspace_is_private else None, + workspace_id=folder_db.workspace_id, + ) + return FolderGet( + folder_id=folder_db.folder_id, + parent_folder_id=folder_db.parent_folder_id, + name=folder_db.name, + created_at=folder_db.created, + modified_at=folder_db.modified, + trashed_at=folder_db.trashed_at, + owner=folder_db.created_by_gid, + workspace_id=folder_db.workspace_id, + my_access_rights=user_folder_access_rights, + ) + + +async def list_folders( + app: web.Application, + user_id: UserID, + product_name: ProductName, + 
folder_id: FolderID | None, + workspace_id: WorkspaceID | None, + trashed: bool | None, + offset: NonNegativeInt, + limit: int, + order_by: OrderBy, +) -> FolderGetPage: + # NOTE: Folder access rights for listing are checked within the listing DB function. + + total_count, folders = await folders_db.list_( + app, + product_name=product_name, + user_id=user_id, + folder_query=( + FolderQuery(folder_scope=FolderScope.SPECIFIC, folder_id=folder_id) + if folder_id + else FolderQuery(folder_scope=FolderScope.ROOT) + ), + workspace_query=( + WorkspaceQuery( + workspace_scope=WorkspaceScope.SHARED, workspace_id=workspace_id + ) + if workspace_id + else WorkspaceQuery(workspace_scope=WorkspaceScope.PRIVATE) + ), + filter_trashed=trashed, + offset=offset, + limit=limit, + order_by=order_by, + ) + return FolderGetPage( + items=[ + FolderGet( + folder_id=folder.folder_id, + parent_folder_id=folder.parent_folder_id, + name=folder.name, + created_at=folder.created, + modified_at=folder.modified, + trashed_at=folder.trashed_at, + owner=folder.created_by_gid, + workspace_id=folder.workspace_id, + my_access_rights=folder.my_access_rights, + ) + for folder in folders + ], + total=total_count, + ) + + +async def list_folders_full_search( + app: web.Application, + user_id: UserID, + product_name: ProductName, + trashed: bool | None, + offset: NonNegativeInt, + limit: int, + order_by: OrderBy, +) -> FolderGetPage: + # NOTE: Folder access rights for listing are checked within the listing DB function. + + total_count, folders = await folders_db.list_( + app, + product_name=product_name, + user_id=user_id, + folder_query=FolderQuery(folder_scope=FolderScope.ALL), + workspace_query=WorkspaceQuery(workspace_scope=WorkspaceScope.ALL), + filter_trashed=trashed, + offset=offset, + limit=limit, + order_by=order_by, + ) + return FolderGetPage( + items=[ + FolderGet( + folder_id=folder.folder_id, + parent_folder_id=folder.parent_folder_id, + name=folder.name, + created_at=folder.created, + modified_at=folder.modified, + trashed_at=folder.trashed_at, + owner=folder.created_by_gid, + workspace_id=folder.workspace_id, + my_access_rights=folder.my_access_rights, + ) + for folder in folders + ], + total=total_count, + ) + + +async def update_folder( + app: web.Application, + user_id: UserID, + folder_id: FolderID, + *, + name: str, + parent_folder_id: FolderID | None, + product_name: ProductName, +) -> FolderGet: + folder_db = await folders_db.get( + app, folder_id=folder_id, product_name=product_name + ) + + workspace_is_private = True + user_folder_access_rights = AccessRights(read=True, write=True, delete=True) + if folder_db.workspace_id: + user_workspace_access_rights = await check_user_workspace_access( + app, + user_id=user_id, + workspace_id=folder_db.workspace_id, + product_name=product_name, + permission="write", + ) + workspace_is_private = False + user_folder_access_rights = user_workspace_access_rights.my_access_rights + + # Check user has access to the folder + await folders_db.get_for_user_or_workspace( + app, + folder_id=folder_id, + product_name=product_name, + user_id=user_id if workspace_is_private else None, + workspace_id=folder_db.workspace_id, + ) + + if folder_db.parent_folder_id != parent_folder_id and parent_folder_id is not None: + # Check user has access to the parent folder + await folders_db.get_for_user_or_workspace( + app, + folder_id=parent_folder_id, + product_name=product_name, + user_id=user_id if workspace_is_private else None, + workspace_id=folder_db.workspace_id, + ) + # Do not allow 
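`list_folders` and `list_folders_full_search` above now express "where to list" through `FolderQuery`/`WorkspaceQuery` instead of nullable ids. The real models live in `models_library` and are only imported here; a hedged sketch of their likely shape (pydantic v1 style, matching the surrounding code):

```python
from enum import Enum

from pydantic import BaseModel, root_validator


class FolderScope(str, Enum):  # illustrative; the real enum derives from StrAutoEnum
    ROOT = "root"
    SPECIFIC = "specific"
    ALL = "all"


class FolderQuery(BaseModel):
    folder_scope: FolderScope
    folder_id: int | None = None

    @root_validator
    def _require_id_for_specific_scope(cls, values):
        if (
            values.get("folder_scope") == FolderScope.SPECIFIC
            and values.get("folder_id") is None
        ):
            msg = "folder_id is required when folder_scope is SPECIFIC"
            raise ValueError(msg)
        return values


root_listing = FolderQuery(folder_scope=FolderScope.ROOT)
subfolder_listing = FolderQuery(folder_scope=FolderScope.SPECIFIC, folder_id=42)
```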
to move to a child folder id + _child_folders = await folders_db.get_folders_recursively( + app, folder_id=folder_id, product_name=product_name + ) + if parent_folder_id in _child_folders: + raise FolderValueNotPermittedError( + reason="Parent folder id should not be one of children" + ) + + folder_db = await folders_db.update( + app, + folders_id_or_ids=folder_id, + name=name, + parent_folder_id=parent_folder_id, + product_name=product_name, + ) + return FolderGet( + folder_id=folder_db.folder_id, + parent_folder_id=folder_db.parent_folder_id, + name=folder_db.name, + created_at=folder_db.created, + modified_at=folder_db.modified, + trashed_at=folder_db.trashed_at, + owner=folder_db.created_by_gid, + workspace_id=folder_db.workspace_id, + my_access_rights=user_folder_access_rights, + ) + + +async def delete_folder( + app: web.Application, + user_id: UserID, + folder_id: FolderID, + product_name: ProductName, +) -> None: + folder_db = await folders_db.get( + app, folder_id=folder_id, product_name=product_name + ) + + workspace_is_private = True + if folder_db.workspace_id: + await check_user_workspace_access( + app, + user_id=user_id, + workspace_id=folder_db.workspace_id, + product_name=product_name, + permission="delete", + ) + workspace_is_private = False + + # Check user has access to the folder + await folders_db.get_for_user_or_workspace( + app, + folder_id=folder_id, + product_name=product_name, + user_id=user_id if workspace_is_private else None, + workspace_id=folder_db.workspace_id, + ) + + # 1. Delete folder content + # 1.1 Delete all child projects that I am an owner + project_id_list: list[ + ProjectID + ] = await folders_db.get_projects_recursively_only_if_user_is_owner( + app, + folder_id=folder_id, + private_workspace_user_id_or_none=user_id if workspace_is_private else None, + user_id=user_id, + product_name=product_name, + ) + + # fire and forget task for project deletion + for project_id in project_id_list: + fire_and_forget_task( + submit_delete_project_task( + app, + project_uuid=project_id, + user_id=user_id, + simcore_user_agent=UNDEFINED_DEFAULT_SIMCORE_USER_AGENT_VALUE, + ), + task_suffix_name=f"delete_project_task_{project_id}", + fire_and_forget_tasks_collection=app[APP_FIRE_AND_FORGET_TASKS_KEY], + ) + + # 1.2 Delete all child folders + await folders_db.delete_recursively( + app, folder_id=folder_id, product_name=product_name + ) diff --git a/services/web/server/src/simcore_service_webserver/folders/_folders_db.py b/services/web/server/src/simcore_service_webserver/folders/_folders_db.py index 561bcb64c9e..0af9d36dadf 100644 --- a/services/web/server/src/simcore_service_webserver/folders/_folders_db.py +++ b/services/web/server/src/simcore_service_webserver/folders/_folders_db.py @@ -8,13 +8,20 @@ from datetime import datetime from typing import Any, Final, cast +import sqlalchemy as sa from aiohttp import web -from models_library.folders import FolderDB, FolderID +from models_library.folders import ( + FolderDB, + FolderID, + FolderQuery, + FolderScope, + UserFolderAccessRightsDB, +) from models_library.products import ProductName from models_library.projects import ProjectID from models_library.rest_ordering import OrderBy, OrderDirection from models_library.users import GroupID, UserID -from models_library.workspaces import WorkspaceID +from models_library.workspaces import WorkspaceID, WorkspaceQuery, WorkspaceScope from pydantic import NonNegativeInt from simcore_postgres_database.models.folders_v2 import folders_v2 from 
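`delete_folder` above submits child-project deletions through `fire_and_forget_task`, so the HTTP request returns while deletions proceed in the background. The helper comes from `servicelib.utils`; a sketch of the underlying pattern (the real helper may add logging around failures):

```python
import asyncio
from collections.abc import Coroutine
from typing import Any


def fire_and_forget_task(
    coro: Coroutine[Any, Any, Any],
    *,
    task_suffix_name: str,
    fire_and_forget_tasks_collection: set[asyncio.Task],
) -> asyncio.Task:
    # The collection keeps a strong reference so the event loop cannot
    # garbage-collect the task mid-flight; the callback drops it on completion.
    task = asyncio.create_task(coro, name=f"fire_and_forget_task_{task_suffix_name}")
    fire_and_forget_tasks_collection.add(task)
    task.add_done_callback(fire_and_forget_tasks_collection.discard)
    return task
```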
simcore_postgres_database.models.projects import projects @@ -23,10 +30,13 @@ pass_or_acquire_connection, transaction_context, ) +from simcore_postgres_database.utils_workspaces_sql import ( + create_my_workspace_access_rights_subquery, +) from sqlalchemy import func from sqlalchemy.ext.asyncio import AsyncConnection from sqlalchemy.orm import aliased -from sqlalchemy.sql import asc, desc, select +from sqlalchemy.sql import ColumnElement, CompoundSelect, Select, asc, desc, select from ..db.plugin import get_asyncpg_engine from .errors import FolderAccessForbiddenError, FolderNotFoundError @@ -92,68 +102,142 @@ async def create( return FolderDB.from_orm(row) -async def list_( +async def list_( # pylint: disable=too-many-arguments,too-many-branches app: web.Application, connection: AsyncConnection | None = None, *, - content_of_folder_id: FolderID | None, - user_id: UserID | None, - workspace_id: WorkspaceID | None, product_name: ProductName, - trashed: bool | None, + user_id: UserID, + # hierarchy filters + folder_query: FolderQuery, + workspace_query: WorkspaceQuery, + # attribute filters + filter_trashed: bool | None, + # pagination offset: NonNegativeInt, limit: int, + # order order_by: OrderBy, -) -> tuple[int, list[FolderDB]]: +) -> tuple[int, list[UserFolderAccessRightsDB]]: """ - content_of_folder_id - Used to filter in which folder we want to list folders. None means root folder. + folder_query - Used to filter in which folder we want to list folders. trashed - If set to true, it returns folders **explicitly** trashed, if false then non-trashed folders. """ - assert not ( # nosec - user_id is not None and workspace_id is not None - ), "Both user_id and workspace_id cannot be provided at the same time. Please provide only one." - base_query = ( - select(*_SELECTION_ARGS) - .select_from(folders_v2) - .where( - (folders_v2.c.product_name == product_name) - & (folders_v2.c.parent_folder_id == content_of_folder_id) - ) + workspace_access_rights_subquery = create_my_workspace_access_rights_subquery( + user_id=user_id ) - if user_id: - base_query = base_query.where(folders_v2.c.user_id == user_id) + if workspace_query.workspace_scope is not WorkspaceScope.SHARED: + assert workspace_query.workspace_scope in ( # nosec + WorkspaceScope.PRIVATE, + WorkspaceScope.ALL, + ) + + private_workspace_query = ( + select( + *_SELECTION_ARGS, + func.json_build_object( + "read", + sa.text("true"), + "write", + sa.text("true"), + "delete", + sa.text("true"), + ).label("my_access_rights"), + ) + .select_from(folders_v2) + .where( + (folders_v2.c.product_name == product_name) + & (folders_v2.c.user_id == user_id) + ) + ) + else: + private_workspace_query = None + + if workspace_query.workspace_scope is not WorkspaceScope.PRIVATE: + assert workspace_query.workspace_scope in ( # nosec + WorkspaceScope.SHARED, + WorkspaceScope.ALL, + ) + + shared_workspace_query = ( + select( + *_SELECTION_ARGS, workspace_access_rights_subquery.c.my_access_rights + ) + .select_from( + folders_v2.join( + workspace_access_rights_subquery, + folders_v2.c.workspace_id + == workspace_access_rights_subquery.c.workspace_id, + ) + ) + .where( + (folders_v2.c.product_name == product_name) + & (folders_v2.c.user_id.is_(None)) + ) + ) else: - assert workspace_id # nosec - base_query = base_query.where(folders_v2.c.workspace_id == workspace_id) + shared_workspace_query = None + + attributes_filters: list[ColumnElement] = [] - if trashed is not None: - base_query = base_query.where( + if filter_trashed is not None: + 
attributes_filters.append( ( (folders_v2.c.trashed_at.is_not(None)) & (folders_v2.c.trashed_explicitly.is_(True)) ) - if trashed + if filter_trashed else folders_v2.c.trashed_at.is_(None) ) + if folder_query.folder_scope is not FolderScope.ALL: + if folder_query.folder_scope == FolderScope.SPECIFIC: + attributes_filters.append( + folders_v2.c.parent_folder_id == folder_query.folder_id + ) + else: + assert folder_query.folder_scope == FolderScope.ROOT # nosec + attributes_filters.append(folders_v2.c.parent_folder_id.is_(None)) + + ### + # Combined + ### + + combined_query: CompoundSelect | Select | None = None + if private_workspace_query is not None and shared_workspace_query is not None: + combined_query = sa.union_all( + private_workspace_query.where(sa.and_(*attributes_filters)), + shared_workspace_query.where(sa.and_(*attributes_filters)), + ) + elif private_workspace_query is not None: + combined_query = private_workspace_query.where(sa.and_(*attributes_filters)) + elif shared_workspace_query is not None: + combined_query = shared_workspace_query.where(sa.and_(*attributes_filters)) + + if combined_query is None: + msg = f"No valid queries were provided to combine. Workspace scope: {workspace_query.workspace_scope}" + raise ValueError(msg) # Select total count from base_query - subquery = base_query.subquery() - count_query = select(func.count()).select_from(subquery) + count_query = select(func.count()).select_from(combined_query.subquery()) # Ordering and pagination if order_by.direction == OrderDirection.ASC: - list_query = base_query.order_by(asc(getattr(folders_v2.c, order_by.field))) + list_query = combined_query.order_by(asc(getattr(folders_v2.c, order_by.field))) else: - list_query = base_query.order_by(desc(getattr(folders_v2.c, order_by.field))) + list_query = combined_query.order_by( + desc(getattr(folders_v2.c, order_by.field)) + ) list_query = list_query.offset(offset).limit(limit) async with pass_or_acquire_connection(get_asyncpg_engine(app), connection) as conn: total_count = await conn.scalar(count_query) result = await conn.stream(list_query) - folders: list[FolderDB] = [FolderDB.from_orm(row) async for row in result] + folders: list[UserFolderAccessRightsDB] = [ + UserFolderAccessRightsDB.from_orm(row) async for row in result + ] return cast(int, total_count), folders diff --git a/services/web/server/src/simcore_service_webserver/folders/_folders_handlers.py b/services/web/server/src/simcore_service_webserver/folders/_folders_handlers.py index e4fffd82fc6..7050205bd7d 100644 --- a/services/web/server/src/simcore_service_webserver/folders/_folders_handlers.py +++ b/services/web/server/src/simcore_service_webserver/folders/_folders_handlers.py @@ -28,6 +28,7 @@ from ._exceptions_handlers import handle_plugin_requests_exceptions from ._models import ( FolderFilters, + FolderListFullSearchWithJsonStrQueryParams, FolderListWithJsonStrQueryParams, FoldersPathParams, FoldersRequestContext, @@ -99,6 +100,46 @@ async def list_folders(request: web.Request): ) +@routes.get(f"/{VTAG}/folders:search", name="list_folders_full_search") +@login_required +@permission_required("folder.read") +@handle_plugin_requests_exceptions +async def list_folders_full_search(request: web.Request): + req_ctx = FoldersRequestContext.parse_obj(request) + query_params: FolderListFullSearchWithJsonStrQueryParams = ( + parse_request_query_parameters_as( + FolderListFullSearchWithJsonStrQueryParams, request + ) + ) + + if not query_params.filters: + query_params.filters = FolderFilters() + + folders: 
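The combine-then-paginate pattern introduced in `list_()` above: two SELECTs with identical columns are merged with `UNION ALL`, the total is counted from the combined subquery, and ordering plus offset/limit are applied once to the combined statement. A miniature, self-contained version with an illustrative table:

```python
import sqlalchemy as sa

metadata = sa.MetaData()
folders = sa.Table(  # illustrative miniature of folders_v2
    "folders",
    metadata,
    sa.Column("folder_id", sa.Integer, primary_key=True),
    sa.Column("name", sa.String),
    sa.Column("user_id", sa.Integer, nullable=True),
)

private_q = sa.select(folders).where(folders.c.user_id == 42)     # private workspace
shared_q = sa.select(folders).where(folders.c.user_id.is_(None))  # shared workspaces

combined = sa.union_all(private_q, shared_q)

# total across both branches, computed before pagination
count_q = sa.select(sa.func.count()).select_from(combined.subquery())

# ordering and pagination applied once, on the combined statement
page_q = combined.order_by(sa.desc(folders.c.name)).offset(0).limit(20)
```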
FolderGetPage = await _folders_api.list_folders_full_search( + app=request.app, + user_id=req_ctx.user_id, + product_name=req_ctx.product_name, + trashed=query_params.filters.trashed, + offset=query_params.offset, + limit=query_params.limit, + order_by=parse_obj_as(OrderBy, query_params.order_by), + ) + + page = Page[FolderGet].parse_obj( + paginate_data( + chunk=folders.items, + request_url=request.url, + total=folders.total, + limit=query_params.limit, + offset=query_params.offset, + ) + ) + return web.Response( + text=page.json(**RESPONSE_MODEL_POLICY), + content_type=MIMETYPE_APPLICATION_JSON, + ) + + @routes.get(f"/{VTAG}/folders/{{folder_id}}", name="get_folder") @login_required @permission_required("folder.read") diff --git a/services/web/server/src/simcore_service_webserver/folders/_models.py b/services/web/server/src/simcore_service_webserver/folders/_models.py index fb337b5b199..5e48f46fa37 100644 --- a/services/web/server/src/simcore_service_webserver/folders/_models.py +++ b/services/web/server/src/simcore_service_webserver/folders/_models.py @@ -33,9 +33,7 @@ class FolderFilters(Filters): ) -class FolderListWithJsonStrQueryParams( - PageQueryParameters, FiltersQueryParameters[FolderFilters] -): +class FolderListSortParams(BaseModel): # pylint: disable=unsubscriptable-object order_by: Json[OrderBy] = Field( default=OrderBy(field=IDStr("modified"), direction=OrderDirection.DESC), @@ -43,14 +41,6 @@ class FolderListWithJsonStrQueryParams( example='{"field": "name", "direction": "desc"}', alias="order_by", ) - folder_id: FolderID | None = Field( - default=None, - description="List the subfolders of this folder. By default, list the subfolders of the root directory (Folder ID is None).", - ) - workspace_id: WorkspaceID | None = Field( - default=None, - description="List folders in specific workspace. By default, list in the user private workspace", - ) @validator("order_by", check_fields=False) @classmethod @@ -69,6 +59,22 @@ def _validate_order_by_field(cls, v): class Config: extra = Extra.forbid + +class FolderListWithJsonStrQueryParams( + PageQueryParameters, FolderListSortParams, FiltersQueryParameters[FolderFilters] +): + folder_id: FolderID | None = Field( + default=None, + description="List the subfolders of this folder. By default, list the subfolders of the root directory (Folder ID is None).", + ) + workspace_id: WorkspaceID | None = Field( + default=None, + description="List folders in specific workspace. 
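The `_models.py` refactor above splits sorting into a `FolderListSortParams` mixin so both listing endpoints can compose pagination, sorting, and filters, while `Extra.forbid` turns unknown query parameters into validation errors instead of silently dropping them. A condensed sketch with stand-in mixins (pydantic v1 style, as in the diff):

```python
from pydantic import BaseModel, Extra


class PageQueryParameters(BaseModel):  # stand-in for the real pagination mixin
    limit: int = 20
    offset: int = 0


class FolderListSortParams(BaseModel):  # stand-in: the real one validates JSON order_by
    order_by: str = '{"field": "modified_at", "direction": "desc"}'


class FolderListFullSearchQueryParams(PageQueryParameters, FolderListSortParams):
    class Config:
        extra = Extra.forbid


FolderListFullSearchQueryParams(limit=5)  # ok
# FolderListFullSearchQueryParams(unknown_param=1)  -> would raise ValidationError
```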
By default, list in the user private workspace", + ) + + class Config: + extra = Extra.forbid + # validators _null_or_none_str_to_none_validator = validator( "folder_id", allow_reuse=True, pre=True @@ -79,6 +85,13 @@ class Config: )(null_or_none_str_to_none_validator) +class FolderListFullSearchWithJsonStrQueryParams( + PageQueryParameters, FolderListSortParams, FiltersQueryParameters[FolderFilters] +): + class Config: + extra = Extra.forbid + + class RemoveQueryParams(BaseModel): force: bool = Field( default=False, description="Force removal (even if resource is active)" diff --git a/services/web/server/src/simcore_service_webserver/workspaces/_workspaces_db.py b/services/web/server/src/simcore_service_webserver/workspaces/_workspaces_db.py index a959843a969..fa0ab9dbab6 100644 --- a/services/web/server/src/simcore_service_webserver/workspaces/_workspaces_db.py +++ b/services/web/server/src/simcore_service_webserver/workspaces/_workspaces_db.py @@ -17,7 +17,6 @@ WorkspaceID, ) from pydantic import NonNegativeInt -from simcore_postgres_database.models.groups import user_to_groups from simcore_postgres_database.models.workspaces import workspaces from simcore_postgres_database.models.workspaces_access_rights import ( workspaces_access_rights, @@ -26,10 +25,12 @@ pass_or_acquire_connection, transaction_context, ) +from simcore_postgres_database.utils_workspaces_sql import ( + create_my_workspace_access_rights_subquery, +) from sqlalchemy import asc, desc, func -from sqlalchemy.dialects.postgresql import BOOLEAN, INTEGER from sqlalchemy.ext.asyncio import AsyncConnection -from sqlalchemy.sql import Subquery, select +from sqlalchemy.sql import select from ..db.plugin import get_asyncpg_engine from .errors import WorkspaceAccessForbiddenError, WorkspaceNotFoundError @@ -98,29 +99,6 @@ async def create_workspace( ).subquery("access_rights_subquery") -def _create_my_access_rights_subquery(user_id: UserID) -> Subquery: - return ( - select( - workspaces_access_rights.c.workspace_id, - func.json_build_object( - "read", - func.max(workspaces_access_rights.c.read.cast(INTEGER)).cast(BOOLEAN), - "write", - func.max(workspaces_access_rights.c.write.cast(INTEGER)).cast(BOOLEAN), - "delete", - func.max(workspaces_access_rights.c.delete.cast(INTEGER)).cast(BOOLEAN), - ).label("my_access_rights"), - ) - .select_from( - workspaces_access_rights.join( - user_to_groups, user_to_groups.c.gid == workspaces_access_rights.c.gid - ) - ) - .where(user_to_groups.c.uid == user_id) - .group_by(workspaces_access_rights.c.workspace_id) - ).subquery("my_access_rights_subquery") - - async def list_workspaces_for_user( app: web.Application, connection: AsyncConnection | None = None, @@ -131,7 +109,9 @@ async def list_workspaces_for_user( limit: NonNegativeInt, order_by: OrderBy, ) -> tuple[int, list[UserWorkspaceAccessRightsDB]]: - my_access_rights_subquery = _create_my_access_rights_subquery(user_id=user_id) + my_access_rights_subquery = create_my_workspace_access_rights_subquery( + user_id=user_id + ) base_query = ( select( @@ -175,7 +155,9 @@ async def get_workspace_for_user( workspace_id: WorkspaceID, product_name: ProductName, ) -> UserWorkspaceAccessRightsDB: - my_access_rights_subquery = _create_my_access_rights_subquery(user_id=user_id) + my_access_rights_subquery = create_my_workspace_access_rights_subquery( + user_id=user_id + ) base_query = ( select( diff --git a/services/web/server/tests/unit/with_dbs/04/folders/test_folders__full_search.py 
b/services/web/server/tests/unit/with_dbs/04/folders/test_folders__full_search.py new file mode 100644 index 00000000000..b9da926543e --- /dev/null +++ b/services/web/server/tests/unit/with_dbs/04/folders/test_folders__full_search.py @@ -0,0 +1,123 @@ +# pylint: disable=redefined-outer-name +# pylint: disable=unused-argument +# pylint: disable=unused-variable +# pylint: disable=too-many-arguments +# pylint: disable=too-many-statements + + +from http import HTTPStatus + +import pytest +from aiohttp.test_utils import TestClient +from models_library.api_schemas_webserver.folders_v2 import FolderGet +from pytest_simcore.helpers.assert_checks import assert_status +from pytest_simcore.helpers.webserver_login import LoggedUser, UserInfoDict +from pytest_simcore.helpers.webserver_parametrizations import ( + ExpectedResponse, + standard_role_response, +) +from servicelib.aiohttp import status +from simcore_service_webserver.db.models import UserRole +from simcore_service_webserver.projects.models import ProjectDict + + +@pytest.mark.parametrize(*standard_role_response(), ids=str) +async def test_folders_user_role_permissions( + client: TestClient, + logged_user: UserInfoDict, + user_project: ProjectDict, + expected: ExpectedResponse, +): + assert client.app + + url = client.app.router["list_folders_full_search"].url_for() + resp = await client.get(f"{url}") + await assert_status(resp, expected.ok) + + +@pytest.mark.parametrize("user_role,expected", [(UserRole.USER, status.HTTP_200_OK)]) +async def test_folders_full_search( + client: TestClient, + logged_user: UserInfoDict, + user_project: ProjectDict, + expected: HTTPStatus, +): + assert client.app + + # list full folder search + url = client.app.router["list_folders_full_search"].url_for() + resp = await client.get(f"{url}") + data, _ = await assert_status(resp, status.HTTP_200_OK) + assert data == [] + + # create a new folder + url = client.app.router["create_folder"].url_for() + resp = await client.post(f"{url}", json={"name": "My first folder"}) + root_folder, _ = await assert_status(resp, status.HTTP_201_CREATED) + + # create a subfolder folder + url = client.app.router["create_folder"].url_for() + resp = await client.post( + f"{url}", + json={ + "name": "My subfolder", + "parentFolderId": root_folder["folderId"], + }, + ) + subfolder_folder, _ = await assert_status(resp, status.HTTP_201_CREATED) + + # list full folder search + url = client.app.router["list_folders_full_search"].url_for() + resp = await client.get(f"{url}") + data, _ = await assert_status(resp, status.HTTP_200_OK) + assert len(data) == 2 + + # create a sub sub folder + url = client.app.router["create_folder"].url_for() + resp = await client.post( + f"{url}", + json={ + "name": "My sub sub folder", + "parentFolderId": subfolder_folder["folderId"], + }, + ) + subsubfolder_folder, _ = await assert_status(resp, status.HTTP_201_CREATED) + + # move sub sub folder to root folder + url = client.app.router["replace_folder"].url_for( + folder_id=f"{subsubfolder_folder['folderId']}" + ) + resp = await client.put( + f"{url}", + json={ + "name": "My Updated Folder", + "parentFolderId": None, + }, + ) + data, _ = await assert_status(resp, status.HTTP_200_OK) + assert FolderGet.parse_obj(data) + + # list full folder search + url = client.app.router["list_folders_full_search"].url_for() + resp = await client.get(f"{url}") + data, _ = await assert_status(resp, status.HTTP_200_OK) + assert len(data) == 3 + + # Create new user + async with LoggedUser(client) as new_logged_user: + # list full 
folder search + url = client.app.router["list_folders_full_search"].url_for() + resp = await client.get(f"{url}") + data, _ = await assert_status(resp, status.HTTP_200_OK) + assert data == [] + + # create a new folder + url = client.app.router["create_folder"].url_for() + resp = await client.post(f"{url}", json={"name": "New user folder"}) + new_user_folder, _ = await assert_status(resp, status.HTTP_201_CREATED) + + # list full folder search + url = client.app.router["list_folders_full_search"].url_for() + resp = await client.get(f"{url}") + data, _ = await assert_status(resp, status.HTTP_200_OK) + assert len(data) == 1 diff --git a/services/web/server/tests/unit/with_dbs/04/workspaces/test_workspaces__list_folders_full_search.py b/services/web/server/tests/unit/with_dbs/04/workspaces/test_workspaces__list_folders_full_search.py new file mode 100644 index 00000000000..3cfc1a78842 --- /dev/null +++ b/services/web/server/tests/unit/with_dbs/04/workspaces/test_workspaces__list_folders_full_search.py @@ -0,0 +1,65 @@ +# pylint: disable=redefined-outer-name +# pylint: disable=unused-argument +# pylint: disable=unused-variable +# pylint: disable=too-many-arguments +# pylint: disable=too-many-statements + + +from http import HTTPStatus + +import pytest +from aiohttp.test_utils import TestClient +from pytest_simcore.helpers.assert_checks import assert_status +from pytest_simcore.helpers.webserver_login import UserInfoDict +from servicelib.aiohttp import status +from simcore_service_webserver.db.models import UserRole + + +@pytest.mark.parametrize("user_role,expected", [(UserRole.USER, status.HTTP_200_OK)]) +async def test_workspaces__list_folders_full_search( + client: TestClient, + logged_user: UserInfoDict, + expected: HTTPStatus, + workspaces_clean_db: None, +): + assert client.app + + # list full folder search + url = client.app.router["list_folders_full_search"].url_for() + resp = await client.get(f"{url}") + data, _ = await assert_status(resp, status.HTTP_200_OK) + assert data == [] + + # create a new folder + url = client.app.router["create_folder"].url_for() + resp = await client.post(f"{url}", json={"name": "My first folder"}) + root_folder, _ = await assert_status(resp, status.HTTP_201_CREATED) + + # list full folder search + url = client.app.router["list_folders_full_search"].url_for() + resp = await client.get(f"{url}") + data, _ = await assert_status(resp, status.HTTP_200_OK) + assert len(data) == 1 + + # create a new workspace + url = client.app.router["create_workspace"].url_for() + resp = await client.post( + url.path, + json={ + "name": "My first workspace", + "description": "Custom description", + "thumbnail": None, + }, + ) + added_workspace, _ = await assert_status(resp, status.HTTP_201_CREATED) + + # create a folder + url = client.app.router["create_folder"].url_for() + resp = await client.post(url.path, json={"name": "My first folder"}) + root_folder, _ = await assert_status(resp, status.HTTP_201_CREATED) + + # list full folder search + url = client.app.router["list_folders_full_search"].url_for() + resp = await client.get(f"{url}") + data, _ = await assert_status(resp, status.HTTP_200_OK) + assert len(data) == 2 From 0981f491d19c3317016324908787f7f4f5bc060b Mon Sep 17 00:00:00 2001 From: Sylvain <35365065+sanderegg@users.noreply.github.com> Date: Tue, 12 Nov 2024 15:52:35 +0100 Subject: [PATCH 11/22] =?UTF-8?q?=F0=9F=8E=A8Maintenance:=20make=20redis?= =?UTF-8?q?=20client=20use=20the=20client=20name=20to=20ease=20debugging?= =?UTF-8?q?=20(#6700)?= MIME-Version: 1.0 
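The patch below makes client_name a required field of RedisClientSDK (the deprecated auto_close_connection_pool flag is dropped at the same time) and threads it through RedisClientsManager, so every pooled connection registers its owning service via Redis CLIENT SETNAME. A minimal sketch of the resulting call shape, assuming the import path from this repository and a locally reachable Redis (the DSN and service name here are illustrative):

import asyncio

from servicelib.redis import RedisClientSDK


async def main() -> None:
    # client_name is forwarded to redis-py, which issues CLIENT SETNAME
    # on every connection taken from the pool
    client = RedisClientSDK("redis://localhost:6379/0", client_name="my-service")
    await client.setup()

    # while debugging, `redis-cli CLIENT LIST` (or client_list() below)
    # now shows one entry per connection, tagged with the service name
    names = {entry["name"] for entry in await client.redis.client_list()}
    assert "my-service" in names

    await client.shutdown()


asyncio.run(main())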
Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../service-library/src/servicelib/redis.py | 5 +- packages/service-library/tests/conftest.py | 9 ++- .../tests/deferred_tasks/example_app.py | 2 + .../test__base_deferred_handler.py | 1 + packages/service-library/tests/test_pools.py | 29 +++++--- packages/service-library/tests/test_redis.py | 6 +- ...onection.py => test_redis__reconection.py} | 6 +- packages/service-library/tests/test_utils.py | 15 ++-- .../modules/redis.py | 6 +- .../modules/clusters_management_task.py | 3 +- .../modules/redis.py | 5 +- .../modules/comp_scheduler/_task.py | 3 +- .../modules/redis.py | 2 + .../unit/test_utils_distributed_identifier.py | 2 +- services/director/requirements/_tools.txt | 68 ------------------- .../services/redis.py | 3 + services/dynamic-scheduler/tests/conftest.py | 4 +- .../services/modules/redis.py | 6 +- .../_meta.py | 2 +- .../services/modules/redis.py | 6 +- .../src/simcore_service_storage/redis.py | 5 +- .../src/simcore_service_webserver/redis.py | 2 + 22 files changed, 86 insertions(+), 104 deletions(-) rename packages/service-library/tests/{test_redis__recoonection.py => test_redis__reconection.py} (88%) diff --git a/packages/service-library/src/servicelib/redis.py b/packages/service-library/src/servicelib/redis.py index 03847ae0b04..7bbb8b2b71b 100644 --- a/packages/service-library/src/servicelib/redis.py +++ b/packages/service-library/src/servicelib/redis.py @@ -60,6 +60,7 @@ async def _cancel_or_warn(task: Task) -> None: @dataclass class RedisClientSDK: redis_dsn: str + client_name: str decode_responses: bool = _DEFAULT_DECODE_RESPONSES health_check_interval: datetime.timedelta = _DEFAULT_HEALTH_CHECK_INTERVAL @@ -86,7 +87,7 @@ def __post_init__(self): socket_connect_timeout=_DEFAULT_SOCKET_TIMEOUT.total_seconds(), encoding="utf-8", decode_responses=self.decode_responses, - auto_close_connection_pool=True, + client_name=self.client_name, ) @retry(**RedisRetryPolicyUponInitialization(_logger).kwargs) @@ -238,6 +239,7 @@ class RedisClientsManager: databases_configs: set[RedisManagerDBConfig] settings: RedisSettings + client_name: str _client_sdks: dict[RedisDatabase, RedisClientSDK] = field(default_factory=dict) @@ -247,6 +249,7 @@ async def setup(self) -> None: redis_dsn=self.settings.build_redis_dsn(config.database), decode_responses=config.decode_responses, health_check_interval=config.health_check_interval, + client_name=f"{self.client_name}", ) for client in self._client_sdks.values(): diff --git a/packages/service-library/tests/conftest.py b/packages/service-library/tests/conftest.py index 927ff75477f..7527ee67a14 100644 --- a/packages/service-library/tests/conftest.py +++ b/packages/service-library/tests/conftest.py @@ -80,9 +80,12 @@ async def _( database: RedisDatabase, decode_response: bool = True # noqa: FBT002 ) -> AsyncIterator[RedisClientSDK]: redis_resources_dns = redis_service.build_redis_dsn(database) - client = RedisClientSDK(redis_resources_dns, decode_responses=decode_response) + client = RedisClientSDK( + redis_resources_dns, decode_responses=decode_response, client_name="pytest" + ) assert client assert client.redis_dsn == redis_resources_dns + assert client.client_name == "pytest" await client.setup() yield client @@ -94,7 +97,9 @@ async def _cleanup_redis_data(clients_manager: RedisClientsManager) -> None: await clients_manager.client(db).redis.flushall() async with RedisClientsManager( - {RedisManagerDBConfig(db) for db in RedisDatabase}, redis_service + {RedisManagerDBConfig(db) 
for db in RedisDatabase}, + redis_service, + client_name="pytest", ) as clients_manager: await _cleanup_redis_data(clients_manager) yield _ diff --git a/packages/service-library/tests/deferred_tasks/example_app.py b/packages/service-library/tests/deferred_tasks/example_app.py index 0ba848178d8..61450a9cb16 100644 --- a/packages/service-library/tests/deferred_tasks/example_app.py +++ b/packages/service-library/tests/deferred_tasks/example_app.py @@ -60,6 +60,7 @@ def __init__(self, redis_settings: RedisSettings, port: int) -> None: self.redis: Redis = RedisClientSDK( redis_settings.build_redis_dsn(RedisDatabase.DEFERRED_TASKS), decode_responses=True, + client_name="example_app", ).redis self.port = port @@ -84,6 +85,7 @@ def __init__( self._redis_client = RedisClientSDK( redis_settings.build_redis_dsn(RedisDatabase.DEFERRED_TASKS), decode_responses=False, + client_name="example_app", ) self._manager = DeferredManager( rabbit_settings, diff --git a/packages/service-library/tests/deferred_tasks/test__base_deferred_handler.py b/packages/service-library/tests/deferred_tasks/test__base_deferred_handler.py index a5b45ed80d9..3aa5b53e7f5 100644 --- a/packages/service-library/tests/deferred_tasks/test__base_deferred_handler.py +++ b/packages/service-library/tests/deferred_tasks/test__base_deferred_handler.py @@ -55,6 +55,7 @@ async def redis_client_sdk( sdk = RedisClientSDK( redis_service.build_redis_dsn(RedisDatabase.DEFERRED_TASKS), decode_responses=False, + client_name="pytest", ) await sdk.setup() yield sdk diff --git a/packages/service-library/tests/test_pools.py b/packages/service-library/tests/test_pools.py index 13c62ad0a3a..1604ba10147 100644 --- a/packages/service-library/tests/test_pools.py +++ b/packages/service-library/tests/test_pools.py @@ -1,4 +1,4 @@ -from asyncio import BaseEventLoop +import asyncio from concurrent.futures import ProcessPoolExecutor from servicelib.pools import ( @@ -11,17 +11,25 @@ def return_int_one() -> int: return 1 -async def test_default_thread_pool_executor(event_loop: BaseEventLoop) -> None: - assert await event_loop.run_in_executor(None, return_int_one) == 1 +async def test_default_thread_pool_executor() -> None: + assert await asyncio.get_running_loop().run_in_executor(None, return_int_one) == 1 -async def test_blocking_process_pool_executor(event_loop: BaseEventLoop) -> None: - assert await event_loop.run_in_executor(ProcessPoolExecutor(), return_int_one) == 1 +async def test_blocking_process_pool_executor() -> None: + assert ( + await asyncio.get_running_loop().run_in_executor( + ProcessPoolExecutor(), return_int_one + ) + == 1 + ) -async def test_non_blocking_process_pool_executor(event_loop: BaseEventLoop) -> None: +async def test_non_blocking_process_pool_executor() -> None: with non_blocking_process_pool_executor() as executor: - assert await event_loop.run_in_executor(executor, return_int_one) == 1 + assert ( + await asyncio.get_running_loop().run_in_executor(executor, return_int_one) + == 1 + ) async def test_same_pool_instances() -> None: @@ -36,9 +44,12 @@ async def test_different_pool_instances() -> None: assert first != second -async def test_non_blocking_thread_pool_executor(event_loop: BaseEventLoop) -> None: +async def test_non_blocking_thread_pool_executor() -> None: with non_blocking_thread_pool_executor() as executor: - assert await event_loop.run_in_executor(executor, return_int_one) == 1 + assert ( + await asyncio.get_running_loop().run_in_executor(executor, return_int_one) + == 1 + ) async def test_same_thread_pool_instances() -> 
None: diff --git a/packages/service-library/tests/test_redis.py b/packages/service-library/tests/test_redis.py index 7a3fa9b52d6..c120f85d344 100644 --- a/packages/service-library/tests/test_redis.py +++ b/packages/service-library/tests/test_redis.py @@ -277,7 +277,9 @@ async def test_redis_client_sdks_manager( RedisManagerDBConfig(db) for db in RedisDatabase } manager = RedisClientsManager( - databases_configs=all_redis_configs, settings=redis_service + databases_configs=all_redis_configs, + settings=redis_service, + client_name="pytest", ) async with manager: @@ -290,7 +292,7 @@ async def test_redis_client_sdk_setup_shutdown( ): # setup redis_resources_dns = redis_service.build_redis_dsn(RedisDatabase.RESOURCES) - client = RedisClientSDK(redis_resources_dns) + client = RedisClientSDK(redis_resources_dns, client_name="pytest") assert client assert client.redis_dsn == redis_resources_dns diff --git a/packages/service-library/tests/test_redis__recoonection.py b/packages/service-library/tests/test_redis__reconection.py similarity index 88% rename from packages/service-library/tests/test_redis__recoonection.py rename to packages/service-library/tests/test_redis__reconection.py index 89902a4b66e..8fe5a718527 100644 --- a/packages/service-library/tests/test_redis__recoonection.py +++ b/packages/service-library/tests/test_redis__reconection.py @@ -21,9 +21,9 @@ async def test_redis_client_sdk_lost_connection( docker_client: docker.client.DockerClient, ): redis_client_sdk = RedisClientSDK( - redis_service.build_redis_dsn(RedisDatabase.RESOURCES) + redis_service.build_redis_dsn(RedisDatabase.RESOURCES), client_name="pytest" ) - + assert redis_client_sdk.client_name == "pytest" await redis_client_sdk.setup() assert await redis_client_sdk.ping() is True @@ -41,3 +41,5 @@ async def test_redis_client_sdk_lost_connection( ): with attempt: assert await redis_client_sdk.ping() is False + + await redis_client_sdk.shutdown() diff --git a/packages/service-library/tests/test_utils.py b/packages/service-library/tests/test_utils.py index 7bfcd4cee69..ebcad03b031 100644 --- a/packages/service-library/tests/test_utils.py +++ b/packages/service-library/tests/test_utils.py @@ -5,7 +5,6 @@ import asyncio from collections.abc import AsyncIterator, Awaitable, Coroutine, Iterator from copy import copy, deepcopy -from random import randint from typing import NoReturn from unittest import mock @@ -66,7 +65,6 @@ def mock_logger(mocker: MockerFixture) -> Iterator[mock.Mock]: async def test_logged_gather( - event_loop: asyncio.AbstractEventLoop, coros: list[Coroutine], mock_logger: mock.Mock, ): @@ -79,7 +77,7 @@ async def test_logged_gather( # NOTE: only first error in the list is raised, since it is not RuntimeError, that task assert isinstance(excinfo.value, ValueError) - for task in asyncio.all_tasks(event_loop): + for task in asyncio.all_tasks(asyncio.get_running_loop()): if task is not asyncio.current_task(): # info task.print_stack() @@ -148,7 +146,7 @@ async def test_fire_and_forget_1000s_tasks(faker: Faker): tasks_collection = set() async def _some_task(n: int) -> str: - await asyncio.sleep(randint(1, 3)) + await asyncio.sleep(faker.random_int(1, 3)) return f"I'm great since I slept a bit, and by the way I'm task {n}" for n in range(1000): @@ -251,7 +249,6 @@ async def test_limited_gather_limits( async def test_limited_gather( - event_loop: asyncio.AbstractEventLoop, coros: list[Coroutine], mock_logger: mock.Mock, ): @@ -266,7 +263,7 @@ async def test_limited_gather( unfinished_tasks = [ task - for task in 
asyncio.all_tasks(event_loop) + for task in asyncio.all_tasks(asyncio.get_running_loop()) if task is not asyncio.current_task() ] final_results = await asyncio.gather(*unfinished_tasks, return_exceptions=True) @@ -288,9 +285,7 @@ async def test_limited_gather_wo_raising( assert results[5] == 5 -async def test_limited_gather_cancellation( - event_loop: asyncio.AbstractEventLoop, slow_successful_coros_list: list[Coroutine] -): +async def test_limited_gather_cancellation(slow_successful_coros_list: list[Coroutine]): task = asyncio.create_task(limited_gather(*slow_successful_coros_list, limit=0)) await asyncio.sleep(3) task.cancel() @@ -300,7 +295,7 @@ async def test_limited_gather_cancellation( # check all coros are cancelled unfinished_tasks = [ task - for task in asyncio.all_tasks(event_loop) + for task in asyncio.all_tasks(asyncio.get_running_loop()) if task is not asyncio.current_task() ] assert not unfinished_tasks diff --git a/services/autoscaling/src/simcore_service_autoscaling/modules/redis.py b/services/autoscaling/src/simcore_service_autoscaling/modules/redis.py index 29fed9c6b97..60ce15df956 100644 --- a/services/autoscaling/src/simcore_service_autoscaling/modules/redis.py +++ b/services/autoscaling/src/simcore_service_autoscaling/modules/redis.py @@ -5,6 +5,8 @@ from servicelib.redis import RedisClientSDK from settings_library.redis import RedisDatabase, RedisSettings +from .._meta import APP_NAME + logger = logging.getLogger(__name__) @@ -13,7 +15,9 @@ async def on_startup() -> None: app.state.redis_client_sdk = None settings: RedisSettings = app.state.settings.AUTOSCALING_REDIS redis_locks_dsn = settings.build_redis_dsn(RedisDatabase.LOCKS) - app.state.redis_client_sdk = client = RedisClientSDK(redis_locks_dsn) + app.state.redis_client_sdk = client = RedisClientSDK( + redis_locks_dsn, client_name=APP_NAME + ) await client.setup() async def on_shutdown() -> None: diff --git a/services/clusters-keeper/src/simcore_service_clusters_keeper/modules/clusters_management_task.py b/services/clusters-keeper/src/simcore_service_clusters_keeper/modules/clusters_management_task.py index 410edba1efb..d2e8f6e4c6f 100644 --- a/services/clusters-keeper/src/simcore_service_clusters_keeper/modules/clusters_management_task.py +++ b/services/clusters-keeper/src/simcore_service_clusters_keeper/modules/clusters_management_task.py @@ -6,6 +6,7 @@ from servicelib.background_task import start_periodic_task, stop_periodic_task from servicelib.redis_utils import exclusive +from .._meta import APP_NAME from ..core.settings import ApplicationSettings from ..modules.redis import get_redis_client from .clusters_management_core import check_clusters @@ -19,7 +20,7 @@ def on_app_startup(app: FastAPI) -> Callable[[], Awaitable[None]]: async def _startup() -> None: app_settings: ApplicationSettings = app.state.settings - lock_key = f"{app.title}:clusters-management_lock" + lock_key = f"{APP_NAME}:clusters-management_lock" lock_value = json.dumps({}) app.state.clusters_cleaning_task = start_periodic_task( exclusive(get_redis_client(app), lock_key=lock_key, lock_value=lock_value)( diff --git a/services/clusters-keeper/src/simcore_service_clusters_keeper/modules/redis.py b/services/clusters-keeper/src/simcore_service_clusters_keeper/modules/redis.py index 08f0ff54f73..a0a0d6a8745 100644 --- a/services/clusters-keeper/src/simcore_service_clusters_keeper/modules/redis.py +++ b/services/clusters-keeper/src/simcore_service_clusters_keeper/modules/redis.py @@ -5,6 +5,7 @@ from servicelib.redis import RedisClientSDK from 
settings_library.redis import RedisDatabase, RedisSettings +from .._meta import APP_NAME from ..core.settings import get_application_settings logger = logging.getLogger(__name__) @@ -15,7 +16,9 @@ async def on_startup() -> None: app.state.redis_client_sdk = None settings: RedisSettings = get_application_settings(app).CLUSTERS_KEEPER_REDIS redis_locks_dsn = settings.build_redis_dsn(RedisDatabase.LOCKS) - app.state.redis_client_sdk = client = RedisClientSDK(redis_locks_dsn) + app.state.redis_client_sdk = client = RedisClientSDK( + redis_locks_dsn, client_name=APP_NAME + ) await client.setup() async def on_shutdown() -> None: diff --git a/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_task.py b/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_task.py index 0e1c79ff8b6..989b310687c 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_task.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_task.py @@ -10,6 +10,7 @@ from servicelib.redis_utils import exclusive from settings_library.redis import RedisDatabase +from ..._meta import APP_NAME from . import _scheduler_factory _logger = logging.getLogger(__name__) @@ -26,7 +27,7 @@ async def start_scheduler() -> None: _logger, level=logging.INFO, msg="starting computational scheduler" ): redis_clients_manager: RedisClientsManager = app.state.redis_clients_manager - lock_key = f"{app.title}:computational_scheduler" + lock_key = f"{APP_NAME}:computational_scheduler" app.state.scheduler = scheduler = await _scheduler_factory.create_from_db( app ) diff --git a/services/director-v2/src/simcore_service_director_v2/modules/redis.py b/services/director-v2/src/simcore_service_director_v2/modules/redis.py index 7cb6f86cc82..e7da01afef7 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/redis.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/redis.py @@ -2,6 +2,7 @@ from servicelib.redis import RedisClientsManager, RedisManagerDBConfig from settings_library.redis import RedisDatabase +from .._meta import APP_NAME from ..core.settings import AppSettings @@ -18,6 +19,7 @@ async def on_startup() -> None: ) }, settings=settings.REDIS, + client_name=APP_NAME, ) await redis_clients_manager.setup() diff --git a/services/director-v2/tests/unit/test_utils_distributed_identifier.py b/services/director-v2/tests/unit/test_utils_distributed_identifier.py index ce200feef97..8c316876a9c 100644 --- a/services/director-v2/tests/unit/test_utils_distributed_identifier.py +++ b/services/director-v2/tests/unit/test_utils_distributed_identifier.py @@ -171,7 +171,7 @@ async def redis_client_sdk( RedisDatabase.DISTRIBUTED_IDENTIFIERS ) - client = RedisClientSDK(redis_resources_dns) + client = RedisClientSDK(redis_resources_dns, client_name="pytest") assert client assert client.redis_dsn == redis_resources_dns await client.setup() diff --git a/services/director/requirements/_tools.txt b/services/director/requirements/_tools.txt index 24945ba6807..e69de29bb2d 100644 --- a/services/director/requirements/_tools.txt +++ b/services/director/requirements/_tools.txt @@ -1,68 +0,0 @@ -# -# This file is autogenerated by pip-compile with python 3.6 -# To update, run: -# -# pip-compile --output-file=requirements/_tools.txt --strip-extras requirements/_tools.in -# -appdirs==1.4.4 - # via black -black==20.8b1 - # via -r requirements/_tools.in -bump2version==1.0.1 - # via -r requirements/_tools.in -click==8.0.3 - # via - # black - 
# pip-tools -dataclasses==0.7 - # via - # -c requirements/_base.txt - # -c requirements/_test.txt - # black -importlib-metadata==2.0.0 - # via - # -c requirements/_test.txt - # click - # pep517 -mypy-extensions==0.4.3 - # via black -pathspec==0.9.0 - # via black -pep517==0.12.0 - # via pip-tools -pip==24.3.1 - # via pip-tools -pip-tools==6.4.0 - # via -r requirements/_tools.in -pyyaml==5.4 - # via - # -c requirements/_base.txt - # -c requirements/_test.txt - # watchdog -regex==2022.1.18 - # via black -toml==0.10.2 - # via - # -c requirements/_test.txt - # black -tomli==1.2.3 - # via pep517 -typed-ast==1.4.1 - # via - # -c requirements/_test.txt - # black -typing-extensions==4.0.1 - # via black -watchdog==2.1.6 - # via -r requirements/_tools.in -wheel==0.37.1 - # via pip-tools -zipp==3.4.0 - # via - # -c requirements/_test.txt - # importlib-metadata - # pep517 - -# The following packages are considered to be unsafe in a requirements file: -# pip -# setuptools diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/redis.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/redis.py index 84131eaf54b..ff7d53920bf 100644 --- a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/redis.py +++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/redis.py @@ -4,6 +4,8 @@ from servicelib.redis import RedisClientSDK, RedisClientsManager, RedisManagerDBConfig from settings_library.redis import RedisDatabase, RedisSettings +from .._meta import APP_NAME + _DECODE_DBS: Final[set[RedisDatabase]] = { RedisDatabase.LOCKS, } @@ -24,6 +26,7 @@ async def on_startup() -> None: {RedisManagerDBConfig(x, decode_responses=False) for x in _BINARY_DBS} | {RedisManagerDBConfig(x, decode_responses=True) for x in _DECODE_DBS}, settings, + client_name=APP_NAME, ) await manager.setup() diff --git a/services/dynamic-scheduler/tests/conftest.py b/services/dynamic-scheduler/tests/conftest.py index 8b672b0408e..ae2e723708e 100644 --- a/services/dynamic-scheduler/tests/conftest.py +++ b/services/dynamic-scheduler/tests/conftest.py @@ -135,7 +135,9 @@ async def app( @pytest.fixture async def remove_redis_data(redis_service: RedisSettings) -> None: async with RedisClientsManager( - {RedisManagerDBConfig(x) for x in RedisDatabase}, redis_service + {RedisManagerDBConfig(x) for x in RedisDatabase}, + redis_service, + client_name="pytest", ) as manager: await logged_gather( *[manager.client(d).redis.flushall() for d in RedisDatabase] diff --git a/services/efs-guardian/src/simcore_service_efs_guardian/services/modules/redis.py b/services/efs-guardian/src/simcore_service_efs_guardian/services/modules/redis.py index 20cbcc0a4db..4876e5b8b21 100644 --- a/services/efs-guardian/src/simcore_service_efs_guardian/services/modules/redis.py +++ b/services/efs-guardian/src/simcore_service_efs_guardian/services/modules/redis.py @@ -5,6 +5,8 @@ from servicelib.redis import RedisClientSDK from settings_library.redis import RedisDatabase, RedisSettings +from ..._meta import APP_NAME + logger = logging.getLogger(__name__) @@ -13,7 +15,9 @@ async def on_startup() -> None: app.state.redis_lock_client_sdk = None settings: RedisSettings = app.state.settings.EFS_GUARDIAN_REDIS redis_locks_dsn = settings.build_redis_dsn(RedisDatabase.LOCKS) - app.state.redis_lock_client_sdk = lock_client = RedisClientSDK(redis_locks_dsn) + app.state.redis_lock_client_sdk = lock_client = RedisClientSDK( + redis_locks_dsn, client_name=APP_NAME + ) await lock_client.setup() 
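The same startup shape repeats across the services touched by this patch (autoscaling, clusters-keeper, efs-guardian, resource-usage-tracker, storage, webserver): each imports its stable package name from _meta.py and uses it as the Redis client name, and in clusters-keeper and director-v2 also as the prefix of exclusive lock keys, instead of the instance-dependent app.title. A condensed sketch of that shared pattern for a FastAPI service, assuming the servicelib and settings_library helpers used in the hunks above and below:

from fastapi import FastAPI
from servicelib.redis import RedisClientSDK
from settings_library.redis import RedisDatabase, RedisSettings

APP_NAME = "simcore-service-example"  # in the real services this comes from _meta.py


def setup_redis(app: FastAPI, settings: RedisSettings) -> None:
    async def on_startup() -> None:
        redis_locks_dsn = settings.build_redis_dsn(RedisDatabase.LOCKS)
        # one SDK per app, tagged with the stable package name rather than
        # app.title, which can differ between deployments
        app.state.redis_client_sdk = client = RedisClientSDK(
            redis_locks_dsn, client_name=APP_NAME
        )
        await client.setup()

    async def on_shutdown() -> None:
        client: RedisClientSDK | None = app.state.redis_client_sdk
        if client:
            await client.shutdown()

    app.add_event_handler("startup", on_startup)
    app.add_event_handler("shutdown", on_shutdown)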
async def on_shutdown() -> None: diff --git a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/_meta.py b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/_meta.py index 63e86cce819..ceb639ddcc9 100644 --- a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/_meta.py +++ b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/_meta.py @@ -19,7 +19,7 @@ API_VERSION: Final[VersionStr] = info.__version__ API_VTAG: Final[VersionTag] = parse_obj_as(VersionTag, info.api_prefix_path_tag) SUMMARY: Final[str] = info.get_summary() - +APP_NAME: Final[str] = PROJECT_NAME # NOTE: https://texteditor.com/ascii-frames/ APP_STARTED_BANNER_MSG = r""" diff --git a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/modules/redis.py b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/modules/redis.py index 922b0e7e49e..0aece119077 100644 --- a/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/modules/redis.py +++ b/services/resource-usage-tracker/src/simcore_service_resource_usage_tracker/services/modules/redis.py @@ -5,6 +5,8 @@ from servicelib.redis import RedisClientSDK from settings_library.redis import RedisDatabase, RedisSettings +from ..._meta import APP_NAME + logger = logging.getLogger(__name__) @@ -13,7 +15,9 @@ async def on_startup() -> None: app.state.redis_client_sdk = None settings: RedisSettings = app.state.settings.RESOURCE_USAGE_TRACKER_REDIS redis_locks_dsn = settings.build_redis_dsn(RedisDatabase.LOCKS) - app.state.redis_client_sdk = client = RedisClientSDK(redis_locks_dsn) + app.state.redis_client_sdk = client = RedisClientSDK( + redis_locks_dsn, client_name=APP_NAME + ) await client.setup() async def on_shutdown() -> None: diff --git a/services/storage/src/simcore_service_storage/redis.py b/services/storage/src/simcore_service_storage/redis.py index 2380bd332dc..f18f891ec19 100644 --- a/services/storage/src/simcore_service_storage/redis.py +++ b/services/storage/src/simcore_service_storage/redis.py @@ -5,6 +5,7 @@ from servicelib.redis import RedisClientSDK from settings_library.redis import RedisDatabase, RedisSettings +from ._meta import APP_NAME from .constants import APP_CONFIG_KEY from .settings import Settings @@ -20,7 +21,9 @@ async def _setup(app: web.Application): assert settings.STORAGE_REDIS # nosec redis_settings: RedisSettings = settings.STORAGE_REDIS redis_locks_dsn = redis_settings.build_redis_dsn(RedisDatabase.LOCKS) - app[_APP_REDIS_KEY] = client = RedisClientSDK(redis_locks_dsn) + app[_APP_REDIS_KEY] = client = RedisClientSDK( + redis_locks_dsn, client_name=APP_NAME + ) await client.setup() yield diff --git a/services/web/server/src/simcore_service_webserver/redis.py b/services/web/server/src/simcore_service_webserver/redis.py index deee93f1fbd..1a1427cc09c 100644 --- a/services/web/server/src/simcore_service_webserver/redis.py +++ b/services/web/server/src/simcore_service_webserver/redis.py @@ -7,6 +7,7 @@ from settings_library.redis import RedisDatabase, RedisSettings from ._constants import APP_SETTINGS_KEY +from ._meta import APP_NAME _logger = logging.getLogger(__name__) @@ -44,6 +45,7 @@ async def setup_redis_client(app: web.Application): ) }, settings=redis_settings, + client_name=APP_NAME, ) await manager.setup() From 819df89656bb0c9576234512324b783475f70b4c Mon Sep 17 00:00:00 2001 From: Odei Maiz <33152403+odeimaiz@users.noreply.github.com> Date: Tue, 12 Nov 2024 
17:02:44 +0100 Subject: [PATCH 12/22] =?UTF-8?q?=F0=9F=8E=A8=20[Frontend]=20Make=20the=20?= =?UTF-8?q?Light=20Theme=20less=20white=20(#6681)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../client/source/class/osparc/Application.js | 9 +- .../source/class/osparc/auth/LoginPageS4L.js | 2 +- .../class/osparc/desktop/SlideshowView.js | 2 +- .../class/osparc/desktop/WorkbenchView.js | 26 ++--- .../class/osparc/node/BootOptionsView.js | 5 - .../source/class/osparc/node/LifeCycleView.js | 5 - .../osparc/node/UpdateResourceLimitsView.js | 5 - .../notification/RibbonNotifications.js | 5 +- .../source/class/osparc/theme/ColorDark.js | 81 +++++++------- .../source/class/osparc/theme/ColorLight.js | 100 +++++++++--------- .../source/class/osparc/widget/NodeOptions.js | 9 -- .../osparc/workbench/DiskUsageIndicator.js | 2 +- 12 files changed, 100 insertions(+), 151 deletions(-) diff --git a/services/static-webserver/client/source/class/osparc/Application.js b/services/static-webserver/client/source/class/osparc/Application.js index c5f760188e5..0b18c01bd22 100644 --- a/services/static-webserver/client/source/class/osparc/Application.js +++ b/services/static-webserver/client/source/class/osparc/Application.js @@ -222,9 +222,6 @@ qx.Class.define("osparc.Application", { __setDeviceSpecificIcons: function() { const isIOS = /iPad|iPhone|iPod/.test(navigator.userAgent) && !window.MSStream; const isAndroid = /android/i.test(navigator.userAgent); - const isWindows = /windows/i.test(navigator.userAgent); - // const productColor = qx.theme.manager.Color.getInstance().resolve("product-color"); - // const backgroundColor = qx.theme.manager.Color.getInstance().resolve("primary-background-color"); // default icons this.__updateMetaTags(); this.__setDefaultIcons() @@ -232,8 +229,6 @@ qx.Class.define("osparc.Application", { this.__setIOSpIcons(); } else if (isAndroid) { this.__setGoogleIcons(); - } else if (isWindows) { - // this.__updateBrowserConfig(this.__getProductMetaData().productColor); } }, @@ -246,16 +241,14 @@ qx.Class.define("osparc.Application", { } const productColor = qx.theme.manager.Color.getInstance().resolve("product-color"); - const backgroundColor = qx.theme.manager.Color.getInstance().resolve("primary-background-color"); return { productName: productName, productColor: productColor, - backgroundColor: backgroundColor } }, __updateMetaTags: function() { - // check device type and only set the icons for the divice type + // check device type and only set the icons for the device type // i.e iOS, Android or windows etc const themeColorMeta = document.querySelector("meta[name='theme-color']"); const tileColorMeta = document.querySelector("meta[name='msapplication-TileColor']"); diff --git a/services/static-webserver/client/source/class/osparc/auth/LoginPageS4L.js b/services/static-webserver/client/source/class/osparc/auth/LoginPageS4L.js index 8118ecdc9f8..1e7cf123b37 100644 --- a/services/static-webserver/client/source/class/osparc/auth/LoginPageS4L.js +++ b/services/static-webserver/client/source/class/osparc/auth/LoginPageS4L.js @@ -29,7 +29,7 @@ qx.Class.define("osparc.auth.LoginPageS4L", { const layout = new qx.ui.layout.HBox(); this._setLayout(layout); - this.setBackgroundColor("primary-background-color"); + this.setBackgroundColor("rgba(0, 20, 46, 1)"); this._removeAll(); diff --git a/services/static-webserver/client/source/class/osparc/desktop/SlideshowView.js b/services/static-webserver/client/source/class/osparc/desktop/SlideshowView.js 
index 593088bc4cd..e05a37f56a1 100644 --- a/services/static-webserver/client/source/class/osparc/desktop/SlideshowView.js +++ b/services/static-webserver/client/source/class/osparc/desktop/SlideshowView.js @@ -24,7 +24,7 @@ qx.Class.define("osparc.desktop.SlideshowView", { this._setLayout(new qx.ui.layout.VBox()); const slideshowToolbar = this.__slideshowToolbar = new osparc.desktop.SlideshowToolbar().set({ - backgroundColor: "tab_navigation_bar_background_color" + backgroundColor: "workbench-view-navbar" }); const collapseWithUserMenu = this.__collapseWithUserMenu = new osparc.desktop.CollapseWithUserMenu(); diff --git a/services/static-webserver/client/source/class/osparc/desktop/WorkbenchView.js b/services/static-webserver/client/source/class/osparc/desktop/WorkbenchView.js index 1daeea1c0f1..accb850ab5d 100644 --- a/services/static-webserver/client/source/class/osparc/desktop/WorkbenchView.js +++ b/services/static-webserver/client/source/class/osparc/desktop/WorkbenchView.js @@ -40,16 +40,10 @@ qx.Class.define("osparc.desktop.WorkbenchView", { TAB_BUTTON_HEIGHT: 46, decorateSplitter: function(splitter) { - const colorManager = qx.theme.manager.Color.getInstance(); - const binaryColor = osparc.utils.Utils.getRoundedBinaryColor(colorManager.resolve("background-main")); splitter.set({ width: 2, - backgroundColor: binaryColor + backgroundColor: "workbench-view-splitter" }); - colorManager.addListener("changeTheme", () => { - const newBinaryColor = osparc.utils.Utils.getRoundedBinaryColor(colorManager.resolve("background-main")); - splitter.setBackgroundColor(newBinaryColor); - }, this); }, decorateSlider: function(slider) { @@ -202,7 +196,6 @@ qx.Class.define("osparc.desktop.WorkbenchView", { control = new qx.ui.tabview.TabView().set({ contentPadding: osparc.widget.CollapsibleViewLight.CARET_WIDTH + 2, // collapse bar + padding contentPaddingRight: 2, - backgroundColor: this.self().PRIMARY_COL_BG_COLOR, barPosition: "top" }); const collapsibleViewLeft = this.getChildControl("collapsible-view-left"); @@ -322,7 +315,7 @@ qx.Class.define("osparc.desktop.WorkbenchView", { const topBar = tabViewPrimary.getChildControl("bar"); topBar.set({ height: this.self().TAB_BUTTON_HEIGHT, - backgroundColor: "tab_navigation_bar_background_color" + backgroundColor: "workbench-view-navbar" }); this.__addTopBarSpacer(topBar); @@ -392,7 +385,7 @@ qx.Class.define("osparc.desktop.WorkbenchView", { const topBar = tabViewSecondary.getChildControl("bar"); topBar.set({ height: this.self().TAB_BUTTON_HEIGHT, - backgroundColor: "tab_navigation_bar_background_color" + backgroundColor: "workbench-view-navbar" }); this.__addTopBarSpacer(topBar); @@ -427,7 +420,7 @@ qx.Class.define("osparc.desktop.WorkbenchView", { topBar.set({ height: this.self().TAB_BUTTON_HEIGHT, alignY: "top", - backgroundColor: "tab_navigation_bar_background_color" + backgroundColor: "workbench-view-navbar" }); this.__addTopBarSpacer(topBar); @@ -483,7 +476,7 @@ qx.Class.define("osparc.desktop.WorkbenchView", { __addTopBarSpacer: function(tabViewTopBar) { const spacer = new qx.ui.core.Widget().set({ - backgroundColor: "tab_navigation_bar_background_color" + backgroundColor: "workbench-view-navbar" }); tabViewTopBar.add(spacer, { flex: 1 @@ -492,7 +485,7 @@ qx.Class.define("osparc.desktop.WorkbenchView", { __createCollapsibleViewSpacer: function() { const spacer = new qx.ui.core.Widget().set({ - backgroundColor: "tab_navigation_bar_background_color", + backgroundColor: "workbench-view-navbar", height: this.self().TAB_BUTTON_HEIGHT }); return spacer; @@ 
-1079,13 +1072,6 @@ qx.Class.define("osparc.desktop.WorkbenchView", { const nodeOptions = new osparc.widget.NodeOptions(node); nodeOptions.buildLayout(); - [ - "versionChanged", - "bootModeChanged", - "limitsChanged" - ].forEach(eventName => { - nodeOptions.addListener(eventName, () => this.__populateSecondaryColumn(node)); - }); return nodeOptions; }, diff --git a/services/static-webserver/client/source/class/osparc/node/BootOptionsView.js b/services/static-webserver/client/source/class/osparc/node/BootOptionsView.js index a6c38981dff..291c028422d 100644 --- a/services/static-webserver/client/source/class/osparc/node/BootOptionsView.js +++ b/services/static-webserver/client/source/class/osparc/node/BootOptionsView.js @@ -18,10 +18,6 @@ qx.Class.define("osparc.node.BootOptionsView", { extend: osparc.node.ServiceOptionsView, - events: { - "bootModeChanged": "qx.event.type.Event" - }, - members: { _applyNode: function(node) { if (node.hasBootModes()) { @@ -61,7 +57,6 @@ qx.Class.define("osparc.node.BootOptionsView", { setTimeout(() => { buttonsLayout.setEnabled(true); node.requestStartNode(); - this.fireEvent("bootModeChanged"); }, osparc.desktop.StudyEditor.AUTO_SAVE_INTERVAL); } }, this); diff --git a/services/static-webserver/client/source/class/osparc/node/LifeCycleView.js b/services/static-webserver/client/source/class/osparc/node/LifeCycleView.js index 2cdfb2c1f74..5f810b18799 100644 --- a/services/static-webserver/client/source/class/osparc/node/LifeCycleView.js +++ b/services/static-webserver/client/source/class/osparc/node/LifeCycleView.js @@ -18,10 +18,6 @@ qx.Class.define("osparc.node.LifeCycleView", { extend: osparc.node.ServiceOptionsView, - events: { - "versionChanged": "qx.event.type.Event" - }, - members: { _applyNode: function(node) { if (node.isUpdatable() || node.isDeprecated() || node.isRetired()) { @@ -125,7 +121,6 @@ qx.Class.define("osparc.node.LifeCycleView", { setTimeout(() => { updateButton.setFetching(false); node.requestStartNode(); - this.fireEvent("versionChanged"); }, osparc.desktop.StudyEditor.AUTO_SAVE_INTERVAL); }); diff --git a/services/static-webserver/client/source/class/osparc/node/UpdateResourceLimitsView.js b/services/static-webserver/client/source/class/osparc/node/UpdateResourceLimitsView.js index f6770a7e675..3c75815c296 100644 --- a/services/static-webserver/client/source/class/osparc/node/UpdateResourceLimitsView.js +++ b/services/static-webserver/client/source/class/osparc/node/UpdateResourceLimitsView.js @@ -18,10 +18,6 @@ qx.Class.define("osparc.node.UpdateResourceLimitsView", { extend: osparc.node.ServiceOptionsView, - events: { - "limitsChanged": "qx.event.type.Event" - }, - members: { __resourceFields: null, __saveBtn: null, @@ -159,7 +155,6 @@ qx.Class.define("osparc.node.UpdateResourceLimitsView", { osparc.data.Resources.fetch("nodesInStudyResources", "put", params) .then(() => { osparc.FlashMessenger.getInstance().logAs(this.tr("Limits successfully updated")); - this.fireEvent("limitsChanged"); }) .catch(err => { console.error(err); diff --git a/services/static-webserver/client/source/class/osparc/notification/RibbonNotifications.js b/services/static-webserver/client/source/class/osparc/notification/RibbonNotifications.js index 1cbe3b5f7ea..b2ea90b2b8e 100644 --- a/services/static-webserver/client/source/class/osparc/notification/RibbonNotifications.js +++ b/services/static-webserver/client/source/class/osparc/notification/RibbonNotifications.js @@ -97,15 +97,14 @@ qx.Class.define("osparc.notification.RibbonNotifications", { if 
(notification.getType() === "announcement") { const dontShowButton = new qx.ui.form.Button(this.tr("Don't show again")).set({ - backgroundColor: "transparent", - textColor: "strong-text", + appearance: "strong-button", alignY: "middle", padding: 4, allowGrowX: false, allowGrowY: false, marginLeft: 15 }); - osparc.utils.Utils.addBorder(dontShowButton, 1, qx.theme.manager.Color.getInstance().resolve("strong-text")); + osparc.utils.Utils.addBorder(dontShowButton, 1, qx.theme.manager.Color.getInstance().resolve("text")); dontShowButton.addListener("tap", () => { this.removeNotification(notification); osparc.utils.Utils.localCache.setDontShowAnnouncement(notification.announcementId); diff --git a/services/static-webserver/client/source/class/osparc/theme/ColorDark.js b/services/static-webserver/client/source/class/osparc/theme/ColorDark.js index ca275a2371d..fda2ccd25a4 100644 --- a/services/static-webserver/client/source/class/osparc/theme/ColorDark.js +++ b/services/static-webserver/client/source/class/osparc/theme/ColorDark.js @@ -2,29 +2,27 @@ qx.Theme.define("osparc.theme.ColorDark", { include: osparc.theme.mixin.Color, colors: { + // 105-0 "c00": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 105), - "c01": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 100), - "c02": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 95), - "c03": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 85), - "c04": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 80), - "c05": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 70), - "c06": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 55), - "c07": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 45), - "c08": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 35), - "c09": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 30), - "c10": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 25), - "c11": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 20), - "c12": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 15), - "c13": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 8), - "c14": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 0), + "c01": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 105-5), + "c02": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 105-10), + "c03": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 105-20), + "c04": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 105-25), + "c05": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 105-35), + "c06": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 105-50), + "c07": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 105-60), + "c08": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 105-70), + "c09": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 105-75), + "c10": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 105-80), + "c12": 
osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 105-90), + "c14": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 105-105), "product-color": "rgba(0, 144, 208, 1)", // override in product "strong-main": "product-color", - "strong-text": "rgba(255, 255, 255, 1)", "a-bit-transparent": "rgba(0, 0, 0, 0.4)", // main - "background-main": "#222", + "background-main": "c01", "background-main-1": "c02", "background-main-2": "c03", "background-main-3": "c04", @@ -34,10 +32,7 @@ qx.Theme.define("osparc.theme.ColorDark", { "background-card-overlay": "rgba(25, 33, 37, 0.8)", "background-workspace-card-overlay": "rgb(35, 93, 122)", - "primary-background-color": "rgba(0, 20, 46, 1)", "navigation_bar_background_color": "rgba(1, 18, 26, 0.8)", - "tab_navigation_bar_background_color": "c00", - "modal-backdrop": "rgba(8, 9, 13, 1)", "fab_text": "contrasted-text-dark", "fab-background": "rgba(255, 255, 255, 0.2)", "input_background": "#213248", @@ -58,18 +53,18 @@ qx.Theme.define("osparc.theme.ColorDark", { "link": "rgba(10, 182, 255, 1)", // shadows - "bg-shadow": "background-main-5", - "box-shadow": "rgba(0,0,0, 0.15)", + "bg-shadow": "background-main-5", + "box-shadow": "rgba(0, 0, 0, 0.15)", "shadow": qx.core.Environment.get("css.rgba") ? "a-bit-transparent" : "bg-shadow", // window "window-popup-background": "rgba(66, 66, 66, 1)", "window-caption-background": "background-main", - "window-caption-background-active": "background-main-3", + "window-caption-background-active": "background-main-3", "window-caption-text": "text", "window-caption-text-active": "c12", - "window-border": "background-main-2", - "window-border-inner": "background-main-1", + "window-border": "background-main-2", + "window-border-inner": "background-main-1", // material-button "material-button-background": "fab-background", @@ -88,10 +83,10 @@ qx.Theme.define("osparc.theme.ColorDark", { // backgrounds "background-selected": "default-button-background", - "background-selected-disabled": "default-button-disabled", - "background-selected-dark": "product-color", + "background-selected-disabled": "default-button-disabled", + "background-selected-dark": "product-color", "background-disabled": "background-main", - "background-disabled-checked": "background-main-1", + "background-disabled-checked": "background-main-1", "background-pane": "background-main", // tabview @@ -102,23 +97,23 @@ qx.Theme.define("osparc.theme.ColorDark", { "tabview-button-background": "transparent", // scrollbar - "scrollbar-passive": "background-main-4", - "scrollbar-active": "background-main-5", + "scrollbar-passive": "background-main-4", + "scrollbar-active": "background-main-5", // form "button": "background-main-4", - "button-border": "background-main-5", + "button-border": "background-main-5", "button-border-hovered": "c07", - "button-box": "background-main-3", - "button-box-pressed": "background-main-4", + "button-box": "background-main-3", + "button-box-pressed": "background-main-4", "border-lead": "c07", // group box - "white-box-border": "background-main-2", + "white-box-border": "background-main-2", // borders // 'border-main' is an alias of 'background-selected' (compatibility reasons) - "border": "background-main-3", + "border": "background-main-3", "border-focused": "c09", "border-invalid": "failed-red", "border-disabled": "background-main", @@ -134,13 +129,13 @@ qx.Theme.define("osparc.theme.ColorDark", { "table-header": "background-main", "table-header-foreground": "c09", 
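Both theme files in this patch rewrite the c00–c14 grayscale so the two palettes are explicit mirrors of each other: ColorDark indexes the shared 0–105 scale from the top (105 - offset) while ColorLight, further below, indexes it from the bottom (0 + offset). A small Python check of that invariant, with the offsets transcribed from the diff (the color provider itself is qooxdoo JavaScript):

# offsets used by both palettes; c11 and c13 are dropped in this patch
OFFSETS = [0, 5, 10, 20, 25, 35, 50, 60, 70, 75, 80, 90, 105]
dark = [105 - o for o in OFFSETS]   # ColorDark: c00 starts at 105
light = [0 + o for o in OFFSETS]    # ColorLight: c00 starts at 0
# every mirrored pair (dark c_i, light c_i) sums to the full scale height
assert all(d + l == 105 for d, l in zip(dark, light))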
"table-header-border": "c07", - "table-focus-indicator": "background-main-5", + "table-focus-indicator": "background-main-5", // used in table code "table-header-cell": "background-main", - "table-row-background-focused-selected": "background-main-4", - "table-row-background-focused": "background-main-3", - "table-row-background-selected": "background-main-4", + "table-row-background-focused-selected": "background-main-4", + "table-row-background-focused": "background-main-3", + "table-row-background-selected": "background-main-4", "table-row-background-even": "background-main", "table-row-background-odd": "background-main", @@ -156,11 +151,11 @@ qx.Theme.define("osparc.theme.ColorDark", { "progressive-table-header": "c08", "progressive-table-row-background-even": "background-main", "progressive-table-row-background-odd": "background-main", - "progressive-progressbar-background": "background-main", + "progressive-progressbar-background": "background-main", "progressive-progressbar-indicator-done": "background-main", - "progressive-progressbar-indicator-undone": "background-main-1", - "progressive-progressbar-percent-background": "background-main", - "progressive-progressbar-percent-text": "background-main-1", + "progressive-progressbar-indicator-undone": "background-main-1", + "progressive-progressbar-percent-background": "background-main", + "progressive-progressbar-percent-text": "background-main-1", @@ -168,6 +163,8 @@ qx.Theme.define("osparc.theme.ColorDark", { "workbench-edge-comp-active": "#777777", "workbench-edge-api-active": "#BBBBBB", "workbench-start-hint": "#505050", + "workbench-view-navbar": "c00", + "workbench-view-splitter": "#000000", "node-background": "rgba(113, 157, 181, 0.5)", "node-selected-background": "strong-main", diff --git a/services/static-webserver/client/source/class/osparc/theme/ColorLight.js b/services/static-webserver/client/source/class/osparc/theme/ColorLight.js index 54f1e83d0ea..c1a6bfb5783 100644 --- a/services/static-webserver/client/source/class/osparc/theme/ColorLight.js +++ b/services/static-webserver/client/source/class/osparc/theme/ColorLight.js @@ -2,29 +2,27 @@ qx.Theme.define("osparc.theme.ColorLight", { include: osparc.theme.mixin.Color, colors: { + // 0-105 "c00": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 0), - "c01": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 8), - "c02": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 15), - "c03": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 25), - "c04": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 35), - "c05": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 45), - "c06": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 55), - "c07": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 60), - "c08": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 65), - "c09": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 70), - "c10": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 80), - "c11": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 85), - "c12": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 95), - "c13": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 100), - "c14": 
osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 105), + "c01": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 0+5), + "c02": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 0+10), + "c03": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 0+20), + "c04": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 0+25), + "c05": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 0+35), + "c06": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 0+50), + "c07": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 0+60), + "c08": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 0+70), + "c09": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 0+75), + "c10": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 0+80), + "c12": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 0+90), + "c14": osparc.theme.colorProvider.ColorProvider.getColor("color.scales.static.base", 0+105), "product-color": "rgba(0, 144, 208, 1)", // override in product "strong-main": "product-color", - "strong-text": "background-main-1", "a-bit-transparent": "rgba(255, 255, 255, 0.4)", // main - "background-main": "rgba(250,250,250, 1)", // Is manipulated + "background-main": "c01", "background-main-1": "c02", "background-main-2": "c03", "background-main-3": "c04", @@ -34,12 +32,9 @@ qx.Theme.define("osparc.theme.ColorLight", { "background-card-overlay": "rgba(229, 229, 229, 0.8)", "background-workspace-card-overlay": "rgb(165, 223, 252)", - "primary-background-color": "rgba(255, 255, 255, 1)", "navigation_bar_background_color": "rgba(229, 229, 229, 0.8)", - "tab_navigation_bar_background_color": "c00", - "modal-backdrop": "rgba(247, 248, 252, 0.4)", "fab_text": "contrasted-text-dark", - "fab-background": "rgba(255, 255, 255, 1)", + "fab-background": "rgba(255, 255, 255, 0.2)", "input_background": "rgba(209, 214, 218, 1)", "input_background_disable": "rgba(113, 157, 181, 0.04)", "hint-background": "rgba(201, 201, 201, 1)", @@ -58,24 +53,25 @@ qx.Theme.define("osparc.theme.ColorLight", { "link": "rgba(10, 182, 255, 1)", // shadows - "bg-shadow": "background-main-5", - "box-shadow": "rgba(0,0,0, 0.15)", + "bg-shadow": "background-main-5", + "box-shadow": "rgba(0, 0, 0, 0.15)", "shadow": qx.core.Environment.get("css.rgba") ? 
"a-bit-transparent" : "bg-shadow", // window - "window-popup-background": "rgba(255, 255, 255, 1)", + // OM here + "window-popup-background": "rgba(225, 225, 225, 1)", "window-caption-background": "background-main", - "window-caption-background-active": "background-main-3", + "window-caption-background-active": "background-main-3", "window-caption-text": "text", "window-caption-text-active": "c12", - "window-border": "background-main-2", - "window-border-inner": "background-main-1", + "window-border": "background-main-2", + "window-border-inner": "background-main-1", // material-button - "material-button-background": "fab-background", - "material-button-background-disabled": "default-button-disabled-background", - "material-button-background-hovered": "default-button-hover-background", - "material-button-background-pressed": "default-button-active-background", + "material-button-background": "fab-background", + "material-button-background-disabled": "default-button-disabled-background", + "material-button-background-hovered": "default-button-hover-background", + "material-button-background-pressed": "default-button-active-background", "material-button-text-disabled": "default-button-disabled-background", "material-button-text": "default-button-text-outline", @@ -88,43 +84,43 @@ qx.Theme.define("osparc.theme.ColorLight", { // backgrounds "background-selected": "default-button-background", - "background-selected-disabled": "default-button-disabled", - "background-selected-dark": "product-color", + "background-selected-disabled": "default-button-disabled", + "background-selected-dark": "product-color", "background-disabled": "background-main", - "background-disabled-checked": "background-main-1", + "background-disabled-checked": "background-main-1", "background-pane": "background-main", // tabview "tabview-unselected": "c14", - "tabview-button-border": "c14", + "tabview-button-border": "product-color", "tabview-label-active-disabled": "c10", "tabview-pane-background": "transparent", "tabview-button-background": "transparent", // scrollbar - "scrollbar-passive": "background-main-4", - "scrollbar-active": "background-main-5", + "scrollbar-passive": "background-main-4", + "scrollbar-active": "background-main-5", // form - "button": "background-main-4", - "button-border": "background-main-5", + "button": "background-main-4", + "button-border": "background-main-5", "button-border-hovered": "c07", - "button-box": "background-main-3", - "button-box-pressed": "background-main-4", + "button-box": "background-main-3", + "button-box-pressed": "background-main-4", "border-lead": "c07", // group box - "white-box-border": "background-main-2", + "white-box-border": "background-main-2", // borders // 'border-main' is an alias of 'background-selected' (compatibility reasons) - "border": "background-main-3", + "border": "background-main-3", "border-focused": "c09", "border-invalid": "failed-red", "border-disabled": "background-main", // separator - "border-separator": "fab-background", + "border-separator": "background-main-3", // tooltip "tooltip": "flash_message_bg", @@ -135,13 +131,13 @@ qx.Theme.define("osparc.theme.ColorLight", { "table-header": "background-main", "table-header-foreground": "c09", "table-header-border": "c07", - "table-focus-indicator": "background-main-5", + "table-focus-indicator": "background-main-5", // used in table code "table-header-cell": "background-main", - "table-row-background-focused-selected": "background-main-4", - "table-row-background-focused": "background-main-3", - 
"table-row-background-selected": "background-main-4", + "table-row-background-focused-selected": "background-main-4", + "table-row-background-focused": "background-main-3", + "table-row-background-selected": "background-main-4", "table-row-background-even": "background-main", "table-row-background-odd": "background-main", @@ -157,17 +153,19 @@ qx.Theme.define("osparc.theme.ColorLight", { "progressive-table-header": "c08", "progressive-table-row-background-even": "background-main", "progressive-table-row-background-odd": "background-main", - "progressive-progressbar-background": "background-main", + "progressive-progressbar-background": "background-main", "progressive-progressbar-indicator-done": "background-main", - "progressive-progressbar-indicator-undone": "background-main-1", - "progressive-progressbar-percent-background": "background-main", - "progressive-progressbar-percent-text": "background-main-1", + "progressive-progressbar-indicator-undone": "background-main-1", + "progressive-progressbar-percent-background": "background-main", + "progressive-progressbar-percent-text": "background-main-1", // OSPARC "workbench-edge-comp-active": "#888888", "workbench-edge-api-active": "#444444", "workbench-start-hint": "#AFAFAF", + "workbench-view-navbar": "c02", + "workbench-view-splitter": "background-main-3", "node-background": "rgba(113, 157, 181, 0.35)", "node-selected-background": "strong-main", diff --git a/services/static-webserver/client/source/class/osparc/widget/NodeOptions.js b/services/static-webserver/client/source/class/osparc/widget/NodeOptions.js index 180de5bb2cb..7cf74384589 100644 --- a/services/static-webserver/client/source/class/osparc/widget/NodeOptions.js +++ b/services/static-webserver/client/source/class/osparc/widget/NodeOptions.js @@ -33,12 +33,6 @@ qx.Class.define("osparc.widget.NodeOptions", { this.setNode(node); }, - events: { - "versionChanged": "qx.event.type.Event", - "bootModeChanged": "qx.event.type.Event", - "limitsChanged": "qx.event.type.Event" - }, - properties: { node: { check: "osparc.data.model.Node", @@ -74,7 +68,6 @@ qx.Class.define("osparc.widget.NodeOptions", { (node.isUpdatable() || node.isDeprecated() || node.isRetired()) ) { const lifeCycleView = new osparc.node.LifeCycleView(node); - node.addListener("versionChanged", () => this.fireEvent("versionChanged")); sections.push(lifeCycleView); showStartStopButton = true; @@ -83,7 +76,6 @@ qx.Class.define("osparc.widget.NodeOptions", { // Boot Options if (node.hasBootModes()) { const bootOptionsView = new osparc.node.BootOptionsView(node); - node.addListener("bootModeChanged", () => this.fireEvent("bootModeChanged")); sections.push(bootOptionsView); showStartStopButton = true; @@ -95,7 +87,6 @@ qx.Class.define("osparc.widget.NodeOptions", { (node.isComputational() || node.isDynamic()) ) { const updateResourceLimitsView = new osparc.node.UpdateResourceLimitsView(node); - node.addListener("limitsChanged", () => this.fireEvent("limitsChanged")); sections.push(updateResourceLimitsView); showStartStopButton |= node.isDynamic(); diff --git a/services/static-webserver/client/source/class/osparc/workbench/DiskUsageIndicator.js b/services/static-webserver/client/source/class/osparc/workbench/DiskUsageIndicator.js index 173b3689524..e733be3b6bc 100644 --- a/services/static-webserver/client/source/class/osparc/workbench/DiskUsageIndicator.js +++ b/services/static-webserver/client/source/class/osparc/workbench/DiskUsageIndicator.js @@ -172,7 +172,7 @@ qx.Class.define("osparc.workbench.DiskUsageIndicator", { 
toolTipText += this.tr("Data storage: ") + osparc.utils.Utils.bytesToSize(diskVolsUsage.free) + "
"; toolTipText += this.tr("I/O storage: ") + osparc.utils.Utils.bytesToSize(diskHostUsage.free) + "
"; } - const bgColor = qx.theme.manager.Color.getInstance().resolve("tab_navigation_bar_background_color"); + const bgColor = qx.theme.manager.Color.getInstance().resolve("workbench-view-navbar"); const color2 = qx.theme.manager.Color.getInstance().resolve("progressive-progressbar-background"); indicator.getContentElement().setStyles({ "background-color": bgColor, From d9fb9d26bea8142c588650aae212224e1686b4fe Mon Sep 17 00:00:00 2001 From: Matus Drobuliak <60785969+matusdrobuliak66@users.noreply.github.com> Date: Wed, 13 Nov 2024 13:24:06 +0100 Subject: [PATCH 13/22] =?UTF-8?q?=F0=9F=90=9B=20folder=20full=20search=20a?= =?UTF-8?q?dding=20text=20query=20parameter=20(#6716)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- api/specs/web-server/_folders.py | 1 + .../api/v0/openapi.yaml | 6 ++++++ .../folders/_folders_api.py | 3 +++ .../folders/_folders_db.py | 3 +++ .../folders/_folders_handlers.py | 1 + .../simcore_service_webserver/folders/_models.py | 16 +++++++++++++++- .../04/folders/test_folders__full_search.py | 8 ++++++++ 7 files changed, 37 insertions(+), 1 deletion(-) diff --git a/api/specs/web-server/_folders.py b/api/specs/web-server/_folders.py index 25eecea5cd0..ef5e29ac85d 100644 --- a/api/specs/web-server/_folders.py +++ b/api/specs/web-server/_folders.py @@ -69,6 +69,7 @@ async def list_folders( ) async def list_folders_full_search( params: Annotated[PageQueryParameters, Depends()], + text: str | None = None, order_by: Annotated[ Json, Query( diff --git a/services/web/server/src/simcore_service_webserver/api/v0/openapi.yaml b/services/web/server/src/simcore_service_webserver/api/v0/openapi.yaml index 40d0841c65a..860d9869218 100644 --- a/services/web/server/src/simcore_service_webserver/api/v0/openapi.yaml +++ b/services/web/server/src/simcore_service_webserver/api/v0/openapi.yaml @@ -2697,6 +2697,12 @@ paths: summary: List Folders Full Search operationId: list_folders_full_search parameters: + - required: false + schema: + title: Text + type: string + name: text + in: query - description: Order by field (modified_at|name|description) and direction (asc|desc). The default sorting order is ascending. 
required: false diff --git a/services/web/server/src/simcore_service_webserver/folders/_folders_api.py b/services/web/server/src/simcore_service_webserver/folders/_folders_api.py index a791a65c715..2ef9818f431 100644 --- a/services/web/server/src/simcore_service_webserver/folders/_folders_api.py +++ b/services/web/server/src/simcore_service_webserver/folders/_folders_api.py @@ -172,6 +172,7 @@ async def list_folders( else WorkspaceQuery(workspace_scope=WorkspaceScope.PRIVATE) ), filter_trashed=trashed, + filter_by_text=None, offset=offset, limit=limit, order_by=order_by, @@ -199,6 +200,7 @@ async def list_folders_full_search( app: web.Application, user_id: UserID, product_name: ProductName, + text: str | None, trashed: bool | None, offset: NonNegativeInt, limit: int, @@ -213,6 +215,7 @@ async def list_folders_full_search( folder_query=FolderQuery(folder_scope=FolderScope.ALL), workspace_query=WorkspaceQuery(workspace_scope=WorkspaceScope.ALL), filter_trashed=trashed, + filter_by_text=text, offset=offset, limit=limit, order_by=order_by, diff --git a/services/web/server/src/simcore_service_webserver/folders/_folders_db.py b/services/web/server/src/simcore_service_webserver/folders/_folders_db.py index 0af9d36dadf..f4e27fa3a7a 100644 --- a/services/web/server/src/simcore_service_webserver/folders/_folders_db.py +++ b/services/web/server/src/simcore_service_webserver/folders/_folders_db.py @@ -113,6 +113,7 @@ async def list_( # pylint: disable=too-many-arguments,too-many-branches workspace_query: WorkspaceQuery, # attribute filters filter_trashed: bool | None, + filter_by_text: str | None, # pagination offset: NonNegativeInt, limit: int, @@ -199,6 +200,8 @@ async def list_( # pylint: disable=too-many-arguments,too-many-branches else: assert folder_query.folder_scope == FolderScope.ROOT # nosec attributes_filters.append(folders_v2.c.parent_folder_id.is_(None)) + if filter_by_text: + attributes_filters.append(folders_v2.c.name.ilike(f"%{filter_by_text}%")) ### # Combined diff --git a/services/web/server/src/simcore_service_webserver/folders/_folders_handlers.py b/services/web/server/src/simcore_service_webserver/folders/_folders_handlers.py index 7050205bd7d..b1a01ef61aa 100644 --- a/services/web/server/src/simcore_service_webserver/folders/_folders_handlers.py +++ b/services/web/server/src/simcore_service_webserver/folders/_folders_handlers.py @@ -119,6 +119,7 @@ async def list_folders_full_search(request: web.Request): app=request.app, user_id=req_ctx.user_id, product_name=req_ctx.product_name, + text=query_params.text, trashed=query_params.filters.trashed, offset=query_params.offset, limit=query_params.limit, diff --git a/services/web/server/src/simcore_service_webserver/folders/_models.py b/services/web/server/src/simcore_service_webserver/folders/_models.py index 5e48f46fa37..899514a271b 100644 --- a/services/web/server/src/simcore_service_webserver/folders/_models.py +++ b/services/web/server/src/simcore_service_webserver/folders/_models.py @@ -6,7 +6,10 @@ from models_library.rest_ordering import OrderBy, OrderDirection from models_library.rest_pagination import PageQueryParameters from models_library.users import UserID -from models_library.utils.common_validators import null_or_none_str_to_none_validator +from models_library.utils.common_validators import ( + empty_str_to_none_pre_validator, + null_or_none_str_to_none_validator, +) from models_library.workspaces import WorkspaceID from pydantic import BaseModel, Extra, Field, Json, validator from servicelib.aiohttp.requests_validation 
import RequestParams, StrictRequestParams @@ -88,6 +91,17 @@ class Config: class FolderListFullSearchWithJsonStrQueryParams( PageQueryParameters, FolderListSortParams, FiltersQueryParameters[FolderFilters] ): + text: str | None = Field( + default=None, + description="Multi column full text search, across all folders and workspaces", + max_length=100, + example="My Project", + ) + + _empty_is_none = validator("text", allow_reuse=True, pre=True)( + empty_str_to_none_pre_validator + ) + class Config: extra = Extra.forbid diff --git a/services/web/server/tests/unit/with_dbs/04/folders/test_folders__full_search.py b/services/web/server/tests/unit/with_dbs/04/folders/test_folders__full_search.py index b9da926543e..74126da042f 100644 --- a/services/web/server/tests/unit/with_dbs/04/folders/test_folders__full_search.py +++ b/services/web/server/tests/unit/with_dbs/04/folders/test_folders__full_search.py @@ -103,6 +103,14 @@ async def test_folders_full_search( data, _ = await assert_status(resp, status.HTTP_200_OK) assert len(data) == 3 + # list full folder search with specific text + url = client.app.router["list_folders_full_search"].url_for() + query_parameters = {"text": "My subfolder"} + url_with_query = url.with_query(**query_parameters) + resp = await client.get(f"{url_with_query}") + data, _ = await assert_status(resp, status.HTTP_200_OK) + assert len(data) == 1 + # Create new user async with LoggedUser(client) as new_logged_user: # list full folder search From 0718e142676741607a38d6fc99c78aaef760577c Mon Sep 17 00:00:00 2001 From: Odei Maiz <33152403+odeimaiz@users.noreply.github.com> Date: Wed, 13 Nov 2024 15:04:52 +0100 Subject: [PATCH 14/22] =?UTF-8?q?=E2=9C=A8=20[Frontend]=20Enh:=20``:search?= =?UTF-8?q?``=20also=20``/folders``=20(#6713)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../osparc/dashboard/FolderButtonItem.js | 42 +++++++--- .../osparc/dashboard/ResourceBrowserBase.js | 12 +++ .../dashboard/ResourceContainerManager.js | 2 + .../class/osparc/dashboard/StudyBrowser.js | 81 ++++++++++++------- .../source/class/osparc/data/Resources.js | 8 +- .../source/class/osparc/store/Folders.js | 51 ++++++++++-- .../client/source/class/osparc/store/Store.js | 6 ++ 7 files changed, 155 insertions(+), 47 deletions(-) diff --git a/services/static-webserver/client/source/class/osparc/dashboard/FolderButtonItem.js b/services/static-webserver/client/source/class/osparc/dashboard/FolderButtonItem.js index 526f7032c27..0971a7d4990 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/FolderButtonItem.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/FolderButtonItem.js @@ -46,7 +46,8 @@ qx.Class.define("osparc.dashboard.FolderButtonItem", { "folderSelected": "qx.event.type.Data", "folderUpdated": "qx.event.type.Data", "moveFolderToRequested": "qx.event.type.Data", - "deleteFolderRequested": "qx.event.type.Data" + "deleteFolderRequested": "qx.event.type.Data", + "changeContext": "qx.event.type.Data", }, properties: { @@ -186,19 +187,38 @@ qx.Class.define("osparc.dashboard.FolderButtonItem", { position: "bottom-right" }); - const editButton = new qx.ui.menu.Button(this.tr("Rename..."), "@FontAwesome5Solid/pencil-alt/12"); - editButton.addListener("execute", () => this.__editFolder(), this); - menu.add(editButton); + const studyBrowserContext = osparc.store.Store.getInstance().getStudyBrowserContext(); + if ( + studyBrowserContext === "search" || + studyBrowserContext === "studiesAndFolders" + ) { + const 
editButton = new qx.ui.menu.Button(this.tr("Rename..."), "@FontAwesome5Solid/pencil-alt/12"); + editButton.addListener("execute", () => this.__editFolder(), this); + menu.add(editButton); + + if (studyBrowserContext === "search") { + const openLocationButton = new qx.ui.menu.Button(this.tr("Open location"), "@FontAwesome5Solid/external-link-alt/12"); + openLocationButton.addListener("execute", () => { + const folder = this.getFolder(); + this.fireDataEvent("changeContext", { + context: "studiesAndFolders", + workspaceId: folder.getWorkspaceId(), + folderId: folder.getParentFolderId(), + }); + }, this); + menu.add(openLocationButton); + } - const moveToButton = new qx.ui.menu.Button(this.tr("Move to..."), "@FontAwesome5Solid/folder/12"); - moveToButton.addListener("execute", () => this.fireDataEvent("moveFolderToRequested", this.getFolderId()), this); - menu.add(moveToButton); + const moveToButton = new qx.ui.menu.Button(this.tr("Move to..."), "@FontAwesome5Solid/folder/12"); + moveToButton.addListener("execute", () => this.fireDataEvent("moveFolderToRequested", this.getFolderId()), this); + menu.add(moveToButton); - menu.addSeparator(); + menu.addSeparator(); - const deleteButton = new qx.ui.menu.Button(this.tr("Delete"), "@FontAwesome5Solid/trash/12"); - deleteButton.addListener("execute", () => this.__deleteFolderRequested(), this); - menu.add(deleteButton); + const deleteButton = new qx.ui.menu.Button(this.tr("Delete"), "@FontAwesome5Solid/trash/12"); + deleteButton.addListener("execute", () => this.__deleteFolderRequested(), this); + menu.add(deleteButton); + } menuButton.setMenu(menu); }, diff --git a/services/static-webserver/client/source/class/osparc/dashboard/ResourceBrowserBase.js b/services/static-webserver/client/source/class/osparc/dashboard/ResourceBrowserBase.js index 31524310535..a80672bd3cd 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/ResourceBrowserBase.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/ResourceBrowserBase.js @@ -280,6 +280,14 @@ qx.Class.define("osparc.dashboard.ResourceBrowserBase", { const workspaceId = e.getData(); this._workspaceSelected(workspaceId); }, this); + resourcesContainer.addListener("changeContext", e => { + const { + context, + workspaceId, + folderId, + } = e.getData(); + this._changeContext(context, workspaceId, folderId); + }, this); resourcesContainer.addListener("workspaceUpdated", e => this._workspaceUpdated(e.getData())); resourcesContainer.addListener("deleteWorkspaceRequested", e => this._deleteWorkspaceRequested(e.getData())); @@ -479,6 +487,10 @@ qx.Class.define("osparc.dashboard.ResourceBrowserBase", { throw new Error("Abstract method called!"); }, + _changeContext: function(context, workspaceId, folderId) { + throw new Error("Abstract method called!"); + }, + _folderSelected: function(folderId) { throw new Error("Abstract method called!"); }, diff --git a/services/static-webserver/client/source/class/osparc/dashboard/ResourceContainerManager.js b/services/static-webserver/client/source/class/osparc/dashboard/ResourceContainerManager.js index 187f6b441d3..b28b5d89a04 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/ResourceContainerManager.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/ResourceContainerManager.js @@ -79,6 +79,7 @@ qx.Class.define("osparc.dashboard.ResourceContainerManager", { "workspaceSelected": "qx.event.type.Data", "workspaceUpdated": "qx.event.type.Data", "deleteWorkspaceRequested": "qx.event.type.Data", 
+ "changeContext": "qx.event.type.Data", }, statics: { @@ -419,6 +420,7 @@ qx.Class.define("osparc.dashboard.ResourceContainerManager", { "folderUpdated", "moveFolderToRequested", "deleteFolderRequested", + "changeContext", ].forEach(eName => card.addListener(eName, e => this.fireDataEvent(eName, e.getData()))); return card; }, diff --git a/services/static-webserver/client/source/class/osparc/dashboard/StudyBrowser.js b/services/static-webserver/client/source/class/osparc/dashboard/StudyBrowser.js index 288290b06df..b82286a5f0c 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/StudyBrowser.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/StudyBrowser.js @@ -171,17 +171,30 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { if ( !osparc.auth.Manager.getInstance().isLoggedIn() || !osparc.utils.DisabledPlugins.isFoldersEnabled() || - this.getCurrentContext() !== "studiesAndFolders" || + this.getCurrentContext() === "workspaces" || this.__loadingFolders ) { return; } - const workspaceId = this.getCurrentWorkspaceId(); - const folderId = this.getCurrentFolderId(); this.__loadingFolders = true; + let request = null; + switch (this.getCurrentContext()) { + case "search": { + const filterData = this._searchBarFilter.getFilterData(); + const text = filterData.text ? encodeURIComponent(filterData.text) : ""; // name, description and uuid + request = osparc.store.Folders.getInstance().searchFolders(text, this.getOrderBy()); + break; + } + case "studiesAndFolders": { + const workspaceId = this.getCurrentWorkspaceId(); + const folderId = this.getCurrentFolderId(); + request = osparc.store.Folders.getInstance().fetchFolders(folderId, workspaceId, this.getOrderBy()); + break; + } + } this.__setFoldersToList([]); - osparc.store.Folders.getInstance().fetchFolders(folderId, workspaceId, this.getOrderBy()) + request .then(folders => { this.__setFoldersToList(folders); }) @@ -384,7 +397,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { }, _workspaceSelected: function(workspaceId) { - this.__changeContext("studiesAndFolders", workspaceId, null); + this._changeContext("studiesAndFolders", workspaceId, null); }, _workspaceUpdated: function() { @@ -444,7 +457,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { }, _folderSelected: function(folderId) { - this.__changeContext("studiesAndFolders", this.getCurrentWorkspaceId(), folderId); + this._changeContext("studiesAndFolders", this.getCurrentWorkspaceId(), folderId); }, _folderUpdated: function() { @@ -653,17 +666,23 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { const requestParams = {}; requestParams.orderBy = JSON.stringify(this.getOrderBy()); - const filterData = this._searchBarFilter.getFilterData(); - // Use the ``search`` functionality only if the user types some text - // tags should only be used to filter the current context (search context ot workspace/folder context) - if (filterData.text) { - requestParams.text = filterData.text ? encodeURIComponent(filterData.text) : ""; // name, description and uuid - requestParams["tagIds"] = filterData.tags.length ? 
filterData.tags.join(",") : ""; - return requestParams; + switch (this.getCurrentContext()) { + case "studiesAndFolders": + requestParams.workspaceId = this.getCurrentWorkspaceId(); + requestParams.folderId = this.getCurrentFolderId(); + break; + case "search": { + // Use the ``search`` functionality only if the user types some text + // tags should only be used to filter the current context (search context ot workspace/folder context) + const filterData = this._searchBarFilter.getFilterData(); + if (filterData.text) { + requestParams.text = filterData.text ? encodeURIComponent(filterData.text) : ""; // name, description and uuid + requestParams["tagIds"] = filterData.tags.length ? filterData.tags.join(",") : ""; + } + break; + } } - requestParams.workspaceId = this.getCurrentWorkspaceId(); - requestParams.folderId = this.getCurrentFolderId(); return requestParams; }, @@ -688,10 +707,16 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { resolveWResponse: true }; - if ("text" in requestParams) { - return osparc.data.Resources.fetch("studies", "getPageSearch", params, options); + let request = null; + switch (this.getCurrentContext()) { + case "search": + request = osparc.data.Resources.fetch("studies", "getPageSearch", params, options); + break; + case "studiesAndFolders": + request = osparc.data.Resources.fetch("studies", "getPage", params, options); + break; } - return osparc.data.Resources.fetch("studies", "getPage", params, options); + return request; }, invalidateStudies: function() { @@ -886,10 +911,11 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { }); this._resourcesContainer.addListener("changeSelection", e => { + const currentContext = this.getCurrentContext(); const selection = e.getData(); studiesMoveButton.set({ - visibility: selection.length ? "visible" : "excluded", + visibility: selection.length && currentContext === "studiesAndFolders" ? "visible" : "excluded", label: selection.length > 1 ? 
this.tr("Move selected")+" ("+selection.length+")" : this.tr("Move") }); @@ -910,7 +936,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { header.addListener("locationChanged", () => { const workspaceId = header.getCurrentWorkspaceId(); const folderId = header.getCurrentFolderId(); - this.__changeContext("studiesAndFolders", workspaceId, folderId); + this._changeContext("studiesAndFolders", workspaceId, folderId); }, this); const workspacesAndFoldersTree = this._resourceFilter.getWorkspacesAndFoldersTree(); @@ -918,27 +944,27 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { const context = e.getData(); const workspaceId = context["workspaceId"]; if (workspaceId === -1) { - this.__changeContext("workspaces"); + this._changeContext("workspaces"); } else { const folderId = context["folderId"]; - this.__changeContext("studiesAndFolders", workspaceId, folderId); + this._changeContext("studiesAndFolders", workspaceId, folderId); } }, this); this._searchBarFilter.addListener("filterChanged", e => { const filterData = e.getData(); if (filterData.text) { - this.__changeContext("search"); + this._changeContext("search"); } else { const workspaceId = this.getCurrentWorkspaceId(); const folderId = this.getCurrentFolderId(); - this.__changeContext("studiesAndFolders", workspaceId, folderId); + this._changeContext("studiesAndFolders", workspaceId, folderId); } }); } }, - __changeContext: function(context, workspaceId = null, folderId = null) { + _changeContext: function(context, workspaceId = null, folderId = null) { if (osparc.utils.DisabledPlugins.isFoldersEnabled()) { if ( context !== "search" && // reload studies for a new search @@ -950,6 +976,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { return; } + osparc.store.Store.getInstance().setStudyBrowserContext(context); this.set({ currentContext: context, currentWorkspaceId: workspaceId, @@ -962,7 +989,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { this._resourcesContainer.setResourcesToList([]); if (context === "search") { - this.__setFoldersToList([]); + this.__reloadFolders(); this.__reloadStudies(); } else if (context === "workspaces") { this._searchBarFilter.resetFilters(); @@ -1342,7 +1369,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { __getOpenLocationMenuButton: function(studyData) { const openLocationButton = new qx.ui.menu.Button(this.tr("Open location"), "@FontAwesome5Solid/external-link-alt/12"); openLocationButton.addListener("execute", () => { - this.__changeContext("studiesAndFolders", studyData["workspaceId"], studyData["folderId"]); + this._changeContext("studiesAndFolders", studyData["workspaceId"], studyData["folderId"]); }, this); return openLocationButton; }, diff --git a/services/static-webserver/client/source/class/osparc/data/Resources.js b/services/static-webserver/client/source/class/osparc/data/Resources.js index 5484107fd96..007ba33eddd 100644 --- a/services/static-webserver/client/source/class/osparc/data/Resources.js +++ b/services/static-webserver/client/source/class/osparc/data/Resources.js @@ -301,6 +301,11 @@ qx.Class.define("osparc.data.Resources", { method: "GET", url: statics.API + "/folders?workspace_id={workspaceId}&folder_id={folderId}&offset={offset}&limit={limit}&order_by={orderBy}" }, + getPageSearch: { + useCache: false, + method: "GET", + url: statics.API + "/folders:search?offset={offset}&limit={limit}&text={text}&order_by={orderBy}" + }, getOne: { method: "GET", url: statics.API + "/folders/{folderId}" @@ -1368,7 +1373,7 @@ qx.Class.define("osparc.data.Resources", { }); 
}, - getAllPages: function(resource, params = {}) { + getAllPages: function(resource, params = {}, endpoint = "getPage") { return new Promise((resolve, reject) => { let resources = []; let offset = 0; @@ -1377,7 +1382,6 @@ qx.Class.define("osparc.data.Resources", { } params["url"]["offset"] = offset; params["url"]["limit"] = 10; - const endpoint = "getPage"; const options = { resolveWResponse: true }; diff --git a/services/static-webserver/client/source/class/osparc/store/Folders.js b/services/static-webserver/client/source/class/osparc/store/Folders.js index 16385de935c..727896c28ef 100644 --- a/services/static-webserver/client/source/class/osparc/store/Folders.js +++ b/services/static-webserver/client/source/class/osparc/store/Folders.js @@ -31,6 +31,17 @@ qx.Class.define("osparc.store.Folders", { "folderMoved": "qx.event.type.Data", }, + statics: { + curateOrderBy: function(orderBy) { + const curatedOrderBy = osparc.utils.Utils.deepCloneObject(orderBy); + if (curatedOrderBy.field !== "name") { + // only "modified_at" and "name" supported + curatedOrderBy.field = "modified_at"; + } + return curatedOrderBy; + }, + }, + members: { foldersCached: null, @@ -40,7 +51,7 @@ qx.Class.define("osparc.store.Folders", { orderBy = { field: "modified_at", direction: "desc" - } + }, ) { if (osparc.auth.Data.getInstance().isGuest()) { return new Promise(resolve => { @@ -48,12 +59,7 @@ qx.Class.define("osparc.store.Folders", { }); } - const curatedOrderBy = osparc.utils.Utils.deepCloneObject(orderBy); - if (curatedOrderBy.field !== "name") { - // only "modified_at" and "name" supported - curatedOrderBy.field = "modified_at"; - } - + const curatedOrderBy = this.self().curateOrderBy(orderBy); const params = { url: { workspaceId, @@ -72,6 +78,37 @@ qx.Class.define("osparc.store.Folders", { }); }, + searchFolders: function( + text, + orderBy = { + field: "modified_at", + direction: "desc" + }, + ) { + if (osparc.auth.Data.getInstance().isGuest()) { + return new Promise(resolve => { + resolve([]); + }); + } + + const curatedOrderBy = this.self().curateOrderBy(orderBy); + const params = { + url: { + text, + orderBy: JSON.stringify(curatedOrderBy), + } + }; + return osparc.data.Resources.getInstance().getAllPages("folders", params, "getPageSearch") + .then(foldersData => { + const folders = []; + foldersData.forEach(folderData => { + const folder = this.__addToCache(folderData); + folders.push(folder); + }); + return folders; + }); + }, + postFolder: function(name, parentFolderId = null, workspaceId = null) { const newFolderData = { name, diff --git a/services/static-webserver/client/source/class/osparc/store/Store.js b/services/static-webserver/client/source/class/osparc/store/Store.js index 0e015ed7811..89ccc5e51a0 100644 --- a/services/static-webserver/client/source/class/osparc/store/Store.js +++ b/services/static-webserver/client/source/class/osparc/store/Store.js @@ -66,6 +66,12 @@ qx.Class.define("osparc.store.Store", { init: null, nullable: true }, + studyBrowserContext: { + check: ["studiesAndFolders", "workspaces", "search"], + init: "studiesAndFolders", + nullable: false, + event: "changeStudyBrowserContext", + }, studies: { check: "Array", init: [] From d5dca964611fdf683e684c6eab61a329722a2388 Mon Sep 17 00:00:00 2001 From: Odei Maiz <33152403+odeimaiz@users.noreply.github.com> Date: Wed, 13 Nov 2024 15:16:03 +0100 Subject: [PATCH 15/22] =?UTF-8?q?=F0=9F=8E=A8=F0=9F=90=9B=20Enh/fix:=20fro?= =?UTF-8?q?ntend=20knows=20about=20``trashedAt``=20(#6717)?= MIME-Version: 1.0 Content-Type: text/plain; 
charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../source/class/osparc/data/model/Folder.js | 9 ++++++++- .../client/source/class/osparc/data/model/Study.js | 14 +++++++++++--- .../client/source/class/osparc/store/Folders.js | 2 ++ 3 files changed, 21 insertions(+), 4 deletions(-) diff --git a/services/static-webserver/client/source/class/osparc/data/model/Folder.js b/services/static-webserver/client/source/class/osparc/data/model/Folder.js index 1dd99d015a2..b8b9eb03b21 100644 --- a/services/static-webserver/client/source/class/osparc/data/model/Folder.js +++ b/services/static-webserver/client/source/class/osparc/data/model/Folder.js @@ -37,6 +37,7 @@ qx.Class.define("osparc.data.model.Folder", { owner: folderData.owner, createdAt: new Date(folderData.createdAt), lastModified: new Date(folderData.modifiedAt), + trashedAt: folderData.trashedAt ? new Date(folderData.trashedAt) : this.getTrashedAt(), }); }, @@ -95,7 +96,13 @@ qx.Class.define("osparc.data.model.Folder", { nullable: true, init: null, event: "changeLastModified" - } + }, + + trashedAt: { + check: "Date", + nullable: true, + init: null, + }, }, statics: { diff --git a/services/static-webserver/client/source/class/osparc/data/model/Study.js b/services/static-webserver/client/source/class/osparc/data/model/Study.js index 598e0575d22..ab178aca669 100644 --- a/services/static-webserver/client/source/class/osparc/data/model/Study.js +++ b/services/static-webserver/client/source/class/osparc/data/model/Study.js @@ -58,7 +58,8 @@ qx.Class.define("osparc.data.model.Study", { state: studyData.state || this.getState(), quality: studyData.quality || this.getQuality(), permalink: studyData.permalink || this.getPermalink(), - dev: studyData.dev || this.getDev() + dev: studyData.dev || this.getDev(), + trashedAt: studyData.trashedAt ? 
new Date(studyData.trashedAt) : this.getTrashedAt(), }); const wbData = studyData.workbench || this.getWorkbench(); @@ -209,7 +210,13 @@ qx.Class.define("osparc.data.model.Study", { nullable: true, event: "changeReadOnly", init: true - } + }, + + trashedAt: { + check: "Date", + nullable: true, + init: null, + }, // ------ ignore for serializing ------ }, @@ -218,7 +225,8 @@ qx.Class.define("osparc.data.model.Study", { "permalink", "state", "pipelineRunning", - "readOnly" + "readOnly", + "trashedAt", ], IgnoreModelizationProps: [ diff --git a/services/static-webserver/client/source/class/osparc/store/Folders.js b/services/static-webserver/client/source/class/osparc/store/Folders.js index 727896c28ef..7deb66618bb 100644 --- a/services/static-webserver/client/source/class/osparc/store/Folders.js +++ b/services/static-webserver/client/source/class/osparc/store/Folders.js @@ -178,6 +178,8 @@ qx.Class.define("osparc.store.Folders", { folder.set("createdAt", new Date(folderData["createdAt"])); } else if (key === "modifiedAt") { folder.set("lastModified", new Date(folderData["modifiedAt"])); + } else if (key === "trashedAt") { + folder.set("trashedAt", new Date(folderData["trashedAt"])); } else { folder.set(key, folderData[key]); } From e6e2c705c8e4ac4a6b62a73668ef57f720f55284 Mon Sep 17 00:00:00 2001 From: Odei Maiz <33152403+odeimaiz@users.noreply.github.com> Date: Wed, 13 Nov 2024 16:12:24 +0100 Subject: [PATCH 16/22] =?UTF-8?q?=F0=9F=90=9B=20[Frontend]=20TIP:=20New=20?= =?UTF-8?q?plan=20after=20creating=20its=20template=20(#6710)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../class/osparc/dashboard/StudyBrowser.js | 45 ++++++++++--------- .../source/class/osparc/info/StudyLarge.js | 4 ++ 2 files changed, 28 insertions(+), 21 deletions(-) diff --git a/services/static-webserver/client/source/class/osparc/dashboard/StudyBrowser.js b/services/static-webserver/client/source/class/osparc/dashboard/StudyBrowser.js index b82286a5f0c..ceaee03b3ac 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/StudyBrowser.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/StudyBrowser.js @@ -746,7 +746,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { break; case "tis": case "tiplite": - this.__addTIPPlusButtons(); + this.__addTIPPlusButton(); break; case "s4l": case "s4lacad": @@ -770,24 +770,27 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { this._resourcesContainer.addNonResourceCard(newStudyBtn); }, - __addTIPPlusButtons: function() { - osparc.data.Resources.get("templates") - .then(templates => { - if (templates) { - osparc.utils.Utils.fetchJSON("/resource/osparc/new_studies.json") - .then(newStudiesData => { - const product = osparc.product.Utils.getProductName() - if (product in newStudiesData) { - const mode = this._resourcesContainer.getMode(); - const title = this.tr("New Plan"); - const newStudyBtn = (mode === "grid") ? new osparc.dashboard.GridButtonNew(title) : new osparc.dashboard.ListButtonNew(title); - newStudyBtn.setCardKey("new-study"); - newStudyBtn.subscribeToFilterGroup("searchBarFilter"); - osparc.utils.Utils.setIdToWidget(newStudyBtn, "newStudyBtn"); - this._resourcesContainer.addNonResourceCard(newStudyBtn); - newStudyBtn.addListener("execute", () => { - newStudyBtn.setValue(false); + __addTIPPlusButton: function() { + const mode = this._resourcesContainer.getMode(); + const title = this.tr("New Plan"); + const newStudyBtn = (mode === "grid") ? 
new osparc.dashboard.GridButtonNew(title) : new osparc.dashboard.ListButtonNew(title); + newStudyBtn.setCardKey("new-study"); + newStudyBtn.subscribeToFilterGroup("searchBarFilter"); + osparc.utils.Utils.setIdToWidget(newStudyBtn, "newStudyBtn"); + this._resourcesContainer.addNonResourceCard(newStudyBtn); + newStudyBtn.setEnabled(false); + osparc.utils.Utils.fetchJSON("/resource/osparc/new_studies.json") + .then(newStudiesData => { + const product = osparc.product.Utils.getProductName() + if (product in newStudiesData) { + newStudyBtn.setEnabled(true); + + newStudyBtn.addListener("execute", () => { + newStudyBtn.setValue(false); + osparc.data.Resources.get("templates") + .then(templates => { + if (templates) { const newStudies = new osparc.dashboard.NewStudies(newStudiesData[product]); newStudies.addListener("templatesLoaded", () => { newStudies.setGroupBy("category"); @@ -806,9 +809,9 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { }); osparc.utils.Utils.setIdToWidget(win, "newStudiesWindow"); }); - }); - } - }); + } + }); + }); } }); }, diff --git a/services/static-webserver/client/source/class/osparc/info/StudyLarge.js b/services/static-webserver/client/source/class/osparc/info/StudyLarge.js index 3351ed0fc96..5709bfd70a2 100644 --- a/services/static-webserver/client/source/class/osparc/info/StudyLarge.js +++ b/services/static-webserver/client/source/class/osparc/info/StudyLarge.js @@ -324,6 +324,10 @@ qx.Class.define("osparc.info.StudyLarge", { studyData["resourceType"] = this.__isTemplate ? "template" : "study"; this.fireDataEvent("updateStudy", studyData); qx.event.message.Bus.getInstance().dispatchByName("updateStudy", studyData); + if (this.__isTemplate) { + // reload templates + osparc.data.Resources.get("templates", {}, false) + } }) .catch(err => { console.error(err); From a4b7c7a67dea15a7bc40efb8afb08a93b81c0186 Mon Sep 17 00:00:00 2001 From: Matus Drobuliak <60785969+matusdrobuliak66@users.noreply.github.com> Date: Wed, 13 Nov 2024 18:54:09 +0100 Subject: [PATCH 17/22] =?UTF-8?q?=F0=9F=90=9B=20Fix=20listing=20folders=20?= =?UTF-8?q?in=20workspace=20(#6718)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Odei Maiz <33152403+odeimaiz@users.noreply.github.com> --- .../folders/_folders_db.py | 6 ++ ...t_workspaces__folders_and_projects_crud.py | 95 +++++++++++++++++++ 2 files changed, 101 insertions(+) diff --git a/services/web/server/src/simcore_service_webserver/folders/_folders_db.py b/services/web/server/src/simcore_service_webserver/folders/_folders_db.py index f4e27fa3a7a..e2992d111ee 100644 --- a/services/web/server/src/simcore_service_webserver/folders/_folders_db.py +++ b/services/web/server/src/simcore_service_webserver/folders/_folders_db.py @@ -178,6 +178,12 @@ async def list_( # pylint: disable=too-many-arguments,too-many-branches & (folders_v2.c.user_id.is_(None)) ) ) + + if workspace_query.workspace_scope == WorkspaceScope.SHARED: + shared_workspace_query = shared_workspace_query.where( + folders_v2.c.workspace_id == workspace_query.workspace_id + ) + else: shared_workspace_query = None diff --git a/services/web/server/tests/unit/with_dbs/04/workspaces/test_workspaces__folders_and_projects_crud.py b/services/web/server/tests/unit/with_dbs/04/workspaces/test_workspaces__folders_and_projects_crud.py index c95aebe6fdd..717de9303fd 100644 --- a/services/web/server/tests/unit/with_dbs/04/workspaces/test_workspaces__folders_and_projects_crud.py +++ 
b/services/web/server/tests/unit/with_dbs/04/workspaces/test_workspaces__folders_and_projects_crud.py @@ -365,3 +365,98 @@ async def test_workspaces_delete_folders( resp = await client.get(url) data, _ = await assert_status(resp, status.HTTP_200_OK) assert len(data) == 0 + + +@pytest.mark.parametrize("user_role,expected", [(UserRole.USER, status.HTTP_200_OK)]) +async def test_listing_folders_and_projects_in_workspace__multiple_workspaces_created( + client: TestClient, + logged_user: UserInfoDict, + user_project: ProjectDict, + expected: HTTPStatus, + mock_catalog_api_get_services_for_user_in_product: MockerFixture, + fake_project: ProjectDict, + workspaces_clean_db: None, +): + assert client.app + + # create a new workspace + url = client.app.router["create_workspace"].url_for() + resp = await client.post( + url.path, + json={ + "name": "My first workspace", + "description": "Custom description", + "thumbnail": None, + }, + ) + added_workspace_1, _ = await assert_status(resp, status.HTTP_201_CREATED) + + # Create project in workspace + project_data = deepcopy(fake_project) + project_data["workspace_id"] = f"{added_workspace_1['workspaceId']}" + project = await create_project( + client.app, + project_data, + user_id=logged_user["id"], + product_name="osparc", + ) + + # Create folder in workspace + url = client.app.router["create_folder"].url_for() + resp = await client.post( + url.path, + json={ + "name": "Original user folder", + "workspaceId": f"{added_workspace_1['workspaceId']}", + }, + ) + first_folder, _ = await assert_status(resp, status.HTTP_201_CREATED) + + # create a new workspace + url = client.app.router["create_workspace"].url_for() + resp = await client.post( + url.path, + json={ + "name": "My first workspace", + "description": "Custom description", + "thumbnail": None, + }, + ) + added_workspace_2, _ = await assert_status(resp, status.HTTP_201_CREATED) + + # Create project in workspace + project_data = deepcopy(fake_project) + project_data["workspace_id"] = f"{added_workspace_2['workspaceId']}" + project = await create_project( + client.app, + project_data, + user_id=logged_user["id"], + product_name="osparc", + ) + + # Create folder in workspace + url = client.app.router["create_folder"].url_for() + resp = await client.post( + url.path, + json={ + "name": "Original user folder", + "workspaceId": f"{added_workspace_2['workspaceId']}", + }, + ) + first_folder, _ = await assert_status(resp, status.HTTP_201_CREATED) + + # List projects in workspace 1 + base_url = client.app.router["list_projects"].url_for() + url = base_url.with_query({"workspace_id": f"{added_workspace_1['workspaceId']}"}) + resp = await client.get(url) + data, _ = await assert_status(resp, status.HTTP_200_OK) + assert len(data) == 1 + + # List folders in workspace 1 + base_url = client.app.router["list_folders"].url_for() + url = base_url.with_query( + {"workspace_id": f"{added_workspace_1['workspaceId']}", "folder_id": "null"} + ) + resp = await client.get(url) + data, _ = await assert_status(resp, status.HTTP_200_OK) + assert len(data) == 1 From a44de5c91dc8a81bd9f43ea40f1bfbb77fd6c1d4 Mon Sep 17 00:00:00 2001 From: Mads Bisgaard <126242332+bisgaard-itis@users.noreply.github.com> Date: Thu, 14 Nov 2024 09:40:21 +0100 Subject: [PATCH 18/22] =?UTF-8?q?=E2=9C=A8=20instrument=20(opentelemetry)?= =?UTF-8?q?=20httpx=20clients=20(#6715)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- packages/aws-library/requirements/_base.txt | 2 -- .../requirements/_base.txt | 27 
+++++++++++++++ .../requirements/_test.txt | 3 +- .../requirements/_tools.txt | 3 +- .../service-library/requirements/_base.txt | 2 -- .../service-library/requirements/_fastapi.in | 1 + .../service-library/requirements/_fastapi.txt | 6 ++++ .../servicelib/fastapi/http_client_thin.py | 8 ++++- .../src/servicelib/fastapi/tracing.py | 6 ++++ .../tests/fastapi/test_http_client_thin.py | 30 +++++++++++----- packages/simcore-sdk/requirements/_base.txt | 1 - services/agent/requirements/_base.txt | 8 +++-- services/api-server/requirements/_base.txt | 7 +++- .../core/application.py | 29 ++++++++++++---- .../services/catalog.py | 11 ++++-- .../services/director_v2.py | 6 +++- .../services/storage.py | 11 ++++-- .../services/webserver.py | 13 +++++-- .../utils/client_base.py | 6 +++- .../tests/unit/test_utils_client_base.py | 1 + services/autoscaling/requirements/_base.txt | 8 +++-- services/autoscaling/requirements/_test.txt | 4 --- services/catalog/requirements/_base.txt | 10 ++++-- .../core/application.py | 9 +++-- .../simcore_service_catalog/core/events.py | 7 ++-- .../services/director.py | 24 ++++++++++--- .../clusters-keeper/requirements/_base.txt | 8 +++-- .../clusters-keeper/requirements/_test.txt | 4 --- services/dask-sidecar/requirements/_base.txt | 2 -- .../datcore-adapter/requirements/_base.txt | 8 +++-- services/director-v2/requirements/_base.txt | 7 +++- .../cli/_client.py | 4 ++- .../simcore_service_director_v2/cli/_core.py | 7 ++-- .../core/application.py | 25 ++++++++++---- .../modules/catalog.py | 27 ++++++++++----- .../modules/director_v0.py | 34 ++++++++++++------- .../modules/dynamic_services.py | 13 ++++--- .../dynamic_sidecar/api_client/_thin.py | 5 +++ .../modules/resource_usage_tracker_client.py | 3 ++ .../modules/storage.py | 26 +++++++++----- ...t_dynamic_sidecar_nodeports_integration.py | 11 ++++-- .../dynamic-scheduler/requirements/_base.txt | 10 ++++-- .../services/director_v2/_thin_client.py | 1 + .../dynamic-sidecar/requirements/_base.txt | 7 +++- services/efs-guardian/requirements/_base.txt | 10 ++++-- services/efs-guardian/requirements/_test.txt | 4 --- services/invitations/requirements/_base.txt | 8 +++-- services/payments/requirements/_base.txt | 10 ++++-- .../services/payments_gateway.py | 3 ++ .../services/resource_usage_tracker.py | 3 ++ .../services/stripe.py | 3 ++ .../requirements/_base.txt | 10 ++++-- .../requirements/_test.txt | 4 --- services/storage/requirements/_base.txt | 1 - services/storage/requirements/_test.txt | 4 --- services/web/server/requirements/_base.txt | 1 - services/web/server/requirements/_test.txt | 1 - tests/swarm-deploy/requirements/_test.txt | 27 ++++++++++++++- 58 files changed, 395 insertions(+), 139 deletions(-) diff --git a/packages/aws-library/requirements/_base.txt b/packages/aws-library/requirements/_base.txt index 63c88ba0037..6caf09a9844 100644 --- a/packages/aws-library/requirements/_base.txt +++ b/packages/aws-library/requirements/_base.txt @@ -44,8 +44,6 @@ arrow==1.3.0 # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/service-library/requirements/_base.in # -r requirements/_base.in -async-timeout==4.0.3 - # via redis attrs==24.2.0 # via # aiohttp diff --git a/packages/notifications-library/requirements/_base.txt b/packages/notifications-library/requirements/_base.txt index abc242615c5..634746a1298 100644 --- a/packages/notifications-library/requirements/_base.txt +++ 
b/packages/notifications-library/requirements/_base.txt @@ -16,6 +16,10 @@ attrs==24.2.0 # referencing click==8.1.7 # via typer +deprecated==1.2.14 + # via + # opentelemetry-api + # opentelemetry-semantic-conventions dnspython==2.6.1 # via email-validator email-validator==2.2.0 @@ -26,6 +30,8 @@ idna==3.10 # via # email-validator # yarl +importlib-metadata==8.5.0 + # via opentelemetry-api jinja2==3.1.4 # via # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -54,6 +60,19 @@ mdurl==0.1.2 # via markdown-it-py multidict==6.1.0 # via yarl +opentelemetry-api==1.28.1 + # via + # opentelemetry-instrumentation + # opentelemetry-instrumentation-asyncpg + # opentelemetry-semantic-conventions +opentelemetry-instrumentation==0.49b1 + # via opentelemetry-instrumentation-asyncpg +opentelemetry-instrumentation-asyncpg==0.49b1 + # via -r requirements/../../../packages/postgres-database/requirements/_base.in +opentelemetry-semantic-conventions==0.49b1 + # via + # opentelemetry-instrumentation + # opentelemetry-instrumentation-asyncpg orjson==3.10.7 # via # -c requirements/../../../packages/models-library/requirements/../../../requirements/constraints.txt @@ -61,6 +80,8 @@ orjson==3.10.7 # -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt # -c requirements/../../../requirements/constraints.txt # -r requirements/../../../packages/models-library/requirements/_base.in +packaging==24.2 + # via opentelemetry-instrumentation psycopg2-binary==2.9.9 # via sqlalchemy pydantic==1.10.18 @@ -109,5 +130,11 @@ typing-extensions==4.12.2 # alembic # pydantic # typer +wrapt==1.16.0 + # via + # deprecated + # opentelemetry-instrumentation yarl==1.12.1 # via -r requirements/../../../packages/postgres-database/requirements/_base.in +zipp==3.21.0 + # via importlib-metadata diff --git a/packages/notifications-library/requirements/_test.txt b/packages/notifications-library/requirements/_test.txt index 55a7d9b8ee8..e802554a901 100644 --- a/packages/notifications-library/requirements/_test.txt +++ b/packages/notifications-library/requirements/_test.txt @@ -28,8 +28,9 @@ mypy==1.12.0 # via sqlalchemy mypy-extensions==1.0.0 # via mypy -packaging==24.1 +packaging==24.2 # via + # -c requirements/_base.txt # pytest # pytest-sugar pluggy==1.5.0 diff --git a/packages/notifications-library/requirements/_tools.txt b/packages/notifications-library/requirements/_tools.txt index 217752d687f..4a902da9cb2 100644 --- a/packages/notifications-library/requirements/_tools.txt +++ b/packages/notifications-library/requirements/_tools.txt @@ -38,8 +38,9 @@ mypy-extensions==1.0.0 # mypy nodeenv==1.9.1 # via pre-commit -packaging==24.1 +packaging==24.2 # via + # -c requirements/_base.txt # -c requirements/_test.txt # black # build diff --git a/packages/service-library/requirements/_base.txt b/packages/service-library/requirements/_base.txt index d53ce73a8c4..696dc496fcf 100644 --- a/packages/service-library/requirements/_base.txt +++ b/packages/service-library/requirements/_base.txt @@ -28,8 +28,6 @@ arrow==1.3.0 # via # -r requirements/../../../packages/models-library/requirements/_base.in # -r requirements/_base.in -async-timeout==4.0.3 - # via redis attrs==24.2.0 # via # aiohttp diff --git a/packages/service-library/requirements/_fastapi.in b/packages/service-library/requirements/_fastapi.in index 7b6a6bb2cf2..e11871af331 100644 --- a/packages/service-library/requirements/_fastapi.in +++ b/packages/service-library/requirements/_fastapi.in @@ -9,6 
+9,7 @@ fastapi httpx opentelemetry-instrumentation-fastapi +opentelemetry-instrumentation-httpx prometheus-client prometheus-fastapi-instrumentator uvicorn diff --git a/packages/service-library/requirements/_fastapi.txt b/packages/service-library/requirements/_fastapi.txt index 8a3aed37600..71c9d7cabce 100644 --- a/packages/service-library/requirements/_fastapi.txt +++ b/packages/service-library/requirements/_fastapi.txt @@ -47,23 +47,29 @@ opentelemetry-api==1.27.0 # opentelemetry-instrumentation # opentelemetry-instrumentation-asgi # opentelemetry-instrumentation-fastapi + # opentelemetry-instrumentation-httpx # opentelemetry-semantic-conventions opentelemetry-instrumentation==0.48b0 # via # opentelemetry-instrumentation-asgi # opentelemetry-instrumentation-fastapi + # opentelemetry-instrumentation-httpx opentelemetry-instrumentation-asgi==0.48b0 # via opentelemetry-instrumentation-fastapi opentelemetry-instrumentation-fastapi==0.48b0 # via -r requirements/_fastapi.in +opentelemetry-instrumentation-httpx==0.48b0 + # via -r requirements/_fastapi.in opentelemetry-semantic-conventions==0.48b0 # via # opentelemetry-instrumentation-asgi # opentelemetry-instrumentation-fastapi + # opentelemetry-instrumentation-httpx opentelemetry-util-http==0.48b0 # via # opentelemetry-instrumentation-asgi # opentelemetry-instrumentation-fastapi + # opentelemetry-instrumentation-httpx prometheus-client==0.21.0 # via # -r requirements/_fastapi.in diff --git a/packages/service-library/src/servicelib/fastapi/http_client_thin.py b/packages/service-library/src/servicelib/fastapi/http_client_thin.py index e00e0d636a2..554ccb450ad 100644 --- a/packages/service-library/src/servicelib/fastapi/http_client_thin.py +++ b/packages/service-library/src/servicelib/fastapi/http_client_thin.py @@ -8,6 +8,8 @@ from httpx import AsyncClient, ConnectError, HTTPError, PoolTimeout, Response from httpx._types import TimeoutTypes, URLTypes from pydantic.errors import PydanticErrorMixin +from servicelib.fastapi.tracing import setup_httpx_client_tracing +from settings_library.tracing import TracingSettings from tenacity import RetryCallState from tenacity.asyncio import AsyncRetrying from tenacity.before_sleep import before_sleep_log @@ -201,6 +203,7 @@ def __init__( base_url: URLTypes | None = None, default_http_client_timeout: TimeoutTypes | None = None, extra_allowed_method_names: set[str] | None = None, + tracing_settings: TracingSettings | None, ) -> None: _assert_public_interface(self, extra_allowed_method_names) @@ -220,7 +223,10 @@ def __init__( if default_http_client_timeout: client_args["timeout"] = default_http_client_timeout - super().__init__(client=AsyncClient(**client_args)) + client = AsyncClient(**client_args) + if tracing_settings: + setup_httpx_client_tracing(client) + super().__init__(client=client) async def __aenter__(self): await self.setup_client() diff --git a/packages/service-library/src/servicelib/fastapi/tracing.py b/packages/service-library/src/servicelib/fastapi/tracing.py index b5179a8a5f6..36e9b06fa12 100644 --- a/packages/service-library/src/servicelib/fastapi/tracing.py +++ b/packages/service-library/src/servicelib/fastapi/tracing.py @@ -5,11 +5,13 @@ import logging from fastapi import FastAPI +from httpx import AsyncClient, Client from opentelemetry import trace from opentelemetry.exporter.otlp.proto.http.trace_exporter import ( OTLPSpanExporter as OTLPSpanExporterHTTP, ) from opentelemetry.instrumentation.fastapi import FastAPIInstrumentor +from opentelemetry.instrumentation.httpx import 
HTTPXClientInstrumentor from opentelemetry.sdk.resources import Resource from opentelemetry.sdk.trace import TracerProvider from opentelemetry.sdk.trace.export import BatchSpanProcessor @@ -121,3 +123,7 @@ def setup_tracing( msg="Attempting to add requests opentelemetry autoinstrumentation...", ): RequestsInstrumentor().instrument() + + +def setup_httpx_client_tracing(client: AsyncClient | Client): + HTTPXClientInstrumentor.instrument_client(client) diff --git a/packages/service-library/tests/fastapi/test_http_client_thin.py b/packages/service-library/tests/fastapi/test_http_client_thin.py index f98de720c33..8c052948f6d 100644 --- a/packages/service-library/tests/fastapi/test_http_client_thin.py +++ b/packages/service-library/tests/fastapi/test_http_client_thin.py @@ -71,7 +71,9 @@ def request_timeout() -> int: @pytest.fixture async def thick_client(request_timeout: int) -> AsyncIterable[FakeThickClient]: - async with FakeThickClient(total_retry_interval=request_timeout) as client: + async with FakeThickClient( + total_retry_interval=request_timeout, tracing_settings=None + ) as client: yield client @@ -95,7 +97,9 @@ async def test_retry_on_errors( test_url: AnyHttpUrl, caplog_info_level: pytest.LogCaptureFixture, ) -> None: - client = FakeThickClient(total_retry_interval=request_timeout) + client = FakeThickClient( + total_retry_interval=request_timeout, tracing_settings=None + ) with pytest.raises(ClientHttpError): await client.get_provided_url(test_url) @@ -119,7 +123,7 @@ async def raises_request_error(self) -> Response: request=Request(method="GET", url=test_url), ) - client = ATestClient(total_retry_interval=request_timeout) + client = ATestClient(total_retry_interval=request_timeout, tracing_settings=None) with pytest.raises(ClientHttpError): await client.raises_request_error() @@ -145,7 +149,7 @@ async def raises_http_error(self) -> Response: msg = "mock_http_error" raise HTTPError(msg) - client = ATestClient(total_retry_interval=request_timeout) + client = ATestClient(total_retry_interval=request_timeout, tracing_settings=None) with pytest.raises(ClientHttpError): await client.raises_http_error() @@ -159,21 +163,25 @@ async def public_method_ok(self) -> Response: # type: ignore """this method will be ok even if no code is used""" # OK - OKTestClient(total_retry_interval=request_timeout) + OKTestClient(total_retry_interval=request_timeout, tracing_settings=None) class FailWrongAnnotationTestClient(BaseThinClient): async def public_method_wrong_annotation(self) -> None: """this method will raise an error""" with pytest.raises(AssertionError, match="should return an instance"): - FailWrongAnnotationTestClient(total_retry_interval=request_timeout) + FailWrongAnnotationTestClient( + total_retry_interval=request_timeout, tracing_settings=None + ) class FailNoAnnotationTestClient(BaseThinClient): async def public_method_no_annotation(self): """this method will raise an error""" with pytest.raises(AssertionError, match="should return an instance"): - FailNoAnnotationTestClient(total_retry_interval=request_timeout) + FailNoAnnotationTestClient( + total_retry_interval=request_timeout, tracing_settings=None + ) async def test_expect_state_decorator( @@ -197,7 +205,9 @@ async def get_wrong_state(self) -> Response: respx_mock.get(url_get_200_ok).mock(return_value=Response(codes.OK)) respx_mock.get(get_wrong_state).mock(return_value=Response(codes.OK)) - test_client = ATestClient(total_retry_interval=request_timeout) + test_client = ATestClient( + total_retry_interval=request_timeout, 
tracing_settings=None + ) # OK response = await test_client.get_200_ok() @@ -218,7 +228,9 @@ async def test_retry_timeout_overwrite( request_timeout: int, caplog_info_level: pytest.LogCaptureFixture, ) -> None: - client = FakeThickClient(total_retry_interval=request_timeout) + client = FakeThickClient( + total_retry_interval=request_timeout, tracing_settings=None + ) caplog_info_level.clear() start = arrow.utcnow() diff --git a/packages/simcore-sdk/requirements/_base.txt b/packages/simcore-sdk/requirements/_base.txt index 5eac02fa1ec..11be2af08e1 100644 --- a/packages/simcore-sdk/requirements/_base.txt +++ b/packages/simcore-sdk/requirements/_base.txt @@ -48,7 +48,6 @@ async-timeout==4.0.3 # via # aiopg # asyncpg - # redis asyncpg==0.29.0 # via sqlalchemy attrs==24.2.0 diff --git a/services/agent/requirements/_base.txt b/services/agent/requirements/_base.txt index 59f29515fe5..a42027b8a00 100644 --- a/services/agent/requirements/_base.txt +++ b/services/agent/requirements/_base.txt @@ -38,8 +38,6 @@ arrow==1.3.0 # -r requirements/../../../packages/service-library/requirements/_base.in asgiref==3.8.1 # via opentelemetry-instrumentation-asgi -async-timeout==4.0.3 - # via redis attrs==24.2.0 # via # aiohttp @@ -143,6 +141,7 @@ opentelemetry-api==1.27.0 # opentelemetry-instrumentation # opentelemetry-instrumentation-asgi # opentelemetry-instrumentation-fastapi + # opentelemetry-instrumentation-httpx # opentelemetry-instrumentation-redis # opentelemetry-instrumentation-requests # opentelemetry-sdk @@ -161,12 +160,15 @@ opentelemetry-instrumentation==0.48b0 # via # opentelemetry-instrumentation-asgi # opentelemetry-instrumentation-fastapi + # opentelemetry-instrumentation-httpx # opentelemetry-instrumentation-redis # opentelemetry-instrumentation-requests opentelemetry-instrumentation-asgi==0.48b0 # via opentelemetry-instrumentation-fastapi opentelemetry-instrumentation-fastapi==0.48b0 # via -r requirements/../../../packages/service-library/requirements/_fastapi.in +opentelemetry-instrumentation-httpx==0.48b0 + # via -r requirements/../../../packages/service-library/requirements/_fastapi.in opentelemetry-instrumentation-redis==0.48b0 # via -r requirements/../../../packages/service-library/requirements/_base.in opentelemetry-instrumentation-requests==0.48b0 @@ -185,6 +187,7 @@ opentelemetry-semantic-conventions==0.48b0 # via # opentelemetry-instrumentation-asgi # opentelemetry-instrumentation-fastapi + # opentelemetry-instrumentation-httpx # opentelemetry-instrumentation-redis # opentelemetry-instrumentation-requests # opentelemetry-sdk @@ -192,6 +195,7 @@ opentelemetry-util-http==0.48b0 # via # opentelemetry-instrumentation-asgi # opentelemetry-instrumentation-fastapi + # opentelemetry-instrumentation-httpx # opentelemetry-instrumentation-requests orjson==3.10.7 # via diff --git a/services/api-server/requirements/_base.txt b/services/api-server/requirements/_base.txt index 92a441a0e25..02a3778eab2 100644 --- a/services/api-server/requirements/_base.txt +++ b/services/api-server/requirements/_base.txt @@ -74,7 +74,6 @@ async-timeout==4.0.3 # via # aiopg # asyncpg - # redis asyncpg==0.29.0 # via sqlalchemy attrs==23.2.0 @@ -283,6 +282,7 @@ opentelemetry-api==1.27.0 # opentelemetry-instrumentation-asyncpg # opentelemetry-instrumentation-dbapi # opentelemetry-instrumentation-fastapi + # opentelemetry-instrumentation-httpx # opentelemetry-instrumentation-redis # opentelemetry-instrumentation-requests # opentelemetry-sdk @@ -306,6 +306,7 @@ opentelemetry-instrumentation==0.48b0 # 
opentelemetry-instrumentation-asyncpg # opentelemetry-instrumentation-dbapi # opentelemetry-instrumentation-fastapi + # opentelemetry-instrumentation-httpx # opentelemetry-instrumentation-redis # opentelemetry-instrumentation-requests opentelemetry-instrumentation-aiopg==0.48b0 @@ -320,6 +321,8 @@ opentelemetry-instrumentation-dbapi==0.48b0 # via opentelemetry-instrumentation-aiopg opentelemetry-instrumentation-fastapi==0.48b0 # via -r requirements/../../../packages/service-library/requirements/_fastapi.in +opentelemetry-instrumentation-httpx==0.48b0 + # via -r requirements/../../../packages/service-library/requirements/_fastapi.in opentelemetry-instrumentation-redis==0.48b0 # via # -r requirements/../../../packages/service-library/requirements/_base.in @@ -345,6 +348,7 @@ opentelemetry-semantic-conventions==0.48b0 # opentelemetry-instrumentation-asyncpg # opentelemetry-instrumentation-dbapi # opentelemetry-instrumentation-fastapi + # opentelemetry-instrumentation-httpx # opentelemetry-instrumentation-redis # opentelemetry-instrumentation-requests # opentelemetry-sdk @@ -352,6 +356,7 @@ opentelemetry-util-http==0.48b0 # via # opentelemetry-instrumentation-asgi # opentelemetry-instrumentation-fastapi + # opentelemetry-instrumentation-httpx # opentelemetry-instrumentation-requests orjson==3.10.0 # via diff --git a/services/api-server/src/simcore_service_api_server/core/application.py b/services/api-server/src/simcore_service_api_server/core/application.py index 04dcd397c28..3d67746deb7 100644 --- a/services/api-server/src/simcore_service_api_server/core/application.py +++ b/services/api-server/src/simcore_service_api_server/core/application.py @@ -82,19 +82,36 @@ def init_app(settings: ApplicationSettings | None = None) -> FastAPI: setup_rabbitmq(app) + if settings.API_SERVER_TRACING: + setup_tracing(app, settings.API_SERVER_TRACING, APP_NAME) + if settings.API_SERVER_WEBSERVER: - webserver.setup(app, settings.API_SERVER_WEBSERVER) - if app.state.settings.API_SERVER_TRACING: - setup_tracing(app, app.state.settings.API_SERVER_TRACING, APP_NAME) + webserver.setup( + app, + settings.API_SERVER_WEBSERVER, + tracing_settings=settings.API_SERVER_TRACING, + ) if settings.API_SERVER_CATALOG: - catalog.setup(app, settings.API_SERVER_CATALOG) + catalog.setup( + app, + settings.API_SERVER_CATALOG, + tracing_settings=settings.API_SERVER_TRACING, + ) if settings.API_SERVER_STORAGE: - storage.setup(app, settings.API_SERVER_STORAGE) + storage.setup( + app, + settings.API_SERVER_STORAGE, + tracing_settings=settings.API_SERVER_TRACING, + ) if settings.API_SERVER_DIRECTOR_V2: - director_v2.setup(app, settings.API_SERVER_DIRECTOR_V2) + director_v2.setup( + app, + settings.API_SERVER_DIRECTOR_V2, + tracing_settings=settings.API_SERVER_TRACING, + ) # setup app app.add_event_handler("startup", create_start_app_handler(app)) diff --git a/services/api-server/src/simcore_service_api_server/services/catalog.py b/services/api-server/src/simcore_service_api_server/services/catalog.py index 56a7d648790..461237ce998 100644 --- a/services/api-server/src/simcore_service_api_server/services/catalog.py +++ b/services/api-server/src/simcore_service_api_server/services/catalog.py @@ -11,6 +11,7 @@ from models_library.services import ServiceMetaDataPublished, ServiceType from pydantic import Extra, ValidationError, parse_obj_as, parse_raw_as from settings_library.catalog import CatalogSettings +from settings_library.tracing import TracingSettings from simcore_service_api_server.exceptions.backend_errors import ( 
diff --git a/services/api-server/src/simcore_service_api_server/services/catalog.py b/services/api-server/src/simcore_service_api_server/services/catalog.py
index 56a7d648790..461237ce998 100644
--- a/services/api-server/src/simcore_service_api_server/services/catalog.py
+++ b/services/api-server/src/simcore_service_api_server/services/catalog.py
@@ -11,6 +11,7 @@
 from models_library.services import ServiceMetaDataPublished, ServiceType
 from pydantic import Extra, ValidationError, parse_obj_as, parse_raw_as
 from settings_library.catalog import CatalogSettings
+from settings_library.tracing import TracingSettings
 from simcore_service_api_server.exceptions.backend_errors import (
     ListSolversOrStudiesError,
     SolverOrStudyNotFoundError,
@@ -209,10 +210,16 @@ async def get_latest_release(

 # MODULES APP SETUP -------------------------------------------------------------

-def setup(app: FastAPI, settings: CatalogSettings) -> None:
+def setup(
+    app: FastAPI, settings: CatalogSettings, tracing_settings: TracingSettings | None
+) -> None:
     if not settings:
         settings = CatalogSettings()

     setup_client_instance(
-        app, CatalogApi, api_baseurl=settings.api_base_url, service_name="catalog"
+        app,
+        CatalogApi,
+        api_baseurl=settings.api_base_url,
+        service_name="catalog",
+        tracing_settings=tracing_settings,
     )
diff --git a/services/api-server/src/simcore_service_api_server/services/director_v2.py b/services/api-server/src/simcore_service_api_server/services/director_v2.py
index ff31490b072..938e36c5242 100644
--- a/services/api-server/src/simcore_service_api_server/services/director_v2.py
+++ b/services/api-server/src/simcore_service_api_server/services/director_v2.py
@@ -9,6 +9,7 @@
 from models_library.projects_pipeline import ComputationTask
 from models_library.projects_state import RunningState
 from pydantic import AnyHttpUrl, AnyUrl, BaseModel, Field, PositiveInt, parse_raw_as
+from settings_library.tracing import TracingSettings
 from simcore_service_api_server.exceptions.backend_errors import (
     JobNotFoundError,
     LogFileNotFoundError,
@@ -191,11 +192,14 @@ async def get_computation_logs(

 # MODULES APP SETUP -------------------------------------------------------------

-def setup(app: FastAPI, settings: DirectorV2Settings) -> None:
+def setup(
+    app: FastAPI, settings: DirectorV2Settings, tracing_settings: TracingSettings | None
+) -> None:
     setup_client_instance(
         app,
         DirectorV2Api,  # WARNING: it has /v0 and /v2 prefixes
         api_baseurl=settings.base_url,
         service_name="director_v2",
+        tracing_settings=tracing_settings,
     )
diff --git a/services/api-server/src/simcore_service_api_server/services/storage.py b/services/api-server/src/simcore_service_api_server/services/storage.py
index 13920d8a931..4e6d8be54ca 100644
--- a/services/api-server/src/simcore_service_api_server/services/storage.py
+++ b/services/api-server/src/simcore_service_api_server/services/storage.py
@@ -14,6 +14,7 @@
 from models_library.basic_types import SHA256Str
 from models_library.generics import Envelope
 from pydantic import AnyUrl, PositiveInt
+from settings_library.tracing import TracingSettings
 from starlette.datastructures import URL

 from ..core.settings import StorageSettings
@@ -209,12 +210,18 @@ async def create_soft_link(

 # MODULES APP SETUP -------------------------------------------------------------

-def setup(app: FastAPI, settings: StorageSettings) -> None:
+def setup(
+    app: FastAPI, settings: StorageSettings, tracing_settings: TracingSettings | None
+) -> None:
     if not settings:
         settings = StorageSettings()

     setup_client_instance(
-        app, StorageApi, api_baseurl=settings.api_base_url, service_name="storage"
+        app,
+        StorageApi,
+        api_baseurl=settings.api_base_url,
+        service_name="storage",
+        tracing_settings=tracing_settings,
     )
diff --git a/services/api-server/src/simcore_service_api_server/services/webserver.py b/services/api-server/src/simcore_service_api_server/services/webserver.py
index 0d265248dc2..19688728cb5 100644
--- a/services/api-server/src/simcore_service_api_server/services/webserver.py
+++ b/services/api-server/src/simcore_service_api_server/services/webserver.py
@@ -48,6 +48,7 @@
     X_SIMCORE_PARENT_NODE_ID,
     X_SIMCORE_PARENT_PROJECT_UUID,
 )
+from settings_library.tracing import TracingSettings
 from simcore_service_api_server.exceptions.backend_errors import (
     ConfigurationError,
     ForbiddenWalletError,
@@ -588,24 +589,30 @@ async def get_service_pricing_plan(

 # MODULES APP SETUP -------------------------------------------------------------

-def setup(app: FastAPI, settings: WebServerSettings) -> None:
+def setup(
+    app: FastAPI,
+    webserver_settings: WebServerSettings,
+    tracing_settings: TracingSettings | None,
+) -> None:
     setup_client_instance(
         app,
         WebserverApi,
-        api_baseurl=settings.api_base_url,
+        api_baseurl=webserver_settings.api_base_url,
         service_name="webserver",
+        tracing_settings=tracing_settings,
     )
     setup_client_instance(
         app,
         LongRunningTasksClient,
         api_baseurl="",
         service_name="long_running_tasks_client",
+        tracing_settings=tracing_settings,
     )

     def _on_startup() -> None:
         # normalize & encrypt
-        secret_key = settings.WEBSERVER_SESSION_SECRET_KEY.get_secret_value()
+        secret_key = webserver_settings.WEBSERVER_SESSION_SECRET_KEY.get_secret_value()
         app.state.webserver_fernet = fernet.Fernet(secret_key)

     async def _on_shutdown() -> None:
diff --git a/services/api-server/src/simcore_service_api_server/utils/client_base.py b/services/api-server/src/simcore_service_api_server/utils/client_base.py
index ed58f7429e3..3cc35a74bb6 100644
--- a/services/api-server/src/simcore_service_api_server/utils/client_base.py
+++ b/services/api-server/src/simcore_service_api_server/utils/client_base.py
@@ -4,6 +4,8 @@
 import httpx
 from fastapi import FastAPI
 from httpx import AsyncClient
+from servicelib.fastapi.tracing import setup_httpx_client_tracing
+from settings_library.tracing import TracingSettings

 from .app_data import AppDataMixin

@@ -43,14 +45,16 @@ def setup_client_instance(
     api_cls: type[BaseServiceClientApi],
     api_baseurl,
     service_name: str,
+    tracing_settings: TracingSettings | None,
     **extra_fields,
 ) -> None:
     """Helper to add init/cleanup of ServiceClientApi instances in the app lifespam"""
     assert issubclass(api_cls, BaseServiceClientApi)  # nosec

-    # NOTE: this term is mocked in tests. If you need to modify pay attention to the mock
     client = AsyncClient(base_url=api_baseurl)
+    if tracing_settings:
+        setup_httpx_client_tracing(client)

     # events
     def _create_instance() -> None:
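Since `tracing_settings` is now a required keyword of `setup_client_instance`, every call site must decide explicitly whether its httpx client gets traced. A sketch of both cases, assuming a hypothetical `MyApi(BaseServiceClientApi)` subclass (not part of this patch):

    # traced: spans are recorded for this client's outgoing requests
    setup_client_instance(
        app,
        MyApi,
        api_baseurl="http://example:8000",
        service_name="example",
        tracing_settings=tracing_settings,
    )

    # opted out explicitly, as the unit test below does
    setup_client_instance(
        app,
        MyApi,
        api_baseurl="http://example:8000",
        service_name="example",
        tracing_settings=None,
    )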
diff --git a/services/api-server/tests/unit/test_utils_client_base.py b/services/api-server/tests/unit/test_utils_client_base.py
index 61370a8ea52..9fe2da1a28c 100644
--- a/services/api-server/tests/unit/test_utils_client_base.py
+++ b/services/api-server/tests/unit/test_utils_client_base.py
@@ -43,6 +43,7 @@ class TheClientApi(BaseServiceClientApi):
         service_name="the_service",
         health_check_path="/health",
         x=42,
+        tracing_settings=None,
     )

     assert not TheClientApi.get_instance(app)
diff --git a/services/autoscaling/requirements/_base.txt b/services/autoscaling/requirements/_base.txt
index 0c7ff77b07f..995fb44e3f4 100644
--- a/services/autoscaling/requirements/_base.txt
+++ b/services/autoscaling/requirements/_base.txt
@@ -65,8 +65,6 @@ arrow==1.3.0
     # -r requirements/../../../packages/service-library/requirements/_base.in
 asgiref==3.8.1
     # via opentelemetry-instrumentation-asgi
-async-timeout==4.0.3
-    # via redis
 attrs==23.2.0
     # via
     #   aiohttp
@@ -260,6 +258,7 @@ opentelemetry-api==1.26.0
     #   opentelemetry-instrumentation-asgi
     #   opentelemetry-instrumentation-botocore
     #   opentelemetry-instrumentation-fastapi
+    #   opentelemetry-instrumentation-httpx
     #   opentelemetry-instrumentation-redis
     #   opentelemetry-instrumentation-requests
     #   opentelemetry-propagator-aws-xray
@@ -282,6 +281,7 @@ opentelemetry-instrumentation==0.47b0
     #   opentelemetry-instrumentation-asgi
     #   opentelemetry-instrumentation-botocore
     #   opentelemetry-instrumentation-fastapi
+    #   opentelemetry-instrumentation-httpx
     #   opentelemetry-instrumentation-redis
     #   opentelemetry-instrumentation-requests
 opentelemetry-instrumentation-asgi==0.47b0
@@ -290,6 +290,8 @@ opentelemetry-instrumentation-botocore==0.47b0
     # via -r requirements/../../../packages/aws-library/requirements/_base.in
 opentelemetry-instrumentation-fastapi==0.47b0
     # via -r requirements/../../../packages/service-library/requirements/_fastapi.in
+opentelemetry-instrumentation-httpx==0.47b0
+    # via -r requirements/../../../packages/service-library/requirements/_fastapi.in
 opentelemetry-instrumentation-redis==0.47b0
     # via
     #   -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in
@@ -316,6 +318,7 @@ opentelemetry-semantic-conventions==0.47b0
     #   opentelemetry-instrumentation-asgi
     #   opentelemetry-instrumentation-botocore
     #   opentelemetry-instrumentation-fastapi
+    #   opentelemetry-instrumentation-httpx
     #   opentelemetry-instrumentation-redis
     #   opentelemetry-instrumentation-requests
     #   opentelemetry-sdk
@@ -323,6 +326,7 @@ opentelemetry-util-http==0.47b0
     # via
     #   opentelemetry-instrumentation-asgi
     #   opentelemetry-instrumentation-fastapi
+    #   opentelemetry-instrumentation-httpx
     #   opentelemetry-instrumentation-requests
 orjson==3.10.3
     # via
diff --git a/services/autoscaling/requirements/_test.txt b/services/autoscaling/requirements/_test.txt
index 8abc686eb76..47379c4d69f 100644
--- a/services/autoscaling/requirements/_test.txt
+++ b/services/autoscaling/requirements/_test.txt
@@ -6,10 +6,6 @@ anyio==4.3.0
     #   httpx
 asgi-lifespan==2.1.0
     # via -r requirements/_test.in
-async-timeout==4.0.3
-    # via
-    #   -c requirements/_base.txt
-    #   redis
 attrs==23.2.0
     # via
     #   -c requirements/_base.txt
diff --git a/services/catalog/requirements/_base.txt b/services/catalog/requirements/_base.txt
index 890adbe5508..e650830f05d 100644
--- a/services/catalog/requirements/_base.txt
+++ b/services/catalog/requirements/_base.txt
@@ -41,9 +41,7 @@ arrow==1.3.0
 asgiref==3.8.1
     # via opentelemetry-instrumentation-asgi
 async-timeout==4.0.3
-    # via
-    #   asyncpg
-    #   redis
+    # via asyncpg
 asyncpg==0.29.0
     # via
     #   -r requirements/_base.in
@@ -191,6 +189,7 @@ opentelemetry-api==1.27.0
     #   opentelemetry-instrumentation-asgi
     #   opentelemetry-instrumentation-asyncpg
     #   opentelemetry-instrumentation-fastapi
+    #   opentelemetry-instrumentation-httpx
     #   opentelemetry-instrumentation-redis
     #   opentelemetry-instrumentation-requests
     #   opentelemetry-sdk
@@ -210,6 +209,7 @@ opentelemetry-instrumentation==0.48b0
     #   opentelemetry-instrumentation-asgi
     #   opentelemetry-instrumentation-asyncpg
     #   opentelemetry-instrumentation-fastapi
+    #   opentelemetry-instrumentation-httpx
     #   opentelemetry-instrumentation-redis
     #   opentelemetry-instrumentation-requests
 opentelemetry-instrumentation-asgi==0.48b0
@@ -218,6 +218,8 @@ opentelemetry-instrumentation-asyncpg==0.48b0
     # via -r requirements/../../../packages/postgres-database/requirements/_base.in
 opentelemetry-instrumentation-fastapi==0.48b0
     # via -r requirements/../../../packages/service-library/requirements/_fastapi.in
+opentelemetry-instrumentation-httpx==0.48b0
+    # via -r requirements/../../../packages/service-library/requirements/_fastapi.in
 opentelemetry-instrumentation-redis==0.48b0
     # via -r requirements/../../../packages/service-library/requirements/_base.in
 opentelemetry-instrumentation-requests==0.48b0
@@ -237,6 +239,7 @@ opentelemetry-semantic-conventions==0.48b0
     #   opentelemetry-instrumentation-asgi
     #   opentelemetry-instrumentation-asyncpg
     #   opentelemetry-instrumentation-fastapi
+    #   opentelemetry-instrumentation-httpx
     #   opentelemetry-instrumentation-redis
     #   opentelemetry-instrumentation-requests
     #   opentelemetry-sdk
@@ -244,6 +247,7 @@ opentelemetry-util-http==0.48b0
     # via
     #   opentelemetry-instrumentation-asgi
     #   opentelemetry-instrumentation-fastapi
+    #   opentelemetry-instrumentation-httpx
     #   opentelemetry-instrumentation-requests
 orjson==3.10.0
     # via
diff --git a/services/catalog/src/simcore_service_catalog/core/application.py b/services/catalog/src/simcore_service_catalog/core/application.py
index a28dc8c5a32..94f35b3d1ea 100644
--- a/services/catalog/src/simcore_service_catalog/core/application.py
+++ b/services/catalog/src/simcore_service_catalog/core/application.py
@@ -46,8 +46,13 @@ def create_app(settings: ApplicationSettings | None = None) -> FastAPI:
     # STATE
     app.state.settings = settings

+    if settings.CATALOG_TRACING:
+        setup_tracing(app, settings.CATALOG_TRACING, APP_NAME)
+
     # STARTUP-EVENT
-    app.add_event_handler("startup", create_on_startup(app))
+    app.add_event_handler(
+        "startup", create_on_startup(app, tracing_settings=settings.CATALOG_TRACING)
+    )

     # PLUGIN SETUP
     setup_function_services(app)
@@ -65,8 +70,6 @@ def create_app(settings: ApplicationSettings | None = None) -> FastAPI:
         app.add_middleware(
             BaseHTTPMiddleware, dispatch=timing_middleware.add_process_time_header
         )
-        if app.state.settings.CATALOG_TRACING:
-            setup_tracing(app, app.state.settings.CATALOG_TRACING, APP_NAME)

     app.add_middleware(GZipMiddleware)
diff --git a/services/catalog/src/simcore_service_catalog/core/events.py b/services/catalog/src/simcore_service_catalog/core/events.py
index f22adbba4ec..dde295a2e56 100644
--- a/services/catalog/src/simcore_service_catalog/core/events.py
+++ b/services/catalog/src/simcore_service_catalog/core/events.py
@@ -5,6 +5,7 @@
 from fastapi import FastAPI
 from servicelib.fastapi.db_asyncpg_engine import close_db_connection, connect_to_db
 from servicelib.logging_utils import log_context
+from settings_library.tracing import TracingSettings

 from .._meta import APP_FINISHED_BANNER_MSG, APP_STARTED_BANNER_MSG
 from ..db.events import setup_default_product
@@ -26,7 +27,9 @@ def _flush_finished_banner() -> None:
     print(APP_FINISHED_BANNER_MSG, flush=True)  # noqa: T201

-def create_on_startup(app: FastAPI) -> EventCallable:
+def create_on_startup(
+    app: FastAPI, tracing_settings: TracingSettings | None
+) -> EventCallable:
     async def _() -> None:
         _flush_started_banner()

@@ -37,7 +40,7 @@ async def _() -> None:

         if app.state.settings.CATALOG_DIRECTOR:
             # setup connection to director
-            await setup_director(app)
+            await setup_director(app, tracing_settings=tracing_settings)

             # FIXME: check director service is in place and ready. Hand-shake??
             # SEE https://github.com/ITISFoundation/osparc-simcore/issues/1728
diff --git a/services/catalog/src/simcore_service_catalog/services/director.py b/services/catalog/src/simcore_service_catalog/services/director.py
index 7c6925902f4..e97b72bb3f2 100644
--- a/services/catalog/src/simcore_service_catalog/services/director.py
+++ b/services/catalog/src/simcore_service_catalog/services/director.py
@@ -11,7 +11,9 @@
 from models_library.services_metadata_published import ServiceMetaDataPublished
 from models_library.services_types import ServiceKey, ServiceVersion
 from models_library.utils.json_serialization import json_dumps
+from servicelib.fastapi.tracing import setup_httpx_client_tracing
 from servicelib.logging_utils import log_context
+from settings_library.tracing import TracingSettings
 from starlette import status
 from tenacity.asyncio import AsyncRetrying
 from tenacity.before_sleep import before_sleep_log
@@ -106,11 +108,15 @@ class DirectorApi:
     SEE services/catalog/src/simcore_service_catalog/api/dependencies/director.py
     """

-    def __init__(self, base_url: str, app: FastAPI):
+    def __init__(
+        self, base_url: str, app: FastAPI, tracing_settings: TracingSettings | None
+    ):
         self.client = httpx.AsyncClient(
             base_url=base_url,
             timeout=app.state.settings.CATALOG_CLIENT_REQUEST.HTTP_CLIENT_REQUEST_TOTAL_TIMEOUT,
         )
+        if tracing_settings:
+            setup_httpx_client_tracing(self.client)
         self.vtag = app.state.settings.CATALOG_DIRECTOR.DIRECTOR_VTAG

     async def close(self):
@@ -151,15 +157,25 @@ async def get_service(
     return ServiceMetaDataPublished.parse_obj(data[0])

-async def setup_director(app: FastAPI) -> None:
+async def setup_director(
+    app: FastAPI, tracing_settings: TracingSettings | None
+) -> None:
     if settings := app.state.settings.CATALOG_DIRECTOR:
         with log_context(
             _logger, logging.DEBUG, "Setup director at %s", f"{settings.base_url=}"
         ):
             async for attempt in AsyncRetrying(**_director_startup_retry_policy):
-                client = DirectorApi(base_url=settings.base_url, app=app)
+                client = DirectorApi(
+                    base_url=settings.base_url,
+                    app=app,
+                    tracing_settings=tracing_settings,
+                )
                 with attempt:
-                    client = DirectorApi(base_url=settings.base_url, app=app)
+                    client = DirectorApi(
+                        base_url=settings.base_url,
+                        app=app,
+                        tracing_settings=tracing_settings,
+                    )
                     if not await client.is_responsive():
                         with suppress(Exception):
                             await client.close()
diff --git a/services/clusters-keeper/requirements/_base.txt b/services/clusters-keeper/requirements/_base.txt
index 9443ee269ef..344d07b5339 100644
--- a/services/clusters-keeper/requirements/_base.txt
+++ b/services/clusters-keeper/requirements/_base.txt
@@ -63,8 +63,6 @@ arrow==1.3.0
     # -r requirements/../../../packages/service-library/requirements/_base.in
 asgiref==3.8.1
     # via opentelemetry-instrumentation-asgi
-async-timeout==4.0.3
-    # via redis
 attrs==23.2.0
     # via
     #   aiohttp
@@ -258,6 +256,7 @@ opentelemetry-api==1.26.0
     #   opentelemetry-instrumentation-asgi
     #   opentelemetry-instrumentation-botocore
     #   opentelemetry-instrumentation-fastapi
+    #   opentelemetry-instrumentation-httpx
     #   opentelemetry-instrumentation-redis
     #   opentelemetry-instrumentation-requests
     #   opentelemetry-propagator-aws-xray
@@ -280,6 +279,7 @@ opentelemetry-instrumentation==0.47b0
     #   opentelemetry-instrumentation-asgi
     #   opentelemetry-instrumentation-botocore
     #   opentelemetry-instrumentation-fastapi
+    #   opentelemetry-instrumentation-httpx
     #   opentelemetry-instrumentation-redis
     #   opentelemetry-instrumentation-requests
 opentelemetry-instrumentation-asgi==0.47b0
@@ -288,6 +288,8 @@ opentelemetry-instrumentation-botocore==0.47b0
     # via -r requirements/../../../packages/aws-library/requirements/_base.in
 opentelemetry-instrumentation-fastapi==0.47b0
     # via -r requirements/../../../packages/service-library/requirements/_fastapi.in
+opentelemetry-instrumentation-httpx==0.47b0
+    # via -r requirements/../../../packages/service-library/requirements/_fastapi.in
 opentelemetry-instrumentation-redis==0.47b0
     # via
     #   -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in
@@ -314,6 +316,7 @@ opentelemetry-semantic-conventions==0.47b0
     #   opentelemetry-instrumentation-asgi
     #   opentelemetry-instrumentation-botocore
     #   opentelemetry-instrumentation-fastapi
+    #   opentelemetry-instrumentation-httpx
     #   opentelemetry-instrumentation-redis
     #   opentelemetry-instrumentation-requests
     #   opentelemetry-sdk
@@ -321,6 +324,7 @@ opentelemetry-util-http==0.47b0
     # via
     #   opentelemetry-instrumentation-asgi
     #   opentelemetry-instrumentation-fastapi
+    #   opentelemetry-instrumentation-httpx
     #   opentelemetry-instrumentation-requests
 orjson==3.10.3
     # via
diff --git a/services/clusters-keeper/requirements/_test.txt b/services/clusters-keeper/requirements/_test.txt
index e2832a14944..00a7437644c 100644
--- a/services/clusters-keeper/requirements/_test.txt
+++ b/services/clusters-keeper/requirements/_test.txt
@@ -19,10 +19,6 @@ anyio==4.3.0
     #   httpx
 asgi-lifespan==2.1.0
     # via -r requirements/_test.in
-async-timeout==4.0.3
-    # via
-    #   -c requirements/_base.txt
-    #   redis
 attrs==23.2.0
     # via
     #   -c requirements/_base.txt
diff --git a/services/dask-sidecar/requirements/_base.txt b/services/dask-sidecar/requirements/_base.txt
index 6cdd686b12f..dc0ea01d6f9 100644
--- a/services/dask-sidecar/requirements/_base.txt
+++ b/services/dask-sidecar/requirements/_base.txt
@@ -46,8 +46,6 @@ arrow==1.3.0
     # -r requirements/../../../packages/models-library/requirements/_base.in
     # -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in
     # -r requirements/../../../packages/service-library/requirements/_base.in
-async-timeout==4.0.3
-    # via redis
 attrs==23.2.0
     # via
     #   aiohttp
diff --git a/services/datcore-adapter/requirements/_base.txt b/services/datcore-adapter/requirements/_base.txt
index f8fe44d6058..5a9116dfe47 100644
--- a/services/datcore-adapter/requirements/_base.txt
+++ b/services/datcore-adapter/requirements/_base.txt
@@ -39,8 +39,6 @@ arrow==1.3.0
     # -r requirements/../../../packages/service-library/requirements/_base.in
 asgiref==3.8.1
     # via opentelemetry-instrumentation-asgi
-async-timeout==4.0.3
-    # via redis
 attrs==23.2.0
     # via
     #   aiohttp
@@ -166,6 +164,7 @@ opentelemetry-api==1.26.0
     #   opentelemetry-instrumentation
     #   opentelemetry-instrumentation-asgi
     #   opentelemetry-instrumentation-fastapi
+    #   opentelemetry-instrumentation-httpx
     #   opentelemetry-instrumentation-redis
     #   opentelemetry-instrumentation-requests
     #   opentelemetry-sdk
@@ -184,12 +183,15 @@ opentelemetry-instrumentation==0.47b0
     # via
     #   opentelemetry-instrumentation-asgi
     #   opentelemetry-instrumentation-fastapi
+    #   opentelemetry-instrumentation-httpx
     #   opentelemetry-instrumentation-redis
     #   opentelemetry-instrumentation-requests
 opentelemetry-instrumentation-asgi==0.47b0
     # via opentelemetry-instrumentation-fastapi
 opentelemetry-instrumentation-fastapi==0.47b0
     # via -r requirements/../../../packages/service-library/requirements/_fastapi.in
+opentelemetry-instrumentation-httpx==0.47b0
+    # via -r requirements/../../../packages/service-library/requirements/_fastapi.in
 opentelemetry-instrumentation-redis==0.47b0
     # via -r requirements/../../../packages/service-library/requirements/_base.in
 opentelemetry-instrumentation-requests==0.47b0
@@ -208,6 +210,7 @@ opentelemetry-semantic-conventions==0.47b0
     # via
     #   opentelemetry-instrumentation-asgi
     #   opentelemetry-instrumentation-fastapi
+    #   opentelemetry-instrumentation-httpx
     #   opentelemetry-instrumentation-redis
     #   opentelemetry-instrumentation-requests
     #   opentelemetry-sdk
@@ -215,6 +218,7 @@ opentelemetry-util-http==0.47b0
     # via
     #   opentelemetry-instrumentation-asgi
     #   opentelemetry-instrumentation-fastapi
+    #   opentelemetry-instrumentation-httpx
     #   opentelemetry-instrumentation-requests
 orjson==3.10.0
     # via
diff --git a/services/director-v2/requirements/_base.txt b/services/director-v2/requirements/_base.txt
index 02162fe9a64..dfcfa5ab028 100644
--- a/services/director-v2/requirements/_base.txt
+++ b/services/director-v2/requirements/_base.txt
@@ -81,7 +81,6 @@ async-timeout==4.0.3
     # via
     #   aiopg
     #   asyncpg
-    #   redis
 asyncpg==0.29.0
     # via sqlalchemy
 attrs==23.2.0
@@ -340,6 +339,7 @@ opentelemetry-api==1.27.0
     #   opentelemetry-instrumentation-asyncpg
     #   opentelemetry-instrumentation-dbapi
     #   opentelemetry-instrumentation-fastapi
+    #   opentelemetry-instrumentation-httpx
     #   opentelemetry-instrumentation-redis
     #   opentelemetry-instrumentation-requests
     #   opentelemetry-sdk
@@ -363,6 +363,7 @@ opentelemetry-instrumentation==0.48b0
     #   opentelemetry-instrumentation-asyncpg
     #   opentelemetry-instrumentation-dbapi
     #   opentelemetry-instrumentation-fastapi
+    #   opentelemetry-instrumentation-httpx
     #   opentelemetry-instrumentation-redis
     #   opentelemetry-instrumentation-requests
 opentelemetry-instrumentation-aiopg==0.48b0
@@ -377,6 +378,8 @@ opentelemetry-instrumentation-dbapi==0.48b0
     # via opentelemetry-instrumentation-aiopg
 opentelemetry-instrumentation-fastapi==0.48b0
     # via -r requirements/../../../packages/service-library/requirements/_fastapi.in
+opentelemetry-instrumentation-httpx==0.48b0
+    # via -r requirements/../../../packages/service-library/requirements/_fastapi.in
 opentelemetry-instrumentation-redis==0.48b0
     # via
     #   -r requirements/../../../packages/service-library/requirements/_base.in
@@ -402,6 +405,7 @@ opentelemetry-semantic-conventions==0.48b0
     #   opentelemetry-instrumentation-asyncpg
     #   opentelemetry-instrumentation-dbapi
     #   opentelemetry-instrumentation-fastapi
+    #   opentelemetry-instrumentation-httpx
     #   opentelemetry-instrumentation-redis
     #   opentelemetry-instrumentation-requests
     #   opentelemetry-sdk
@@ -409,6 +413,7 @@ opentelemetry-util-http==0.48b0
     # via
     #   opentelemetry-instrumentation-asgi
     #   opentelemetry-instrumentation-fastapi
+    #   opentelemetry-instrumentation-httpx
     #   opentelemetry-instrumentation-requests
 ordered-set==4.1.0
     # via -r requirements/_base.in
diff --git a/services/director-v2/src/simcore_service_director_v2/cli/_client.py b/services/director-v2/src/simcore_service_director_v2/cli/_client.py
index 541d90688dc..872c08f3b5f 100644
--- a/services/director-v2/src/simcore_service_director_v2/cli/_client.py
+++ b/services/director-v2/src/simcore_service_director_v2/cli/_client.py
@@ -12,7 +12,9 @@ class ThinDV2LocalhostClient(BaseThinClient):

     def __init__(self):
         super().__init__(
-            total_retry_interval=10, default_http_client_timeout=Timeout(5)
+            total_retry_interval=10,
+            default_http_client_timeout=Timeout(5),
+            tracing_settings=None,
         )

     def _get_url(self, postfix: str) -> str:
diff --git a/services/director-v2/src/simcore_service_director_v2/cli/_core.py b/services/director-v2/src/simcore_service_director_v2/cli/_core.py
index 893aed2504e..70ee252aa20 100644
--- a/services/director-v2/src/simcore_service_director_v2/cli/_core.py
+++ b/services/director-v2/src/simcore_service_director_v2/cli/_core.py
@@ -36,13 +36,16 @@ async def _initialized_app(only_db: bool = False) -> AsyncIterator[FastAPI]:
     app = create_base_app()
     settings: AppSettings = app.state.settings

-    # Initialize minimal required components for the application
     db.setup(app, settings.POSTGRES)

     if not only_db:
         dynamic_sidecar.setup(app)
-        director_v0.setup(app, settings.DIRECTOR_V0)
+        director_v0.setup(
+            app,
+            director_v0_settings=settings.DIRECTOR_V0,
+            tracing_settings=settings.DIRECTOR_V2_TRACING,
+        )

     await app.router.startup()
     yield app
diff --git a/services/director-v2/src/simcore_service_director_v2/core/application.py b/services/director-v2/src/simcore_service_director_v2/core/application.py
index f1c81f18f98..6487d725143 100644
--- a/services/director-v2/src/simcore_service_director_v2/core/application.py
+++ b/services/director-v2/src/simcore_service_director_v2/core/application.py
@@ -149,19 +149,34 @@ def init_app(settings: AppSettings | None = None) -> FastAPI:

     substitutions.setup(app)

+    if settings.DIRECTOR_V2_TRACING:
+        setup_tracing(app, settings.DIRECTOR_V2_TRACING, APP_NAME)
+
     if settings.DIRECTOR_V0.DIRECTOR_V0_ENABLED:
-        director_v0.setup(app, settings.DIRECTOR_V0)
+        director_v0.setup(
+            app,
+            director_v0_settings=settings.DIRECTOR_V0,
+            tracing_settings=settings.DIRECTOR_V2_TRACING,
+        )

     if settings.DIRECTOR_V2_STORAGE:
-        storage.setup(app, settings.DIRECTOR_V2_STORAGE)
+        storage.setup(
+            app,
+            storage_settings=settings.DIRECTOR_V2_STORAGE,
+            tracing_settings=settings.DIRECTOR_V2_TRACING,
+        )

     if settings.DIRECTOR_V2_CATALOG:
-        catalog.setup(app, settings.DIRECTOR_V2_CATALOG)
+        catalog.setup(
+            app,
+            catalog_settings=settings.DIRECTOR_V2_CATALOG,
+            tracing_settings=settings.DIRECTOR_V2_TRACING,
+        )

     db.setup(app, settings.POSTGRES)

     if settings.DYNAMIC_SERVICES.DIRECTOR_V2_DYNAMIC_SERVICES_ENABLED:
-        dynamic_services.setup(app)
+        dynamic_services.setup(app, tracing_settings=settings.DIRECTOR_V2_TRACING)

     dynamic_scheduler_enabled = settings.DYNAMIC_SERVICES.DYNAMIC_SIDECAR and (
         settings.DYNAMIC_SERVICES.DYNAMIC_SCHEDULER
@@ -192,8 +207,6 @@ def init_app(settings: AppSettings | None = None) -> FastAPI:

     if settings.DIRECTOR_V2_PROMETHEUS_INSTRUMENTATION_ENABLED:
         instrumentation.setup(app)
-    if settings.DIRECTOR_V2_TRACING:
-        setup_tracing(app, app.state.settings.DIRECTOR_V2_TRACING, APP_NAME)

     if settings.DIRECTOR_V2_PROFILING:
         app.add_middleware(ProfilerMiddleware)
diff --git a/services/director-v2/src/simcore_service_director_v2/modules/catalog.py b/services/director-v2/src/simcore_service_director_v2/modules/catalog.py
index f5e378afa43..22b4eb89bd3 100644
--- a/services/director-v2/src/simcore_service_director_v2/modules/catalog.py
+++ b/services/director-v2/src/simcore_service_director_v2/modules/catalog.py
@@ -9,26 +9,37 @@
 from models_library.services_resources import ServiceResourcesDict
 from models_library.users import UserID
 from pydantic import parse_obj_as
+from servicelib.fastapi.tracing import setup_httpx_client_tracing
 from settings_library.catalog import CatalogSettings
+from settings_library.tracing import TracingSettings

 from ..utils.client_decorators import handle_errors, handle_retry

 logger = logging.getLogger(__name__)

-def setup(app: FastAPI, settings: CatalogSettings) -> None:
-    if not settings:
-        settings = CatalogSettings()
+def setup(
+    app: FastAPI,
+    catalog_settings: CatalogSettings | None,
+    tracing_settings: TracingSettings | None,
+) -> None:
+
+    if not catalog_settings:
+        catalog_settings = CatalogSettings()

     async def on_startup() -> None:
+        client = httpx.AsyncClient(
+            base_url=f"{catalog_settings.api_base_url}",
+            timeout=app.state.settings.CLIENT_REQUEST.HTTP_CLIENT_REQUEST_TOTAL_TIMEOUT,
+        )
+        if tracing_settings:
+            setup_httpx_client_tracing(client=client)
+
         CatalogClient.create(
             app,
-            client=httpx.AsyncClient(
-                base_url=f"{settings.api_base_url}",
-                timeout=app.state.settings.CLIENT_REQUEST.HTTP_CLIENT_REQUEST_TOTAL_TIMEOUT,
-            ),
+            client=client,
         )
-        logger.debug("created client for catalog: %s", settings.api_base_url)
+        logger.debug("created client for catalog: %s", catalog_settings.api_base_url)

         # Here we currently do not ensure the catalog is up on start
         # This will need to be assessed.
diff --git a/services/director-v2/src/simcore_service_director_v2/modules/director_v0.py b/services/director-v2/src/simcore_service_director_v2/modules/director_v0.py
index 0bc8c799dcb..3229ddc642a 100644
--- a/services/director-v2/src/simcore_service_director_v2/modules/director_v0.py
+++ b/services/director-v2/src/simcore_service_director_v2/modules/director_v0.py
@@ -1,7 +1,4 @@
-""" Module that takes care of communications with director v0 service
-
-
-"""
+"""Module that takes care of communications with director v0 service"""

 import logging
 import urllib.parse
@@ -20,7 +17,9 @@
 from models_library.service_settings_labels import SimcoreServiceLabels
 from models_library.services import ServiceKey, ServiceKeyVersion, ServiceVersion
 from models_library.users import UserID
+from servicelib.fastapi.tracing import setup_httpx_client_tracing
 from servicelib.logging_utils import log_decorator
+from settings_library.tracing import TracingSettings

 from ..core.settings import DirectorV0Settings
 from ..utils.client_decorators import handle_errors, handle_retry
@@ -31,25 +30,34 @@
 # Module's setup logic ---------------------------------------------

-def setup(app: FastAPI, settings: DirectorV0Settings | None):
-    if not settings:
-        settings = DirectorV0Settings()
+def setup(
+    app: FastAPI,
+    director_v0_settings: DirectorV0Settings | None,
+    tracing_settings: TracingSettings | None,
+):
+    if not director_v0_settings:
+        director_v0_settings = DirectorV0Settings()

     def on_startup() -> None:
+        client = httpx.AsyncClient(
+            base_url=f"{director_v0_settings.endpoint}",
+            timeout=app.state.settings.CLIENT_REQUEST.HTTP_CLIENT_REQUEST_TOTAL_TIMEOUT,
+        )
+        if tracing_settings:
+            setup_httpx_client_tracing(client=client)
         DirectorV0Client.create(
             app,
-            client=httpx.AsyncClient(
-                base_url=f"{settings.endpoint}",
-                timeout=app.state.settings.CLIENT_REQUEST.HTTP_CLIENT_REQUEST_TOTAL_TIMEOUT,
-            ),
+            client=client,
+        )
+        logger.debug(
+            "created client for director-v0: %s", director_v0_settings.endpoint
         )
-        logger.debug("created client for director-v0: %s", settings.endpoint)

     async def on_shutdown() -> None:
         client = DirectorV0Client.instance(app).client
         await client.aclose()
         del client
-        logger.debug("delete client for director-v0: %s", settings.endpoint)
+        logger.debug("delete client for director-v0: %s", director_v0_settings.endpoint)

     app.add_event_handler("startup", on_startup)
     app.add_event_handler("shutdown", on_shutdown)
diff --git a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_services.py b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_services.py
index d572a9f23fb..acbc08849a6 100644
--- a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_services.py
+++ b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_services.py
@@ -8,19 +8,24 @@

 import httpx
 from fastapi import FastAPI
+from servicelib.fastapi.tracing import setup_httpx_client_tracing
+from settings_library.tracing import TracingSettings

 from ..utils.client_decorators import handle_errors, handle_retry

 logger = logging.getLogger(__name__)

-def setup(app: FastAPI) -> None:
+def setup(app: FastAPI, tracing_settings: TracingSettings | None) -> None:
     def on_startup() -> None:
+        client = httpx.AsyncClient(
+            timeout=app.state.settings.CLIENT_REQUEST.HTTP_CLIENT_REQUEST_TOTAL_TIMEOUT
+        )
+        if tracing_settings:
+            setup_httpx_client_tracing(client=client)
         ServicesClient.create(
             app,
-            client=httpx.AsyncClient(
-                timeout=app.state.settings.CLIENT_REQUEST.HTTP_CLIENT_REQUEST_TOTAL_TIMEOUT
-            ),
+            client=client,
         )

     async def on_shutdown() -> None:
diff --git a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/api_client/_thin.py b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/api_client/_thin.py
index 241f32fe70e..feba415ecd0 100644
--- a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/api_client/_thin.py
+++ b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/api_client/_thin.py
@@ -12,6 +12,7 @@
     expect_status,
     retry_on_errors,
 )
+from settings_library.tracing import TracingSettings

 from ....core.dynamic_services_settings.scheduler import (
     DynamicServicesSchedulerSettings,
@@ -31,6 +32,9 @@ def __init__(self, app: FastAPI):
         scheduler_settings: DynamicServicesSchedulerSettings = (
             app.state.settings.DYNAMIC_SERVICES.DYNAMIC_SCHEDULER
         )
+        tracing_settings: TracingSettings | None = (
+            app.state.settings.DIRECTOR_V2_TRACING
+        )

         # timeouts
         self._health_request_timeout = Timeout(1.0, connect=1.0)
@@ -53,6 +57,7 @@ def __init__(self, app: FastAPI):
                 scheduler_settings.DYNAMIC_SIDECAR_API_REQUEST_TIMEOUT,
                 connect=scheduler_settings.DYNAMIC_SIDECAR_API_CONNECT_TIMEOUT,
             ),
+            tracing_settings=tracing_settings,
         )

     def _get_url(
diff --git a/services/director-v2/src/simcore_service_director_v2/modules/resource_usage_tracker_client.py b/services/director-v2/src/simcore_service_director_v2/modules/resource_usage_tracker_client.py
index 2c546ea3d84..4eaf3ba2016 100644
--- a/services/director-v2/src/simcore_service_director_v2/modules/resource_usage_tracker_client.py
+++ b/services/director-v2/src/simcore_service_director_v2/modules/resource_usage_tracker_client.py
@@ -24,6 +24,7 @@
 from models_library.services import ServiceKey, ServiceVersion
 from models_library.wallets import WalletID
 from pydantic import parse_obj_as
+from servicelib.fastapi.tracing import setup_httpx_client_tracing

 from ..core.errors import PricingPlanUnitNotFoundError
 from ..core.settings import AppSettings
@@ -41,6 +42,8 @@ def create(cls, settings: AppSettings) -> "ResourceUsageTrackerClient":
         client = httpx.AsyncClient(
             base_url=settings.DIRECTOR_V2_RESOURCE_USAGE_TRACKER.api_base_url,
         )
+        if settings.DIRECTOR_V2_TRACING:
+            setup_httpx_client_tracing(client=client)
         exit_stack = contextlib.AsyncExitStack()

         return cls(client=client, exit_stack=exit_stack)
diff --git a/services/director-v2/src/simcore_service_director_v2/modules/storage.py b/services/director-v2/src/simcore_service_director_v2/modules/storage.py
index 98e18845333..c3e9cd21576 100644
--- a/services/director-v2/src/simcore_service_director_v2/modules/storage.py
+++ b/services/director-v2/src/simcore_service_director_v2/modules/storage.py
@@ -8,9 +8,11 @@
 import httpx
 from fastapi import FastAPI, HTTPException
 from models_library.users import UserID
+from servicelib.fastapi.tracing import setup_httpx_client_tracing
 from servicelib.logging_utils import log_decorator
 from settings_library.s3 import S3Settings
 from settings_library.storage import StorageSettings
+from settings_library.tracing import TracingSettings

 # Module's business logic ---------------------------------------------
 from starlette import status
@@ -23,19 +25,27 @@
 # Module's setup logic ---------------------------------------------

-def setup(app: FastAPI, settings: StorageSettings):
-    if not settings:
-        settings = StorageSettings()
+def setup(
+    app: FastAPI,
+    storage_settings: StorageSettings | None,
+    tracing_settings: TracingSettings | None,
+):
+
+    if not storage_settings:
+        storage_settings = StorageSettings()

     def on_startup() -> None:
+        client = httpx.AsyncClient(
+            base_url=f"{storage_settings.api_base_url}",
+            timeout=app.state.settings.CLIENT_REQUEST.HTTP_CLIENT_REQUEST_TOTAL_TIMEOUT,
+        )
+        if tracing_settings:
+            setup_httpx_client_tracing(client=client)
         StorageClient.create(
             app,
-            client=httpx.AsyncClient(
-                base_url=f"{settings.api_base_url}",
-                timeout=app.state.settings.CLIENT_REQUEST.HTTP_CLIENT_REQUEST_TOTAL_TIMEOUT,
-            ),
+            client=client,
         )
-        logger.debug("created client for storage: %s", settings.api_base_url)
+        logger.debug("created client for storage: %s", storage_settings.api_base_url)

     async def on_shutdown() -> None:
         client = StorageClient.instance(app).client
diff --git a/services/director-v2/tests/integration/02/test_dynamic_sidecar_nodeports_integration.py b/services/director-v2/tests/integration/02/test_dynamic_sidecar_nodeports_integration.py
index 720e7d0c3e1..ec955f1e167 100644
--- a/services/director-v2/tests/integration/02/test_dynamic_sidecar_nodeports_integration.py
+++ b/services/director-v2/tests/integration/02/test_dynamic_sidecar_nodeports_integration.py
@@ -64,6 +64,7 @@
 from settings_library.rabbit import RabbitSettings
 from settings_library.redis import RedisSettings
 from settings_library.storage import StorageSettings
+from settings_library.tracing import TracingSettings
 from simcore_postgres_database.models.comp_pipeline import comp_pipeline
 from simcore_postgres_database.models.comp_tasks import comp_tasks
 from simcore_postgres_database.models.projects_networks import projects_networks
@@ -340,8 +341,14 @@ async def patch_storage_setup(

     original_setup = dv2_modules_storage.setup

-    def setup(app: FastAPI, settings: StorageSettings) -> None:
-        original_setup(app, local_settings)
+    def setup(
+        app: FastAPI,
+        storage_settings: StorageSettings,
+        tracing_settings: TracingSettings | None,
+    ) -> None:
+        original_setup(
+            app, storage_settings=local_settings, tracing_settings=tracing_settings
+        )

     mocker.patch("simcore_service_director_v2.modules.storage.setup", side_effect=setup)
diff --git a/services/dynamic-scheduler/requirements/_base.txt b/services/dynamic-scheduler/requirements/_base.txt
index cb2cc603fb0..3462f0ba65b 100644
--- a/services/dynamic-scheduler/requirements/_base.txt
+++ b/services/dynamic-scheduler/requirements/_base.txt
@@ -40,9 +40,7 @@ arrow==1.3.0
 asgiref==3.8.1
     # via opentelemetry-instrumentation-asgi
 async-timeout==4.0.3
-    # via
-    #   asyncpg
-    #   redis
+    # via asyncpg
 asyncpg==0.29.0
     # via sqlalchemy
 attrs==23.2.0
@@ -172,6 +170,7 @@ opentelemetry-api==1.27.0
     #   opentelemetry-instrumentation-asgi
     #   opentelemetry-instrumentation-asyncpg
     #   opentelemetry-instrumentation-fastapi
+    #   opentelemetry-instrumentation-httpx
     #   opentelemetry-instrumentation-redis
     #   opentelemetry-instrumentation-requests
     #   opentelemetry-sdk
@@ -191,6 +190,7 @@ opentelemetry-instrumentation==0.48b0
     #   opentelemetry-instrumentation-asgi
     #   opentelemetry-instrumentation-asyncpg
     #   opentelemetry-instrumentation-fastapi
+    #   opentelemetry-instrumentation-httpx
     #   opentelemetry-instrumentation-redis
     #   opentelemetry-instrumentation-requests
 opentelemetry-instrumentation-asgi==0.48b0
@@ -199,6 +199,8 @@ opentelemetry-instrumentation-asyncpg==0.48b0
     # via -r requirements/../../../packages/postgres-database/requirements/_base.in
 opentelemetry-instrumentation-fastapi==0.48b0
     # via -r requirements/../../../packages/service-library/requirements/_fastapi.in
+opentelemetry-instrumentation-httpx==0.48b0
+    # via -r requirements/../../../packages/service-library/requirements/_fastapi.in
 opentelemetry-instrumentation-redis==0.48b0
     # via -r requirements/../../../packages/service-library/requirements/_base.in
 opentelemetry-instrumentation-requests==0.48b0
@@ -218,6 +220,7 @@ opentelemetry-semantic-conventions==0.48b0
     #   opentelemetry-instrumentation-asgi
     #   opentelemetry-instrumentation-asyncpg
     #   opentelemetry-instrumentation-fastapi
+    #   opentelemetry-instrumentation-httpx
     #   opentelemetry-instrumentation-redis
     #   opentelemetry-instrumentation-requests
     #   opentelemetry-sdk
@@ -225,6 +228,7 @@ opentelemetry-util-http==0.48b0
     # via
     #   opentelemetry-instrumentation-asgi
     #   opentelemetry-instrumentation-fastapi
+    #   opentelemetry-instrumentation-httpx
     #   opentelemetry-instrumentation-requests
 orjson==3.10.0
     # via
diff --git a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/director_v2/_thin_client.py b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/director_v2/_thin_client.py
index e823216576b..68aae3b97f3 100644
--- a/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/director_v2/_thin_client.py
+++ b/services/dynamic-scheduler/src/simcore_service_dynamic_scheduler/services/director_v2/_thin_client.py
@@ -37,6 +37,7 @@ def __init__(self, app: FastAPI) -> None:
                 DEFAULT_LEGACY_WB_TO_DV2_HTTP_REQUESTS_TIMEOUT_S
             ),
             extra_allowed_method_names={"attach_lifespan_to"},
+            tracing_settings=settings.DYNAMIC_SCHEDULER_TRACING,
         )

     @retry_on_errors()
diff --git a/services/dynamic-sidecar/requirements/_base.txt b/services/dynamic-sidecar/requirements/_base.txt
index 40c32b696ec..559440b03f0 100644
--- a/services/dynamic-sidecar/requirements/_base.txt
+++ b/services/dynamic-sidecar/requirements/_base.txt
@@ -76,7 +76,6 @@ async-timeout==4.0.3
     # via
     #   aiopg
     #   asyncpg
-    #   redis
 asyncpg==0.29.0
     # via sqlalchemy
 attrs==23.2.0
@@ -243,6 +242,7 @@ opentelemetry-api==1.27.0
     #   opentelemetry-instrumentation-asyncpg
     #   opentelemetry-instrumentation-dbapi
     #   opentelemetry-instrumentation-fastapi
+    #   opentelemetry-instrumentation-httpx
     #   opentelemetry-instrumentation-redis
     #   opentelemetry-instrumentation-requests
     #   opentelemetry-sdk
@@ -266,6 +266,7 @@ opentelemetry-instrumentation==0.48b0
     #   opentelemetry-instrumentation-asyncpg
     #   opentelemetry-instrumentation-dbapi
     #   opentelemetry-instrumentation-fastapi
+    #   opentelemetry-instrumentation-httpx
     #   opentelemetry-instrumentation-redis
     #   opentelemetry-instrumentation-requests
 opentelemetry-instrumentation-aiopg==0.48b0
@@ -280,6 +281,8 @@ opentelemetry-instrumentation-dbapi==0.48b0
     # via opentelemetry-instrumentation-aiopg
 opentelemetry-instrumentation-fastapi==0.48b0
     # via -r requirements/../../../packages/service-library/requirements/_fastapi.in
+opentelemetry-instrumentation-httpx==0.48b0
+    # via -r requirements/../../../packages/service-library/requirements/_fastapi.in
 opentelemetry-instrumentation-redis==0.48b0
     # via
     #   -r requirements/../../../packages/service-library/requirements/_base.in
@@ -305,6 +308,7 @@ opentelemetry-semantic-conventions==0.48b0
     #   opentelemetry-instrumentation-asyncpg
     #   opentelemetry-instrumentation-dbapi
     #   opentelemetry-instrumentation-fastapi
+    #   opentelemetry-instrumentation-httpx
     #   opentelemetry-instrumentation-redis
     #   opentelemetry-instrumentation-requests
     #   opentelemetry-sdk
@@ -312,6 +316,7 @@ opentelemetry-util-http==0.48b0
     # via
     #   opentelemetry-instrumentation-asgi
     #   opentelemetry-instrumentation-fastapi
+    #   opentelemetry-instrumentation-httpx
     #   opentelemetry-instrumentation-requests
 orjson==3.10.0
     # via
diff --git a/services/efs-guardian/requirements/_base.txt b/services/efs-guardian/requirements/_base.txt
index 26a626f01db..8e46a857186 100644
--- a/services/efs-guardian/requirements/_base.txt
+++ b/services/efs-guardian/requirements/_base.txt
@@ -69,9 +69,7 @@ arrow==1.3.0
 asgiref==3.8.1
     # via opentelemetry-instrumentation-asgi
 async-timeout==4.0.3
-    # via
-    #   asyncpg
-    #   redis
+    # via asyncpg
 asyncpg==0.29.0
     # via sqlalchemy
 attrs==24.2.0
@@ -238,6 +236,7 @@ opentelemetry-api==1.27.0
     #   opentelemetry-instrumentation-asyncpg
     #   opentelemetry-instrumentation-botocore
     #   opentelemetry-instrumentation-fastapi
+    #   opentelemetry-instrumentation-httpx
     #   opentelemetry-instrumentation-redis
     #   opentelemetry-instrumentation-requests
     #   opentelemetry-propagator-aws-xray
@@ -261,6 +260,7 @@ opentelemetry-instrumentation==0.48b0
     #   opentelemetry-instrumentation-asyncpg
     #   opentelemetry-instrumentation-botocore
     #   opentelemetry-instrumentation-fastapi
+    #   opentelemetry-instrumentation-httpx
     #   opentelemetry-instrumentation-redis
     #   opentelemetry-instrumentation-requests
 opentelemetry-instrumentation-asgi==0.48b0
@@ -271,6 +271,8 @@ opentelemetry-instrumentation-botocore==0.48b0
     # via -r requirements/../../../packages/aws-library/requirements/_base.in
 opentelemetry-instrumentation-fastapi==0.48b0
     # via -r requirements/../../../packages/service-library/requirements/_fastapi.in
+opentelemetry-instrumentation-httpx==0.48b0
+    # via -r requirements/../../../packages/service-library/requirements/_fastapi.in
 opentelemetry-instrumentation-redis==0.48b0
     # via
     #   -r requirements/../../../packages/aws-library/requirements/../../../packages/service-library/requirements/_base.in
@@ -298,6 +300,7 @@ opentelemetry-semantic-conventions==0.48b0
     #   opentelemetry-instrumentation-asyncpg
     #   opentelemetry-instrumentation-botocore
     #   opentelemetry-instrumentation-fastapi
+    #   opentelemetry-instrumentation-httpx
     #   opentelemetry-instrumentation-redis
     #   opentelemetry-instrumentation-requests
     #   opentelemetry-sdk
@@ -305,6 +308,7 @@ opentelemetry-util-http==0.48b0
     # via
     #   opentelemetry-instrumentation-asgi
     #   opentelemetry-instrumentation-fastapi
+    #   opentelemetry-instrumentation-httpx
     #   opentelemetry-instrumentation-requests
 orjson==3.10.7
     # via
diff --git a/services/efs-guardian/requirements/_test.txt b/services/efs-guardian/requirements/_test.txt
index 8bdc1ec8ebf..f188e8071de 100644
--- a/services/efs-guardian/requirements/_test.txt
+++ b/services/efs-guardian/requirements/_test.txt
@@ -23,10 +23,6 @@ anyio==4.6.2.post1
     #   httpx
 asgi-lifespan==2.1.0
     # via -r requirements/_test.in
-async-timeout==4.0.3
-    # via
-    #   -c requirements/_base.txt
-    #   redis
 attrs==24.2.0
     # via
     #   -c requirements/_base.txt
diff --git a/services/invitations/requirements/_base.txt b/services/invitations/requirements/_base.txt
index c6e253b5e6a..732bac0872f 100644
--- a/services/invitations/requirements/_base.txt
+++ b/services/invitations/requirements/_base.txt
@@ -35,8 +35,6 @@ arrow==1.3.0
     # -r requirements/../../../packages/service-library/requirements/_base.in
 asgiref==3.8.1
     # via opentelemetry-instrumentation-asgi
-async-timeout==4.0.3
-    # via redis
 attrs==23.2.0
     # via
     #   aiohttp
@@ -153,6 +151,7 @@ opentelemetry-api==1.26.0
     #   opentelemetry-instrumentation
     #   opentelemetry-instrumentation-asgi
     #   opentelemetry-instrumentation-fastapi
+    #   opentelemetry-instrumentation-httpx
     #   opentelemetry-instrumentation-redis
     #   opentelemetry-instrumentation-requests
     #   opentelemetry-sdk
@@ -171,12 +170,15 @@ opentelemetry-instrumentation==0.47b0
     # via
     #   opentelemetry-instrumentation-asgi
     #   opentelemetry-instrumentation-fastapi
+    #   opentelemetry-instrumentation-httpx
     #   opentelemetry-instrumentation-redis
     #   opentelemetry-instrumentation-requests
 opentelemetry-instrumentation-asgi==0.47b0
     # via opentelemetry-instrumentation-fastapi
 opentelemetry-instrumentation-fastapi==0.47b0
     # via -r requirements/../../../packages/service-library/requirements/_fastapi.in
+opentelemetry-instrumentation-httpx==0.47b0
+    # via -r requirements/../../../packages/service-library/requirements/_fastapi.in
 opentelemetry-instrumentation-redis==0.47b0
     # via -r requirements/../../../packages/service-library/requirements/_base.in
 opentelemetry-instrumentation-requests==0.47b0
@@ -195,6 +197,7 @@ opentelemetry-semantic-conventions==0.47b0
     # via
     #   opentelemetry-instrumentation-asgi
     #   opentelemetry-instrumentation-fastapi
+    #   opentelemetry-instrumentation-httpx
     #   opentelemetry-instrumentation-redis
     #   opentelemetry-instrumentation-requests
     #   opentelemetry-sdk
@@ -202,6 +205,7 @@ opentelemetry-util-http==0.47b0
     # via
     #   opentelemetry-instrumentation-asgi
     #   opentelemetry-instrumentation-fastapi
+    #   opentelemetry-instrumentation-httpx
     #   opentelemetry-instrumentation-requests
 orjson==3.10.0
     # via
diff --git a/services/payments/requirements/_base.txt b/services/payments/requirements/_base.txt
index 88aae6375d4..c38b7880c1d 100644
--- a/services/payments/requirements/_base.txt
+++ b/services/payments/requirements/_base.txt
@@ -43,9 +43,7 @@ arrow==1.3.0
 asgiref==3.8.1
     # via opentelemetry-instrumentation-asgi
 async-timeout==4.0.3
-    # via
-    #   asyncpg
-    #   redis
+    # via asyncpg
 asyncpg==0.29.0
     # via sqlalchemy
 attrs==23.2.0
@@ -201,6 +199,7 @@ opentelemetry-api==1.27.0
     #   opentelemetry-instrumentation-asgi
     #   opentelemetry-instrumentation-asyncpg
     #   opentelemetry-instrumentation-fastapi
+    #   opentelemetry-instrumentation-httpx
     #   opentelemetry-instrumentation-redis
     #   opentelemetry-instrumentation-requests
     #   opentelemetry-sdk
@@ -220,6 +219,7 @@ opentelemetry-instrumentation==0.48b0
     #   opentelemetry-instrumentation-asgi
     #   opentelemetry-instrumentation-asyncpg
     #   opentelemetry-instrumentation-fastapi
+    #   opentelemetry-instrumentation-httpx
     #   opentelemetry-instrumentation-redis
     #   opentelemetry-instrumentation-requests
 opentelemetry-instrumentation-asgi==0.48b0
@@ -228,6 +228,8 @@ opentelemetry-instrumentation-asyncpg==0.48b0
     # via -r requirements/../../../packages/postgres-database/requirements/_base.in
 opentelemetry-instrumentation-fastapi==0.48b0
     # via -r requirements/../../../packages/service-library/requirements/_fastapi.in
+opentelemetry-instrumentation-httpx==0.48b0
+    # via -r requirements/../../../packages/service-library/requirements/_fastapi.in
 opentelemetry-instrumentation-redis==0.48b0
     # via -r requirements/../../../packages/service-library/requirements/_base.in
 opentelemetry-instrumentation-requests==0.48b0
@@ -247,6 +249,7 @@ opentelemetry-semantic-conventions==0.48b0
     #   opentelemetry-instrumentation-asgi
     #   opentelemetry-instrumentation-asyncpg
     #   opentelemetry-instrumentation-fastapi
+    #   opentelemetry-instrumentation-httpx
     #   opentelemetry-instrumentation-redis
     #   opentelemetry-instrumentation-requests
     #   opentelemetry-sdk
@@ -254,6 +257,7 @@ opentelemetry-util-http==0.48b0
     # via
     #   opentelemetry-instrumentation-asgi
     #   opentelemetry-instrumentation-fastapi
+    #   opentelemetry-instrumentation-httpx
     #   opentelemetry-instrumentation-requests
 orjson==3.10.6
     # via
diff --git a/services/payments/src/simcore_service_payments/services/payments_gateway.py b/services/payments/src/simcore_service_payments/services/payments_gateway.py
index 0b1097492c6..44c54b6108d 100644
--- a/services/payments/src/simcore_service_payments/services/payments_gateway.py
+++ b/services/payments/src/simcore_service_payments/services/payments_gateway.py
@@ -25,6 +25,7 @@
     HealthMixinMixin,
 )
 from servicelib.fastapi.httpx_utils import to_curl_command
+from servicelib.fastapi.tracing import setup_httpx_client_tracing
 from simcore_service_payments.models.schemas.acknowledgements import (
     AckPaymentWithPaymentMethod,
 )
@@ -216,5 +217,7 @@ def setup_payments_gateway(app: FastAPI):
             secret=settings.PAYMENTS_GATEWAY_API_SECRET.get_secret_value()
         ),
     )
+    if settings.PAYMENTS_TRACING:
+        setup_httpx_client_tracing(api.client)

     api.attach_lifespan_to(app)
     api.set_to_app_state(app)
diff --git a/services/payments/src/simcore_service_payments/services/resource_usage_tracker.py b/services/payments/src/simcore_service_payments/services/resource_usage_tracker.py
index e66f650fe7b..3f114540f99 100644
--- a/services/payments/src/simcore_service_payments/services/resource_usage_tracker.py
+++ b/services/payments/src/simcore_service_payments/services/resource_usage_tracker.py
@@ -25,6 +25,7 @@
     BaseHTTPApi,
     HealthMixinMixin,
 )
+from servicelib.fastapi.tracing import setup_httpx_client_tracing

 from ..core.settings import ApplicationSettings

@@ -73,5 +74,7 @@ def setup_resource_usage_tracker(app: FastAPI):
     api = ResourceUsageTrackerApi.from_client_kwargs(
         base_url=settings.PAYMENTS_RESOURCE_USAGE_TRACKER.base_url,
     )
+    if settings.PAYMENTS_TRACING:
+        setup_httpx_client_tracing(api.client)
     api.set_to_app_state(app)
     api.attach_lifespan_to(app)
diff --git a/services/payments/src/simcore_service_payments/services/stripe.py b/services/payments/src/simcore_service_payments/services/stripe.py
index 38cc21fab0e..3f3fa933bb6 100644
--- a/services/payments/src/simcore_service_payments/services/stripe.py
+++ b/services/payments/src/simcore_service_payments/services/stripe.py
@@ -19,6 +19,7 @@
     BaseHTTPApi,
     HealthMixinMixin,
 )
+from servicelib.fastapi.tracing import setup_httpx_client_tracing

 from ..core.errors import StripeRuntimeError
 from ..core.settings import ApplicationSettings

@@ -91,6 +92,8 @@ def setup_stripe(app: FastAPI):
         base_url=settings.PAYMENTS_STRIPE_URL,
         auth=_StripeBearerAuth(settings.PAYMENTS_STRIPE_API_SECRET.get_secret_value()),
     )
+    if settings.PAYMENTS_TRACING:
+        setup_httpx_client_tracing(api.client)
     api.set_to_app_state(app)
     api.attach_lifespan_to(app)
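All three payments clients (gateway, resource-usage tracker, Stripe) share one guard: the httpx client already built by `from_client_kwargs` is instrumented only when `PAYMENTS_TRACING` is set. As a sketch (`SomeApi` is a hypothetical `BaseHTTPApi` subclass, not from this patch):

    api = SomeApi.from_client_kwargs(base_url="http://example:8000")
    if settings.PAYMENTS_TRACING:  # falsy/None when tracing is disabled
        setup_httpx_client_tracing(api.client)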
b/services/storage/requirements/_base.txt @@ -78,7 +78,6 @@ async-timeout==4.0.3 # via # aiopg # asyncpg - # redis asyncpg==0.29.0 # via sqlalchemy attrs==23.2.0 diff --git a/services/storage/requirements/_test.txt b/services/storage/requirements/_test.txt index 1e33824a7c0..f0132fe4c7c 100644 --- a/services/storage/requirements/_test.txt +++ b/services/storage/requirements/_test.txt @@ -13,10 +13,6 @@ aiosignal==1.3.1 # aiohttp antlr4-python3-runtime==4.13.2 # via moto -async-timeout==4.0.3 - # via - # -c requirements/_base.txt - # redis attrs==23.2.0 # via # -c requirements/_base.txt diff --git a/services/web/server/requirements/_base.txt b/services/web/server/requirements/_base.txt index 5b42c95fffd..01c8859912d 100644 --- a/services/web/server/requirements/_base.txt +++ b/services/web/server/requirements/_base.txt @@ -89,7 +89,6 @@ async-timeout==4.0.3 # via # aiohttp # aiopg - # redis asyncpg==0.27.0 # via # -r requirements/_base.in diff --git a/services/web/server/requirements/_test.txt b/services/web/server/requirements/_test.txt index 67fcd247fda..3aab7cde47d 100644 --- a/services/web/server/requirements/_test.txt +++ b/services/web/server/requirements/_test.txt @@ -18,7 +18,6 @@ async-timeout==4.0.3 # via # -c requirements/_base.txt # aiohttp - # redis asyncpg==0.27.0 # via # -c requirements/_base.txt diff --git a/tests/swarm-deploy/requirements/_test.txt b/tests/swarm-deploy/requirements/_test.txt index 2f4dc983011..dad3c42339d 100644 --- a/tests/swarm-deploy/requirements/_test.txt +++ b/tests/swarm-deploy/requirements/_test.txt @@ -197,6 +197,10 @@ opentelemetry-api==1.27.0 # opentelemetry-exporter-otlp-proto-grpc # opentelemetry-exporter-otlp-proto-http # opentelemetry-instrumentation + # opentelemetry-instrumentation-aiopg + # opentelemetry-instrumentation-asyncpg + # opentelemetry-instrumentation-dbapi + # opentelemetry-instrumentation-redis # opentelemetry-instrumentation-requests # opentelemetry-sdk # opentelemetry-semantic-conventions @@ -213,7 +217,22 @@ opentelemetry-exporter-otlp-proto-grpc==1.27.0 opentelemetry-exporter-otlp-proto-http==1.27.0 # via opentelemetry-exporter-otlp opentelemetry-instrumentation==0.48b0 - # via opentelemetry-instrumentation-requests + # via + # opentelemetry-instrumentation-aiopg + # opentelemetry-instrumentation-asyncpg + # opentelemetry-instrumentation-dbapi + # opentelemetry-instrumentation-redis + # opentelemetry-instrumentation-requests +opentelemetry-instrumentation-aiopg==0.48b0 + # via -r requirements/../../../packages/simcore-sdk/requirements/_base.in +opentelemetry-instrumentation-asyncpg==0.48b0 + # via -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/_base.in +opentelemetry-instrumentation-dbapi==0.48b0 + # via opentelemetry-instrumentation-aiopg +opentelemetry-instrumentation-redis==0.48b0 + # via + # -r requirements/../../../packages/service-library/requirements/_base.in + # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in opentelemetry-instrumentation-requests==0.48b0 # via # -r requirements/../../../packages/service-library/requirements/_base.in @@ -231,6 +250,9 @@ opentelemetry-sdk==1.27.0 # opentelemetry-exporter-otlp-proto-http opentelemetry-semantic-conventions==0.48b0 # via + # opentelemetry-instrumentation-asyncpg + # opentelemetry-instrumentation-dbapi + # opentelemetry-instrumentation-redis # opentelemetry-instrumentation-requests # opentelemetry-sdk opentelemetry-util-http==0.48b0 @@ -494,6 
+516,9 @@ wrapt==1.16.0 # via # deprecated # opentelemetry-instrumentation + # opentelemetry-instrumentation-aiopg + # opentelemetry-instrumentation-dbapi + # opentelemetry-instrumentation-redis yarl==1.12.1 # via # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/postgres-database/requirements/_base.in From 17f486e3848c3943bb0b5f7f2336aa19a2dbcee3 Mon Sep 17 00:00:00 2001 From: Odei Maiz <33152403+odeimaiz@users.noreply.github.com> Date: Thu, 14 Nov 2024 11:16:18 +0100 Subject: [PATCH 19/22] =?UTF-8?q?=F0=9F=8E=A8=20[Frontend]=20UX:=20Create?= =?UTF-8?q?=20Study=20from=20Template=20(#6706)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../osparc/dashboard/ResourceBrowserBase.js | 4 +- .../class/osparc/dashboard/StudyBrowser.js | 2 +- .../class/osparc/dashboard/TemplateBrowser.js | 98 ++++++++--- .../class/osparc/node/TierSelectionView.js | 2 +- .../class/osparc/study/NodePricingUnits.js | 76 ++++++--- .../source/class/osparc/study/StudyOptions.js | 152 +++++++++++------- .../class/osparc/study/StudyPricingUnits.js | 34 +++- .../client/source/class/osparc/study/Utils.js | 2 +- 8 files changed, 259 insertions(+), 111 deletions(-) diff --git a/services/static-webserver/client/source/class/osparc/dashboard/ResourceBrowserBase.js b/services/static-webserver/client/source/class/osparc/dashboard/ResourceBrowserBase.js index a80672bd3cd..344507aad9a 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/ResourceBrowserBase.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/ResourceBrowserBase.js @@ -95,7 +95,7 @@ qx.Class.define("osparc.dashboard.ResourceBrowserBase", { return isLogged; }, - startStudyById: function(studyId, openCB, cancelCB, isStudyCreation = false) { + startStudyById: function(studyId, openCB, cancelCB, showStudyOptions = false) { if (!osparc.dashboard.ResourceBrowserBase.checkLoggedIn()) { return; } @@ -117,7 +117,7 @@ qx.Class.define("osparc.dashboard.ResourceBrowserBase", { osparc.data.Resources.fetch("studies", "getWallet", params) .then(wallet => { if ( - isStudyCreation || + showStudyOptions || wallet === null || osparc.desktop.credits.Utils.getWallet(wallet["walletId"]) === null ) { diff --git a/services/static-webserver/client/source/class/osparc/dashboard/StudyBrowser.js b/services/static-webserver/client/source/class/osparc/dashboard/StudyBrowser.js index ceaee03b3ac..a2de2032524 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/StudyBrowser.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/StudyBrowser.js @@ -1229,7 +1229,7 @@ qx.Class.define("osparc.dashboard.StudyBrowser", { folderId: this.getCurrentFolderId(), }; osparc.study.Utils.createStudyFromTemplate(templateCopyData, this._loadingPage, contextProps) - .then(studyId => this.__startStudyAfterCreating(studyId)) + .then(studyData => this.__startStudyAfterCreating(studyData["uuid"])) .catch(err => { this._hideLoadingPage(); osparc.FlashMessenger.getInstance().logAs(err.message, "ERROR"); diff --git a/services/static-webserver/client/source/class/osparc/dashboard/TemplateBrowser.js b/services/static-webserver/client/source/class/osparc/dashboard/TemplateBrowser.js index d597d8a438c..7f4f0362cab 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/TemplateBrowser.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/TemplateBrowser.js @@ -137,27 +137,85 @@ qx.Class.define("osparc.dashboard.TemplateBrowser", { 
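+      // Flow implemented by this hunk: a StudyOptions dialog is shown *before*
+      // the study is created (patchStudy=false, so nothing is PATCHed on the
+      // template itself); once the user confirms, the study is created from
+      // the template and only then are the name, the wallet and the per-node
+      // pricing-unit selections patched onto the new copy.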
return; } - this._showLoadingPage(this.tr("Creating ") + (templateData.name || osparc.product.Utils.getStudyAlias({firstUpperCase: true}))); - osparc.study.Utils.createStudyFromTemplate(templateData, this._loadingPage) - .then(studyId => { - const openCB = () => this._hideLoadingPage(); - const cancelCB = () => { - this._hideLoadingPage(); - const params = { - url: { - studyId - } + const studyAlias = osparc.product.Utils.getStudyAlias({firstUpperCase: true}); + this._showLoadingPage(this.tr("Creating ") + (templateData.name || studyAlias)); + + const studyOptions = new osparc.study.StudyOptions(); + // they will be patched once the study is created + studyOptions.setPatchStudy(false); + studyOptions.setStudyData(templateData); + const win = osparc.study.StudyOptions.popUpInWindow(studyOptions); + win.moveItUp(); + const cancelStudyOptions = () => { + this._hideLoadingPage(); + win.close(); + } + win.addListener("cancel", () => cancelStudyOptions()); + studyOptions.addListener("cancel", () => cancelStudyOptions()); + studyOptions.addListener("startStudy", () => { + const newName = studyOptions.getChildControl("title-field").getValue(); + const walletSelection = studyOptions.getChildControl("wallet-selector").getSelection(); + const nodesPricingUnits = studyOptions.getChildControl("study-pricing-units").getNodePricingUnits(); + win.close(); + this._showLoadingPage(this.tr("Creating ") + (newName || studyAlias)); + osparc.study.Utils.createStudyFromTemplate(templateData, this._loadingPage) + .then(newStudyData => { + const studyId = newStudyData["uuid"]; + const openCB = () => { + this._hideLoadingPage(); }; - osparc.data.Resources.fetch("studies", "delete", params); - }; - const isStudyCreation = true; - this._startStudyById(studyId, openCB, cancelCB, isStudyCreation); - }) - .catch(err => { - this._hideLoadingPage(); - osparc.FlashMessenger.getInstance().logAs(err.message, "ERROR"); - console.error(err); - }); + const cancelCB = () => { + this._hideLoadingPage(); + const params = { + url: { + studyId + } + }; + osparc.data.Resources.fetch("studies", "delete", params); + }; + + const promises = []; + // patch the name + if (newStudyData["name"] !== newName) { + promises.push(osparc.study.StudyOptions.updateName(newStudyData, newName)); + } + // patch the wallet + if (walletSelection.length && walletSelection[0]["walletId"]) { + const walletId = walletSelection[0]["walletId"]; + promises.push(osparc.study.StudyOptions.updateWallet(newStudyData["uuid"], walletId)); + } + // patch the pricing units + // the nodeIds are coming from the original template, they need to be mapped to the newStudy + const workbench = newStudyData["workbench"]; + const nodesIdsListed = []; + Object.keys(workbench).forEach(nodeId => { + const node = workbench[nodeId]; + if (osparc.study.StudyPricingUnits.includeInList(node)) { + nodesIdsListed.push(nodeId); + } + }); + nodesPricingUnits.forEach((nodePricingUnits, idx) => { + const selectedPricingUnitId = nodePricingUnits.getPricingUnits().getSelectedUnitId(); + if (selectedPricingUnitId) { + const nodeId = nodesIdsListed[idx]; + const pricingPlanId = nodePricingUnits.getPricingPlanId(); + promises.push(osparc.study.NodePricingUnits.patchPricingUnitSelection(studyId, nodeId, pricingPlanId, selectedPricingUnitId)); + } + }); + + Promise.all(promises) + .then(() => { + win.close(); + const showStudyOptions = false; + this._startStudyById(studyId, openCB, cancelCB, showStudyOptions); + }); + }) + .catch(err => { + this._hideLoadingPage(); + 
osparc.FlashMessenger.getInstance().logAs(err.message, "ERROR"); + console.error(err); + }); + }); }, // LAYOUT // diff --git a/services/static-webserver/client/source/class/osparc/node/TierSelectionView.js b/services/static-webserver/client/source/class/osparc/node/TierSelectionView.js index 34dfc397b37..ffa1431a00e 100644 --- a/services/static-webserver/client/source/class/osparc/node/TierSelectionView.js +++ b/services/static-webserver/client/source/class/osparc/node/TierSelectionView.js @@ -105,7 +105,7 @@ qx.Class.define("osparc.node.TierSelectionView", { if (selection.length) { tierBox.setEnabled(false); const selectedUnitId = selection[0].getModel(); - osparc.study.NodePricingUnits.pricingUnitSelected(studyId, nodeId, pricingPlans["pricingPlanId"], selectedUnitId) + osparc.study.NodePricingUnits.patchPricingUnitSelection(studyId, nodeId, pricingPlans["pricingPlanId"], selectedUnitId) .finally(() => { tierBox.setEnabled(true); showSelectedTier(selectedUnitId); diff --git a/services/static-webserver/client/source/class/osparc/study/NodePricingUnits.js b/services/static-webserver/client/source/class/osparc/study/NodePricingUnits.js index d8caa28b68f..76918e12b3e 100644 --- a/services/static-webserver/client/source/class/osparc/study/NodePricingUnits.js +++ b/services/static-webserver/client/source/class/osparc/study/NodePricingUnits.js @@ -30,8 +30,10 @@ qx.Class.define("osparc.study.NodePricingUnits", { layout: new qx.ui.layout.VBox() }); - this.__studyId = studyId; - this.__nodeId = nodeId; + this.set({ + studyId, + nodeId, + }); if (node instanceof osparc.data.model.Node) { this.__nodeKey = node.getKey(); this.__nodeVersion = node.getVersion(); @@ -43,8 +45,35 @@ qx.Class.define("osparc.study.NodePricingUnits", { } }, + properties: { + studyId: { + check: "String", + init: null, + nullable: false, + }, + + nodeId: { + check: "String", + init: null, + nullable: false, + }, + + pricingPlanId: { + check: "Number", + init: null, + nullable: false, + }, + + patchNode: { + check: "Boolean", + init: true, + nullable: false, + event: "changePatchNode", + }, + }, + statics: { - pricingUnitSelected: function(studyId, nodeId, planId, selectedUnitId) { + patchPricingUnitSelection: function(studyId, nodeId, planId, selectedUnitId) { const params = { url: { studyId, @@ -58,19 +87,18 @@ qx.Class.define("osparc.study.NodePricingUnits", { }, members: { - __studyId: null, - __nodeId: null, __nodeKey: null, __nodeVersion: null, __nodeLabel: null, + __pricingUnits: null, showPricingUnits: function(inGroupBox = true) { return new Promise(resolve => { const nodeKey = this.__nodeKey; const nodeVersion = this.__nodeVersion; const nodeLabel = this.__nodeLabel; - const studyId = this.__studyId; - const nodeId = this.__nodeId; + const studyId = this.getStudyId(); + const nodeId = this.getNodeId(); const plansParams = { url: osparc.data.Resources.getServiceUrl( @@ -79,30 +107,36 @@ qx.Class.define("osparc.study.NodePricingUnits", { ) }; osparc.data.Resources.fetch("services", "pricingPlans", plansParams) - .then(pricingPlans => { - if (pricingPlans) { + .then(pricingPlan => { + if (pricingPlan) { const unitParams = { url: { studyId, nodeId } }; + this.set({ + pricingPlanId: pricingPlan["pricingPlanId"] + }); osparc.data.Resources.fetch("studies", "getPricingUnit", unitParams) .then(preselectedPricingUnit => { - if (pricingPlans && "pricingUnits" in pricingPlans && pricingPlans["pricingUnits"].length) { - const unitButtons = new osparc.study.PricingUnits(pricingPlans["pricingUnits"], preselectedPricingUnit); + 
if (pricingPlan && "pricingUnits" in pricingPlan && pricingPlan["pricingUnits"].length) { + const pricingUnitButtons = this.__pricingUnits = new osparc.study.PricingUnits(pricingPlan["pricingUnits"], preselectedPricingUnit); if (inGroupBox) { const pricingUnitsLayout = osparc.study.StudyOptions.createGroupBox(nodeLabel); - pricingUnitsLayout.add(unitButtons); + pricingUnitsLayout.add(pricingUnitButtons); this._add(pricingUnitsLayout); } else { - this._add(unitButtons); + this._add(pricingUnitButtons); } - unitButtons.addListener("changeSelectedUnitId", e => { - unitButtons.setEnabled(false); - const selectedPricingUnitId = e.getData(); - this.self().pricingUnitSelected(this.__studyId, this.__nodeId, pricingPlans["pricingPlanId"], selectedPricingUnitId) - .finally(() => unitButtons.setEnabled(true)); + pricingUnitButtons.addListener("changeSelectedUnitId", e => { + if (this.isPatchNode()) { + pricingUnitButtons.setEnabled(false); + const pricingPlanId = this.getPricingPlanId(); + const selectedPricingUnitId = e.getData(); + this.self().patchPricingUnitSelection(studyId, nodeId, pricingPlanId, selectedPricingUnitId) + .finally(() => pricingUnitButtons.setEnabled(true)); + } }); } }) @@ -110,6 +144,10 @@ qx.Class.define("osparc.study.NodePricingUnits", { } }); }); - } + }, + + getPricingUnits: function() { + return this.__pricingUnits; + }, } }); diff --git a/services/static-webserver/client/source/class/osparc/study/StudyOptions.js b/services/static-webserver/client/source/class/osparc/study/StudyOptions.js index 9922ec017e3..5b0fd30cadb 100644 --- a/services/static-webserver/client/source/class/osparc/study/StudyOptions.js +++ b/services/static-webserver/client/source/class/osparc/study/StudyOptions.js @@ -22,8 +22,11 @@ qx.Class.define("osparc.study.StudyOptions", { this.base(arguments); this._setLayout(new qx.ui.layout.VBox(15)); + this.__buildLayout(); - this.setStudyId(studyId); + if (studyId) { + this.setStudyId(studyId); + } }, properties: { @@ -40,7 +43,14 @@ qx.Class.define("osparc.study.StudyOptions", { nullable: true, event: "changeWallet", apply: "__applyWallet" - } + }, + + patchStudy: { + check: "Boolean", + init: true, + nullable: false, + event: "changePatchStudy", + }, }, events: { @@ -78,7 +88,31 @@ qx.Class.define("osparc.study.StudyOptions", { }); box.setLayout(new qx.ui.layout.VBox(5)); return box; - } + }, + + updateName: function(studyData, name) { + return osparc.info.StudyUtils.patchStudyData(studyData, "name", name) + .catch(err => { + console.error(err); + const msg = err.message || qx.locale.Manager.tr("Something went wrong Renaming"); + osparc.FlashMessenger.logAs(msg, "ERROR"); + }); + }, + + updateWallet: function(studyId, walletId) { + const params = { + url: { + studyId, + walletId, + } + }; + return osparc.data.Resources.fetch("studies", "selectWallet", params) + .catch(err => { + console.error(err); + const msg = err.message || qx.locale.Manager.tr("Error selecting Credit Account"); + osparc.FlashMessenger.getInstance().logAs(msg, "ERROR"); + }); + }, }, members: { @@ -147,6 +181,27 @@ qx.Class.define("osparc.study.StudyOptions", { control = this.self().createGroupBox(this.tr("Tiers")); this.getChildControl("options-layout").add(control); break; + case "study-pricing-units": { + control = new osparc.study.StudyPricingUnits(); + const loadingImage = this.getChildControl("loading-units-spinner"); + const unitsBoxesLayout = this.getChildControl("services-resources-layout"); + const unitsLoading = () => { + loadingImage.show(); + unitsBoxesLayout.exclude(); + 
}; + const unitsReady = () => { + loadingImage.exclude(); + unitsBoxesLayout.show(); + control.getNodePricingUnits().forEach(nodePricingUnits => { + this.bind("patchStudy", nodePricingUnits, "patchNode"); + }); + }; + unitsLoading(); + control.addListener("loadingUnits", () => unitsLoading()); + control.addListener("unitsReady", () => unitsReady()); + unitsBoxesLayout.add(control); + break; + } case "buttons-layout": control = new qx.ui.container.Composite(new qx.ui.layout.HBox(5).set({ alignX: "right" @@ -192,7 +247,7 @@ qx.Class.define("osparc.study.StudyOptions", { ]) .then(values => { const studyData = values[0]; - this.__studyData = osparc.data.model.Study.deepCloneStudyObject(studyData); + this.setStudyData(studyData); if (values[1] && "walletId" in values[1]) { this.__studyWalletId = values[1]["walletId"]; @@ -201,6 +256,16 @@ qx.Class.define("osparc.study.StudyOptions", { }); }, + setStudyData: function(studyData) { + this.__studyData = osparc.data.model.Study.deepCloneStudyObject(studyData); + + const titleField = this.getChildControl("title-field"); + titleField.setValue(this.__studyData["name"]); + + const studyPricingUnits = this.getChildControl("study-pricing-units"); + studyPricingUnits.setStudyData(this.__studyData); + }, + __applyWallet: function(wallet) { if (wallet) { const walletSelector = this.getChildControl("wallet-selector"); @@ -224,9 +289,6 @@ qx.Class.define("osparc.study.StudyOptions", { const store = osparc.store.Store.getInstance(); const titleField = this.getChildControl("title-field"); - if (this.__studyData) { - titleField.setValue(this.__studyData["name"]); - } titleField.addListener("appear", () => { titleField.focus(); titleField.activate(); @@ -261,21 +323,7 @@ qx.Class.define("osparc.study.StudyOptions", { }, __buildOptionsLayout: function() { - const loadingImage = this.getChildControl("loading-units-spinner"); - const unitsBoxesLayout = this.getChildControl("services-resources-layout"); - const unitsLoading = () => { - loadingImage.show(); - unitsBoxesLayout.exclude(); - }; - const unitsReady = () => { - loadingImage.exclude(); - unitsBoxesLayout.show(); - }; - unitsLoading(); - const studyPricingUnits = new osparc.study.StudyPricingUnits(this.__studyData); - studyPricingUnits.addListener("loadingUnits", () => unitsLoading()); - studyPricingUnits.addListener("unitsReady", () => unitsReady()); - unitsBoxesLayout.add(studyPricingUnits); + this.getChildControl("study-pricing-units"); }, __buildButtons: function() { @@ -291,48 +339,34 @@ qx.Class.define("osparc.study.StudyOptions", { const openButton = this.getChildControl("open-button"); openButton.setFetching(true); - // first, update the name if necessary - const titleSelection = this.getChildControl("title-field").getValue(); - if (this.__studyData && this.__studyData["name"] !== titleSelection) { - await this.__updateName(this.__studyData, titleSelection); - } + if (this.isPatchStudy()) { + // first, update the name if necessary + const titleSelection = this.getChildControl("title-field").getValue(); + if (this.__studyData["name"] !== titleSelection) { + await this.self().updateName(this.__studyData, titleSelection); + } - // second, update the wallet if necessary - const store = osparc.store.Store.getInstance(); - const walletSelection = this.getChildControl("wallet-selector").getSelection(); - const studyId = this.getStudyId(); - if (studyId && walletSelection.length && walletSelection[0]["walletId"]) { - const params = { - url: { - studyId, - "walletId": walletSelection[0]["walletId"] - } - }; - 
osparc.data.Resources.fetch("studies", "selectWallet", params) - .then(() => { - store.setActiveWallet(this.getWallet()); - this.fireEvent("startStudy"); - }) - .catch(err => { - console.error(err); - const msg = err.message || this.tr("Error selecting Credit Account"); - osparc.FlashMessenger.getInstance().logAs(msg, "ERROR"); - }) - .finally(() => openButton.setFetching(false)); + // second, update the wallet if necessary + const store = osparc.store.Store.getInstance(); + const walletSelection = this.getChildControl("wallet-selector").getSelection(); + if (walletSelection.length && walletSelection[0]["walletId"]) { + const studyId = this.getStudyId(); + const walletId = walletSelection[0]["walletId"]; + this.self().updateWallet(studyId, walletId) + .then(() => { + store.setActiveWallet(this.getWallet()); + this.fireEvent("startStudy"); + }) + .finally(() => openButton.setFetching(false)); + } else { + store.setActiveWallet(this.getWallet()); + this.fireEvent("startStudy"); + openButton.setFetching(false); + } } else { - store.setActiveWallet(this.getWallet()); this.fireEvent("startStudy"); openButton.setFetching(false); } }, - - __updateName: function(studyData, name) { - return osparc.info.StudyUtils.patchStudyData(studyData, "name", name) - .catch(err => { - console.error(err); - const msg = this.tr("Something went wrong Renaming"); - osparc.FlashMessenger.logAs(msg, "ERROR"); - }); - } } }); diff --git a/services/static-webserver/client/source/class/osparc/study/StudyPricingUnits.js b/services/static-webserver/client/source/class/osparc/study/StudyPricingUnits.js index 793fee5cb34..e3e8514fbaf 100644 --- a/services/static-webserver/client/source/class/osparc/study/StudyPricingUnits.js +++ b/services/static-webserver/client/source/class/osparc/study/StudyPricingUnits.js @@ -25,9 +25,11 @@ qx.Class.define("osparc.study.StudyPricingUnits", { layout: new qx.ui.layout.VBox(5) }); - this.__studyData = studyData; + this.__nodePricingUnits = []; - this.__showPricingUnits(); + if (studyData) { + this.setStudyData(studyData); + } }, events: { @@ -35,8 +37,20 @@ qx.Class.define("osparc.study.StudyPricingUnits", { "unitsReady": "qx.event.type.Event" }, + statics: { + includeInList: function(node) { + return !osparc.data.model.Node.isFrontend(node); + }, + }, + members: { __studyData: null, + __nodePricingUnits: null, + + setStudyData: function(studyData) { + this.__studyData = studyData; + this.__showPricingUnits(); + }, __showPricingUnits: function() { const unitsLoading = () => this.fireEvent("loadingUnits"); @@ -48,16 +62,20 @@ qx.Class.define("osparc.study.StudyPricingUnits", { const workbench = this.__studyData["workbench"]; Object.keys(workbench).forEach(nodeId => { const node = workbench[nodeId]; - if (osparc.data.model.Node.isFrontend(node)) { - return; + if (this.self().includeInList(node)) { + const nodePricingUnits = new osparc.study.NodePricingUnits(this.__studyData["uuid"], nodeId, node); + this.__nodePricingUnits.push(nodePricingUnits); + this._add(nodePricingUnits); + promises.push(nodePricingUnits.showPricingUnits()); } - const nodePricingUnits = new osparc.study.NodePricingUnits(this.__studyData["uuid"], nodeId, node); - this._add(nodePricingUnits); - promises.push(nodePricingUnits.showPricingUnits()); }); } Promise.all(promises) .then(() => unitsAdded()); - } + }, + + getNodePricingUnits: function() { + return this.__nodePricingUnits; + }, } }); diff --git a/services/static-webserver/client/source/class/osparc/study/Utils.js 
b/services/static-webserver/client/source/class/osparc/study/Utils.js index 0240d263e47..66ed40201f4 100644 --- a/services/static-webserver/client/source/class/osparc/study/Utils.js +++ b/services/static-webserver/client/source/class/osparc/study/Utils.js @@ -255,7 +255,7 @@ qx.Class.define("osparc.study.Utils", { }, this); task.addListener("resultReceived", e => { const studyData = e.getData(); - resolve(studyData["uuid"]); + resolve(studyData); }, this); task.addListener("pollingError", e => { const err = e.getData(); From 0781e6363311f069ff5931041a4a9172f93f6c47 Mon Sep 17 00:00:00 2001 From: Sylvain <35365065+sanderegg@users.noreply.github.com> Date: Thu, 14 Nov 2024 12:16:35 +0100 Subject: [PATCH 20/22] =?UTF-8?q?=F0=9F=8E=A8Computation=20backend:=20refa?= =?UTF-8?q?ctor=20director-v2=20internal=20computational=20scheduler=20to?= =?UTF-8?q?=20be=20less=20resource=20heavy=20(#6696)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../src/servicelib/redis_utils.py | 36 ++- .../service-library/tests/test_redis_utils.py | 127 ++++++-- .../api/dependencies/scheduler.py | 4 +- .../modules/comp_scheduler/__init__.py | 34 +- .../modules/comp_scheduler/_base_scheduler.py | 291 +++++++++++------- .../modules/comp_scheduler/_dask_scheduler.py | 2 +- .../comp_scheduler/_scheduler_factory.py | 52 ++-- .../modules/comp_scheduler/_task.py | 51 --- .../modules/redis.py | 6 + services/director-v2/tests/unit/_helpers.py | 11 - ...t_modules_comp_scheduler_dask_scheduler.py | 150 +++++---- 11 files changed, 481 insertions(+), 283 deletions(-) delete mode 100644 services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_task.py diff --git a/packages/service-library/src/servicelib/redis_utils.py b/packages/service-library/src/servicelib/redis_utils.py index 10f32ae5944..559349cbb0d 100644 --- a/packages/service-library/src/servicelib/redis_utils.py +++ b/packages/service-library/src/servicelib/redis_utils.py @@ -3,7 +3,7 @@ import logging from collections.abc import Awaitable, Callable from datetime import timedelta -from typing import Any +from typing import Any, ParamSpec, TypeVar import arrow @@ -12,10 +12,16 @@ _logger = logging.getLogger(__file__) +P = ParamSpec("P") +R = TypeVar("R") + def exclusive( - redis: RedisClientSDK, *, lock_key: str, lock_value: bytes | str | None = None -): + redis: RedisClientSDK | Callable[..., RedisClientSDK], + *, + lock_key: str | Callable[..., str], + lock_value: bytes | str | None = None, +) -> Callable[[Callable[P, Awaitable[R]]], Callable[P, Awaitable[R]]]: """ Define a method to run exclusively across processes by leveraging a Redis Lock. @@ -24,12 +30,30 @@ def exclusive( redis: the redis client SDK lock_key: a string as the name of the lock (good practice: app_name:lock_name) lock_value: some additional data that can be retrieved by another client + + Raises: + - ValueError if used incorrectly + - CouldNotAcquireLockError if the lock could not be acquired """ - def decorator(func): + if not lock_key: + msg = "lock_key cannot be empty string!" 
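+        # NOTE: this guard runs at decoration time, so an empty lock_key fails
+        # fast, before any coroutine is scheduled. A hypothetical use of the
+        # new callable forms (names invented for illustration):
+        #
+        #   @exclusive(get_redis, lock_key=lambda *, user_id: f"app:{user_id}")
+        #   async def do_work(*, user_id: int) -> None: ...
+        #
+        # where get_redis(*args, **kwargs) returns the RedisClientSDK to use.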
+ raise ValueError(msg) + + def decorator(func: Callable[P, Awaitable[R]]) -> Callable[P, Awaitable[R]]: @functools.wraps(func) - async def wrapper(*args, **kwargs): - async with redis.lock_context(lock_key=lock_key, lock_value=lock_value): + async def wrapper(*args: P.args, **kwargs: P.kwargs) -> R: + redis_lock_key = ( + lock_key(*args, **kwargs) if callable(lock_key) else lock_key + ) + assert isinstance(redis_lock_key, str) # nosec + + redis_client = redis(*args, **kwargs) if callable(redis) else redis + assert isinstance(redis_client, RedisClientSDK) # nosec + + async with redis_client.lock_context( + lock_key=redis_lock_key, lock_value=lock_value + ): return await func(*args, **kwargs) return wrapper diff --git a/packages/service-library/tests/test_redis_utils.py b/packages/service-library/tests/test_redis_utils.py index f897fc7c399..26f749cd894 100644 --- a/packages/service-library/tests/test_redis_utils.py +++ b/packages/service-library/tests/test_redis_utils.py @@ -5,6 +5,7 @@ from contextlib import AbstractAsyncContextManager from datetime import timedelta from itertools import chain +from typing import Awaitable from unittest.mock import Mock import arrow @@ -32,39 +33,117 @@ async def _is_locked(redis_client_sdk: RedisClientSDK, lock_name: str) -> bool: @pytest.fixture def lock_name(faker: Faker) -> str: - return faker.uuid4() # type: ignore + return faker.pystr() + + +def _exclusive_sleeping_task( + redis_client_sdk: RedisClientSDK | Callable[..., RedisClientSDK], + lock_name: str | Callable[..., str], + sleep_duration: float, +) -> Callable[..., Awaitable[float]]: + @exclusive(redis_client_sdk, lock_key=lock_name) + async def _() -> float: + resolved_client = ( + redis_client_sdk() if callable(redis_client_sdk) else redis_client_sdk + ) + resolved_lock_name = lock_name() if callable(lock_name) else lock_name + assert await _is_locked(resolved_client, resolved_lock_name) + await asyncio.sleep(sleep_duration) + assert await _is_locked(resolved_client, resolved_lock_name) + return sleep_duration + + return _ + + +@pytest.fixture +def sleep_duration(faker: Faker) -> float: + return faker.pyfloat(positive=True, min_value=0.2, max_value=0.8) -async def _contained_client( +async def test_exclusive_decorator( get_redis_client_sdk: Callable[ [RedisDatabase], AbstractAsyncContextManager[RedisClientSDK] ], lock_name: str, - task_duration: float, -) -> None: - async with get_redis_client_sdk(RedisDatabase.RESOURCES) as redis_client_sdk: - assert not await _is_locked(redis_client_sdk, lock_name) - - @exclusive(redis_client_sdk, lock_key=lock_name) - async def _some_task() -> None: - assert await _is_locked(redis_client_sdk, lock_name) - await asyncio.sleep(task_duration) - assert await _is_locked(redis_client_sdk, lock_name) - - await _some_task() + sleep_duration: float, +): - assert not await _is_locked(redis_client_sdk, lock_name) + async with get_redis_client_sdk(RedisDatabase.RESOURCES) as redis_client: + for _ in range(3): + assert ( + await _exclusive_sleeping_task( + redis_client, lock_name, sleep_duration + )() + == sleep_duration + ) -@pytest.mark.parametrize("task_duration", [0.1, 1, 2]) -async def test_exclusive_sequentially( +async def test_exclusive_decorator_with_key_builder( get_redis_client_sdk: Callable[ [RedisDatabase], AbstractAsyncContextManager[RedisClientSDK] ], lock_name: str, - task_duration: float, + sleep_duration: float, ): - await _contained_client(get_redis_client_sdk, lock_name, task_duration) + def _get_lock_name(*args, **kwargs) -> str: + assert args is 
not None + assert kwargs is not None + return lock_name + + async with get_redis_client_sdk(RedisDatabase.RESOURCES) as redis_client: + for _ in range(3): + assert ( + await _exclusive_sleeping_task( + redis_client, _get_lock_name, sleep_duration + )() + == sleep_duration + ) + + +async def test_exclusive_decorator_with_client_builder( + get_redis_client_sdk: Callable[ + [RedisDatabase], AbstractAsyncContextManager[RedisClientSDK] + ], + lock_name: str, + sleep_duration: float, +): + async with get_redis_client_sdk(RedisDatabase.RESOURCES) as redis_client: + + def _get_redis_client_builder(*args, **kwargs) -> RedisClientSDK: + assert args is not None + assert kwargs is not None + return redis_client + + for _ in range(3): + assert ( + await _exclusive_sleeping_task( + _get_redis_client_builder, lock_name, sleep_duration + )() + == sleep_duration + ) + + +async def _acquire_lock_and_exclusively_sleep( + get_redis_client_sdk: Callable[ + [RedisDatabase], AbstractAsyncContextManager[RedisClientSDK] + ], + lock_name: str | Callable[..., str], + sleep_duration: float, +) -> None: + async with get_redis_client_sdk(RedisDatabase.RESOURCES) as redis_client_sdk: + redis_lock_name = lock_name() if callable(lock_name) else lock_name + assert not await _is_locked(redis_client_sdk, redis_lock_name) + + @exclusive(redis_client_sdk, lock_key=lock_name) + async def _() -> float: + assert await _is_locked(redis_client_sdk, redis_lock_name) + await asyncio.sleep(sleep_duration) + assert await _is_locked(redis_client_sdk, redis_lock_name) + return sleep_duration + + assert await _() == sleep_duration + + assert not await _is_locked(redis_client_sdk, redis_lock_name) async def test_exclusive_parallel_lock_is_released_and_reacquired( @@ -76,17 +155,19 @@ async def test_exclusive_parallel_lock_is_released_and_reacquired( parallel_tasks = 10 results = await logged_gather( *[ - _contained_client(get_redis_client_sdk, lock_name, task_duration=0.1) + _acquire_lock_and_exclusively_sleep( + get_redis_client_sdk, lock_name, sleep_duration=0.1 + ) for _ in range(parallel_tasks) ], - reraise=False + reraise=False, ) assert results.count(None) == 1 assert [isinstance(x, CouldNotAcquireLockError) for x in results].count( True ) == parallel_tasks - 1 - # check lock is being released + # check lock is released async with get_redis_client_sdk(RedisDatabase.RESOURCES) as redis_client_sdk: assert not await _is_locked(redis_client_sdk, lock_name) @@ -168,7 +249,7 @@ async def test_start_exclusive_periodic_task_parallel_all_finish( _assert_task_completes_once(get_redis_client_sdk, stop_after=60) for _ in range(parallel_tasks) ], - reraise=False + reraise=False, ) # check no error occurred diff --git a/services/director-v2/src/simcore_service_director_v2/api/dependencies/scheduler.py b/services/director-v2/src/simcore_service_director_v2/api/dependencies/scheduler.py index a0903608789..aa01af1f34b 100644 --- a/services/director-v2/src/simcore_service_director_v2/api/dependencies/scheduler.py +++ b/services/director-v2/src/simcore_service_director_v2/api/dependencies/scheduler.py @@ -1,3 +1,5 @@ +from typing import Annotated + from fastapi import Depends, FastAPI, Request from ...core.settings import ComputationalBackendSettings @@ -11,7 +13,7 @@ def get_scheduler(request: Request) -> BaseCompScheduler: def get_scheduler_settings( - app: FastAPI = Depends(get_app), + app: Annotated[FastAPI, Depends(get_app)] ) -> ComputationalBackendSettings: settings: ComputationalBackendSettings = ( 
app.state.settings.DIRECTOR_V2_COMPUTATIONAL_BACKEND diff --git a/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/__init__.py b/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/__init__.py index 1eb6c3dab10..d06c37457b7 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/__init__.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/__init__.py @@ -1,7 +1,38 @@ +import logging +from collections.abc import Callable, Coroutine +from typing import Any, cast + from fastapi import FastAPI +from servicelib.logging_utils import log_context +from . import _scheduler_factory from ._base_scheduler import BaseCompScheduler -from ._task import on_app_shutdown, on_app_startup + +_logger = logging.getLogger(__name__) + + +def on_app_startup(app: FastAPI) -> Callable[[], Coroutine[Any, Any, None]]: + async def start_scheduler() -> None: + with log_context( + _logger, level=logging.INFO, msg="starting computational scheduler" + ): + app.state.scheduler = scheduler = await _scheduler_factory.create_from_db( + app + ) + scheduler.recover_scheduling() + + return start_scheduler + + +def on_app_shutdown(app: FastAPI) -> Callable[[], Coroutine[Any, Any, None]]: + async def stop_scheduler() -> None: + await get_scheduler(app).shutdown() + + return stop_scheduler + + +def get_scheduler(app: FastAPI) -> BaseCompScheduler: + return cast(BaseCompScheduler, app.state.scheduler) def setup(app: FastAPI): @@ -12,4 +43,5 @@ def setup(app: FastAPI): __all__: tuple[str, ...] = ( "setup", "BaseCompScheduler", + "get_scheduler", ) diff --git a/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_base_scheduler.py b/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_base_scheduler.py index cae539596d4..097afd95288 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_base_scheduler.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_base_scheduler.py @@ -12,7 +12,9 @@ """ import asyncio +import contextlib import datetime +import functools import logging from abc import ABC, abstractmethod from dataclasses import dataclass, field @@ -29,9 +31,12 @@ from models_library.users import UserID from networkx.classes.reportviews import InDegreeView from pydantic import PositiveInt +from servicelib.background_task import start_periodic_task, stop_periodic_task from servicelib.common_headers import UNDEFINED_DEFAULT_SIMCORE_USER_AGENT_VALUE +from servicelib.logging_utils import log_context from servicelib.rabbitmq import RabbitMQClient, RabbitMQRPCClient -from servicelib.utils import limited_gather +from servicelib.redis import CouldNotAcquireLockError, RedisClientSDK +from servicelib.redis_utils import exclusive from ...constants import UNDEFINED_STR_METADATA from ...core.errors import ( @@ -76,6 +81,10 @@ _Previous = CompTaskAtDB _Current = CompTaskAtDB _MAX_WAITING_FOR_CLUSTER_TIMEOUT_IN_MIN: Final[int] = 10 +_SCHEDULER_INTERVAL: Final[datetime.timedelta] = datetime.timedelta(seconds=5) +_TASK_NAME_TEMPLATE: Final[ + str +] = "computational-scheduler-{user_id}:{project_id}:{iteration}" @dataclass(frozen=True, slots=True) @@ -134,6 +143,12 @@ class ScheduledPipelineParams: mark_for_cancellation: datetime.datetime | None use_on_demand_clusters: bool + scheduler_task: asyncio.Task | None = None + scheduler_waker: asyncio.Event = field(default_factory=asyncio.Event) + + def wake_up(self) -> None: + 
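+        # called by the dask backend callback (and by stop_pipeline) to trigger
+        # an immediate scheduling round instead of waiting for the next
+        # periodic tick of this pipeline's task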
self.scheduler_waker.set() + @dataclass class BaseCompScheduler(ABC): @@ -146,6 +161,7 @@ class BaseCompScheduler(ABC): rabbitmq_rpc_client: RabbitMQRPCClient settings: ComputationalBackendSettings service_runtime_heartbeat_interval: datetime.timedelta + redis_client: RedisClientSDK async def run_new_pipeline( self, @@ -178,7 +194,7 @@ async def run_new_pipeline( ) self.scheduled_pipelines[ (user_id, project_id, new_run.iteration) - ] = ScheduledPipelineParams( + ] = pipeline_params = ScheduledPipelineParams( cluster_id=cluster_id, run_metadata=new_run.metadata, use_on_demand_clusters=use_on_demand_clusters, @@ -191,8 +207,8 @@ async def run_new_pipeline( log=f"Project pipeline scheduled using {'on-demand clusters' if use_on_demand_clusters else 'pre-defined clusters'}, starting soon...", log_level=logging.INFO, ) - # ensure the scheduler starts right away - self._wake_up_scheduler_now() + + self._start_scheduling(pipeline_params, user_id, project_id, new_run.iteration) async def stop_pipeline( self, user_id: UserID, project_id: ProjectID, iteration: int | None = None @@ -224,29 +240,76 @@ async def stop_pipeline( (user_id, project_id, selected_iteration) ].mark_for_cancellation = updated_comp_run.cancelled # ensure the scheduler starts right away - self._wake_up_scheduler_now() + self.scheduled_pipelines[ + (user_id, project_id, selected_iteration) + ].wake_up() - async def schedule_all_pipelines(self) -> None: - self.wake_up_event.clear() - # if one of the task throws, the other are NOT cancelled which is what we want - await limited_gather( + def recover_scheduling(self) -> None: + for ( + user_id, + project_id, + iteration, + ), params in self.scheduled_pipelines.items(): + self._start_scheduling(params, user_id, project_id, iteration) + + async def shutdown(self) -> None: + # cancel all current scheduling processes + await asyncio.gather( *( - self._schedule_pipeline( + stop_periodic_task(p.scheduler_task, timeout=3) + for p in self.scheduled_pipelines.values() + if p.scheduler_task + ), + return_exceptions=True, + ) + + def _get_last_iteration(self, user_id: UserID, project_id: ProjectID) -> Iteration: + # if no iteration given find the latest one in the list + possible_iterations = { + it + for u_id, p_id, it in self.scheduled_pipelines + if u_id == user_id and p_id == project_id + } + if not possible_iterations: + msg = f"There are no pipeline scheduled for {user_id}:{project_id}" + raise SchedulerError(msg) + return max(possible_iterations) + + def _start_scheduling( + self, + pipeline_params: ScheduledPipelineParams, + user_id: UserID, + project_id: ProjectID, + iteration: Iteration, + ) -> None: + async def _exclusive_safe_schedule_pipeline( + *, + user_id: UserID, + project_id: ProjectID, + iteration: Iteration, + pipeline_params: ScheduledPipelineParams, + ) -> None: + with contextlib.suppress(CouldNotAcquireLockError): + await self._schedule_pipeline( user_id=user_id, project_id=project_id, iteration=iteration, pipeline_params=pipeline_params, ) - for ( - user_id, - project_id, - iteration, - ), pipeline_params in self.scheduled_pipelines.items() + + pipeline_params.scheduler_task = start_periodic_task( + functools.partial( + _exclusive_safe_schedule_pipeline, + user_id=user_id, + project_id=project_id, + iteration=iteration, + pipeline_params=pipeline_params, + ), + interval=_SCHEDULER_INTERVAL, + task_name=_TASK_NAME_TEMPLATE.format( + user_id=user_id, project_id=project_id, iteration=iteration ), - reraise=False, - log=_logger, - limit=40, - 
tasks_group_prefix="computational-scheduled-pipeline", + early_wake_up_event=pipeline_params.scheduler_waker, ) async def _get_pipeline_dag(self, project_id: ProjectID) -> nx.DiGraph: @@ -610,6 +673,22 @@ async def _process_completed_tasks( ) -> None: ... + @staticmethod + def _build_exclusive_lock_key(*args, **kwargs) -> str: + assert args # nosec + return f"{kwargs['user_id']}:{kwargs['project_id']}:{kwargs['iteration']}" + + @staticmethod + def _redis_client_getter(*args, **kwargs) -> RedisClientSDK: + assert kwargs # nosec + zelf = args[0] + assert isinstance(zelf, BaseCompScheduler) # nosec + return zelf.redis_client + + @exclusive( + redis=_redis_client_getter, + lock_key=_build_exclusive_lock_key, + ) async def _schedule_pipeline( self, *, @@ -618,98 +697,99 @@ async def _schedule_pipeline( iteration: PositiveInt, pipeline_params: ScheduledPipelineParams, ) -> None: - _logger.debug( - "checking run of project [%s:%s] for user [%s]", - f"{project_id=}", - f"{iteration=}", - f"{user_id=}", - ) - dag: nx.DiGraph = nx.DiGraph() - try: - dag = await self._get_pipeline_dag(project_id) - # 1. Update our list of tasks with data from backend (state, results) - await self._update_states_from_comp_backend( - user_id, project_id, iteration, dag, pipeline_params=pipeline_params - ) - # 2. Any task following a FAILED task shall be ABORTED - comp_tasks = await self._set_states_following_failed_to_aborted( - project_id, dag - ) - # 3. do we want to stop the pipeline now? - if pipeline_params.mark_for_cancellation: - await self._schedule_tasks_to_stop( - user_id, project_id, comp_tasks, pipeline_params + with log_context( + _logger, + level=logging.INFO, + msg=f"scheduling pipeline {user_id=}:{project_id=}:{iteration=}", + ): + dag: nx.DiGraph = nx.DiGraph() + try: + dag = await self._get_pipeline_dag(project_id) + # 1. Update our list of tasks with data from backend (state, results) + await self._update_states_from_comp_backend( + user_id, project_id, iteration, dag, pipeline_params=pipeline_params ) - else: - # let's get the tasks to schedule then - comp_tasks = await self._schedule_tasks_to_start( - user_id=user_id, - project_id=project_id, - comp_tasks=comp_tasks, - dag=dag, - pipeline_params=pipeline_params, + # 2. Any task following a FAILED task shall be ABORTED + comp_tasks = await self._set_states_following_failed_to_aborted( + project_id, dag + ) + # 3. do we want to stop the pipeline now? + if pipeline_params.mark_for_cancellation: + await self._schedule_tasks_to_stop( + user_id, project_id, comp_tasks, pipeline_params + ) + else: + # let's get the tasks to schedule then + comp_tasks = await self._schedule_tasks_to_start( + user_id=user_id, + project_id=project_id, + comp_tasks=comp_tasks, + dag=dag, + pipeline_params=pipeline_params, + ) + # 4. timeout if waiting for cluster has been there for more than X minutes + comp_tasks = await self._timeout_if_waiting_for_cluster_too_long( + user_id, project_id, comp_tasks + ) + # 5. send a heartbeat + await self._send_running_tasks_heartbeat( + user_id, project_id, iteration, dag ) - # 4. timeout if waiting for cluster has been there for more than X minutes - comp_tasks = await self._timeout_if_waiting_for_cluster_too_long( - user_id, project_id, comp_tasks - ) - # 5. send a heartbeat - await self._send_running_tasks_heartbeat( - user_id, project_id, iteration, dag - ) - # 6. Update the run result - pipeline_result = await self._update_run_result_from_tasks( - user_id, project_id, iteration, comp_tasks - ) + # 6. 
Update the run result + pipeline_result = await self._update_run_result_from_tasks( + user_id, project_id, iteration, comp_tasks + ) - # 7. Are we done scheduling that pipeline? - if not dag.nodes() or pipeline_result in COMPLETED_STATES: - # there is nothing left, the run is completed, we're done here + # 7. Are we done scheduling that pipeline? + if not dag.nodes() or pipeline_result in COMPLETED_STATES: + # there is nothing left, the run is completed, we're done here + self.scheduled_pipelines.pop((user_id, project_id, iteration), None) + _logger.info( + "pipeline %s scheduling completed with result %s", + f"{project_id=}", + f"{pipeline_result=}", + ) + assert pipeline_params.scheduler_task is not None # nosec + pipeline_params.scheduler_task.cancel() + except PipelineNotFoundError: + _logger.warning( + "pipeline %s does not exist in comp_pipeline table, it will be removed from scheduler", + f"{project_id=}", + ) + await self._set_run_result( + user_id, project_id, iteration, RunningState.ABORTED + ) self.scheduled_pipelines.pop((user_id, project_id, iteration), None) - _logger.info( - "pipeline %s scheduling completed with result %s", + except InvalidPipelineError as exc: + _logger.warning( + "pipeline %s appears to be misconfigured, it will be removed from scheduler. Please check pipeline:\n%s", f"{project_id=}", - f"{pipeline_result=}", + exc, ) - except PipelineNotFoundError: - _logger.warning( - "pipeline %s does not exist in comp_pipeline table, it will be removed from scheduler", - f"{project_id=}", - ) - await self._set_run_result( - user_id, project_id, iteration, RunningState.ABORTED - ) - self.scheduled_pipelines.pop((user_id, project_id, iteration), None) - except InvalidPipelineError as exc: - _logger.warning( - "pipeline %s appears to be misconfigured, it will be removed from scheduler. 
Please check pipeline:\n%s", - f"{project_id=}", - exc, - ) - await self._set_run_result( - user_id, project_id, iteration, RunningState.ABORTED - ) - self.scheduled_pipelines.pop((user_id, project_id, iteration), None) - except (DaskClientAcquisisitonError, ClustersKeeperNotAvailableError): - _logger.exception( - "Unexpected error while connecting with computational backend, aborting pipeline" - ) - tasks: dict[NodeIDStr, CompTaskAtDB] = await self._get_pipeline_tasks( - project_id, dag - ) - comp_tasks_repo = CompTasksRepository(self.db_engine) - await comp_tasks_repo.update_project_tasks_state( - project_id, - [t.node_id for t in tasks.values()], - RunningState.FAILED, - ) - await self._set_run_result( - user_id, project_id, iteration, RunningState.FAILED - ) - self.scheduled_pipelines.pop((user_id, project_id, iteration), None) - except ComputationalBackendNotConnectedError: - _logger.exception("Computational backend is not connected!") + await self._set_run_result( + user_id, project_id, iteration, RunningState.ABORTED + ) + self.scheduled_pipelines.pop((user_id, project_id, iteration), None) + except (DaskClientAcquisisitonError, ClustersKeeperNotAvailableError): + _logger.exception( + "Unexpected error while connecting with computational backend, aborting pipeline" + ) + tasks: dict[NodeIDStr, CompTaskAtDB] = await self._get_pipeline_tasks( + project_id, dag + ) + comp_tasks_repo = CompTasksRepository(self.db_engine) + await comp_tasks_repo.update_project_tasks_state( + project_id, + [t.node_id for t in tasks.values()], + RunningState.FAILED, + ) + await self._set_run_result( + user_id, project_id, iteration, RunningState.FAILED + ) + self.scheduled_pipelines.pop((user_id, project_id, iteration), None) + except ComputationalBackendNotConnectedError: + _logger.exception("Computational backend is not connected!") async def _schedule_tasks_to_stop( self, @@ -910,6 +990,3 @@ async def _timeout_if_waiting_for_cluster_too_long( log_level=logging.ERROR, ) return comp_tasks - - def _wake_up_scheduler_now(self) -> None: - self.wake_up_event.set() diff --git a/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_dask_scheduler.py b/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_dask_scheduler.py index 51fb3b1a3fb..512df1b1712 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_dask_scheduler.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_dask_scheduler.py @@ -124,7 +124,7 @@ async def _start_tasks( cluster_id=pipeline_params.cluster_id, tasks={node_id: task.image}, hardware_info=task.hardware_info, - callback=self._wake_up_scheduler_now, + callback=pipeline_params.wake_up, metadata=pipeline_params.run_metadata, ) for node_id, task in scheduled_tasks.items() diff --git a/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_scheduler_factory.py b/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_scheduler_factory.py index f8b648eaf48..4f7812816cc 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_scheduler_factory.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_scheduler_factory.py @@ -2,6 +2,8 @@ from fastapi import FastAPI from models_library.clusters import DEFAULT_CLUSTER_ID +from servicelib.logging_utils import log_context +from settings_library.redis import RedisDatabase from ...core.errors import ConfigurationError from 
...core.settings import AppSettings @@ -10,10 +12,11 @@ from ..dask_clients_pool import DaskClientsPool from ..db.repositories.comp_runs import CompRunsRepository from ..rabbitmq import get_rabbitmq_client, get_rabbitmq_rpc_client +from ..redis import get_redis_client_manager from ._base_scheduler import BaseCompScheduler, ScheduledPipelineParams from ._dask_scheduler import DaskScheduler -logger = logging.getLogger(__name__) +_logger = logging.getLogger(__name__) async def create_from_db(app: FastAPI) -> BaseCompScheduler: @@ -28,29 +31,32 @@ async def create_from_db(app: FastAPI) -> BaseCompScheduler: filter_by_state=SCHEDULED_STATES ) - logger.debug( + _logger.debug( "Following scheduled comp_runs found still to be scheduled: %s", runs if runs else "NONE", ) - logger.info("Creating Dask-based scheduler...") - app_settings: AppSettings = app.state.settings - return DaskScheduler( - settings=app_settings.DIRECTOR_V2_COMPUTATIONAL_BACKEND, - dask_clients_pool=DaskClientsPool.instance(app), - rabbitmq_client=get_rabbitmq_client(app), - rabbitmq_rpc_client=get_rabbitmq_rpc_client(app), - db_engine=db_engine, - scheduled_pipelines={ - (r.user_id, r.project_uuid, r.iteration): ScheduledPipelineParams( - cluster_id=( - r.cluster_id if r.cluster_id is not None else DEFAULT_CLUSTER_ID - ), - run_metadata=r.metadata, - mark_for_cancellation=r.cancelled, - use_on_demand_clusters=r.use_on_demand_clusters, - ) - for r in runs - }, - service_runtime_heartbeat_interval=app_settings.SERVICE_TRACKING_HEARTBEAT, - ) + with log_context( + _logger, logging.INFO, msg="Creating Dask-based computational scheduler" + ): + app_settings: AppSettings = app.state.settings + return DaskScheduler( + settings=app_settings.DIRECTOR_V2_COMPUTATIONAL_BACKEND, + dask_clients_pool=DaskClientsPool.instance(app), + rabbitmq_client=get_rabbitmq_client(app), + rabbitmq_rpc_client=get_rabbitmq_rpc_client(app), + redis_client=get_redis_client_manager(app).client(RedisDatabase.LOCKS), + db_engine=db_engine, + scheduled_pipelines={ + (r.user_id, r.project_uuid, r.iteration): ScheduledPipelineParams( + cluster_id=( + r.cluster_id if r.cluster_id is not None else DEFAULT_CLUSTER_ID + ), + run_metadata=r.metadata, + mark_for_cancellation=r.cancelled, + use_on_demand_clusters=r.use_on_demand_clusters, + ) + for r in runs + }, + service_runtime_heartbeat_interval=app_settings.SERVICE_TRACKING_HEARTBEAT, + ) diff --git a/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_task.py b/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_task.py deleted file mode 100644 index 989b310687c..00000000000 --- a/services/director-v2/src/simcore_service_director_v2/modules/comp_scheduler/_task.py +++ /dev/null @@ -1,51 +0,0 @@ -import datetime -import logging -from collections.abc import Callable, Coroutine -from typing import Any, Final - -from fastapi import FastAPI -from servicelib.background_task import start_periodic_task, stop_periodic_task -from servicelib.logging_utils import log_context -from servicelib.redis import RedisClientsManager -from servicelib.redis_utils import exclusive -from settings_library.redis import RedisDatabase - -from ..._meta import APP_NAME -from . 
import _scheduler_factory - -_logger = logging.getLogger(__name__) - -_COMPUTATIONAL_SCHEDULER_INTERVAL: Final[datetime.timedelta] = datetime.timedelta( - seconds=5 -) -_TASK_NAME: Final[str] = "computational services scheduler" - - -def on_app_startup(app: FastAPI) -> Callable[[], Coroutine[Any, Any, None]]: - async def start_scheduler() -> None: - with log_context( - _logger, level=logging.INFO, msg="starting computational scheduler" - ): - redis_clients_manager: RedisClientsManager = app.state.redis_clients_manager - lock_key = f"{APP_NAME}:computational_scheduler" - app.state.scheduler = scheduler = await _scheduler_factory.create_from_db( - app - ) - app.state.computational_scheduler_task = start_periodic_task( - exclusive( - redis_clients_manager.client(RedisDatabase.LOCKS), - lock_key=lock_key, - )(scheduler.schedule_all_pipelines), - interval=_COMPUTATIONAL_SCHEDULER_INTERVAL, - task_name=_TASK_NAME, - early_wake_up_event=scheduler.wake_up_event, - ) - - return start_scheduler - - -def on_app_shutdown(app: FastAPI) -> Callable[[], Coroutine[Any, Any, None]]: - async def stop_scheduler() -> None: - await stop_periodic_task(app.state.computational_scheduler_task) - - return stop_scheduler diff --git a/services/director-v2/src/simcore_service_director_v2/modules/redis.py b/services/director-v2/src/simcore_service_director_v2/modules/redis.py index e7da01afef7..273061cb188 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/redis.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/redis.py @@ -1,3 +1,5 @@ +from typing import cast + from fastapi import FastAPI from servicelib.redis import RedisClientsManager, RedisManagerDBConfig from settings_library.redis import RedisDatabase @@ -29,3 +31,7 @@ async def on_shutdown() -> None: app.add_event_handler("startup", on_startup) app.add_event_handler("shutdown", on_shutdown) + + +def get_redis_client_manager(app: FastAPI) -> RedisClientsManager: + return cast(RedisClientsManager, app.state.redis_clients_manager) diff --git a/services/director-v2/tests/unit/_helpers.py b/services/director-v2/tests/unit/_helpers.py index 2654c63a3e1..779d6cdd117 100644 --- a/services/director-v2/tests/unit/_helpers.py +++ b/services/director-v2/tests/unit/_helpers.py @@ -1,4 +1,3 @@ -import asyncio from dataclasses import dataclass from typing import Any @@ -11,9 +10,6 @@ from simcore_service_director_v2.models.comp_pipelines import CompPipelineAtDB from simcore_service_director_v2.models.comp_runs import CompRunsAtDB from simcore_service_director_v2.models.comp_tasks import CompTaskAtDB -from simcore_service_director_v2.modules.comp_scheduler._base_scheduler import ( - BaseCompScheduler, -) @dataclass @@ -28,13 +24,6 @@ class RunningProject(PublishedProject): runs: CompRunsAtDB -async def trigger_comp_scheduler(scheduler: BaseCompScheduler) -> None: - # trigger the scheduler - scheduler._wake_up_scheduler_now() # pylint: disable=protected-access # noqa: SLF001 - # let the scheduler be actually triggered - await asyncio.sleep(1) - - async def set_comp_task_state( aiopg_engine: aiopg.sa.engine.Engine, node_id: str, state: StateType ) -> None: diff --git a/services/director-v2/tests/unit/with_dbs/test_modules_comp_scheduler_dask_scheduler.py b/services/director-v2/tests/unit/with_dbs/test_modules_comp_scheduler_dask_scheduler.py index f9e5ff33c4b..1df1ae09d39 100644 --- a/services/director-v2/tests/unit/with_dbs/test_modules_comp_scheduler_dask_scheduler.py +++ 
b/services/director-v2/tests/unit/with_dbs/test_modules_comp_scheduler_dask_scheduler.py @@ -45,6 +45,7 @@ from pytest_mock.plugin import MockerFixture from pytest_simcore.helpers.typing_env import EnvVarsDict from servicelib.rabbitmq import RabbitMQClient +from servicelib.redis import CouldNotAcquireLockError from settings_library.rabbit import RabbitSettings from settings_library.redis import RedisSettings from simcore_postgres_database.models.comp_runs import comp_runs @@ -66,8 +67,12 @@ from simcore_service_director_v2.models.comp_runs import CompRunsAtDB, RunMetadataDict from simcore_service_director_v2.models.comp_tasks import CompTaskAtDB, Image from simcore_service_director_v2.models.dask_subsystem import DaskClientTaskState -from simcore_service_director_v2.modules.comp_scheduler._base_scheduler import ( +from simcore_service_director_v2.modules.comp_scheduler import ( BaseCompScheduler, + get_scheduler, +) +from simcore_service_director_v2.modules.comp_scheduler._base_scheduler import ( + ScheduledPipelineParams, ) from simcore_service_director_v2.modules.comp_scheduler._dask_scheduler import ( DaskScheduler, @@ -155,8 +160,38 @@ async def _assert_comp_tasks_db( ), f"{expected_progress=}, found: {[t.progress for t in tasks]}" -async def run_comp_scheduler(scheduler: BaseCompScheduler) -> None: - await scheduler.schedule_all_pipelines() +async def schedule_all_pipelines(scheduler: BaseCompScheduler) -> None: + # NOTE: we take a copy of the pipelines, as this could change quickly if there are + # misconfigured pipelines that would be removed from the scheduler + # NOTE: we simulate multiple dv-2 replicas by running several times + # the same pipeline scheduling + local_pipelines = deepcopy(scheduler.scheduled_pipelines) + results = await asyncio.gather( + *( + scheduler._schedule_pipeline( # noqa: SLF001 + user_id=user_id, + project_id=project_id, + iteration=iteration, + pipeline_params=params, + ) + for _ in range(3) + for ( + user_id, + project_id, + iteration, + ), params in local_pipelines.items() + ), + return_exceptions=True, + ) + # we should have exceptions 2/3 of the time + could_not_acquire_lock_count = sum( + isinstance(r, CouldNotAcquireLockError) for r in results + ) + total_results_count = len(results) + + # Check if 2/3 of the results are CouldNotAcquireLockError + # checks that scheduling is done exclusively + assert could_not_acquire_lock_count == (2 / 3) * total_results_count @pytest.fixture @@ -185,11 +220,11 @@ def minimal_dask_scheduler_config( def scheduler( minimal_dask_scheduler_config: None, aiopg_engine: aiopg.sa.engine.Engine, - # dask_spec_local_cluster: SpecCluster, minimal_app: FastAPI, ) -> BaseCompScheduler: - assert minimal_app.state.scheduler is not None - return minimal_app.state.scheduler + scheduler = get_scheduler(minimal_app) + assert scheduler is not None + return scheduler @pytest.fixture @@ -220,16 +255,21 @@ def mocked_clean_task_output_fct(mocker: MockerFixture) -> mock.MagicMock: @pytest.fixture -def with_disabled_scheduler_task(mocker: MockerFixture) -> None: +def with_disabled_auto_scheduling(mocker: MockerFixture) -> mock.MagicMock: """disables the scheduler task, note that it needs to be triggered manually then""" - mocker.patch( - "simcore_service_director_v2.modules.comp_scheduler._task.start_periodic_task", - autospec=True, - ) - mocker.patch( - "simcore_service_director_v2.modules.comp_scheduler._task.stop_periodic_task", + def _fake_starter( + self: BaseCompScheduler, + pipeline_params: ScheduledPipelineParams, + *args, + 
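+        # any further arguments of the real _start_scheduling signature are
+        # accepted and ignored by this fake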
**kwargs, + ) -> None: + pipeline_params.scheduler_task = mocker.MagicMock() + + return mocker.patch( + "simcore_service_director_v2.modules.comp_scheduler._base_scheduler.BaseCompScheduler._start_scheduling", autospec=True, + side_effect=_fake_starter, ) @@ -258,7 +298,7 @@ async def test_scheduler_gracefully_starts_and_stops( minimal_app: FastAPI, ): # check it started correctly - assert minimal_app.state.computational_scheduler_task is not None + assert get_scheduler(minimal_app) is not None @pytest.mark.parametrize( @@ -287,7 +327,7 @@ def test_scheduler_raises_exception_for_missing_dependencies( async def test_empty_pipeline_is_not_scheduled( - with_disabled_scheduler_task: None, + with_disabled_auto_scheduling: None, scheduler: BaseCompScheduler, registered_user: Callable[..., dict[str, Any]], project: Callable[..., Awaitable[ProjectAtDB]], @@ -319,9 +359,6 @@ async def test_empty_pipeline_is_not_scheduled( use_on_demand_clusters=False, ) assert len(scheduler.scheduled_pipelines) == 0 - assert ( - scheduler.wake_up_event.is_set() is False - ), "the scheduler was woken up on an empty pipeline!" # check the database is empty async with aiopg_engine.acquire() as conn: result = await conn.scalar( @@ -334,7 +371,7 @@ async def test_empty_pipeline_is_not_scheduled( async def test_misconfigured_pipeline_is_not_scheduled( - with_disabled_scheduler_task: None, + with_disabled_auto_scheduling: None, scheduler: BaseCompScheduler, registered_user: Callable[..., dict[str, Any]], project: Callable[..., Awaitable[ProjectAtDB]], @@ -361,9 +398,6 @@ async def test_misconfigured_pipeline_is_not_scheduled( use_on_demand_clusters=False, ) assert len(scheduler.scheduled_pipelines) == 1 - assert ( - scheduler.wake_up_event.is_set() is True - ), "the scheduler was NOT woken up on the scheduled pipeline!" for (u_id, p_id, it), params in scheduler.scheduled_pipelines.items(): assert u_id == user["id"] assert p_id == sleepers_project.uuid @@ -380,7 +414,7 @@ async def test_misconfigured_pipeline_is_not_scheduled( run_entry = CompRunsAtDB.parse_obj(await result.first()) assert run_entry.result == RunningState.PUBLISHED # let the scheduler kick in - await run_comp_scheduler(scheduler) + await schedule_all_pipelines(scheduler) # check the scheduled pipelines is again empty since it's misconfigured assert len(scheduler.scheduled_pipelines) == 0 # check the database entry is correctly updated @@ -412,9 +446,6 @@ async def _assert_start_pipeline( use_on_demand_clusters=False, ) assert len(scheduler.scheduled_pipelines) == 1, "the pipeline is not scheduled!" - assert ( - scheduler.wake_up_event.is_set() is True - ), "the scheduler was NOT woken up on the scheduled pipeline!" 
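# An aside on the exclusivity contract exercised by schedule_all_pipelines above:
# each _schedule_pipeline call is wrapped in an exclusive redis lock, so when the
# helper fires the same pipeline scheduling three times concurrently, two of the
# three calls must fail with CouldNotAcquireLockError. A minimal, self-contained
# sketch of that contract, using a plain asyncio.Lock as a stand-in for the real
# servicelib redis client (names below are illustrative only):
import asyncio


class CouldNotAcquireLockErrorSketch(Exception):
    """Stand-in for servicelib.redis.CouldNotAcquireLockError."""


def exclusive_sketch(lock: asyncio.Lock):
    """Fail fast instead of waiting when the lock is already held."""

    def _decorator(func):
        async def _wrapper(*args, **kwargs):
            if lock.locked():
                raise CouldNotAcquireLockErrorSketch
            async with lock:
                return await func(*args, **kwargs)

        return _wrapper

    return _decorator


async def _demo_exclusive_scheduling() -> None:
    lock = asyncio.Lock()

    @exclusive_sketch(lock)
    async def schedule_pipeline() -> str:
        await asyncio.sleep(0.1)  # simulate one scheduling pass holding the lock
        return "scheduled"

    results = await asyncio.gather(
        *(schedule_pipeline() for _ in range(3)), return_exceptions=True
    )
    # exactly one concurrent caller wins the lock; the 2/3 failure ratio is what
    # schedule_all_pipelines asserts to prove scheduling runs exclusively
    assert sum(isinstance(r, CouldNotAcquireLockErrorSketch) for r in results) == 2


asyncio.run(_demo_exclusive_scheduling())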
for (u_id, p_id, it), params in scheduler.scheduled_pipelines.items(): assert u_id == published_project.project.prj_owner assert p_id == published_project.project.uuid @@ -434,7 +465,7 @@ async def _assert_start_pipeline( return exp_published_tasks -async def _assert_schedule_pipeline_PENDING( +async def _assert_schedule_pipeline_PENDING( # noqa: N802 aiopg_engine, published_project: PublishedProject, published_tasks: list[CompTaskAtDB], @@ -452,7 +483,7 @@ async def _return_tasks_pending(job_ids: list[str]) -> list[DaskClientTaskState] return [DaskClientTaskState.PENDING for job_id in job_ids] mocked_dask_client.get_tasks_status.side_effect = _return_tasks_pending - await run_comp_scheduler(scheduler) + await schedule_all_pipelines(scheduler) _assert_dask_client_correctly_initialized(mocked_dask_client, scheduler) await _assert_comp_run_db(aiopg_engine, published_project, RunningState.PUBLISHED) await _assert_comp_tasks_db( @@ -471,6 +502,7 @@ async def _return_tasks_pending(job_ids: list[str]) -> list[DaskClientTaskState] expected_progress=None, # since we bypass the API entrypoint this is correct ) # tasks were send to the backend + assert published_project.project.prj_owner is not None mocked_dask_client.send_computation_tasks.assert_has_calls( calls=[ mock.call( @@ -478,7 +510,7 @@ async def _return_tasks_pending(job_ids: list[str]) -> list[DaskClientTaskState] project_id=published_project.project.uuid, cluster_id=DEFAULT_CLUSTER_ID, tasks={f"{p.node_id}": p.image}, - callback=scheduler._wake_up_scheduler_now, # noqa: SLF001 + callback=mock.ANY, metadata=mock.ANY, hardware_info=mock.ANY, ) @@ -490,7 +522,7 @@ async def _return_tasks_pending(job_ids: list[str]) -> list[DaskClientTaskState] mocked_dask_client.get_tasks_status.assert_not_called() mocked_dask_client.get_task_result.assert_not_called() # there is a second run of the scheduler to move comp_runs to pending, the rest does not change - await run_comp_scheduler(scheduler) + await schedule_all_pipelines(scheduler) await _assert_comp_run_db(aiopg_engine, published_project, RunningState.PENDING) await _assert_comp_tasks_db( aiopg_engine, @@ -616,7 +648,7 @@ async def _trigger_progress_event( @pytest.mark.acceptance_test() async def test_proper_pipeline_is_scheduled( # noqa: PLR0915 - with_disabled_scheduler_task: None, + with_disabled_auto_scheduling: None, mocked_dask_client: mock.MagicMock, scheduler: BaseCompScheduler, aiopg_engine: aiopg.sa.engine.Engine, @@ -661,7 +693,7 @@ async def _return_1st_task_running(job_ids: list[str]) -> list[DaskClientTaskSta mocked_dask_client.get_tasks_status.side_effect = _return_1st_task_running - await run_comp_scheduler(scheduler) + await schedule_all_pipelines(scheduler) await _assert_comp_run_db(aiopg_engine, published_project, RunningState.PENDING) await _assert_comp_tasks_db( @@ -707,7 +739,7 @@ async def _return_1st_task_running(job_ids: list[str]) -> list[DaskClientTaskSta node_id=exp_started_task.node_id, ) - await run_comp_scheduler(scheduler) + await schedule_all_pipelines(scheduler) # comp_run, the comp_task switch to STARTED await _assert_comp_run_db(aiopg_engine, published_project, RunningState.STARTED) await _assert_comp_tasks_db( @@ -771,7 +803,7 @@ async def _return_random_task_result(job_id) -> TaskOutputData: return TaskOutputData.parse_obj({"out_1": None, "out_2": 45}) mocked_dask_client.get_task_result.side_effect = _return_random_task_result - await run_comp_scheduler(scheduler) + await schedule_all_pipelines(scheduler) await _assert_comp_run_db(aiopg_engine, 
published_project, RunningState.STARTED) await _assert_comp_tasks_db( aiopg_engine, @@ -819,7 +851,7 @@ async def _return_random_task_result(job_id) -> TaskOutputData: tasks={ f"{next_pending_task.node_id}": next_pending_task.image, }, - callback=scheduler._wake_up_scheduler_now, # noqa: SLF001 + callback=mock.ANY, metadata=mock.ANY, hardware_info=mock.ANY, ) @@ -866,7 +898,7 @@ async def _return_2nd_task_running(job_ids: list[str]) -> list[DaskClientTaskSta project_id=exp_started_task.project_id, node_id=exp_started_task.node_id, ) - await run_comp_scheduler(scheduler) + await schedule_all_pipelines(scheduler) await _assert_comp_run_db(aiopg_engine, published_project, RunningState.STARTED) await _assert_comp_tasks_db( aiopg_engine, @@ -908,7 +940,7 @@ async def _return_2nd_task_failed(job_ids: list[str]) -> list[DaskClientTaskStat mocked_dask_client.get_tasks_status.side_effect = _return_2nd_task_failed mocked_dask_client.get_task_result.side_effect = None - await run_comp_scheduler(scheduler) + await schedule_all_pipelines(scheduler) await _assert_comp_run_db(aiopg_engine, published_project, RunningState.STARTED) await _assert_comp_tasks_db( aiopg_engine, @@ -955,7 +987,7 @@ async def _return_3rd_task_success(job_ids: list[str]) -> list[DaskClientTaskSta mocked_dask_client.get_task_result.side_effect = _return_random_task_result # trigger the scheduler, it should switch to FAILED, as we are done - await run_comp_scheduler(scheduler) + await schedule_all_pipelines(scheduler) await _assert_comp_run_db(aiopg_engine, published_project, RunningState.FAILED) await _assert_comp_tasks_db( @@ -991,7 +1023,7 @@ async def _return_3rd_task_success(job_ids: list[str]) -> list[DaskClientTaskSta async def test_task_progress_triggers( - with_disabled_scheduler_task: None, + with_disabled_auto_scheduling: None, mocked_dask_client: mock.MagicMock, scheduler: BaseCompScheduler, aiopg_engine: aiopg.sa.engine.Engine, @@ -1054,7 +1086,7 @@ async def test_task_progress_triggers( ], ) async def test_handling_of_disconnected_dask_scheduler( - with_disabled_scheduler_task: None, + with_disabled_auto_scheduling: None, mocked_dask_client: mock.MagicMock, scheduler: BaseCompScheduler, aiopg_engine: aiopg.sa.engine.Engine, @@ -1098,7 +1130,7 @@ async def test_handling_of_disconnected_dask_scheduler( project_id=published_project.project.uuid, ) # we ensure the scheduler was run - await run_comp_scheduler(scheduler) + await schedule_all_pipelines(scheduler) # after this step the tasks are marked as ABORTED await _assert_comp_tasks_db( aiopg_engine, @@ -1112,7 +1144,7 @@ async def test_handling_of_disconnected_dask_scheduler( expected_progress=1, ) # then we have another scheduler run - await run_comp_scheduler(scheduler) + await schedule_all_pipelines(scheduler) # now the run should be ABORTED await _assert_comp_run_db(aiopg_engine, published_project, RunningState.ABORTED) @@ -1197,7 +1229,7 @@ class RebootState: ], ) async def test_handling_scheduling_after_reboot( - with_disabled_scheduler_task: None, + with_disabled_auto_scheduling: None, mocked_dask_client: mock.MagicMock, aiopg_engine: aiopg.sa.engine.Engine, running_project: RunningProject, @@ -1222,7 +1254,7 @@ async def mocked_get_task_result(_job_id: str) -> TaskOutputData: mocked_dask_client.get_task_result.side_effect = mocked_get_task_result - await run_comp_scheduler(scheduler) + await schedule_all_pipelines(scheduler) # the status will be called once for all RUNNING tasks mocked_dask_client.get_tasks_status.assert_called_once() if 
reboot_state.expected_run_state in COMPLETED_STATES: @@ -1279,7 +1311,7 @@ async def mocked_get_task_result(_job_id: str) -> TaskOutputData: async def test_handling_cancellation_of_jobs_after_reboot( - with_disabled_scheduler_task: None, + with_disabled_auto_scheduling: None, mocked_dask_client: mock.MagicMock, aiopg_engine: aiopg.sa.engine.Engine, running_project_mark_for_cancellation: RunningProject, @@ -1309,7 +1341,7 @@ async def mocked_get_tasks_status(job_ids: list[str]) -> list[DaskClientTaskStat mocked_dask_client.get_tasks_status.side_effect = mocked_get_tasks_status # Running the scheduler, should actually cancel the run now - await run_comp_scheduler(scheduler) + await schedule_all_pipelines(scheduler) mocked_dask_client.abort_computation_task.assert_called() assert mocked_dask_client.abort_computation_task.call_count == len( [ @@ -1346,7 +1378,7 @@ async def _return_random_task_result(job_id) -> TaskOutputData: raise TaskCancelledError mocked_dask_client.get_task_result.side_effect = _return_random_task_result - await run_comp_scheduler(scheduler) + await schedule_all_pipelines(scheduler) # now should be stopped await _assert_comp_tasks_db( aiopg_engine, @@ -1373,7 +1405,7 @@ def with_fast_service_heartbeat_s(monkeypatch: pytest.MonkeyPatch) -> int: async def test_running_pipeline_triggers_heartbeat( - with_disabled_scheduler_task: None, + with_disabled_auto_scheduling: None, with_fast_service_heartbeat_s: int, mocked_dask_client: mock.MagicMock, scheduler: BaseCompScheduler, @@ -1420,7 +1452,7 @@ async def _return_1st_task_running(job_ids: list[str]) -> list[DaskClientTaskSta project_id=exp_started_task.project_id, node_id=exp_started_task.node_id, ) - await run_comp_scheduler(scheduler) + await schedule_all_pipelines(scheduler) messages = await _assert_message_received( resource_tracking_rabbit_client_parser, @@ -1432,8 +1464,8 @@ async def _return_1st_task_running(job_ids: list[str]) -> list[DaskClientTaskSta # ------------------------------------------------------------------------------- # 3. wait a bit and run again we should get another heartbeat, but only one! await asyncio.sleep(with_fast_service_heartbeat_s + 1) - await run_comp_scheduler(scheduler) - await run_comp_scheduler(scheduler) + await schedule_all_pipelines(scheduler) + await schedule_all_pipelines(scheduler) messages = await _assert_message_received( resource_tracking_rabbit_client_parser, 1, @@ -1444,8 +1476,8 @@ async def _return_1st_task_running(job_ids: list[str]) -> list[DaskClientTaskSta # ------------------------------------------------------------------------------- # 4. wait a bit and run again we should get another heartbeat, but only one! 
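# The heartbeat assertions in steps 2-4 of this test pin down a simple rate
# limit: however many scheduler passes run back to back, at most one heartbeat
# message is published per elapsed with_fast_service_heartbeat_s interval. A
# minimal sketch of that time gate (an illustrative helper, not the actual
# scheduler API):
def _should_emit_heartbeat(last_emitted_s: float, now_s: float, interval_s: float) -> bool:
    # publish only when a full interval has elapsed since the previous heartbeat
    return (now_s - last_emitted_s) >= interval_s


assert _should_emit_heartbeat(0.0, 0.5, interval_s=1.0) is False  # second run, same interval
assert _should_emit_heartbeat(0.0, 1.5, interval_s=1.0) is True  # run after sleeping past it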
await asyncio.sleep(with_fast_service_heartbeat_s + 1) - await run_comp_scheduler(scheduler) - await run_comp_scheduler(scheduler) + await schedule_all_pipelines(scheduler) + await schedule_all_pipelines(scheduler) messages = await _assert_message_received( resource_tracking_rabbit_client_parser, 1, @@ -1463,7 +1495,7 @@ async def mocked_get_or_create_cluster(mocker: MockerFixture) -> mock.Mock: async def test_pipeline_with_on_demand_cluster_with_not_ready_backend_waits( - with_disabled_scheduler_task: None, + with_disabled_auto_scheduling: None, scheduler: BaseCompScheduler, aiopg_engine: aiopg.sa.engine.Engine, published_project: PublishedProject, @@ -1501,7 +1533,7 @@ async def test_pipeline_with_on_demand_cluster_with_not_ready_backend_waits( published_project.tasks[1], published_project.tasks[3], ] - await run_comp_scheduler(scheduler) + await schedule_all_pipelines(scheduler) mocked_get_or_create_cluster.assert_called() assert mocked_get_or_create_cluster.call_count == 1 mocked_get_or_create_cluster.reset_mock() @@ -1516,7 +1548,7 @@ async def test_pipeline_with_on_demand_cluster_with_not_ready_backend_waits( expected_progress=None, ) # again will trigger the same response - await run_comp_scheduler(scheduler) + await schedule_all_pipelines(scheduler) mocked_get_or_create_cluster.assert_called() assert mocked_get_or_create_cluster.call_count == 1 mocked_get_or_create_cluster.reset_mock() @@ -1537,7 +1569,7 @@ async def test_pipeline_with_on_demand_cluster_with_not_ready_backend_waits( [ClustersKeeperNotAvailableError], ) async def test_pipeline_with_on_demand_cluster_with_no_clusters_keeper_fails( - with_disabled_scheduler_task: None, + with_disabled_auto_scheduling: None, scheduler: BaseCompScheduler, aiopg_engine: aiopg.sa.engine.Engine, published_project: PublishedProject, @@ -1570,7 +1602,7 @@ async def test_pipeline_with_on_demand_cluster_with_no_clusters_keeper_fails( published_project.tasks[1], published_project.tasks[3], ] - await run_comp_scheduler(scheduler) + await schedule_all_pipelines(scheduler) mocked_get_or_create_cluster.assert_called() assert mocked_get_or_create_cluster.call_count == 1 mocked_get_or_create_cluster.reset_mock() @@ -1583,7 +1615,7 @@ async def test_pipeline_with_on_demand_cluster_with_no_clusters_keeper_fails( expected_progress=1.0, ) # again will not re-trigger the call to clusters-keeper - await run_comp_scheduler(scheduler) + await schedule_all_pipelines(scheduler) mocked_get_or_create_cluster.assert_not_called() await _assert_comp_run_db(aiopg_engine, published_project, RunningState.FAILED) await _assert_comp_tasks_db( From 9e6ca9969619c034bc502294376f801cfde67b34 Mon Sep 17 00:00:00 2001 From: Andrei Neagu <5694077+GitHK@users.noreply.github.com> Date: Thu, 14 Nov 2024 14:08:25 +0100 Subject: [PATCH 21/22] =?UTF-8?q?=F0=9F=8E=A8=20Adding=20tracing=20to=20`a?= =?UTF-8?q?gent`=20and=20`dynamic-sidecar`=20(=F0=9F=8F=97=EF=B8=8F=20DEVO?= =?UTF-8?q?PS)=20(#6691)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Andrei Neagu --- .env-devel | 1 + services/agent/src/simcore_service_agent/core/application.py | 4 ++++ services/agent/src/simcore_service_agent/core/settings.py | 5 +++++ services/agent/tests/conftest.py | 1 + .../modules/dynamic_sidecar/docker_service_specs/sidecar.py | 5 +++++ ...t_modules_dynamic_sidecar_docker_service_specs_sidecar.py | 1 + .../test_modules_dynamic_sidecar_docker_service_specs.py | 1 + services/docker-compose-ops.yml | 1 + services/docker-compose.yml | 4 ++++ 
.../src/simcore_service_dynamic_sidecar/core/application.py | 4 ++++ .../src/simcore_service_dynamic_sidecar/core/settings.py | 5 +++++ services/dynamic-sidecar/tests/conftest.py | 1 + 12 files changed, 33 insertions(+) diff --git a/.env-devel b/.env-devel index 8f979751926..52fb6e84bfd 100644 --- a/.env-devel +++ b/.env-devel @@ -17,6 +17,7 @@ AGENT_VOLUMES_CLEANUP_S3_ENDPOINT=http://172.17.0.1:9001 AGENT_VOLUMES_CLEANUP_S3_PROVIDER=MINIO AGENT_VOLUMES_CLEANUP_S3_REGION=us-east-1 AGENT_VOLUMES_CLEANUP_S3_SECRET_KEY=12345678 +AGENT_TRACING={} API_SERVER_DEV_FEATURES_ENABLED=0 API_SERVER_LOGLEVEL=INFO diff --git a/services/agent/src/simcore_service_agent/core/application.py b/services/agent/src/simcore_service_agent/core/application.py index 41c80b07d61..84bc71e24c5 100644 --- a/services/agent/src/simcore_service_agent/core/application.py +++ b/services/agent/src/simcore_service_agent/core/application.py @@ -5,6 +5,7 @@ get_common_oas_options, override_fastapi_openapi_method, ) +from servicelib.fastapi.tracing import setup_tracing from servicelib.logging_utils import config_all_loggers from .._meta import ( @@ -59,6 +60,9 @@ def create_app() -> FastAPI: setup_rest_api(app) setup_rpc_api_routes(app) + if settings.AGENT_TRACING: + setup_tracing(app, settings.AGENT_TRACING, APP_NAME) + async def _on_startup() -> None: print(APP_STARTED_BANNER_MSG, flush=True) # noqa: T201 diff --git a/services/agent/src/simcore_service_agent/core/settings.py b/services/agent/src/simcore_service_agent/core/settings.py index 756bf2cac28..f11350968f4 100644 --- a/services/agent/src/simcore_service_agent/core/settings.py +++ b/services/agent/src/simcore_service_agent/core/settings.py @@ -6,6 +6,7 @@ from settings_library.base import BaseCustomSettings from settings_library.r_clone import S3Provider from settings_library.rabbit import RabbitSettings +from settings_library.tracing import TracingSettings from settings_library.utils_logging import MixinLoggingSettings @@ -77,6 +78,10 @@ class ApplicationSettings(BaseCustomSettings, MixinLoggingSettings): auto_default_from_env=True, description="settings for service/rabbitmq" ) + AGENT_TRACING: TracingSettings | None = Field( + auto_default_from_env=True, description="settings for opentelemetry tracing" + ) + @validator("LOGLEVEL") @classmethod def valid_log_level(cls, value) -> LogLevel: diff --git a/services/agent/tests/conftest.py b/services/agent/tests/conftest.py index 4632ca84102..5fe2cad817e 100644 --- a/services/agent/tests/conftest.py +++ b/services/agent/tests/conftest.py @@ -58,6 +58,7 @@ def mock_environment( "RABBIT_SECURE": "false", "RABBIT_USER": "test", "AGENT_DOCKER_NODE_ID": docker_node_id, + "AGENT_TRACING": "null", }, ) diff --git a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_service_specs/sidecar.py b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_service_specs/sidecar.py index b788e455cf3..44e2ff575e7 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_service_specs/sidecar.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_service_specs/sidecar.py @@ -175,6 +175,11 @@ def _get_environment_variables( "S3_SECRET_KEY": r_clone_settings.R_CLONE_S3.S3_SECRET_KEY, "SC_BOOT_MODE": f"{app_settings.DYNAMIC_SERVICES.DYNAMIC_SIDECAR.DYNAMIC_SIDECAR_SC_BOOT_MODE}", "SSL_CERT_FILE": app_settings.DIRECTOR_V2_SELF_SIGNED_SSL_FILENAME, + "DYNAMIC_SIDECAR_TRACING": ( + 
app_settings.DIRECTOR_V2_TRACING.json() + if app_settings.DIRECTOR_V2_TRACING + else "null" + ), # For background info on this special env-var above, see # - https://stackoverflow.com/questions/31448854/how-to-force-requests-use-the-certificates-on-my-ubuntu-system#comment78596389_37447847 "SIMCORE_HOST_NAME": scheduler_data.service_name, diff --git a/services/director-v2/tests/unit/test_modules_dynamic_sidecar_docker_service_specs_sidecar.py b/services/director-v2/tests/unit/test_modules_dynamic_sidecar_docker_service_specs_sidecar.py index 4a73b3e7210..f4870a140c4 100644 --- a/services/director-v2/tests/unit/test_modules_dynamic_sidecar_docker_service_specs_sidecar.py +++ b/services/director-v2/tests/unit/test_modules_dynamic_sidecar_docker_service_specs_sidecar.py @@ -37,6 +37,7 @@ "DY_SIDECAR_USER_SERVICES_HAVE_INTERNET_ACCESS", "DYNAMIC_SIDECAR_COMPOSE_NAMESPACE", "DYNAMIC_SIDECAR_LOG_LEVEL", + "DYNAMIC_SIDECAR_TRACING", "NODE_PORTS_400_REQUEST_TIMEOUT_ATTEMPTS", "POSTGRES_DB", "POSTGRES_ENDPOINT", diff --git a/services/director-v2/tests/unit/with_dbs/test_modules_dynamic_sidecar_docker_service_specs.py b/services/director-v2/tests/unit/with_dbs/test_modules_dynamic_sidecar_docker_service_specs.py index a05e4cd84da..ab835039262 100644 --- a/services/director-v2/tests/unit/with_dbs/test_modules_dynamic_sidecar_docker_service_specs.py +++ b/services/director-v2/tests/unit/with_dbs/test_modules_dynamic_sidecar_docker_service_specs.py @@ -243,6 +243,7 @@ def expected_dynamic_sidecar_spec( "FORWARD_ENV_DISPLAY": ":0", "NODE_PORTS_400_REQUEST_TIMEOUT_ATTEMPTS": "3", "DYNAMIC_SIDECAR_LOG_LEVEL": "DEBUG", + "DYNAMIC_SIDECAR_TRACING": "null", "DY_DEPLOYMENT_REGISTRY_SETTINGS": ( '{"REGISTRY_AUTH": false, "REGISTRY_PATH": null, ' '"REGISTRY_URL": "foo.bar.com", "REGISTRY_USER": ' diff --git a/services/docker-compose-ops.yml b/services/docker-compose-ops.yml index 9beacf76c34..c80befe2316 100644 --- a/services/docker-compose-ops.yml +++ b/services/docker-compose-ops.yml @@ -111,6 +111,7 @@ services: - "4318:4318" # OTLP HTTP receiver networks: - simcore_default + - interactive_services_subnet environment: TRACING_OPENTELEMETRY_COLLECTOR_BATCH_SIZE: ${TRACING_OPENTELEMETRY_COLLECTOR_BATCH_SIZE} TRACING_OPENTELEMETRY_COLLECTOR_SAMPLING_PERCENTAGE: ${TRACING_OPENTELEMETRY_COLLECTOR_SAMPLING_PERCENTAGE} diff --git a/services/docker-compose.yml b/services/docker-compose.yml index 35dd3782609..2f039977889 100644 --- a/services/docker-compose.yml +++ b/services/docker-compose.yml @@ -1053,6 +1053,10 @@ services: RABBIT_USER: ${RABBIT_USER} RABBIT_SECURE: ${RABBIT_SECURE} + AGENT_TRACING: ${AGENT_TRACING} + TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT: ${TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT} + TRACING_OPENTELEMETRY_COLLECTOR_PORT: ${TRACING_OPENTELEMETRY_COLLECTOR_PORT} + dask-sidecar: image: ${DOCKER_REGISTRY:-itisfoundation}/dask-sidecar:${DOCKER_IMAGE_TAG:-latest} init: true diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/application.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/application.py index ce5f48a8b21..59547f40119 100644 --- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/application.py +++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/application.py @@ -9,6 +9,7 @@ get_common_oas_options, override_fastapi_openapi_method, ) +from servicelib.fastapi.tracing import setup_tracing from servicelib.logging_utils import config_all_loggers from simcore_sdk.node_ports_common.exceptions import 
NodeNotFound @@ -190,6 +191,9 @@ def create_app(): if application_settings.are_prometheus_metrics_enabled: setup_prometheus_metrics(app) + if application_settings.DYNAMIC_SIDECAR_TRACING: + setup_tracing(app, application_settings.DYNAMIC_SIDECAR_TRACING, PROJECT_NAME) + # ERROR HANDLERS ------------ app.add_exception_handler(NodeNotFound, node_not_found_error_handler) app.add_exception_handler(BaseDynamicSidecarError, http_error_handler) diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/settings.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/settings.py index 214d51ad11b..024465913bd 100644 --- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/settings.py +++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/settings.py @@ -23,6 +23,7 @@ from settings_library.resource_usage_tracker import ( DEFAULT_RESOURCE_USAGE_HEARTBEAT_INTERVAL, ) +from settings_library.tracing import TracingSettings from settings_library.utils_logging import MixinLoggingSettings @@ -167,6 +168,10 @@ class ApplicationSettings(BaseCustomSettings, MixinLoggingSettings): SYSTEM_MONITOR_SETTINGS: SystemMonitorSettings = Field(auto_default_from_env=True) + DYNAMIC_SIDECAR_TRACING: TracingSettings | None = Field( + auto_default_from_env=True, description="settings for opentelemetry tracing" + ) + @property def are_prometheus_metrics_enabled(self) -> bool: return self.DY_SIDECAR_CALLBACKS_MAPPING.metrics is not None diff --git a/services/dynamic-sidecar/tests/conftest.py b/services/dynamic-sidecar/tests/conftest.py index 8b4760b26dd..a9ec557c6dc 100644 --- a/services/dynamic-sidecar/tests/conftest.py +++ b/services/dynamic-sidecar/tests/conftest.py @@ -199,6 +199,7 @@ def base_mock_envs( "REGISTRY_SSL": "false", } ), + "DYNAMIC_SIDECAR_TRACING": "null", } From cb74ff72e21e4489470b6288cd5954c4ce98ac6f Mon Sep 17 00:00:00 2001 From: Odei Maiz <33152403+odeimaiz@users.noreply.github.com> Date: Thu, 14 Nov 2024 16:02:17 +0100 Subject: [PATCH 22/22] =?UTF-8?q?=F0=9F=8E=A8=20[Frontend]=20Enh:=20Tag=20?= =?UTF-8?q?management=20(#6720)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../client/source/class/osparc/NewRelease.js | 20 +- .../source/class/osparc/NewUITracker.js | 33 ++-- .../source/class/osparc/dashboard/CardBase.js | 2 +- .../class/osparc/dashboard/Dashboard.js | 3 - .../class/osparc/dashboard/GridButtonItem.js | 2 +- .../class/osparc/dashboard/ListButtonItem.js | 2 +- .../dashboard/ResourceContainerManager.js | 10 +- .../class/osparc/dashboard/ResourceFilter.js | 13 +- .../class/osparc/dashboard/SearchBarFilter.js | 17 +- .../source/class/osparc/data/model/Tag.js | 86 +++++++++ .../source/class/osparc/desktop/MainPage.js | 2 +- .../class/osparc/desktop/MainPageDesktop.js | 2 +- .../desktop/preferences/pages/TagsPage.js | 11 +- .../class/osparc/filter/UserTagsFilter.js | 6 +- .../source/class/osparc/form/tag/TagItem.js | 181 ++++++++++-------- .../class/osparc/form/tag/TagManager.js | 15 +- .../class/osparc/form/tag/TagToggleButton.js | 8 +- .../source/class/osparc/info/StudyUtils.js | 4 +- .../osparc/notification/NotificationUI.js | 12 +- .../notification/NotificationsContainer.js | 7 +- .../source/class/osparc/store/Folders.js | 3 +- .../source/class/osparc/store/Services.js | 6 +- .../client/source/class/osparc/store/Tags.js | 132 +++++++++++++ .../source/class/osparc/ui/basic/Tag.js | 16 +- 24 files changed, 427 insertions(+), 166 deletions(-) create mode 100644 
services/static-webserver/client/source/class/osparc/data/model/Tag.js
 create mode 100644 services/static-webserver/client/source/class/osparc/store/Tags.js
diff --git a/services/static-webserver/client/source/class/osparc/NewRelease.js b/services/static-webserver/client/source/class/osparc/NewRelease.js
index af6c23f34eb..bac9d1efb25 100644
--- a/services/static-webserver/client/source/class/osparc/NewRelease.js
+++ b/services/static-webserver/client/source/class/osparc/NewRelease.js
@@ -44,13 +44,19 @@ qx.Class.define("osparc.NewRelease", {
     /**
      * Compare the latest version provided by the backend with the one loaded in the browser (might be an old cached one)
      */
-    isMyFrontendOld: async function() {
-      const lastUICommit = await osparc.store.AppSummary.getLatestUIFromBE();
-      const thisUICommit = osparc.utils.LibVersions.getVcsRefUI();
-      if (lastUICommit && thisUICommit) {
-        return lastUICommit !== thisUICommit;
-      }
-      return false;
+    isMyFrontendOld: function() {
+      return new Promise((resolve, reject) => {
+        osparc.store.AppSummary.getLatestUIFromBE()
+          .then(lastUICommit => {
+            const thisUICommit = osparc.utils.LibVersions.getVcsRefUI();
+            if (lastUICommit && thisUICommit) {
+              resolve(lastUICommit !== thisUICommit)
+            } else {
+              reject();
+            }
+          })
+          .catch(() => reject());
+      });
     }
   },
 
diff --git a/services/static-webserver/client/source/class/osparc/NewUITracker.js b/services/static-webserver/client/source/class/osparc/NewUITracker.js
index 04a19536128..c85fb3f9390 100644
--- a/services/static-webserver/client/source/class/osparc/NewUITracker.js
+++ b/services/static-webserver/client/source/class/osparc/NewUITracker.js
@@ -27,21 +27,24 @@ qx.Class.define("osparc.NewUITracker", {
     __checkInterval: null,
 
     startTracker: function() {
-      const checkNewUI = async () => {
-        const newReleaseAvailable = await osparc.NewRelease.isMyFrontendOld();
-        if (newReleaseAvailable) {
-          let msg = "";
-          msg += qx.locale.Manager.tr("A new version of the application is now available.");
-          msg += "<br>";
-          msg += qx.locale.Manager.tr("Click the Reload button to get the latest features.");
-          // permanent message
-          const flashMessage = osparc.FlashMessenger.getInstance().logAs(msg, "INFO", 0).set({
-            maxWidth: 500
-          });
-          const reloadButton = osparc.utils.Utils.reloadNoCacheButton();
-          flashMessage.addWidget(reloadButton);
-          this.stopTracker();
-        }
+      const checkNewUI = () => {
+        osparc.NewRelease.isMyFrontendOld()
+          .then(newReleaseAvailable => {
+            if (newReleaseAvailable) {
+              let msg = "";
+              msg += qx.locale.Manager.tr("A new version of the application is now available.");
+              msg += "<br>";
+              msg += qx.locale.Manager.tr("Click the Reload button to get the latest features.");
+              // permanent message
+              const flashMessage = osparc.FlashMessenger.getInstance().logAs(msg, "INFO", 0).set({
+                maxWidth: 500
+              });
+              const reloadButton = osparc.utils.Utils.reloadNoCacheButton();
+              flashMessage.addWidget(reloadButton);
+              this.stopTracker();
+            }
+          })
+          .catch(() => setTimeout(() => checkNewUI(), 5*1000));
       };
       checkNewUI();
       this.__checkInterval = setInterval(checkNewUI, this.self().CHECK_INTERVAL);
diff --git a/services/static-webserver/client/source/class/osparc/dashboard/CardBase.js b/services/static-webserver/client/source/class/osparc/dashboard/CardBase.js
index 8d59dee3728..1b7a8fe6e82 100644
--- a/services/static-webserver/client/source/class/osparc/dashboard/CardBase.js
+++ b/services/static-webserver/client/source/class/osparc/dashboard/CardBase.js
@@ -926,7 +926,7 @@ qx.Class.define("osparc.dashboard.CardBase", {
     },
 
     _filterTags: function(tags) {
-      const checks = this.getTags().map(tag => tag.id);
+      const checks = this.getTags().map(tag => tag.getTagId());
       return this.self().filterTags(checks, tags);
     },
 
diff --git a/services/static-webserver/client/source/class/osparc/dashboard/Dashboard.js b/services/static-webserver/client/source/class/osparc/dashboard/Dashboard.js
index cc714440242..4a1420ade43 100644
--- a/services/static-webserver/client/source/class/osparc/dashboard/Dashboard.js
+++ b/services/static-webserver/client/source/class/osparc/dashboard/Dashboard.js
@@ -181,9 +181,6 @@ qx.Class.define("osparc.dashboard.Dashboard", {
       const store = osparc.store.Store.getInstance();
       preResourcePromises.push(store.getAllGroupsAndMembers());
       preResourcePromises.push(osparc.store.Services.getServicesLatest(false));
-      if (permissions.canDo("study.tag")) {
-        preResourcePromises.push(osparc.data.Resources.get("tags"));
-      }
       Promise.all(preResourcePromises)
         .then(() => {
           [
diff --git a/services/static-webserver/client/source/class/osparc/dashboard/GridButtonItem.js b/services/static-webserver/client/source/class/osparc/dashboard/GridButtonItem.js
index 148a6b114bb..828a0c74ba7 100644
--- a/services/static-webserver/client/source/class/osparc/dashboard/GridButtonItem.js
+++ b/services/static-webserver/client/source/class/osparc/dashboard/GridButtonItem.js
@@ -262,7 +262,7 @@ qx.Class.define("osparc.dashboard.GridButtonItem", {
         tagsContainer.setVisibility(tags.length ?
"visible" : "excluded"); tagsContainer.removeAll(); tags.forEach(tag => { - const tagUI = new osparc.ui.basic.Tag(tag.name, tag.color, "searchBarFilter"); + const tagUI = new osparc.ui.basic.Tag(tag, "searchBarFilter"); tagUI.set({ font: "text-12", toolTipText: this.tr("Click to filter by this Tag") diff --git a/services/static-webserver/client/source/class/osparc/dashboard/ListButtonItem.js b/services/static-webserver/client/source/class/osparc/dashboard/ListButtonItem.js index e89e03a0943..71f59b970df 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/ListButtonItem.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/ListButtonItem.js @@ -237,7 +237,7 @@ qx.Class.define("osparc.dashboard.ListButtonItem", { const tagsContainer = this.getChildControl("tags"); tagsContainer.removeAll(); tags.forEach(tag => { - const tagUI = new osparc.ui.basic.Tag(tag.name, tag.color, "searchBarFilter"); + const tagUI = new osparc.ui.basic.Tag(tag, "searchBarFilter"); tagUI.set({ alignY: "middle", font: "text-12", diff --git a/services/static-webserver/client/source/class/osparc/dashboard/ResourceContainerManager.js b/services/static-webserver/client/source/class/osparc/dashboard/ResourceContainerManager.js index b28b5d89a04..fa99ba050dd 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/ResourceContainerManager.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/ResourceContainerManager.js @@ -208,7 +208,7 @@ qx.Class.define("osparc.dashboard.ResourceContainerManager", { }, __createCard: function(resourceData) { - const tags = resourceData.tags ? osparc.store.Store.getInstance().getTags().filter(tag => resourceData.tags.includes(tag.id)) : []; + const tags = resourceData.tags ? osparc.store.Tags.getInstance().getTags().filter(tag => resourceData.tags.includes(tag.getTagId())) : []; const card = this.getMode() === "grid" ? new osparc.dashboard.GridButtonItem() : new osparc.dashboard.ListButtonItem(); card.set({ appearance: resourceData.type ? `pb-${resourceData.type}` : `pb-${resourceData.resourceType}`, @@ -434,7 +434,7 @@ qx.Class.define("osparc.dashboard.ResourceContainerManager", { }, __groupByTags: function(cards, resourceData) { - const tags = resourceData.tags ? osparc.store.Store.getInstance().getTags().filter(tag => resourceData.tags.includes(tag.id)) : []; + const tags = resourceData.tags ? 
osparc.store.Tags.getInstance().getTags().filter(tag => resourceData.tags.includes(tag.getTagId())) : []; if (tags.length === 0) { let noGroupContainer = this.__getGroupContainer("no-group"); const card = this.__createCard(resourceData); @@ -443,9 +443,11 @@ qx.Class.define("osparc.dashboard.ResourceContainerManager", { cards.push(card); } else { tags.forEach(tag => { - let groupContainer = this.__getGroupContainer(tag.id); + let groupContainer = this.__getGroupContainer(tag.getTagId()); if (groupContainer === null) { - groupContainer = this.__createGroupContainer(tag.id, tag.name, tag.color); + groupContainer = this.__createGroupContainer(tag.getTagId(), tag.getName(), tag.getColor()); + tag.bind("name", groupContainer, "headerLabel"); + tag.bind("color", groupContainer, "headerColor"); groupContainer.setHeaderIcon("@FontAwesome5Solid/tag/24"); this.__groupedContainers.add(groupContainer); this.__groupedContainers.getChildren().sort((a, b) => a.getHeaderLabel().localeCompare(b.getHeaderLabel())); diff --git a/services/static-webserver/client/source/class/osparc/dashboard/ResourceFilter.js b/services/static-webserver/client/source/class/osparc/dashboard/ResourceFilter.js index 142cdab7d3f..0c452e3e33a 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/ResourceFilter.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/ResourceFilter.js @@ -158,16 +158,15 @@ qx.Class.define("osparc.dashboard.ResourceFilter", { const maxTags = 5; this.__tagButtons = []; layout.removeAll(); - osparc.store.Store.getInstance().getTags().forEach((tag, idx) => { - const button = new qx.ui.form.ToggleButton(tag.name, "@FontAwesome5Solid/tag/18"); + osparc.store.Tags.getInstance().getTags().forEach((tag, idx) => { + const button = new qx.ui.form.ToggleButton(null, "@FontAwesome5Solid/tag/18"); + button.id = tag.getTagId(); + tag.bind("name", button, "label"); + tag.bind("color", button.getChildControl("icon"), "textColor"); osparc.utils.Utils.setIdToWidget(button, this.__resourceType + "-tagFilterItem"); - button.id = tag.id; button.set({ appearance: "filter-toggle-button", - value: selectedTagIds.includes(tag.id) - }); - button.getChildControl("icon").set({ - textColor: tag.color + value: selectedTagIds.includes(tag.getTagId()) }); layout.add(button); diff --git a/services/static-webserver/client/source/class/osparc/dashboard/SearchBarFilter.js b/services/static-webserver/client/source/class/osparc/dashboard/SearchBarFilter.js index b836a93ef44..5b376a6b404 100644 --- a/services/static-webserver/client/source/class/osparc/dashboard/SearchBarFilter.js +++ b/services/static-webserver/client/source/class/osparc/dashboard/SearchBarFilter.js @@ -208,14 +208,14 @@ qx.Class.define("osparc.dashboard.SearchBarFilter", { }, __addTags: function(menuButton) { - const tags = osparc.store.Store.getInstance().getTags(); + const tags = osparc.store.Tags.getInstance().getTags(); menuButton.setVisibility(tags.length ? 
"visible" : "excluded"); if (tags.length) { const tagsMenu = new qx.ui.menu.Menu(); osparc.utils.Utils.setIdToWidget(tagsMenu, "searchBarFilter-tags-menu"); tags.forEach(tag => { - const tagButton = new qx.ui.menu.Button(tag.name, "@FontAwesome5Solid/tag/12"); - tagButton.getChildControl("icon").setTextColor(tag.color); + const tagButton = new qx.ui.menu.Button(tag.getName(), "@FontAwesome5Solid/tag/12"); + tagButton.getChildControl("icon").setTextColor(tag.getColor()); tagsMenu.add(tagButton); tagButton.addListener("execute", () => this.addTagActiveFilter(tag), this); }); @@ -271,16 +271,17 @@ qx.Class.define("osparc.dashboard.SearchBarFilter", { }, addTagActiveFilter: function(tag) { - this.__addChip("tag", tag.id, tag.name); + this.__addChip("tag", tag.getTagId(), tag.getName()); }, setTagsActiveFilter: function(tagIds) { - const tags = osparc.store.Store.getInstance().getTags(); + const tags = osparc.store.Tags.getInstance().getTags(); tags.forEach(tag => { - if (tagIds.includes(tag.id)) { - this.__addChip("tag", tag.id, tag.name); + const tagId = tag.getTagId(); + if (tagIds.includes(tagId)) { + this.__addChip("tag", tagId, tag.getName()); } else { - this.__removeChip("tag", tag.id, tag.name); + this.__removeChip("tag", tagId, tag.getName()); } }); }, diff --git a/services/static-webserver/client/source/class/osparc/data/model/Tag.js b/services/static-webserver/client/source/class/osparc/data/model/Tag.js new file mode 100644 index 00000000000..fc7e00a5fcc --- /dev/null +++ b/services/static-webserver/client/source/class/osparc/data/model/Tag.js @@ -0,0 +1,86 @@ +/* ************************************************************************ + + osparc - the simcore frontend + + https://osparc.io + + Copyright: + 2024 IT'IS Foundation, https://itis.swiss + + License: + MIT: https://opensource.org/licenses/MIT + + Authors: + * Odei Maiz (odeimaiz) + +************************************************************************ */ + +/** + * Class that stores Tag data. 
+ */ + +qx.Class.define("osparc.data.model.Tag", { + extend: qx.core.Object, + + /** + * @param tagData {Object} Object containing the serialized Tag Data + */ + construct: function(tagData) { + this.base(arguments); + + this.set({ + tagId: tagData.id, + name: tagData.name, + description: tagData.description, + color: tagData.color, + accessRights: tagData.accessRights, + }); + }, + + properties: { + tagId: { + check: "Number", + nullable: true, + init: null, + event: "changeTagId" + }, + + name: { + check: "String", + nullable: false, + init: null, + event: "changeName" + }, + + description: { + check: "String", + nullable: true, + init: null, + event: "changeDescription" + }, + + color: { + check: "Color", + event: "changeColor", + init: "#303030" + }, + + accessRights: { + check: "Object", + nullable: false, + init: null, + event: "changeAccessRights" + }, + }, + + members: { + serialize: function() { + const jsonObject = {}; + const propertyKeys = this.self().getProperties(); + propertyKeys.forEach(key => { + jsonObject[key] = this.get(key); + }); + return jsonObject; + } + } +}); diff --git a/services/static-webserver/client/source/class/osparc/desktop/MainPage.js b/services/static-webserver/client/source/class/osparc/desktop/MainPage.js index d2b72acfdcc..0ccb9bbe8b9 100644 --- a/services/static-webserver/client/source/class/osparc/desktop/MainPage.js +++ b/services/static-webserver/client/source/class/osparc/desktop/MainPage.js @@ -66,7 +66,7 @@ qx.Class.define("osparc.desktop.MainPage", { preloadPromises.push(store.reloadWallets()); } preloadPromises.push(store.getAllClassifiers(true)); - preloadPromises.push(store.getTags()); + preloadPromises.push(osparc.store.Tags.getInstance().fetchTags()); Promise.all(preloadPromises) .then(() => { const mainStack = this.__createMainStack(); diff --git a/services/static-webserver/client/source/class/osparc/desktop/MainPageDesktop.js b/services/static-webserver/client/source/class/osparc/desktop/MainPageDesktop.js index 93f5f50c74d..40c99616a40 100644 --- a/services/static-webserver/client/source/class/osparc/desktop/MainPageDesktop.js +++ b/services/static-webserver/client/source/class/osparc/desktop/MainPageDesktop.js @@ -61,7 +61,7 @@ qx.Class.define("osparc.desktop.MainPageDesktop", { preloadPromises.push(store.reloadWallets()); } preloadPromises.push(store.getAllClassifiers(true)); - preloadPromises.push(store.getTags()); + preloadPromises.push(osparc.store.Tags.getInstance().fetchTags()); Promise.all(preloadPromises) .then(() => { const desktopCenter = new osparc.desktop.credits.DesktopCenter(); diff --git a/services/static-webserver/client/source/class/osparc/desktop/preferences/pages/TagsPage.js b/services/static-webserver/client/source/class/osparc/desktop/preferences/pages/TagsPage.js index 7265c65cebd..add2f2f3040 100644 --- a/services/static-webserver/client/source/class/osparc/desktop/preferences/pages/TagsPage.js +++ b/services/static-webserver/client/source/class/osparc/desktop/preferences/pages/TagsPage.js @@ -48,13 +48,10 @@ qx.Class.define("osparc.desktop.preferences.pages.TagsPage", { icon: "@FontAwesome5Solid/plus/14" }); osparc.utils.Utils.setIdToWidget(this.__addTagButton, "addTagBtn"); - osparc.data.Resources.get("tags") - .then(tags => { - this.__tagItems = tags.map(tag => new osparc.form.tag.TagItem().set({...tag})); - this.__renderLayout(); - this.__attachEventHandlers(); - }) - .catch(err => console.error(err)); + const tags = osparc.store.Tags.getInstance().getTags(); + this.__tagItems = tags.map(tag => new 
osparc.form.tag.TagItem().set({tag})); + this.__renderLayout(); + this.__attachEventHandlers(); }, __renderLayout: function() { diff --git a/services/static-webserver/client/source/class/osparc/filter/UserTagsFilter.js b/services/static-webserver/client/source/class/osparc/filter/UserTagsFilter.js index c0a74265e01..caf5914e5d3 100644 --- a/services/static-webserver/client/source/class/osparc/filter/UserTagsFilter.js +++ b/services/static-webserver/client/source/class/osparc/filter/UserTagsFilter.js @@ -18,11 +18,11 @@ qx.Class.define("osparc.filter.UserTagsFilter", { }, members: { __buildMenu: function() { - osparc.store.Store.getInstance().getTags() + osparc.store.Tags.getInstance().getTags() .forEach(tag => { - const menuButton = this._addOption(tag.name); + const menuButton = this._addOption(tag.getName()); menuButton.setIcon("@FontAwesome5Solid/square/12"); - menuButton.getChildControl("icon").setTextColor(tag.color); + menuButton.getChildControl("icon").setTextColor(tag.getColor()); }); }, __attachEventListeners: function(filterId, filterGroupId) { diff --git a/services/static-webserver/client/source/class/osparc/form/tag/TagItem.js b/services/static-webserver/client/source/class/osparc/form/tag/TagItem.js index 7e79bb54bf3..77282a5db7f 100644 --- a/services/static-webserver/client/source/class/osparc/form/tag/TagItem.js +++ b/services/static-webserver/client/source/class/osparc/form/tag/TagItem.js @@ -26,37 +26,51 @@ qx.Class.define("osparc.form.tag.TagItem", { }, properties: { + tag: { + check: "osparc.data.model.Tag", + nullable: false, + init: null, + event: "changeTag", + apply: "__applyTag", + }, + id: { check: "Integer" }, + name: { check: "String", event: "changeName", init: "" }, + description: { check: "String", nullable: true, event: "changeDescription", init: "" }, + color: { check: "Color", event: "changeColor", init: "#303030" }, + accessRights: { check: "Object", nullable: false, + event: "changeAccessRights", apply: "__renderLayout", - event: "changeAccessRights" }, + mode: { check: "String", init: "display", nullable: false, apply: "_applyMode" }, + appearance: { init: "tagitem", refine: true @@ -78,57 +92,7 @@ qx.Class.define("osparc.form.tag.TagItem", { __colorButton: null, __loadingIcon: null, __validationManager: null, - /** - * Renders this tag item from scratch. 
- */ - __renderLayout: function() { - this._removeAll(); - if (this.getMode() === this.self().modes.EDIT) { - this.__renderEditMode(); - } else if (this.getMode() === this.self().modes.DISPLAY) { - this.__renderDisplayMode(); - } - }, - __renderEditMode: function() { - const nameContainer = new qx.ui.container.Composite(new qx.ui.layout.VBox()).set({ - width: 90 - }); - nameContainer.add(new qx.ui.basic.Label(this.tr("Name")).set({ - buddy: this.getChildControl("nameinput") - })); - nameContainer.add(this.getChildControl("nameinput").set({ - value: this.getName() - })); - this._add(nameContainer); - const descInputContainer = new qx.ui.container.Composite(new qx.ui.layout.VBox()); - descInputContainer.add(new qx.ui.basic.Label(this.tr("Description")).set({ - buddy: this.getChildControl("descriptioninput") - })); - descInputContainer.add(this.getChildControl("descriptioninput").set({ - value: this.getDescription() - })); - this._add(descInputContainer, { - flex: 1 - }); - this._add(this.__colorPicker()); - this._add(this.__tagItemEditButtons()); - }, - __renderDisplayMode: function() { - const tagContainer = new qx.ui.container.Composite(new qx.ui.layout.HBox()).set({ - width: 100 - }); - tagContainer.add(this.getChildControl("tag")); - this._add(tagContainer); - const descriptionContainer = new qx.ui.container.Composite(new qx.ui.layout.HBox()); - descriptionContainer.add(this.getChildControl("description"), { - width: "100%" - }); - this._add(descriptionContainer, { - flex: 1 - }); - this._add(this.__tagItemButtons()); - this.resetBackgroundColor(); - }, + _createChildControlImpl: function(id) { let control; switch (id) { @@ -151,7 +115,7 @@ qx.Class.define("osparc.form.tag.TagItem", { } control = this.__description; break; - case "nameinput": + case "name-input": // Tag name input in edit mode if (this.__nameInput === null) { this.__nameInput = new qx.ui.form.TextField().set({ @@ -162,7 +126,7 @@ qx.Class.define("osparc.form.tag.TagItem", { } control = this.__nameInput; break; - case "descriptioninput": + case "description-input": // Tag description input in edit mode if (this.__descriptionInput === null) { this.__descriptionInput = new qx.ui.form.TextArea().set({ @@ -172,7 +136,7 @@ qx.Class.define("osparc.form.tag.TagItem", { } control = this.__descriptionInput; break; - case "colorinput": + case "color-input": // Color input in edit mode if (this.__colorInput === null) { this.__colorInput = new qx.ui.form.TextField().set({ @@ -180,20 +144,20 @@ qx.Class.define("osparc.form.tag.TagItem", { width: 60, required: true }); - this.__colorInput.bind("value", this.getChildControl("colorbutton"), "backgroundColor"); - this.__colorInput.bind("value", this.getChildControl("colorbutton"), "textColor", { + this.__colorInput.bind("value", this.getChildControl("color-button"), "backgroundColor"); + this.__colorInput.bind("value", this.getChildControl("color-button"), "textColor", { converter: value => osparc.utils.Utils.getContrastedBinaryColor(value) }); this.__validationManager.add(this.__colorInput, osparc.utils.Validators.hexColor); } control = this.__colorInput; break; - case "colorbutton": + case "color-button": // Random color generator button in edit mode if (this.__colorButton === null) { this.__colorButton = new qx.ui.form.Button(null, "@FontAwesome5Solid/sync-alt/12"); this.__colorButton.addListener("execute", () => { - this.getChildControl("colorinput").setValue(osparc.utils.Utils.getRandomColor()); + this.getChildControl("color-input").setValue(osparc.utils.Utils.getRandomColor()); }, 
this); } control = this.__colorButton; @@ -201,6 +165,69 @@ qx.Class.define("osparc.form.tag.TagItem", { } return control || this.base(arguments, id); }, + + __applyTag: function(tag) { + tag.bind("tagId", this, "id"); + tag.bind("name", this, "name"); + tag.bind("description", this, "description"); + tag.bind("color", this, "color"); + tag.bind("accessRights", this, "accessRights"); + }, + + /** + * Renders this tag item from scratch. + */ + __renderLayout: function() { + this._removeAll(); + if (this.getMode() === this.self().modes.EDIT) { + this.__renderEditMode(); + } else if (this.getMode() === this.self().modes.DISPLAY) { + this.__renderDisplayMode(); + } + }, + + __renderEditMode: function() { + const nameContainer = new qx.ui.container.Composite(new qx.ui.layout.VBox()).set({ + width: 90 + }); + nameContainer.add(new qx.ui.basic.Label(this.tr("Name")).set({ + buddy: this.getChildControl("name-input") + })); + nameContainer.add(this.getChildControl("name-input").set({ + value: this.getName() + })); + this._add(nameContainer); + const descInputContainer = new qx.ui.container.Composite(new qx.ui.layout.VBox()); + descInputContainer.add(new qx.ui.basic.Label(this.tr("Description")).set({ + buddy: this.getChildControl("description-input") + })); + descInputContainer.add(this.getChildControl("description-input").set({ + value: this.getDescription() + })); + this._add(descInputContainer, { + flex: 1 + }); + this._add(this.__colorPicker()); + this._add(this.__tagItemEditButtons()); + }, + + __renderDisplayMode: function() { + const tagContainer = new qx.ui.container.Composite(new qx.ui.layout.HBox()).set({ + width: 100 + }); + tagContainer.add(this.getChildControl("tag")); + this._add(tagContainer); + const descriptionContainer = new qx.ui.container.Composite(new qx.ui.layout.HBox()); + descriptionContainer.add(this.getChildControl("description"), { + width: "100%" + }); + this._add(descriptionContainer, { + flex: 1 + }); + this._add(this.__tagItemButtons()); + this.resetBackgroundColor(); + }, + /** * Generates and returns the buttons for deleting and editing an existing label (display mode) */ @@ -224,12 +251,7 @@ qx.Class.define("osparc.form.tag.TagItem", { editButton.addListener("execute", () => this.setMode(this.self().modes.EDIT), this); deleteButton.addListener("execute", () => { deleteButton.setFetching(true); - const params = { - url: { - tagId: this.getId() - } - }; - osparc.data.Resources.fetch("tags", "delete", params) + osparc.store.Tags.getInstance().deleteTag(this.getId()) .then(() => this.fireEvent("deleteTag")) .catch(console.error) .finally(() => deleteButton.setFetching(false)); @@ -256,21 +278,15 @@ qx.Class.define("osparc.form.tag.TagItem", { saveButton.addListener("execute", () => { if (this.__validationManager.validate()) { const data = this.__serializeData(); - const params = { - data - }; saveButton.setFetching(true); let fetch; if (this.isPropertyInitialized("id")) { - params.url = { - tagId: this.getId() - }; - fetch = osparc.data.Resources.fetch("tags", "put", params); + fetch = osparc.store.Tags.getInstance().putTag(this.getId(), data); } else { - fetch = osparc.data.Resources.fetch("tags", "post", params); + fetch = osparc.store.Tags.getInstance().postTag(data); } fetch - .then(tag => this.set(tag)) + .then(tag => this.setTag(tag)) .catch(console.error) .finally(() => { this.fireEvent("tagSaved"); @@ -295,24 +311,27 @@ qx.Class.define("osparc.form.tag.TagItem", { __colorPicker: function() { const container = new qx.ui.container.Composite(new 
qx.ui.layout.VBox()); container.add(new qx.ui.basic.Label(this.tr("Color")).set({ - buddy: this.getChildControl("colorinput") + buddy: this.getChildControl("color-input") })); const innerContainer = new qx.ui.container.Composite(new qx.ui.layout.HBox()); - const refreshButton = this.getChildControl("colorbutton"); - const colorInput = this.getChildControl("colorinput"); + const refreshButton = this.getChildControl("color-button"); + const colorInput = this.getChildControl("color-input"); innerContainer.add(refreshButton); innerContainer.add(colorInput); container.add(innerContainer); return container; }, /** - * Creates an object containing the udpated tag info + * Creates an object containing the updated tag info */ __serializeData: function() { + const name = this.getChildControl("name-input").getValue(); + const description = this.getChildControl("description-input").getValue(); + const color = this.getChildControl("color-input").getValue(); return { - name: this.getChildControl("nameinput").getValue().trim(), - description: this.getChildControl("descriptioninput").getValue().trim(), - color: this.getChildControl("colorinput").getValue() + name: name.trim(), + description: description ? description.trim() : "", + color: color }; }, _applyMode: function() { diff --git a/services/static-webserver/client/source/class/osparc/form/tag/TagManager.js b/services/static-webserver/client/source/class/osparc/form/tag/TagManager.js index ae3ef918adb..6f704c1f222 100644 --- a/services/static-webserver/client/source/class/osparc/form/tag/TagManager.js +++ b/services/static-webserver/client/source/class/osparc/form/tag/TagManager.js @@ -88,8 +88,8 @@ qx.Class.define("osparc.form.tag.TagManager", { newItem.addListener("tagSaved", () => this.__repopulateTags(), this); newItem.addListener("cancelNewTag", e => tagsContainer.remove(e.getTarget()), this); newItem.addListener("deleteTag", e => tagsContainer.remove(e.getTarget()), this); - this.__repopulateTags(); tagsContainer.add(newItem); + this.__repopulateTags(); }); this._add(addTagButton); @@ -119,25 +119,26 @@ qx.Class.define("osparc.form.tag.TagManager", { __repopulateTags: function() { this.__tagsContainer.removeAll(); - const tags = osparc.store.Store.getInstance().getTags(); + const tags = osparc.store.Tags.getInstance().getTags(); tags.forEach(tag => this.__tagsContainer.add(this.__tagButton(tag))); }, __tagButton: function(tag) { - const tagButton = new osparc.form.tag.TagToggleButton(tag, this.__selectedTags.includes(tag.id)); + const tagId = tag.getTagId(); + const tagButton = new osparc.form.tag.TagToggleButton(tag, this.__selectedTags.includes(tagId)); tagButton.addListener("changeValue", evt => { const selected = evt.getData(); if (this.isLiveUpdate()) { tagButton.setFetching(true); if (selected) { - this.__saveAddTag(tag.id, tagButton); + this.__saveAddTag(tagId, tagButton); } else { - this.__saveRemoveTag(tag.id, tagButton); + this.__saveRemoveTag(tagId, tagButton); } } else if (selected) { - this.__selectedTags.push(tag.id); + this.__selectedTags.push(tagId); } else { - this.__selectedTags.remove(tag.id); + this.__selectedTags.remove(tagId); } }, this); tagButton.subscribeToFilterGroup("studyBrowserTagManager"); diff --git a/services/static-webserver/client/source/class/osparc/form/tag/TagToggleButton.js b/services/static-webserver/client/source/class/osparc/form/tag/TagToggleButton.js index 3075d738cf3..35feee0c3bc 100644 --- a/services/static-webserver/client/source/class/osparc/form/tag/TagToggleButton.js +++ 
b/services/static-webserver/client/source/class/osparc/form/tag/TagToggleButton.js @@ -23,11 +23,11 @@ qx.Class.define("osparc.form.tag.TagToggleButton", { appearance: "tagbutton" }); this.setIcon("@FontAwesome5Solid/square/14"); - this.getChildControl("icon").setTextColor(tag.color); - if (tag.description) { - this.setLabel(tag.name + " : " + tag.description); + this.getChildControl("icon").setTextColor(tag.getColor()); + if (tag.getDescription()) { + this.setLabel(tag.getName() + " : " + tag.getDescription()); } else { - this.setLabel(tag.name); + this.setLabel(tag.getName()); } this.getChildControl("check"); diff --git a/services/static-webserver/client/source/class/osparc/info/StudyUtils.js b/services/static-webserver/client/source/class/osparc/info/StudyUtils.js index 95ea7f20b7f..f1d2c3449e5 100644 --- a/services/static-webserver/client/source/class/osparc/info/StudyUtils.js +++ b/services/static-webserver/client/source/class/osparc/info/StudyUtils.js @@ -211,12 +211,12 @@ qx.Class.define("osparc.info.StudyUtils", { tagsContainer.removeAll(); const noTagsLabel = new qx.ui.basic.Label(qx.locale.Manager.tr("Add tags")); tagsContainer.add(noTagsLabel); - osparc.store.Store.getInstance().getTags().filter(tag => model.getTags().includes(tag.id)) + osparc.store.Tags.getInstance().getTags().filter(tag => model.getTags().includes(tag.getTagId())) .forEach(selectedTag => { if (tagsContainer.indexOf(noTagsLabel) > -1) { tagsContainer.remove(noTagsLabel); } - tagsContainer.add(new osparc.ui.basic.Tag(selectedTag.name, selectedTag.color)); + tagsContainer.add(new osparc.ui.basic.Tag(selectedTag)); }); }; study.addListener("changeTags", () => addTags(study), this); diff --git a/services/static-webserver/client/source/class/osparc/notification/NotificationUI.js b/services/static-webserver/client/source/class/osparc/notification/NotificationUI.js index da49db7f0a4..67194c84418 100644 --- a/services/static-webserver/client/source/class/osparc/notification/NotificationUI.js +++ b/services/static-webserver/client/source/class/osparc/notification/NotificationUI.js @@ -22,6 +22,7 @@ qx.Class.define("osparc.notification.NotificationUI", { this.base(arguments); this.set({ + margin: 4, maxWidth: this.self().MAX_WIDTH, padding: this.self().PADDING, cursor: "pointer" @@ -216,9 +217,14 @@ qx.Class.define("osparc.notification.NotificationUI", { } }); - notification.bind("read", this, "backgroundColor", { - converter: read => read ? "background-main-3" : "background-main-4" - }); + const highlight = mouseOn => { + this.set({ + backgroundColor: mouseOn ? 
"strong-main" : "transparent" + }) + }; + this.addListener("mouseover", () => highlight(true)); + this.addListener("mouseout", () => highlight(false)); + highlight(false); }, __notificationTapped: function() { diff --git a/services/static-webserver/client/source/class/osparc/notification/NotificationsContainer.js b/services/static-webserver/client/source/class/osparc/notification/NotificationsContainer.js index 34757474f64..c59a8a94a4c 100644 --- a/services/static-webserver/client/source/class/osparc/notification/NotificationsContainer.js +++ b/services/static-webserver/client/source/class/osparc/notification/NotificationsContainer.js @@ -27,9 +27,14 @@ qx.Class.define("osparc.notification.NotificationsContainer", { zIndex: osparc.utils.Utils.FLOATING_Z_INDEX, maxWidth: osparc.notification.NotificationUI.MAX_WIDTH, maxHeight: 250, - backgroundColor: "background-main-3", + backgroundColor: "background-main", decorator: "rounded", }); + let color = qx.theme.manager.Color.getInstance().resolve("text"); + color = qx.util.ColorUtil.stringToRgb(color); + color.push(0.3); // add transparency + color = qx.util.ColorUtil.rgbToRgbString(color); + osparc.utils.Utils.addBorder(this, 1, color); osparc.utils.Utils.setIdToWidget(this, "notificationsContainer"); const root = qx.core.Init.getApplication().getRoot(); diff --git a/services/static-webserver/client/source/class/osparc/store/Folders.js b/services/static-webserver/client/source/class/osparc/store/Folders.js index 7deb66618bb..d6e83d8fb23 100644 --- a/services/static-webserver/client/source/class/osparc/store/Folders.js +++ b/services/static-webserver/client/source/class/osparc/store/Folders.js @@ -172,6 +172,7 @@ qx.Class.define("osparc.store.Folders", { __addToCache: function(folderData) { let folder = this.foldersCached.find(f => f.getFolderId() === folderData["folderId"] && f.getWorkspaceId() === folderData["workspaceId"]); if (folder) { + const props = Object.keys(qx.util.PropertyUtil.getProperties(osparc.data.model.Folder)); // put Object.keys(folderData).forEach(key => { if (key === "createdAt") { @@ -180,7 +181,7 @@ qx.Class.define("osparc.store.Folders", { folder.set("lastModified", new Date(folderData["modifiedAt"])); } else if (key === "trashedAt") { folder.set("trashedAt", new Date(folderData["trashedAt"])); - } else { + } else if (props.includes(key)) { folder.set(key, folderData[key]); } }); diff --git a/services/static-webserver/client/source/class/osparc/store/Services.js b/services/static-webserver/client/source/class/osparc/store/Services.js index f6851b3aa43..c2abeed32ec 100644 --- a/services/static-webserver/client/source/class/osparc/store/Services.js +++ b/services/static-webserver/client/source/class/osparc/store/Services.js @@ -44,7 +44,11 @@ qx.Class.define("osparc.store.Services", { resolve(servicesObj); }) - .catch(err => console.error("getServices failed", err)); + .catch(err => { + const msg = err.message || qx.locale.Manager.tr("Unable to fetch Services"); + osparc.FlashMessenger.getInstance().logAs(msg, "ERROR"); + console.error(err); + }); }); }, diff --git a/services/static-webserver/client/source/class/osparc/store/Tags.js b/services/static-webserver/client/source/class/osparc/store/Tags.js new file mode 100644 index 00000000000..4ffd9f5cd4f --- /dev/null +++ b/services/static-webserver/client/source/class/osparc/store/Tags.js @@ -0,0 +1,132 @@ +/* ************************************************************************ + + osparc - the simcore frontend + + https://osparc.io + + Copyright: + 2024 IT'IS 
diff --git a/services/static-webserver/client/source/class/osparc/store/Tags.js b/services/static-webserver/client/source/class/osparc/store/Tags.js
new file mode 100644
index 00000000000..4ffd9f5cd4f
--- /dev/null
+++ b/services/static-webserver/client/source/class/osparc/store/Tags.js
@@ -0,0 +1,132 @@
+/* ************************************************************************
+
+   osparc - the simcore frontend
+
+   https://osparc.io
+
+   Copyright:
+     2024 IT'IS Foundation, https://itis.swiss
+
+   License:
+     MIT: https://opensource.org/licenses/MIT
+
+   Authors:
+     * Odei Maiz (odeimaiz)
+
+************************************************************************ */
+
+qx.Class.define("osparc.store.Tags", {
+  extend: qx.core.Object,
+  type: "singleton",
+
+  construct: function() {
+    this.base(arguments);
+
+    this.tagsCached = [];
+  },
+
+  events: {
+    "tagAdded": "qx.event.type.Data",
+    "tagRemoved": "qx.event.type.Data",
+  },
+
+  members: {
+    tagsCached: null,
+
+    fetchTags: function() {
+      if (osparc.auth.Data.getInstance().isGuest()) {
+        return new Promise(resolve => {
+          resolve([]);
+        });
+      }
+
+      return osparc.data.Resources.get("tags")
+        .then(tagsData => {
+          const tags = [];
+          tagsData.forEach(tagData => {
+            const tag = this.__addToCache(tagData);
+            tags.push(tag);
+          });
+          return tags;
+        });
+    },
+
+    getTags: function() {
+      return this.tagsCached;
+    },
+
+    postTag: function(newTagData) {
+      const params = {
+        data: newTagData
+      };
+      return osparc.data.Resources.getInstance().fetch("tags", "post", params)
+        .then(tagData => {
+          const tag = this.__addToCache(tagData);
+          this.fireDataEvent("tagAdded", tag);
+          return tag;
+        });
+    },
+
+    deleteTag: function(tagId) {
+      const params = {
+        url: {
+          tagId
+        }
+      };
+      return osparc.data.Resources.getInstance().fetch("tags", "delete", params)
+        .then(() => {
+          const tag = this.getTag(tagId);
+          if (tag) {
+            this.__deleteFromCache(tagId);
+            this.fireDataEvent("tagRemoved", tag);
+          }
+        })
+        .catch(console.error);
+    },
+
+    putTag: function(tagId, updateData) {
+      const params = {
+        url: {
+          tagId
+        },
+        data: updateData
+      };
+      return osparc.data.Resources.getInstance().fetch("tags", "put", params)
+        .then(tagData => {
+          return this.__addToCache(tagData);
+        })
+        .catch(console.error);
+    },
+
+    getTag: function(tagId = null) {
+      return this.tagsCached.find(f => f.getTagId() === tagId);
+    },
+
+    __addToCache: function(tagData) {
+      let tag = this.tagsCached.find(f => f.getTagId() === tagData["id"]);
+      if (tag) {
+        const props = Object.keys(qx.util.PropertyUtil.getProperties(osparc.data.model.Tag));
+        // put
+        Object.keys(tagData).forEach(key => {
+          if (props.includes(key)) {
+            tag.set(key, tagData[key]);
+          }
+        });
+      } else {
+        // get and post
+        tag = new osparc.data.model.Tag(tagData);
+        this.tagsCached.unshift(tag);
+      }
+      return tag;
+    },
+
+    __deleteFromCache: function(tagId) {
+      const idx = this.tagsCached.findIndex(f => f.getTagId() === tagId);
+      if (idx > -1) {
+        this.tagsCached.splice(idx, 1);
+        return true;
+      }
+      return false;
+    }
+  }
+});
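A possible end-to-end use of the new singleton (illustrative; assumes the "tags" endpoints wired through osparc.data.Resources respond as the methods above expect, and the tag payload values are made up):

// Fetch the cached tag models, create one, rename it, then remove it again.
const tagsStore = osparc.store.Tags.getInstance();
tagsStore.addListener("tagAdded", e => console.log("tag added:", e.getData().getName()));
tagsStore.fetchTags()
  .then(() => tagsStore.postTag({ "name": "draft", "description": "", "color": "#FF5733" }))
  .then(tag => tagsStore.putTag(tag.getTagId(), { "name": "reviewed" }))
  .then(tag => tagsStore.deleteTag(tag.getTagId()))
  .catch(console.error);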
diff --git a/services/static-webserver/client/source/class/osparc/ui/basic/Tag.js b/services/static-webserver/client/source/class/osparc/ui/basic/Tag.js
index 4b23fc0efde..64930674e25 100644
--- a/services/static-webserver/client/source/class/osparc/ui/basic/Tag.js
+++ b/services/static-webserver/client/source/class/osparc/ui/basic/Tag.js
@@ -13,17 +13,19 @@ qx.Class.define("osparc.ui.basic.Tag", {
   extend: qx.ui.basic.Label,

   /**
    * Constructor for the Tag element.
-   * @param {String} value Short text to be shown on the tag
-   * @param {String} color Color for the background, must be in hex3 or hex6 form
+   * @param {osparc.data.model.Tag} tag Tag model providing the text and color shown on the tag
    * @param {String} [filterGroupId] If present, clicking on the tab will dispatch a bus message with the
    * id ``GroupIdTagsTrigger`` to be subscribed by a filter.
    */
-  construct: function(value, color, filterGroupId) {
-    this.base(arguments, value);
-    this.setFont("text-11");
-    if (color) {
-      this.setColor(color);
+  construct: function(tag, filterGroupId) {
+    this.base(arguments);
+
+    if (tag) {
+      tag.bind("name", this, "value");
+      tag.bind("color", this, "color");
     }
+    this.setFont("text-11");
+
     if (filterGroupId) {
       this.setCursor("pointer");
       this.addListener("tap", e => {