Merge remote-tracking branch 'upstream/master' into update-master-20
sanderegg committed Nov 14, 2024
2 parents f335bca + cb74ff7 commit db084cd
Showing 240 changed files with 5,492 additions and 2,466 deletions.
19 changes: 18 additions & 1 deletion .env-devel
@@ -17,10 +17,12 @@ AGENT_VOLUMES_CLEANUP_S3_ENDPOINT=http://172.17.0.1:9001
AGENT_VOLUMES_CLEANUP_S3_PROVIDER=MINIO
AGENT_VOLUMES_CLEANUP_S3_REGION=us-east-1
AGENT_VOLUMES_CLEANUP_S3_SECRET_KEY=12345678
AGENT_TRACING={}

API_SERVER_DEV_FEATURES_ENABLED=0
API_SERVER_LOGLEVEL=INFO
API_SERVER_PROFILING=1
API_SERVER_TRACING={}
TRAEFIK_API_SERVER_INFLIGHTREQ_AMOUNT=25

AUTOSCALING_DASK=null
@@ -33,6 +35,7 @@ AUTOSCALING_LOGLEVEL=INFO
AUTOSCALING_NODES_MONITORING=null
AUTOSCALING_POLL_INTERVAL="00:00:10"
AUTOSCALING_SSM_ACCESS=null
AUTOSCALING_TRACING={}

AWS_S3_CLI_S3=null

@@ -44,6 +47,7 @@ CATALOG_PORT=8000
CATALOG_PROFILING=1
CATALOG_SERVICES_DEFAULT_RESOURCES='{"CPU": {"limit": 0.1, "reservation": 0.1}, "RAM": {"limit": 2147483648, "reservation": 2147483648}}'
CATALOG_SERVICES_DEFAULT_SPECIFICATIONS='{}'
CATALOG_TRACING={}

CLUSTERS_KEEPER_COMPUTATIONAL_BACKEND_DEFAULT_CLUSTER_AUTH='{"type":"tls","tls_ca_file":"/home/scu/.dask/dask-crt.pem","tls_client_cert":"/home/scu/.dask/dask-crt.pem","tls_client_key":"/home/scu/.dask/dask-key.pem"}'
CLUSTERS_KEEPER_COMPUTATIONAL_BACKEND_DOCKER_IMAGE_TAG=master-github-latest
@@ -57,6 +61,7 @@ CLUSTERS_KEEPER_MAX_MISSED_HEARTBEATS_BEFORE_CLUSTER_TERMINATION=5
CLUSTERS_KEEPER_PRIMARY_EC2_INSTANCES=null
CLUSTERS_KEEPER_TASK_INTERVAL=30
CLUSTERS_KEEPER_WORKERS_EC2_INSTANCES=null
CLUSTERS_KEEPER_TRACING={}

DASK_SCHEDULER_HOST=dask-scheduler
DASK_SCHEDULER_PORT=8786
@@ -70,6 +75,7 @@ DIRECTOR_HOST=director
DIRECTOR_PORT=8080
DIRECTOR_REGISTRY_CACHING_TTL=900
DIRECTOR_REGISTRY_CACHING=True
DIRECTOR_TRACING={}

EFS_USER_ID=8006
EFS_USER_NAME=efs
@@ -79,6 +85,10 @@ EFS_DNS_NAME=fs-xxx.efs.us-east-1.amazonaws.com
EFS_MOUNTED_PATH=/tmp/efs
EFS_PROJECT_SPECIFIC_DATA_DIRECTORY=project-specific-data
EFS_ONLY_ENABLED_FOR_USERIDS=[]
EFS_GUARDIAN_TRACING={}

# DATCORE_ADAPTER
DATCORE_ADAPTER_TRACING={}

# DIRECTOR_V2 ----
COMPUTATIONAL_BACKEND_DEFAULT_CLUSTER_AUTH='{"type":"tls","tls_ca_file":"/home/scu/.dask/dask-crt.pem","tls_client_cert":"/home/scu/.dask/dask-crt.pem","tls_client_key":"/home/scu/.dask/dask-key.pem"}'
@@ -105,10 +115,13 @@ DYNAMIC_SIDECAR_LOG_LEVEL=DEBUG
DYNAMIC_SIDECAR_PROMETHEUS_MONITORING_NETWORKS=[]
DYNAMIC_SIDECAR_PROMETHEUS_SERVICE_LABELS={}
DYNAMIC_SIDECAR_API_SAVE_RESTORE_STATE_TIMEOUT=01:00:00
# DIRECTOR_V2 ----
DIRECTOR_V2_TRACING={}

# DYNAMIC_SCHEDULER ----
DYNAMIC_SCHEDULER_LOGLEVEL=DEBUG
DYNAMIC_SCHEDULER_PROFILING=1
DYNAMIC_SCHEDULER_STOP_SERVICE_TIMEOUT=01:00:00
DYNAMIC_SCHEDULER_TRACING={}

FUNCTION_SERVICES_AUTHORS='{"UN": {"name": "Unknown", "email": "[email protected]", "affiliation": "unknown"}}'

@@ -122,6 +135,7 @@ INVITATIONS_PORT=8000
INVITATIONS_SECRET_KEY='REPLACE_ME_with_result__Fernet_generate_key='
INVITATIONS_SWAGGER_API_DOC_ENABLED=1
INVITATIONS_USERNAME=admin
INVITATIONS_TRACING={}

LOG_FORMAT_LOCAL_DEV_ENABLED=1
LOG_FILTER_MAPPING='{}'
@@ -146,6 +160,7 @@ PAYMENTS_STRIPE_API_SECRET='REPLACE_ME_with_api_secret'
PAYMENTS_STRIPE_URL=https://api.stripe.com
PAYMENTS_SWAGGER_API_DOC_ENABLED=1
PAYMENTS_USERNAME=admin
PAYMENTS_TRACING={}

POSTGRES_DB=simcoredb
POSTGRES_ENDPOINT=postgres:5432
@@ -185,6 +200,7 @@ RESOURCE_USAGE_TRACKER_MISSED_HEARTBEAT_CHECK_ENABLED=1
RESOURCE_USAGE_TRACKER_MISSED_HEARTBEAT_COUNTER_FAIL=6
RESOURCE_USAGE_TRACKER_MISSED_HEARTBEAT_INTERVAL_SEC=300
RESOURCE_USAGE_TRACKER_S3=null
RESOURCE_USAGE_TRACKER_TRACING={}

# NOTE: 172.17.0.1 is the docker0 interface, which redirect from inside a container onto the host network interface.
R_CLONE_OPTION_BUFFER_SIZE=0M
@@ -218,6 +234,7 @@ STORAGE_HOST=storage
STORAGE_LOGLEVEL=INFO
STORAGE_PORT=8080
STORAGE_PROFILING=1
STORAGE_TRACING={}
# STORAGE ----

SWARM_STACK_NAME=master-simcore
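
The block of new *_TRACING={} variables follows the pattern this stack uses for optional sub-settings: an empty JSON object switches the feature on with its defaults, while leaving the variable unset (or null) keeps it off. Below is a minimal sketch, assuming pydantic-settings and a hypothetical TracingSettings model, of how such a value could be parsed; the field names and defaults are illustrative, not the repository's actual classes.

import os
from pydantic import BaseModel
from pydantic_settings import BaseSettings

class TracingSettings(BaseModel):
    # Hypothetical fields and defaults; the real tracing settings live in the service library.
    TRACING_OPENTELEMETRY_COLLECTOR_ENDPOINT: str = "http://opentelemetry-collector"
    TRACING_OPENTELEMETRY_COLLECTOR_PORT: int = 4318

class ApplicationSettings(BaseSettings):
    # "{}" in .env-devel -> TracingSettings() with its defaults; unset -> None (tracing off).
    API_SERVER_TRACING: TracingSettings | None = None

os.environ["API_SERVER_TRACING"] = "{}"  # mirrors the entry added above
settings = ApplicationSettings()
assert isinstance(settings.API_SERVER_TRACING, TracingSettings)
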
30 changes: 27 additions & 3 deletions api/specs/web-server/_folders.py
@@ -21,7 +21,7 @@
from models_library.workspaces import WorkspaceID
from pydantic import Json
from simcore_service_webserver._meta import API_VTAG
from simcore_service_webserver.folders._folders_handlers import FoldersPathParams
from simcore_service_webserver.folders._models import FolderFilters, FoldersPathParams

router = APIRouter(
prefix=f"/{API_VTAG}",
@@ -30,8 +30,6 @@
],
)

### Folders


@router.post(
"/folders",
@@ -57,6 +55,32 @@ async def list_folders(
example='{"field": "name", "direction": "desc"}',
),
] = '{"field": "modified_at", "direction": "desc"}',
filters: Annotated[
Json | None,
Query(description=FolderFilters.schema_json(indent=1)),
] = None,
):
...


@router.get(
"/folders:search",
response_model=Envelope[list[FolderGet]],
)
async def list_folders_full_search(
params: Annotated[PageQueryParameters, Depends()],
text: str | None = None,
order_by: Annotated[
Json,
Query(
description="Order by field (modified_at|name|description) and direction (asc|desc). The default sorting order is ascending.",
example='{"field": "name", "direction": "desc"}',
),
] = '{"field": "modified_at", "direction": "desc"}',
filters: Annotated[
Json | None,
Query(description=FolderFilters.schema_json(indent=1)),
] = None,
):
...

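
Both list_folders and the new folders:search route above receive order_by and filters as JSON-encoded query strings, with the FolderFilters JSON Schema embedded in the parameter's OpenAPI description. A hedged client-side sketch follows; the base URL, the v0 prefix, the paging fields, and the trashed filter key are assumptions for illustration, and authentication is omitted.

import json
import httpx

BASE_URL = "http://localhost:9081"  # assumed address of the web-server in the dev stack

params = {
    "text": "experiment",  # free-text term for folders:search
    "order_by": json.dumps({"field": "name", "direction": "desc"}),
    "filters": json.dumps({"trashed": False}),  # assumed FolderFilters field
    "offset": 0,
    "limit": 20,
}
r = httpx.get(f"{BASE_URL}/v0/folders:search", params=params)
r.raise_for_status()
folders = r.json()["data"]  # Envelope[list[FolderGet]] wraps the payload under "data"
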
6 changes: 5 additions & 1 deletion api/specs/web-server/_projects_crud.py
@@ -32,6 +32,7 @@
from simcore_service_webserver.projects._common_models import ProjectPathParams
from simcore_service_webserver.projects._crud_handlers import ProjectCreateParams
from simcore_service_webserver.projects._crud_handlers_models import (
ProjectFilters,
ProjectListFullSearchParams,
ProjectListParams,
)
@@ -83,7 +84,10 @@ async def list_projects(
example='{"field": "last_change_date", "direction": "desc"}',
),
] = '{"field": "last_change_date", "direction": "desc"}',
filters: Annotated[Json | None, Query()] = None,
filters: Annotated[
Json | None,
Query(description=ProjectFilters.schema_json(indent=1)),
] = None,
):
...

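
As with folders, the projects listing now documents its filters query parameter by embedding the filter model's JSON Schema into the OpenAPI description. A small sketch of that mechanism with a stand-in model (the real ProjectFilters fields may differ):

from pydantic import BaseModel, Field

class _FiltersSketch(BaseModel):
    # Stand-in for ProjectFilters / FolderFilters; the real field names may differ.
    trashed: bool | None = Field(default=False, description="Set to true to list trashed items")

# Prints the JSON Schema that ends up in the query parameter's OpenAPI description.
# (pydantic v2 equivalent: json.dumps(_FiltersSketch.model_json_schema(), indent=1))
print(_FiltersSketch.schema_json(indent=1))
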
41 changes: 39 additions & 2 deletions api/specs/web-server/_trash.py
@@ -9,10 +9,14 @@

from fastapi import APIRouter, Depends, status
from simcore_service_webserver._meta import API_VTAG
from simcore_service_webserver.projects._trash_handlers import (
ProjectPathParams,
from simcore_service_webserver.folders._models import (
FoldersPathParams,
RemoveQueryParams,
)
from simcore_service_webserver.projects._trash_handlers import ProjectPathParams
from simcore_service_webserver.projects._trash_handlers import (
RemoveQueryParams as RemoveQueryParams_duplicated,
)

router = APIRouter(
prefix=f"/{API_VTAG}",
@@ -59,3 +63,36 @@ def untrash_project(
_p: Annotated[ProjectPathParams, Depends()],
):
...


_extra_tags = ["folders"]


@router.post(
"/folders/{folder_id}:trash",
tags=_extra_tags,
status_code=status.HTTP_204_NO_CONTENT,
responses={
status.HTTP_404_NOT_FOUND: {"description": "Not such a folder"},
status.HTTP_409_CONFLICT: {
"description": "One or more projects is in use and cannot be trashed"
},
status.HTTP_503_SERVICE_UNAVAILABLE: {"description": "Trash service error"},
},
)
def trash_folder(
_p: Annotated[FoldersPathParams, Depends()],
_q: Annotated[RemoveQueryParams_duplicated, Depends()],
):
...


@router.post(
"/folders/{folder_id}:untrash",
tags=_extra_tags,
status_code=status.HTTP_204_NO_CONTENT,
)
def untrash_folder(
_p: Annotated[FoldersPathParams, Depends()],
):
...
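
The two added routes extend trashing from projects to folders. A client-side sketch follows, assuming the folder-level RemoveQueryParams carries the same force flag as its project-level counterpart and that the dev web-server listens at the URL below (authentication omitted):

import httpx

BASE_URL = "http://localhost:9081"  # assumed web-server address
FOLDER_ID = 123                     # hypothetical folder

# Move the folder to the trash; `force` mirrors the project-level RemoveQueryParams flag (assumed).
r = httpx.post(f"{BASE_URL}/v0/folders/{FOLDER_ID}:trash", params={"force": "false"})
assert r.status_code == 204  # HTTP_204_NO_CONTENT on success

# Restore the folder from the trash.
r = httpx.post(f"{BASE_URL}/v0/folders/{FOLDER_ID}:untrash")
assert r.status_code == 204
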
2 changes: 0 additions & 2 deletions packages/aws-library/requirements/_base.txt
@@ -52,8 +52,6 @@ arrow==1.3.0
# -r requirements/../../../packages/service-library/requirements/../../../packages/models-library/requirements/_base.in
# -r requirements/../../../packages/service-library/requirements/_base.in
# -r requirements/_base.in
async-timeout==4.0.3
# via redis
attrs==24.2.0
# via
# aiohttp
@@ -18,6 +18,7 @@ class FolderGet(OutputSchema):
description: str
created_at: datetime
modified_at: datetime
trashed_at: datetime | None
owner: GroupID
my_access_rights: AccessRights
access_rights: dict[GroupID, AccessRights]
@@ -18,6 +18,7 @@ class FolderGet(OutputSchema):
name: str
created_at: datetime
modified_at: datetime
trashed_at: datetime | None
owner: GroupID
workspace_id: WorkspaceID | None
my_access_rights: AccessRights
40 changes: 39 additions & 1 deletion packages/models-library/src/models_library/folders.py
@@ -1,14 +1,42 @@
from datetime import datetime
from enum import auto
from typing import TypeAlias

from pydantic import BaseModel, ConfigDict, Field, PositiveInt
from pydantic import BaseModel, ConfigDict, Field, PositiveInt, field_validator

from .access_rights import AccessRights
from .users import GroupID, UserID
from .utils.enums import StrAutoEnum
from .workspaces import WorkspaceID

FolderID: TypeAlias = PositiveInt


class FolderScope(StrAutoEnum):
ROOT = auto()
SPECIFIC = auto()
ALL = auto()


class FolderQuery(BaseModel):
folder_scope: FolderScope
folder_id: PositiveInt | None = None

@field_validator("folder_id", mode="before")
@classmethod
def validate_folder_id(cls, value, values):
scope = values.get("folder_scope")
if scope == FolderScope.SPECIFIC and value is None:
raise ValueError(
"folder_id must be provided when folder_scope is SPECIFIC."
)
if scope != FolderScope.SPECIFIC and value is not None:
raise ValueError(
"folder_id should be None when folder_scope is not SPECIFIC."
)
return value


#
# DB
#
@@ -30,7 +58,17 @@ class FolderDB(BaseModel):
...,
description="Timestamp of last modification",
)
trashed_at: datetime | None = Field(
...,
)

user_id: UserID | None
workspace_id: WorkspaceID | None

model_config = ConfigDict(from_attributes=True)


class UserFolderAccessRightsDB(FolderDB):
my_access_rights: AccessRights

model_config = ConfigDict(from_attributes=True)
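
FolderQuery ties folder_id to the selected scope: SPECIFIC requires an id, while ROOT and ALL forbid one. The sketch below expresses the same constraint in explicit pydantic-v2 style, where a cross-field field_validator reads the already-validated scope from ValidationInfo.data; it is an illustrative stand-in, not the repository's code.

from pydantic import BaseModel, ValidationError, ValidationInfo, field_validator

class _FolderQuerySketch(BaseModel):
    folder_scope: str                  # stands in for FolderScope: "ROOT" | "SPECIFIC" | "ALL"
    folder_id: int | None = None

    @field_validator("folder_id")
    @classmethod
    def _check_id_matches_scope(cls, value: int | None, info: ValidationInfo) -> int | None:
        scope = info.data.get("folder_scope")  # fields validate in declaration order
        if scope == "SPECIFIC" and value is None:
            raise ValueError("folder_id must be provided when folder_scope is SPECIFIC.")
        if scope != "SPECIFIC" and value is not None:
            raise ValueError("folder_id should be None when folder_scope is not SPECIFIC.")
        return value

_FolderQuerySketch(folder_scope="SPECIFIC", folder_id=42)    # accepted
try:
    _FolderQuerySketch(folder_scope="ROOT", folder_id=42)    # rejected
except ValidationError as err:
    print(err)
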
1 change: 1 addition & 0 deletions packages/models-library/src/models_library/projects.py
Expand Up @@ -182,5 +182,6 @@ class Project(BaseProjectModel):
default=None,
alias="trashedAt",
)
trashed_explicitly: bool = Field(default=False, alias="trashedExplicitly")

model_config = ConfigDict(title="osparc-simcore project", extra="forbid")
34 changes: 31 additions & 3 deletions packages/models-library/src/models_library/workspaces.py
@@ -1,13 +1,41 @@
from datetime import datetime
from enum import auto
from typing import TypeAlias

from models_library.access_rights import AccessRights
from models_library.users import GroupID
from pydantic import BaseModel, ConfigDict, Field, PositiveInt
from pydantic import BaseModel, ConfigDict, Field, PositiveInt, field_validator

from .access_rights import AccessRights
from .users import GroupID
from .utils.enums import StrAutoEnum

WorkspaceID: TypeAlias = PositiveInt


class WorkspaceScope(StrAutoEnum):
PRIVATE = auto()
SHARED = auto()
ALL = auto()


class WorkspaceQuery(BaseModel):
workspace_scope: WorkspaceScope
workspace_id: PositiveInt | None = None

@field_validator("workspace_id", mode="before")
@classmethod
def validate_workspace_id(cls, value, values):
scope = values.get("workspace_scope")
if scope == WorkspaceScope.SHARED and value is None:
raise ValueError(
"workspace_id must be provided when workspace_scope is SHARED."
)
if scope != WorkspaceScope.SHARED and value is not None:
raise ValueError(
"workspace_id should be None when workspace_scope is not SHARED."
)
return value


#
# DB
#