diff --git a/.gitignore b/.gitignore index 020f0df5..d04e64a2 100644 --- a/.gitignore +++ b/.gitignore @@ -28,6 +28,7 @@ wheels/ .installed.cfg *.egg MANIFEST +pyvenv.cfg # PyInstaller # Usually these files are written by a python script from a template diff --git a/docker-compose.dev.yml b/docker-compose.dev.yml index 1f66fdb9..6204104d 100644 --- a/docker-compose.dev.yml +++ b/docker-compose.dev.yml @@ -1,5 +1,6 @@ services: stac-dev: + platform: linux/amd64 image: pc-apis-stac-dev build: context: . @@ -63,6 +64,7 @@ services: depends_on: - stac tiler-dev: + platform: linux/amd64 image: pc-apis-tiler-dev # For Mac OS M1 user, you'll need to add `platform: linux/amd64`. # see https://github.com/developmentseed/titiler/discussions/387#discussioncomment-1643110 diff --git a/docker-compose.yml b/docker-compose.yml index 71e4d413..48528498 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,6 +1,7 @@ services: stac: + platform: linux/amd64 image: pc-apis-stac build: context: . @@ -22,7 +23,7 @@ services: image: pc-apis-tiler # For Mac OS M1 user, you'll need to add `platform: linux/amd64`. # see https://github.com/developmentseed/titiler/discussions/387#discussioncomment-1643110 - # platform: linux/amd64 + platform: linux/amd64 build: context: . dockerfile: pctiler/Dockerfile @@ -39,10 +40,11 @@ services: - ./pccommon:/opt/src/pccommon depends_on: - database - command: [ "uvicorn", "pctiler.main:app", "--host", "0.0.0.0", "--port", "8082", "--reload", "--proxy-headers" ] + command: [ "uvicorn", "pctiler.main:app", "--host", "0.0.0.0", "--port", "8082", "--reload", "--proxy-headers", "--root-path", "/data" ] funcs: image: pc-apis-funcs + platform: linux/amd64 build: context: . 
dockerfile: pcfuncs/Dockerfile diff --git a/pccommon/pccommon/cli.py b/pccommon/pccommon/cli.py index b853601b..06c1d376 100644 --- a/pccommon/pccommon/cli.py +++ b/pccommon/pccommon/cli.py @@ -61,12 +61,12 @@ def dump(account: str, table: str, type: str, **kwargs: Any) -> int: if id: col_config = col_config_table.get_config(id) assert col_config - result[id] = col_config.dict() + result[id] = col_config.model_dump() else: for _, collection_id, col_config in col_config_table.get_all(): assert collection_id assert col_config - result[collection_id] = col_config.dict() + result[collection_id] = col_config.model_dump() elif type == "container": con_config_table = ContainerConfigTable.from_environment( @@ -77,11 +77,11 @@ def dump(account: str, table: str, type: str, **kwargs: Any) -> int: assert con_account con_config = con_config_table.get_config(con_account, id) assert con_config - result[f"{con_account}/{id}"] = con_config.dict() + result[f"{con_account}/{id}"] = con_config.model_dump() else: for storage_account, container, con_config in con_config_table.get_all(): assert con_config - result[f"{storage_account}/{container}"] = con_config.dict() + result[f"{storage_account}/{container}"] = con_config.model_dump() else: print(f"Unknown type: {type}") return 1 diff --git a/pccommon/pccommon/config/collections.py b/pccommon/pccommon/config/collections.py index c6ac12b6..bcbc145b 100644 --- a/pccommon/pccommon/config/collections.py +++ b/pccommon/pccommon/config/collections.py @@ -1,12 +1,11 @@ from enum import Enum from typing import Any, Dict, List, Optional, Tuple -import orjson from humps import camelize from pydantic import BaseModel, Field from pccommon.tables import ModelTableService -from pccommon.utils import get_param_str, orjson_dumps +from pccommon.utils import get_param_str class RenderOptionType(str, Enum): @@ -19,11 +18,13 @@ def __str__(self) -> str: class CamelModel(BaseModel): - class Config: - alias_generator = camelize - 
allow_population_by_field_name = True - json_loads = orjson.loads - json_dumps = orjson_dumps + + model_config = { + # TODO, see if we can use pydantic native function + # https://docs.pydantic.dev/latest/api/config/#pydantic.alias_generators.to_camel + "alias_generator": camelize, + "populate_by_name": True, + } class VectorTileset(CamelModel): @@ -137,10 +138,6 @@ def should_add_collection_links(self) -> bool: def should_add_item_links(self) -> bool: return self.create_links and (not self.hidden) - class Config: - json_loads = orjson.loads - json_dumps = orjson_dumps - class Mosaics(CamelModel): """ @@ -187,11 +184,11 @@ class LegendConfig(CamelModel): showing legend labels as scaled values. """ - type: Optional[str] - labels: Optional[List[str]] - trim_start: Optional[int] - trim_end: Optional[int] - scale_factor: Optional[float] + type: Optional[str] = None + labels: Optional[List[str]] = None + trim_start: Optional[int] = None + trim_end: Optional[int] = None + scale_factor: Optional[float] = None class VectorTileOptions(CamelModel): @@ -216,10 +213,10 @@ class VectorTileOptions(CamelModel): tilejson_key: str source_layer: str - fill_color: Optional[str] - stroke_color: Optional[str] - stroke_width: Optional[int] - filter: Optional[List[Any]] + fill_color: Optional[str] = None + stroke_color: Optional[str] = None + stroke_width: Optional[int] = None + filter: Optional[List[Any]] = None class RenderOptionCondition(CamelModel): @@ -329,10 +326,6 @@ class CollectionConfig(BaseModel): render_config: DefaultRenderConfig mosaic_info: MosaicInfo - class Config: - json_loads = orjson.loads - json_dumps = orjson_dumps - class CollectionConfigTable(ModelTableService[CollectionConfig]): _model = CollectionConfig diff --git a/pccommon/pccommon/config/containers.py b/pccommon/pccommon/config/containers.py index a40875b2..045640a1 100644 --- a/pccommon/pccommon/config/containers.py +++ b/pccommon/pccommon/config/containers.py @@ -1,18 +1,20 @@ from typing import Optional 
-import orjson from pydantic import BaseModel from pccommon.tables import ModelTableService -from pccommon.utils import orjson_dumps class ContainerConfig(BaseModel): has_cdn: bool = False - class Config: - json_loads = orjson.loads - json_dumps = orjson_dumps + # json_loads/json_dumps config have been removed + # the authors seem to indicate that parsing/serialization + # in Rust (pydantic-core) is fast (but maybe not as fast as orjson) + # https://github.com/pydantic/pydantic/discussions/6388 + # class Config: + # json_loads = orjson.loads + # json_dumps = orjson_dumps class ContainerConfigTable(ModelTableService[ContainerConfig]): diff --git a/pccommon/pccommon/config/core.py b/pccommon/pccommon/config/core.py index 7e9f8d6f..d3049b33 100644 --- a/pccommon/pccommon/config/core.py +++ b/pccommon/pccommon/config/core.py @@ -4,7 +4,8 @@ from cachetools import Cache, LRUCache, cachedmethod from cachetools.func import lru_cache from cachetools.keys import hashkey -from pydantic import BaseModel, BaseSettings, Field, PrivateAttr, validator +from pydantic import BaseModel, Field, PrivateAttr, field_validator +from pydantic_settings import BaseSettings from pccommon.config.collections import CollectionConfigTable from pccommon.config.containers import ContainerConfigTable @@ -23,7 +24,7 @@ class TableConfig(BaseModel): table_name: str account_url: Optional[str] = None - @validator("account_url") + @field_validator("account_url") def validate_url(cls, value: str) -> str: if value and not value.startswith("http://azurite:"): raise ValueError( @@ -39,7 +40,7 @@ class PCAPIsConfig(BaseSettings): app_insights_instrumentation_key: Optional[str] = Field( # type: ignore default=None, - env=APP_INSIGHTS_INSTRUMENTATION_KEY, + validation_alias=APP_INSIGHTS_INSTRUMENTATION_KEY, ) collection_config: TableConfig container_config: TableConfig @@ -55,6 +56,15 @@ class PCAPIsConfig(BaseSettings): debug: bool = False + model_config = { + "env_prefix": ENV_VAR_PCAPIS_PREFIX, + 
"env_nested_delimiter": "__", + # Mypy is complaining about this with + # error: Incompatible types (expression has type "str", + # TypedDict item "extra" has type "Extra") + "extra": "ignore", # type: ignore + } + @cachedmethod(cache=lambda self: self._cache, key=lambda _: hashkey("collection")) def get_collection_config_table(self) -> CollectionConfigTable: return CollectionConfigTable.from_environment( @@ -86,8 +96,3 @@ def get_ip_exception_list_table(self) -> IPExceptionListTable: @lru_cache(maxsize=1) def from_environment(cls) -> "PCAPIsConfig": return PCAPIsConfig() # type: ignore - - class Config: - env_prefix = ENV_VAR_PCAPIS_PREFIX - extra = "ignore" - env_nested_delimiter = "__" diff --git a/pccommon/pccommon/logging.py b/pccommon/pccommon/logging.py index 1b7e6e2b..01aa2125 100644 --- a/pccommon/pccommon/logging.py +++ b/pccommon/pccommon/logging.py @@ -60,6 +60,10 @@ def filter(self, record: logging.LogRecord) -> bool: # Prevent successful health check pings from being logged class HealthCheckFilter(logging.Filter): + def __init__(self, app_root_path: str): + super().__init__() + self.app_root_path = app_root_path + def filter(self, record: logging.LogRecord) -> bool: if record.args is not None and len(record.args) != 5: return True @@ -67,7 +71,7 @@ def filter(self, record: logging.LogRecord) -> bool: args = cast(Tuple[str, str, str, str, int], record.args) endpoint = args[2] status = args[4] - if endpoint == "/_mgmt/ping" and status == 200: + if f"{self.app_root_path}/_mgmt/ping" == endpoint and status == 200: return False return True @@ -75,11 +79,11 @@ def filter(self, record: logging.LogRecord) -> bool: # Initialize logging, including a console handler, and sending all logs containing # custom_dimensions to Application Insights -def init_logging(service_name: str) -> None: +def init_logging(service_name: str, app_root_path: str) -> None: config = get_apis_config() # Exclude health check endpoint pings from the uvicorn logs - 
logging.getLogger("uvicorn.access").addFilter(HealthCheckFilter()) + logging.getLogger("uvicorn.access").addFilter(HealthCheckFilter(app_root_path)) # Setup logging for current package and pccommon for package in [PACKAGES[service_name], "pccommon"]: diff --git a/pccommon/pccommon/tables.py b/pccommon/pccommon/tables.py index 2a523745..006bb6a1 100644 --- a/pccommon/pccommon/tables.py +++ b/pccommon/pccommon/tables.py @@ -45,8 +45,10 @@ class TableError(Exception): pass +# TODO: mypy is complaining locally about +# "BaseModel" has no attribute "model_dump_json" def encode_model(m: BaseModel) -> str: - return m.json() + return m.model_dump_json() # type: ignore def decode_dict(s: str) -> Dict[str, Any]: diff --git a/pccommon/pccommon/tracing.py b/pccommon/pccommon/tracing.py index 8e28e6c0..bd4cc7e6 100644 --- a/pccommon/pccommon/tracing.py +++ b/pccommon/pccommon/tracing.py @@ -3,7 +3,6 @@ import re from typing import List, Optional, Tuple, Union, cast -import fastapi from fastapi import Request from opencensus.ext.azure.trace_exporter import AzureExporter from opencensus.trace import execution_context @@ -211,7 +210,7 @@ def _iter_cql(cql: dict, property_name: str) -> Optional[Union[str, List[str]]]: return None -def add_stac_attributes_from_search(search_json: str, request: fastapi.Request) -> None: +def add_stac_attributes_from_search(search_json: str, request: Request) -> None: """ Try to add the Collection ID and Item ID from a search to the current span. 
""" diff --git a/pccommon/requirements.txt b/pccommon/requirements.txt index 475c2d07..e1832b0b 100644 --- a/pccommon/requirements.txt +++ b/pccommon/requirements.txt @@ -4,11 +4,13 @@ # # pip-compile --extra=server --output-file=pccommon/requirements.txt ./pccommon/setup.py # -anyio==4.3.0 +annotated-types==0.7.0 + # via pydantic +anyio==4.4.0 # via starlette async-timeout==4.0.3 # via redis -azure-core==1.30.1 +azure-core==1.30.2 # via # azure-data-tables # azure-identity @@ -28,27 +30,27 @@ cachetools==5.3.3 # via # google-auth # pccommon (pccommon/setup.py) -certifi==2024.2.2 +certifi==2024.7.4 # via requests cffi==1.16.0 # via cryptography charset-normalizer==3.3.2 # via requests -cryptography==42.0.5 +cryptography==42.0.8 # via # azure-identity # azure-storage-blob # msal # pyjwt -exceptiongroup==1.2.0 +exceptiongroup==1.2.1 # via anyio -fastapi==0.90.1 +fastapi-slim==0.111.0 # via pccommon (pccommon/setup.py) -google-api-core==2.18.0 +google-api-core==2.19.0 # via opencensus -google-auth==2.29.0 +google-auth==2.30.0 # via google-api-core -googleapis-common-protos==1.63.0 +googleapis-common-protos==1.63.1 # via google-api-core html-sanitizer==2.4.4 # via pccommon (pccommon/setup.py) @@ -62,7 +64,7 @@ isodate==0.6.1 # via # azure-data-tables # azure-storage-blob -lxml==5.2.1 +lxml==5.2.2 # via # html-sanitizer # lxml-html-clean @@ -70,11 +72,11 @@ lxml-html-clean==0.1.0 # via # html-sanitizer # pccommon (pccommon/setup.py) -msal==1.28.0 +msal==1.28.1 # via # azure-identity # msal-extensions -msal-extensions==0.3.1 +msal-extensions==1.1.0 # via azure-identity multidict==6.0.5 # via yarl @@ -88,8 +90,10 @@ opencensus-ext-azure==1.1.13 # via pccommon (pccommon/setup.py) opencensus-ext-logging==0.1.1 # via pccommon (pccommon/setup.py) -orjson==3.10.4 +orjson==3.10.5 # via pccommon (pccommon/setup.py) +packaging==24.1 + # via msal-extensions portalocker==2.8.2 # via msal-extensions proto-plus==1.23.0 @@ -101,22 +105,29 @@ protobuf==4.25.3 # proto-plus psutil==5.9.8 
# via opencensus-ext-azure -pyasn1==0.5.1 +pyasn1==0.6.0 # via # pyasn1-modules # rsa -pyasn1-modules==0.3.0 +pyasn1-modules==0.4.0 # via google-auth -pycparser==2.21 +pycparser==2.22 # via cffi -pydantic==1.10.14 +pydantic==2.7.4 # via - # fastapi + # fastapi-slim # pccommon (pccommon/setup.py) + # pydantic-settings +pydantic-core==2.18.4 + # via pydantic +pydantic-settings==2.3.3 + # via pccommon (pccommon/setup.py) pyhumps==3.5.3 # via pccommon (pccommon/setup.py) pyjwt[crypto]==2.8.0 # via msal +python-dotenv==1.0.1 + # via pydantic-settings redis==4.6.0 # via pccommon (pccommon/setup.py) requests==2.32.3 @@ -137,19 +148,21 @@ sniffio==1.3.1 # via anyio soupsieve==2.5 # via beautifulsoup4 -starlette==0.22.0 +starlette==0.37.2 # via - # fastapi + # fastapi-slim # pccommon (pccommon/setup.py) types-cachetools==4.2.9 # via pccommon (pccommon/setup.py) -typing-extensions==4.10.0 +typing-extensions==4.12.2 # via # anyio # azure-core # azure-data-tables # azure-storage-blob + # fastapi-slim # pydantic + # pydantic-core # starlette urllib3==2.2.2 # via diff --git a/pccommon/setup.py b/pccommon/setup.py index 71d596ea..66e771f0 100644 --- a/pccommon/setup.py +++ b/pccommon/setup.py @@ -4,15 +4,16 @@ # Runtime requirements. 
inst_reqs = [ - "fastapi==0.90.1", - "starlette>=0.22.0,<0.23.0", + "fastapi-slim==0.111.0", + "starlette>=0.37.2,<0.38.0", "opencensus-ext-azure==1.1.13", "opencensus-ext-logging==0.1.1", "orjson>=3.10.4", "azure-identity==1.16.1", "azure-data-tables==12.5.0", "azure-storage-blob>=12.20.0", - "pydantic>=1.10, <2.0.0", + "pydantic>=2.7,<2.8.0", + "pydantic-settings>=2.3,<2.4", "cachetools~=5.3", "types-cachetools==4.2.9", "pyhumps==3.5.3", @@ -22,12 +23,11 @@ "html-sanitizer==2.4.4", # Soon available as lxml[html_clean] "lxml_html_clean==0.1.0", - "urllib3>=1.26.18", + "urllib3>=2.2.2", ] extra_reqs = { - "test": ["pytest", "pytest-asyncio", "types-redis", "types-requests"], - "dev": ["pytest", "pytest-asyncio", "types-redis"], + "dev": ["pytest", "pytest-asyncio", "types-redis", "types-requests"], } setup( diff --git a/pccommon/tests/config/test_mosaic_info.py b/pccommon/tests/config/test_mosaic_info.py index c3c427e1..3eebdcdf 100644 --- a/pccommon/tests/config/test_mosaic_info.py +++ b/pccommon/tests/config/test_mosaic_info.py @@ -38,8 +38,8 @@ def test_parse() -> None: ], "defaultLocation": {"zoom": 8, "coordinates": [47.1113, -120.8578]}, } - model = MosaicInfo.parse_obj(d) - serialized = model.dict(by_alias=True, exclude_unset=True) + model = MosaicInfo.model_validate(d) + serialized = model.model_dump(by_alias=True, exclude_unset=True) assert d == serialized @@ -122,7 +122,7 @@ def test_parse_with_legend() -> None: "defaultLocation": {"zoom": 10, "coordinates": [24.21647, 91.015209]}, } - model = MosaicInfo.parse_obj(d) - serialized = model.dict(by_alias=True, exclude_unset=True) + model = MosaicInfo.model_validate(d) + serialized = model.model_dump(by_alias=True, exclude_unset=True) assert d == serialized diff --git a/pccommon/tests/config/test_render_config.py b/pccommon/tests/config/test_render_config.py index 6cd34240..43ee6698 100644 --- a/pccommon/tests/config/test_render_config.py +++ b/pccommon/tests/config/test_render_config.py @@ -74,12 +74,12 @@ 
def test_get_render_config() -> None: def test_render_config_parse_max_items() -> None: config = { - "render_params": [], + "render_params": {}, "minzoom": 8, "max_items_per_tile": 10, } - parsed = DefaultRenderConfig.parse_obj(config) + parsed = DefaultRenderConfig.model_validate(config) assert parsed.max_items_per_tile == config["max_items_per_tile"] diff --git a/pccommon/tests/test_timeouts.py b/pccommon/tests/test_timeouts.py index b32ce056..a357c0d4 100644 --- a/pccommon/tests/test_timeouts.py +++ b/pccommon/tests/test_timeouts.py @@ -3,9 +3,7 @@ import pytest from fastapi import FastAPI - -# from fastapi.responses import PlainTextResponse -from httpx import AsyncClient +from httpx import ASGITransport, AsyncClient from starlette.status import HTTP_504_GATEWAY_TIMEOUT from pccommon.middleware import add_timeout @@ -13,29 +11,28 @@ TIMEOUT_SECONDS = 2 BASE_URL = "http://test" -# Setup test app and endpoints to test middleware on -# ================================== - -app = FastAPI() -app.state.service_name = "test" - - -@app.get("/asleep") -async def asleep() -> Any: - await asyncio.sleep(1) - return {} - -# Run this after registering the routes +@pytest.mark.asyncio +async def test_add_timeout() -> None: -add_timeout(app, timeout_seconds=0.001) + # Setup test app and endpoints to test middleware on + # ================================== + app = FastAPI() + app.state.service_name = "test" -@pytest.mark.asyncio -async def test_add_timeout() -> None: + @app.get("/asleep") + async def asleep() -> Any: + await asyncio.sleep(1) + return {} - client = AsyncClient(app=app, base_url=BASE_URL) + # Run this after registering the routes + add_timeout(app, timeout_seconds=0.001) - response = await client.get("/asleep") + async with AsyncClient( + transport=ASGITransport(app=app), # type: ignore + base_url=BASE_URL, + ) as client: + response = await client.get("/asleep") assert response.status_code == HTTP_504_GATEWAY_TIMEOUT diff --git a/pcfuncs/Dockerfile 
b/pcfuncs/Dockerfile index 323859d3..888f90ce 100644 --- a/pcfuncs/Dockerfile +++ b/pcfuncs/Dockerfile @@ -1,4 +1,4 @@ -FROM mcr.microsoft.com/azure-functions/python:4-python3.8 +FROM mcr.microsoft.com/azure-functions/python:4-python3.10 # git required for pip installs from git RUN apt update && apt install -y git diff --git a/pcfuncs/animation/models.py b/pcfuncs/animation/models.py index 28b8a683..39290cda 100644 --- a/pcfuncs/animation/models.py +++ b/pcfuncs/animation/models.py @@ -3,7 +3,7 @@ from dateutil.relativedelta import relativedelta from funclib.models import RenderOptions -from pydantic import BaseModel, Field, validator +from pydantic import BaseModel, Field, field_validator from .constants import MAX_FRAMES @@ -44,12 +44,12 @@ class AnimationRequest(BaseModel): data_api_url: Optional[str] = None """Override for the data API URL. Useful for testing.""" - @validator("render_params") + @field_validator("render_params") def _validate_render_params(cls, v: str) -> str: RenderOptions.from_query_params(v) return v - @validator("unit") + @field_validator("unit") def _validate_unit(cls, v: str) -> str: if v not in _deltas: raise ValueError( diff --git a/pcfuncs/animation/settings.py b/pcfuncs/animation/settings.py index e9d99d00..7edd2aa4 100644 --- a/pcfuncs/animation/settings.py +++ b/pcfuncs/animation/settings.py @@ -18,9 +18,10 @@ class AnimationSettings(BaseExporterSettings): output_storage_url: str = DEFAULT_ANIMATION_CONTAINER_URL tile_request_concurrency: int = DEFAULT_CONCURRENCY - class Config: - env_prefix = ANIMATION_SETTINGS_PREFIX - env_nested_delimiter = "__" + model_config = { + "env_prefix": ANIMATION_SETTINGS_PREFIX, + "env_nested_delimiter": "__", # type: ignore + } @classmethod @cachedmethod(lambda cls: cls._cache) diff --git a/pcfuncs/funclib/models.py b/pcfuncs/funclib/models.py index 9afeaeba..978d45c7 100644 --- a/pcfuncs/funclib/models.py +++ b/pcfuncs/funclib/models.py @@ -78,7 +78,7 @@ class RenderOptions(BaseModel): @property def 
encoded_query_string(self) -> str: - options = self.dict( + options = self.model_dump( exclude_defaults=True, exclude_none=True, exclude_unset=True ) encoded_options: List[str] = [] diff --git a/pcfuncs/funclib/settings.py b/pcfuncs/funclib/settings.py index b4a44e23..c34136a1 100644 --- a/pcfuncs/funclib/settings.py +++ b/pcfuncs/funclib/settings.py @@ -2,7 +2,7 @@ from typing import Optional from azure.storage.blob import ContainerClient -from pydantic import BaseSettings +from pydantic_settings import BaseSettings from pccommon.blob import get_container_client diff --git a/pcfuncs/image/__init__.py b/pcfuncs/image/__init__.py index b18c933b..0c692db8 100644 --- a/pcfuncs/image/__init__.py +++ b/pcfuncs/image/__init__.py @@ -9,7 +9,7 @@ from pydantic import ValidationError from .models import ImageRequest, ImageResponse -from .settings import ImageSettings +from .settings import get_settings from .utils import get_min_zoom, upload_image logger = logging.getLogger(__name__) @@ -62,7 +62,7 @@ async def main(req: func.HttpRequest) -> func.HttpResponse: async def handle_request(req: ImageRequest) -> ImageResponse: - settings = ImageSettings.get() + settings = get_settings() geom = req.get_geometry() bbox = Bbox.from_geom(geom) render_options = req.get_render_options() diff --git a/pcfuncs/image/models.py b/pcfuncs/image/models.py index 7079ff16..4bc8c6a3 100644 --- a/pcfuncs/image/models.py +++ b/pcfuncs/image/models.py @@ -2,9 +2,9 @@ from funclib.models import RenderOptions from funclib.raster import ExportFormats -from pydantic import BaseModel, Field, validator +from pydantic import BaseModel, Field, ValidationInfo, field_validator -from .settings import ImageSettings +from .settings import get_settings from .utils import get_geom_from_cql @@ -53,12 +53,14 @@ def get_geometry(self) -> Dict[str, Any]: def get_render_options(self) -> RenderOptions: return RenderOptions.from_query_params(self.render_params) - @validator("geometry") + @field_validator("geometry") def 
_validate_cql( - cls, v: Optional[Dict[str, Any]], values: Dict[str, Any] + cls, + v: Optional[Dict[str, Any]], + info: ValidationInfo, ) -> Dict[str, Any]: if not v: - cql = values["cql"] + cql = info.data["cql"] v = get_geom_from_cql(cql) if not v: raise ValueError( @@ -67,15 +69,15 @@ def _validate_cql( ) return v - @validator("render_params") + @field_validator("render_params") def _validate_render_params(cls, v: str) -> str: RenderOptions.from_query_params(v) return v - @validator("rows") - def _validate_rows(cls, v: int, values: Dict[str, Any]) -> int: - settings = ImageSettings.get() - cols = int(values["cols"]) + @field_validator("rows") + def _validate_rows(cls, v: int, info: ValidationInfo) -> int: + settings = get_settings() + cols = int(info.data["cols"]) if cols * v > settings.max_pixels: raise ValueError( f"Too many pixels requested: {cols * v} > {settings.max_pixels}. " @@ -83,10 +85,10 @@ def _validate_rows(cls, v: int, values: Dict[str, Any]) -> int: ) return v - @validator("show_branding") - def _validate_show_branding(cls, v: bool, values: Dict[str, Any]) -> bool: + @field_validator("show_branding") + def _validate_show_branding(cls, v: bool, info: ValidationInfo) -> bool: if v: - if values["format"] != ExportFormats.PNG: + if info.data["format"] != ExportFormats.PNG: raise ValueError("Branding is only supported for PNG images.") return v diff --git a/pcfuncs/image/settings.py b/pcfuncs/image/settings.py index becebdd5..368881c4 100644 --- a/pcfuncs/image/settings.py +++ b/pcfuncs/image/settings.py @@ -1,13 +1,8 @@ import logging -import os -from typing import Optional -from azure.storage.blob import ContainerClient -from cachetools import Cache, LRUCache, cachedmethod +from cachetools import LRUCache, cached from funclib.settings import BaseExporterSettings -from pccommon.blob import get_container_client - IMAGE_SETTINGS_PREFIX = "IMAGE_" DEFAULT_CONCURRENCY = 10 @@ -15,40 +10,21 @@ class ImageSettings(BaseExporterSettings): - _cache: Cache = 
LRUCache(maxsize=100) - tile_request_concurrency: int = DEFAULT_CONCURRENCY # Maximum tiles to fetch for a single request max_tile_count: int = 144 max_pixels: int = 144 * 512 * 512 - def get_container_client(self) -> ContainerClient: - return get_container_client( - self.output_storage_url, - ) - - def get_register_url(self, data_api_url_override: Optional[str] = None) -> str: - return os.path.join( - data_api_url_override or self.api_root_url, "mosaic/register/" - ) - - def get_mosaic_info_url( - self, collection_id: str, data_api_url_override: Optional[str] = None - ) -> str: - return os.path.join( - data_api_url_override or self.api_root_url, - f"mosaic/info?collection={collection_id}", - ) - - class Config: - env_prefix = IMAGE_SETTINGS_PREFIX - env_nested_delimiter = "__" - - @classmethod - @cachedmethod(lambda cls: cls._cache) - def get(cls) -> "ImageSettings": - settings = ImageSettings() # type: ignore - logger.info(f"API URL: {settings.api_root_url}") - logger.info(f"Concurrency limit: {settings.tile_request_concurrency}") - return settings + model_config = { + "env_prefix": IMAGE_SETTINGS_PREFIX, + "env_nested_delimiter": "__", # type: ignore + } + + +@cached(LRUCache(maxsize=100)) # type: ignore +def get_settings() -> ImageSettings: + settings = ImageSettings() # type: ignore + logger.info(f"API URL: {settings.api_root_url}") + logger.info(f"Concurrency limit: {settings.tile_request_concurrency}") + return settings diff --git a/pcfuncs/image/utils.py b/pcfuncs/image/utils.py index 9680cd13..f1e027b5 100644 --- a/pcfuncs/image/utils.py +++ b/pcfuncs/image/utils.py @@ -7,13 +7,13 @@ import aiohttp from funclib.models import RenderOptions -from .settings import ImageSettings +from .settings import get_settings async def get_min_zoom( collection_id: str, data_api_url_override: Optional[str] = None ) -> Optional[int]: - settings = ImageSettings.get() + settings = get_settings() async with aiohttp.ClientSession() as session: resp = await session.get( 
settings.get_mosaic_info_url(collection_id, data_api_url_override) @@ -32,7 +32,7 @@ async def get_min_zoom( def upload_image(gif: io.BytesIO, collection_name: str) -> str: - settings = ImageSettings.get() + settings = get_settings() filename = f"mspc-{collection_name}-{uuid4().hex}.png" blob_url = os.path.join(settings.output_storage_url, filename) with settings.get_container_client() as container_client: @@ -108,7 +108,7 @@ async def register_search_and_get_tile_url( render_options: RenderOptions, data_api_url_override: Optional[str] = None, ) -> str: - settings = ImageSettings.get() + settings = get_settings() register_url = settings.get_register_url(data_api_url_override) async with aiohttp.ClientSession() as session: diff --git a/pcfuncs/ipban/config.py b/pcfuncs/ipban/config.py index 4261461d..63e5c054 100644 --- a/pcfuncs/ipban/config.py +++ b/pcfuncs/ipban/config.py @@ -1,17 +1,16 @@ # config.py -from pydantic import BaseSettings, Field +from pydantic import Field +from pydantic_settings import BaseSettings class Settings(BaseSettings): - storage_account_url: str = Field(env="STORAGE_ACCOUNT_URL") - banned_ip_table: str = Field(env="BANNED_IP_TABLE") - log_analytics_workspace_id: str = Field(env="LOG_ANALYTICS_WORKSPACE_ID") + storage_account_url: str + banned_ip_table: str + log_analytics_workspace_id: str # Time and threshold settings - time_window_in_hours: int = Field(default=24, env="TIME_WINDOW_IN_HOURS") - threshold_read_count_in_gb: int = Field( - default=5120, env="THRESHOLD_READ_COUNT_IN_GB" - ) + time_window_in_hours: int = Field(default=24) + threshold_read_count_in_gb: int = Field(default=5120) # Create a global settings instance diff --git a/pcfuncs/requirements-deploy.txt b/pcfuncs/requirements-deploy.txt index 38dfa8a3..0dc9275c 100644 --- a/pcfuncs/requirements-deploy.txt +++ b/pcfuncs/requirements-deploy.txt @@ -12,7 +12,7 @@ dateutils==0.6.12 mercantile==1.2.1 pillow==10.3.0 pyproj==3.3.1 -pydantic>=1.9,<2.0.0 +pydantic>=2.7,<2.8 
rasterio==1.3.* azure-monitor-query==1.3.0 pytest-mock==3.14.0 diff --git a/pcfuncs/requirements.txt b/pcfuncs/requirements.txt index fdaab225..f0053bf0 100644 --- a/pcfuncs/requirements.txt +++ b/pcfuncs/requirements.txt @@ -12,7 +12,7 @@ dateutils==0.6.12 mercantile==1.2.1 pillow==10.3.0 pyproj==3.3.1 -pydantic>=1.9,<2.0.0 +pydantic>=2.7,<2.8 rasterio==1.3.* azure-monitor-query==1.3.0 pytest-mock==3.14.0 diff --git a/pcstac/pcstac/client.py b/pcstac/pcstac/client.py index 017f6034..3568692f 100644 --- a/pcstac/pcstac/client.py +++ b/pcstac/pcstac/client.py @@ -45,18 +45,6 @@ class PCClient(CoreCrudClient): extra_conformance_classes: List[str] = attr.ib(factory=list) - def conformance_classes(self) -> List[str]: - """Generate conformance classes list.""" - base_conformance_classes = self.base_conformance_classes.copy() - - for extension in self.extensions: - extension_classes = getattr(extension, "conformance_classes", []) - base_conformance_classes.extend(extension_classes) - - base_conformance_classes.extend(self.extra_conformance_classes) - - return sorted(list(set(base_conformance_classes))) - def inject_collection_extras( self, collection: Collection, @@ -227,7 +215,7 @@ async def _fetch() -> ItemCollection: ) return item_collection - search_json = search_request.json() + search_json = search_request.model_dump_json() add_stac_attributes_from_search(search_json, request) logger.info( diff --git a/pcstac/pcstac/config.py b/pcstac/pcstac/config.py index e994042d..6bc23e28 100644 --- a/pcstac/pcstac/config.py +++ b/pcstac/pcstac/config.py @@ -2,7 +2,8 @@ from urllib.parse import urljoin from fastapi import Request -from pydantic import BaseModel, BaseSettings, Field +from pydantic import BaseModel, Field +from pydantic_settings import BaseSettings from stac_fastapi.extensions.core import ( FieldsExtension, FilterExtension, @@ -88,17 +89,38 @@ class Settings(BaseSettings): version of application """ - api = PCAPIsConfig.from_environment() + api: PCAPIsConfig = 
PCAPIsConfig.from_environment() debug: bool = False - tiler_href: str = Field(env=TILER_HREF_ENV_VAR, default="") - db_max_conn_size: int = Field(env=DB_MAX_CONN_ENV_VAR, default=1) - db_min_conn_size: int = Field(env=DB_MIN_CONN_ENV_VAR, default=1) + tiler_href: str = Field( + default="", + validation_alias=TILER_HREF_ENV_VAR, + ) + db_max_conn_size: int = Field( + default=1, + validation_alias=DB_MAX_CONN_ENV_VAR, + ) + db_min_conn_size: int = Field( + default=1, + validation_alias=DB_MIN_CONN_ENV_VAR, + ) openapi_url: str = "/openapi.json" api_version: str = f"v{API_VERSION}" rate_limits: RateLimits = RateLimits() back_pressures: BackPressures = BackPressures() - request_timeout: int = Field(env=REQUEST_TIMEOUT_ENV_VAR, default=30) + request_timeout: int = Field( + default=30, + validation_alias=REQUEST_TIMEOUT_ENV_VAR, + ) + + model_config = { + "env_prefix": ENV_VAR_PCAPIS_PREFIX, + "env_nested_delimiter": "__", + # Mypy is complaining about this with + # error: Incompatible types (expression has type "str", + # TypedDict item "extra" has type "Extra") + "extra": "ignore", # type: ignore + } def get_tiler_href(self, request: Request) -> str: """Generates the tiler HREF. 
@@ -113,11 +135,6 @@ def get_tiler_href(self, request: Request) -> str: else: return self.tiler_href - class Config: - env_prefix = ENV_VAR_PCAPIS_PREFIX - extra = "ignore" - env_nested_delimiter = "__" - @lru_cache def get_settings() -> Settings: diff --git a/pcstac/pcstac/filter.py b/pcstac/pcstac/filter.py index 11a9f111..67147bb1 100644 --- a/pcstac/pcstac/filter.py +++ b/pcstac/pcstac/filter.py @@ -2,7 +2,6 @@ from buildpg import render from fastapi import Request -from fastapi.responses import JSONResponse from stac_fastapi.pgstac.extensions.filter import FiltersClient from stac_fastapi.types.errors import NotFoundError @@ -13,7 +12,7 @@ class PCFiltersClient(FiltersClient): async def get_queryables( self, request: Request, collection_id: Optional[str] = None, **kwargs: Any - ) -> JSONResponse: + ) -> Dict[str, Any]: """Override pgstac backend get_queryables to make use of cached results""" async def _fetch() -> Dict: @@ -34,6 +33,4 @@ async def _fetch() -> Dict: return queryables cache_key = f"{CACHE_KEY_QUERYABLES}:{collection_id}" - queryables = await cached_result(_fetch, cache_key, request) - headers = {"Content-Type": "application/schema+json"} - return JSONResponse(queryables, headers=headers) + return await cached_result(_fetch, cache_key, request) diff --git a/pcstac/pcstac/main.py b/pcstac/pcstac/main.py index ad6fc55d..b935cf08 100644 --- a/pcstac/pcstac/main.py +++ b/pcstac/pcstac/main.py @@ -2,16 +2,26 @@ import logging import os -from typing import Any, Dict +from contextlib import asynccontextmanager +from typing import Any, AsyncGenerator, Dict +from brotli_asgi import BrotliMiddleware from fastapi import FastAPI, Request from fastapi.exceptions import RequestValidationError, StarletteHTTPException from fastapi.openapi.utils import get_openapi from fastapi.responses import ORJSONResponse from stac_fastapi.api.errors import DEFAULT_STATUS_CODES -from stac_fastapi.api.models import create_get_request_model, create_post_request_model +from 
stac_fastapi.api.middleware import ProxyHeaderMiddleware +from stac_fastapi.api.models import ( + create_get_request_model, + create_post_request_model, + create_request_model, +) +from stac_fastapi.extensions.core import TokenPaginationExtension from stac_fastapi.pgstac.config import Settings from stac_fastapi.pgstac.db import close_db_connection, connect_to_db +from stac_fastapi.types.search import APIRequest +from starlette.middleware import Middleware from starlette.middleware.cors import CORSMiddleware from starlette.responses import PlainTextResponse @@ -29,16 +39,19 @@ get_settings, ) from pcstac.errors import PC_DEFAULT_STATUS_CODES -from pcstac.search import PCSearch, PCSearchGetRequest, RedisBaseItemCache +from pcstac.search import ( + PCItemCollectionUri, + PCSearch, + PCSearchGetRequest, + RedisBaseItemCache, +) DEBUG: bool = os.getenv("DEBUG") == "TRUE" or False +APP_ROOT_PATH = os.environ.get("APP_ROOT_PATH", "") # Initialize logging -init_logging(ServiceName.STAC) +init_logging(ServiceName.STAC, APP_ROOT_PATH) logger = logging.getLogger(__name__) - -# Get the root path if set in the environment -APP_ROOT_PATH = os.environ.get("APP_ROOT_PATH", "") logger.info(f"APP_ROOT_PATH: {APP_ROOT_PATH}") hydrate_mode_label = os.environ.get("USE_API_HYDRATE", "False") @@ -46,11 +59,30 @@ app_settings = get_settings() +items_get_request_model: APIRequest = PCItemCollectionUri +if any(isinstance(ext, TokenPaginationExtension) for ext in EXTENSIONS): + items_get_request_model = create_request_model( + model_name="ItemCollectionUri", + base_model=PCItemCollectionUri, + mixins=[TokenPaginationExtension().GET], + request_type="GET", + ) + search_get_request_model = create_get_request_model( EXTENSIONS, base_model=PCSearchGetRequest ) search_post_request_model = create_post_request_model(EXTENSIONS, base_model=PCSearch) + +@asynccontextmanager +async def lifespan(app: FastAPI) -> AsyncGenerator: + """FastAPI Lifespan.""" + await connect_to_db(app) + await 
connect_to_redis(app) + yield + await close_db_connection(app) + + api = PCStacApi( title=API_TITLE, description=API_DESCRIPTION, @@ -63,11 +95,29 @@ ), client=PCClient.create(post_request_model=search_post_request_model), extensions=EXTENSIONS, - app=FastAPI(root_path=APP_ROOT_PATH, default_response_class=ORJSONResponse), + app=FastAPI( + root_path=APP_ROOT_PATH, + default_response_class=ORJSONResponse, + lifespan=lifespan, + ), + items_get_request_model=items_get_request_model, search_get_request_model=search_get_request_model, search_post_request_model=search_post_request_model, response_class=ORJSONResponse, exceptions={**DEFAULT_STATUS_CODES, **PC_DEFAULT_STATUS_CODES}, + middlewares=[ + Middleware(BrotliMiddleware), + Middleware(ProxyHeaderMiddleware), + Middleware(TraceMiddleware, service_name=ServiceName.STAC), + # Note: If requests are being sent through an application gateway like + # nginx-ingress, you may need to configure CORS through that system. + Middleware( + CORSMiddleware, + allow_origins=["*"], + allow_methods=["GET", "POST"], + allow_headers=["*"], + ), + ], ) app: FastAPI = api.app @@ -76,31 +126,6 @@ add_timeout(app, app_settings.request_timeout) -app.add_middleware(TraceMiddleware, service_name=app.state.service_name) - -# Note: If requests are being sent through an application gateway like -# nginx-ingress, you may need to configure CORS through that system. 
-app.add_middleware( - CORSMiddleware, - allow_origins=["*"], - allow_methods=["GET", "POST"], - allow_headers=["*"], -) - - -@app.on_event("startup") -async def startup_event() -> None: - """Connect to database on startup.""" - await connect_to_db(app) - await connect_to_redis(app) - - -@app.on_event("shutdown") -async def shutdown_event() -> None: - """Close database connection.""" - await close_db_connection(app) - - app.add_exception_handler(Exception, http_exception_handler) diff --git a/pcstac/pcstac/search.py b/pcstac/pcstac/search.py index 931866c7..ce71ba39 100644 --- a/pcstac/pcstac/search.py +++ b/pcstac/pcstac/search.py @@ -1,79 +1,44 @@ import logging -from typing import Any, Callable, Coroutine, Dict, List, Optional, Union +import re +from typing import Any, Callable, Coroutine, Dict, Optional import attr -from geojson_pydantic.geometries import ( - GeometryCollection, - LineString, - MultiLineString, - MultiPoint, - MultiPolygon, - Point, - Polygon, -) -from pydantic import validator -from pydantic.types import conint -from pystac.utils import str_to_datetime +from fastapi import Query +from pydantic import Field, field_validator from stac_fastapi.api.models import BaseSearchGetRequest, ItemCollectionUri from stac_fastapi.pgstac.types.base_item_cache import BaseItemCache from stac_fastapi.pgstac.types.search import PgstacSearch +from stac_fastapi.types.rfc3339 import DateTimeType, str_to_interval from starlette.requests import Request +from typing_extensions import Annotated from pccommon.redis import cached_result from pcstac.contants import CACHE_KEY_BASE_ITEM -DEFAULT_LIMIT = 250 +DEFAULT_LIMIT: int = 250 +LEGACY_ITEM_DEFAULT_LIMIT: int = 10 logger = logging.getLogger(__name__) +def _patch_datetime(value: str) -> str: + values = value.split("/") + for ix, v in enumerate(values): + if re.match(r"^(\d\d\d\d)\-(\d\d)\-(\d\d)$", v): + values[ix] = f"{v}T00:00:00Z" + return "/".join(values) + + class PCSearch(PgstacSearch): # Increase the default limit 
for performance # Ignore "Illegal type annotation: call expression not allowed" - limit: Optional[conint(ge=1, le=1000)] = DEFAULT_LIMIT # type:ignore - - # Can be removed when - # https://github.com/stac-utils/stac-fastapi/issues/187 is closed - intersects: Optional[ - Union[ - Point, - MultiPoint, - LineString, - MultiLineString, - Polygon, - MultiPolygon, - GeometryCollection, - ] - ] - - @validator("datetime") - def validate_datetime(cls, v: str) -> str: - """Validate datetime. - - Custom to allow for users to supply dates only. - """ - if "/" in v: - values = v.split("/") - else: - # Single date is interpreted as end date - values = ["..", v] - - dates: List[str] = [] - for value in values: - if value == "..": - dates.append(value) - continue - - str_to_datetime(value) - dates.append(value) - - if ".." not in dates: - if str_to_datetime(dates[0]) > str_to_datetime(dates[1]): - raise ValueError( - "Invalid datetime range, must match format (begin_date, end_date)" - ) - - return v + limit: Annotated[Optional[int], Field(strict=True, ge=1, le=1000)] = DEFAULT_LIMIT + + @field_validator("datetime", mode="before") + @classmethod + def validate_datetime_before(cls, value: str) -> str: + """Add HH-MM-SS and Z to YYYY-MM-DD datetime.""" + return _patch_datetime(value) class RedisBaseItemCache(BaseItemCache): @@ -106,9 +71,21 @@ async def _fetch() -> Dict[str, Any]: @attr.s class PCItemCollectionUri(ItemCollectionUri): - limit: Optional[int] = attr.ib(default=DEFAULT_LIMIT) # type:ignore + limit: Annotated[Optional[int], Query()] = attr.ib( + default=LEGACY_ITEM_DEFAULT_LIMIT + ) + + +def patch_and_convert(interval: Optional[str]) -> Optional[DateTimeType]: + """Patch datetime to add hh-mm-ss and timezone info.""" + if interval: + interval = _patch_datetime(interval) + return str_to_interval(interval) @attr.s class PCSearchGetRequest(BaseSearchGetRequest): - limit: Optional[int] = attr.ib(default=DEFAULT_LIMIT) # type:ignore + datetime: 
Annotated[Optional[DateTimeType], Query()] = attr.ib( + default=None, converter=patch_and_convert + ) + limit: Annotated[Optional[int], Query()] = attr.ib(default=DEFAULT_LIMIT) diff --git a/pcstac/requirements-server.txt b/pcstac/requirements-server.txt index 3be02356..8dd3dbef 100644 --- a/pcstac/requirements-server.txt +++ b/pcstac/requirements-server.txt @@ -4,7 +4,9 @@ # # pip-compile --extra=server --output-file=pcstac/requirements-server.txt ./pcstac/setup.py # -anyio==3.7.1 +annotated-types==0.7.0 + # via pydantic +anyio==4.4.0 # via # starlette # watchfiles @@ -14,8 +16,6 @@ asyncpg==0.29.0 # via stac-fastapi-pgstac attrs==23.2.0 # via - # stac-fastapi-api - # stac-fastapi-extensions # stac-fastapi-pgstac # stac-fastapi-types brotli==1.1.0 @@ -34,13 +34,13 @@ click==8.1.7 # uvicorn dateparser==1.2.0 # via pygeofilter -exceptiongroup==1.2.0 +exceptiongroup==1.2.1 # via anyio -fastapi==0.110.0 +fastapi-slim==0.111.0 # via stac-fastapi-types fire==0.4.0 # via pypgstac -geojson-pydantic==0.6.3 +geojson-pydantic==1.1.0 # via stac-pydantic h11==0.14.0 # via uvicorn @@ -50,7 +50,7 @@ idna==3.7 # via # anyio # pcstac (pcstac/setup.py) -iso8601==1.1.0 +iso8601==2.1.0 # via stac-fastapi-types lark==0.12.0 # via pygeofilter @@ -67,28 +67,28 @@ psycopg-binary==3.1.18 # via psycopg psycopg-pool==3.1.9 # via pypgstac -pydantic[dotenv]==1.10.14 +pydantic==2.7.4 # via - # fastapi + # fastapi-slim # geojson-pydantic + # pydantic-settings # pypgstac - # stac-fastapi-api - # stac-fastapi-extensions # stac-fastapi-pgstac - # stac-fastapi-types # stac-pydantic +pydantic-core==2.18.4 + # via pydantic +pydantic-settings==2.3.3 + # via stac-fastapi-types pygeofilter==0.2.1 # via stac-fastapi-pgstac -pygeoif==1.4.0 +pygeoif==1.5.0 # via pygeofilter -pypgstac[psycopg]==0.7.10 +pypgstac[psycopg]==0.8.6 # via # pcstac (pcstac/setup.py) # stac-fastapi-pgstac pystac==1.10.1 - # via - # pcstac (pcstac/setup.py) - # stac-fastapi-types + # via pcstac (pcstac/setup.py) 
python-dateutil==2.8.2 # via # dateparser @@ -96,64 +96,67 @@ python-dateutil==2.8.2 # pystac python-dotenv==1.0.1 # via - # pydantic + # pydantic-settings # uvicorn pytz==2024.1 # via dateparser pyyaml==6.0.1 # via uvicorn -regex==2023.12.25 +regex==2024.5.15 # via dateparser six==1.16.0 # via # fire # python-dateutil -smart-open==6.4.0 +smart-open==7.0.4 # via pypgstac sniffio==1.3.1 # via anyio -stac-fastapi-api==2.4.8 +stac-fastapi-api==3.0.0b2 # via # pcstac (pcstac/setup.py) # stac-fastapi-extensions # stac-fastapi-pgstac -stac-fastapi-extensions==2.4.8 +stac-fastapi-extensions==3.0.0b2 # via # pcstac (pcstac/setup.py) # stac-fastapi-pgstac -stac-fastapi-pgstac==2.4.9 +stac-fastapi-pgstac==3.0.0a4 # via pcstac (pcstac/setup.py) -stac-fastapi-types==2.4.8 +stac-fastapi-types==3.0.0b2 # via # pcstac (pcstac/setup.py) # stac-fastapi-api # stac-fastapi-extensions # stac-fastapi-pgstac -stac-pydantic==2.0.3 +stac-pydantic==3.1.0 # via - # stac-fastapi-api - # stac-fastapi-extensions # stac-fastapi-pgstac # stac-fastapi-types -starlette==0.36.3 +starlette==0.37.2 # via # brotli-asgi - # fastapi + # fastapi-slim tenacity==8.1.0 # via pypgstac termcolor==2.4.0 # via fire -typing-extensions==4.10.0 +typing-extensions==4.12.2 # via - # fastapi + # anyio + # fastapi-slim + # pcstac (pcstac/setup.py) # psycopg # psycopg-pool # pydantic + # pydantic-core # pygeoif # starlette # uvicorn tzlocal==5.2 # via dateparser +urllib3==2.2.2 + # via pcstac (pcstac/setup.py) uvicorn[standard]==0.30.1 # via pcstac (pcstac/setup.py) uvloop==0.19.0 @@ -164,3 +167,5 @@ watchfiles==0.22.0 # via uvicorn websockets==12.0 # via uvicorn +wrapt==1.16.0 + # via smart-open diff --git a/pcstac/setup.py b/pcstac/setup.py index 11cd128b..149354b2 100644 --- a/pcstac/setup.py +++ b/pcstac/setup.py @@ -5,14 +5,16 @@ # Runtime requirements. 
inst_reqs = [ "idna>=3.7.0", - "stac-fastapi.api==2.4.8", - "stac-fastapi.extensions==2.4.8", - "stac-fastapi.pgstac==2.4.9", - "stac-fastapi.types==2.4.8", + "stac-fastapi.api==3.0.0b2", + "stac-fastapi.extensions==3.0.0b2", + "stac-fastapi.pgstac==3.0.0a4", + "stac-fastapi.types==3.0.0b2", "orjson==3.10.4", # Required due to some imports related to pypgstac CLI usage in startup script - "pypgstac[psycopg]>=0.7.10,<0.8", + "pypgstac[psycopg]>=0.8.5,<0.9", "pystac==1.10.1", + "typing_extensions>=4.6.1", + "urllib3>=2.2.2", ] extra_reqs = { diff --git a/pcstac/tests/conftest.py b/pcstac/tests/conftest.py index b76f64a1..9da1c526 100644 --- a/pcstac/tests/conftest.py +++ b/pcstac/tests/conftest.py @@ -8,19 +8,25 @@ import pytest from fastapi import FastAPI from fastapi.responses import ORJSONResponse -from httpx import AsyncClient +from httpx import ASGITransport, AsyncClient from pypgstac.db import PgstacDB from pypgstac.migrate import Migrate -from stac_fastapi.api.models import create_get_request_model, create_post_request_model +from stac_fastapi.api.models import ( + create_get_request_model, + create_post_request_model, + create_request_model, +) +from stac_fastapi.extensions.core import TokenPaginationExtension from stac_fastapi.pgstac.config import Settings from stac_fastapi.pgstac.db import close_db_connection, connect_to_db +from stac_fastapi.types.search import APIRequest from pccommon.logging import ServiceName from pccommon.redis import connect_to_redis from pcstac.api import PCStacApi from pcstac.client import PCClient from pcstac.config import EXTENSIONS, TILER_HREF_ENV_VAR -from pcstac.search import PCSearch, PCSearchGetRequest +from pcstac.search import PCItemCollectionUri, PCSearch, PCSearchGetRequest DATA_DIR = os.path.join(os.path.dirname(__file__), "data") @@ -66,6 +72,15 @@ def api_client(pqe_pg): search_post_request_model = create_post_request_model( EXTENSIONS, base_model=PCSearch ) + items_get_request_model: APIRequest = PCItemCollectionUri + 
if any(isinstance(ext, TokenPaginationExtension) for ext in EXTENSIONS): + items_get_request_model = create_request_model( + model_name="ItemCollectionUri", + base_model=PCItemCollectionUri, + mixins=[TokenPaginationExtension().GET], + request_type="GET", + ) + api = PCStacApi( title="test title", description="test description", @@ -78,6 +93,7 @@ def api_client(pqe_pg): app=FastAPI(root_path="/stac", default_response_class=ORJSONResponse), search_get_request_model=search_get_request_model, search_post_request_model=search_post_request_model, + items_get_request_model=items_get_request_model, ) app: FastAPI = api.app @@ -102,7 +118,9 @@ async def app(api_client) -> AsyncGenerator[FastAPI, None]: @pytest.fixture(scope="session") async def app_client(app) -> AsyncGenerator[AsyncClient, None]: async with AsyncClient( - app=app, base_url="http://test/stac", headers={"X-Forwarded-For": "127.0.0.1"} + transport=ASGITransport(app=app), + base_url="http://test/stac", + headers={"X-Forwarded-For": "127.0.0.1"}, ) as c: yield c diff --git a/pcstac/tests/resources/test_item.py b/pcstac/tests/resources/test_item.py index d0e84df5..5a80fb02 100644 --- a/pcstac/tests/resources/test_item.py +++ b/pcstac/tests/resources/test_item.py @@ -1,17 +1,22 @@ import json -from datetime import datetime, timedelta +from datetime import timedelta from typing import Callable, Dict from urllib.parse import parse_qs, urlparse import pystac import pytest from geojson_pydantic.geometries import Polygon +from pydantic import TypeAdapter from stac_fastapi.pgstac.models.links import CollectionLinks -from stac_pydantic.shared import DATETIME_RFC339 +from stac_pydantic.shared import UtcDatetime from starlette.requests import Request from pcstac.config import get_settings +# Use a TypeAdapter to parse any datetime strings in a consistent manner +UtcDatetimeAdapter = TypeAdapter(UtcDatetime) +DATETIME_RFC339 = "%Y-%m-%dT%H:%M:%S.%fZ" + @pytest.mark.asyncio async def test_get_collection(app_client, 
load_test_data: Callable): @@ -116,7 +121,9 @@ async def test_item_search_temporal_query_post(app_client): assert items_resp.status_code == 200 first_item = items_resp.json()["features"][0] - item_date = datetime.strptime(first_item["properties"]["datetime"], DATETIME_RFC339) + item_date = UtcDatetimeAdapter.validate_strings( + first_item["properties"]["datetime"] + ) item_date = item_date + timedelta(seconds=1) params = { @@ -138,7 +145,9 @@ async def test_item_search_temporal_window_post(app_client): assert items_resp.status_code == 200 first_item = items_resp.json()["features"][0] - item_date = datetime.strptime(first_item["properties"]["datetime"], DATETIME_RFC339) + item_date = UtcDatetimeAdapter.validate_strings( + first_item["properties"]["datetime"] + ) item_date_before = item_date - timedelta(seconds=1) item_date_after = item_date + timedelta(seconds=1) @@ -153,6 +162,31 @@ async def test_item_search_temporal_window_post(app_client): assert resp_json["features"][0]["id"] == first_item["id"] +@pytest.mark.asyncio +async def test_item_search_temporal_window_post_date_only(app_client): + """Test POST search with spatio-temporal query (core)""" + items_resp = await app_client.get("/collections/naip/items") + assert items_resp.status_code == 200 + + first_item = items_resp.json()["features"][0] + item_date = UtcDatetimeAdapter.validate_strings( + first_item["properties"]["datetime"] + ) + item_date_before = item_date - timedelta(days=1) + item_date_after = item_date + timedelta(days=1) + + params = { + "collections": [first_item["collection"]], + "intersects": first_item["geometry"], + "datetime": f"{item_date_before.strftime('%Y-%m-%d')}/" + f"{item_date_after.strftime('%Y-%m-%d')}", + } + resp = await app_client.post("/search", json=params) + assert resp.status_code == 200 + resp_json = resp.json() + assert resp_json["features"][0]["id"] == first_item["id"] + + @pytest.mark.asyncio async def test_item_search_by_id_get(app_client): """Test GET search by item 
id (core)""" @@ -212,7 +246,9 @@ async def test_item_search_temporal_window_get(app_client): assert items_resp.status_code == 200 first_item = items_resp.json()["features"][0] - item_date = datetime.strptime(first_item["properties"]["datetime"], DATETIME_RFC339) + item_date = UtcDatetimeAdapter.validate_strings( + first_item["properties"]["datetime"] + ) item_date_before = item_date - timedelta(seconds=1) item_date_after = item_date + timedelta(seconds=1) @@ -234,7 +270,9 @@ async def test_item_search_temporal_window_get_date_only(app_client): assert items_resp.status_code == 200 first_item = items_resp.json()["features"][0] - item_date = datetime.strptime(first_item["properties"]["datetime"], DATETIME_RFC339) + item_date = UtcDatetimeAdapter.validate_strings( + first_item["properties"]["datetime"] + ) item_date_before = item_date - timedelta(days=1) item_date_after = item_date + timedelta(days=1) @@ -247,10 +285,6 @@ async def test_item_search_temporal_window_get_date_only(app_client): resp = await app_client.get("/search", params=params) assert resp.status_code == 200 resp_json = resp.json() - import json - - print(json.dumps(resp_json, indent=2)) - assert resp_json["features"][0]["id"] == first_item["id"] @@ -454,8 +488,11 @@ async def test_pagination_token_idempotent(app_client): # Construct a search that should return all items, but limit to a few # so that a "next" link is returned page = await app_client.get( - "/search", params={"datetime": "1900-01-01/2030-01-01", "limit": 3} + "/search", + params={"datetime": "1900-01-01T00:00:00Z/2030-01-01T00:00:00Z", "limit": 3}, ) + assert page.status_code == 200 + # Get the next link page_data = page.json() next_link = list(filter(lambda l: l["rel"] == "next", page_data["links"])) @@ -500,7 +537,7 @@ async def test_field_extension_exclude_default_includes(app_client): async def test_search_intersects_and_bbox(app_client): """Test POST search intersects and bbox are mutually exclusive (core)""" bbox = [-118, 34, -117, 
35] - geoj = Polygon.from_bounds(*bbox).dict(exclude_none=True) + geoj = Polygon.from_bounds(*bbox).model_dump(exclude_none=True) params = {"bbox": bbox, "intersects": geoj} resp = await app_client.post("/search", json=params) assert resp.status_code == 400 diff --git a/pcstac/tests/test_rate_limit.py b/pcstac/tests/test_rate_limit.py index 8dc41031..bc5977df 100644 --- a/pcstac/tests/test_rate_limit.py +++ b/pcstac/tests/test_rate_limit.py @@ -2,7 +2,7 @@ import pytest from fastapi import FastAPI -from httpx import AsyncClient +from httpx import ASGITransport, AsyncClient from pccommon.constants import HTTP_429_TOO_MANY_REQUESTS @@ -13,7 +13,9 @@ async def test_rate_limit_collection(app: FastAPI): # set the ip to one that doesn't have the rate limit exception async with AsyncClient( - app=app, base_url="http://test", headers={"X-Forwarded-For": "127.0.0.2"} + transport=ASGITransport(app=app), + base_url="http://test", + headers={"X-Forwarded-For": "127.0.0.2"}, ) as app_client: resp = None for _ in range(0, 400): @@ -40,7 +42,9 @@ async def test_rate_limit_collection_ip_Exception(app_client: AsyncClient): async def test_reregistering_rate_limit_script(app: FastAPI, app_client: AsyncClient): # set the ip to one that doesn't have the rate limit exception async with AsyncClient( - app=app, base_url="http://test", headers={"X-Forwarded-For": "127.0.0.2"} + transport=ASGITransport(app=app), + base_url="http://test", + headers={"X-Forwarded-For": "127.0.0.2"}, ) as app_client: async def _hash_exists(): diff --git a/pctiler/Dockerfile b/pctiler/Dockerfile index 10deaa29..cadb53d0 100644 --- a/pctiler/Dockerfile +++ b/pctiler/Dockerfile @@ -41,7 +41,8 @@ ENV MALLOC_TRIM_THRESHOLD_=0 # TiTiler mosaic config ENV MOSAIC_CONCURRENCY 1 +ENV APP_ROOT_PATH="" ENV APP_HOST=0.0.0.0 ENV APP_PORT=80 -CMD uvicorn pctiler.main:app --host ${APP_HOST} --port ${APP_PORT} --log-level info +CMD uvicorn pctiler.main:app --host ${APP_HOST} --port ${APP_PORT} --root-path ${APP_ROOT_PATH} 
--log-level info diff --git a/pctiler/pctiler/colormaps/__init__.py b/pctiler/pctiler/colormaps/__init__.py index 58436a70..77b2285c 100644 --- a/pctiler/pctiler/colormaps/__init__.py +++ b/pctiler/pctiler/colormaps/__init__.py @@ -1,13 +1,11 @@ -from enum import Enum -from typing import Dict, Optional +from typing import Dict -from fastapi import Query from rio_tiler.colormap import cmap from rio_tiler.types import ColorMapType -from titiler.core.dependencies import ColorMapParams from .alos_palsar_mosaic import alos_palsar_mosaic_colormaps from .chloris import chloris_colormaps +from .dependencies import create_colormap_dependency from .io_bii import io_bii_colormaps from .jrc import jrc_colormaps from .lidarusgs import lidar_colormaps @@ -39,22 +37,11 @@ } for k, v in custom_colormaps.items(): - registered_cmaps = registered_cmaps.register({k: v}) + # rio-tiler 6.6.1 doesn't support upper case cmap names + registered_cmaps = registered_cmaps.register({k.lower(): v}) -PCColorMapNames = Enum( # type: ignore - "ColorMapNames", [(a, a) for a in sorted(registered_cmaps.list())] -) - - -def PCColorMapParams( - colormap_name: PCColorMapNames = Query(None, description="Colormap name"), - colormap: str = Query(None, description="JSON encoded custom Colormap"), -) -> Optional[ColorMapType]: - if colormap_name: - cm = custom_colormaps.get(colormap_name.value) - if cm: - return cm - return ColorMapParams(colormap_name, colormap) +all_cmap_keys = list(custom_colormaps.keys()) + list(cmap.data.keys()) +PCColorMapParams = create_colormap_dependency(registered_cmaps, all_cmap_keys) # Placeholder for non-discrete range colormaps (unsupported) diff --git a/pctiler/pctiler/colormaps/dependencies.py b/pctiler/pctiler/colormaps/dependencies.py new file mode 100644 index 00000000..7308ce11 --- /dev/null +++ b/pctiler/pctiler/colormaps/dependencies.py @@ -0,0 +1,52 @@ +# flake8: noqa + +import json +from typing import Callable, List, Literal, Optional, Sequence, Union + +from fastapi 
import HTTPException, Query +from rio_tiler.colormap import ColorMaps, parse_color +from rio_tiler.types import ColorMapType +from typing_extensions import Annotated + + +# Port of titiler.core.dependencies.create_colormap_dependency (0.18.3) which +# supports case-sensitive keys in QueryParams and the pydantic validation response +def create_colormap_dependency( + cmap: ColorMaps, original_casing_keys: List[str] +) -> Callable: + """Create Colormap Dependency.""" + + def deps( # type: ignore + colormap_name: Annotated[ # type: ignore + Literal[tuple(original_casing_keys)], + Query(description="Colormap name"), + ] = None, + colormap: Annotated[ + Optional[str], Query(description="JSON encoded custom Colormap") + ] = None, + ) -> Union[ColorMapType, None]: + if colormap_name: + return cmap.get(colormap_name.lower()) + + if colormap: + try: + c = json.loads( + colormap, + object_hook=lambda x: { + int(k): parse_color(v) for k, v in x.items() + }, + ) + + # Make sure to match colormap type + if isinstance(c, Sequence): + c = [(tuple(inter), parse_color(v)) for (inter, v) in c] + + return c + except json.JSONDecodeError as e: + raise HTTPException( + status_code=400, detail="Could not parse the colormap value." 
+ ) from e + + return None + + return deps diff --git a/pctiler/pctiler/config.py b/pctiler/pctiler/config.py index 87a89932..d282f791 100644 --- a/pctiler/pctiler/config.py +++ b/pctiler/pctiler/config.py @@ -4,7 +4,8 @@ from urllib.parse import urljoin from fastapi import Request -from pydantic import BaseSettings, Field +from pydantic import Field +from pydantic_settings import BaseSettings # Hostname to fetch STAC information from STAC_API_URL_ENV_VAR = "STAC_API_URL" @@ -39,14 +40,21 @@ class Settings(BaseSettings): mosaic_endpoint_prefix: str = "/mosaic" legend_endpoint_prefix: str = "/legend" vector_tile_endpoint_prefix: str = "/vector" - vector_tile_sa_base_url: str = Field(env=VECTORTILE_SA_BASE_URL_ENV_VAR, default="") + vector_tile_sa_base_url: str = Field( + default="", + validation_alias=VECTORTILE_SA_BASE_URL_ENV_VAR, + ) debug: bool = os.getenv("TILER_DEBUG", "False").lower() == "true" api_version: str = "1.0" default_max_items_per_tile: int = Field( - env=DEFAULT_MAX_ITEMS_PER_TILE_ENV_VAR, default=10 + default=10, + validation_alias=DEFAULT_MAX_ITEMS_PER_TILE_ENV_VAR, + ) + request_timeout: int = Field( + default=30, + validation_alias=REQUEST_TIMEOUT_ENV_VAR, ) - request_timeout: int = Field(env=REQUEST_TIMEOUT_ENV_VAR, default=30) feature_flags: FeatureFlags = FeatureFlags() diff --git a/pctiler/pctiler/endpoints/item.py b/pctiler/pctiler/endpoints/item.py index e9d8ef9e..e82a09fc 100644 --- a/pctiler/pctiler/endpoints/item.py +++ b/pctiler/pctiler/endpoints/item.py @@ -1,11 +1,19 @@ +import logging +from typing import Annotated, Callable, Optional from urllib.parse import quote_plus, urljoin -from fastapi import Query, Request, Response +import fastapi +import pystac +import starlette +from fastapi import Body, Depends, HTTPException, Query, Request, Response from fastapi.templating import Jinja2Templates +from geojson_pydantic.features import Feature from html_sanitizer.sanitizer import Sanitizer from starlette.responses import HTMLResponse 
-from titiler.core.factory import MultiBaseTilerFactory -from titiler.pgstac.dependencies import ItemPathParams # removed in titiler.pgstac 3.0 +from titiler.core.dependencies import CoordCRSParams, DstCRSParams +from titiler.core.factory import MultiBaseTilerFactory, img_endpoint_params +from titiler.core.resources.enums import ImageType +from titiler.pgstac.dependencies import get_stac_item from pccommon.config import get_render_config from pctiler.colormaps import PCColorMapParams @@ -18,6 +26,17 @@ # Try backported to PY<39 `importlib_resources`. from importlib_resources import files as resources_files # type: ignore +logger = logging.getLogger(__name__) + + +def ItemPathParams( + request: Request, + collection: str = Query(..., description="STAC Collection ID"), + item: str = Query(..., description="STAC Item ID"), +) -> pystac.Item: + """STAC Item dependency.""" + return get_stac_item(request.app.state.dbpool, collection, item) + # TODO: mypy fails in python 3.9, we need to find a proper way to do this templates = Jinja2Templates( @@ -65,12 +84,91 @@ def map( ) return templates.TemplateResponse( - "item_preview.html", + request, + name="item_preview.html", context={ - "request": request, "tileJson": tilejson_url, "collectionId": collection_sanitized, "itemId": item_sanitized, "itemUrl": item_url, }, ) + + +@pc_tile_factory.router.post( + r"/crop", + **img_endpoint_params, +) +@pc_tile_factory.router.post( + r"/crop.{format}", + **img_endpoint_params, +) +@pc_tile_factory.router.post( + r"/crop/{width}x{height}.{format}", + **img_endpoint_params, +) +def geojson_crop( # type: ignore + request: fastapi.Request, + geojson: Annotated[ + Feature, Body(description="GeoJSON Feature.") # noqa: F722,E501 + ], + format: Annotated[ + ImageType, + "Default will be automatically defined if the output image needs a mask (png) or not (jpeg).", # noqa: E501,F722 + ] = None, # type: ignore[assignment] + src_path=Depends(pc_tile_factory.path_dependency), + 
coord_crs=Depends(CoordCRSParams), + dst_crs=Depends(DstCRSParams), + layer_params=Depends(pc_tile_factory.layer_dependency), + dataset_params=Depends(pc_tile_factory.dataset_dependency), + image_params=Depends(pc_tile_factory.img_part_dependency), + post_process=Depends(pc_tile_factory.process_dependency), + rescale=Depends(pc_tile_factory.rescale_dependency), + color_formula: Annotated[ + Optional[str], + Query( + title="Color Formula", # noqa: F722 + description="rio-color formula (info: https://github.com/mapbox/rio-color)", # noqa: E501,F722 + ), + ] = None, + colormap=Depends(pc_tile_factory.colormap_dependency), + render_params=Depends(pc_tile_factory.render_dependency), + reader_params=Depends(pc_tile_factory.reader_dependency), + env=Depends(pc_tile_factory.environment_dependency), +) -> Response: + """Create image from a geojson feature.""" + endpoint = get_endpoint_function( + pc_tile_factory.router, path="/feature", method=request.method + ) + result = endpoint( + geojson=geojson, + format=format, + src_path=src_path, + coord_crs=coord_crs, + dst_crs=dst_crs, + layer_params=layer_params, + dataset_params=dataset_params, + image_params=image_params, + post_process=post_process, + rescale=rescale, + color_formula=color_formula, + colormap=colormap, + render_params=render_params, + reader_params=reader_params, + env=env, + ) + return result + + +def get_endpoint_function( + router: fastapi.APIRouter, path: str, method: str +) -> Callable: + for route in router.routes: + match, _ = route.matches({"type": "http", "path": path, "method": method}) + if match == starlette.routing.Match.FULL: + # The abstract BaseRoute doesn't have a `.endpoint` attribute, + # but all of its subclasses do. + return route.endpoint # type: ignore [attr-defined] + + logger.warning(f"Could not find endpoint. 
method={method} path={path}") + raise HTTPException(detail="Internal system error", status_code=500) diff --git a/pctiler/pctiler/endpoints/legend.py b/pctiler/pctiler/endpoints/legend.py index 30cbb81b..fed317e4 100644 --- a/pctiler/pctiler/endpoints/legend.py +++ b/pctiler/pctiler/endpoints/legend.py @@ -1,3 +1,5 @@ +# NOTE: we now have https://developmentseed.org/titiler/endpoints/colormaps/ in titiler + from io import BytesIO from typing import Sequence diff --git a/pctiler/pctiler/endpoints/pg_mosaic.py b/pctiler/pctiler/endpoints/pg_mosaic.py index fd49a42e..4e602adc 100644 --- a/pctiler/pctiler/endpoints/pg_mosaic.py +++ b/pctiler/pctiler/endpoints/pg_mosaic.py @@ -1,9 +1,11 @@ from dataclasses import dataclass, field +from typing import List, Optional -from fastapi import Query, Request +from fastapi import FastAPI, Query, Request from fastapi.responses import ORJSONResponse from psycopg_pool import ConnectionPool from titiler.core import dependencies +from titiler.pgstac.dependencies import SearchIdParams from titiler.pgstac.factory import MosaicTilerFactory from pccommon.config import get_collection_config @@ -34,31 +36,42 @@ def __init__(self, request: Request): pgstac_mosaic_factory = MosaicTilerFactory( reader=PGSTACBackend, + path_dependency=SearchIdParams, colormap_dependency=PCColorMapParams, layer_dependency=AssetsBidxExprParams, reader_dependency=ReaderParams, - router_prefix=get_settings().mosaic_endpoint_prefix, + router_prefix=get_settings().mosaic_endpoint_prefix + "/{search_id}", backend_dependency=BackendParams, - add_map_viewer=False, add_statistics=False, - add_mosaic_list=False, ) -@pgstac_mosaic_factory.router.get( - "/info", response_model=MosaicInfo, response_class=ORJSONResponse -) -def mosaic_info( - request: Request, collection: str = Query(..., description="STAC Collection ID") -) -> ORJSONResponse: - collection_config = get_collection_config(collection) - if not collection_config or not collection_config.mosaic_info: - return 
ORJSONResponse( - status_code=404, - content=f"No mosaic info available for collection {collection}", - ) +def add_collection_mosaic_info_route( + app: FastAPI, + *, + prefix: str = "", + tags: Optional[List[str]] = None, +) -> None: + """add `/info` endpoint.""" - return ORJSONResponse( - status_code=200, - content=collection_config.mosaic_info.dict(by_alias=True, exclude_unset=True), + @app.get( + f"{prefix}/info", + response_model=MosaicInfo, + response_class=ORJSONResponse, ) + def mosaic_info( + request: Request, collection: str = Query(..., description="STAC Collection ID") + ) -> ORJSONResponse: + collection_config = get_collection_config(collection) + if not collection_config or not collection_config.mosaic_info: + return ORJSONResponse( + status_code=404, + content=f"No mosaic info available for collection {collection}", + ) + + return ORJSONResponse( + status_code=200, + content=collection_config.mosaic_info.model_dump( + by_alias=True, exclude_unset=True + ), + ) diff --git a/pctiler/pctiler/endpoints/vector_tiles.py b/pctiler/pctiler/endpoints/vector_tiles.py index 0a82db26..9f2e8d2b 100644 --- a/pctiler/pctiler/endpoints/vector_tiles.py +++ b/pctiler/pctiler/endpoints/vector_tiles.py @@ -60,7 +60,7 @@ async def get_tilejson( if tileset.center: tilejson["center"] = tileset.center - return tilejson + return TileJSON(**tilejson) @vector_tile_router.get( diff --git a/pctiler/pctiler/main.py b/pctiler/pctiler/main.py index 93d1da3b..682e3428 100755 --- a/pctiler/pctiler/main.py +++ b/pctiler/pctiler/main.py @@ -1,7 +1,8 @@ #!/usr/bin/env python3 import logging import os -from typing import Dict, List +from contextlib import asynccontextmanager +from typing import AsyncGenerator, Dict, List from fastapi import FastAPI from fastapi.openapi.utils import get_openapi @@ -16,6 +17,7 @@ ) from titiler.mosaic.errors import MOSAIC_STATUS_CODES from titiler.pgstac.db import close_db_connection, connect_to_db +from titiler.pgstac.factory import 
add_search_register_route from pccommon.constants import X_REQUEST_ENTITY from pccommon.logging import ServiceName, init_logging @@ -30,28 +32,34 @@ pg_mosaic, vector_tiles, ) - -# Initialize logging -init_logging(ServiceName.TILER) -logger = logging.getLogger(__name__) +from pctiler.middleware import ModifyResponseMiddleware # Get the root path if set in the environment APP_ROOT_PATH = os.environ.get("APP_ROOT_PATH", "") +init_logging(ServiceName.TILER, APP_ROOT_PATH) +logger = logging.getLogger(__name__) + settings = get_settings() + +@asynccontextmanager +async def lifespan(app: FastAPI) -> AsyncGenerator: + """FastAPI Lifespan.""" + await connect_to_db(app) + yield + await close_db_connection(app) + + app = FastAPI( title=settings.title, openapi_url=settings.openapi_url, root_path=APP_ROOT_PATH, + lifespan=lifespan, ) app.state.service_name = ServiceName.TILER -# Note: -# With titiler.pgstac >3.0, items endpoint has changed and use path-parameter -# /collections/{collectionId}/items/{itemId} instead of query-parameter -# https://github.com/stac-utils/titiler-pgstac/blob/d16102bf331ba588f31e131e65b07637d649b4bd/titiler/pgstac/main.py#L87-L92 app.include_router( item.pc_tile_factory.router, prefix=settings.item_endpoint_prefix, @@ -60,10 +68,33 @@ app.include_router( pg_mosaic.pgstac_mosaic_factory.router, + prefix=settings.mosaic_endpoint_prefix + "/{search_id}", + tags=["PgSTAC Mosaic endpoints"], +) +pg_mosaic.add_collection_mosaic_info_route( + app, prefix=settings.mosaic_endpoint_prefix, tags=["PgSTAC Mosaic endpoints"], ) +add_search_register_route( + app, + prefix=settings.mosaic_endpoint_prefix, + tile_dependencies=[ + pg_mosaic.pgstac_mosaic_factory.layer_dependency, + pg_mosaic.pgstac_mosaic_factory.dataset_dependency, + pg_mosaic.pgstac_mosaic_factory.pixel_selection_dependency, + pg_mosaic.pgstac_mosaic_factory.process_dependency, + pg_mosaic.pgstac_mosaic_factory.rescale_dependency, + pg_mosaic.pgstac_mosaic_factory.colormap_dependency, + 
pg_mosaic.pgstac_mosaic_factory.render_dependency, + pg_mosaic.pgstac_mosaic_factory.reader_dependency, + pg_mosaic.pgstac_mosaic_factory.backend_dependency, + pg_mosaic.pgstac_mosaic_factory.pgstac_dependency, + ], + tags=["PgSTAC Mosaic endpoints"], +) + app.include_router( legend.legend_router, prefix=settings.legend_endpoint_prefix, @@ -92,6 +123,7 @@ app.add_exception_handler(Exception, http_exception_handler) +app.add_middleware(ModifyResponseMiddleware, route=f"{APP_ROOT_PATH}/mosaic/register") app.add_middleware(TraceMiddleware, service_name=app.state.service_name) app.add_middleware(CacheControlMiddleware, cachecontrol="public, max-age=3600") app.add_middleware(TotalTimeMiddleware) @@ -109,18 +141,6 @@ ) -@app.on_event("startup") -async def startup_event() -> None: - """Connect to database on startup.""" - await connect_to_db(app) - - -@app.on_event("shutdown") -async def shutdown_event() -> None: - """Close database connection.""" - await close_db_connection(app) - - @app.get("/") async def read_root() -> Dict[str, str]: return {"Hello": "Planetary Developer!"} diff --git a/pctiler/pctiler/middleware.py b/pctiler/pctiler/middleware.py new file mode 100644 index 00000000..7c5ccc4f --- /dev/null +++ b/pctiler/pctiler/middleware.py @@ -0,0 +1,44 @@ +import json +from collections import OrderedDict + +from starlette.datastructures import MutableHeaders +from starlette.types import ASGIApp, Message, Receive, Scope, Send + + +class ModifyResponseMiddleware: + def __init__(self, app: ASGIApp, route: str) -> None: + self.app = app + self.route = route + + async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None: + if scope["type"] != "http" or scope["path"] != self.route: + return await self.app(scope, receive, send) + + async def send_with_searchid(message: Message) -> None: + message_type = message["type"] + if message_type == "http.response.start": + # Don't send the initial message until we've determined how to + # modify the outgoing 
content-length header. + self.initial_message = message + elif message_type == "http.response.body": + # Rewrite id to searchid for backwards compatibility, keep key order + body = json.loads(message["body"]) + ordered_body = OrderedDict() + ordered_body["searchid"] = body.get("id") + ordered_body.update(body) + + resp_body = json.dumps(ordered_body, ensure_ascii=False).encode("utf-8") + message["body"] = resp_body + + # Update the content-length header on the start message + headers = MutableHeaders(scope=self.initial_message) + headers["Content-Length"] = str(len(resp_body)) + + # Send the start and body asgi messages + await send(self.initial_message) + await send(message) + + else: + await send(message) + + await self.app(scope, receive, send_with_searchid) diff --git a/pctiler/pctiler/reader.py b/pctiler/pctiler/reader.py index 19724a8c..c1443541 100644 --- a/pctiler/pctiler/reader.py +++ b/pctiler/pctiler/reader.py @@ -49,22 +49,15 @@ class ItemSTACReader(PgSTACReader): def _get_asset_info(self, asset: str) -> AssetInfo: """return asset's url.""" - asset_url = BlobCDN.transform_if_available( - super()._get_asset_info(asset)["url"] - ) + info = super()._get_asset_info(asset) + asset_url = BlobCDN.transform_if_available(info["url"]) if self.input.collection_id: render_config = get_render_config(self.input.collection_id) if render_config and render_config.requires_token: asset_url = pc.sign(asset_url) - asset_info = self.input.assets[asset] - info = AssetInfo(url=asset_url) - - if "file:header_size" in asset_info.extra_fields: - h = asset_info.extra_fields["file:header_size"] - info["env"] = {"GDAL_INGESTED_BYTES_AT_OPEN": h} - + info["url"] = asset_url return info @@ -119,7 +112,7 @@ class PGSTACBackend(pgstac_mosaic.PGSTACBackend): request: Optional[Request] = attr.ib(default=None) # Override from PGSTACBackend to use collection - def assets_for_tile( + def assets_for_tile( # type: ignore self, x: int, y: int, z: int, collection: Optional[str] = None, 
**kwargs: Any ) -> List[Dict]: settings = get_settings() @@ -166,12 +159,11 @@ def assets_for_tile( return assets # override from PGSTACBackend to pass through collection - def tile( + def tile( # type: ignore self, tile_x: int, tile_y: int, tile_z: int, - reverse: bool = False, collection: Optional[str] = None, scan_limit: Optional[int] = None, items_limit: Optional[int] = None, @@ -199,8 +191,6 @@ def tile( ) ts = time.perf_counter() - if reverse: - mosaic_assets = list(reversed(mosaic_assets)) def _reader( item: Dict[str, Any], x: int, y: int, z: int, **kwargs: Any diff --git a/pctiler/requirements-dev.txt b/pctiler/requirements-dev.txt index 09fb37b8..3669d8de 100644 --- a/pctiler/requirements-dev.txt +++ b/pctiler/requirements-dev.txt @@ -5,9 +5,9 @@ # pip-compile --extra=dev --output-file=pctiler/requirements-dev.txt ./pctiler/setup.py # affine==2.4.0 - # via - # rasterio - # supermercado + # via rasterio +annotated-types==0.7.0 + # via pydantic anyio==4.3.0 # via # httpx @@ -18,21 +18,11 @@ attrs==23.2.0 # morecantile # rasterio # rio-tiler -boto3==1.34.136 - # via - # pctiler (pctiler/setup.py) - # rio-tiler -botocore==1.34.136 - # via - # boto3 - # pctiler (pctiler/setup.py) - # s3transfer cachetools==5.3.3 # via # cogeo-mosaic - # morecantile # rio-tiler -certifi==2024.2.2 +certifi==2024.7.4 # via # httpcore # httpx @@ -45,20 +35,13 @@ click==8.1.7 # via # click-plugins # cligj - # mercantile # planetary-computer # rasterio - # stac-pydantic - # supermercado click-plugins==1.1.1 - # via - # rasterio - # supermercado + # via rasterio cligj==0.7.2 - # via - # rasterio - # supermercado -cogeo-mosaic==5.0.0 + # via rasterio +cogeo-mosaic==7.1.0 # via titiler-mosaic color-operations==0.1.3 # via rio-tiler @@ -68,16 +51,15 @@ cycler==0.12.1 # via matplotlib exceptiongroup==1.2.0 # via anyio -fastapi==0.91.0 +fastapi-slim==0.111.0 # via + # pctiler (pctiler/setup.py) # titiler-core - # titiler-pgstac fonttools==4.53.0 # via matplotlib -geojson-pydantic==0.4.2 
+geojson-pydantic==1.1.0 # via # pctiler (pctiler/setup.py) - # stac-pydantic # titiler-core # titiler-pgstac h11==0.14.0 @@ -102,22 +84,18 @@ jinja2==3.1.4 # via # pctiler (pctiler/setup.py) # titiler-core -jmespath==1.0.1 - # via - # boto3 - # botocore kiwisolver==1.4.5 # via matplotlib markupsafe==2.1.5 # via jinja2 matplotlib==3.9.0 # via pctiler (pctiler/setup.py) -mercantile==1.2.1 - # via supermercado -morecantile==3.4.0 +morecantile==5.3.0 # via # cogeo-mosaic # rio-tiler + # supermorecado + # titiler-core numexpr==2.9.0 # via rio-tiler numpy==1.26.4 @@ -130,17 +108,18 @@ numpy==1.26.4 # rio-tiler # shapely # snuggs - # supermercado # titiler-core orjson==3.10.4 # via pctiler (pctiler/setup.py) packaging==24.1 - # via matplotlib + # via + # matplotlib + # planetary-computer pillow==10.3.0 # via # matplotlib # pctiler (pctiler/setup.py) -planetary-computer==0.4.9 +planetary-computer==1.0.0 # via pctiler (pctiler/setup.py) psycopg[binary,pool]==3.1.18 # via pctiler (pctiler/setup.py) @@ -148,17 +127,24 @@ psycopg-binary==3.1.18 # via psycopg psycopg-pool==3.2.1 # via psycopg -pydantic[dotenv]==1.10.14 +pydantic==2.7.4 # via # cogeo-mosaic - # fastapi + # fastapi-slim # geojson-pydantic # morecantile # pctiler (pctiler/setup.py) # planetary-computer + # pydantic-settings # rio-tiler - # stac-pydantic # titiler-core + # titiler-pgstac +pydantic-core==2.18.4 + # via pydantic +pydantic-settings==2.3.3 + # via + # cogeo-mosaic + # titiler-pgstac pyparsing==3.1.2 # via # matplotlib @@ -175,12 +161,13 @@ pystac-client==0.6.1 # via planetary-computer python-dateutil==2.9.0.post0 # via - # botocore # matplotlib # pystac # pystac-client python-dotenv==1.0.1 - # via pydantic + # via + # planetary-computer + # pydantic-settings pytz==2024.1 # via planetary-computer rasterio==1.3.10 @@ -188,19 +175,17 @@ rasterio==1.3.10 # cogeo-mosaic # pctiler (pctiler/setup.py) # rio-tiler - # supermercado + # supermorecado # titiler-core -requests==2.32.2 +requests==2.32.3 # via # 
pctiler (pctiler/setup.py) # planetary-computer # pystac-client -rio-tiler==4.1.13 +rio-tiler==6.6.1 # via # cogeo-mosaic # titiler-core -s3transfer==0.10.1 - # via boto3 shapely==2.0.3 # via cogeo-mosaic simplejson==3.19.2 @@ -213,24 +198,20 @@ sniffio==1.3.1 # httpx snuggs==1.4.7 # via rasterio -stac-pydantic==2.0.3 - # via titiler-pgstac -starlette==0.24.0 - # via - # fastapi - # titiler-pgstac -supermercado==0.2.0 +starlette==0.37.2 + # via fastapi-slim +supermorecado==0.1.2 # via cogeo-mosaic -titiler-core==0.10.2 +titiler-core==0.18.3 # via # pctiler (pctiler/setup.py) # titiler-mosaic # titiler-pgstac -titiler-mosaic==0.10.2 +titiler-mosaic==0.18.3 # via # pctiler (pctiler/setup.py) # titiler-pgstac -titiler-pgstac==0.2.4 +titiler-pgstac==1.3.0 # via pctiler (pctiler/setup.py) types-requests==2.31.0.6 # via pctiler (pctiler/setup.py) @@ -239,14 +220,15 @@ types-urllib3==1.26.25.14 typing-extensions==4.10.0 # via # anyio + # fastapi-slim # psycopg # psycopg-pool # pydantic + # pydantic-core # starlette + # titiler-core urllib3==1.26.18 - # via - # botocore - # requests + # via requests zipp==3.19.2 # via # importlib-metadata diff --git a/pctiler/requirements-server.txt b/pctiler/requirements-server.txt index 9f4f6e03..d694c026 100644 --- a/pctiler/requirements-server.txt +++ b/pctiler/requirements-server.txt @@ -5,37 +5,25 @@ # pip-compile --extra=server --output-file=pctiler/requirements-server.txt ./pctiler/setup.py # affine==2.4.0 - # via - # rasterio - # supermercado + # via rasterio +annotated-types==0.7.0 + # via pydantic anyio==3.7.1 # via # httpx # starlette - # watchgod -asgiref==3.8.1 - # via uvicorn + # watchfiles attrs==23.2.0 # via # cogeo-mosaic # morecantile # rasterio # rio-tiler -boto3==1.34.136 - # via - # pctiler (pctiler/setup.py) - # rio-tiler -botocore==1.34.136 - # via - # boto3 - # pctiler (pctiler/setup.py) - # s3transfer cachetools==5.3.3 # via # cogeo-mosaic - # morecantile # rio-tiler -certifi==2024.2.2 +certifi==2024.7.4 # via # 
httpcore # httpx @@ -48,21 +36,14 @@ click==8.1.7 # via # click-plugins # cligj - # mercantile # planetary-computer # rasterio - # stac-pydantic - # supermercado # uvicorn click-plugins==1.1.1 - # via - # rasterio - # supermercado + # via rasterio cligj==0.7.2 - # via - # rasterio - # supermercado -cogeo-mosaic==5.0.0 + # via rasterio +cogeo-mosaic==7.1.0 # via titiler-mosaic color-operations==0.1.3 # via rio-tiler @@ -72,16 +53,15 @@ cycler==0.12.1 # via matplotlib exceptiongroup==1.2.0 # via anyio -fastapi==0.91.0 +fastapi-slim==0.111.0 # via + # pctiler (pctiler/setup.py) # titiler-core - # titiler-pgstac fonttools==4.53.0 # via matplotlib -geojson-pydantic==0.4.2 +geojson-pydantic==1.1.0 # via # pctiler (pctiler/setup.py) - # stac-pydantic # titiler-core # titiler-pgstac h11==0.14.0 @@ -110,22 +90,18 @@ jinja2==3.1.4 # via # pctiler (pctiler/setup.py) # titiler-core -jmespath==1.0.1 - # via - # boto3 - # botocore kiwisolver==1.4.5 # via matplotlib markupsafe==2.1.5 # via jinja2 matplotlib==3.9.0 # via pctiler (pctiler/setup.py) -mercantile==1.2.1 - # via supermercado -morecantile==3.4.0 +morecantile==5.3.0 # via # cogeo-mosaic # rio-tiler + # supermorecado + # titiler-core numexpr==2.9.0 # via rio-tiler numpy==1.26.4 @@ -138,17 +114,18 @@ numpy==1.26.4 # rio-tiler # shapely # snuggs - # supermercado # titiler-core orjson==3.10.4 # via pctiler (pctiler/setup.py) packaging==24.1 - # via matplotlib + # via + # matplotlib + # planetary-computer pillow==10.3.0 # via # matplotlib # pctiler (pctiler/setup.py) -planetary-computer==0.4.9 +planetary-computer==1.0.0 # via pctiler (pctiler/setup.py) psycopg[binary,pool]==3.1.18 # via pctiler (pctiler/setup.py) @@ -156,17 +133,24 @@ psycopg-binary==3.1.18 # via psycopg psycopg-pool==3.2.1 # via psycopg -pydantic[dotenv]==1.10.14 +pydantic==2.7.4 # via # cogeo-mosaic - # fastapi + # fastapi-slim # geojson-pydantic # morecantile # pctiler (pctiler/setup.py) # planetary-computer + # pydantic-settings # rio-tiler - # 
stac-pydantic # titiler-core + # titiler-pgstac +pydantic-core==2.18.4 + # via pydantic +pydantic-settings==2.3.3 + # via + # cogeo-mosaic + # titiler-pgstac pyparsing==3.1.2 # via # matplotlib @@ -183,13 +167,13 @@ pystac-client==0.6.1 # via planetary-computer python-dateutil==2.9.0.post0 # via - # botocore # matplotlib # pystac # pystac-client python-dotenv==1.0.1 # via - # pydantic + # planetary-computer + # pydantic-settings # uvicorn pytz==2024.1 # via planetary-computer @@ -200,19 +184,17 @@ rasterio==1.3.10 # cogeo-mosaic # pctiler (pctiler/setup.py) # rio-tiler - # supermercado + # supermorecado # titiler-core -requests==2.32.2 +requests==2.32.3 # via # pctiler (pctiler/setup.py) # planetary-computer # pystac-client -rio-tiler==4.1.13 +rio-tiler==6.6.1 # via # cogeo-mosaic # titiler-core -s3transfer==0.10.1 - # via boto3 shapely==2.0.3 # via cogeo-mosaic simplejson==3.19.2 @@ -225,41 +207,38 @@ sniffio==1.3.1 # httpx snuggs==1.4.7 # via rasterio -stac-pydantic==2.0.3 - # via titiler-pgstac -starlette==0.24.0 - # via - # fastapi - # titiler-pgstac -supermercado==0.2.0 +starlette==0.37.2 + # via fastapi-slim +supermorecado==0.1.2 # via cogeo-mosaic -titiler-core==0.10.2 +titiler-core==0.18.3 # via # pctiler (pctiler/setup.py) # titiler-mosaic # titiler-pgstac -titiler-mosaic==0.10.2 +titiler-mosaic==0.18.3 # via # pctiler (pctiler/setup.py) # titiler-pgstac -titiler-pgstac==0.2.4 +titiler-pgstac==1.3.0 # via pctiler (pctiler/setup.py) typing-extensions==4.10.0 # via - # asgiref + # fastapi-slim # psycopg # psycopg-pool # pydantic + # pydantic-core # starlette + # titiler-core + # uvicorn urllib3==1.26.19 - # via - # botocore - # requests -uvicorn[standard]==0.17.6 + # via requests +uvicorn[standard]==0.30.1 # via pctiler (pctiler/setup.py) uvloop==0.19.0 # via uvicorn -watchgod==0.8.2 +watchfiles==0.22.0 # via uvicorn websockets==12.0 # via uvicorn diff --git a/pctiler/setup.py b/pctiler/setup.py index 4c0e5aab..f20c68ed 100644 --- a/pctiler/setup.py +++ 
b/pctiler/setup.py @@ -5,22 +5,21 @@ # Runtime requirements, see environment.yaml inst_reqs: List[str] = [ - "geojson-pydantic==0.4.2", + "fastapi-slim==0.111.0", + "geojson-pydantic==1.1.0", "jinja2==3.1.4", "pystac==1.10.1", - "planetary-computer==0.4.9", + "planetary-computer==1.0.0", "rasterio==1.3.10", - "titiler.core==0.10.2", - "titiler.mosaic==0.10.2", + "titiler.core==0.18.3", + "titiler.mosaic==0.18.3", "pillow==10.3.0", - "boto3==1.34.136", - "botocore==1.34.136", - "pydantic==1.10.14", + "pydantic>=2.7,<2.8", "idna>=3.7.0", - "requests==2.32.2", + "requests==2.32.3", # titiler-pgstac "psycopg[binary,pool]", - "titiler.pgstac==0.2.4", + "titiler.pgstac==1.3.0", # colormap dependencies "matplotlib==3.9.0", "orjson==3.10.4", @@ -30,7 +29,7 @@ extra_reqs = { "dev": ["types-requests"], "server": [ - "uvicorn[standard]>=0.17.0,<0.18.0", + "uvicorn[standard]==0.30.1", ], } diff --git a/pctiler/tests/conftest.py b/pctiler/tests/conftest.py index 95d50231..4c75dbe7 100644 --- a/pctiler/tests/conftest.py +++ b/pctiler/tests/conftest.py @@ -1,7 +1,7 @@ from typing import List import pytest -from httpx import AsyncClient +from httpx import ASGITransport, AsyncClient from pytest import Config, Item, Parser @@ -36,6 +36,9 @@ async def client() -> AsyncClient: from pctiler.main import app await connect_to_db(app) - async with AsyncClient(app=app, base_url="http://test") as client: + async with AsyncClient( + transport=ASGITransport(app=app), + base_url="http://test", + ) as client: yield client await close_db_connection(app) diff --git a/pctiler/tests/endpoints/test_colormaps.py b/pctiler/tests/endpoints/test_colormaps.py new file mode 100644 index 00000000..5e6a6a1c --- /dev/null +++ b/pctiler/tests/endpoints/test_colormaps.py @@ -0,0 +1,21 @@ +import pytest +from httpx import AsyncClient + + +@pytest.mark.asyncio +async def test_get_colormap_uppercasing(client: AsyncClient) -> None: + """ + Test mixed casing colormap_name which matches the original key defined + 
(and used in public render-configs) + """ + params = { + "collection": "naip", + "item": "al_m_3008506_nw_16_060_20191118_20200114", + "assets": "image", + "asset_bidx": "image|1", + "colormap_name": "modis-10A2", + } + response = await client.get( + "/item/tiles/WebMercatorQuad/15/8616/13419@1x", params=params + ) + assert response.status_code == 200 diff --git a/pctiler/tests/endpoints/test_pg_item.py b/pctiler/tests/endpoints/test_pg_item.py index 010d27fe..55994bdd 100644 --- a/pctiler/tests/endpoints/test_pg_item.py +++ b/pctiler/tests/endpoints/test_pg_item.py @@ -28,3 +28,38 @@ async def test_item_preview_xss(client: AsyncClient) -> None: # The XSS should be sanitized out of the response assert response_xss.status_code == 200 assert "//" not in response_xss.text + + +@pytest.mark.asyncio +async def test_item_crop(client: AsyncClient) -> None: + """ + Test the legacy /crop endpoint which is provided by pctiler, backed by the + /feature endpoint function from titiler-core + """ + params = { + "collection": "naip", + "item": "al_m_3008506_nw_16_060_20191118_20200114", + "assets": "image", + "asset_bidx": "image|1", + } + geom = { + "type": "Feature", + "properties": {}, + "geometry": { + "coordinates": [ + [ + [-85.34600303041255, 30.97430719427659], + [-85.34600303041255, 30.9740750264651], + [-85.34403025022365, 30.9740750264651], + [-85.34403025022365, 30.97430719427659], + [-85.34600303041255, 30.97430719427659], + ] + ], + "type": "Polygon", + }, + } + + resp = await client.post("/item/crop.tif", params=params, json=geom) + + assert resp.status_code == 200 + assert resp.headers["Content-Type"] == "image/tiff; application=geotiff" diff --git a/pctiler/tests/endpoints/test_pg_mosaic.py b/pctiler/tests/endpoints/test_pg_mosaic.py index 5f5a7e09..e9b1e9c1 100644 --- a/pctiler/tests/endpoints/test_pg_mosaic.py +++ b/pctiler/tests/endpoints/test_pg_mosaic.py @@ -11,3 +11,22 @@ async def test_get(client: AsyncClient) -> None: info_dict = response.json() 
mosaic_info = MosaicInfo(**info_dict) assert mosaic_info.default_location.zoom == 13 + + +@pytest.mark.asyncio +async def test_register(client: AsyncClient) -> None: + cql = { + "filter-lang": "cql2-json", + "filter": { + "op": "and", + "args": [{"op": "=", "args": [{"property": "collection"}, "naip"]}], + }, + } + expected_content_hash = "8b989f86a149628eabfde894fb965982" + response = await client.post("/mosaic/register", json=cql) + assert response.status_code == 200 + resp = response.json() + + # Test that `searchid` which has been removed in titiler remains in pctiler, + # and that the search hash remains consistent + assert resp["searchid"] == expected_content_hash diff --git a/requirements-dev.txt b/requirements-dev.txt index fd0c6386..d2af00a2 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -1,7 +1,7 @@ black==24.4.2 flake8==3.8.4 isort==5.9.2 -mypy==1.8.0 +mypy==1.10.0 openapi-spec-validator==0.3.0 pytest==7.* pytest-asyncio==0.18.*