-
Notifications
You must be signed in to change notification settings - Fork 16
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
- Loading branch information
Showing
7 changed files
with
196 additions
and
4 deletions.
There are no files selected for viewing
Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.
Oops, something went wrong.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,33 @@ | ||
import asyncio | ||
from unittest.mock import AsyncMock | ||
from metrics_tools.compute.types import ExportReference, ExportType | ||
import pytest | ||
|
||
from metrics_tools.compute.cache import CacheExportManager, FakeExportAdapter | ||
|
||
|
||
@pytest.mark.asyncio
async def test_cache_export_manager():
    """Exercise CacheExportManager caching: the first resolution exports each
    table once; later resolutions reuse cached export references."""
    fake_adapter = AsyncMock(FakeExportAdapter)
    fake_adapter.export_table.return_value = ExportReference(
        table="test",
        type=ExportType.GCS,
        payload={},
    )
    manager = await CacheExportManager.setup(fake_adapter)

    # First pass: nothing is cached yet, so both tables must be exported.
    first_resolution = await asyncio.wait_for(
        manager.resolve_export_references(["table1", "table2"]), timeout=1
    )
    assert first_resolution.keys() == {"table1", "table2"}

    # Second pass: table1/table2 are cache hits (table1 even appears twice);
    # only table3 should trigger a new export.
    second_resolution = await asyncio.wait_for(
        manager.resolve_export_references(["table1", "table2", "table1", "table3"]),
        timeout=1,
    )
    assert second_resolution.keys() == {"table1", "table2", "table3"}

    # Three distinct tables overall -> exactly three export calls.
    assert fake_adapter.export_table.call_count == 3
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,69 @@ | ||
import asyncio | ||
|
||
from dask.distributed import Client | ||
import pytest | ||
|
||
from metrics_tools.compute.cluster import ( | ||
ClusterManager, | ||
ClusterProxy, | ||
ClusterFactory, | ||
ClusterStatus, | ||
) | ||
|
||
|
||
class FakeClient(Client):
    """Minimal stand-in for a dask ``Client``.

    Deliberately skips the real constructor (which would try to connect to a
    scheduler) and turns the lifecycle/plugin hooks into no-ops.
    """

    def __init__(self):
        # Intentionally do NOT call super().__init__ — no scheduler exists.
        pass

    def close(self, *args, **kwargs):
        """No-op: there is no real connection to tear down."""

    def register_worker_plugin(self, *args, **kwargs):
        """No-op: worker plugins are irrelevant for the fake client."""
|
||
|
||
class FakeClusterProxy(ClusterProxy):
    """In-memory ClusterProxy double that never talks to a real cluster.

    Reports a fixed single-worker, ready status so tests can drive the
    manager's lifecycle without dask infrastructure.
    """

    def __init__(self, min_size: int, max_size: int):
        self.min_size = min_size
        self.max_size = max_size

    async def client(self) -> Client:
        # BUG FIX: the original did `return await FakeClient()`. FakeClient
        # bypasses Client.__init__, so awaiting the half-initialized instance
        # invokes dask's Client.__await__ machinery on missing attributes and
        # raises. Callers await this coroutine itself — just return the client.
        return FakeClient()

    async def status(self):
        """Always report a running, ready, single-worker cluster."""
        return ClusterStatus(
            status="running",
            is_ready=True,
            dashboard_url="",
            workers=1,
        )

    async def stop(self):
        """No-op: nothing to stop for the fake."""
        return

    @property
    def dashboard_link(self):
        return "http://fake-dashboard.com"

    @property
    def workers(self):
        return 1
|
||
|
||
class FakeClusterFactory(ClusterFactory):
    """Factory that hands out FakeClusterProxy instances instead of real clusters."""

    async def create_cluster(self, min_size: int, max_size: int):
        proxy = FakeClusterProxy(min_size, max_size)
        return proxy
|
||
|
||
@pytest.mark.asyncio
async def test_cluster_manager_reports_ready():
    """Starting a cluster must eventually resolve the manager's ready future."""
    manager = ClusterManager.with_dummy_metrics_plugin(FakeClusterFactory())

    # Grab the ready future *before* starting so the signal cannot be missed.
    ready = manager.wait_for_ready()
    await manager.start_cluster(1, 1)

    try:
        await asyncio.wait_for(ready, timeout=1)
    except asyncio.TimeoutError:
        pytest.fail("Cluster never reported ready")
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,66 @@ | ||
from metrics_tools.compute.types import ( | ||
ClusterStartRequest, | ||
ExportReference, | ||
ExportType, | ||
QueryJobStatus, | ||
QueryJobSubmitRequest, | ||
) | ||
from metrics_tools.definition import PeerMetricDependencyRef | ||
import pytest | ||
import asyncio | ||
|
||
from metrics_tools.compute.service import MetricsCalculationService | ||
from metrics_tools.compute.cluster import ClusterManager, LocalClusterFactory | ||
from metrics_tools.compute.cache import CacheExportManager, FakeExportAdapter | ||
from datetime import datetime | ||
|
||
|
||
@pytest.mark.asyncio
async def test_metrics_calculation_service():
    """End-to-end check of MetricsCalculationService against fakes.

    Registers a pre-exported table, submits one query job, and waits for it
    to reach COMPLETED within 60 seconds.
    """
    service = MetricsCalculationService.setup(
        "someid",
        "bucket",
        "result_path_prefix",
        ClusterManager.with_dummy_metrics_plugin(LocalClusterFactory()),
        await CacheExportManager.setup(FakeExportAdapter()),
    )
    # BUG FIX: wrap in try/finally so the local cluster is released even when
    # an await raises or the final assertion fails (the original leaked it).
    try:
        await service.start_cluster(ClusterStartRequest(min_size=1, max_size=1))
        await service.add_existing_exported_table_references(
            {
                "source.table123": ExportReference(
                    table="export_table123",
                    type=ExportType.GCS,
                    payload={"gcs_path": "gs://bucket/result_path_prefix/export_table123"},
                ),
            }
        )
        response = await service.submit_job(
            QueryJobSubmitRequest(
                query_str="SELECT * FROM ref.table123",
                start=datetime(2021, 1, 1),
                end=datetime(2021, 1, 3),
                dialect="duckdb",
                batch_size=1,
                columns=[("col1", "int"), ("col2", "string")],
                ref=PeerMetricDependencyRef(
                    name="test",
                    entity_type="artifact",
                    window=30,
                    unit="day",
                ),
                locals={},
                dependent_tables_map={"source.table123": "source.table123"},
            )
        )

        async def wait_for_job_to_complete():
            # Poll until the job leaves PENDING/RUNNING. BUG FIX: sleep
            # *before* re-fetching — the original slept after every fetch,
            # adding a guaranteed extra 1s delay once the job had already
            # reached a terminal state.
            status = await service.get_job_status(response.job_id)
            while status.status in [QueryJobStatus.PENDING, QueryJobStatus.RUNNING]:
                await asyncio.sleep(1)
                status = await service.get_job_status(response.job_id)

        await asyncio.wait_for(
            asyncio.create_task(wait_for_job_to_complete()), timeout=60
        )
        status = await service.get_job_status(response.job_id)
        assert status.status == QueryJobStatus.COMPLETED
    finally:
        await service.close()