-
Notifications
You must be signed in to change notification settings - Fork 0
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
* add test write property json
* test read and copy properties file
* test validation functions
* add sonar cloud
* check spatial temporal bounds
* add tests for utils
* add tests for canopy dataset
* add more tests to canopy dataset
* add badges to readme
* add tests for converter
* add tests for regrid
* test xesmf availability
* exception for pytest with(out) xesmf
* make linter happy
* add tests for xesmf regrid
* further improve coverage for converter
* further improve coverage for regrid
* Apply suggestions from code review
* fix temp dir for windows by using mktemp
* address comments from Sarah

Co-authored-by: SarahAlidoost <[email protected]>
- Loading branch information
1 parent
54439a0
commit d673d17
Showing
17 changed files
with
746 additions
and
8 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,40 @@ | ||
name: sonarcloud

on:
  push:
    branches:
      - main
  pull_request:
    types: [opened, synchronize, reopened]
    branches:
      - main

jobs:

  sonarcloud:
    name: SonarCloud
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
        with:
          fetch-depth: 0  # Shallow clones should be disabled for a better relevancy of analysis
      - name: Set up Python
        uses: actions/setup-python@v3
        with:
          python-version: '3.10'
      - name: Python info
        shell: bash -l {0}
        run: |
          which python3
          python3 --version
      - name: Install dependencies
        run: python3 -m pip install hatch --upgrade
      - name: Run unit tests with coverage
        run: hatch run coverage
      - name: Correct coverage paths
        # Strip the absolute workspace prefix so SonarCloud can map paths.
        run: sed -i "s+$PWD/++g" coverage.xml
      - name: SonarCloud Scan
        uses: SonarSource/sonarcloud-github-action@master
        env:
          # Normalized expression spacing: ${{ secrets.X }} per workflow syntax convention.
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} # Needed to get PR information, if any
          SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,13 @@ | ||
# SonarCloud project identity.
sonar.organization=ecoextreml
sonar.projectKey=EcoExtreML_zampy
sonar.host.url=https://sonarcloud.io
# Source and test locations scanned by the analyzer.
sonar.sources=src/zampy/
sonar.tests=tests/
# Project links shown in the SonarCloud UI.
sonar.links.homepage=https://github.com/EcoExtreML/zampy
sonar.links.scm=https://github.com/EcoExtreML/zampy
sonar.links.issue=https://github.com/EcoExtreML/zampy/issues
sonar.links.ci=https://github.com/EcoExtreML/zampy/actions
# Report files produced by CI that SonarCloud ingests.
sonar.python.coverage.reportPaths=coverage.xml
sonar.python.xunit.reportPath=xunit-result.xml
sonar.python.pylint.reportPaths=pylint-report.txt
# Python versions the project declares support for.
sonar.python.version=3.8, 3.9, 3.10
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,103 @@ | ||
"""Unit test for converter.""" | ||
|
||
from pathlib import Path | ||
import numpy as np | ||
import pytest | ||
import xarray as xr | ||
from test_datasets import data_folder | ||
from zampy.datasets import EthCanopyHeight | ||
from zampy.datasets import converter | ||
from zampy.datasets.eth_canopy_height import parse_tiff_file | ||
|
||
|
||
# Folder holding the ETH canopy height test fixture used by these tests.
path_dummy_data = data_folder / "eth-canopy-height"

# ruff: noqa: B018
|
||
|
||
def test_check_convention_not_support():
    """An unknown convention name must raise a ValueError."""
    with pytest.raises(ValueError, match="not supported"):
        converter.check_convention("fake_convention")
|
||
|
||
def test_check_convention_not_exist():
    """A convention path that does not exist must raise FileNotFoundError."""
    with pytest.raises(FileNotFoundError, match="could not be found"):
        converter.check_convention(Path("fake_path"))
|
||
|
||
def test_convert_var():
    """Test the private _convert_var unit-conversion helper."""
    dataset = parse_tiff_file(
        path_dummy_data / "ETH_GlobalCanopyHeight_10m_2020_N51E003_Map.tif",
    )
    converted = converter._convert_var(dataset, "height_of_vegetation", "decimeter")

    # Converting metres to decimetres multiplies the values by 10.
    expected = dataset["height_of_vegetation"].values * 10.0
    assert np.allclose(
        converted["height_of_vegetation"].values,
        expected,
        equal_nan=True,
    )
|
||
|
||
def test_convert_var_name():
    """Test the convert function when only renaming is needed.

    No unit-conversion is performed; only the variable name is updated.
    """
    dataset = parse_tiff_file(
        path_dummy_data / "ETH_GlobalCanopyHeight_10m_2020_N51E003_Map.tif",
    )
    converted = converter.convert(
        data=dataset, dataset=EthCanopyHeight(), convention="ALMA"
    )

    assert next(iter(converted.data_vars)) == "Hveg"
|
||
|
||
def test_convert_unit():
    """Test the convert function when a unit conversion is required."""
    dataset = parse_tiff_file(
        path_dummy_data / "ETH_GlobalCanopyHeight_10m_2020_N51E003_Map.tif",
    )
    # Pretend the source data is in decimetres so conversion kicks in.
    dataset["height_of_vegetation"].attrs["units"] = "decimeter"
    converted = converter.convert(
        data=dataset, dataset=EthCanopyHeight(), convention="ALMA"
    )

    expected = dataset["height_of_vegetation"].values / 10.0
    assert np.allclose(converted["Hveg"].values, expected, equal_nan=True)
|
||
|
||
def test_convert_no_conversion():
    """Test the convert function when nothing needs converting.

    The input dataset should be returned without change.
    """
    dummy_ds = xr.Dataset(
        data_vars={
            "temperature": (
                ["latitude", "longitude"],
                np.random.randn(1, 2),
                {"units": "Celsius"},
            ),
        },
        coords={
            "lon": (["longitude"], [110, 111]),
            "lat": (["latitude"], [20]),
        },
        attrs={"units": "Weather dataset."},
    )

    converted = converter.convert(
        data=dummy_ds, dataset=EthCanopyHeight(), convention="ALMA"
    )

    # "temperature" is not part of the convention, so it is left untouched.
    assert next(iter(converted.data_vars)) == "temperature"
Binary file added
BIN
+226 KB
tests/test_data/eth-canopy-height/ETH_GlobalCanopyHeight_10m_2020_N51E003_Map.tif
Binary file not shown.
Empty file.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,99 @@ | ||
"""Unit test for dataset protocol.""" | ||
|
||
import json | ||
from pathlib import Path | ||
from tempfile import TemporaryDirectory | ||
import numpy as np | ||
import pytest | ||
from zampy.datasets import dataset_protocol | ||
from zampy.datasets.dataset_protocol import SpatialBounds | ||
from zampy.datasets.dataset_protocol import TimeBounds | ||
|
||
|
||
def dummy_property_file(dataset_folder):
    """Write a dummy properties.json into *dataset_folder* for testing."""
    time_bounds = TimeBounds(
        np.datetime64("2020-01-01"), np.datetime64("2020-12-31")
    )
    spatial_bounds = SpatialBounds(54, 6, 51, 3)

    dataset_protocol.write_properties_file(
        dataset_folder=dataset_folder,
        spatial_bounds=spatial_bounds,
        time_bounds=time_bounds,
        variable_names=["Hveg", "SWnet"],
    )
|
||
|
||
def test_write_properties():
    """Check that the properties file is written with the expected content."""
    with TemporaryDirectory() as temp_dir:
        dataset_folder = Path(temp_dir)
        dummy_property_file(dataset_folder)

        json_file_path = dataset_folder / "properties.json"
        with json_file_path.open(mode="r", encoding="utf-8") as file:
            properties = json.load(file)

        # Verify the written data matches what dummy_property_file supplied.
        expected = {
            "start_time": "2020-01-01",
            "end_time": "2020-12-31",
            "north": 54,
            "east": 6,
            "south": 51,
            "west": 3,
            "variable_names": ["Hveg", "SWnet"],
        }
        for key, value in expected.items():
            assert properties[key] == value
|
||
|
||
def test_read_properties():
    """Check that read_properties_file round-trips the dummy property file."""
    with TemporaryDirectory() as temp_dir:
        dataset_folder = Path(temp_dir)
        dummy_property_file(dataset_folder)

        bbox, times, variables = dataset_protocol.read_properties_file(
            dataset_folder
        )

        # Verify the returned values.
        assert (bbox.north, bbox.east, bbox.south, bbox.west) == (54, 6, 51, 3)
        assert times.start == "2020-01-01"
        assert times.end == "2020-12-31"
        assert variables == ["Hveg", "SWnet"]
|
||
|
||
def test_copy_properties_file():
    """Check that copy_properties_file copies properties.json to the target."""
    with TemporaryDirectory() as temp_dir1, TemporaryDirectory() as temp_dir2:
        source_folder = Path(temp_dir1)
        target_folder = Path(temp_dir2)

        # Create a properties.json file in the source folder, then copy it.
        dummy_property_file(source_folder)
        dataset_protocol.copy_properties_file(source_folder, target_folder)

        # Verify that the file has been copied.
        assert (target_folder / "properties.json").exists()
|
||
|
||
def test_invalid_spatial_bounds_north_south():
    """A southern bound above the northern bound must be rejected."""
    with pytest.raises(ValueError, match="greater than northern bound"):
        SpatialBounds(51, 6, 54, 3)
|
||
|
||
def test_invalid_spatial_bounds_east_west():
    """A western bound above the eastern bound must be rejected."""
    with pytest.raises(ValueError, match="greater than eastern bound"):
        SpatialBounds(54, 6, 51, 20)
|
||
|
||
def test_invalid_time_bounds():
    """A start time after the end time must be rejected."""
    start = np.datetime64("2021-01-01")
    end = np.datetime64("2020-12-31")
    with pytest.raises(ValueError):
        TimeBounds(start, end)
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,6 @@ | ||
"""This module contains all tests for datasets included in zampy.""" | ||
from pathlib import Path | ||
|
||
|
||
test_folder = Path(__file__).resolve().parents[1] | ||
data_folder = test_folder / "test_data" |
Oops, something went wrong.