Skip to content

Commit

Permalink
Add tests (#10)
Browse files Browse the repository at this point in the history
* add test write property json

* test read and copy properties file

* test validation functions

* add sonar cloud

* check spatial temporal bounds

* add tests for utils

* add tests for canopy dataset

* add more tests to canopy dataset

* add badges to readme

* add tests for converter

* add tests for regrid

* test xesmf availability

* exception for pytest with(out) xesmf

* make linter happy

* add tests for xesmf regrid

* further improve coverage for converter

* further improve coverage regrid

* Apply suggestions from code review

Co-authored-by: SarahAlidoost <[email protected]>

* fix temp dir for windows by using mktemp

* address comments from Sarah

---------

Co-authored-by: SarahAlidoost <[email protected]>
  • Loading branch information
geek-yang and SarahAlidoost committed Jul 25, 2023
1 parent 54439a0 commit d673d17
Show file tree
Hide file tree
Showing 17 changed files with 746 additions and 8 deletions.
40 changes: 40 additions & 0 deletions .github/workflows/sonarcloud.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,40 @@
name: sonarcloud

on:
  push:
    branches:
      - main
  pull_request:
    types: [opened, synchronize, reopened]
    branches:
      - main

jobs:

  sonarcloud:
    name: SonarCloud
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
        with:
          fetch-depth: 0  # Shallow clones should be disabled for a better relevancy of analysis
      - name: Set up Python
        uses: actions/setup-python@v3
        with:
          python-version: '3.10'
      - name: Python info
        shell: bash -l {0}
        run: |
          which python3
          python3 --version
      - name: Install dependencies
        run: python3 -m pip install hatch --upgrade
      - name: Run unit tests with coverage
        run: hatch run coverage
      - name: Correct coverage paths
        # Sonar expects paths relative to the repo root; strip the absolute prefix.
        run: sed -i "s+$PWD/++g" coverage.xml
      - name: SonarCloud Scan
        uses: SonarSource/sonarcloud-github-action@master
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}  # Needed to get PR information, if any
          SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
4 changes: 4 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
@@ -1,6 +1,10 @@
# zampy
Tool for downloading Land Surface Model input data

[![github license badge](https://img.shields.io/github/license/EcoExtreML/zampy)](https://github.com/EcoExtreML/zampy)
[![build](https://github.com/EcoExtreML/zampy/actions/workflows/build.yml/badge.svg)](https://github.com/EcoExtreML/zampy/actions/workflows/build.yml)
[![workflow scc badge](https://sonarcloud.io/api/project_badges/measure?project=EcoExtreML_zampy&metric=coverage)](https://sonarcloud.io/dashboard?id=EcoExtreML_zampy)


## Tool outline:

Expand Down
13 changes: 13 additions & 0 deletions sonar-project.properties
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
# SonarCloud project identification.
sonar.organization=ecoextreml
sonar.projectKey=EcoExtreML_zampy
sonar.host.url=https://sonarcloud.io
# Source and test directories analysed by the scanner.
sonar.sources=src/zampy/
sonar.tests=tests/
# Project links shown in the SonarCloud UI.
sonar.links.homepage=https://github.com/EcoExtreML/zampy
sonar.links.scm=https://github.com/EcoExtreML/zampy
sonar.links.issue=https://github.com/EcoExtreML/zampy/issues
sonar.links.ci=https://github.com/EcoExtreML/zampy/actions
# Report files produced by the CI run (see .github/workflows/sonarcloud.yml).
sonar.python.coverage.reportPaths=coverage.xml
sonar.python.xunit.reportPath=xunit-result.xml
sonar.python.pylint.reportPaths=pylint-report.txt
# Python versions the analyser should assume.
sonar.python.version=3.8, 3.9, 3.10
13 changes: 11 additions & 2 deletions src/zampy/datasets/converter.py
Original file line number Diff line number Diff line change
Expand Up @@ -36,7 +36,16 @@ def check_convention(convention: Union[str, Path]) -> None:
def convert(
data: xr.Dataset, dataset: Dataset, convention: Union[str, Path]
) -> xr.Dataset:
"""Convert a loaded dataset to the specified convention."""
"""Convert a loaded dataset to the specified convention.
Args:
data: Input xarray data.
dataset: Zampy dataset instance.
convention: Input data exchange convention.
Return:
Input xarray with converted variables following given convention.
"""
converted = False
if isinstance(convention, str):
convention_file = Path(CONVENTIONS[convention]).open(mode="r", encoding="UTF8")
Expand All @@ -50,11 +59,11 @@ def convert(
var_name = convention_dict[var.lower()]["variable"]
var_units = data[var].attrs["units"]
if var_units != convert_units:
converted = True
# lazy dask array
data = _convert_var(data, var, convert_units)
data = data.rename({var: var_name})
print(f"{var} renamed to {var_name}.")
converted = True

else:
print(f"Variable '{var}' is not included in '{convention}' convention.")
Expand Down
4 changes: 2 additions & 2 deletions src/zampy/datasets/dataset_protocol.py
Original file line number Diff line number Diff line change
Expand Up @@ -38,12 +38,12 @@ def __post_init__(self) -> None:
"""Validate the initialized SpatialBounds class."""
if self.south > self.north:
raise ValueError(
"Value of southern bound is greater than norther bound."
"Value of southern bound is greater than northern bound."
"\nPlease check the spatial bounds input."
)
if self.west > self.east:
raise ValueError(
"Value of western bound is greater than east bound."
"Value of western bound is greater than eastern bound."
"\nPlease check the spatial bounds input."
)

Expand Down
4 changes: 2 additions & 2 deletions src/zampy/datasets/validation.py
Original file line number Diff line number Diff line change
Expand Up @@ -51,10 +51,10 @@ def compare_variables(
variable_names: User requested variables.
Raises:
ValueError: _description_
InvalidVariableError: If the variables are not available in the dataset
"""
if not all(var in dataset.variable_names for var in variable_names):
raise ValueError(
raise InvalidVariableError(
f"Input variable and/or units does not match the {dataset.name} dataset."
)

Expand Down
103 changes: 103 additions & 0 deletions tests/test_converter.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,103 @@
"""Unit test for converter."""

from pathlib import Path
import numpy as np
import pytest
import xarray as xr
from test_datasets import data_folder
from zampy.datasets import EthCanopyHeight
from zampy.datasets import converter
from zampy.datasets.eth_canopy_height import parse_tiff_file


path_dummy_data = data_folder / "eth-canopy-height"

# ruff: noqa: B018


def test_check_convention_not_support():
    """An unknown convention name must be rejected with a ValueError."""
    with pytest.raises(ValueError, match="not supported"):
        converter.check_convention("fake_convention")


def test_check_convention_not_exist():
    """A convention given as a non-existent path must raise FileNotFoundError."""
    with pytest.raises(FileNotFoundError, match="could not be found"):
        converter.check_convention(Path("fake_path"))


def test_convert_var():
    """Converting height to decimeter should scale every value by 10."""
    dataset = parse_tiff_file(
        path_dummy_data / "ETH_GlobalCanopyHeight_10m_2020_N51E003_Map.tif",
    )
    converted = converter._convert_var(dataset, "height_of_vegetation", "decimeter")

    expected = dataset["height_of_vegetation"].values * 10.0
    assert np.allclose(
        converted["height_of_vegetation"].values,
        expected,
        equal_nan=True,
    )


def test_convert_var_name():
    """Convert with matching units should only rename the variable.

    No unit conversion happens here; the variable is renamed to its ALMA name.
    """
    dataset = parse_tiff_file(
        path_dummy_data / "ETH_GlobalCanopyHeight_10m_2020_N51E003_Map.tif",
    )
    result = converter.convert(data=dataset, dataset=EthCanopyHeight(), convention="ALMA")

    first_var = list(result.data_vars)[0]
    assert first_var == "Hveg"


def test_convert_unit():
    """Convert with mismatched units should rescale the data.

    The input units are forced to decimeter, so conversion back to the ALMA
    meter convention divides the values by 10.
    """
    dataset = parse_tiff_file(
        path_dummy_data / "ETH_GlobalCanopyHeight_10m_2020_N51E003_Map.tif",
    )
    dataset["height_of_vegetation"].attrs["units"] = "decimeter"
    result = converter.convert(data=dataset, dataset=EthCanopyHeight(), convention="ALMA")

    expected = dataset["height_of_vegetation"].values / 10.0
    assert np.allclose(result["Hveg"].values, expected, equal_nan=True)


def test_convert_no_conversion():
    """Convert should pass through variables unknown to the convention.

    The dummy variable is not part of the ALMA table, so the dataset is
    returned unchanged (no rename, no unit conversion).
    """
    dummy_ds = xr.Dataset(
        data_vars={
            "temperature": (
                ["latitude", "longitude"],
                np.random.randn(1, 2),
                {"units": "Celsius"},
            ),
        },
        coords={
            "lon": (["longitude"], [110, 111]),
            "lat": (["latitude"], [20]),
        },
        attrs={"units": "Weather dataset."},
    )

    result = converter.convert(data=dummy_ds, dataset=EthCanopyHeight(), convention="ALMA")

    assert list(result.data_vars)[0] == "temperature"
Binary file not shown.
Empty file.
99 changes: 99 additions & 0 deletions tests/test_dataset_protocol.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,99 @@
"""Unit test for dataset protocol."""

import json
from pathlib import Path
from tempfile import TemporaryDirectory
import numpy as np
import pytest
from zampy.datasets import dataset_protocol
from zampy.datasets.dataset_protocol import SpatialBounds
from zampy.datasets.dataset_protocol import TimeBounds


def dummy_property_file(dataset_folder):
    """Write a dummy properties.json into *dataset_folder* for the tests below."""
    dataset_protocol.write_properties_file(
        dataset_folder=dataset_folder,
        spatial_bounds=SpatialBounds(54, 6, 51, 3),
        time_bounds=TimeBounds(
            np.datetime64("2020-01-01"), np.datetime64("2020-12-31")
        ),
        variable_names=["Hveg", "SWnet"],
    )


def test_write_properties():
    """The properties file written to disk should contain the dummy values."""
    with TemporaryDirectory() as temp_dir:
        folder = Path(temp_dir)
        dummy_property_file(folder)

        with (folder / "properties.json").open(mode="r", encoding="utf-8") as file:
            properties = json.load(file)

    # Verify the written data field by field.
    expected = {
        "start_time": "2020-01-01",
        "end_time": "2020-12-31",
        "north": 54,
        "east": 6,
        "south": 51,
        "west": 3,
        "variable_names": ["Hveg", "SWnet"],
    }
    for key, value in expected.items():
        assert properties[key] == value


def test_read_properties():
    """Reading back the dummy properties file should restore all values."""
    with TemporaryDirectory() as temp_dir:
        folder = Path(temp_dir)
        dummy_property_file(folder)

        spatial_bounds, time_bounds, variable_names = (
            dataset_protocol.read_properties_file(folder)
        )

    # Spatial bounds round-trip.
    assert spatial_bounds.north == 54
    assert spatial_bounds.east == 6
    assert spatial_bounds.south == 51
    assert spatial_bounds.west == 3
    # Temporal bounds and variables round-trip.
    assert time_bounds.start == "2020-01-01"
    assert time_bounds.end == "2020-12-31"
    assert variable_names == ["Hveg", "SWnet"]


def test_copy_properties_file():
    """Copying the properties file should create it in the target folder."""
    with TemporaryDirectory() as temp_dir1, TemporaryDirectory() as temp_dir2:
        source = Path(temp_dir1)
        target = Path(temp_dir2)

        # Create a properties.json in the source folder, then copy it over.
        dummy_property_file(source)
        dataset_protocol.copy_properties_file(source, target)

        assert (target / "properties.json").exists()


def test_invalid_spatial_bounds_north_south():
    """A south bound above the north bound must be rejected."""
    with pytest.raises(ValueError, match="greater than northern bound"):
        SpatialBounds(51, 6, 54, 3)


def test_invalid_spatial_bounds_east_west():
    """A west bound beyond the east bound must be rejected."""
    with pytest.raises(ValueError, match="greater than eastern bound"):
        SpatialBounds(54, 6, 51, 20)


def test_invalid_time_bounds():
    """A start time after the end time must be rejected."""
    start = np.datetime64("2021-01-01")
    end = np.datetime64("2020-12-31")
    with pytest.raises(ValueError):
        TimeBounds(start, end)
6 changes: 6 additions & 0 deletions tests/test_datasets/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
"""This module contains all tests for datasets included in zampy."""
from pathlib import Path


test_folder = Path(__file__).resolve().parents[1]
data_folder = test_folder / "test_data"
Loading

0 comments on commit d673d17

Please sign in to comment.