diff --git a/examples/tech_meetings/20241213_run_damages_losses_without_files.ipynb b/examples/tech_meetings/20241213_run_damages_losses_without_files.ipynb
new file mode 100644
index 000000000..41167fac9
--- /dev/null
+++ b/examples/tech_meetings/20241213_run_damages_losses_without_files.ipynb
@@ -0,0 +1,274 @@
+{
+ "cells": [
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "# Run analysis without input files\n",
+ "In this example it is shown how Damages and Losses analyses can be run without input files, like resilience and damage curves.\n",
+    "This will make running large numbers of analyses (e.g. for uncertainty analysis) much more convenient, because the required objects can be passed to the analyses directly instead of preparing input files.\n",
+ "Related to https://github.com/Deltares/ra2ce/issues/654."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Imports\n",
+ "\n",
+ "from pathlib import Path\n",
+ "from ra2ce.ra2ce_handler import Ra2ceHandler\n",
+ "from ra2ce.network.network_config_data.enums.aggregate_wl_enum import AggregateWlEnum\n",
+ "from ra2ce.network.network_config_data.enums.source_enum import SourceEnum\n",
+ "from ra2ce.analysis.analysis_config_data.analysis_config_data import (\n",
+ " AnalysisConfigData,\n",
+ " AnalysisSectionDamages,\n",
+ " AnalysisSectionLosses,\n",
+ ")\n",
+ "from ra2ce.analysis.analysis_config_data.enums.analysis_damages_enum import (\n",
+ " AnalysisDamagesEnum,\n",
+ ")\n",
+ "from ra2ce.analysis.analysis_config_data.enums.analysis_losses_enum import (\n",
+ " AnalysisLossesEnum,\n",
+ ")\n",
+ "from ra2ce.analysis.analysis_config_data.enums.damage_curve_enum import DamageCurveEnum\n",
+ "from ra2ce.analysis.analysis_config_data.enums.event_type_enum import EventTypeEnum\n",
+ "from ra2ce.analysis.analysis_config_data.enums.traffic_period_enum import (\n",
+ " TrafficPeriodEnum,\n",
+ ")\n",
+ "from ra2ce.analysis.analysis_config_data.enums.trip_purpose_enum import TripPurposeEnum\n",
+ "from ra2ce.analysis.analysis_config_data.enums.weighing_enum import WeighingEnum\n",
+ "from ra2ce.network.network_config_data.enums.aggregate_wl_enum import AggregateWlEnum\n",
+ "from ra2ce.network.network_config_data.network_config_data import (\n",
+ " HazardSection,\n",
+ " NetworkConfigData,\n",
+ " NetworkSection,\n",
+ ")\n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "root_dir = Path.cwd().parent.joinpath(\"data\", \"adaptation\")\n",
+ "\n",
+ "static_path = root_dir.joinpath(\"static\")\n",
+ "hazard_path =static_path.joinpath(\"hazard\")\n",
+ "network_path = static_path.joinpath(\"network\")\n",
+ "output_path=root_dir.joinpath(\"output\")\n",
+ "\n",
+ "input_path = root_dir.joinpath(\"input\")"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## Configure the network"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "_network_section = NetworkSection(\n",
+ " source= SourceEnum.SHAPEFILE, \n",
+ " primary_file = [network_path.joinpath(\"network.shp\")], \n",
+ " file_id = \"ID\",\n",
+ " link_type_column=\"highway\",\n",
+ " save_gpkg=True\n",
+ ")\n",
+ "\n",
+ "_hazard = HazardSection(\n",
+ " hazard_map=[Path(file) for file in hazard_path.glob(\"*.tif\")],\n",
+ " hazard_field_name= [\"waterdepth\"],\n",
+ " aggregate_wl = AggregateWlEnum.MEAN,\n",
+ " hazard_crs = \"EPSG:4326\",\n",
+ ")\n",
+ "\n",
+ "_network_config_data = NetworkConfigData(\n",
+ " root_path=root_dir,\n",
+ " static_path=static_path,\n",
+ " output_path=output_path,\n",
+ " network=_network_section,\n",
+ " hazard=_hazard\n",
+ " )"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## Configuring the analyses\n",
+ "First the analysis run needs to be configured in the standard way, except no paths to input files are provided."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "\n",
+ "_damages_section = AnalysisSectionDamages(\n",
+ " analysis=AnalysisDamagesEnum.DAMAGES,\n",
+ " event_type=EventTypeEnum.EVENT,\n",
+ " damage_curve=DamageCurveEnum.MAN,\n",
+ " save_gpkg=True,\n",
+ " save_csv=True,\n",
+ ")\n",
+ "\n",
+ "_multi_link_losses_section = AnalysisSectionLosses(\n",
+ " analysis=AnalysisLossesEnum.MULTI_LINK_LOSSES,\n",
+ " event_type=EventTypeEnum.EVENT,\n",
+ " weighing=WeighingEnum.TIME,\n",
+ " threshold=0,\n",
+ " production_loss_per_capita_per_hour=42,\n",
+ " hours_per_traffic_period=8,\n",
+ " traffic_period=TrafficPeriodEnum.DAY,\n",
+ " trip_purposes=[\n",
+ " TripPurposeEnum.BUSINESS,\n",
+ " TripPurposeEnum.COMMUTE,\n",
+ " TripPurposeEnum.FREIGHT,\n",
+ " TripPurposeEnum.OTHER,\n",
+ " ],\n",
+ " resilience_curves_file=None,\n",
+ " traffic_intensities_file=None,\n",
+ " values_of_time_file=None,\n",
+ " save_gpkg=True,\n",
+ " save_csv=True,\n",
+ ")\n",
+ "\n",
+ "_analysis_config_data = AnalysisConfigData(\n",
+ " root_path=root_dir,\n",
+ " input_path=input_path,\n",
+ " static_path=static_path,\n",
+ " output_path=output_path,\n",
+ " analyses=[\n",
+ " _damages_section,\n",
+ " _multi_link_losses_section,\n",
+ " ],\n",
+ " aggregate_wl=AggregateWlEnum.MEAN,\n",
+ ")"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## Creating the handler"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "handler = Ra2ceHandler.from_config(_network_config_data, _analysis_config_data)\n",
+ "\n",
+ "handler.configure()"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## Defining the analysis inputs\n",
+ "In this example the data is read from files outside the analysis, but this might as well be replaced by creating/generating them by code."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "from ra2ce.analysis.damages.damage_functions.manual_damage_functions_reader import ManualDamageFunctionsReader\n",
+ "from ra2ce.analysis.losses.resilience_curves.resilience_curves_reader import ResilienceCurvesReader\n",
+ "from ra2ce.analysis.losses.time_values.time_values_reader import TimeValuesReader\n",
+ "from ra2ce.analysis.losses.traffic_intensities.traffic_intensities_reader import TrafficIntensitiesReader\n",
+ "\n",
+ "# Damages input data\n",
+ "_manual_damage_functions = ManualDamageFunctionsReader().read(input_path.joinpath(\"damage_functions\"))\n",
+ "\n",
+ "# Losses input data\n",
+ "_resilience_curves = ResilienceCurvesReader().read(input_path.joinpath(\"resilience_curve.csv\"))\n",
+ "_values_of_time = TimeValuesReader().read(input_path.joinpath(\"values_of_time.csv\"))\n",
+ "_traffic_intensities = TrafficIntensitiesReader(_analysis_config_data.network.file_id).read(input_path.joinpath(\"traffic_intensities.csv\"))"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## Adding the analysis inputs"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "\n",
+ "from ra2ce.analysis.damages.damages import Damages\n",
+ "from ra2ce.analysis.losses.multi_link_losses import MultiLinkLosses\n",
+ "\n",
+ "\n",
+ "_damages = handler.analysis_collection.get_analysis(AnalysisDamagesEnum.DAMAGES)\n",
+ "assert isinstance(_damages, Damages)\n",
+ "_damages.manual_damage_functions = _manual_damage_functions\n",
+ "\n",
+ "_losses = handler.analysis_collection.get_analysis(AnalysisLossesEnum.MULTI_LINK_LOSSES)\n",
+ "assert isinstance(_losses, MultiLinkLosses)\n",
+ "_losses.resilience_curves = _resilience_curves\n",
+ "_losses.values_of_time = _values_of_time\n",
+ "_losses.intensities = _traffic_intensities"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## Running the analysis"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "handler.run_analysis()"
+ ]
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": ".env",
+ "language": "python",
+ "name": "python3"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 3
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython3",
+ "version": "3.11.10"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 2
+}
diff --git a/ra2ce/analysis/analysis_collection.py b/ra2ce/analysis/analysis_collection.py
index 3af1f3c6c..aca27624b 100644
--- a/ra2ce/analysis/analysis_collection.py
+++ b/ra2ce/analysis/analysis_collection.py
@@ -24,6 +24,13 @@
from dataclasses import dataclass, field
from ra2ce.analysis.adaptation.adaptation import Adaptation
+from ra2ce.analysis.analysis_config_data.enums.analysis_damages_enum import (
+ AnalysisDamagesEnum,
+)
+from ra2ce.analysis.analysis_config_data.enums.analysis_enum import AnalysisEnum
+from ra2ce.analysis.analysis_config_data.enums.analysis_losses_enum import (
+ AnalysisLossesEnum,
+)
from ra2ce.analysis.analysis_config_wrapper import AnalysisConfigWrapper
from ra2ce.analysis.analysis_factory import AnalysisFactory
from ra2ce.analysis.damages.analysis_damages_protocol import AnalysisDamagesProtocol
@@ -47,6 +54,8 @@ def from_config(cls, analysis_config: AnalysisConfigWrapper) -> AnalysisCollecti
Returns:
AnalysisCollection: Collection of analyses to be executed.
"""
+ if not analysis_config:
+ return None
return cls(
damages_analyses=[
AnalysisFactory.get_damages_analysis(analysis, analysis_config)
@@ -60,3 +69,14 @@ def from_config(cls, analysis_config: AnalysisConfigWrapper) -> AnalysisCollecti
analysis_config.config_data.adaptation, analysis_config
),
)
+
+    def get_analysis(
+        self, analysis_type: AnalysisEnum | AnalysisDamagesEnum | AnalysisLossesEnum
+    ) -> AnalysisLossesProtocol | AnalysisDamagesProtocol | Adaptation | None:
+        if analysis_type == AnalysisEnum.ADAPTATION:
+            return self.adaptation_analysis
+        if isinstance(analysis_type, AnalysisDamagesEnum):
+            return next((_a for _a in self.damages_analyses if _a.analysis.analysis == analysis_type), None)
+        if isinstance(analysis_type, AnalysisLossesEnum):
+            return next((_a for _a in self.losses_analyses if _a.analysis.analysis == analysis_type), None)
+        raise ValueError(f"Analysis type {analysis_type} not found in the collection.")
diff --git a/ra2ce/analysis/losses/losses_base.py b/ra2ce/analysis/losses/losses_base.py
index ecbd3be27..03b5f36e7 100644
--- a/ra2ce/analysis/losses/losses_base.py
+++ b/ra2ce/analysis/losses/losses_base.py
@@ -101,8 +101,9 @@ def __init__(
self.analysis.production_loss_per_capita_per_hour
)
- self._check_validity_analysis_files()
- self.intensities = TrafficIntensitiesReader([self.link_id]).read(
+ # TODO: move this to the execute?
+ # self._check_validity_analysis_files()
+ self.intensities = TrafficIntensitiesReader(self.link_id).read(
self.analysis.traffic_intensities_file
)
self.resilience_curves = ResilienceCurvesReader().read(
diff --git a/ra2ce/analysis/losses/resilience_curves/resilience_curves_reader.py b/ra2ce/analysis/losses/resilience_curves/resilience_curves_reader.py
index 56054cba3..0ac821c03 100644
--- a/ra2ce/analysis/losses/resilience_curves/resilience_curves_reader.py
+++ b/ra2ce/analysis/losses/resilience_curves/resilience_curves_reader.py
@@ -28,20 +28,17 @@
from ra2ce.network.network_config_data.enums.road_type_enum import RoadTypeEnum
-@dataclass
class ResilienceCurvesReader(LossesInputDataReaderBase):
"""
Class to read the resilience curves from a csv file.
"""
object_type: type = ResilienceCurves
- csv_columns: list[str] = field(
- default_factory=lambda: [
- "link_type_hazard_intensity",
- "duration_steps",
- "functionality_loss_ratio",
- ]
- )
+ csv_columns: list[str] = [
+ "link_type_hazard_intensity",
+ "duration_steps",
+ "functionality_loss_ratio",
+ ]
def _parse_df(self, df: pd.DataFrame) -> ResilienceCurves:
def parse_link_type_hazard_intensity(
diff --git a/ra2ce/analysis/losses/time_values/time_values_reader.py b/ra2ce/analysis/losses/time_values/time_values_reader.py
index 357b315db..6a48147e2 100644
--- a/ra2ce/analysis/losses/time_values/time_values_reader.py
+++ b/ra2ce/analysis/losses/time_values/time_values_reader.py
@@ -14,7 +14,6 @@
You should have received a copy of the GNU General Public License
along with this program. If not, see .
"""
-from dataclasses import dataclass, field
from pathlib import Path
from pandas import DataFrame
@@ -26,16 +25,13 @@
from ra2ce.analysis.losses.time_values.time_values import TimeValues
-@dataclass
class TimeValuesReader(LossesInputDataReaderBase):
"""
Class to read the time values from a csv file.
"""
object_type: type = TimeValues
- csv_columns: list[str] = field(
- default_factory=lambda: ["trip_types", "value_of_time", "occupants"]
- )
+ csv_columns: list[str] = ["trip_types", "value_of_time", "occupants"]
def _parse_df(self, df: DataFrame) -> TimeValues:
_time_values = {
diff --git a/ra2ce/analysis/losses/traffic_intensities/traffic_intensities_reader.py b/ra2ce/analysis/losses/traffic_intensities/traffic_intensities_reader.py
index f607a333b..4a9822277 100644
--- a/ra2ce/analysis/losses/traffic_intensities/traffic_intensities_reader.py
+++ b/ra2ce/analysis/losses/traffic_intensities/traffic_intensities_reader.py
@@ -15,7 +15,6 @@
along with this program. If not, see .
"""
import re
-from dataclasses import dataclass, field
from pathlib import Path
import pandas as pd
@@ -32,16 +31,18 @@
)
-@dataclass
class TrafficIntensitiesReader(LossesInputDataReaderBase):
"""
Class to read the traffic intensities per traffic period from a csv file.
"""
- csv_columns: list[str] = field(default_factory=list)
+ csv_columns: list[str] = []
separator: str = ","
object_type: type = TrafficIntensities
+ def __init__(self, link_id: str) -> None:
+ self.csv_columns = [link_id]
+
def _parse_df(self, df: pd.DataFrame) -> TrafficIntensities:
_traffic_intensities = TrafficIntensities()
for col in df:
diff --git a/ra2ce/ra2ce_handler.py b/ra2ce/ra2ce_handler.py
index 5fba5ae81..eec84b1b6 100644
--- a/ra2ce/ra2ce_handler.py
+++ b/ra2ce/ra2ce_handler.py
@@ -30,6 +30,7 @@
from shapely.errors import ShapelyDeprecationWarning
+from ra2ce.analysis.analysis_collection import AnalysisCollection
from ra2ce.analysis.analysis_config_data.analysis_config_data import AnalysisConfigData
from ra2ce.analysis.analysis_config_wrapper import AnalysisConfigWrapper
from ra2ce.analysis.analysis_result.analysis_result_wrapper import AnalysisResultWrapper
@@ -57,6 +58,7 @@ class Ra2ceHandler:
"""
input_config: ConfigWrapper
+ analysis_collection: AnalysisCollection
def __init__(self, network: Optional[Path], analysis: Optional[Path]) -> None:
if network or analysis:
@@ -158,6 +160,9 @@ def configure(self) -> None:
`NetworkConfigData` so that the analyses can be succesfully run.
"""
self.input_config.configure()
+ self.analysis_collection = AnalysisCollection.from_config(
+ self.input_config.analysis_config
+ )
def run_analysis(self) -> list[AnalysisResultWrapper]:
"""
@@ -178,7 +183,7 @@ def run_analysis(self) -> list[AnalysisResultWrapper]:
logging.error(_error)
raise ValueError(_error)
- return AnalysisRunnerFactory.run(self.input_config)
+ return AnalysisRunnerFactory.run(self.input_config, self.analysis_collection)
@staticmethod
def run_with_ini_files(
diff --git a/ra2ce/runners/analysis_runner_factory.py b/ra2ce/runners/analysis_runner_factory.py
index 43ad4607b..640299732 100644
--- a/ra2ce/runners/analysis_runner_factory.py
+++ b/ra2ce/runners/analysis_runner_factory.py
@@ -66,13 +66,23 @@ def get_supported_runners(ra2ce_input: ConfigWrapper) -> list[Type[AnalysisRunne
return _supported_runners
@staticmethod
- def run(ra2ce_input: ConfigWrapper) -> list[AnalysisResultWrapper]:
+ def run(
+ ra2ce_input: ConfigWrapper,
+ analysis_collection: AnalysisCollection,
+ ) -> list[AnalysisResultWrapper]:
+ """
+ Runs the given analysis collection.
+
+ Args:
+            ra2ce_input (ConfigWrapper): Input representing a set of network and analysis ini configurations.
+ analysis_collection (AnalysisCollection): Collection of analyses to run.
+
+ Returns:
+ list[AnalysisResultWrapper]: List of analysis results.
+ """
_supported_runners = AnalysisRunnerFactory.get_supported_runners(ra2ce_input)
- _analysis_collection = AnalysisCollection.from_config(
- ra2ce_input.analysis_config
- )
_results = []
for _runner_type in _supported_runners:
- _run_results = _runner_type().run(_analysis_collection)
+ _run_results = _runner_type().run(analysis_collection)
_results.extend(_run_results)
return _results
diff --git a/tests/analysis/losses/test_losses.py b/tests/analysis/losses/test_losses.py
index f93d809ef..9d234a9c4 100644
--- a/tests/analysis/losses/test_losses.py
+++ b/tests/analysis/losses/test_losses.py
@@ -64,14 +64,12 @@ def test_initialize_no_data(self, losses_analysis: type[AnalysisLossesProtocol])
)
# 2. Run test.
- with pytest.raises(ValueError) as exc:
- _losses = losses_analysis(_analysis_input, _config)
+ _losses = losses_analysis(_analysis_input, _config)
# 3. Verify final expectations.
- assert (
- str(exc.value)
- == "traffic_intensities_file, resilience_curves_file, and values_of_time_file should be given"
- )
+ assert isinstance(_losses, LossesBase)
+ assert isinstance(_losses, losses_analysis)
+ assert isinstance(_losses, AnalysisBase)
def test_initialize_with_data(
self,
diff --git a/tests/analysis/losses/traffic_intensities/test_traffic_intensities_reader.py b/tests/analysis/losses/traffic_intensities/test_traffic_intensities_reader.py
index 71dbef045..e87e4b1b3 100644
--- a/tests/analysis/losses/traffic_intensities/test_traffic_intensities_reader.py
+++ b/tests/analysis/losses/traffic_intensities/test_traffic_intensities_reader.py
@@ -19,7 +19,7 @@
class TestTimeValuesReader:
def test_initialize(self):
# 1. Run test
- _reader = TrafficIntensitiesReader(["link_id"])
+ _reader = TrafficIntensitiesReader("link_id")
# 2. Verify expections
assert isinstance(_reader, TrafficIntensitiesReader)
@@ -37,7 +37,7 @@ def test_read_traffic_intensities(
assert traffic_intensities_csv.is_file()
# 2. Execute test
- _traffic_intensities = TrafficIntensitiesReader(["link_id"]).read(
+ _traffic_intensities = TrafficIntensitiesReader("link_id").read(
traffic_intensities_csv
)
diff --git a/tests/analysis/test_analysis_collection.py b/tests/analysis/test_analysis_collection.py
index 8194d8adc..536c6c5ad 100644
--- a/tests/analysis/test_analysis_collection.py
+++ b/tests/analysis/test_analysis_collection.py
@@ -96,14 +96,6 @@ def test_create_collection_with_losses_analyses(
self,
analysis: AnalysisLossesEnum,
):
- def verify_expectations(_collection, analysis):
- assert isinstance(_collection, AnalysisCollection)
- assert len(_collection.losses_analyses) == 1
-
- _generated_analysis = _collection.losses_analyses[0]
- assert isinstance(_generated_analysis, AnalysisLossesProtocol)
- assert _generated_analysis.analysis.analysis == analysis
-
# 1. Define test data.
_config = AnalysisConfigWrapper()
_config.config_data.input_path = Path("Any input path")
@@ -112,21 +104,16 @@ def verify_expectations(_collection, analysis):
self.MockAnalysisSectionLosses(analysis=analysis)
)
- if (
- analysis.config_value == "single_link_losses"
- or analysis.config_value == "multi_link_losses"
- ):
- with pytest.raises(ValueError):
- # 2. Run test.
- _collection = AnalysisCollection.from_config(_config)
- # 3. Verify expectations.
- verify_expectations(_collection, analysis)
-
- else:
- # 2. Run test.
- _collection = AnalysisCollection.from_config(_config)
- # 3. Verify expectations.
- verify_expectations(_collection, analysis)
+ # 2. Run test.
+ _collection = AnalysisCollection.from_config(_config)
+
+ # 3. Verify expectations.
+ assert isinstance(_collection, AnalysisCollection)
+ assert len(_collection.losses_analyses) == 1
+
+ _generated_analysis = _collection.losses_analyses[0]
+ assert isinstance(_generated_analysis, AnalysisLossesProtocol)
+ assert _generated_analysis.analysis.analysis == analysis
def test_create_collection_with_adaptation(self):
# 1. Define test data.