Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

feat: 660 create an example to run damages and losses without input files #661

Draft
wants to merge 16 commits into
base: master
Choose a base branch
from
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
274 changes: 274 additions & 0 deletions examples/tech_meetings/20241213_run_damages_losses_without_files.ipynb
Original file line number Diff line number Diff line change
@@ -0,0 +1,274 @@
{
"cells": [
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# Run analysis without input files\n",
"This example shows how Damages and Losses analyses can be run without input files such as resilience curves and damage curves.\n",
"This will make running large numbers of analyses (e.g. for uncertainty quantification) much more convenient, because instead of preparing input files for the analyses, the required objects can be passed to them directly.\n",
"Related to https://github.com/Deltares/ra2ce/issues/654."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Imports\n",
"\n",
"from pathlib import Path\n",
"from ra2ce.ra2ce_handler import Ra2ceHandler\n",
"from ra2ce.network.network_config_data.enums.aggregate_wl_enum import AggregateWlEnum\n",
"from ra2ce.network.network_config_data.enums.source_enum import SourceEnum\n",
"from ra2ce.analysis.analysis_config_data.analysis_config_data import (\n",
" AnalysisConfigData,\n",
" AnalysisSectionDamages,\n",
" AnalysisSectionLosses,\n",
")\n",
"from ra2ce.analysis.analysis_config_data.enums.analysis_damages_enum import (\n",
" AnalysisDamagesEnum,\n",
")\n",
"from ra2ce.analysis.analysis_config_data.enums.analysis_losses_enum import (\n",
" AnalysisLossesEnum,\n",
")\n",
"from ra2ce.analysis.analysis_config_data.enums.damage_curve_enum import DamageCurveEnum\n",
"from ra2ce.analysis.analysis_config_data.enums.event_type_enum import EventTypeEnum\n",
"from ra2ce.analysis.analysis_config_data.enums.traffic_period_enum import (\n",
" TrafficPeriodEnum,\n",
")\n",
"from ra2ce.analysis.analysis_config_data.enums.trip_purpose_enum import TripPurposeEnum\n",
"from ra2ce.analysis.analysis_config_data.enums.weighing_enum import WeighingEnum\n",
"from ra2ce.network.network_config_data.enums.aggregate_wl_enum import AggregateWlEnum\n",
"from ra2ce.network.network_config_data.network_config_data import (\n",
" HazardSection,\n",
" NetworkConfigData,\n",
" NetworkSection,\n",
")\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"root_dir = Path.cwd().parent.joinpath(\"data\", \"adaptation\")\n",
"\n",
"static_path = root_dir.joinpath(\"static\")\n",
"hazard_path =static_path.joinpath(\"hazard\")\n",
"network_path = static_path.joinpath(\"network\")\n",
"output_path=root_dir.joinpath(\"output\")\n",
"\n",
"input_path = root_dir.joinpath(\"input\")"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Configure the network"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"_network_section = NetworkSection(\n",
" source= SourceEnum.SHAPEFILE, \n",
" primary_file = [network_path.joinpath(\"network.shp\")], \n",
" file_id = \"ID\",\n",
" link_type_column=\"highway\",\n",
" save_gpkg=True\n",
")\n",
"\n",
"_hazard = HazardSection(\n",
" hazard_map=[Path(file) for file in hazard_path.glob(\"*.tif\")],\n",
" hazard_field_name= [\"waterdepth\"],\n",
" aggregate_wl = AggregateWlEnum.MEAN,\n",
" hazard_crs = \"EPSG:4326\",\n",
")\n",
"\n",
"_network_config_data = NetworkConfigData(\n",
" root_path=root_dir,\n",
" static_path=static_path,\n",
" output_path=output_path,\n",
" network=_network_section,\n",
" hazard=_hazard\n",
" )"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Configuring the analyses\n",
"First the analysis run needs to be configured in the standard way, except that no paths to input files are provided."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"\n",
"_damages_section = AnalysisSectionDamages(\n",
" analysis=AnalysisDamagesEnum.DAMAGES,\n",
" event_type=EventTypeEnum.EVENT,\n",
" damage_curve=DamageCurveEnum.MAN,\n",
" save_gpkg=True,\n",
" save_csv=True,\n",
")\n",
"\n",
"_multi_link_losses_section = AnalysisSectionLosses(\n",
" analysis=AnalysisLossesEnum.MULTI_LINK_LOSSES,\n",
" event_type=EventTypeEnum.EVENT,\n",
" weighing=WeighingEnum.TIME,\n",
" threshold=0,\n",
" production_loss_per_capita_per_hour=42,\n",
" hours_per_traffic_period=8,\n",
" traffic_period=TrafficPeriodEnum.DAY,\n",
" trip_purposes=[\n",
" TripPurposeEnum.BUSINESS,\n",
" TripPurposeEnum.COMMUTE,\n",
" TripPurposeEnum.FREIGHT,\n",
" TripPurposeEnum.OTHER,\n",
" ],\n",
" resilience_curves_file=None,\n",
" traffic_intensities_file=None,\n",
" values_of_time_file=None,\n",
" save_gpkg=True,\n",
" save_csv=True,\n",
")\n",
"\n",
"_analysis_config_data = AnalysisConfigData(\n",
" root_path=root_dir,\n",
" input_path=input_path,\n",
" static_path=static_path,\n",
" output_path=output_path,\n",
" analyses=[\n",
" _damages_section,\n",
" _multi_link_losses_section,\n",
" ],\n",
" aggregate_wl=AggregateWlEnum.MEAN,\n",
")"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Creating the handler"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"handler = Ra2ceHandler.from_config(_network_config_data, _analysis_config_data)\n",
"\n",
"handler.configure()"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Defining the analysis inputs\n",
"In this example the data is read from files outside the analysis, but this could equally be replaced by creating or generating the objects in code."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"from ra2ce.analysis.damages.damage_functions.manual_damage_functions_reader import ManualDamageFunctionsReader\n",
"from ra2ce.analysis.losses.resilience_curves.resilience_curves_reader import ResilienceCurvesReader\n",
"from ra2ce.analysis.losses.time_values.time_values_reader import TimeValuesReader\n",
"from ra2ce.analysis.losses.traffic_intensities.traffic_intensities_reader import TrafficIntensitiesReader\n",
"\n",
"# Damages input data\n",
"_manual_damage_functions = ManualDamageFunctionsReader().read(input_path.joinpath(\"damage_functions\"))\n",
"\n",
"# Losses input data\n",
"_resilience_curves = ResilienceCurvesReader().read(input_path.joinpath(\"resilience_curve.csv\"))\n",
"_values_of_time = TimeValuesReader().read(input_path.joinpath(\"values_of_time.csv\"))\n",
"_traffic_intensities = TrafficIntensitiesReader(_analysis_config_data.network.file_id).read(input_path.joinpath(\"traffic_intensities.csv\"))"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Adding the analysis inputs"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"\n",
"from ra2ce.analysis.damages.damages import Damages\n",
"from ra2ce.analysis.losses.multi_link_losses import MultiLinkLosses\n",
"\n",
"\n",
"_damages = handler.analysis_collection.get_analysis(AnalysisDamagesEnum.DAMAGES)\n",
"assert isinstance(_damages, Damages)\n",
"_damages.manual_damage_functions = _manual_damage_functions\n",
"\n",
"_losses = handler.analysis_collection.get_analysis(AnalysisLossesEnum.MULTI_LINK_LOSSES)\n",
"assert isinstance(_losses, MultiLinkLosses)\n",
"_losses.resilience_curves = _resilience_curves\n",
"_losses.values_of_time = _values_of_time\n",
"_losses.intensities = _traffic_intensities"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Running the analysis"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"handler.run_analysis()"
]
}
],
"metadata": {
"kernelspec": {
"display_name": ".env",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.11.10"
}
},
"nbformat": 4,
"nbformat_minor": 2
}
20 changes: 20 additions & 0 deletions ra2ce/analysis/analysis_collection.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,13 @@
from dataclasses import dataclass, field

from ra2ce.analysis.adaptation.adaptation import Adaptation
from ra2ce.analysis.analysis_config_data.enums.analysis_damages_enum import (
AnalysisDamagesEnum,
)
from ra2ce.analysis.analysis_config_data.enums.analysis_enum import AnalysisEnum
from ra2ce.analysis.analysis_config_data.enums.analysis_losses_enum import (
AnalysisLossesEnum,
)
from ra2ce.analysis.analysis_config_wrapper import AnalysisConfigWrapper
from ra2ce.analysis.analysis_factory import AnalysisFactory
from ra2ce.analysis.damages.analysis_damages_protocol import AnalysisDamagesProtocol
Expand All @@ -47,6 +54,8 @@ def from_config(cls, analysis_config: AnalysisConfigWrapper) -> AnalysisCollecti
Returns:
AnalysisCollection: Collection of analyses to be executed.
"""
if not analysis_config:
return None
return cls(
damages_analyses=[
AnalysisFactory.get_damages_analysis(analysis, analysis_config)
Expand All @@ -60,3 +69,14 @@ def from_config(cls, analysis_config: AnalysisConfigWrapper) -> AnalysisCollecti
analysis_config.config_data.adaptation, analysis_config
),
)

def get_analysis(
    self, analysis_type: AnalysisEnum | AnalysisDamagesEnum | AnalysisLossesEnum
) -> AnalysisLossesProtocol | AnalysisDamagesProtocol | Adaptation | None:
    """
    Get the analysis of the requested type from the collection.

    Args:
        analysis_type: The type of analysis to look up.

    Returns:
        The adaptation analysis for `AnalysisEnum.ADAPTATION`; otherwise the
        first damages/losses analysis whose configured type matches
        `analysis_type`, or None when no analysis of that type is present.

    Raises:
        ValueError: If `analysis_type` is not a supported analysis enum.
    """
    if analysis_type == AnalysisEnum.ADAPTATION:
        return self.adaptation_analysis
    if isinstance(analysis_type, AnalysisDamagesEnum):
        # Match on the configured analysis type instead of blindly taking the
        # first entry, so the correct analysis is returned when multiple
        # damages analyses are configured.
        # NOTE(review): assumes each analysis exposes its config section as
        # `x.analysis.analysis` (enum member) — confirm against the protocols.
        return next(
            (
                x
                for x in self.damages_analyses
                if x.analysis.analysis == analysis_type
            ),
            None,
        )
    if isinstance(analysis_type, AnalysisLossesEnum):
        return next(
            (
                x
                for x in self.losses_analyses
                if x.analysis.analysis == analysis_type
            ),
            None,
        )
    raise ValueError(f"Analysis type {analysis_type} not found in the collection.")
5 changes: 3 additions & 2 deletions ra2ce/analysis/losses/losses_base.py
Original file line number Diff line number Diff line change
Expand Up @@ -101,8 +101,9 @@ def __init__(
self.analysis.production_loss_per_capita_per_hour
)

self._check_validity_analysis_files()
self.intensities = TrafficIntensitiesReader([self.link_id]).read(
# TODO: move this to the execute?
# self._check_validity_analysis_files()
self.intensities = TrafficIntensitiesReader(self.link_id).read(
self.analysis.traffic_intensities_file
)
self.resilience_curves = ResilienceCurvesReader().read(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -28,20 +28,17 @@
from ra2ce.network.network_config_data.enums.road_type_enum import RoadTypeEnum


@dataclass
class ResilienceCurvesReader(LossesInputDataReaderBase):
"""
Class to read the resilience curves from a csv file.
"""

object_type: type = ResilienceCurves
csv_columns: list[str] = field(
default_factory=lambda: [
"link_type_hazard_intensity",
"duration_steps",
"functionality_loss_ratio",
]
)
csv_columns: list[str] = [
"link_type_hazard_intensity",
"duration_steps",
"functionality_loss_ratio",
]

def _parse_df(self, df: pd.DataFrame) -> ResilienceCurves:
def parse_link_type_hazard_intensity(
Expand Down
6 changes: 1 addition & 5 deletions ra2ce/analysis/losses/time_values/time_values_reader.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,6 @@
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
from dataclasses import dataclass, field
from pathlib import Path

from pandas import DataFrame
Expand All @@ -26,16 +25,13 @@
from ra2ce.analysis.losses.time_values.time_values import TimeValues


@dataclass
class TimeValuesReader(LossesInputDataReaderBase):
"""
Class to read the time values from a csv file.
"""

object_type: type = TimeValues
csv_columns: list[str] = field(
default_factory=lambda: ["trip_types", "value_of_time", "occupants"]
)
csv_columns: list[str] = ["trip_types", "value_of_time", "occupants"]

def _parse_df(self, df: DataFrame) -> TimeValues:
_time_values = {
Expand Down
Loading