Skip to content

Commit

Permalink
Merge pull request #249 from Deltares/feature/248-create-objects-for-…
Browse files Browse the repository at this point in the history
…graph-and-files-in-config-wrappers

feature: replace dicts (files, hazard_files, graphs) by dataclasses
  • Loading branch information
ArdtK authored Nov 23, 2023
2 parents 7ccccc7 + 7de4f96 commit 28b304d
Show file tree
Hide file tree
Showing 31 changed files with 817 additions and 331 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -137,7 +137,6 @@ class AnalysisConfigData(ConfigDataProtocol):
static_path: Optional[Path] = None
project: ProjectSection = field(default_factory=lambda: ProjectSection())
analyses: list[AnalysisSectionBase] = field(default_factory=list)
files: dict[str, Path] = field(default_factory=dict)
origins_destinations: Optional[OriginsDestinationsSection] = field(
default_factory=lambda: OriginsDestinationsSection()
)
Expand Down
9 changes: 4 additions & 5 deletions ra2ce/analyses/analysis_config_wrapper.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,25 +22,25 @@
from __future__ import annotations

from pathlib import Path
from typing import Optional

from ra2ce.analyses.analysis_config_data.analysis_config_data import AnalysisConfigData
from ra2ce.analyses.analysis_config_data.analysis_config_data_validator import (
AnalysisConfigDataValidator,
)
from ra2ce.common.configuration.config_wrapper_protocol import ConfigWrapperProtocol
from ra2ce.graph.graph_files.graph_files_collection import GraphFilesCollection
from ra2ce.graph.network_config_wrapper import NetworkConfigWrapper


class AnalysisConfigWrapper(ConfigWrapperProtocol):
ini_file: Path
config_data: AnalysisConfigData
graphs: Optional[dict]
graph_files: GraphFilesCollection

def __init__(self) -> None:
self.ini_file = None
self.config_data = AnalysisConfigData()
self.graphs = None
self.graph_files = GraphFilesCollection()

def initialize_output_dirs(self) -> None:
"""
Expand Down Expand Up @@ -81,13 +81,12 @@ def from_data_with_network(

if not isinstance(network_config, NetworkConfigWrapper):
raise ValueError("No valid network config provided.")
_new_analysis.config_data.files = network_config.files
_new_analysis.config_data.network = network_config.config_data.network
_new_analysis.config_data.origins_destinations = (
network_config.config_data.origins_destinations
)
# Graphs are retrieved from the already configured object
_new_analysis.graphs = network_config.graphs
_new_analysis.graph_files = network_config.graph_files

return _new_analysis

Expand Down
23 changes: 7 additions & 16 deletions ra2ce/analyses/direct/analyses_direct.py
Original file line number Diff line number Diff line change
Expand Up @@ -37,6 +37,7 @@
DamageNetworkEvents,
DamageNetworkReturnPeriods,
)
from ra2ce.graph.graph_files.graph_files_collection import GraphFilesCollection


class DirectAnalyses: ### THIS SHOULD ONLY DO COORDINATION
Expand All @@ -50,11 +51,11 @@ class DirectAnalyses: ### THIS SHOULD ONLY DO COORDINATION
"""

config: AnalysisConfigData
graphs: dict
graph_files: GraphFilesCollection

def __init__(self, config: AnalysisConfigData, graphs: dict):
def __init__(self, config: AnalysisConfigData, graph_files: GraphFilesCollection):
self.config = config
self.graphs = graphs
self.graph_files = graph_files

def execute(self):
"""Main Coordinator of all direct damage analysis
Expand Down Expand Up @@ -106,23 +107,14 @@ def road_damage(self, analysis: AnalysisSectionDirect) -> gpd.GeoDataFrame:
### CONTROLLER FOR CALCULATING THE ROAD DAMAGE
Arguments:
*analysis* (dict) : contains part of the settings from the analysis ini
*analysis* (AnalysisSectionDirect) : contains part of the settings from the analysis ini
Returns:
*result_gdf* (GeoDataFrame) : The original hazard dataframe with the result of the damage calculations added
"""
# Open the network with hazard data
# Dirty fix, TODO: figure out why this key does not exist under certain conditions
if (
"base_network_hazard" not in self.graphs
): # key is missing due to error in handler?
self.graphs["base_network_hazard"] = None

road_gdf = self.graphs["base_network_hazard"]
if self.graphs["base_network_hazard"] is None:
road_gdf = gpd.read_feather(self.config.files["base_network_hazard"])

road_gdf = self.graph_files.base_network_hazard.get_graph()
road_gdf.columns = rename_road_gdf_to_conventions(road_gdf.columns)

# Find the hazard columns; these may be events or return periods
Expand Down Expand Up @@ -186,8 +178,7 @@ def effectiveness_measures(self, analysis: AnalysisSectionDirect):
em = EffectivenessMeasures(self.config, analysis)
effectiveness_dict = em.load_effectiveness_table()

if self.graphs["base_network_hazard"] is None:
gdf_in = gpd.read_feather(self.config.files["base_network_hazard"])
gdf_in = self.graph_files.base_network_hazard.get_graph()

if analysis.create_table is True:
df = em.create_feature_table(
Expand Down
2 changes: 1 addition & 1 deletion ra2ce/analyses/direct/damage/max_damage.py
Original file line number Diff line number Diff line change
Expand Up @@ -59,7 +59,7 @@ def from_csv(self, path: Path, sep=",") -> None:
"""
self.name = path.stem
self.raw_data = pd.read_csv(path, index_col="Road_type \ lanes", sep=sep)
self.raw_data = pd.read_csv(path, index_col=r"Road_type \ lanes", sep=sep)
self.origin_path = path # to track the original path from which the object was constructed; maybe also date?

###Determine units
Expand Down
108 changes: 53 additions & 55 deletions ra2ce/analyses/indirect/analyses_indirect.py
Original file line number Diff line number Diff line change
Expand Up @@ -43,28 +43,28 @@
from ra2ce.analyses.indirect.traffic_analysis.traffic_analysis_factory import (
TrafficAnalysisFactory,
)
from ra2ce.common.io.readers.graph_pickle_reader import GraphPickleReader
from ra2ce.graph.graph_files.graph_files_collection import GraphFilesCollection
from ra2ce.graph.networks_utils import buffer_geometry, graph_to_gdf, graph_to_gpkg


class IndirectAnalyses:
"""Indirect analyses that can be done with NetworkX graphs.
Attributes:
config: A dictionary with the configuration details on how to create and adjust the network.
graphs: A dictionary with one or multiple NetworkX graphs.
config: An object with the configuration details on how to create and adjust the network.
graph_files: An object with one or multiple NetworkX graph files.
"""

config: AnalysisConfigData
graphs: dict
graph_files: GraphFilesCollection
hazard_names_df: pd.DataFrame

_file_name_key = "File name"
_ra2ce_name_key = "RA2CE name"

def __init__(self, config: AnalysisConfigData, graphs: list[Any]):
def __init__(self, config: AnalysisConfigData, graph_files: GraphFilesCollection):
self.config = config
self.graphs = graphs
self.graph_files = graph_files
if self.config.output_path.joinpath("hazard_names.xlsx").is_file():
self.hazard_names_df = pd.read_excel(
self.config.output_path.joinpath("hazard_names.xlsx")
Expand All @@ -76,7 +76,9 @@ def __init__(self, config: AnalysisConfigData, graphs: list[Any]):
self.hazard_names_df = pd.DataFrame(data=None)
self.config.hazard_names = list()

def single_link_redundancy(self, graph, analysis: AnalysisSectionIndirect):
def single_link_redundancy(
self, graph: nx.Graph, analysis: AnalysisSectionIndirect
):
"""This is the function to analyse roads with a single link disruption and an alternative route.
Args:
Expand Down Expand Up @@ -285,7 +287,9 @@ def _single_link_losses_categorized(
axis=1,
)

def multi_link_redundancy(self, graph: dict, analysis: AnalysisSectionIndirect):
def multi_link_redundancy(
self, graph: nx.classes.MultiGraph, analysis: AnalysisSectionIndirect
):
"""Calculates the multi-link redundancy of a NetworkX graph.
The function removes all links of a variable that have a minimum value
Expand Down Expand Up @@ -338,18 +342,28 @@ def multi_link_redundancy(self, graph: dict, analysis: AnalysisSectionIndirect):
u, v, k, edata = edges

if nx.has_path(graph, u, v):
alt_dist = nx.dijkstra_path_length(graph, u, v, weight=analysis.weighing)
alt_dist = nx.dijkstra_path_length(
graph, u, v, weight=analysis.weighing
)
alt_nodes = nx.dijkstra_path(graph, u, v)
connected = 1
else:
alt_dist, alt_nodes, connected = np.NaN, np.NaN, 0

data = {"u": [u], "v": [v], "alt_dist": [alt_dist], "alt_nodes": [alt_nodes], "connected": [connected]}
data = {
"u": [u],
"v": [v],
"alt_dist": [alt_dist],
"alt_nodes": [alt_nodes],
"connected": [connected],
}

if "rfid" in gdf:
data["rfid"] = [str(edata["rfid"])]

df_calculated = pd.concat([df_calculated, pd.DataFrame(data)], ignore_index=True)
df_calculated = pd.concat(
[df_calculated, pd.DataFrame(data)], ignore_index=True
)
# Merge the dataframes
if "rfid" in gdf:
gdf = gdf.merge(df_calculated, how="left", on=["u", "v", "rfid"])
Expand Down Expand Up @@ -512,7 +526,7 @@ def multi_link_losses(self, gdf, analysis: AnalysisSectionIndirect):

@staticmethod
def extract_od_nodes_from_graph(
graph: nx.classes.MultiGraph,
graph: nx.classes.MultiGraph,
) -> list[tuple[str, str]]:
"""
Extracts all Origin - Destination nodes from the graph, prevents from entries
Expand All @@ -533,7 +547,7 @@ def extract_od_nodes_from_graph(
return _od_nodes

def _get_origin_destination_pairs(
self, graph: nx.classes.MultiGraph
self, graph: nx.classes.MultiGraph
) -> list[tuple[int, str], tuple[int, str]]:
od_path = self.config.static_path.joinpath(
"output_graph", "origin_destination_table.feather"
Expand Down Expand Up @@ -585,7 +599,9 @@ def optimal_route_od_link(
equity,
).optimal_route_od_link()

def multi_link_origin_destination(self, graph, analysis: AnalysisSectionIndirect):
def multi_link_origin_destination(
self, graph: nx.classes.MultiGraph, analysis: AnalysisSectionIndirect
):
"""Calculates the connectivity between origins and destinations"""
od_nodes = self._get_origin_destination_pairs(graph)

Expand Down Expand Up @@ -839,7 +855,7 @@ def multi_link_isolated_locations(
Args:
graph (nx.Graph): The original graph representing the network, with additional hazard information.
analysis (dict): The configuration of the analysis, which contains the threshold for considering a hazard impact significant.
analysis (AnalysisSectionIndirect): The configuration of the analysis, which contains the threshold for considering a hazard impact significant.
crs (int, optional): The coordinate reference system used for geographical data. Defaults to 4326 (WGS84).
Returns:
Expand Down Expand Up @@ -872,8 +888,11 @@ def multi_link_isolated_locations(
edges_hz_direct = [
e
for e in edges
if (e[-1][hazard_name] > float(analysis.threshold))
& ("bridge" not in e[-1])
if e[-1][hazard_name]
and (
(e[-1][hazard_name] > float(analysis.threshold))
& ("bridge" not in e[-1])
)
]
edges_hz_indirect = [e for e in edges if e not in edges_hz_direct]

Expand Down Expand Up @@ -1022,7 +1041,6 @@ def _summarize_locations(

def execute(self):
"""Executes the indirect analysis."""
_pickle_reader = GraphPickleReader()
for analysis in self.config.indirect:
logging.info(
f"----------------------------- Started analyzing '{analysis.name}' -----------------------------"
Expand Down Expand Up @@ -1056,15 +1074,14 @@ def _save_gpkg_analysis(
if analysis.weighing == "distance":
# The name is different in the graph.
analysis.weighing = "length"
_config_files = self.config.files
if analysis.analysis == "single_link_redundancy":
g = _pickle_reader.read(_config_files["base_graph"])
g = self.graph_files.base_graph.get_graph()
gdf = self.single_link_redundancy(g, analysis)
elif analysis.analysis == "multi_link_redundancy":
g = _pickle_reader.read(_config_files["base_graph_hazard"])
g = self.graph_files.base_graph_hazard.get_graph()
gdf = self.multi_link_redundancy(g, analysis)
elif analysis.analysis == "optimal_route_origin_destination":
g = _pickle_reader.read(_config_files["origins_destinations_graph"])
g = self.graph_files.origins_destinations_graph.get_graph()
gdf = self.optimal_route_origin_destination(g, analysis)

if analysis.save_traffic and hasattr(
Expand Down Expand Up @@ -1092,16 +1109,9 @@ def _save_gpkg_analysis(
)
route_traffic_df.to_csv(impact_csv_path, index=False)
elif analysis.analysis == "multi_link_origin_destination":
g = _pickle_reader.read(
self.config.files["origins_destinations_graph_hazard"]
)
g = self.graph_files.origins_destinations_graph_hazard.get_graph()
gdf = self.multi_link_origin_destination(g, analysis)
g_not_disrupted = _pickle_reader.read(
self.config.files["origins_destinations_graph_hazard"]
)
gdf_not_disrupted = self.optimal_route_origin_destination(
g_not_disrupted, analysis
)
gdf_not_disrupted = self.optimal_route_origin_destination(g, analysis)
(
disruption_impact_df,
gdf_ori,
Expand Down Expand Up @@ -1134,17 +1144,13 @@ def _save_gpkg_analysis(
(analysis.name.replace(" ", "_") + "_impact_summary.csv"),
)
disruption_impact_df.to_csv(impact_csv_path, index=False)
elif analysis.analysis == "single_link_losses":
g = _pickle_reader.read(self.config.files["base_graph_hazard"])
elif analysis.analysis in ["single_link_losses", "multi_link_losses"]:
g = self.graph_files.base_graph_hazard.get_graph()
gdf = self.single_link_redundancy(g, analysis)
gdf = self.single_link_losses(gdf, analysis)
elif analysis.analysis == "multi_link_losses":
g = _pickle_reader.read(self.config.files["base_graph_hazard"])
gdf = self.multi_link_redundancy(g, analysis)
gdf = self.multi_link_losses(gdf, analysis)
elif analysis.analysis == "optimal_route_origin_closest_destination":
analyzer = OriginClosestDestination(
self.config, analysis, self.hazard_names_df
self.config, analysis, self.graph_files, self.hazard_names_df
)
(
base_graph,
Expand All @@ -1171,7 +1177,7 @@ def _save_gpkg_analysis(
opt_routes.to_csv(csv_path, index=False)
elif analysis.analysis == "multi_link_origin_closest_destination":
analyzer = OriginClosestDestination(
self.config, analysis, self.hazard_names_df
self.config, analysis, self.graph_files, self.hazard_names_df
)

if analysis.calculate_route_without_disruption:
Expand All @@ -1181,10 +1187,7 @@ def _save_gpkg_analysis(
destinations,
) = analyzer.optimal_route_origin_closest_destination()

if (
analyzer.config.files["origins_destinations_graph_hazard"]
is None
):
if self.graph_files.origins_destinations_graph_hazard.file is None:
origins = analyzer.load_origins()
opt_routes_with_hazard = gpd.GeoDataFrame(data=None)
else:
Expand Down Expand Up @@ -1247,26 +1250,21 @@ def _save_gpkg_analysis(
del opt_routes_with_hazard["geometry"]
opt_routes_with_hazard.to_csv(csv_path, index=False)

if (
analyzer.config.files["origins_destinations_graph_hazard"]
is not None
):
if self.graph_files.origins_destinations_graph_hazard.file is not None:
agg_results.to_excel(
_output_path.joinpath(
analysis.name.replace(" ", "_") + "_results.xlsx"
),
index=False,
)
elif analysis.analysis == "losses":
if self.graphs["base_network_hazard"] is None:
gdf_in = gpd.read_feather(self.config.files["base_network_hazard"])

gdf_in = self.graph_files.base_graph_hazard.get_graph()
losses = Losses(self.config, analysis)
df = losses.calculate_losses_from_table()
gdf = gdf_in.merge(df, how="left", on="LinkNr")
elif analysis.analysis == "multi_link_isolated_locations":
g = _pickle_reader.read(self.config.files["base_graph_hazard"])
gdf, df = self.multi_link_isolated_locations(g, analysis)
g = self.graph_files.base_graph_hazard.get_graph()
(gdf, df) = self.multi_link_isolated_locations(g, analysis)

df_path = _output_path / (
analysis.name.replace(" ", "_") + "_results.csv"
Expand Down Expand Up @@ -1329,9 +1327,9 @@ def save_gdf(gdf: gpd.GeoDataFrame, save_path: Path):


def find_route_ods(
graph: nx.classes.MultiGraph,
od_nodes: list[tuple[tuple[int, str], tuple[int, str]]],
weighing: str,
graph: nx.classes.MultiGraph,
od_nodes: list[tuple[tuple[int, str], tuple[int, str]]],
weighing: str,
) -> gpd.GeoDataFrame:
# create the routes between all OD pairs
(
Expand Down
Loading

0 comments on commit 28b304d

Please sign in to comment.