Skip to content

Commit

Permalink
Merge branch 'master' into feat/180-update-geo-dependencies
Browse files Browse the repository at this point in the history
  • Loading branch information
frederique-hub authored Nov 8, 2023
2 parents dce394c + a457c46 commit 05898dd
Show file tree
Hide file tree
Showing 40 changed files with 551 additions and 565 deletions.
14 changes: 7 additions & 7 deletions docs/user_guide/user_guide.rst
Original file line number Diff line number Diff line change
Expand Up @@ -123,7 +123,7 @@ Initialization file templates
polygon = map.geojson # <name + file extension of the geojson polygon file in the static/network folder> / None
network_type = drive # drive / walk / bike / drive_service / all
road_types = motorway,motorway_link,trunk,trunk_link,primary, primary_link,secondary,secondary_link,tertiary,tertiary_link
save_shp = True # True / False
save_gpkg = True # True / False

[origins_destinations]
origins = origins.shp # <file name> / None
Expand Down Expand Up @@ -159,7 +159,7 @@ Initialization file templates
name = single link redundancy test
analysis = single_link_redundancy
weighing = distance
save_shp = True
save_gpkg = True
save_csv = True

[analysis2]
Expand All @@ -168,14 +168,14 @@ Initialization file templates
aggregate_wl = max
threshold = 0.5
weighing = distance
save_shp = True
save_gpkg = True
save_csv = True

[analysis3]
name = optimal origin dest test
analysis = optimal_route_origin_destination
weighing = distance
save_shp = True
save_gpkg = True
save_csv = True

[analysis4]
Expand All @@ -184,7 +184,7 @@ Initialization file templates
aggregate_wl = max
threshold = 0.5
weighing = distance
save_shp = True
save_gpkg = True
save_csv = False

[analysis5]
Expand All @@ -193,7 +193,7 @@ Initialization file templates
aggregate_wl = max
threshold = 0.5
weighing = distance
save_shp = True
save_gpkg = True
save_csv = True

[analysis6]
Expand All @@ -204,6 +204,6 @@ Initialization file templates
weighing = length
buffer_meters = 40
category_field_name = category
save_shp = True
save_gpkg = True
save_csv = True

2 changes: 1 addition & 1 deletion examples/example_origin_destination_analysis.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -4133,7 +4133,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.10.11"
"version": "3.10.12"
}
},
"nbformat": 4,
Expand Down
2 changes: 1 addition & 1 deletion examples/example_ra2ce_basics.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -864,7 +864,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.10.11"
"version": "3.10.12"
}
},
"nbformat": 4,
Expand Down
2 changes: 1 addition & 1 deletion examples/example_single_link_redundancy.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -2067,7 +2067,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.10.11"
"version": "3.10.12"
}
},
"nbformat": 4,
Expand Down
4 changes: 2 additions & 2 deletions examples/pizza_course_material_DIY.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -147,7 +147,7 @@
"**polygon = ...<br>**\n",
"network_type = drive<br>\n",
"**road_types = ...<br>**\n",
"save_shp = True<br>\n",
"save_gpkg = True<br>\n",
"<br>"
]
},
Expand Down Expand Up @@ -301,7 +301,7 @@
"name = beira_redundancy <br>\n",
"analysis = single_link_redundancy <br>\n",
"weighing = distance <br>\n",
"save_shp = True <br>\n",
"save_gpkg = True <br>\n",
"save_csv = True <br>"
]
},
Expand Down
6 changes: 3 additions & 3 deletions ra2ce/analyses/direct/analyses_direct.py
Original file line number Diff line number Diff line change
Expand Up @@ -75,8 +75,8 @@ def execute(self):
gdf = []

output_path = self.config["output"] / analysis["analysis"]
if analysis["save_shp"]:
shp_path = output_path / (analysis["name"].replace(" ", "_") + ".shp")
if analysis["save_gpkg"]:
shp_path = output_path / (analysis["name"].replace(" ", "_") + ".gpkg")
save_gdf(gdf, shp_path)
if analysis["save_csv"]:
csv_path = output_path / (analysis["name"].replace(" ", "_") + ".csv")
Expand Down Expand Up @@ -187,7 +187,7 @@ def effectiveness_measures(self, analysis):
else:
df = em.load_table(
self.config["input"] / "direct",
analysis["file_name"].replace(".shp", ".csv"),
analysis["file_name"].replace(".gpkg", ".csv"),
)

df = em.calculate_strategy_effectiveness(df, effectiveness_dict)
Expand Down
2 changes: 1 addition & 1 deletion ra2ce/analyses/direct/cost_benefit_analysis.py
Original file line number Diff line number Diff line change
Expand Up @@ -115,7 +115,7 @@ def create_feature_table(file_path: Path):

# save as csv
path, file = os.path.split(file_path)
df.to_csv(os.path.join(path, file.replace(".shp", ".csv")), index=False)
df.to_csv(os.path.join(path, file.replace(".gpkg", ".csv")), index=False)
return df

@staticmethod
Expand Down
62 changes: 45 additions & 17 deletions ra2ce/analyses/indirect/analyses_indirect.py
Original file line number Diff line number Diff line change
Expand Up @@ -510,7 +510,31 @@ def multi_link_losses(self, gdf, analysis):
aggregated_results = pd.concat(results, ignore_index=True)
return aggregated_results

def _get_origin_destination_pairs(self, graph):
@staticmethod
def extract_od_nodes_from_graph(
    graph: nx.classes.MultiGraph,
) -> list[tuple[str, str]]:
    """
    Extracts all Origin - Destination nodes from the graph, prevents from entries
    with list of nodes for a node.

    Args:
        graph (nx.classes.MultiGraph): Graph containing origin-destination nodes.

    Returns:
        list[tuple[str, str]]: List containing tuples of origin - destination node combinations.
    """
    # A node's "od_id" attribute may hold several comma-separated identifiers;
    # emit one (node, od_id) tuple per identifier so callers never receive a
    # node paired with a list of ids.
    return [
        (_node, _single_od_id)
        for _node, _node_data in graph.nodes(data=True)
        if "od_id" in _node_data
        for _single_od_id in _node_data["od_id"].split(",")
    ]

def _get_origin_destination_pairs(
self, graph: nx.classes.MultiGraph
) -> list[tuple[int, str], tuple[int, str]]:
od_path = self.config["static"].joinpath(
"output_graph", "origin_destination_table.feather"
)
Expand All @@ -520,7 +544,7 @@ def _get_origin_destination_pairs(self, graph):
for a in od.loc[od["o_id"].notnull(), "o_id"]
for b in od.loc[od["d_id"].notnull(), "d_id"]
]
all_nodes = [(n, v["od_id"]) for n, v in graph.nodes(data=True) if "od_id" in v]
all_nodes = self.extract_od_nodes_from_graph(graph)
od_nodes = []
for aa, bb in od_pairs:
# it is possible that there are multiple origins/destinations at the same 'entry-point' in the road
Expand All @@ -542,7 +566,7 @@ def _get_origin_destination_pairs(self, graph):

def optimal_route_origin_destination(
self, graph: nx.classes.MultiGraph, analysis: dict
):
) -> gpd.GeoDataFrame:
# create list of origin-destination pairs
od_nodes = self._get_origin_destination_pairs(graph)
pref_routes = find_route_ods(graph, od_nodes, analysis["weighing"])
Expand Down Expand Up @@ -754,10 +778,8 @@ def multi_link_origin_destination_regional_impact(self, gdf_ori):
gdf_ori_ = gdf_ori.copy()

# read origin points
origin_fn = (
Path(self.config["static"])
/ "output_graph"
/ "origin_destination_table.gpkg"
origin_fn = Path(self.config["static"]).joinpath(
"output_graph", "origin_destination_table.gpkg"
)
origin = gpd.read_file(origin_fn, engine="pyogrio")
index = [type(x) == str for x in origin["o_id"]]
Expand Down Expand Up @@ -892,7 +914,7 @@ def multi_link_isolated_locations(
results_hz_roads.to_file(
self.config["output"]
/ analysis["analysis"]
/ f"flooded_and_isolated_roads_{hazard_name}.shp"
/ f"flooded_and_isolated_roads_{hazard_name}.gpkg"
)

# relate the locations to network disruption due to hazard by spatial overlay
Expand Down Expand Up @@ -1008,7 +1030,7 @@ def execute(self):
opt_routes = None
output_path = self.config["output"].joinpath(analysis["analysis"])

def _save_shp_analysis(
def _save_gpkg_analysis(
base_graph,
to_save_gdf: List[gpd.GeoDataFrame],
to_save_gdf_names: List[str],
Expand Down Expand Up @@ -1137,11 +1159,11 @@ def _save_shp_analysis(
opt_routes,
destinations,
) = analyzer.optimal_route_origin_closest_destination()
if analysis["save_shp"]:
if analysis["save_gpkg"]:
# Save the GeoDataFrames
to_save_gdf = [destinations, opt_routes]
to_save_gdf_names = ["destinations", "optimal_routes"]
_save_shp_analysis(base_graph, to_save_gdf, to_save_gdf_names)
_save_gpkg_analysis(base_graph, to_save_gdf, to_save_gdf_names)

if analysis["save_csv"]:
csv_path = output_path / (
Expand Down Expand Up @@ -1191,7 +1213,7 @@ def _save_shp_analysis(
) = analyzer.multi_link_origin_closest_destination()
opt_routes_without_hazard = gpd.GeoDataFrame()

if analysis["save_shp"]:
if analysis["save_gpkg"]:
# Save the GeoDataFrames
to_save_gdf = [
origins,
Expand All @@ -1205,7 +1227,7 @@ def _save_shp_analysis(
"optimal_routes_without_hazard",
"optimal_routes_with_hazard",
]
_save_shp_analysis(base_graph, to_save_gdf, to_save_gdf_names)
_save_gpkg_analysis(base_graph, to_save_gdf, to_save_gdf_names)
if analysis["save_csv"]:
csv_path = output_path / (
analysis["name"].replace(" ", "_") + "_destinations.csv"
Expand Down Expand Up @@ -1253,7 +1275,7 @@ def _save_shp_analysis(

if not gdf.empty:
# Not for all analyses a gdf is created as output.
if analysis["save_shp"]:
if analysis["save_gpkg"]:
gpkg_path = output_path.joinpath(
analysis["name"].replace(" ", "_") + ".gpkg"
)
Expand Down Expand Up @@ -1285,8 +1307,7 @@ def save_gdf(gdf: gpd.GeoDataFrame, save_path: Path):
Arguments:
gdf [geodataframe]: geodataframe object to be converted
edge_shp [str]: output path including extension for edges shapefile
node_shp [str]: output path including extension for nodes shapefile
save_path [str]: output path including extension for edges shapefile
Returns:
None
"""
Expand All @@ -1302,7 +1323,9 @@ def save_gdf(gdf: gpd.GeoDataFrame, save_path: Path):


def find_route_ods(
graph: nx.classes.MultiGraph, od_nodes: list[tuple[tuple[int, str]]], weighing: str
graph: nx.classes.MultiGraph,
od_nodes: list[tuple[tuple[int, str], tuple[int, str]]],
weighing: str,
) -> gpd.GeoDataFrame:
# create the routes between all OD pairs
(
Expand Down Expand Up @@ -1374,6 +1397,11 @@ def find_route_ods(
geometry="geometry",
crs="epsg:4326",
)
# Remove potential duplicates (o, d node) with a different Origin name.
_duplicate_columns = ["o_node", "d_node", "destination", "length", "geometry"]
pref_routes = pref_routes.drop_duplicates(
subset=_duplicate_columns, keep="first"
).reset_index(drop=True)
return pref_routes


Expand Down
56 changes: 8 additions & 48 deletions ra2ce/analyses/indirect/traffic_analysis/traffic_analysis_base.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@

import ast
import itertools
import logging
import operator
from abc import ABC, abstractmethod
from typing import Any
Expand Down Expand Up @@ -60,9 +61,15 @@ def optimal_route_od_link(
opt_path = self._get_opt_path_values(o_node, d_node)
for u_node, v_node in itertools.pairwise(opt_path):
_nodes_key_name = self._get_node_key(u_node, v_node)
_calculated_traffic = self._calculate_origin_node_traffic(
if "," in o_node:
logging.error(
"List of nodes as 'origin node' is not accepted and will be skipped."
)
continue
_calculated_traffic = self._get_accumulated_traffic_from_node(
o_node, count_destination_nodes
)

if "," in d_node:
_calculated_traffic *= len(d_node.split(","))

Expand Down Expand Up @@ -102,53 +109,6 @@ def _get_recorded_traffic_in_node(
/ count_destination_nodes
)

def _get_accumulated_traffic_from_node_list(
self,
nodes_list: list[str],
count_destination_nodes: int,
) -> AccumulatedTraffic:
# TODO: This algorithm is not entirely clear (increase decrease of variable _intermediate_nodes)
# When do we want to 'multiply' the accumulated values?
# When do we want to 'add' the accumulated values?
_accumulated_traffic = AccumulatedTraffic(
utilitarian=1, egalitarian=1, prioritarian=1
)
_intermediate_nodes = 0
for _node in nodes_list:
if self.destinations_names in _node:
_intermediate_nodes -= 1
continue
_node_traffic = self._get_accumulated_traffic_from_node(
_node, count_destination_nodes
)
# Multiplication ( 'operator.mul' or *) or Addition ( 'operator.add' or +) operations to acummulate traffic.
# This will trigger the overloaded methods in `AccumulatedTraffic`.
_acummulated_operator = (
operator.mul if _intermediate_nodes == 0 else operator.add
)
_accumulated_traffic = _acummulated_operator(
_accumulated_traffic, _node_traffic
)
_intermediate_nodes += 1

# Set the remainig values
_accumulated_traffic.egalitarian = len(
list(filter(lambda x: self.destinations_names not in x, nodes_list))
)
return _accumulated_traffic

def _calculate_origin_node_traffic(
self,
origin_node: str,
total_d_nodes: int,
) -> AccumulatedTraffic:
if "," in origin_node:
return self._get_accumulated_traffic_from_node_list(
origin_node.split(","), total_d_nodes
)

return self._get_accumulated_traffic_from_node(origin_node, total_d_nodes)

@abstractmethod
def _get_accumulated_traffic_from_node(
self, target_node: str, total_d_nodes: int
Expand Down
4 changes: 2 additions & 2 deletions ra2ce/graph/exporters/geodataframe_network_exporter.py
Original file line number Diff line number Diff line change
Expand Up @@ -29,8 +29,8 @@


class GeoDataFrameNetworkExporter(NetworkExporterBase):
def export_to_shp(self, output_dir: Path, export_data: gpd.GeoDataFrame) -> None:
_output_shp_path = output_dir / (self._basename + ".shp")
def export_to_gpkg(self, output_dir: Path, export_data: gpd.GeoDataFrame) -> None:
    """Export the network GeoDataFrame as a GeoPackage file in ``output_dir``.

    Args:
        output_dir (Path): Directory where the ``<basename>.gpkg`` file is written.
        export_data (gpd.GeoDataFrame): Network data to export.
    """
    # Local renamed from the stale `_output_shp_path`: this exporter writes
    # GeoPackages, not shapefiles.
    _output_gpkg_path = output_dir / (self._basename + ".gpkg")
    # No explicit encoding argument: GeoPackage stores text as UTF-8 natively,
    # so the old shapefile encoding workaround no longer applies.
    export_data.to_file(_output_gpkg_path, index=False)
Expand Down
4 changes: 2 additions & 2 deletions ra2ce/graph/exporters/multi_graph_network_exporter.py
Original file line number Diff line number Diff line change
Expand Up @@ -35,7 +35,7 @@
class MultiGraphNetworkExporter(NetworkExporterBase):
pickle_path: Optional[Path]

def export_to_shp(self, output_dir: Path, export_data: MULTIGRAPH_TYPE) -> None:
def export_to_gpkg(self, output_dir: Path, export_data: MULTIGRAPH_TYPE) -> None:
if not output_dir.is_dir():
output_dir.mkdir(parents=True)

Expand All @@ -46,7 +46,7 @@ def export_to_shp(self, output_dir: Path, export_data: MULTIGRAPH_TYPE) -> None:
output_dir / (self._basename + "_nodes.gpkg"),
)
logging.info(
f"Saved {self._basename + '_edges.shp'} and {self._basename + '_nodes.shp'} in {output_dir}."
f"Saved {self._basename + '_edges.gpkg'} and {self._basename + '_nodes.gpkg'} in {output_dir}."
)

def export_to_pickle(self, output_dir: Path, export_data: MULTIGRAPH_TYPE) -> None:
Expand Down
Loading

0 comments on commit 05898dd

Please sign in to comment.