Skip to content

Commit

Permalink
Merge pull request #186 from Deltares/feat/180-update-geo-dependencies
Browse files Browse the repository at this point in the history
Feat/180 update geo dependencies
  • Loading branch information
frederique-hub authored Nov 8, 2023
2 parents a457c46 + 05898dd commit a067826
Show file tree
Hide file tree
Showing 20 changed files with 480 additions and 524 deletions.
2 changes: 0 additions & 2 deletions .config/docker_environment.yml
Original file line number Diff line number Diff line change
Expand Up @@ -5,11 +5,9 @@ channels:
dependencies:
- conda-forge::python=3.10
- conda-forge::gdal=3.5.1
- conda-forge::fiona
# from here some dragons
- conda-forge::geopandas
- conda-forge::rasterio
- conda-forge::geojson
- conda-forge::osmnx
- conda-forge::tqdm
- conda-forge::geopy
Expand Down
1 change: 0 additions & 1 deletion .config/environment.yml
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,6 @@ channels:
dependencies:
- conda-forge::python=3.10
- conda-forge::gdal=3.5.1
- conda-forge::fiona
# Install poetry 1.3.2 (1.4.*) gives issues.
# If this does not work then exclude it from the environment and install it manually
# with: conda install -c conda-forge poetry=1.3.2
Expand Down
1 change: 0 additions & 1 deletion .config/tc_environment.yml
Original file line number Diff line number Diff line change
Expand Up @@ -5,5 +5,4 @@ channels:
dependencies:
- python=3.10
- conda-forge::gdal=3.5.1
- conda-forge::fiona
- teamcity-messages
702 changes: 340 additions & 362 deletions poetry.lock

Large diffs are not rendered by default.

12 changes: 5 additions & 7 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -29,8 +29,6 @@ run_ra2ce = "ra2ce.run:main"
python = "^3.9, <3.11"
affine = "^2.3.1"
click = "^8.1.3"
geojson = "^2.5.0"
geopy = "^2.2.0"
numpy = "^1.23.3"
networkx = "^2.8.6"
pandas = "^1.4.4"
Expand All @@ -39,18 +37,18 @@ rasterio = "^1.3.2"
rasterstats = "^0.17.0"
Rtree = "^1.0.0"
seaborn = "^0.12.0"
Shapely = "^1.8.4"
tqdm = "^4.64.1"
openpyxl = "^3.0.10"
xlrd = "^2.0.1"
xarray = "^2022.6.0"
geopandas = "^0.11.1"
osmnx = "0.16.*"
scipy = "^1.9.1"
GDAL = "3.5.1"
fiona = "1.8.21"
pygeos = "^0.14"
momepy = "0.5.0"
geopandas = "^0.14.0"
shapely = "^2.0.1"
osmnx = "^1.6.0"
tqdm = "^4.66.1"
geopy = "^2.4.0"
pyogrio = "^0.6.0"
joblib = "^1.3.2"

Expand Down
7 changes: 4 additions & 3 deletions ra2ce/analyses/direct/cost_benefit_analysis.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,7 @@

import logging
import os
from pathlib import Path

import geopandas as gpd
import numpy as np
Expand Down Expand Up @@ -71,17 +72,17 @@ def _validate_input_params(self, analysis: dict, config: dict) -> None:
)

@staticmethod
def load_effectiveness_table(path):
def load_effectiveness_table(path: Path):
"""This function loads a CSV table containing effectiveness of the different aspects for a number of strategies"""
file_path = path / "effectiveness_measures.csv"
df_lookup = pd.read_csv(file_path, index_col="strategies")
return df_lookup.transpose().to_dict()

@staticmethod
def create_feature_table(file_path):
def create_feature_table(file_path: Path):
"""This function loads a table of features from the input folder"""
logging.info("Loading feature dataframe...")
gdf = gpd.read_file(file_path)
gdf = gpd.read_file(file_path, engine="pyogrio")
logging.info("Dataframe loaded...")

# cleaning up dataframe
Expand Down
28 changes: 12 additions & 16 deletions ra2ce/analyses/indirect/analyses_indirect.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,6 @@

import copy
import logging
import sys
import time
from pathlib import Path
from typing import List, Tuple
Expand Down Expand Up @@ -536,8 +535,8 @@ def extract_od_nodes_from_graph(
def _get_origin_destination_pairs(
self, graph: nx.classes.MultiGraph
) -> list[tuple[int, str], tuple[int, str]]:
od_path = (
self.config["static"] / "output_graph" / "origin_destination_table.feather"
od_path = self.config["static"].joinpath(
"output_graph", "origin_destination_table.feather"
)
od = gpd.read_feather(od_path)
od_pairs = [
Expand Down Expand Up @@ -782,7 +781,7 @@ def multi_link_origin_destination_regional_impact(self, gdf_ori):
origin_fn = Path(self.config["static"]).joinpath(
"output_graph", "origin_destination_table.gpkg"
)
origin = gpd.read_file(origin_fn)
origin = gpd.read_file(origin_fn, engine="pyogrio")
index = [type(x) == str for x in origin["o_id"]]
origin = origin[index]
origin.reset_index(inplace=True, drop=True)
Expand Down Expand Up @@ -983,15 +982,12 @@ def get_network_with_edge_fid(self, graph: nx.Graph) -> gpd.GeoDataFrame:
"""
network = graph_to_gdf(graph)[0]
# TODO: add making "edges_fid" (internal convention) to graph_to_gdf
if all(
c_idx in network.columns for c_idx in ["node_A", "node_B"]
): # shapefiles
if all(c_idx in network.index.names for c_idx in ["u", "v"]):
network["edge_fid"] = [f"{na}_{nb}" for (na, nb, _) in network.index]
elif all(c_idx in network.columns for c_idx in ["node_A", "node_B"]):
# shapefiles
network["edge_fid"] = [
f"{na}_{nb}" for na, nb in network[["node_A", "node_B"]].values
]
elif all(c_idx in network.columns for c_idx in ["u", "v"]): # osm
network["edge_fid"] = [
f"{na}_{nb}" for na, nb in network[["u", "v"]].values
f"{na}_{nb}" for na, nb in network["node_A", "node_B"].values
]
return network[["edge_fid", "geometry"]]

Expand Down Expand Up @@ -1032,7 +1028,7 @@ def execute(self):
starttime = time.time()
gdf = pd.DataFrame()
opt_routes = None
output_path = self.config["output"] / analysis["analysis"]
output_path = self.config["output"].joinpath(analysis["analysis"])

def _save_gpkg_analysis(
base_graph,
Expand All @@ -1041,16 +1037,16 @@ def _save_gpkg_analysis(
):
for to_save, save_name in zip(to_save_gdf, to_save_gdf_names):
if not to_save.empty:
gpkg_path = output_path / (
gpkg_path = output_path.joinpath(
analysis["name"].replace(" ", "_") + f"_{save_name}.gpkg"
)
save_gdf(to_save, gpkg_path)

# Save the Graph
gpkg_path_nodes = output_path / (
gpkg_path_nodes = output_path.joinpath(
analysis["name"].replace(" ", "_") + "_results_nodes.gpkg"
)
gpkg_path_edges = output_path / (
gpkg_path_edges = output_path.joinpath(
analysis["name"].replace(" ", "_") + "_results_edges.gpkg"
)
graph_to_gpkg(base_graph, gpkg_path_edges, gpkg_path_nodes)
Expand Down
1 change: 0 additions & 1 deletion ra2ce/configuration/config_wrapper.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,6 @@

import logging
from pathlib import Path
from typing import Optional

from ra2ce.analyses.analysis_config_wrapper.analysis_config_wrapper_base import (
AnalysisConfigWrapperBase,
Expand Down
2 changes: 1 addition & 1 deletion ra2ce/graph/hazard/hazard_overlay.py
Original file line number Diff line number Diff line change
Expand Up @@ -766,7 +766,7 @@ def create(self):
#### Step 4: hazard overlay of the locations that are checked for isolation ###
if self._isolation_locations:
logging.info("Detected isolated locations, checking for hazard overlay.")
locations = gpd.read_file(self._isolation_locations)
locations = gpd.read_file(self._isolation_locations, engine="pyogrio")
locations["i_id"] = locations.index
locations_crs = pyproj.CRS.from_user_input(locations.crs)
hazard_crs = pyproj.CRS.from_user_input(self._hazard_crs)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,11 +21,12 @@
import networkx as nx
import osmnx
import pandas as pd
from geopandas import GeoDataFrame
from geopandas import GeoDataFrame, read_file
from networkx import MultiDiGraph, MultiGraph
from shapely.geometry.base import BaseGeometry

import ra2ce.graph.networks_utils as nut
from shapely.geometry import shape
from ra2ce.graph.exporters.json_exporter import JsonExporter
from ra2ce.graph.network_config_data.network_config_data import NetworkConfigData
from ra2ce.graph.network_wrappers.network_wrapper_protocol import NetworkWrapperProtocol
Expand Down Expand Up @@ -133,9 +134,9 @@ def get_clean_graph_from_osm(self) -> MultiDiGraph:
"No polygon_file file found at {}.".format(self.polygon_path)
)

poly_dict = nut.read_geojson(geojson_file=self.polygon_path)
_normalized_polygon = nut.get_normalized_geojson_polygon(self.polygon_path)
_complex_graph = self._download_clean_graph_from_osm(
polygon=nut.geojson_to_shp(poly_dict),
polygon=_normalized_polygon,
network_type=self.network_type,
road_types=self.road_types,
)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -44,7 +44,7 @@ def from_shapefile_to_poly(shapefile: Path, out_path: Path, outname: str = ""):
Returns:
.poly file for each region, in a new dir in the working directory (in the CRS of the input file)
"""
shp_file_gdf = gpd.read_file(str(shapefile))
shp_file_gdf = gpd.read_file(str(shapefile), engine="pyogrio")

num = 0
# iterate over the separate polygons in the shapefile
Expand Down
4 changes: 2 additions & 2 deletions ra2ce/graph/network_wrappers/shp_network_wrapper.py
Original file line number Diff line number Diff line change
Expand Up @@ -68,12 +68,12 @@ def _read_merge_shp(self) -> gpd.GeoDataFrame:
properties (pandas dataframe): attributes of shapefile(s), in order of the linestrings in lines
"""
# concatenate all shapefile into one geodataframe and set analysis to 1 or 0 for diversions
lines = [gpd.read_file(shp) for shp in self.primary_files]
lines = [gpd.read_file(shp, engine="pyogrio") for shp in self.primary_files]

if any(self.diversion_files):
lines.extend(
[
nut.check_crs_gdf(gpd.read_file(shp), self.crs)
nut.check_crs_gdf(gpd.read_file(shp, engine="pyogrio"), self.crs)
for shp in self.diversion_files
]
)
Expand Down
2 changes: 1 addition & 1 deletion ra2ce/graph/network_wrappers/vector_network_wrapper.py
Original file line number Diff line number Diff line change
Expand Up @@ -110,7 +110,7 @@ def _read_files(self, file_list: list[Path]) -> gpd.GeoDataFrame:
gpd.GeoDataFrame: GeoDataFrame representing the data.
"""
# read file
gdf = gpd.GeoDataFrame(pd.concat(list(map(gpd.read_file, file_list))))
gdf = gpd.GeoDataFrame(pd.concat([gpd.read_file(_fl, engine="pyogrio") for _fl in file_list]))
logging.info(
"Read files {} into a 'GeoDataFrame'.".format(
", ".join(map(str, file_list))
Expand Down
Loading

0 comments on commit a067826

Please sign in to comment.