Added a v_threshold to stop pathlines at a given speed.
aaschwanden committed Jun 15, 2024
1 parent 9f194db commit 23fda70
Showing 8 changed files with 352 additions and 615 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/python-package.yml
@@ -25,7 +25,7 @@ jobs:
- uses: actions/checkout@v4
- uses: conda-incubator/setup-miniconda@v3
with:
- activate-environment: pypism
+ activate-environment: glacier-flow-tools
environment-file: environment.yml
python-version: ${{ matrix.python-version }}
channels: conda-forge
2 changes: 1 addition & 1 deletion .pylintrc
@@ -3,4 +3,4 @@ jobs=4 #number of processes to use
[BASIC]
good-names=nameOfYourProject #names to be considered ok
[pre-commit-hook]
- disable=bare-except, import-outside-toplevel, too-many-return-statements, line-too-long, invalid-name, too-many-arguments, dangerous-default-value, too-many-locals, duplicate-code, too-many-statements, redefined-builtin
+ disable=bare-except, import-outside-toplevel, too-many-return-statements, line-too-long, invalid-name, too-many-arguments, dangerous-default-value, too-many-locals, duplicate-code, too-many-statements, redefined-builtin, c-extension-no-member
1 change: 1 addition & 0 deletions environment.yml
@@ -3,6 +3,7 @@ channels:
- conda-forge
dependencies:
- python=3.11.7
+ - cython
- mpich-mpicxx
- setuptools
- pre-commit
29 changes: 21 additions & 8 deletions glacier_flow_tools/pathlines.py
@@ -102,6 +102,7 @@ def compute_pathline(
hmin: float = 0.0001,
hmax: float = 10,
tol: float = 1e-4,
+ v_threshold: float = 0.0,
notebook: bool = False,
progress: bool = False,
progress_kwargs: Dict = {"leave": False, "position": 0},
@@ -129,6 +130,8 @@
The maximum step size for the integration. Default is 10.
tol : float, optional
The error tolerance for the integration. Default is 1e-4.
+ v_threshold : float, optional
+ A velocity threshold below which the solver stops. Default is 0.
notebook : bool, optional
If True, a progress bar is displayed in a Jupyter notebook. Default is False.
progress : bool, optional
@@ -202,14 +205,16 @@
error_estimate = np.empty(0, dtype=float)

pts = np.vstack([pts, x])
- velocities = np.vstack([velocities, f(point, start_time, *f_args)])
+ vel = f(point, start_time, *f_args)
+ v = np.sqrt(vel[0] ** 2 + vel[1] ** 2)
+ velocities = np.vstack([velocities, vel])
time = np.append(time, start_time)
error_estimate = np.append(error_estimate, 0.0)

k = 0
p_bar = tqdm_notebook if notebook else tqdm_script
with p_bar(desc="Integrating pathline", total=end_time, **progress_kwargs) if progress else nullcontext():
- while t < end_time:
+ while (t < end_time) and (v > v_threshold):

if np.isclose(t + h, end_time, rtol=1e-5):
h = end_time - t
@@ -246,8 +251,11 @@
f"Error: Could not converge to the required tolerance {tol:e} with minimum stepsize {hmin:e}"
)

+ vel = f(point, start_time, *f_args)
+ v = np.sqrt(vel[0] ** 2 + vel[1] ** 2)

pts = np.append(pts, [x], axis=0)
- velocities = np.append(velocities, [f(x, start_time, *f_args)], axis=0)
+ velocities = np.append(velocities, [vel], axis=0)
time = np.append(time, t)
error_estimate = np.append(error_estimate, r)
k += 1
@@ -334,12 +342,17 @@ def pathline_to_line_geopandas_dataframe(
gp.GeoDataFrame
A GeoDataFrame where each row represents a pathline. If attributes are provided, they are added as columns in the GeoDataFrame.
"""
- geom = LineString(points)
- pathline_dict = {"geometry": geom}
+ if len(points) > 1:
+     geom = LineString(points)
+     pathline_dict = {"geometry": geom}

- if attrs is not None:
-     pathline_dict.update(attrs)
- return gp.GeoDataFrame.from_dict(pathline_dict, crs=crs)
+     if attrs is not None:
+         pathline_dict.update(attrs)
+     gdf = gp.GeoDataFrame.from_dict(pathline_dict, crs=crs)

+ else:
+     gdf = gp.GeoDataFrame()
+ return gdf


def pathline_to_geopandas_dataframe(
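For context, the stopping criterion added above follows the pattern sketched below: integration keeps stepping while the time is below end_time and the local speed stays above v_threshold, so a pathline now terminates once the flow becomes slower than the chosen cutoff. With the default v_threshold of 0.0 the condition only trips where the speed is exactly zero, so existing behaviour is effectively preserved. The sketch is a minimal, self-contained illustration, not the library code: decaying_velocity and trace_until_slow are hypothetical names, it uses a fixed forward-Euler step, and the real compute_pathline instead uses adaptive step-size control governed by hmin, hmax and tol as shown in the diff.

import numpy as np


def decaying_velocity(point, t):
    """Hypothetical 2-D velocity field whose speed decays with time (illustration only)."""
    return np.asarray([100.0, 50.0]) * np.exp(-0.05 * t)


def trace_until_slow(point, f, start_time, end_time, h=1.0, v_threshold=0.0):
    """Advance a point with fixed Euler steps, stopping once the speed drops to v_threshold."""
    x = np.asarray(point, dtype=float)
    t = start_time
    pts, time = [x.copy()], [t]
    vel = f(x, t)
    v = np.sqrt(vel[0] ** 2 + vel[1] ** 2)
    while (t < end_time) and (v > v_threshold):
        x = x + h * vel  # fixed forward-Euler step; compute_pathline adapts h instead
        t = t + h
        vel = f(x, t)
        v = np.sqrt(vel[0] ** 2 + vel[1] ** 2)
        pts.append(x)
        time.append(t)
    return np.vstack(pts), np.asarray(time)


pts, time = trace_until_slow((0.0, 0.0), decaying_velocity, 0.0, 250.0, h=1.0, v_threshold=10.0)
print(f"stopped after {len(time) - 1} steps at t = {time[-1]:.0f}")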
78 changes: 78 additions & 0 deletions notebooks/bedmachine_profiles.ipynb
@@ -90,6 +90,84 @@
"id": "497bc432-2892-4c72-9fe8-2b8dcc4b7f73",
"metadata": {},
"outputs": [],
"source": [
"import xarray as xr\n",
"import xskillscore as xs\n",
"from functools import partial\n",
"from glacier_flow_tools.utils import preprocess_nc"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "24f7672e-790a-4012-8bf9-f0a7b3ea40ec",
"metadata": {},
"outputs": [],
"source": [
"exp_files = sorted(Path(\".\").glob(\"gris_g1800m*CTRL*0.nc\"))\n",
"\n",
"exp_ds = xr.open_mfdataset(\n",
" exp_files,\n",
" preprocess=partial(\n",
" preprocess_nc,\n",
" regexp=\"v(.+?)\",\n",
" ),\n",
" concat_dim=\"exp_id\",\n",
" combine=\"nested\",\n",
" chunks=\"auto\",\n",
" engine=\"h5netcdf\",\n",
" parallel=True,\n",
")\n",
"velsurf_mag = exp_ds[\"velsurf_mag\"]"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "5b14d240-8e4d-4470-a4c8-ea7ad5cc6c81",
"metadata": {},
"outputs": [],
"source": [
"itslive_ds = xr.open_dataset(\"GRE_G0240_0000.nc\", chunks=\"auto\", decode_times=False)\n",
"v = itslive_ds[\"v\"].interp_like(exp_ds.sel(exp_id=1))"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "2f46d329-fd5c-4023-8fb8-fab9edb773b2",
"metadata": {},
"outputs": [],
"source": [
"mae = xs.mae(velsurf_mag, v, dim=[\"x\", \"y\"], skipna=True)"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "82aea8bf-3e8f-4a69-933f-8e3a2481454c",
"metadata": {},
"outputs": [],
"source": [
"mae"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "dcafce16-fd13-41e4-bc2e-7e64d6212e9f",
"metadata": {},
"outputs": [],
"source": [
"xs.mae?"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "1ac03c6d-cec9-4b50-a7a0-832134affe85",
"metadata": {},
"outputs": [],
"source": []
}
],
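The new bedmachine_profiles.ipynb cells above interpolate the ITS_LIVE surface speed onto the model grid with interp_like and then score the simulated velsurf_mag against it using xskillscore. Below is a minimal, self-contained illustration of that xs.mae call pattern on synthetic DataArrays; the toy grid, values and NaN gap are assumptions made for the example, not the real model or ITS_LIVE data.

import numpy as np
import xarray as xr
import xskillscore as xs

# Toy "model" and "observed" speed fields on a shared 4 x 5 grid (made up for illustration).
rng = np.random.default_rng(0)
coords = {"y": np.arange(4), "x": np.arange(5)}
model = xr.DataArray(rng.uniform(0, 100, size=(4, 5)), coords=coords, dims=("y", "x"))
obs = model + rng.normal(0, 5, size=(4, 5))  # observations = model plus noise
obs[0, 0] = np.nan  # simulate a data gap, as real velocity mosaics have

# Reduce over the spatial dimensions; skipna=True ignores the NaN cell.
# This mirrors xs.mae(velsurf_mag, v, dim=["x", "y"], skipna=True) in the notebook.
mae = xs.mae(model, obs, dim=["x", "y"], skipna=True)
print(float(mae))

In the notebook the model dataset keeps an exp_id dimension, so the same call there should return one MAE per experiment (reduced only over x and y) rather than a single scalar.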
229 changes: 166 additions & 63 deletions notebooks/dem_smoothing.ipynb

Large diffs are not rendered by default.

