Skip to content

Commit

Permalink
Backwards compatibility updates
Browse files Browse the repository at this point in the history
  • Loading branch information
aschonfeld committed Jun 9, 2023
1 parent adf8405 commit b5518d8
Show file tree
Hide file tree
Showing 15 changed files with 196 additions and 138 deletions.
38 changes: 29 additions & 9 deletions dtale/charts/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -758,13 +758,23 @@ def build_base_chart(
classifier = classify_type(find_dtype(data[col]))
if classifier == "F" or (classifier == "I" and group_type == "bins"):
if bin_type == "width":
data.loc[:, col] = pd.qcut(
data[col], q=bins_val, duplicates="drop"
).astype("str")
kwargs = (
{"duplicates": "drop"}
if pandas_util.check_pandas_version("0.23.0")
else {}
)
data.loc[:, col] = pd.qcut(data[col], q=bins_val, **kwargs).astype(
"str"
)
kwargs_str = (
', duplicates="drop"'
if pandas_util.check_pandas_version("0.23.0")
else ""
)
code.append(
(
"chart_data.loc[:, '{col}'] = pd.qcut(chart_data['{col}'], q={bins}, duplicates=\"drop\")"
).format(col=col, bins=bins_val)
"chart_data.loc[:, '{col}'] = pd.qcut(chart_data['{col}'], q={bins}{kwargs})"
).format(col=col, bins=bins_val, kwargs=kwargs_str)
)
else:
bins_data = data[col].dropna()
Expand All @@ -774,18 +784,28 @@ def build_base_chart(
np.arange(npt),
np.sort(bins_data),
)
kwargs = (
{"duplicates": "drop"}
if pandas_util.check_pandas_version("0.23.0")
else {}
)
data.loc[:, col] = pd.cut(
data[col], bins=equal_freq_bins, duplicates="drop"
data[col], bins=equal_freq_bins, **kwargs
).astype("str")
cut_kwargs_str = (
', duplicates="drop"'
if pandas_util.check_pandas_version("0.23.0")
else ""
)
code.append(
(
"bins_data = data['{col}'].dropna()\n"
"npt = len(bins_data)\n"
"equal_freq_bins = np.interp(np.linspace(0, npt, {bins}), np.arange(npt), "
"np.sort(bins_data))\n"
"chart_data.loc[:, '{col}'] = pd.cut(chart_data['{col}'], bins=equal_freq_bins, "
'duplicates="drop")'
).format(col=col, bins=bins_val + 1)
"chart_data.loc[:, '{col}'] = pd.cut(chart_data['{col}'], bins=equal_freq_bins"
"{cut_kwargs})"
).format(col=col, bins=bins_val + 1, cut_kwargs=cut_kwargs_str)
)

main_group = group_col
Expand Down
17 changes: 12 additions & 5 deletions dtale/column_builders.py
Original file line number Diff line number Diff line change
Expand Up @@ -174,8 +174,11 @@ def build_column(self, data):
self.cfg.get(p)
for p in ["col", "search", "replacement", "caseSensitive", "regex"]
)
kwargs = dict(case=case)
if pandas_util.check_pandas_version("0.23.0"):
kwargs["regex"] = regex
return pd.Series(
data[col].str.replace(search, replacement, case=case, regex=regex),
data[col].str.replace(search, replacement, **kwargs),
index=data.index,
name=self.name,
)
Expand All @@ -185,12 +188,16 @@ def build_code(self):
self.cfg.get(p)
for p in ["col", "search", "replacement", "caseSensitive", "regex"]
)
return "data['{col}'].str.replace('{search}', '{replacement}', case={case}, regex={regex})".format(
kwargs = ""
if pandas_util.check_pandas_version("0.23.0"):
kwargs = ", regex='{}'".format("True" if regex else "False")

return "data['{col}'].str.replace('{search}', '{replacement}', case={case}{kwargs})".format(
col=col,
search=search,
replacement=replacement,
case="True" if case else "False",
regex="True" if regex else "False",
kwargs=kwargs,
)


Expand Down Expand Up @@ -1473,7 +1480,7 @@ def build_column(self, data):
self.cfg.get(p) for p in ["col", "periods", "fillValue", "dtype"]
)
kwargs = {}
if fill_value is not None:
if fill_value is not None and pandas_util.check_pandas_version("0.24.0"):
fill_formatter = find_dtype_formatter(dtype)
kwargs["fill_value"] = fill_formatter(fill_value)
return pd.Series(
Expand All @@ -1485,7 +1492,7 @@ def build_code(self):
self.cfg.get(p) for p in ["col", "periods", "fillValue", "dtype"]
)
kwargs = ""
if fill_value is not None:
if fill_value is not None and pandas_util.check_pandas_version("0.24.0"):
if classify_type(dtype) == "S":
kwargs = ", fill_value='{}'".format(fill_value)
else:
Expand Down
57 changes: 31 additions & 26 deletions dtale/dash_application/charts.py
Original file line number Diff line number Diff line change
Expand Up @@ -2544,28 +2544,30 @@ def build_charts():
continue

layout = build_layout(build_title(selected_label, y2, group=series_key))
chart = chart_builder(
graph_wrapper(
figure={
"data": [
go.Funnel(
**dict_merge(
dict(x=series[y2], y=series["x"]),
name_builder(y2, series_key),
)
chart = graph_wrapper(
figure={
"data": [
go.Funnel(
**dict_merge(
dict(x=series[y2], y=series["x"]),
name_builder(y2, series_key),
)
],
"layout": layout,
},
modal=inputs.get("modal", False),
),
group_filter=dict_merge(
dict(y=y2),
{}
if series_key == "all"
else dict(group=series.get("_filter_")),
),
)
],
"layout": layout,
},
modal=inputs.get("modal", False),
)
if not export:
chart = chart_builder(
chart,
group_filter=dict_merge(
dict(y=y2),
{}
if series_key == "all"
else dict(group=series.get("_filter_")),
),
)
if len(negative_values):
error_title = (
"The following negative values could not be represented within the {}Funnel chart"
Expand Down Expand Up @@ -2600,13 +2602,16 @@ def build_charts():
title["title"]["text"] += " stacked by {}".format(", ".join(group))
layout = build_layout(title)

yield chart_builder(
graph_wrapper(
figure={"data": stacked_data, "layout": layout},
modal=inputs.get("modal", False),
),
group_filter=dict(y=final_cols[0]),
chart = graph_wrapper(
figure={"data": stacked_data, "layout": layout},
modal=inputs.get("modal", False),
)
if not export:
chart = chart_builder(
chart,
group_filter=dict(y=final_cols[0]),
)
yield chart

if export:
return next(build_charts())
Expand Down
4 changes: 2 additions & 2 deletions dtale/query.py
Original file line number Diff line number Diff line change
@@ -1,8 +1,8 @@
import pandas as pd
from pkg_resources import parse_version

import dtale.global_state as global_state

from dtale.pandas_util import check_pandas_version
from dtale.utils import format_data, get_bool_arg


Expand Down Expand Up @@ -142,7 +142,7 @@ def _load_pct(df):
return _load_pct(df), []
return _load_pct(df)

is_pandas25 = parse_version(pd.__version__) >= parse_version("0.25.0")
is_pandas25 = check_pandas_version("0.25.0")
curr_app_settings = global_state.get_app_settings()
engine = curr_app_settings.get("query_engine", "python")
filtered_indexes = []
Expand Down
3 changes: 2 additions & 1 deletion dtale/views.py
Original file line number Diff line number Diff line change
Expand Up @@ -3859,7 +3859,8 @@ def network_data(data_id):
edges.columns = ["to", "from"]
if weight:
edges.loc[:, "value"] = df[weight]
edges = edges.to_dict(orient="records")
edge_f = grid_formatter(grid_columns(edges), nan_display="nan")
edges = edge_f.format_dicts(edges.itertuples())

def build_mapping(col):
if col:
Expand Down
5 changes: 4 additions & 1 deletion tests/dtale/column_builders/test_column_builders.py
Original file line number Diff line number Diff line change
Expand Up @@ -373,7 +373,10 @@ def test_exponential_smoothing(rolling_data):
verify_builder(builder, lambda col: col.isnull().sum() == 0)


@pytest.mark.unit
@pytest.mark.skipif(
not pandas_util.check_pandas_version("0.24.0"),
reason="requires pandas 0.24.0 or higher",
)
def test_shift(rolling_data):
import dtale.views as views

Expand Down
15 changes: 11 additions & 4 deletions tests/dtale/correlations/test_views.py
Original file line number Diff line number Diff line change
Expand Up @@ -148,8 +148,9 @@ def test_get_correlations(unittest, test_data, rolling_data):


@pytest.mark.skipif(
parse_version(platform.python_version()) < parse_version("3.6.0"),
reason="requires python 3.6 or higher",
parse_version(platform.python_version()) < parse_version("3.6.0")
or not pandas_util.check_pandas_version("1.0.0"),
reason="requires python 3.6 or higher and pandas 1.0.0 or higher",
)
def test_get_pps_matrix(unittest, test_data):
import dtale.views as views
Expand Down Expand Up @@ -210,7 +211,10 @@ def test_get_pps_matrix(unittest, test_data):
corr_ts.columns = ['date', 'corr']"""


@pytest.mark.unit
@pytest.mark.skipif(
not pandas_util.check_pandas_version("1.0.0"),
reason="requires pandas 1.0.0 or higher",
)
def test_get_correlations_ts(unittest, rolling_data):
import dtale.views as views

Expand Down Expand Up @@ -340,7 +344,10 @@ def test_get_correlations_ts(unittest, rolling_data):
only_in_s1 = len(scatter_data[scatter_data['bar'].isnull()])"""


@pytest.mark.unit
@pytest.mark.skipif(
not pandas_util.check_pandas_version("1.0.0"),
reason="requires pandas 1.0.0 or higher",
)
def test_get_scatter(unittest, rolling_data):
import dtale.views as views

Expand Down
Loading

0 comments on commit b5518d8

Please sign in to comment.