diff --git a/README.md b/README.md
index 6075e215..e07df43f 100644
--- a/README.md
+++ b/README.md
@@ -769,6 +769,15 @@ global_state.use_arcticdb_store(uri='lmdb:///', library='my_lib')
 dtale.show('my_symbol')
 ```
 
+Or you can specify the library inline by passing a pipe-delimited `[library]|[symbol]` identifier to `dtale.show`:
+```python
+import dtale.global_state as global_state
+import dtale
+
+global_state.use_arcticdb_store(uri='lmdb:///')
+dtale.show('my_lib|my_symbol')
+```
+
 You can also do everything using `dtale.show_arcticdb`:
 ```python
 import dtale
diff --git a/dtale/app.py b/dtale/app.py
index 883f3daf..bf37f98b 100644
--- a/dtale/app.py
+++ b/dtale/app.py
@@ -35,6 +35,7 @@ import dtale.config as dtale_config
 from dtale import dtale
 from dtale.cli.clickutils import retrieve_meta_info_and_version, setup_logging
+from dtale.dash_application import views as dash_views
 from dtale.utils import (
     DuplicateDataError,
     build_shutdown_url,
@@ -507,8 +508,6 @@ def handle_data_id(_endpoint, values):
     auth.setup_auth(app)
 
     with app.app_context():
-        from .dash_application import views as dash_views
-
         app = dash_views.add_dash(app)
 
     return app
diff --git a/dtale/cli/loaders/arcticdb_loader.py b/dtale/cli/loaders/arcticdb_loader.py
index 3f80a25c..a694fd3f 100644
--- a/dtale/cli/loaders/arcticdb_loader.py
+++ b/dtale/cli/loaders/arcticdb_loader.py
@@ -78,7 +78,14 @@ def loader_func(**kwargs):
         if symbol is None:  # select symbol from the UI
             return None
-        startup(data=symbol)
+        if library is None and symbol is not None:
+            raise ValueError(
+                "When trying to load the symbol, {}, a library must be specified!".format(
+                    symbol
+                )
+            )
+
+        startup(data="{}|{}".format(library, symbol))
         return symbol
 
     if not library:
diff --git a/dtale/column_filters.py b/dtale/column_filters.py
index 56dfafd2..94175003 100644
--- a/dtale/column_filters.py
+++ b/dtale/column_filters.py
@@ -32,7 +32,7 @@ def __init__(self, data_id, column, cfg):
         if not dtype:
             if global_state.is_arcticdb:
                 instance = global_state.store.get(data_id)
-                data, _ = format_data(instance.load_data(row_range=[0, 1]))
+                data, _ = format_data(instance.base_df)
                 s = data[column]
             else:
                 s = global_state.get_data(data_id, columns=[column])[column]
diff --git a/dtale/dash_application/charts.py b/dtale/dash_application/charts.py
index e03cdded..66fd9a35 100644
--- a/dtale/dash_application/charts.py
+++ b/dtale/dash_application/charts.py
@@ -607,7 +607,13 @@ def build_hoverable(link, msg):
                 export_png_link,
                 export_csv_link,
             ],
-            style={"position": "absolute", "zIndex": 5, "left": 5, "top": 2},
+            style={
+                "position": "absolute",
+                "zIndex": 5,
+                "left": 5,
+                "top": 2,
+                "height": "100%",
+            },
         )
     return html.Div(
         [links] + make_list(chart), style={"position": "relative", "height": "100%"}
@@ -706,12 +712,14 @@ def cpg_chunker(charts, columns=2):
         return charts
 
     def _formatter(chart):
-        if hasattr(chart, "style"):
-            chart.style.pop("height", None)
         return html.Div(chart, className="col-md-6")
 
     return [
-        html.Div([_formatter(c) for c in chunk], className="row pb-3")
+        html.Div(
+            [_formatter(c) for c in chunk],
+            className="row pb-3",
+            style={"height": "100%"},
+        )
         for chunk in divide_chunks(charts, columns)
     ]
diff --git a/dtale/dash_application/layout/layout.py b/dtale/dash_application/layout/layout.py
index 0b0d2e4e..26b9d9f5 100644
--- a/dtale/dash_application/layout/layout.py
+++ b/dtale/dash_application/layout/layout.py
@@ -1407,10 +1407,12 @@ def build_slider_counts(df, data_id, query_value):
         )
     )
     slider_counts = {
-        v * 20: {"label": "{}% ({:,.0f})".format(v * 20, (v * 2) / 10 * record_ct)}
+        "{}".format(v * 20): {
+            "label": "{}% ({:,.0f})".format(v * 20, (v * 2) / 10 * record_ct)
+        }
         for v in range(1, 6)
     }
-    slider_counts[100]["style"] = {"white-space": "nowrap"}
+    slider_counts["100"]["style"] = {"white-space": "nowrap"}
     return slider_counts
 
@@ -2645,7 +2647,8 @@ def show_map_style(show):
             id="chart-inputs",
         ),
         dcc.Loading(
-            html.Div(id="chart-content", style={"max-height": "69vh"}), type="circle"
+            html.Div(id="chart-content", style={"height": "calc(100vh - 380px)"}),
+            type="circle",
         ),
         dcc.Textarea(id="copy-text", style=dict(position="absolute", left="-110%")),
     ]
diff --git a/dtale/dash_application/views.py b/dtale/dash_application/views.py
index 3962dbc9..9501504b 100644
--- a/dtale/dash_application/views.py
+++ b/dtale/dash_application/views.py
@@ -1365,7 +1365,7 @@ def display_page(pathname, search):
         settings = global_state.get_settings(params["data_id"]) or {}
         return html.Div(
             charts_layout(df, settings, **params) + saved_charts.build_layout(),
-            className="charts-body",
+            className="charts-body pb-0",
         )
 
     custom_geojson.init_callbacks(dash_app)
diff --git a/dtale/global_state.py b/dtale/global_state.py
index b83ad6f9..0c8a92b1 100644
--- a/dtale/global_state.py
+++ b/dtale/global_state.py
@@ -4,7 +4,7 @@
 
 from six import PY3
 
-from dtale.utils import dict_merge
+from dtale.utils import dict_merge, format_data
 
 try:
     from collections.abc import MutableMapping
@@ -55,6 +55,10 @@ def load_data(self):
     def rows(self, **kwargs):
         return self._rows
 
+    @property
+    def is_large(self):
+        return False
+
     @property
     def data(self):
         return self.load_data()
@@ -138,18 +142,29 @@ def settings(self, settings):
 
 
 class DtaleArcticDBInstance(DtaleInstance):
-    def __init__(self, data, lib, symbol, parent):
+    def __init__(self, data, data_id, parent):
         super(DtaleArcticDBInstance, self).__init__(data)
-        self.lib = lib
+        self.parent = parent
+        data_id_segs = (data_id or "").split("|")
+        symbol = data_id_segs[-1]
+        if len(data_id_segs) > 1:
+            lib_name = data_id_segs[0]
+            if not parent.lib or lib_name != parent.lib.name:
+                parent.update_library(lib_name)
+        self.lib = parent.lib
         self.symbol = symbol
         self._rows = 0
-        if self.lib and self.symbol and self.symbol in parent.symbols:
+        self._cols = 0
+        self._base_df = None
+        if self.lib and self.symbol and self.symbol in self.parent.symbols:
             self._rows = self.lib._nvs.get_num_rows(self.symbol)
+            self._base_df = self.load_data(row_range=[0, 1])
+            self._cols = len(format_data(self._base_df)[0].columns)
 
     def load_data(self, **kwargs):
         from arcticdb.version_store._store import VersionedItem
 
-        if not self.lib.has_symbol(self.symbol):
+        if self.symbol not in self.parent.symbols:
             raise ValueError(
                 "{} does not exist in {}!".format(self.symbol, self.lib.name)
             )
@@ -170,6 +185,18 @@ def rows(self, **kwargs):
             return len(read_result.frame_data.value.data[0])
         return self._rows
 
+    @property
+    def base_df(self):
+        return self._base_df
+
+    @property
+    def is_large(self):
+        if self.rows() > LARGE_ARCTICDB:
+            return True
+        if self._cols > 50:
+            return True
+        return False
+
     @property
     def data(self):
         return self.load_data()
@@ -207,22 +234,22 @@ def update_library(self, library=None):
             return
         if library in self._libraries:
             self.lib = self.conn[library]
-            self._db.clear()
-            self.load_symbols()
+            if library not in self._symbols:
+                self.load_symbols()
         elif library is not None:
             raise ValueError("Library '{}' does not exist!".format(library))
 
     def load_libraries(self):
-        self._libraries = self.conn.list_libraries()
+        self._libraries = sorted(self.conn.list_libraries())
 
     @property
     def libraries(self):
         return self._libraries
 
     def load_symbols(self, library=None):
-        self._symbols[library or self.lib.name] = (
-            self.conn[library] if library else self.lib
-        ).list_symbols()
+        self._symbols[library or self.lib.name] = sorted(
+            (self.conn[library] if library else self.lib).list_symbols()
+        )
 
     @property
     def symbols(self):
@@ -231,7 +258,7 @@ def build_instance(self, data_id, data=None):
         if data_id is None:
             return DtaleInstance(data)
-        return DtaleArcticDBInstance(data, self.lib, data_id, self)
+        return DtaleArcticDBInstance(data, data_id, self)
 
     def get(self, key, **kwargs):
         if key is None:
diff --git a/dtale/utils.py b/dtale/utils.py
index 8a24b174..0795674d 100644
--- a/dtale/utils.py
+++ b/dtale/utils.py
@@ -861,6 +861,9 @@ def format_data(data, inplace=False, drop_index=False):
     else:
         data = data.reset_index(drop=drop_index)
 
+    if drop_index:
+        index = []
+
     if drop:
         if inplace:
             data.drop("index", axis=1, errors="ignore", inplace=True)
diff --git a/dtale/views.py b/dtale/views.py
index bd8f8b09..096f143d 100644
--- a/dtale/views.py
+++ b/dtale/views.py
@@ -243,8 +243,11 @@ def __init__(self, data_id, url, is_proxy=False, app_root=None):
         self.app_root = app_root
 
     def build_main_url(self, name=None):
+        quoted_data_id = get_url_quote()(
+            get_url_quote()(name or self._data_id, safe="")
+        )
         return "{}/dtale/main/{}".format(
-            self.app_root if self.is_proxy else self._url, name or self._data_id
+            self.app_root if self.is_proxy else self._url, quoted_data_id
         )
 
     @property
@@ -379,6 +382,10 @@ def __str__(self):
                 return ""
             self.notebook()
             return ""
+
+        if global_state.is_arcticdb and global_state.store.get(self._data_id).is_large:
+            return self.main_url()
+
         return self.data.__str__()
 
     def __repr__(self):
@@ -427,6 +434,7 @@ def _build_iframe(
             iframe_url = "{}?{}".format(iframe_url, params)
         else:
             iframe_url = "{}?{}".format(iframe_url, url_encode_func()(params))
+
         return IFrame(iframe_url, width=width, height=height)
 
     def notebook(self, route="/dtale/iframe/", params=None, width="100%", height=475):
@@ -965,10 +973,47 @@ def startup(
 
     if global_state.is_arcticdb and isinstance(data, string_types):
         data_id = data
+        data_id_segs = data_id.split("|")
+        if len(data_id_segs) < 2:
+            if not global_state.store.lib:
+                raise ValueError(
+                    (
+                        "When specifying a data identifier for ArcticDB, it must be composed of a library and a symbol. "
+ "Use the following format: [library]|[symbol]" + ) + ) + data_id = "{}|{}".format(global_state.store.lib.name, data_id) global_state.new_data_inst(data_id) instance = global_state.store.get(data_id) - data = instance.load_data(row_range=[0, 1]) - ret_data = startup(data=data, data_id=data_id, force_save=False) + data = instance.base_df + ret_data = startup( + url=url, + data=data, + data_id=data_id, + force_save=False, + name=name, + context_vars=context_vars, + ignore_duplicate=ignore_duplicate, + allow_cell_edits=allow_cell_edits, + precision=precision, + show_columns=show_columns, + hide_columns=hide_columns, + column_formats=column_formats, + nan_display=nan_display, + sort=sort, + locked=locked, + background_mode=background_mode, + range_highlights=range_highlights, + app_root=app_root, + is_proxy=is_proxy, + vertical_headers=vertical_headers, + hide_shutdown=hide_shutdown, + column_edit_options=column_edit_options, + auto_hide_empty_columns=auto_hide_empty_columns, + highlight_filter=highlight_filter, + hide_header_editor=hide_header_editor, + lock_header_menu=lock_header_menu, + ) startup_code = ( "from arcticdb import Arctic\n" "from arcticdb.version_store._store import VersionedItem\n\n" @@ -1105,17 +1150,22 @@ def startup( if force_save or ( global_state.is_arcticdb and not global_state.contains(data_id) ): + data = data[curr_locked + [c for c in data.columns if c not in curr_locked]] global_state.set_data(data_id, data) dtypes_data = data ranges = None if global_state.is_arcticdb: instance = global_state.store.get(data_id) - if instance.rows() < global_state.LARGE_ARCTICDB: + if not instance.is_large: dtypes_data = instance.load_data() dtypes_data, _ = format_data( dtypes_data, inplace=inplace, drop_index=drop_index ) ranges = calc_data_ranges(dtypes_data) + dtypes_data = dtypes_data[ + curr_locked + + [c for c in dtypes_data.columns if c not in curr_locked] + ] dtypes_state = build_dtypes_state( dtypes_data, global_state.get_dtypes(data_id) or [], ranges=ranges ) @@ -2105,10 +2155,7 @@ def describe(data_id): if p in dtype: return_data["describe"][p] = dtype[p] - if ( - classification != "F" - and global_state.store.get(data_id).rows() < global_state.LARGE_ARCTICDB - ): + if classification != "F" and not global_state.store.get(data_id).is_large: uniq_vals = data[column].value_counts().sort_values(ascending=False) uniq_vals.index.name = "value" uniq_vals.name = "count" @@ -2174,7 +2221,7 @@ def describe(data_id): if ( classification in ["I", "F", "D"] - and global_state.store.get(data_id).rows() < global_state.LARGE_ARCTICDB + and not global_state.store.get(data_id).is_large ): sd_metrics, sd_code = build_sequential_diffs(data[column], column) return_data["sequential_diffs"] = sd_metrics @@ -2503,10 +2550,7 @@ def build_filter_vals(series, data_id, column, fmt): @dtale.route("/column-filter-data/") @exception_decorator def get_column_filter_data(data_id): - if ( - global_state.is_arcticdb - and global_state.store.get(data_id).rows() > global_state.LARGE_ARCTICDB - ): + if global_state.is_arcticdb and global_state.store.get(data_id).is_large: return jsonify(dict(success=True, hasMissing=True)) column = get_str_arg(request, "col") s = global_state.get_data(data_id)[column] @@ -2582,6 +2626,7 @@ def get_data(data_id): return jsonify({}) curr_settings = global_state.get_settings(data_id) or {} + curr_locked = curr_settings.get("locked", []) final_query = build_query(data_id, curr_settings.get("query")) highlight_filter = curr_settings.get("highlightFilter") or False @@ -2610,6 
+2655,10 @@ def get_data(data_id): data = data.head(export_rows) else: data = instance.load_data(row_range=[0, export_rows]) + data, _ = format_data(data) + data = data[ + curr_locked + [c for c in data.columns if c not in curr_locked] + ] results = f.format_dicts(data.itertuples()) results = [dict_merge({IDX_COL: i}, r) for i, r in enumerate(results)] elif query_builder: @@ -2618,6 +2667,7 @@ def get_data(data_id): ) total = len(df) df, _ = format_data(df) + df = df[curr_locked + [c for c in df.columns if c not in curr_locked]] for sub_range in ids: sub_range = list(map(int, sub_range.split("-"))) if len(sub_range) == 1: @@ -2640,6 +2690,7 @@ def get_data(data_id): df = instance.load_data(**date_range) total = len(df) df, _ = format_data(df) + df = df[curr_locked + [c for c in df.columns if c not in curr_locked]] for sub_range in ids: sub_range = list(map(int, sub_range.split("-"))) if len(sub_range) == 1: @@ -2666,6 +2717,10 @@ def get_data(data_id): row_range=[sub_range[0], sub_range[0] + 1] ) sub_df, _ = format_data(sub_df) + sub_df = sub_df[ + curr_locked + + [c for c in sub_df.columns if c not in curr_locked] + ] sub_df = f.format_dicts(sub_df.itertuples()) results[sub_range[0]] = dict_merge( {IDX_COL: sub_range[0]}, sub_df[0] @@ -2676,6 +2731,10 @@ def get_data(data_id): row_range=[start, total if end >= total else end + 1] ) sub_df, _ = format_data(sub_df) + sub_df = sub_df[ + curr_locked + + [c for c in sub_df.columns if c not in curr_locked] + ] sub_df = f.format_dicts(sub_df.itertuples()) for i, d in zip(range(start, end + 1), sub_df): results[i] = dict_merge({IDX_COL: i}, d) @@ -2687,6 +2746,7 @@ def get_data(data_id): curr_dtypes = [c["name"] for c in global_state.get_dtypes(data_id)] if any(c not in curr_dtypes for c in data.columns): data, _ = format_data(data) + data = data[curr_locked + [c for c in data.columns if c not in curr_locked]] global_state.set_data(data_id, data) global_state.set_dtypes( data_id, @@ -4144,14 +4204,18 @@ def load_arcticdb_description(): zip(description.index.name, description.index.dtype), ) ) + rows = description.row_count + description_str = ( "ROWS: {rows:,.0f}\n" "INDEX:\n" "\t- {index}\n" "COLUMNS:\n" "\t- {columns}\n" ).format( - rows=description.row_count, + rows=rows, index="\n\t- ".join(index), columns="\n\t- ".join(columns), ) - return jsonify(dict(success=True, symbol=symbol, description=description_str)) + return jsonify( + dict(success=True, library=library, symbol=symbol, description=description_str) + ) @dtale.route("/arcticdb/load-symbol") @@ -4159,11 +4223,12 @@ def load_arcticdb_description(): def load_arcticdb_symbol(): library = get_str_arg(request, "library") symbol = get_str_arg(request, "symbol") + data_id = "{}|{}".format(library, symbol) if not global_state.store.lib or global_state.store.lib.name != library: global_state.store.update_library(library) - startup(data=symbol) + startup(data=data_id) startup_code = ( "from arcticdb import Arctic\n" "from arcticdb.version_store._store import VersionedItem\n\n" @@ -4173,8 +4238,8 @@ def load_arcticdb_symbol(): "if isinstance(data, VersionedItem):\n" "\tdf = df.data\n" ).format(uri=global_state.store.uri, library=library, symbol=symbol) - curr_settings = global_state.get_settings(symbol) + curr_settings = global_state.get_settings(data_id) global_state.set_settings( - symbol, dict_merge(curr_settings, dict(startup_code=startup_code)) + data_id, dict_merge(curr_settings, dict(startup_code=startup_code)) ) - return dict(success=True, data_id=symbol) + return dict(success=True, 
data_id=data_id) diff --git a/frontend/static/__tests__/popups/arcticdb/LibrarySymbolSelector-test.tsx b/frontend/static/__tests__/popups/arcticdb/LibrarySymbolSelector-test.tsx index 1b0c5ec6..6956ba96 100644 --- a/frontend/static/__tests__/popups/arcticdb/LibrarySymbolSelector-test.tsx +++ b/frontend/static/__tests__/popups/arcticdb/LibrarySymbolSelector-test.tsx @@ -60,7 +60,13 @@ describe('LibrarySymbolSelector tests', () => { return Promise.resolve({ data: { symbols: SYMBOLS.baz } }); } else if (url.startsWith('/dtale/arcticdb/load-description')) { const params = parseUrlParams(url); - return Promise.resolve({ data: { description: 'Test Description', symbol: params.symbol } }); + return Promise.resolve({ + data: { + description: 'Test Description', + library: params.library, + symbol: params.symbol, + }, + }); } else if (url.startsWith('/dtale/arcticdb/load-symbol')) { return Promise.resolve({ data: { data_id: '2' } }); } @@ -89,7 +95,7 @@ describe('LibrarySymbolSelector tests', () => { await fireEvent.click(screen.getByText('View Info')); }); expect(loadDescriptionSpy).toHaveBeenCalledWith('foo', 'foo1'); - expect(result.getElementsByTagName('b')[0].textContent).toBe('foo1 Description'); + expect(result.getElementsByTagName('b')[0].textContent).toBe('foo - foo1 Description'); expect(result.getElementsByTagName('pre')[0].textContent).toBe('Test Description'); const librariesSpy = jest.spyOn(ArcticDBRepository, 'libraries'); diff --git a/frontend/static/popups/arcticdb/LibrarySymbolSelector.tsx b/frontend/static/popups/arcticdb/LibrarySymbolSelector.tsx index 41cecf09..40477a32 100644 --- a/frontend/static/popups/arcticdb/LibrarySymbolSelector.tsx +++ b/frontend/static/popups/arcticdb/LibrarySymbolSelector.tsx @@ -144,7 +144,7 @@ const LibrarySymbolSelector: React.FC = ({ t }) => { {!!description && (
-          <b>{`${description?.symbol} ${t('Description')}`}</b>
+          <b>{`${description?.library} - ${description?.symbol} ${t('Description')}`}</b>
           <pre>{description?.description ?? ''}</pre>
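An aside for reviewers, not part of the patch: the identifier convention this change introduces is `[library]|[symbol]`, where the last segment is the symbol and an optional leading segment is the library, mirroring the splitting logic in `DtaleArcticDBInstance.__init__` above. A minimal sketch of that assumption (`split_data_id` is a hypothetical helper, not code from this patch):

```python
def split_data_id(data_id):
    """Split a D-Tale ArcticDB data identifier into (library, symbol)."""
    segs = (data_id or "").split("|")
    # No library segment -> caller falls back to the currently active library.
    library = segs[0] if len(segs) > 1 else None
    return library, segs[-1]


assert split_data_id("dtale|df1") == ("dtale", "df1")
assert split_data_id("df1") == (None, "df1")
assert split_data_id("dtale|slashed/df1") == ("dtale", "slashed/df1")
```

Note that splitting on `|` leaves symbols containing `/` intact, which is what the slashed-symbol test below relies on.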
diff --git a/frontend/static/repository/ArcticDBRepository.ts b/frontend/static/repository/ArcticDBRepository.ts
index 383c73e7..0c2b5e49 100644
--- a/frontend/static/repository/ArcticDBRepository.ts
+++ b/frontend/static/repository/ArcticDBRepository.ts
@@ -16,6 +16,7 @@ export interface SymbolsResponse extends GenericRepository.BaseResponse {
 /** Axios response for loading symbol */
 export interface LoadDescriptionResponse extends GenericRepository.BaseResponse {
   description: string;
+  library: string;
   symbol: string;
 }
 
diff --git a/tests/arcticdb/test_views.py b/tests/arcticdb/test_views.py
index 77748b84..a994ccc5 100644
--- a/tests/arcticdb/test_views.py
+++ b/tests/arcticdb/test_views.py
@@ -58,7 +58,7 @@ def test_loading_data(unittest, arcticdb_path, arcticdb):
     startup(data="df1")
 
     with app.test_client() as c:
-        validate_data_load("df1", unittest, c)
+        validate_data_load("dtale|df1", unittest, c)
 
 
 @pytest.mark.unit
@@ -71,7 +71,7 @@ def test_loading_data_w_slashed_symbol(unittest, arcticdb_path, arcticdb):
 
     with app.test_client() as c:
         response = c.get("/")
-        assert response.location.endswith("/dtale/main/slashed%252Fdf1")
+        assert response.location.endswith("/dtale/main/dtale%257Cslashed%252Fdf1")
         data_id = response.location.split("/")[-1]
         validate_data_load(data_id, unittest, c)
 
@@ -86,12 +86,14 @@ def test_loading_data_w_filters(unittest, arcticdb_path, arcticdb):
 
     with app.test_client() as c:
         c.get(
-            "/dtale/save-column-filter/df1",
+            "/dtale/save-column-filter/dtale%257Cdf1",
             query_string=dict(
                 col="str_val", cfg=json.dumps({"type": "string", "value": ["b"]})
             ),
         )
-        response = c.get("/dtale/data/df1", query_string=dict(ids=json.dumps(["0"])))
+        response = c.get(
+            "/dtale/data/dtale%257Cdf1", query_string=dict(ids=json.dumps(["0"]))
+        )
         response_data = response.get_json()
         expected_results = {
             "0": {
@@ -107,17 +109,19 @@ def test_loading_data_w_filters(unittest, arcticdb_path, arcticdb):
         unittest.assertEqual(response_data["final_query"], "`str_val` == 'b'")
 
         c.get(
-            "/dtale/save-column-filter/df1",
+            "/dtale/save-column-filter/dtale%257Cdf1",
             query_string=dict(col="str_val", cfg=json.dumps({"type": "string"})),
         )
         c.get(
-            "/dtale/save-column-filter/df1",
+            "/dtale/save-column-filter/dtale%257Cdf1",
             query_string=dict(
                 col="int_val",
                 cfg=json.dumps({"type": "int", "value": [3], "operand": "="}),
             ),
         )
-        response = c.get("/dtale/data/df1", query_string=dict(ids=json.dumps(["0"])))
+        response = c.get(
+            "/dtale/data/dtale%257Cdf1", query_string=dict(ids=json.dumps(["0"]))
+        )
         response_data = response.get_json()
         expected_results = {
             "0": {
@@ -133,17 +137,19 @@ def test_loading_data_w_filters(unittest, arcticdb_path, arcticdb):
         unittest.assertEqual(response_data["final_query"], "`int_val` == 3")
 
         c.get(
-            "/dtale/save-column-filter/df1",
+            "/dtale/save-column-filter/dtale%257Cdf1",
             query_string=dict(col="int_val", cfg=json.dumps({"type": "int"})),
         )
         c.get(
-            "/dtale/save-column-filter/df1",
+            "/dtale/save-column-filter/dtale%257Cdf1",
             query_string=dict(
                 col="float_val",
                 cfg=json.dumps({"type": "float", "value": 1.1, "operand": "="}),
             ),
         )
-        response = c.get("/dtale/data/df1", query_string=dict(ids=json.dumps(["0"])))
+        response = c.get(
+            "/dtale/data/dtale%257Cdf1", query_string=dict(ids=json.dumps(["0"]))
+        )
         response_data = response.get_json()
         expected_results = {
             "0": {
@@ -159,11 +165,11 @@ def test_loading_data_w_filters(unittest, arcticdb_path, arcticdb):
         unittest.assertEqual(response_data["final_query"], "`float_val` == 1.1")
 
         c.get(
-            "/dtale/save-column-filter/df1",
+            "/dtale/save-column-filter/dtale%257Cdf1",
             query_string=dict(col="float_val", cfg=json.dumps({"type": "float"})),
         )
         c.get(
-            "/dtale/save-column-filter/df1",
+            "/dtale/save-column-filter/dtale%257Cdf1",
             query_string=dict(
                 col="index",
                 cfg=json.dumps(
@@ -171,7 +177,9 @@ def test_loading_data_w_filters(unittest, arcticdb_path, arcticdb):
                 ),
             ),
         )
-        response = c.get("/dtale/data/df1", query_string=dict(ids=json.dumps(["0"])))
+        response = c.get(
+            "/dtale/data/dtale%257Cdf1", query_string=dict(ids=json.dumps(["0"]))
+        )
         response_data = response.get_json()
         unittest.assertEqual(response_data["results"], expected_results)
         unittest.assertEqual(response_data["total"], 1)
@@ -187,7 +195,9 @@ def test_describe(unittest, arcticdb_path, arcticdb):
     startup(data="df1")
 
     with app.test_client() as c:
-        response = c.get("/dtale/describe/df1", query_string=dict(col="int_val"))
+        response = c.get(
+            "/dtale/describe/dtale%257Cdf1", query_string=dict(col="int_val")
+        )
         response_data = response.get_json()
         unittest.assertEqual(
             response_data["uniques"],
@@ -214,7 +224,9 @@ def test_large_describe(arcticdb_path, arcticdb):
     startup(data="large_df")
 
     with app.test_client() as c:
-        response = c.get("/dtale/describe/large_df", query_string=dict(col="col1"))
+        response = c.get(
+            "/dtale/describe/dtale%257Clarge_df", query_string=dict(col="col1")
+        )
         response_data = response.get_json()
         assert "uniques" not in response_data
 
@@ -229,7 +241,7 @@ def test_column_filter_data(unittest, arcticdb_path, arcticdb):
 
     with app.test_client() as c:
         response = c.get(
-            "/dtale/column-filter-data/df1", query_string=dict(col="int_val")
+            "/dtale/column-filter-data/dtale%257Cdf1", query_string=dict(col="int_val")
         )
         response_data = response.get_json()
         unittest.assertEqual(
@@ -254,7 +266,8 @@ def test_large_column_filter_data(unittest, arcticdb_path, arcticdb):
 
     with app.test_client() as c:
         response = c.get(
-            "/dtale/column-filter-data/large_df", query_string=dict(col="col1")
+            "/dtale/column-filter-data/dtale%257Clarge_df",
+            query_string=dict(col="col1"),
         )
         response_data = response.get_json()
         unittest.assertEqual(response_data, {"hasMissing": True, "success": True})
@@ -334,6 +347,6 @@ def test_load_arcticdb_symbol(unittest, arcticdb_path, arcticdb):
             query_string=dict(library="dtale", symbol="df1"),
         )
         response_data = response.get_json()
-        assert response_data["data_id"] == "df1"
+        assert response_data["data_id"] == "dtale|df1"
 
-        validate_data_load("df1", unittest, c)
+        validate_data_load("dtale|df1", unittest, c)
diff --git a/tests/dtale/test_instance.py b/tests/dtale/test_instance.py
index 11533beb..ece48da3 100644
--- a/tests/dtale/test_instance.py
+++ b/tests/dtale/test_instance.py
@@ -28,12 +28,12 @@ def import_mock(name, *args, **kwargs):
         stack.enter_context(
             mock.patch("dtale.views.in_ipython_frontend", return_value=False)
         )
-        build_data_inst({9999: df})
+        build_data_inst({"9999": df})
         getter = namedtuple("get", "ok")
         stack.enter_context(
             mock.patch("dtale.app.requests.get", return_value=getter(False))
         )
-        instance = DtaleData(9999, "http://localhost:9999")
+        instance = DtaleData("9999", "http://localhost:9999")
 
         assert not instance.is_up()
         assert instance._build_iframe() is None
@@ -56,8 +56,8 @@ def import_mock(name, *args, **kwargs):
         stack.enter_context(
             mock.patch("dtale.views.in_ipython_frontend", return_value=True)
         )
-        build_data_inst({9999: df})
-        instance = DtaleData(9999, "http://localhost:9999")
+        build_data_inst({"9999": df})
+        instance = DtaleData("9999", "http://localhost:9999")
         instance.notebook = mock.Mock()
         assert str(instance) == ""
 
@@ -85,8 +85,8 @@ def mock_requests_get(url, verify=True):
         stack.enter_context(
             mock.patch("dtale.views.in_ipython_frontend", return_value=True)
         )
-        build_data_inst({9999: df})
-        instance = DtaleData(9999, "http://localhost:9999")
+        build_data_inst({"9999": df})
+        instance = DtaleData("9999", "http://localhost:9999")
         instance.notebook_correlations(col1="col1", col2="col2")
         mock_iframe.assert_called_once()
 
@@ -166,7 +166,7 @@ def test_jupyter_server_proxy_is_proxy():
         )
         mock_requests = stack.enter_context(mock.patch("requests.get", mock.Mock()))
         instance = DtaleData(
-            9999,
+            "9999",
             "user/root/proxy/40000",
             is_proxy=True,
             app_root="user/root/proxy/40000",
@@ -182,9 +182,9 @@ def test_cleanup():
 
     with ExitStack() as stack:
         mock_cleanup = stack.enter_context(mock.patch("dtale.global_state.cleanup"))
-        instance = DtaleData(9999, "user/root/proxy/9999")
+        instance = DtaleData("9999", "user/root/proxy/9999")
         instance.cleanup()
-        mock_cleanup.assert_called_once_with(9999)
+        mock_cleanup.assert_called_once_with("9999")
 
 
 @pytest.mark.unit
@@ -195,7 +195,7 @@ def test_started_with_open_browser():
         stack.enter_context(
             mock.patch("dtale.views.in_ipython_frontend", return_value=True)
         )
-        instance = DtaleData(9999, "user/root/proxy/9999")
+        instance = DtaleData("9999", "user/root/proxy/9999")
         instance.started_with_open_browser = True
         assert instance.__str__() == ""
         assert instance.started_with_open_browser is False
@@ -218,12 +218,12 @@ def test_settings_management():
             mock.patch("dtale.global_state.get_settings")
         )
 
-        instance = DtaleData(9999, "user/root/proxy/9999")
+        instance = DtaleData("9999", "user/root/proxy/9999")
         instance.update_settings(range_highlights={})
-        mock_default_store.get_settings.assert_called_once_with(9999)
+        mock_default_store.get_settings.assert_called_once_with("9999")
         mock_default_store.set_settings.assert_called_once_with(
-            9999, dict(rangeHighlight={})
+            "9999", dict(rangeHighlight={})
         )
         mock_default_store.reset_mock()
         instance.get_settings()
-        mock_get_settings.assert_called_once_with(9999)
+        mock_get_settings.assert_called_once_with("9999")
diff --git a/tests/dtale/test_views.py b/tests/dtale/test_views.py
index a78d8d9e..e00823d0 100644
--- a/tests/dtale/test_views.py
+++ b/tests/dtale/test_views.py
@@ -454,24 +454,22 @@ def test_processes(test_data, unittest):
         response = c.get("/dtale/processes")
         response_data = response.get_json()
-        unittest.assertEqual(
-            [
-                {
-                    "rows": 50,
-                    "name": "foo",
-                    "ts": 1525106204000,
-                    "start": "12:36:44 PM",
-                    "names": "date,security_id,foo,bar,baz",
-                    "data_id": str(c.port),
-                    "columns": 5,
-                    "mem_usage": 4600 if PY3 else 4000,
-                }
-            ],
-            response_data["data"],
+        unittest.assertDictContainsSubset(
+            {
+                "rows": 50,
+                "name": "foo",
+                "ts": 1525106204000,
+                "start": "12:36:44 PM",
+                "names": "date,security_id,foo,bar,baz",
+                "data_id": str(c.port),
+                "columns": 5,
+                # "mem_usage": 4600 if PY3 else 4000,
+            },
+            response_data["data"][0],
         )
 
         response = c.get("/dtale/process-keys")
-        response_data = response.json
+        response_data = response.get_json()
         assert response_data["data"][0]["id"] == str(c.port)
 
     global_state.clear_store()
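A note on the doubly-encoded identifiers asserted throughout these tests (e.g. `dtale%257Cdf1`): `build_main_url` in `dtale/views.py` now URL-quotes the data identifier twice, so `|` becomes `%7C` on the first pass and the `%` is itself escaped to `%25` on the second. A minimal illustration of that behavior:

```python
try:
    from urllib.parse import quote  # Python 3
except ImportError:
    from urllib import quote  # Python 2 fallback

data_id = "dtale|slashed/df1"
once = quote(data_id, safe="")  # 'dtale%7Cslashed%2Fdf1'
twice = quote(once)             # 'dtale%257Cslashed%252Fdf1'
assert twice == "dtale%257Cslashed%252Fdf1"
```

The double pass keeps both the `|` separator and any `/` inside a symbol name from being treated as URL path separators, which is exactly what `test_loading_data_w_slashed_symbol` exercises.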