Replace / in all var names if the dataset is grouped
frankinspace committed Nov 16, 2022
1 parent 03d55f7 commit 3728c7d
Showing 1 changed file with 10 additions and 3 deletions.
13 changes: 10 additions & 3 deletions podaac/subsetter/subset.py
@@ -1154,10 +1154,10 @@ def convert_to_datetime(dataset, time_vars):
     return dataset, start_date


-def subset(file_to_subset, bbox, output_file, variables=None,
+def subset(file_to_subset, bbox, output_file, variables=(),
            # pylint: disable=too-many-branches, disable=too-many-statements
            cut=True, shapefile=None, min_time=None, max_time=None, origin_source=None,
-           lat_var_names=None, lon_var_names=None, time_var_names=None):
+           lat_var_names=(), lon_var_names=(), time_var_names=()):
     """
     Subset a given NetCDF file given a bounding box
@@ -1190,6 +1190,9 @@ def subset(file_to_subset, bbox, output_file, variables=None,
         ISO timestamp representing the upper bound of the temporal
         subset to be performed. If this value is not provided, the
         granule will not be subset temporally on the upper bound.
+    origin_source : str
+        Original location or filename of data to be used in "derived from"
+        history element.
     lat_var_names : list
         List of variables that represent the latitude coordinate
         variables for this granule. This list will only contain more
@@ -1221,8 +1224,12 @@ def subset(file_to_subset, bbox, output_file, variables=None,

     nc_dataset, rename_vars = dc.remove_duplicate_dims(nc_dataset)

-    if variables:
+    if has_groups:
         variables = [x.replace('/', GROUP_DELIM) for x in variables]
+        lat_var_names = [x.replace('/', GROUP_DELIM) for x in lat_var_names]
+        lon_var_names = [x.replace('/', GROUP_DELIM) for x in lon_var_names]
+        time_var_names = [x.replace('/', GROUP_DELIM) for x in time_var_names]
+

     args = {
         'decode_coords': False,
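The change above applies to the lat/lon/time variable lists the same normalization that `variables` already received: when the input granule contains groups, any '/' path separators in caller-supplied names are rewritten to the flattened-group delimiter. A minimal sketch of that behavior is below; GROUP_DELIM is assumed here to be the '__' delimiter used for flattened group names in subset.py, and the helper name flatten_names is illustrative only, not part of the module.

# Minimal sketch of the renaming this commit extends to the
# lat/lon/time variable lists. GROUP_DELIM = '__' is an assumption
# based on the flattened-group naming convention in subset.py.
GROUP_DELIM = '__'


def flatten_names(names, has_groups):
    """Rewrite '/' group separators only when the dataset is grouped."""
    if has_groups:
        return [name.replace('/', GROUP_DELIM) for name in names]
    return list(names)


# Example: a grouped variable path supplied by a caller
print(flatten_names(['/data_01/ku/ssha'], has_groups=True))
# -> ['__data_01__ku__ssha']
print(flatten_names(['ssha'], has_groups=False))
# -> ['ssha']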
