From 756acb3acdcacdcd11b9503afa1387e4e7132588 Mon Sep 17 00:00:00 2001
From: Zach Sherman
Date: Tue, 7 Nov 2023 15:04:31 -0600
Subject: [PATCH] MNT: Setup.cfg update (#749)

* MNT: Remove armfiles and add in data directory.

* MNT: Removed unused variable.
---
 act/io/armfiles.py | 54 ++++++++++++++++++++++++++--------------------
 setup.cfg          |  5 ++---
 2 files changed, 33 insertions(+), 26 deletions(-)

diff --git a/act/io/armfiles.py b/act/io/armfiles.py
index 9e591eae09..7f69ff87bc 100644
--- a/act/io/armfiles.py
+++ b/act/io/armfiles.py
@@ -5,27 +5,26 @@
 """

 import copy
+import datetime as dt
 import glob
 import json
 import re
-import urllib
-import warnings
-from pathlib import Path, PosixPath
-from netCDF4 import Dataset
-from os import PathLike
 import tarfile
 import tempfile
+import urllib
 import warnings
+from os import PathLike
+from pathlib import Path, PosixPath

-from cftime import num2date
 import numpy as np
 import xarray as xr
-import datetime as dt
+from cftime import num2date
+from netCDF4 import Dataset

 import act
 import act.utils as utils
 from act.config import DEFAULT_DATASTREAM_NAME
-from act.utils.io_utils import unpack_tar, unpack_gzip, cleanup_files, is_gunzip_file
+from act.utils.io_utils import cleanup_files, is_gunzip_file, unpack_gzip, unpack_tar


 def read_netcdf(
@@ -108,7 +107,6 @@ def read_netcdf(

     message = 'act.io.armfiles.read_netcdf will be replaced in version 2.0.0 by act.io.arm.read_arm_netcdf()'
     warnings.warn(message, DeprecationWarning, 2)
-    ds = None

     filenames, cleanup_temp_directory = check_if_tar_gz_file(filenames)

@@ -137,7 +135,8 @@ def read_netcdf(
         if 'drop_variables' in kwargs.keys():
             drop_variables = kwargs['drop_variables']
         kwargs['drop_variables'] = keep_variables_to_drop_variables(
-            filenames, keep_variables, drop_variables=drop_variables)
+            filenames, keep_variables, drop_variables=drop_variables
+        )

     # Create an exception tuple to use with try statements. Doing it this way
     # so we can add the FileNotFoundError if requested. Can add more error
@@ -178,7 +177,9 @@ def read_netcdf(
     # If requested use base_time and time_offset to derive time. Assumes that the units
     # of both are in seconds and that the value is number of seconds since epoch.
     if use_base_time:
-        time = num2date(ds['base_time'].values + ds['time_offset'].values, ds['base_time'].attrs['units'])
+        time = num2date(
+            ds['base_time'].values + ds['time_offset'].values, ds['base_time'].attrs['units']
+        )
         time = time.astype('datetime64[ns]')

         # Need to use a new Dataset creation to correctly index time for use with
@@ -280,10 +281,7 @@ def read_netcdf(
     return ds


-def keep_variables_to_drop_variables(
-    filenames,
-    keep_variables,
-    drop_variables=None):
+def keep_variables_to_drop_variables(filenames, keep_variables, drop_variables=None):
     """
     Returns a list of variable names to exclude from reading by passing into
     `Xarray.open_dataset` drop_variables keyword. This can greatly help reduce
@@ -347,7 +345,6 @@ def keep_variables_to_drop_variables(
     # Use netCDF4 library to extract the variable and dimension names.
     rootgrp = Dataset(filename, 'r')
     read_variables = list(rootgrp.variables)
-    dimensions = list(rootgrp.dimensions)
     # Loop over the variables to exclude needed coordinate dimention names.
     dims_to_keep = []
     for var_name in keep_variables:
@@ -400,7 +397,9 @@ def check_arm_standards(ds):
     return the_flag


-def create_ds_from_arm_dod(proc, set_dims, version='', fill_value=-9999.0, scalar_fill_dim=None, local_file=False):
+def create_ds_from_arm_dod(
+    proc, set_dims, version='', fill_value=-9999.0, scalar_fill_dim=None, local_file=False
+):
     """

     Queries the ARM DOD api and builds a dataset based on the ARM DOD and
@@ -631,7 +630,9 @@ def write_netcdf(
                 try:
                     att_values = write_ds[var_name].attrs[attr_name]
                     if isinstance(att_values, (list, tuple)):
-                        att_values = [att_value.replace(' ', join_char) for att_value in att_values]
+                        att_values = [
+                            att_value.replace(' ', join_char) for att_value in att_values
+                        ]
                         write_ds[var_name].attrs[attr_name] = ' '.join(att_values)

                 except KeyError:
@@ -759,9 +760,16 @@ def write_netcdf(
             pass
     current_time = dt.datetime.now().replace(microsecond=0)
     if 'history' in list(write_ds.attrs.keys()):
-        write_ds.attrs['history'] += ''.join(['\n', str(current_time), ' created by ACT ', str(act.__version__),
-                                              ' act.io.write.write_netcdf'])
-
+        write_ds.attrs['history'] += ''.join(
+            [
+                '\n',
+                str(current_time),
+                ' created by ACT ',
+                str(act.__version__),
+                ' act.io.write.write_netcdf',
+            ]
+        )
+
     if hasattr(write_ds, 'time_bounds') and not write_ds.time.encoding:
         write_ds.time.encoding.update(write_ds.time_bounds.encoding)

@@ -830,7 +838,7 @@ def read_mmcr(filenames):
     # read it in with xarray
     multi_ds = []
     for f in filenames:
-        nc = Dataset(f, "a")
+        nc = Dataset(f, 'a')
         # Change heights name to range to read appropriately to xarray
         if 'heights' in nc.dimensions:
             nc.renameDimension('heights', 'range')
@@ -878,7 +886,7 @@ def read_mmcr(filenames):
                     data=data,
                     coords={time_name: ds['time'].values[idx], range_name: range_data[idy]},
                     dims=[time_name, range_name],
-                    attrs=attrs
+                    attrs=attrs,
                 )
                 ds[new_var_name] = da

diff --git a/setup.cfg b/setup.cfg
index 58b0e8d9d6..09382c0d37 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -1,9 +1,9 @@
 [flake8]
-exclude = act/io/armfiles.py docs *__init__.py* setup.cfg
+exclude = act/tests/data/ docs *__init__.py* setup.cfg
 ignore = E203,E266,E501,W503,E722,E402,C901,E731,F401
 max-line-length = 100
 max-complexity = 18
-extend-exclude = act/io/armfiles.py docs *__init__.py*
+extend-exclude = docs *__init__.py*
 extend-ignore = E203,E266,E501,W503,E722,E402,C901,E731,F401

 [isort]
@@ -18,7 +18,6 @@ line_length=100
 skip=
     docs/source/conf.py
     setup.py
-    act/io/armfiles.py

 [tool:pytest]
 addopts = --cov=./ --cov-report=xml --verbose
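
For context, the read_netcdf() changes above are formatting-only apart from dropping the unused ds/dimensions variables: a keep_variables list is still converted into xarray's drop_variables via keep_variables_to_drop_variables(), and the DeprecationWarning names act.io.arm.read_arm_netcdf() as the 2.0.0 replacement. Below is a minimal usage sketch of that path; the data file and variable names are hypothetical examples, not values taken from this patch.

    # Sketch only: the file name and variable names below are hypothetical.
    import act

    # read_netcdf() converts keep_variables into a drop_variables list
    # internally (see keep_variables_to_drop_variables above), so only the
    # requested variables and the dimensions they need are read.
    ds = act.io.armfiles.read_netcdf(
        'sgpmetE13.b1.20230101.000000.cdf',
        keep_variables=['temp_mean', 'rh_mean'],
    )

    # This call emits the DeprecationWarning added above; per that message,
    # act.io.arm.read_arm_netcdf() is the equivalent entry point in ACT 2.0.0.
    print(ds)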