diff --git a/docs/users_guide/tasks/timeSeriesOceanRegions.rst b/docs/users_guide/tasks/timeSeriesOceanRegions.rst index 344b65085..dd19bbaaa 100644 --- a/docs/users_guide/tasks/timeSeriesOceanRegions.rst +++ b/docs/users_guide/tasks/timeSeriesOceanRegions.rst @@ -22,7 +22,81 @@ The following configuration options are available for this task: ## options related to plotting time series of groups of ocean regions # the names of region groups to plot, each with its own section below - regionGroups = ['Antarctic Regions'] + regionGroups = ['Ocean Basins', 'Arctic Ocean Regions', 'Antarctic Regions'] + + # a list of variables available to plot + availableVariables = [ + {'name': 'temperature', + 'title': 'Temperature', + 'units': r'$$^\circ$$C', + 'mpas': ['timeMonthly_avg_activeTracers_temperature']}, + {'name': 'salinity', + 'title': 'Salinity', + 'units': 'PSU', + 'mpas': ['timeMonthly_avg_activeTracers_salinity']}, + {'name': 'potentialDensity', + 'title': 'Potential Density', + 'units': 'kg m$$^{-3}$$', + 'mpas': ['timeMonthly_avg_potentialDensity']}, + {'name': 'thermalForcing', + 'title': 'Thermal Forcing', + 'units': r'$$^\circ$$C', + 'mpas': ['timeMonthly_avg_activeTracers_temperature', + 'timeMonthly_avg_activeTracers_salinity', + 'timeMonthly_avg_density']}, + {'name': 'oceanHeatContent', + 'title': 'Ocean Heat Content', + 'units': r'$$10^{22}$$ J', + 'mpas': ['timeMonthly_avg_activeTracers_temperature']}, + {'name': 'mixedLayerDepth', + 'title': 'Mixed Layer Depth', + 'units': 'm', + 'mpas': ['timeMonthly_avg_dThreshMLD']}] + + # allow for longer plot titles + maxTitleLength = 70 + + [timeSeriesOceanBasins] + ## options related to plotting time series of Ocean Basins + + # list of regions to plot or ['all'] for all regions in the masks file. + # See "regionNames" in the oceanBasins masks file in + # regionMaskSubdirectory for details. + regionNames = ['all'] + + # a list of variables to plot from availableVariables in timeSeriesOceanRegions + variables = ['oceanHeatContent'] + + # variables that are anomalies + anomalies = ['oceanHeatContent'] + + # The minimum and maximum depth over which fields are averaged. + zmin = -10000 + zmax = 0 + + # Observational data sets to compare against + obs = [] + + [timeSeriesArcticOceanRegions] + ## options related to plotting time series of Arctic Ocean regions + + # list of regions to plot or ['all'] for all regions in the masks file. + # See "regionNames" in the arcticOceanRegions masks file in + # regionMaskSubdirectory for details. + regionNames = [] + + # a list of variables to plot from availableVariables in timeSeriesOceanRegions + variables = ['temperature', 'salinity', 'potentialDensity', 'mixedLayerDepth'] + + # variables that are anomalies + anomalies = [] + + # The minimum and maximum depth over which fields are averaged. + zmin = -1000 + zmax = 0 + + # Observational data sets to compare against + obs = ['WOA18'] [timeSeriesAntarcticRegions] @@ -33,23 +107,12 @@ The following configuration options are available for this task: # regionMaskSubdirectory for details. 
  regionNames = []
 
-  # a list of variables to plot
-  variables = [{'name': 'temperature',
-                'title': 'Temperature',
-                'units': r'$^\circ$C',
-                'mpas': 'timeMonthly_avg_activeTracers_temperature'},
-               {'name': 'salinity',
-                'title': 'Salinity',
-                'units': 'PSU',
-                'mpas': 'timeMonthly_avg_activeTracers_salinity'},
-               {'name': 'potentialDensity',
-                'title': 'Potential Density',
-                'units': 'kg m$^{-3}$',
-                'mpas': 'timeMonthly_avg_potentialDensity'},
-               {'name': 'mixedLayerDepth',
-                'title': 'Mixed Layer Depth',
-                'units': 'm',
-                'mpas': 'timeMonthly_avg_dThreshMLD'}]
+  # a list of variables to plot from availableVariables in timeSeriesOceanRegions
+  variables = ['temperature', 'salinity', 'potentialDensity', 'thermalForcing',
+               'oceanHeatContent', 'mixedLayerDepth']
+
+  # variables that are anomalies
+  anomalies = ['oceanHeatContent']
 
   # The minimum and maximum depth over which fields are averaged, default is
   # to take these values from the geojson feature's zmin and zmax properties.
@@ -63,7 +126,7 @@ The following configuration options are available for this task:
 Region Groups
 -------------
 
-``regionGroup`` is a list of region groups,each of which will get its own
+``regionGroups`` is a list of region groups, each of which will get its own
 gallery group on the resulting analysis webpage. See
 :ref:`config_region_groups` for more information on the available region
 groups. For each region group, there should be a corresponding
@@ -71,6 +134,18 @@ groups. For each region group, there should be a corresponding
-from the name of the region group. By default, the only region group for this
-task is ``'Antarctic Regions'``.
+from the name of the region group. The default region groups for this task
+are ``'Ocean Basins'``, ``'Arctic Ocean Regions'`` and ``'Antarctic Regions'``.
 
+Available Variables
+-------------------
+
+The ``availableVariables`` list has a python dictionary for each variable that
+is available to be plotted. A separate gallery will be produced for each
+requested variable with a title given by the ``"title"`` entry in the
+dictionary. The ``"units"`` entry is used for the y-axis label of each plot.
+The ``"name"`` is the name of the variable in the NetCDF files as well as the
+text appended to subtask names and file names. It should contain no spaces.
+The ``"mpas"`` entry is a list of the corresponding fields in the MPAS-Ocean
+``timeSeriesStatsMonthlyOutput`` files used to compute the variable.
+
 Region Names
 ------------
 
@@ -90,13 +165,17 @@ region group. In the case of ``Antarctic Regions``, these are:
 Variables
 ---------
 
-The ``variables`` list has a python dictionary for each variable to be plotted.
-A separate gallery will be produced for each variable with a title given by
-the ``"title"`` entry in the dictionary. The ``"units"`` entry is used for the
-y-axis label of each plot. The ``"name"`` is the name of the variable in
-the NetCDF files as well as the text appended to subtaks names and file names.
-It should contain no spaces. The ``"mpas"`` entry is the name of the
-corresponding field in the MPAS-Ocean ``timeSeriesStatsMonthlyOutput`` files.
+The ``variables`` list gives the names of the variables from
+``availableVariables`` that should be plotted for a given region group.
+
+Anomalies
+---------
+
+The ``anomalies`` list gives the variables from ``variables`` that should be
+plotted as anomalies relative to the start of the simulation or, if the
+``anomalyRefYear`` config option in the ``[timeSeries]`` section is defined,
+relative to that reference year. By default, this applies to the ocean heat
+content wherever it is plotted.
 
 Depth Bounds
 ------------
@@ -113,6 +192,9 @@ since they have negative values.
Other Config Options -------------------- +``maxTitleLength`` is used to give titles of these plots more space than +allowed in typical plots. + For more details, see: * :ref:`config_regions` diff --git a/mpas_analysis/default.cfg b/mpas_analysis/default.cfg index c6b3bc176..43b2ec47c 100755 --- a/mpas_analysis/default.cfg +++ b/mpas_analysis/default.cfg @@ -1683,8 +1683,60 @@ movingAveragePoints = 1 ## options related to plotting time series of groups of ocean regions # the names of region groups to plot, each with its own section below -regionGroups = ['Arctic Ocean Regions', 'Antarctic Regions'] +regionGroups = ['Ocean Basins', 'Arctic Ocean Regions', 'Antarctic Regions'] + +# a list of variables available to plot +availableVariables = [ + {'name': 'temperature', + 'title': 'Temperature', + 'units': r'$$^\circ$$C', + 'mpas': ['timeMonthly_avg_activeTracers_temperature']}, + {'name': 'salinity', + 'title': 'Salinity', + 'units': 'PSU', + 'mpas': ['timeMonthly_avg_activeTracers_salinity']}, + {'name': 'potentialDensity', + 'title': 'Potential Density', + 'units': 'kg m$$^{-3}$$', + 'mpas': ['timeMonthly_avg_potentialDensity']}, + {'name': 'thermalForcing', + 'title': 'Thermal Forcing', + 'units': r'$$^\circ$$C', + 'mpas': ['timeMonthly_avg_activeTracers_temperature', + 'timeMonthly_avg_activeTracers_salinity', + 'timeMonthly_avg_density']}, + {'name': 'oceanHeatContent', + 'title': 'Ocean Heat Content', + 'units': r'$$10^{22}$$ J', + 'mpas': ['timeMonthly_avg_activeTracers_temperature']}, + {'name': 'mixedLayerDepth', + 'title': 'Mixed Layer Depth', + 'units': 'm', + 'mpas': ['timeMonthly_avg_dThreshMLD']}] + +# allow for longer plot titles +maxTitleLength = 70 + +[timeSeriesOceanBasins] +## options related to plotting time series of Ocean Basins +# list of regions to plot or ['all'] for all regions in the masks file. +# See "regionNames" in the oceanBasins masks file in +# regionMaskSubdirectory for details. +regionNames = ['all'] + +# a list of variables to plot from availableVariables in timeSeriesOceanRegions +variables = ['oceanHeatContent'] + +# variables that are anomalies +anomalies = ['oceanHeatContent'] + +# The minimum and maximum depth over which fields are averaged. +zmin = -10000 +zmax = 0 + +# Observational data sets to compare against +obs = [] [timeSeriesArcticOceanRegions] ## options related to plotting time series of Arctic Ocean regions @@ -1694,23 +1746,11 @@ regionGroups = ['Arctic Ocean Regions', 'Antarctic Regions'] # regionMaskSubdirectory for details. regionNames = [] -# a list of variables to plot -variables = [{'name': 'temperature', - 'title': 'Temperature', - 'units': r'$$^\circ$$C', - 'mpas': 'timeMonthly_avg_activeTracers_temperature'}, - {'name': 'salinity', - 'title': 'Salinity', - 'units': 'PSU', - 'mpas': 'timeMonthly_avg_activeTracers_salinity'}, - {'name': 'potentialDensity', - 'title': 'Potential Density', - 'units': 'kg m$$^{-3}$$', - 'mpas': 'timeMonthly_avg_potentialDensity'}, - {'name': 'mixedLayerDepth', - 'title': 'Mixed Layer Depth', - 'units': 'm', - 'mpas': 'timeMonthly_avg_dThreshMLD'}] +# a list of variables to plot from availableVariables in timeSeriesOceanRegions +variables = ['temperature', 'salinity', 'potentialDensity', 'mixedLayerDepth'] + +# variables that are anomalies +anomalies = [] # The minimum and maximum depth over which fields are averaged. zmin = -1000 @@ -1728,29 +1768,12 @@ obs = ['WOA23'] # regionMaskSubdirectory for details. 
regionNames = [] -# a list of variables to plot -variables = [{'name': 'temperature', - 'title': 'Temperature', - 'units': r'$$^\circ$$C', - 'mpas': 'timeMonthly_avg_activeTracers_temperature'}, - {'name': 'salinity', - 'title': 'Salinity', - 'units': 'PSU', - 'mpas': 'timeMonthly_avg_activeTracers_salinity'}, - {'name': 'potentialDensity', - 'title': 'Potential Density', - 'units': 'kg m$$^{-3}$$', - 'mpas': 'timeMonthly_avg_potentialDensity'}, - {'name': 'thermalForcing', - 'title': 'Thermal Forcing', - 'units': r'$$^\circ$$C', - 'mpas': ['timeMonthly_avg_activeTracers_temperature', - 'timeMonthly_avg_activeTracers_salinity', - 'timeMonthly_avg_density']}, - {'name': 'mixedLayerDepth', - 'title': 'Mixed Layer Depth', - 'units': 'm', - 'mpas': 'timeMonthly_avg_dThreshMLD'}] +# a list of variables to plot from availableVariables in timeSeriesOceanRegions +variables = ['temperature', 'salinity', 'potentialDensity', 'thermalForcing', + 'oceanHeatContent', 'mixedLayerDepth'] + +# variables that are anomalies +anomalies = ['oceanHeatContent'] # The minimum and maximum depth over which fields are averaged, default is # to take these values from the geojson feature's zmin and zmax properties. diff --git a/mpas_analysis/ocean/time_series_ocean_regions.py b/mpas_analysis/ocean/time_series_ocean_regions.py index 5109bfa13..95572eed7 100644 --- a/mpas_analysis/ocean/time_series_ocean_regions.py +++ b/mpas_analysis/ocean/time_series_ocean_regions.py @@ -17,21 +17,16 @@ from mpas_tools.cime.constants import constants as cime_constants from mpas_analysis.shared.analysis_task import AnalysisTask - -from mpas_analysis.shared.plot import timeseries_analysis_plot, savefig, \ - add_inset - +from mpas_analysis.shared.constants import constants +from mpas_analysis.shared.html import write_image_xml from mpas_analysis.shared.io import open_mpas_dataset, write_netcdf_with_fill - from mpas_analysis.shared.io.utility import build_config_full_path, \ build_obs_path, get_files_year_month, decode_strings - -from mpas_analysis.shared.html import write_image_xml - +from mpas_analysis.shared.plot import timeseries_analysis_plot, savefig, \ + add_inset +from mpas_analysis.shared.timekeeping.utility import get_simulation_start_time from mpas_analysis.ocean.utility import compute_zmid -from mpas_analysis.shared.constants import constants - class TimeSeriesOceanRegions(AnalysisTask): """ @@ -265,8 +260,9 @@ def run_task(self): # Xylar Asay-Davis config = self.config + logger = self.logger - self.logger.info("\nCompute depth mask for regional means...") + logger.info("\nCompute depth mask for regional means...") regionGroup = self.regionGroup sectionSuffix = regionGroup[0].upper() + \ @@ -286,7 +282,7 @@ def run_task(self): timeSeriesName) if os.path.exists(outFileName): - self.logger.info(' Mask file exists -- Done.') + logger.info(' Mask file exists -- Done.') return # Load mesh related variables @@ -334,8 +330,7 @@ def run_task(self): datasets = [] for regionIndex in range(nRegions): - self.logger.info(' region: {}'.format( - self.regionNames[regionIndex])) + logger.info(f' region: {self.regionNames[regionIndex]}') dsRegion = dsRegionMask.isel(nRegions=regionIndex) cellMask = dsRegion.regionCellMasks == 1 @@ -343,8 +338,8 @@ def run_task(self): cellMask = numpy.logical_and(cellMask, openOceanMask) totalArea = areaCell.where(cellMask).sum() - self.logger.info(' totalArea: {} mil. km^2'.format( - 1e-12 * totalArea.values)) + logger.info( + f' totalArea: {1e-12 * totalArea.values} mil. 
km^2') if config_zmin is None: if 'zminRegions' in dsRegion: @@ -481,8 +476,9 @@ def run_task(self): # Xylar Asay-Davis config = self.config + logger = self.logger - self.logger.info("\nCompute time series of regional means...") + logger.info("\nCompute time series of regional means...") startDate = '{:04d}-01-01_00:00:00'.format(self.startYear) endDate = '{:04d}-12-31_23:59:59'.format(self.endYear) @@ -512,19 +508,14 @@ def run_task(self): self.historyStreams, 'timeSeriesStatsMonthlyOutput') - variables = config.getexpression(sectionName, 'variables') + variables = _get_variables_list(config, sectionName) variableList = {'timeMonthly_avg_layerThickness'} for var in variables: mpas_var = var['mpas'] - if mpas_var == 'none': - continue - if isinstance(mpas_var, (list, tuple)): - for v in mpas_var: - variableList.add(v) - else: - variableList.add(mpas_var) + for v in mpas_var: + variableList.add(v) outputExists = os.path.exists(outFileName) outputValid = outputExists @@ -546,11 +537,11 @@ def run_task(self): break if outputValid: - self.logger.info(' Time series exists -- Done.') + logger.info(' Time series exists -- Done.') return - regionMaskFileName = '{}/depthMasks_{}.nc'.format(outputDirectory, - timeSeriesName) + regionMaskFileName = \ + f'{outputDirectory}/depthMasks_{timeSeriesName}.nc' dsRegionMask = xarray.open_dataset(regionMaskFileName) nRegions = dsRegionMask.sizes['nRegions'] areaCell = dsRegionMask.areaCell @@ -558,7 +549,7 @@ def run_task(self): datasets = [] nTime = len(inputFiles) for tIndex in range(nTime): - self.logger.info(' {}/{}'.format(tIndex + 1, nTime)) + logger.info(f' {tIndex + 1}/{nTime}') dsIn = open_mpas_dataset( fileName=inputFiles[tIndex], @@ -572,8 +563,7 @@ def run_task(self): innerDatasets = [] for regionIndex in range(nRegions): - self.logger.info(' region: {}'.format( - self.regionNames[regionIndex])) + logger.info(f' region: {self.regionNames[regionIndex]}') dsRegion = dsRegionMask.isel(nRegions=regionIndex) dsRegion.load() cellMask = dsRegion.cellMask @@ -585,8 +575,8 @@ def run_task(self): volCell = (localArea*localThickness).where(depthMask) volCell = volCell.transpose('nCells', 'nVertLevels') totalVol = volCell.sum(dim='nVertLevels').sum(dim='nCells') - self.logger.info(' totalVol (mil. km^3): {}'.format( - 1e-15*totalVol.values)) + logger.info( + f' totalVol (mil. 
km^3): {1e-15*totalVol.values}')
 
                 dsOut = xarray.Dataset()
                 dsOut['totalVol'] = totalVol
@@ -594,14 +584,23 @@ def run_task(self):
 
                 for var in variables:
                     outName = var['name']
-                    self.logger.info('      {}'.format(outName))
+                    logger.info(f'      {outName}')
+                    integrated = False
                     if outName == 'thermalForcing':
                         timeSeries = self._add_thermal_forcing(dsIn, cellMask)
                         units = 'degrees Celsius'
                         description = 'potential temperature minus the ' \
                             'potential freezing temperature'
+                    elif outName == 'oceanHeatContent':
+                        timeSeries = self._add_ohc(dsIn, cellMask)
+                        units = '10^22 J'
+                        description = 'ocean heat content'
+                        integrated = True
                     else:
-                        mpasVarName = var['mpas']
+                        mpasVarNames = var['mpas']
+                        assert len(mpasVarNames) == 1
+                        mpasVarName = mpasVarNames[0]
+
                         timeSeries = \
                             dsIn[mpasVarName].where(cellMask, drop=True)
                         units = timeSeries.units
@@ -611,16 +610,20 @@ def run_task(self):
                     if is3d:
                         timeSeries = \
                             (volCell*timeSeries.where(depthMask)).sum(
-                                dim='nVertLevels').sum(dim='nCells') / totalVol
+                                dim='nVertLevels').sum(dim='nCells')
+                        if not integrated:
+                            timeSeries = timeSeries / totalVol
                     else:
                         timeSeries = \
-                            (localArea*timeSeries).sum(
-                                dim='nCells') / totalArea
+                            (localArea*timeSeries).sum(dim='nCells')
+                        if not integrated:
+                            timeSeries = timeSeries / totalArea
 
                     dsOut[outName] = timeSeries
                     dsOut[outName].attrs['units'] = units
                     dsOut[outName].attrs['description'] = description
                     dsOut[outName].attrs['is3d'] = str(is3d)
+                    dsOut[outName].attrs['integrated'] = str(integrated)
 
                 innerDatasets.append(dsOut)
 
@@ -674,6 +677,27 @@
 
         return timeSeries
 
+    def _add_ohc(self, dsIn, cellMask):
+        """ compute the ocean heat content per unit volume """
+
+        # specific heat [J/(kg*degC)]
+        cp = self.namelist.getfloat('config_specific_heat_sea_water')
+        # [kg/m3]
+        rho = self.namelist.getfloat('config_density0')
+
+        temp = dsIn.timeMonthly_avg_activeTracers_temperature.where(
+            cellMask, drop=True)
+
+        units_scale_factor = 1e-22
+
+        # heat content per unit volume: the multiplication by cell area and
+        # layer thickness (volCell) and the sum over the region are applied
+        # by the caller, which skips the division by totalVol because this
+        # variable is integrated rather than volume-averaged
+        ohc = units_scale_factor * rho * cp * temp
+
+        return ohc
+
 
 class CombineRegionalProfileTimeSeriesSubtask(AnalysisTask):
     """
@@ -830,7 +854,8 @@ def run_task(self):
         # -------
         # Xylar Asay-Davis
 
-        self.logger.info(f"\nAveraging T and S for {self.regionName}...")
+        logger = self.logger
+        logger.info(f"\nAveraging T and S for {self.regionName}...")
 
         obsDict = self.obsDict
         config = self.config
@@ -914,20 +939,20 @@ def run_task(self):
         obsFileName = build_obs_path(
             config, component=self.componentName,
             relativePath=obsDict['TFileName'])
-        self.logger.info('  Reading from {}...'.format(obsFileName))
+        logger.info(f'  Reading from {obsFileName}...')
         ds = xarray.open_dataset(obsFileName)
 
         if obsDict['SFileName'] != obsDict['TFileName']:
             obsFileName = build_obs_path(
                 config, component=self.componentName,
                 relativePath=obsDict['SFileName'])
-            self.logger.info('  Reading from {}...'.format(obsFileName))
+            logger.info(f'  Reading from {obsFileName}...')
             dsS = xarray.open_dataset(obsFileName)
             ds[SVarName] = dsS[SVarName]
 
         if obsDict['volFileName'] is None:
             # compute volume from lat, lon, depth bounds
-            self.logger.info('  Computing volume...')
+            logger.info('  Computing volume...')
            latBndsName = ds[latVarName].attrs['bounds']
            lonBndsName = ds[lonVarName].attrs['bounds']
            zBndsName = ds[zVarName].attrs['bounds']
@@ -947,7 +972,7 @@ def run_task(self):
             obsFileName = build_obs_path(
                 config, component=self.componentName,
relativePath=obsDict['volFileName']) - self.logger.info(' Reading from {}...'.format(obsFileName)) + logger.info(f' Reading from {obsFileName}...') dsVol = xarray.open_dataset(obsFileName) ds[volVarName] = dsVol[volVarName] @@ -1105,8 +1130,7 @@ def setup_and_check(self): # self.calendar super(PlotRegionTimeSeriesSubtask, self).setup_and_check() - self.variables = self.config.getexpression(self.sectionName, - 'variables') + self.variables = _get_variables_list(self.config, self.sectionName) self.xmlFileNames = [] for var in self.variables: @@ -1122,19 +1146,23 @@ def run_task(self): # ------- # Xylar Asay-Davis - self.logger.info("\nPlotting time series of ocean properties of {}" - "...".format(self.regionName)) + regionName = self.regionName + logger = self.logger + + logger.info(f"\nPlotting time series of ocean properties of " + f"{regionName}...") - self.logger.info(' Load time series...') + logger.info(' Load time series...') config = self.config calendar = self.calendar + movingAveragePoints = 1 fcAll = read_feature_collection(self.geojsonFileName) fc = FeatureCollection() for feature in fcAll.features: - if feature['properties']['name'] == self.regionName: + if feature['properties']['name'] == regionName: fc.add_feature(feature) break @@ -1146,11 +1174,36 @@ def run_task(self): regionGroup = self.regionGroup timeSeriesName = regionGroup.replace(' ', '') - inFileName = '{}/{}/{}_{:04d}-{:04d}.nc'.format( - baseDirectory, timeSeriesName, timeSeriesName, startYear, endYear) + inFileName = os.path.join( + baseDirectory, + timeSeriesName, + f'{timeSeriesName}_{startYear:04d}-{endYear:04d}.nc') + + maxTitleLength = config.getint('timeSeriesOceanRegions', + 'maxTitleLength') + + sectionName = self.sectionName + + anomalyVars = config.getexpression(sectionName, 'anomalies') dsIn = xarray.open_dataset(inFileName).isel(nRegions=self.regionIndex) + dsStart = None + dsStartRef = None + if len(anomalyVars) > 0: + anomalyRefYear = get_anomaly_ref_year(config, self.runStreams) + if anomalyRefYear < startYear or anomalyRefYear > endYear: + raise ValueError(f'Cannot plot anomalies with respect to a ' + f'year {anomalyRefYear:04d} not in the time ' + f'series {startYear:04d}-{endYear:04d}.') + anomalyRefFileName = os.path.join( + baseDirectory, + timeSeriesName, + f'{timeSeriesName}_{anomalyRefYear:04d}-' + f'{anomalyRefYear:04d}.nc') + dsStart = xarray.open_dataset(anomalyRefFileName).isel( + nRegions=self.regionIndex) + zbounds = dsIn.zbounds.values controlConfig = self.controlConfig @@ -1163,39 +1216,68 @@ def run_task(self): startYear = controlConfig.getint('timeSeries', 'startYear') endYear = controlConfig.getint('timeSeries', 'endYear') - inFileName = '{}/{}/{}_{:04d}-{:04d}.nc'.format( - baseDirectory, timeSeriesName, timeSeriesName, startYear, - endYear) + inFileName = os.path.join( + baseDirectory, + timeSeriesName, + f'{timeSeriesName}_{startYear:04d}-{endYear:04d}.nc') + dsRef = xarray.open_dataset(inFileName).isel( nRegions=self.regionIndex) zboundsRef = dsRef.zbounds.values + if len(anomalyVars) > 0: + anomalyRefYear = get_anomaly_ref_year(controlConfig, + self.runStreams) + anomalyRefFileName = os.path.join( + baseDirectory, + timeSeriesName, + f'{timeSeriesName}_{anomalyRefYear:04d}-' + f'{anomalyRefYear:04d}.nc') + dsStartRef = xarray.open_dataset(anomalyRefFileName).isel( + nRegions=self.regionIndex) + mainRunName = config.get('runs', 'mainRunName') - movingAveragePoints = 1 - self.logger.info(' Make plots...') + logger.info(' Make plots...') groupLink = self.regionGroup.replace(' ', 
'')
 
         for var in self.variables:
             varName = var['name']
             mainArray = dsIn[varName]
+            anomaly = varName in anomalyVars
+
+            varTitle = var['title']
+            varUnits = var['units']
+
             is3d = mainArray.attrs['is3d'] == 'True'
+            integrated = mainArray.attrs['integrated'] == 'True'
+            if anomaly:
+                mainArray = mainArray - dsStart[varName].isel(Time=0)
+                varTitle = f'{varTitle} Anomaly'
+
             if is3d:
-                title = 'Volume-Mean {} in {}'.format(
-                    var['title'], self.regionName)
+                volArea = 'Volume'
             else:
-                title = 'Area-Mean {} in {}'.format(var['title'],
-                                                    self.regionName)
+                volArea = 'Area'
+
+            if integrated:
+                meanInteg = 'Integrated'
+            else:
+                meanInteg = 'Mean'
+            title = f'{volArea}-{meanInteg} {varTitle} in {regionName}'
 
             if plotControl:
                 refArray = dsRef[varName]
+                if anomaly:
+                    refArray = refArray - dsStartRef[varName].isel(Time=0)
             xLabel = 'Time (yr)'
-            yLabel = '{} ({})'.format(var['title'], var['units'])
+            yLabel = f'{varTitle} ({varUnits})'
 
-            filePrefix = '{}_{}'.format(self.prefix, varName)
-            outFileName = '{}/{}.png'.format(self.plotsDirectory, filePrefix)
+            filePrefix = f'{self.prefix}_{varName}'
+            outFileName = os.path.join(self.plotsDirectory,
+                                       f'{filePrefix}.png')
 
             fields = [mainArray]
             lineColors = [config.get('timeSeries', 'mainColor')]
@@ -1207,9 +1289,9 @@ def run_task(self):
                 lineWidths.append(1.2)
                 legendText.append(controlRunName)
 
-            if varName in ['temperature', 'salinity']:
+            if varName in ['temperature', 'salinity'] and not anomaly:
                 obsColors = [
-                    config.get('timeSeries', 'obsColor{}'.format(index + 1))
+                    config.get('timeSeries', f'obsColor{index + 1}')
                     for index in range(5)]
                 daysInMonth = constants.daysInMonth
                 for obsName in self.obsSubtasks:
@@ -1235,15 +1317,12 @@ def run_task(self):
             if is3d:
                 if not plotControl or numpy.all(zbounds == zboundsRef):
-                    title = '{} ({} < z < {} m)'.format(title, zbounds[0],
-                                                        zbounds[1])
+                    title = f'{title} ({zbounds[0]} < z < {zbounds[1]} m)'
                 else:
-                    legendText[0] = '{} ({} < z < {} m)'.format(
-                        legendText[0], zbounds[0], zbounds[1])
-                    legendText[1] = '{} ({} < z < {} m)'.format(
-                        legendText[1], zboundsRef[0], zboundsRef[1])
-
-            sectionName = self.sectionName
+                    legendText[0] = \
+                        f'{legendText[0]} ({zbounds[0]} < z < {zbounds[1]} m)'
+                    legendText[1] = f'{legendText[1]} ' \
+                        f'({zboundsRef[0]} < z < {zboundsRef[1]} m)'
 
             if config.has_option(sectionName, 'titleFontSize'):
                 titleFontSize = config.getint(sectionName, 'titleFontSize')
             else:
@@ -1259,7 +1338,8 @@ def run_task(self):
                 ylabel=yLabel, movingAveragePoints=movingAveragePoints,
                 lineColors=lineColors, lineWidths=lineWidths,
                 legendText=legendText, titleFontSize=titleFontSize,
-                defaultFontSize=defaultFontSize)
+                defaultFontSize=defaultFontSize,
+                maxTitleLength=maxTitleLength)
 
             # do this before the inset because otherwise it moves the inset
             # and cartopy doesn't play too well with tight_layout anyway
@@ -1269,15 +1349,56 @@ def run_task(self):
 
             savefig(outFileName, config, tight=False)
 
-            caption = 'Regional mean of {}'.format(title)
+            caption = f'Regional {title}'
+
             write_image_xml(
                 config=config,
                 filePrefix=filePrefix,
                 componentName='Ocean',
                 componentSubdirectory='ocean',
-                galleryGroup='{} Time Series'.format(self.regionGroup),
+                galleryGroup=f'{self.regionGroup} Time Series',
                 groupLink=groupLink,
-                gallery=var['title'],
-                thumbnailDescription=self.regionName,
+                gallery=varTitle,
+                thumbnailDescription=regionName,
                 imageDescription=caption,
                 imageCaption=caption)
+
+
+def _get_variables_list(config, sectionName):
+    """
+    Get the list of variable dictionaries for the given config section
+    """
+    availableVariables =
config.getexpression('timeSeriesOceanRegions', + 'availableVariables') + + variableList = config.getexpression(sectionName, 'variables') + + variables = [] + + for varName in variableList: + found = False + for var in availableVariables: + if varName == var['name']: + found = True + break + if not found: + raise ValueError(f'Did not find {varName} in config option ' + f'availableVariables in ' + f'[timeSeriesOceanRegions]') + variables.append(var) + + return variables + + +def get_anomaly_ref_year(config, runStreams): + """ + Get the reference year for anomalies + """ + + if config.has_option('timeSeries', 'anomalyRefYear'): + anomalyYear = config.getint('timeSeries', 'anomalyRefYear') + else: + anomalyRefDate = get_simulation_start_time(runStreams) + anomalyYear = int(anomalyRefDate[0:4]) + + return anomalyYear
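
The sketch below is not part of the patch; it is a minimal, self-contained illustration of the quantity the new oceanHeatContent code path computes and of the anomaly referencing used by the plotting subtask. All names in the sketch are hypothetical, and the constants rho and cp are only typical stand-ins for the MPAS-Ocean namelist options config_density0 and config_specific_heat_sea_water, which the real code reads at run time.

import numpy as np
import xarray as xr


def ocean_heat_content(temperature, area_cell, layer_thickness, mask,
                       rho=1026.0, cp=3996.0):
    """
    Regionally integrated ocean heat content in 10^22 J:
    1e-22 * rho * cp * sum(T * cellArea * layerThickness) over the masked
    cells and levels, with no division by the total volume because the
    field is integrated rather than volume-averaged.
    """
    volume = (area_cell * layer_thickness).where(mask)
    return 1e-22 * rho * cp * (temperature.where(mask) * volume).sum(
        dim=['nCells', 'nVertLevels'])


def to_anomaly(time_series, ref_series):
    """
    Anomaly relative to a reference year: subtract the first time entry of
    the reference-year time series, as the plotting subtask does with
    dsStart[varName].isel(Time=0).
    """
    return time_series - ref_series.isel(Time=0)


# a tiny synthetic region: 4 monthly time entries, 3 cells, 2 levels
temp = xr.DataArray(4.0 * np.ones((4, 3, 2)),
                    dims=('Time', 'nCells', 'nVertLevels'))
thickness = xr.DataArray(10.0 * np.ones((4, 3, 2)),
                         dims=('Time', 'nCells', 'nVertLevels'))
area = xr.DataArray(1.0e6 * np.ones(3), dims=('nCells',))
mask = xr.DataArray(np.ones((3, 2), dtype=bool),
                    dims=('nCells', 'nVertLevels'))

ohc = ocean_heat_content(temp, area, thickness, mask)
print(ohc.values)                   # ~9.8e-8 (in 10^22 J) for each month
print(to_anomaly(ohc, ohc).values)  # all zeros: anomaly vs. its own start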