From 9c83bbbc7b67b8d1e80fb6554144046af3ebb48f Mon Sep 17 00:00:00 2001
From: Quarto GHA Workflow Runner
Date: Fri, 5 Jul 2024 19:48:19 +0000
Subject: [PATCH] Built site for gh-pages

---
 .nojekyll                                    |    2 +-
 external/DownloadDopplerScattData.html       |    2 +-
 external/ECCO_cloud_direct_access_s3.html    |    2 +-
 external/ECCO_download_data.html             |    2 +-
 external/Introduction_to_xarray.html         |    2 +-
 external/July_2022_Earthdata_Webinar.html    |    2 +-
 external/SWOT_SSH_dashboard.html             |    2 +-
 external/SWOT_to_kerchunk.html               |    2 +-
 external/VisualizeDopplerScattData.html      |    2 +-
 external/cof-zarr-reformat.html              |    2 +-
 external/insitu_dataviz_demo.html            |    2 +-
 external/zarr-eosdis-store.html              |    2 +-
 external/zarr_access.html                    |    2 +-
 mcc_api/metadata_compliance_checker_API.html | 1732 ------------------
 quarto_text/SWOT.html                        |    5 +-
 search.json                                  |   54 +-
 sitemap.xml                                  |  232 ++-
 17 files changed, 143 insertions(+), 1906 deletions(-)
 delete mode 100644 mcc_api/metadata_compliance_checker_API.html

diff --git a/.nojekyll b/.nojekyll
index 66cb5f82..1ab22f4b 100644
--- a/.nojekyll
+++ b/.nojekyll
@@ -1 +1 @@
-49d0b1c2
\ No newline at end of file
+34945a6a
\ No newline at end of file
diff --git a/external/DownloadDopplerScattData.html b/external/DownloadDopplerScattData.html
index 477b3fa0..0eb5eb6e 100644
--- a/external/DownloadDopplerScattData.html
+++ b/external/DownloadDopplerScattData.html
@@ -1076,7 +1076,7 @@

S-MODE Workshop: Science Case Study Airborne Part 1

-imported on: 2024-07-03
+imported on: 2024-07-05

This notebook is from a different repository in NASA’s PO.DAAC, 2022-SMODE-Open-Data-Workshop

diff --git a/external/ECCO_cloud_direct_access_s3.html b/external/ECCO_cloud_direct_access_s3.html
index e6522da7..a774ad3c 100644
--- a/external/ECCO_cloud_direct_access_s3.html
+++ b/external/ECCO_cloud_direct_access_s3.html
@@ -1085,7 +1085,7 @@

Direct Access to ECCO V4r4 Datasets in the Cloud

-imported on: 2024-07-03
+imported on: 2024-07-05

This notebook is from a different repository in NASA’s PO.DAAC, ECCO.

diff --git a/external/ECCO_download_data.html b/external/ECCO_download_data.html
index 82c2d543..71d3face 100644
--- a/external/ECCO_download_data.html
+++ b/external/ECCO_download_data.html
@@ -1080,7 +1080,7 @@

Access to ECCO V4r4 Datasets on a Local Machine

-imported on: 2024-07-03
+imported on: 2024-07-05

This notebook is from a different repository in NASA’s PO.DAAC, ECCO.

diff --git a/external/Introduction_to_xarray.html b/external/Introduction_to_xarray.html
index 0562e147..b2c24723 100644
--- a/external/Introduction_to_xarray.html
+++ b/external/Introduction_to_xarray.html
@@ -1079,7 +1079,7 @@

Xarray

-imported on: 2024-07-03
+imported on: 2024-07-05

This notebook is from NASA Openscapes 2021 Cloud Hackathon Repository

diff --git a/external/July_2022_Earthdata_Webinar.html b/external/July_2022_Earthdata_Webinar.html
index 440b5e14..a3ac986e 100644
--- a/external/July_2022_Earthdata_Webinar.html
+++ b/external/July_2022_Earthdata_Webinar.html
@@ -1136,7 +1136,7 @@

Earthdata Webinar

-imported on: 2024-07-03
+imported on: 2024-07-05

This notebook is from a different repository in NASA’s PO.DAAC, the-coding-club

diff --git a/external/SWOT_SSH_dashboard.html b/external/SWOT_SSH_dashboard.html
index 8a1081bb..76e377e7 100644
--- a/external/SWOT_SSH_dashboard.html
+++ b/external/SWOT_SSH_dashboard.html
@@ -1096,7 +1096,7 @@

Integrating Dask, Kerchunk, Zarr and Xarray

-imported on: 2024-07-03
+imported on: 2024-07-05

This notebook is from a different repository in NASA’s PO.DAAC, the-coding-club.

diff --git a/external/SWOT_to_kerchunk.html b/external/SWOT_to_kerchunk.html
index f4133e57..105333df 100644
--- a/external/SWOT_to_kerchunk.html
+++ b/external/SWOT_to_kerchunk.html
@@ -1098,7 +1098,7 @@

Kerchunk JSON Generation

-imported on: 2024-07-03
+imported on: 2024-07-05

This notebook is from a different repository in NASA’s PO.DAAC, the-coding-club.

diff --git a/external/VisualizeDopplerScattData.html b/external/VisualizeDopplerScattData.html
index a7eea49e..c6b7a66e 100644
--- a/external/VisualizeDopplerScattData.html
+++ b/external/VisualizeDopplerScattData.html
@@ -1077,7 +1077,7 @@

S-MODE Workshop: Science Case Study Airborne Part 2

-imported on: 2024-07-03
+imported on: 2024-07-05

This notebook is from a different repository in NASA’s PO.DAAC, 2022-SMODE-Open-Data-Workshop

diff --git a/external/cof-zarr-reformat.html b/external/cof-zarr-reformat.html
index 0dfbf175..d1606904 100644
--- a/external/cof-zarr-reformat.html
+++ b/external/cof-zarr-reformat.html
@@ -1079,7 +1079,7 @@

COF Zarr Access via Reformat

-imported on: 2024-07-03
+imported on: 2024-07-05

This notebook is from a different repository in NASA’s PO.DAAC, ECCO.

diff --git a/external/insitu_dataviz_demo.html b/external/insitu_dataviz_demo.html
index 5b3b6ca2..6262c92c 100644
--- a/external/insitu_dataviz_demo.html
+++ b/external/insitu_dataviz_demo.html
@@ -1081,7 +1081,7 @@

S-MODE Workshop: Science Case Study In Situ

-imported on: 2024-07-03
+imported on: 2024-07-05

This notebook is from a different repository in NASA’s PO.DAAC, 2022-SMODE-Open-Data-Workshop

diff --git a/external/zarr-eosdis-store.html b/external/zarr-eosdis-store.html
index 053c8657..634b4069 100644
--- a/external/zarr-eosdis-store.html
+++ b/external/zarr-eosdis-store.html
@@ -1070,7 +1070,7 @@

Zarr Example

-imported on: 2024-07-03
+imported on: 2024-07-05

This notebook is from NASA’s Zarr EOSDIS store notebook

diff --git a/external/zarr_access.html b/external/zarr_access.html
index b38cebfe..ab91e7b3 100644
--- a/external/zarr_access.html
+++ b/external/zarr_access.html
@@ -1089,7 +1089,7 @@

Zarr Access for NetCDF4 files

-imported on: 2024-07-03
+imported on: 2024-07-05

This notebook is from NASA Openscapes 2021 Cloud Hackathon Repository

diff --git a/mcc_api/metadata_compliance_checker_API.html b/mcc_api/metadata_compliance_checker_API.html
deleted file mode 100644
index 18edc760..00000000
--- a/mcc_api/metadata_compliance_checker_API.html
+++ /dev/null
@@ -1,1732 +0,0 @@
-MCC - API POST – PO.DAAC Cookbook
-MCC - API POST
-
-Template for making API calls to Metadata Compliance Checker (MCC) using a local netCDF or HDF file, and outputing the response to JSON
-
-MCC Endpoints
-
-PROD - https://mcc.podaac.earthdatacloud.nasa.gov/
-
-UAT - https://mcc.podaac.uat.earthdatacloud.nasa.gov/mcc
-
-import json
-import requests
-import sys
-
-# Choose VENUE: UAT or PROD
-mcc_env = 'UAT'
-mcc_env = 'PROD'
-
-MCC Dictionary - Environments
-
-url_dict = {
-    'UAT': "https://mcc.podaac.uat.earthdatacloud.nasa.gov/mcc",
-    'PROD': "https://mcc.podaac.earthdatacloud.nasa.gov/"
-}
-
-mcc_host = url_dict.get(mcc_env)
-
-# Print
-print(mcc_env, " - ", mcc_host)
-
-MCC - API POST query example (for a local file)
-
-See https://mcc.podaac.earthdatacloud.nasa.gov/mcc/about_api for a description of the query parameters (accessed via payload{} dictionary in this example)
-
-url = mcc_host + "/check"
-
-payload = {
-'ACDD':'on',
-'ACDD-version':'1.3',
-'CF':'on',
-'CF-version':'1.7',
-'response':'json',
-}
-
-# set the path and filename to upload to MCC
-dirname = "my_local_path_to_the_file"
-filename = "myFile.nc"
-
-# working examples . . . comment out, modify or remove these two lines as needed
-dirname = "./data/"
-filename = "ascat_20210101_000900_metopa_73696_eps_o_coa_3202_ovw.l2.nc"
-
-files=[
-  ('file-upload',
-   (
-    filename,
-    open(dirname+filename,'rb'),'application/octet-stream'
-   )
-  )
-]
-
-headers = {}
-
-# Ping the API; format the response in json; pretty print the json response
-response = requests.request("POST", url, headers=headers, data=payload, files=files)
-json_resp = json.loads(response.text)
-json_resp_formatted = json.dumps(json_resp, indent=2)
-print(json_resp_formatted)
\ No newline at end of file
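For reference, the notebook deleted above reduces to a single multipart POST against the MCC check endpoint. The condensed Python sketch below reuses the production host, form fields, and file-upload structure from the removed cells; the local file path is a placeholder, not a file shipped with this repository.

import json
import requests

# MCC production host and compliance-check endpoint (taken from the removed notebook)
url = "https://mcc.podaac.earthdatacloud.nasa.gov/check"

# Checkers to run and the response format, as in the removed payload dictionary
payload = {
    'ACDD': 'on',
    'ACDD-version': '1.3',
    'CF': 'on',
    'CF-version': '1.7',
    'response': 'json',
}

# Placeholder path to a local netCDF or HDF file to upload
filepath = "./data/myFile.nc"

with open(filepath, 'rb') as f:
    files = [('file-upload', (filepath, f, 'application/octet-stream'))]
    response = requests.post(url, data=payload, files=files)

# Pretty-print the JSON compliance report returned by MCC
print(json.dumps(response.json(), indent=2))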
diff --git a/quarto_text/SWOT.html b/quarto_text/SWOT.html
index 849e23c1..659a9e80 100644
--- a/quarto_text/SWOT.html
+++ b/quarto_text/SWOT.html
@@ -1261,8 +1261,8 @@

Transform

-HiTIDE subsetter for Sea Surface Height Products - select KaRIn instrument in sensors
+HiTIDE subsetter for Sea Surface Height Products - select KaRIn instrument in sensors, see video tutorial here

Hydrocron: Time series API - Currently for rivers, see Hydrocron documentation and more description under tools below. DOI

@@ -1278,6 +1278,7 @@

Tools

Hydrocron DOI - an API that repackages the river shapefile dataset (L2_HR_RiverSP) into csv or GeoJSON formats that make time-series analysis easier. SWOT data is archived as individually timestamped shapefiles, which would otherwise require users to perform potentially thousands of file operations per river feature to view the data as a timeseries. Hydrocron makes this possible with a single API call.

SWODLR - a system for generating on demand raster products from SWOT L2 raster data with custom resolutions, projections, and extents. -in development
+
+HiTIDE subsetter for Sea Surface Height Products - select KaRIn instrument in sensors, see video tutorial here

SWORD of Science

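The Hydrocron entry added in the hunk above advertises single-call time-series access to SWOT river reach data. The Python sketch below is a rough illustration only: the endpoint URL, parameter names, and reach ID are assumptions to verify against the linked Hydrocron documentation.

import requests

# Assumed Hydrocron time-series endpoint; confirm against the Hydrocron documentation
HYDROCRON_URL = "https://soto.podaac.earthdatacloud.nasa.gov/hydrocron/v1/timeseries"

# Hypothetical query: water surface elevation for one SWORD reach over February 2024
params = {
    "feature": "Reach",
    "feature_id": "71224100223",           # placeholder SWORD reach ID
    "start_time": "2024-02-01T00:00:00Z",
    "end_time": "2024-02-29T23:59:59Z",
    "output": "csv",
    "fields": "reach_id,time_str,wse",
}

resp = requests.get(HYDROCRON_URL, params=params)
resp.raise_for_status()
print(resp.text[:500])  # peek at the start of the returned CSV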
diff --git a/search.json b/search.json index 97c07c6a..74bd034d 100644 --- a/search.json +++ b/search.json @@ -91,7 +91,7 @@ "href": "external/insitu_dataviz_demo.html", "title": "S-MODE Workshop: Science Case Study In Situ", "section": "", - "text": "imported on: 2024-07-03\nThis notebook is from a different repository in NASA’s PO.DAAC, 2022-SMODE-Open-Data-Workshop\nimport glob\nfrom netCDF4 import Dataset\nimport xarray as xr\nimport numpy as np\nimport matplotlib.pyplot as plt\n\nimport gsw", + "text": "imported on: 2024-07-05\nThis notebook is from a different repository in NASA’s PO.DAAC, 2022-SMODE-Open-Data-Workshop\nimport glob\nfrom netCDF4 import Dataset\nimport xarray as xr\nimport numpy as np\nimport matplotlib.pyplot as plt\n\nimport gsw", "crumbs": [ "Tutorials", "Dataset Specific", @@ -117,7 +117,7 @@ "href": "external/SWOT_to_kerchunk.html", "title": "Kerchunk JSON Generation", "section": "", - "text": "imported on: 2024-07-03\nThis notebook is from a different repository in NASA’s PO.DAAC, the-coding-club.", + "text": "imported on: 2024-07-05\nThis notebook is from a different repository in NASA’s PO.DAAC, the-coding-club.", "crumbs": [ "Advanced Cloud", "Kerchunk" @@ -352,7 +352,7 @@ "href": "external/zarr_access.html", "title": "Zarr Access for NetCDF4 files", "section": "", - "text": "imported on: 2024-07-03\nThis notebook is from NASA Openscapes 2021 Cloud Hackathon Repository", + "text": "imported on: 2024-07-05\nThis notebook is from NASA Openscapes 2021 Cloud Hackathon Repository", "crumbs": [ "Tutorials", "Cloud Optimized Examples", @@ -400,7 +400,7 @@ "href": "external/cof-zarr-reformat.html", "title": "COF Zarr Access via Reformat", "section": "", - "text": "imported on: 2024-07-03\nThis notebook is from a different repository in NASA’s PO.DAAC, ECCO.", + "text": "imported on: 2024-07-05\nThis notebook is from a different repository in NASA’s PO.DAAC, ECCO.", "crumbs": [ "Tutorials", "Cloud Optimized Examples", @@ -424,7 +424,7 @@ "href": "external/Introduction_to_xarray.html", "title": "Xarray", "section": "", - "text": "imported on: 2024-07-03\nThis notebook is from NASA Openscapes 2021 Cloud Hackathon Repository" + "text": "imported on: 2024-07-05\nThis notebook is from NASA Openscapes 2021 Cloud Hackathon Repository" }, { "objectID": "external/Introduction_to_xarray.html#why-do-we-need-xarray", @@ -3100,34 +3100,6 @@ "section": "Running the Demo", "text": "Running the Demo\nThe remaining notebook walks through constructing a request that first subsets multiple files from a collection and then concatenates the results together into a single output file. 
This is accomplished using the Harmony coverages API through the use of the harmony-py python library.\nThe collection being used in the demonstration is the ASCATB-L2-25km collection which contains operational near-real-time Level 2 ocean surface wind vector retrievals from the Advanced Scatterometer (ASCAT) on MetOp-B at 25 km sampling resolution.\nThe first step is to import the libraries needed to run the demo.\n\nimport xarray as xr\nimport tempfile\nfrom IPython.display import display, JSON\nfrom datetime import datetime, timedelta, time\nfrom harmony import BBox, Client, Collection, Request, Environment, LinkType\n\nfrom mpl_toolkits.basemap import Basemap\nimport matplotlib.pyplot as plt\nimport cartopy.crs as ccrs\nimport s3fs\n\nimport warnings\nwarnings.filterwarnings('ignore')\n%matplotlib inline\n\nCreate Harmony-py client.\n\nharmony_client = Client(env=Environment.PROD)\n\nWith the client created, we can contruct and validate the request. As this is a subsetting + concatenation request, we specify options on the request that define spatial bounds, variables we are interested in, temporal bounds, and indicated the result should be concatenated. Since this is a near real time dataset, we will request the data from yesterday.\n\ncollection = Collection(id='C2075141559-POCLOUD')\n\nyesterday = datetime.today() - timedelta(days=1)\n\nrequest = Request(\n collection=collection,\n spatial=BBox(-180, -30, 180, 30),\n variables=[\n 'wind_speed', \n 'wind_dir'\n ],\n temporal={\n 'start': datetime.combine(yesterday, time.min),\n 'stop': datetime.combine(yesterday, time.max)\n },\n concatenate=True\n)\n\nrequest.is_valid()\n\nTrue\n\n\nNow that we have a valid request we simply need to call the submit function using the client we created earlier and pass in the request as a parameter.\nTip: if you want to see the request before submitting it, use the request_as_curl function on the client to get an equivalent curl command for the request that will be submitted.\n\nprint(harmony_client.request_as_curl(request))\njob_id = harmony_client.submit(request)\nprint(f'Job ID: {job_id}')\n\ncurl -X GET -H 'Accept: */*' -H 'Accept-Encoding: gzip, deflate' -H 'Connection: keep-alive' -H 'Cookie: urs_user_already_logged=yes; token=*****; _urs-gui_session=046f3430c6ca2f9e3e00d94c0bee2f70' -H 'User-Agent: Windows/10 harmony-py/0.4.2 CPython/3.8.12 python-requests/2.25.1' 'https://harmony.earthdata.nasa.gov/C2075141559-POCLOUD/ogc-api-coverages/1.0.0/collections/wind_speed,wind_dir/coverage/rangeset?forceAsync=true&subset=lat%28-30%3A30%29&subset=lon%28-180%3A180%29&subset=time%28%222022-10-19T00%3A00%3A00%22%3A%222022-10-19T23%3A59%3A59.999999%22%29&concatenate=true'\nJob ID: 87ec4775-7949-482c-96b2-11f5e6941d15\n\n\nAfter submitting the request it is possible to retrieve the current processing status by using the job ID returned from the submission.\n\nharmony_client.status(job_id)\n\n{'status': 'running',\n 'message': 'The job is being processed',\n 'progress': 0,\n 'created_at': datetime.datetime(2022, 10, 20, 22, 45, 28, 721000, tzinfo=tzutc()),\n 'updated_at': datetime.datetime(2022, 10, 20, 22, 45, 29, 72000, tzinfo=tzutc()),\n 'created_at_local': '2022-10-20T15:45:28-07:00',\n 'updated_at_local': '2022-10-20T15:45:29-07:00',\n 'data_expiration': datetime.datetime(2022, 11, 19, 22, 45, 28, 721000, tzinfo=tzutc()),\n 'data_expiration_local': '2022-11-19T14:45:28-08:00',\n 'request': 
'https://harmony.earthdata.nasa.gov/C2075141559-POCLOUD/ogc-api-coverages/1.0.0/collections/wind_speed,wind_dir/coverage/rangeset?forceAsync=true&subset=lat(-30%3A30)&subset=lon(-180%3A180)&subset=time(%222022-10-19T00%3A00%3A00%22%3A%222022-10-19T23%3A59%3A59.999999%22)&concatenate=true',\n 'num_input_granules': 16}\n\n\nIf the request is still running, we can wait until the Harmony request has finished processing. This cell will wait until the request has finised.\n\nharmony_client.wait_for_processing(job_id, show_progress=True)\n\n [ Processing: 100% ] |###################################################| [|]\n\n\nNow that the request has completed we can inspect the results using xarray and matplotlib.\nFirst, let’s download the result into a temporary directory\n\ntemp_dir = tempfile.mkdtemp()\nfutures = harmony_client.download_all(job_id, directory=temp_dir, overwrite=True)\nfile_names = [f.result() for f in futures]\nfile_names\n\n['C:\\\\Users\\\\nickles\\\\AppData\\\\Local\\\\Temp\\\\tmpqzco2nld\\\\C2075141559-POCLOUD_merged.nc4']\n\n\nWith the output file downloaded, now we can open concatenated granule using xarray to inspect some of the metadata.\nNotice the variable subset has been successfully executed – only wind_dir and wind_speed vars are present. In addition, there is a new dimension subset_index added to each variable in the dataset. The index of this dimension corresponds to the original file named in the subset_files variable that contained the data at that index.\n\nds = xr.open_dataset(file_names[0], decode_times=False)\nds\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n<xarray.Dataset>\nDimensions: (subset_index: 16, NUMROWS: 596, NUMCELLS: 42)\nCoordinates:\n lat (subset_index, NUMROWS, NUMCELLS) float32 ...\n lon (subset_index, NUMROWS, NUMCELLS) float32 ...\nDimensions without coordinates: subset_index, NUMROWS, NUMCELLS\nData variables:\n subset_files (subset_index) object 'ascat_20221018_222700_metopb_52328_e...\n time (subset_index, NUMROWS, NUMCELLS) float64 ...\n wind_speed (subset_index, NUMROWS, NUMCELLS) float32 ...\n wind_dir (subset_index, NUMROWS, NUMCELLS) float32 ...\nAttributes: (12/18)\n title: MetOp-B ASCAT Level 2 25.0 km Ocean Sur...\n title_short_name: ASCATB-L2-25km\n Conventions: CF-1.6\n institution: EUMETSAT/OSI SAF/KNMI\n source: MetOp-B ASCAT\n software_identification_level_1: 1000\n ... 
...\n processing_level: L2\n rev_orbit_period: 6081.7\n orbit_inclination: 98.7\n references: ASCAT Wind Product User Manual, https:/...\n comment: Orbit period and inclination are consta...\n history_json: [{\"date_time\": \"2022-10-20T22:45:37.904...xarray.DatasetDimensions:subset_index: 16NUMROWS: 596NUMCELLS: 42Coordinates: (2)lat(subset_index, NUMROWS, NUMCELLS)float32...valid_min :-9000000valid_max :9000000standard_name :latitudelong_name :latitudeunits :degrees_north[400512 values with dtype=float32]lon(subset_index, NUMROWS, NUMCELLS)float32...valid_min :0valid_max :36000000standard_name :longitudelong_name :longitudeunits :degrees_east[400512 values with dtype=float32]Data variables: (4)subset_files(subset_index)object...long_name :List of subsetted files used to create this merge product.array(['ascat_20221018_222700_metopb_52328_eps_o_250_3301_ovw.l2_subsetted.nc4',\n 'ascat_20221019_152100_metopb_52338_eps_o_250_3301_ovw.l2_subsetted.nc4',\n 'ascat_20221019_170300_metopb_52339_eps_o_250_3301_ovw.l2_subsetted.nc4',\n 'ascat_20221019_000900_metopb_52329_eps_o_250_3301_ovw.l2_subsetted.nc4',\n 'ascat_20221019_184500_metopb_52340_eps_o_250_3301_ovw.l2_subsetted.nc4',\n 'ascat_20221019_220600_metopb_52342_eps_o_250_3301_ovw.l2_subsetted.nc4',\n 'ascat_20221019_202700_metopb_52341_eps_o_250_3301_ovw.l2_subsetted.nc4',\n 'ascat_20221019_234800_metopb_52343_eps_o_250_3301_ovw.l2_subsetted.nc4',\n 'ascat_20221019_015100_metopb_52330_eps_o_250_3301_ovw.l2_subsetted.nc4',\n 'ascat_20221019_051200_metopb_52332_eps_o_250_3301_ovw.l2_subsetted.nc4',\n 'ascat_20221019_033300_metopb_52331_eps_o_250_3301_ovw.l2_subsetted.nc4',\n 'ascat_20221019_065400_metopb_52333_eps_o_250_3301_ovw.l2_subsetted.nc4',\n 'ascat_20221019_083600_metopb_52334_eps_o_250_3301_ovw.l2_subsetted.nc4',\n 'ascat_20221019_101800_metopb_52335_eps_o_250_3301_ovw.l2_subsetted.nc4',\n 'ascat_20221019_133900_metopb_52337_eps_o_250_3301_ovw.l2_subsetted.nc4',\n 'ascat_20221019_120000_metopb_52336_eps_o_250_3301_ovw.l2_subsetted.nc4'],\n dtype=object)time(subset_index, NUMROWS, NUMCELLS)float64...valid_min :0valid_max :2147483647standard_name :timelong_name :timeunits :seconds since 1990-01-01calendar :proleptic_gregorian[400512 values with dtype=float64]wind_speed(subset_index, NUMROWS, NUMCELLS)float32...valid_min :0valid_max :5000standard_name :wind_speedlong_name :wind speed at 10 munits :m s-1[400512 values with dtype=float32]wind_dir(subset_index, NUMROWS, NUMCELLS)float32...valid_min :0valid_max :3600standard_name :wind_to_directionlong_name :wind direction at 10 munits :degree[400512 values with dtype=float32]Attributes: (18)title :MetOp-B ASCAT Level 2 25.0 km Ocean Surface Wind Vector Producttitle_short_name :ASCATB-L2-25kmConventions :CF-1.6institution :EUMETSAT/OSI SAF/KNMIsource :MetOp-B ASCATsoftware_identification_level_1 :1000instrument_calibration_version :0software_identification_wind :3301pixel_size_on_horizontal :25.0 kmservice_type :epsprocessing_type :Ocontents :ovwprocessing_level :L2rev_orbit_period :6081.7orbit_inclination :98.7references :ASCAT Wind Product User Manual, https://osi-saf.eumetsat.int/, https://scatterometer.knmi.nl/comment :Orbit period and inclination are constant values. All wind directions in oceanographic convention (0 deg. 
flowing North)history_json :[{\"date_time\": \"2022-10-20T22:45:37.904685+00:00\", \"derived_from\": \"https://archive.podaac.earthdata.nasa.gov/podaac-ops-cumulus-protected/ASCATB-L2-25km/ascat_20221018_222700_metopb_52328_eps_o_250_3301_ovw.l2.nc\", \"program\": \"l2ss-py\", \"version\": \"2.2.0\", \"parameters\": \"bbox=[[-180, 180], [-30, 30]] cut=True\", \"program_ref\": \"https://cmr.earthdata.nasa.gov:443/search/concepts/S1962070864-POCLOUD\", \"$schema\": \"https://harmony.earthdata.nasa.gov/schemas/history/0.1.0/history-v0.1.0.json\"}, {\"date_time\": \"2022-10-20T22:45:40.891502+00:00\", \"derived_from\": \"https://archive.podaac.earthdata.nasa.gov/podaac-ops-cumulus-protected/ASCATB-L2-25km/ascat_20221019_152100_metopb_52338_eps_o_250_3301_ovw.l2.nc\", \"program\": \"l2ss-py\", \"version\": \"2.2.0\", \"parameters\": \"bbox=[[-180, 180], [-30, 30]] cut=True\", \"program_ref\": \"https://cmr.earthdata.nasa.gov:443/search/concepts/S1962070864-POCLOUD\", \"$schema\": \"https://harmony.earthdata.nasa.gov/schemas/history/0.1.0/history-v0.1.0.json\"}, {\"date_time\": \"2022-10-20T22:45:37.825551+00:00\", \"derived_from\": \"https://archive.podaac.earthdata.nasa.gov/podaac-ops-cumulus-protected/ASCATB-L2-25km/ascat_20221019_184500_metopb_52340_eps_o_250_3301_ovw.l2.nc\", \"program\": \"l2ss-py\", \"version\": \"2.2.0\", \"parameters\": \"bbox=[[-180, 180], [-30, 30]] cut=True\", \"program_ref\": \"https://cmr.earthdata.nasa.gov:443/search/concepts/S1962070864-POCLOUD\", \"$schema\": \"https://harmony.earthdata.nasa.gov/schemas/history/0.1.0/history-v0.1.0.json\"}, {\"date_time\": \"2022-10-20T22:45:38.951797+00:00\", \"derived_from\": \"https://archive.podaac.earthdata.nasa.gov/podaac-ops-cumulus-protected/ASCATB-L2-25km/ascat_20221019_220600_metopb_52342_eps_o_250_3301_ovw.l2.nc\", \"program\": \"l2ss-py\", \"version\": \"2.2.0\", \"parameters\": \"bbox=[[-180, 180], [-30, 30]] cut=True\", \"program_ref\": \"https://cmr.earthdata.nasa.gov:443/search/concepts/S1962070864-POCLOUD\", \"$schema\": \"https://harmony.earthdata.nasa.gov/schemas/history/0.1.0/history-v0.1.0.json\"}, {\"date_time\": \"2022-10-20T22:45:39.479597+00:00\", \"derived_from\": \"https://archive.podaac.earthdata.nasa.gov/podaac-ops-cumulus-protected/ASCATB-L2-25km/ascat_20221019_015100_metopb_52330_eps_o_250_3301_ovw.l2.nc\", \"program\": \"l2ss-py\", \"version\": \"2.2.0\", \"parameters\": \"bbox=[[-180, 180], [-30, 30]] cut=True\", \"program_ref\": \"https://cmr.earthdata.nasa.gov:443/search/concepts/S1962070864-POCLOUD\", \"$schema\": \"https://harmony.earthdata.nasa.gov/schemas/history/0.1.0/history-v0.1.0.json\"}, {\"date_time\": \"2022-10-20T22:45:40.201629+00:00\", \"derived_from\": \"https://archive.podaac.earthdata.nasa.gov/podaac-ops-cumulus-protected/ASCATB-L2-25km/ascat_20221019_065400_metopb_52333_eps_o_250_3301_ovw.l2.nc\", \"program\": \"l2ss-py\", \"version\": \"2.2.0\", \"parameters\": \"bbox=[[-180, 180], [-30, 30]] cut=True\", \"program_ref\": \"https://cmr.earthdata.nasa.gov:443/search/concepts/S1962070864-POCLOUD\", \"$schema\": \"https://harmony.earthdata.nasa.gov/schemas/history/0.1.0/history-v0.1.0.json\"}, {\"date_time\": \"2022-10-20T22:45:39.958642+00:00\", \"derived_from\": \"https://archive.podaac.earthdata.nasa.gov/podaac-ops-cumulus-protected/ASCATB-L2-25km/ascat_20221019_101800_metopb_52335_eps_o_250_3301_ovw.l2.nc\", \"program\": \"l2ss-py\", \"version\": \"2.2.0\", \"parameters\": \"bbox=[[-180, 180], [-30, 30]] cut=True\", \"program_ref\": 
\"https://cmr.earthdata.nasa.gov:443/search/concepts/S1962070864-POCLOUD\", \"$schema\": \"https://harmony.earthdata.nasa.gov/schemas/history/0.1.0/history-v0.1.0.json\"}, {\"date_time\": \"2022-10-20T22:45:37.611733+00:00\", \"derived_from\": \"https://archive.podaac.earthdata.nasa.gov/podaac-ops-cumulus-protected/ASCATB-L2-25km/ascat_20221019_170300_metopb_52339_eps_o_250_3301_ovw.l2.nc\", \"program\": \"l2ss-py\", \"version\": \"2.2.0\", \"parameters\": \"bbox=[[-180, 180], [-30, 30]] cut=True\", \"program_ref\": \"https://cmr.earthdata.nasa.gov:443/search/concepts/S1962070864-POCLOUD\", \"$schema\": \"https://harmony.earthdata.nasa.gov/schemas/history/0.1.0/history-v0.1.0.json\"}, {\"date_time\": \"2022-10-20T22:45:40.394288+00:00\", \"derived_from\": \"https://archive.podaac.earthdata.nasa.gov/podaac-ops-cumulus-protected/ASCATB-L2-25km/ascat_20221019_202700_metopb_52341_eps_o_250_3301_ovw.l2.nc\", \"program\": \"l2ss-py\", \"version\": \"2.2.0\", \"parameters\": \"bbox=[[-180, 180], [-30, 30]] cut=True\", \"program_ref\": \"https://cmr.earthdata.nasa.gov:443/search/concepts/S1962070864-POCLOUD\", \"$schema\": \"https://harmony.earthdata.nasa.gov/schemas/history/0.1.0/history-v0.1.0.json\"}, {\"date_time\": \"2022-10-20T22:45:39.465600+00:00\", \"derived_from\": \"https://archive.podaac.earthdata.nasa.gov/podaac-ops-cumulus-protected/ASCATB-L2-25km/ascat_20221019_051200_metopb_52332_eps_o_250_3301_ovw.l2.nc\", \"program\": \"l2ss-py\", \"version\": \"2.2.0\", \"parameters\": \"bbox=[[-180, 180], [-30, 30]] cut=True\", \"program_ref\": \"https://cmr.earthdata.nasa.gov:443/search/concepts/S1962070864-POCLOUD\", \"$schema\": \"https://harmony.earthdata.nasa.gov/schemas/history/0.1.0/history-v0.1.0.json\"}, {\"date_time\": \"2022-10-20T22:45:39.632834+00:00\", \"derived_from\": \"https://archive.podaac.earthdata.nasa.gov/podaac-ops-cumulus-protected/ASCATB-L2-25km/ascat_20221019_083600_metopb_52334_eps_o_250_3301_ovw.l2.nc\", \"program\": \"l2ss-py\", \"version\": \"2.2.0\", \"parameters\": \"bbox=[[-180, 180], [-30, 30]] cut=True\", \"program_ref\": \"https://cmr.earthdata.nasa.gov:443/search/concepts/S1962070864-POCLOUD\", \"$schema\": \"https://harmony.earthdata.nasa.gov/schemas/history/0.1.0/history-v0.1.0.json\"}, {\"date_time\": \"2022-10-20T22:45:43.428456+00:00\", \"derived_from\": \"https://archive.podaac.earthdata.nasa.gov/podaac-ops-cumulus-protected/ASCATB-L2-25km/ascat_20221019_120000_metopb_52336_eps_o_250_3301_ovw.l2.nc\", \"program\": \"l2ss-py\", \"version\": \"2.2.0\", \"parameters\": \"bbox=[[-180, 180], [-30, 30]] cut=True\", \"program_ref\": \"https://cmr.earthdata.nasa.gov:443/search/concepts/S1962070864-POCLOUD\", \"$schema\": \"https://harmony.earthdata.nasa.gov/schemas/history/0.1.0/history-v0.1.0.json\"}, {\"date_time\": \"2022-10-20T22:45:37.471227+00:00\", \"derived_from\": \"https://archive.podaac.earthdata.nasa.gov/podaac-ops-cumulus-protected/ASCATB-L2-25km/ascat_20221019_000900_metopb_52329_eps_o_250_3301_ovw.l2.nc\", \"program\": \"l2ss-py\", \"version\": \"2.2.0\", \"parameters\": \"bbox=[[-180, 180], [-30, 30]] cut=True\", \"program_ref\": \"https://cmr.earthdata.nasa.gov:443/search/concepts/S1962070864-POCLOUD\", \"$schema\": \"https://harmony.earthdata.nasa.gov/schemas/history/0.1.0/history-v0.1.0.json\"}, {\"date_time\": \"2022-10-20T22:45:39.335118+00:00\", \"derived_from\": \"https://archive.podaac.earthdata.nasa.gov/podaac-ops-cumulus-protected/ASCATB-L2-25km/ascat_20221019_234800_metopb_52343_eps_o_250_3301_ovw.l2.nc\", \"program\": \"l2ss-py\", 
\"version\": \"2.2.0\", \"parameters\": \"bbox=[[-180, 180], [-30, 30]] cut=True\", \"program_ref\": \"https://cmr.earthdata.nasa.gov:443/search/concepts/S1962070864-POCLOUD\", \"$schema\": \"https://harmony.earthdata.nasa.gov/schemas/history/0.1.0/history-v0.1.0.json\"}, {\"date_time\": \"2022-10-20T22:45:40.743323+00:00\", \"derived_from\": \"https://archive.podaac.earthdata.nasa.gov/podaac-ops-cumulus-protected/ASCATB-L2-25km/ascat_20221019_033300_metopb_52331_eps_o_250_3301_ovw.l2.nc\", \"program\": \"l2ss-py\", \"version\": \"2.2.0\", \"parameters\": \"bbox=[[-180, 180], [-30, 30]] cut=True\", \"program_ref\": \"https://cmr.earthdata.nasa.gov:443/search/concepts/S1962070864-POCLOUD\", \"$schema\": \"https://harmony.earthdata.nasa.gov/schemas/history/0.1.0/history-v0.1.0.json\"}, {\"date_time\": \"2022-10-20T22:45:43.732829+00:00\", \"derived_from\": \"https://archive.podaac.earthdata.nasa.gov/podaac-ops-cumulus-protected/ASCATB-L2-25km/ascat_20221019_133900_metopb_52337_eps_o_250_3301_ovw.l2.nc\", \"program\": \"l2ss-py\", \"version\": \"2.2.0\", \"parameters\": \"bbox=[[-180, 180], [-30, 30]] cut=True\", \"program_ref\": \"https://cmr.earthdata.nasa.gov:443/search/concepts/S1962070864-POCLOUD\", \"$schema\": \"https://harmony.earthdata.nasa.gov/schemas/history/0.1.0/history-v0.1.0.json\"}, {\"date_time\": \"2022-10-20T22:45:48.424799+00:00\", \"derived_from\": [\"ascat_20221018_222700_metopb_52328_eps_o_250_3301_ovw.l2_subsetted.nc4\", \"ascat_20221019_152100_metopb_52338_eps_o_250_3301_ovw.l2_subsetted.nc4\", \"ascat_20221019_170300_metopb_52339_eps_o_250_3301_ovw.l2_subsetted.nc4\", \"ascat_20221019_000900_metopb_52329_eps_o_250_3301_ovw.l2_subsetted.nc4\", \"ascat_20221019_184500_metopb_52340_eps_o_250_3301_ovw.l2_subsetted.nc4\", \"ascat_20221019_220600_metopb_52342_eps_o_250_3301_ovw.l2_subsetted.nc4\", \"ascat_20221019_202700_metopb_52341_eps_o_250_3301_ovw.l2_subsetted.nc4\", \"ascat_20221019_234800_metopb_52343_eps_o_250_3301_ovw.l2_subsetted.nc4\", \"ascat_20221019_015100_metopb_52330_eps_o_250_3301_ovw.l2_subsetted.nc4\", \"ascat_20221019_051200_metopb_52332_eps_o_250_3301_ovw.l2_subsetted.nc4\", \"ascat_20221019_033300_metopb_52331_eps_o_250_3301_ovw.l2_subsetted.nc4\", \"ascat_20221019_065400_metopb_52333_eps_o_250_3301_ovw.l2_subsetted.nc4\", \"ascat_20221019_083600_metopb_52334_eps_o_250_3301_ovw.l2_subsetted.nc4\", \"ascat_20221019_101800_metopb_52335_eps_o_250_3301_ovw.l2_subsetted.nc4\", \"ascat_20221019_133900_metopb_52337_eps_o_250_3301_ovw.l2_subsetted.nc4\", \"ascat_20221019_120000_metopb_52336_eps_o_250_3301_ovw.l2_subsetted.nc4\"], \"program\": \"concise\", \"version\": \"0.5.0\", \"parameters\": \"input_files=[PosixPath('/tmp/tmp6qevy37z/ascat_20221018_222700_metopb_52328_eps_o_250_3301_ovw.l2_subsetted.nc4'), PosixPath('/tmp/tmp6qevy37z/ascat_20221019_152100_metopb_52338_eps_o_250_3301_ovw.l2_subsetted.nc4'), PosixPath('/tmp/tmp6qevy37z/ascat_20221019_170300_metopb_52339_eps_o_250_3301_ovw.l2_subsetted.nc4'), PosixPath('/tmp/tmp6qevy37z/ascat_20221019_000900_metopb_52329_eps_o_250_3301_ovw.l2_subsetted.nc4'), PosixPath('/tmp/tmp6qevy37z/ascat_20221019_184500_metopb_52340_eps_o_250_3301_ovw.l2_subsetted.nc4'), PosixPath('/tmp/tmp6qevy37z/ascat_20221019_220600_metopb_52342_eps_o_250_3301_ovw.l2_subsetted.nc4'), PosixPath('/tmp/tmp6qevy37z/ascat_20221019_202700_metopb_52341_eps_o_250_3301_ovw.l2_subsetted.nc4'), PosixPath('/tmp/tmp6qevy37z/ascat_20221019_234800_metopb_52343_eps_o_250_3301_ovw.l2_subsetted.nc4'), 
PosixPath('/tmp/tmp6qevy37z/ascat_20221019_015100_metopb_52330_eps_o_250_3301_ovw.l2_subsetted.nc4'), PosixPath('/tmp/tmp6qevy37z/ascat_20221019_051200_metopb_52332_eps_o_250_3301_ovw.l2_subsetted.nc4'), PosixPath('/tmp/tmp6qevy37z/ascat_20221019_033300_metopb_52331_eps_o_250_3301_ovw.l2_subsetted.nc4'), PosixPath('/tmp/tmp6qevy37z/ascat_20221019_065400_metopb_52333_eps_o_250_3301_ovw.l2_subsetted.nc4'), PosixPath('/tmp/tmp6qevy37z/ascat_20221019_083600_metopb_52334_eps_o_250_3301_ovw.l2_subsetted.nc4'), PosixPath('/tmp/tmp6qevy37z/ascat_20221019_101800_metopb_52335_eps_o_250_3301_ovw.l2_subsetted.nc4'), PosixPath('/tmp/tmp6qevy37z/ascat_20221019_133900_metopb_52337_eps_o_250_3301_ovw.l2_subsetted.nc4'), PosixPath('/tmp/tmp6qevy37z/ascat_20221019_120000_metopb_52336_eps_o_250_3301_ovw.l2_subsetted.nc4')]\", \"program_ref\": \"https://cmr.earthdata.nasa.gov:443/search/concepts/S2153799015-POCLOUD\", \"$schema\": \"https://harmony.earthdata.nasa.gov/schemas/history/0.1.0/history-v0.1.0.json\"}]\n\n\nUsing matplotlib, we can genearte a plot for each granule that makes up this concatenated granule. Plot wind_speed for each granule using subset_index dimension.\n\nfig = plt.figure(figsize=(20, 40))\n\nfor index in range(0, len(ds.subset_index)): \n ax = fig.add_subplot((len(ds.subset_index)+1)//2, 2, index+1, projection=ccrs.PlateCarree())\n\n p = ds.isel(subset_index=index).plot.scatter(\n y=\"lat\",\n x=\"lon\",\n hue=\"wind_speed\",\n s=1,\n levels=9,\n cmap=\"jet\",\n ax=ax\n )\n \n ax.set_global()\n ax.coastlines()\n\nplt.show()\n\n\n\n\n\n\n\n\nPlot wind_speed for all data in this concatenated granule on a single map. Notice that the data is within the spatial bounds we provided earlier.\n\nplt.figure(figsize=(12, 6))\nax = plt.axes(projection=ccrs.PlateCarree())\n\np = ds.plot.scatter(\n y=\"lat\",\n x=\"lon\",\n hue=\"wind_speed\",\n s=1,\n levels=9,\n cmap=\"jet\",\n ax=ax\n)\n\nax.set_global()\nax.coastlines()\nplt.show()" }, - { - "objectID": "mcc_api/metadata_compliance_checker_API.html", - "href": "mcc_api/metadata_compliance_checker_API.html", - "title": "MCC - API POST", - "section": "", - "text": "Template for making API calls to Metadata Compliance Checker (MCC) using a local netCDF or HDF file, and outputing the response to JSON" - }, - { - "objectID": "mcc_api/metadata_compliance_checker_API.html#mcc-endpoints", - "href": "mcc_api/metadata_compliance_checker_API.html#mcc-endpoints", - "title": "MCC - API POST", - "section": "MCC Endpoints", - "text": "MCC Endpoints\nPROD - https://mcc.podaac.earthdatacloud.nasa.gov/\nUAT - https://mcc.podaac.uat.earthdatacloud.nasa.gov/mcc\n\nimport json\nimport requests\nimport sys\n\n# Choose VENUE: UAT or PROD\nmcc_env = 'UAT'\nmcc_env = 'PROD'" - }, - { - "objectID": "mcc_api/metadata_compliance_checker_API.html#mcc-dictionary---environments", - "href": "mcc_api/metadata_compliance_checker_API.html#mcc-dictionary---environments", - "title": "MCC - API POST", - "section": "MCC Dictionary - Environments", - "text": "MCC Dictionary - Environments\n\nurl_dict = {\n 'UAT': \"https://mcc.podaac.uat.earthdatacloud.nasa.gov/mcc\",\n 'PROD': \"https://mcc.podaac.earthdatacloud.nasa.gov/\"\n}\n\nmcc_host = url_dict.get(mcc_env)\n\n\n# Print\nprint(mcc_env, \" - \" , mcc_host)" - }, - { - "objectID": "mcc_api/metadata_compliance_checker_API.html#mcc---api-post-query-example-for-a-local-file", - "href": "mcc_api/metadata_compliance_checker_API.html#mcc---api-post-query-example-for-a-local-file", - "title": "MCC - API POST", - "section": "MCC - API 
POST query example (for a local file)", - "text": "MCC - API POST query example (for a local file)\nSee https://mcc.podaac.earthdatacloud.nasa.gov/mcc/about_api for a description of the query parameters (accessed via payload{} dictionary in this example)\n\nurl = mcc_host + \"/check\"\n\npayload = {\n'ACDD':'on',\n'ACDD-version':'1.3',\n'CF':'on',\n'CF-version':'1.7',\n'response':'json',\n}\n\n\n# set the path and filename to upload to MCC\ndirname = \"my_local_path_to_the_file\"\nfilename = \"myFile.nc\"\n\n# working examples . . . comment out, modify or remove these two lines as needed\ndirname = \"./data/\"\nfilename = \"ascat_20210101_000900_metopa_73696_eps_o_coa_3202_ovw.l2.nc\"\n\n\nfiles=[\n ('file-upload',\n (\n filename, \n open(dirname+filename,'rb'),'application/octet-stream'\n\n )\n )\n]\n\nheaders = {}\n\n# Ping the API; format the response in json; pretty print the json response\nresponse = requests.request(\"POST\", url, headers=headers, data=payload, files=files)\njson_resp = json.loads(response.text)\njson_resp_formatted = json.dumps(json_resp, indent=2)\nprint(json_resp_formatted)" - }, { "objectID": "notebooks/harmony_concatenation/Harmony_Concatenation.html#what-is-concise", "href": "notebooks/harmony_concatenation/Harmony_Concatenation.html#what-is-concise", @@ -5284,7 +5256,7 @@ "href": "quarto_text/SWOT.html#swot-data-resources-tutorials", "title": "SWOT", "section": "SWOT Data Resources & Tutorials", - "text": "SWOT Data Resources & Tutorials\n\nSearch & Download\n\nVia Graphical User Interface:\n\nFind/download SWOT data on Earthdata Search\n\n\n\nProgrammatically: ie. within Python code workflows\n\nSearch and Download via earthaccess\nwith unique SWORD river reach ID\nwith unique Hydrologic Unit Code (HUC) basin ID\n\n\n\nVia Command Line - PO.DAAC subscriber/downloader examples:\nHydrology: These examples will download either the river vector files or the raster files for February 2024:\npodaac-data-downloader -c SWOT_L2_HR_RiverSP_2.0 -d ./SWOT_L2_HR_RiverSP_2.0/ --start-date 2024-02-01T00:00:00Z --end-date 2024-02-29T23:59:59Z\nThis only downloads 1 hours worth of data for the globe:\npodaac-data-downloader -c SWOT_L2_HR_Raster_2.0 -d ./SWOT_L2_HR_Raster_2.0/ --start-date 2024-02-01T00:00:00Z --end-date 2024-02-29T00:59:59Z\nOceanography: These examples will download modeled sea surface heights for the whole SSH collection and then the anomalies using the subscriber then downloader and finally, subset the data by bounding box:\npodaac-data-subscriber -c SWOT_L2_LR_SSH_2.0 -d ./SWOT_L2_LR_SSH_2.0/ --start-date 2023-03-29T00:00:00Z \npodaac-data-subscriber -c SWOT_L2_NALT_OGDR_SSHA_2.0 -d ./data/SWOT_L2_NALT_OGDR_SSHA_2.0 --start-date 2023-08-01T00:00:00Z --end-date 2023-08-02T00:00:00Z\npodaac-data-downloader -c SWOT_L2_NALT_OGDR_SSHA_2.0 -d ./data/SWOT_L2_NALT_OGDR_SSHA_2.0 --start-date 2023-06-23T00:00:00Z --end-date 2023-06-23T06:00:00Z\npodaac-data-downloader -c SWOT_L2_LR_SSH_Basic_2.0 -d ./data -sd 2023-11-25T00:00:00Z -ed 2023-12-15T00:00:00Z -b=\"-22.0,-27,6.5,0\" --subset\n\nSee how to Download/Subscribe for more information on how to use the PO.DAAC subscriber/downloader including with spatial queries.\n\n\n\nSearch SWOT Passes over Time\nCNES developed this dedicated visualization tool for a quick look at where SWOT has been, where it is, and where it will be. Once you have selected the area of interest, click the Search button to search for SWOT passes. 
The results are displayed in a table and the swaths that intersect the area of interest are displayed on the map. Click on the marker to view the pass number.\nTo launch the Binder application, click on this link.\nTo launch jupyterlab in Binder, clink on this link.\nNote: The Binder versions of this are for casual quick looks, but we recommend for extensive use to access the Jupyter Notebook directly here.\n\n\nSWOT Spatial Coverage\nTo identify spatial coverage/search terms for the science 21-day orbit, PO.DAAC has created a KMZ file that has layers of the SWOT passes and tiles, with corresponding scene numbers identified in the pop-up when a location is selected (see screenshot below). Each layer has direct links to Earthdata Search results (the ‘search’ links) for corresponding files. The passes layer has useful information for all SWOT products, but links to the LR products specifically, the tiles layer is useful for HR products (L1B_HR_SLC, L2_HR_PIXC, and L2_HR_PIXCVec products use tile spatial extents while the L2_HR_Raster product uses scenes. L2_HR_RiverSP and L2_HR_LakeSp use continent-level passes).\nTo download the KMZ file, for the science 21-day orbit, click here.\nFor the Beta Pre-validated data KMZ that used the cal/val 1-day orbit, click here.\nThese files can be opened in the Google Earth desktop application and viewed like the following:\n\n\n\n\n\nScreenshot of pass and tile layer in spatial coverage KMZ file viewed in the Google Earth Desktop application\n\n\n\nThe KaRIN HR Masks true/false text pop up for tiles comes from the two different masks used for different parts of the year. The ‘Seasonal’ mask is used from Dec 1st to March 1st and removes part of the Canadian archipelago coverage to collect additional data over sea ice instead, indicated by true/false statements.\n\n\nTips for SWOT Spatial Search\nTo support spatial search of SWOT data products, the following naming conventions may be of help. Tip: use these shortname identifiers below when searching for SWOT data in the NASA Earthdata Search portal or programmatically using the CMR API and/or earthaccess python library.\nSWOT HR data products use Tiles, Scenes, or Continent-level Swaths IDs depending on the product, which define the spatial extent of what is in each file, as follows in the chart below. Along-track scene and tile numbers are numbered sequentially following the spacecraft flight direction, so the numbers increase from south to north for ascending passes and from north to south for descending passes. SWOT LR products use global swaths and nadir tracks that use pass numbers. 
See SWOT Spatial Coverage Section above for information to find the pass, tile or scene numbers.\n\n\n\n\n\n\n\n\n\n\nProduct (organized by…)\nFile Naming Convention\nNotes\n\n\n\n\n\n\nL2_HR_RiverSP L2_HR_LakeSP (continent-level swaths)\nPPP_CC\nPPP = pass number (valid range: 001-584) CC = continent code (options listed below) AF - Africa EU - Europe and Middle East SI - Siberia AS - Central and Southeast Asia AU - Australia and Oceania SA - South America NA - North America and Caribbean AR - North American Arctic GR - Greenland Ex: 013_NA = pass 013, North America\n\n\n\n\nL2_HR_PIXC L2_HR_PIXCVec L1B_HR_SLC (tiles)\nPPP_TTTC\nPPP = pass number (valid range: 001-584) TTT = tile number (valid range: 001-308) C = character L or R corresponding to left or right swaths Ex: 001_120R = pass 001, right swath, tile 120\n\n\n\n\nL2_HR_Raster (scenes)\nPPP_SSS\nPPP = pass number (valid range: 001-584) SSS = scene number (valid range: 001-154) Scenes correspond to 2 x 2 sets of tiles scene number x 2 = tile number Ex: 001_060 = pass 001, scene 60, corresponding to the same location as the PIXC/PIXCVec tile example above.\n\n\n\n\nL2_RAD_(O/I)GDR L2_NALT_(O/I)GDR(nadir) L2_LR_SSH (swath)\nPPP_\nPPP = pass number (valid range: 001-584) Ex: 013_ = pass 013\n\n\n\n\n\nIn Earthdata Search GUI:\n\nUse the top left Search Box and search with keywords, e.g. SWOT L2 HR\nSelect a collection of interest\nA Filter Granule filtering capability will show up on the left hand side of the GUI. Recall naming convention is _cycle_pass_spatialIdentifier_.\n\nUse wildcards to narrow down spatially, using one of the codes from the table above depending on your use case. Tip: use underscores ( _ ) with your wildcard key words for a more specific search.\nExample: *_NA_* will filter the RiverSP or LakeSP collection selected to only return those granules (files) that are part of the North America collection\nExample: *_004_256_* will filter the RiverSP or LakeSP collection selected to only return those granules (files) that correspond to cycle 004, pass 256\nExample: *_004_253_128* will filter the Raster collection selected to only return those granules (files) that correspond to cycle 004, pass 253, scene 128\n\nIn addition, you can also draw a region of interest (ROI) on the map, using the Spatial Search Filter icon or the Advanced Search under the main search box. These will help to filter what is returned for the spatial search. Tip: It is recommended that ROI searches are used together with wildcards described above for a more accurate search.\n\n\n\n\nAccess & Visualization\n\n\n\n\n\nAccess SWOT Hydrology data in the cloud | locally\n\n\nAccess SWOT Oceanography data in the cloud | locally\n\n\nSWOT Raster Multifile Access & Quality Flag Application in the cloud | locally\n\n\nHydrocron: Time series API Multi-reach tutorial - See Hydrocron documentation and more description under tools below. 
\n\n\nQuality Flag Tutorial - Quality Flag Tips for all products, specifically demonstrates SSHA 8-bit quality flag application\n\n\n\nData Story\n\nSWOT Hydrology Science Workflow in the Cloud - Retrieving SWOT attributes (WSE, width, slope) and plotting a longitudinal profile along a river or over a basin\n\n\n\nGIS workflows\n\nSWOT: Through a GIS Lens StoryMap\n\n\nShapefile exploration\n\n\nTransform SWOT Datetime field for use in GIS Software\n\n\n\nTransform\n\nHiTIDE subsetter for Sea Surface Height Products - select KaRIn instrument in sensors\n\n\nHydrocron: Time series API - Currently for rivers, see Hydrocron documentation and more description under tools below. \n\n\nTransform SWOT Hydrology lake shapefiles into time series - work around for lake time series while Hydrocron is under development to include lakes.\n\n\nNetCDF to Geotiff Conversion - mac or Linux | Windows\n\n\n\nTools\nHydrocron - an API that repackages the river shapefile dataset (L2_HR_RiverSP) into csv or GeoJSON formats that make time-series analysis easier. SWOT data is archived as individually timestamped shapefiles, which would otherwise require users to perform potentially thousands of file operations per river feature to view the data as a timeseries. Hydrocron makes this possible with a single API call.\nSWODLR - a system for generating on demand raster products from SWOT L2 raster data with custom resolutions, projections, and extents. -in development\n\n\nSWORD of Science\nThe SWORD of Science (SoS) is a community-driven dataset produced for and from the execution of the Confluence workflow which is a cloud-based workflow that executes on SWOT observations to produce river discharge parameter estimates. Data granules contain two files, priors and results. The priors file contains prior information, such as in-situ gauge data and model output that is used to generate the discharge products. The results file contains the resulting river discharge data products.\n\nExplore river discharge\nExplore river discharge with gauge data\nPlot ALL river discharge algorithms\nVisualize river discharge", + "text": "SWOT Data Resources & Tutorials\n\nSearch & Download\n\nVia Graphical User Interface:\n\nFind/download SWOT data on Earthdata Search\n\n\n\nProgrammatically: ie. 
within Python code workflows\n\nSearch and Download via earthaccess\nwith unique SWORD river reach ID\nwith unique Hydrologic Unit Code (HUC) basin ID\n\n\n\nVia Command Line - PO.DAAC subscriber/downloader examples:\nHydrology: These examples will download either the river vector files or the raster files for February 2024:\npodaac-data-downloader -c SWOT_L2_HR_RiverSP_2.0 -d ./SWOT_L2_HR_RiverSP_2.0/ --start-date 2024-02-01T00:00:00Z --end-date 2024-02-29T23:59:59Z\nThis only downloads 1 hours worth of data for the globe:\npodaac-data-downloader -c SWOT_L2_HR_Raster_2.0 -d ./SWOT_L2_HR_Raster_2.0/ --start-date 2024-02-01T00:00:00Z --end-date 2024-02-29T00:59:59Z\nOceanography: These examples will download modeled sea surface heights for the whole SSH collection and then the anomalies using the subscriber then downloader and finally, subset the data by bounding box:\npodaac-data-subscriber -c SWOT_L2_LR_SSH_2.0 -d ./SWOT_L2_LR_SSH_2.0/ --start-date 2023-03-29T00:00:00Z \npodaac-data-subscriber -c SWOT_L2_NALT_OGDR_SSHA_2.0 -d ./data/SWOT_L2_NALT_OGDR_SSHA_2.0 --start-date 2023-08-01T00:00:00Z --end-date 2023-08-02T00:00:00Z\npodaac-data-downloader -c SWOT_L2_NALT_OGDR_SSHA_2.0 -d ./data/SWOT_L2_NALT_OGDR_SSHA_2.0 --start-date 2023-06-23T00:00:00Z --end-date 2023-06-23T06:00:00Z\npodaac-data-downloader -c SWOT_L2_LR_SSH_Basic_2.0 -d ./data -sd 2023-11-25T00:00:00Z -ed 2023-12-15T00:00:00Z -b=\"-22.0,-27,6.5,0\" --subset\n\nSee how to Download/Subscribe for more information on how to use the PO.DAAC subscriber/downloader including with spatial queries.\n\n\n\nSearch SWOT Passes over Time\nCNES developed this dedicated visualization tool for a quick look at where SWOT has been, where it is, and where it will be. Once you have selected the area of interest, click the Search button to search for SWOT passes. The results are displayed in a table and the swaths that intersect the area of interest are displayed on the map. Click on the marker to view the pass number.\nTo launch the Binder application, click on this link.\nTo launch jupyterlab in Binder, clink on this link.\nNote: The Binder versions of this are for casual quick looks, but we recommend for extensive use to access the Jupyter Notebook directly here.\n\n\nSWOT Spatial Coverage\nTo identify spatial coverage/search terms for the science 21-day orbit, PO.DAAC has created a KMZ file that has layers of the SWOT passes and tiles, with corresponding scene numbers identified in the pop-up when a location is selected (see screenshot below). Each layer has direct links to Earthdata Search results (the ‘search’ links) for corresponding files. The passes layer has useful information for all SWOT products, but links to the LR products specifically, the tiles layer is useful for HR products (L1B_HR_SLC, L2_HR_PIXC, and L2_HR_PIXCVec products use tile spatial extents while the L2_HR_Raster product uses scenes. L2_HR_RiverSP and L2_HR_LakeSp use continent-level passes).\nTo download the KMZ file, for the science 21-day orbit, click here.\nFor the Beta Pre-validated data KMZ that used the cal/val 1-day orbit, click here.\nThese files can be opened in the Google Earth desktop application and viewed like the following:\n\n\n\n\n\nScreenshot of pass and tile layer in spatial coverage KMZ file viewed in the Google Earth Desktop application\n\n\n\nThe KaRIN HR Masks true/false text pop up for tiles comes from the two different masks used for different parts of the year. 
The ‘Seasonal’ mask is used from Dec 1st to March 1st and removes part of the Canadian archipelago coverage to collect additional data over sea ice instead, indicated by true/false statements.\n\n\nTips for SWOT Spatial Search\nTo support spatial search of SWOT data products, the following naming conventions may be of help. Tip: use these shortname identifiers below when searching for SWOT data in the NASA Earthdata Search portal or programmatically using the CMR API and/or earthaccess python library.\nSWOT HR data products use Tiles, Scenes, or Continent-level Swaths IDs depending on the product, which define the spatial extent of what is in each file, as follows in the chart below. Along-track scene and tile numbers are numbered sequentially following the spacecraft flight direction, so the numbers increase from south to north for ascending passes and from north to south for descending passes. SWOT LR products use global swaths and nadir tracks that use pass numbers. See SWOT Spatial Coverage Section above for information to find the pass, tile or scene numbers.\n\n\n\n\n\n\n\n\n\n\nProduct (organized by…)\nFile Naming Convention\nNotes\n\n\n\n\n\n\nL2_HR_RiverSP L2_HR_LakeSP (continent-level swaths)\nPPP_CC\nPPP = pass number (valid range: 001-584) CC = continent code (options listed below) AF - Africa EU - Europe and Middle East SI - Siberia AS - Central and Southeast Asia AU - Australia and Oceania SA - South America NA - North America and Caribbean AR - North American Arctic GR - Greenland Ex: 013_NA = pass 013, North America\n\n\n\n\nL2_HR_PIXC L2_HR_PIXCVec L1B_HR_SLC (tiles)\nPPP_TTTC\nPPP = pass number (valid range: 001-584) TTT = tile number (valid range: 001-308) C = character L or R corresponding to left or right swaths Ex: 001_120R = pass 001, right swath, tile 120\n\n\n\n\nL2_HR_Raster (scenes)\nPPP_SSS\nPPP = pass number (valid range: 001-584) SSS = scene number (valid range: 001-154) Scenes correspond to 2 x 2 sets of tiles scene number x 2 = tile number Ex: 001_060 = pass 001, scene 60, corresponding to the same location as the PIXC/PIXCVec tile example above.\n\n\n\n\nL2_RAD_(O/I)GDR L2_NALT_(O/I)GDR(nadir) L2_LR_SSH (swath)\nPPP_\nPPP = pass number (valid range: 001-584) Ex: 013_ = pass 013\n\n\n\n\n\nIn Earthdata Search GUI:\n\nUse the top left Search Box and search with keywords, e.g. SWOT L2 HR\nSelect a collection of interest\nA Filter Granule filtering capability will show up on the left hand side of the GUI. Recall naming convention is _cycle_pass_spatialIdentifier_.\n\nUse wildcards to narrow down spatially, using one of the codes from the table above depending on your use case. Tip: use underscores ( _ ) with your wildcard key words for a more specific search.\nExample: *_NA_* will filter the RiverSP or LakeSP collection selected to only return those granules (files) that are part of the North America collection\nExample: *_004_256_* will filter the RiverSP or LakeSP collection selected to only return those granules (files) that correspond to cycle 004, pass 256\nExample: *_004_253_128* will filter the Raster collection selected to only return those granules (files) that correspond to cycle 004, pass 253, scene 128\n\nIn addition, you can also draw a region of interest (ROI) on the map, using the Spatial Search Filter icon or the Advanced Search under the main search box. These will help to filter what is returned for the spatial search. 
Tip: It is recommended that ROI searches are used together with wildcards described above for a more accurate search.\n\n\n\n\nAccess & Visualization\n\n\n\n\n\nAccess SWOT Hydrology data in the cloud | locally\n\n\nAccess SWOT Oceanography data in the cloud | locally\n\n\nSWOT Raster Multifile Access & Quality Flag Application in the cloud | locally\n\n\nHydrocron: Time series API Multi-reach tutorial - See Hydrocron documentation and more description under tools below. \n\n\nQuality Flag Tutorial - Quality Flag Tips for all products, specifically demonstrates SSHA 8-bit quality flag application\n\n\n\nData Story\n\nSWOT Hydrology Science Workflow in the Cloud - Retrieving SWOT attributes (WSE, width, slope) and plotting a longitudinal profile along a river or over a basin\n\n\n\nGIS workflows\n\nSWOT: Through a GIS Lens StoryMap\n\n\nShapefile exploration\n\n\nTransform SWOT Datetime field for use in GIS Software\n\n\n\nTransform\n\nHiTIDE subsetter for Sea Surface Height Products - select KaRIn instrument in sensors, see video tutorial here\n\n\nHydrocron: Time series API - Currently for rivers, see Hydrocron documentation and more description under tools below. \n\n\nTransform SWOT Hydrology lake shapefiles into time series - work around for lake time series while Hydrocron is under development to include lakes.\n\n\nNetCDF to Geotiff Conversion - mac or Linux | Windows\n\n\n\nTools\nHydrocron - an API that repackages the river shapefile dataset (L2_HR_RiverSP) into csv or GeoJSON formats that make time-series analysis easier. SWOT data is archived as individually timestamped shapefiles, which would otherwise require users to perform potentially thousands of file operations per river feature to view the data as a timeseries. Hydrocron makes this possible with a single API call.\nSWODLR - a system for generating on demand raster products from SWOT L2 raster data with custom resolutions, projections, and extents. -in development\nHiTIDE subsetter for Sea Surface Height Products - select KaRIn instrument in sensors, see video tutorial here\n\n\nSWORD of Science\nThe SWORD of Science (SoS) is a community-driven dataset produced for and from the execution of the Confluence workflow which is a cloud-based workflow that executes on SWOT observations to produce river discharge parameter estimates. Data granules contain two files, priors and results. The priors file contains prior information, such as in-situ gauge data and model output that is used to generate the discharge products. 
The results file contains the resulting river discharge data products.\n\nExplore river discharge\nExplore river discharge with gauge data\nPlot ALL river discharge algorithms\nVisualize river discharge", "crumbs": [ "Tutorials", "Dataset Specific", @@ -5590,7 +5562,7 @@ "href": "external/VisualizeDopplerScattData.html", "title": "S-MODE Workshop: Science Case Study Airborne Part 2", "section": "", - "text": "imported on: 2024-07-03\nThis notebook is from a different repository in NASA’s PO.DAAC, 2022-SMODE-Open-Data-Workshop\n%load_ext autoreload\n%autoreload 2\nimport sys\nsys.path.append('../src')\nfrom matplotlib import pyplot as plt\n%matplotlib inline\nfrom pathlib import Path\nimport numpy as np\nimport rioxarray\nimport xarray as xr\nfrom plot_dopplerscatt_data import make_streamplot_image\nimport warnings\nwarnings.simplefilter('ignore')", + "text": "imported on: 2024-07-05\nThis notebook is from a different repository in NASA’s PO.DAAC, 2022-SMODE-Open-Data-Workshop\n%load_ext autoreload\n%autoreload 2\nimport sys\nsys.path.append('../src')\nfrom matplotlib import pyplot as plt\n%matplotlib inline\nfrom pathlib import Path\nimport numpy as np\nimport rioxarray\nimport xarray as xr\nfrom plot_dopplerscatt_data import make_streamplot_image\nimport warnings\nwarnings.simplefilter('ignore')", "crumbs": [ "Tutorials", "Dataset Specific", @@ -5616,7 +5588,7 @@ "href": "external/ECCO_cloud_direct_access_s3.html", "title": "Direct Access to ECCO V4r4 Datasets in the Cloud", "section": "", - "text": "imported on: 2024-07-03\nThis notebook is from a different repository in NASA’s PO.DAAC, ECCO.", + "text": "imported on: 2024-07-05\nThis notebook is from a different repository in NASA’s PO.DAAC, ECCO.", "crumbs": [ "Tutorials", "Dataset Specific", @@ -5676,7 +5648,7 @@ "href": "external/ECCO_download_data.html", "title": "Access to ECCO V4r4 Datasets on a Local Machine", "section": "", - "text": "imported on: 2024-07-03\nThis notebook is from a different repository in NASA’s PO.DAAC, ECCO.\nDuped+slightly modified version of the s3 access ipynb. Tested on JPL-issued macbook and my linux box. It starts by setting up a most trusted strategy for batch downloads behind URS ussing curl/wget. Will attempt to add line(s) to your netrc file if needed btw; then it writes your urs cookies to a local file that should effectively “pre-authenticate” future download sessions for those sub domains.", + "text": "imported on: 2024-07-05\nThis notebook is from a different repository in NASA’s PO.DAAC, ECCO.\nDuped+slightly modified version of the s3 access ipynb. Tested on JPL-issued macbook and my linux box. It starts by setting up a most trusted strategy for batch downloads behind URS ussing curl/wget. 
Will attempt to add line(s) to your netrc file if needed btw; then it writes your urs cookies to a local file that should effectively “pre-authenticate” future download sessions for those sub domains.", "crumbs": [ "Tutorials", "Dataset Specific", @@ -5714,7 +5686,7 @@ "href": "external/SWOT_SSH_dashboard.html", "title": "Integrating Dask, Kerchunk, Zarr and Xarray", "section": "", - "text": "imported on: 2024-07-03\nThis notebook is from a different repository in NASA’s PO.DAAC, the-coding-club.", + "text": "imported on: 2024-07-05\nThis notebook is from a different repository in NASA’s PO.DAAC, the-coding-club.", "crumbs": [ "Advanced Cloud", "Dask, Kerchunk, & Zarr" @@ -5912,7 +5884,7 @@ "href": "external/July_2022_Earthdata_Webinar.html", "title": "Earthdata Webinar", "section": "", - "text": "imported on: 2024-07-03\nThis notebook is from a different repository in NASA’s PO.DAAC, the-coding-club", + "text": "imported on: 2024-07-05\nThis notebook is from a different repository in NASA’s PO.DAAC, the-coding-club", "crumbs": [ "Webinars", "July 2022 Earthdata Webinar Notebook" @@ -6089,7 +6061,7 @@ "href": "external/DownloadDopplerScattData.html", "title": "S-MODE Workshop: Science Case Study Airborne Part 1", "section": "", - "text": "imported on: 2024-07-03\nThis notebook is from a different repository in NASA’s PO.DAAC, 2022-SMODE-Open-Data-Workshop", + "text": "imported on: 2024-07-05\nThis notebook is from a different repository in NASA’s PO.DAAC, 2022-SMODE-Open-Data-Workshop", "crumbs": [ "Tutorials", "Dataset Specific", @@ -6166,7 +6138,7 @@ "href": "external/zarr-eosdis-store.html", "title": "Zarr Example", "section": "", - "text": "imported on: 2024-07-03\n\nThis notebook is from NASA’s Zarr EOSDIS store notebook\n\n\nThe original source for this document is https://github.com/nasa/zarr-eosdis-store/blob/main/presentation/example.ipynb\n\n\nzarr-eosdis-store example\nInstall dependencies\n\nimport sys\n\n# zarr and zarr-eosdis-store, the main libraries being demoed\n!{sys.executable} -m pip install zarr zarr-eosdis-store\n\n# Notebook-specific libraries\n!{sys.executable} -m pip install matplotlib\n\nImportant: To run this, you must first create an Earthdata Login account (https://urs.earthdata.nasa.gov) and place your credentials in ~/.netrc e.g.:\n machine urs.earthdata.nasa.gov login YOUR_USER password YOUR_PASSWORD\nNever share or commit your password / .netrc file!\nBasic usage. After these lines, we work with ds as though it were a normal Zarr dataset\n\nimport zarr\nfrom eosdis_store import EosdisStore\n\nurl = 'https://archive.podaac.earthdata.nasa.gov/podaac-ops-cumulus-protected/MUR-JPL-L4-GLOB-v4.1/20210715090000-JPL-L4_GHRSST-SSTfnd-MUR-GLOB-v02.0-fv04.1.nc'\n\nds = zarr.open(EosdisStore(url))\n\nView the file’s variable structure\n\nprint(ds.tree())\n\n/\n ├── analysed_sst (1, 17999, 36000) int16\n ├── analysis_error (1, 17999, 36000) int16\n ├── dt_1km_data (1, 17999, 36000) int16\n ├── lat (17999,) float32\n ├── lon (36000,) float32\n ├── mask (1, 17999, 36000) int16\n ├── sea_ice_fraction (1, 17999, 36000) int16\n ├── sst_anomaly (1, 17999, 36000) int16\n └── time (1,) int32\n\n\nFetch the latitude and longitude arrays and determine start and end indices for our area of interest. In this case, we’re looking at the Great Lakes, which have a nice, recognizeable shape. 
Latitudes 41 to 49, longitudes -93 to 76.\n\nlats = ds['lat'][:]\nlons = ds['lon'][:]\nlat_range = slice(lats.searchsorted(41), lats.searchsorted(49))\nlon_range = slice(lons.searchsorted(-93), lons.searchsorted(-76))\n\nGet the analysed sea surface temperature variable over our area of interest and apply scale factor and offset from the file metadata. In a future release, scale factor and add offset will be automatically applied.\n\nvar = ds['analysed_sst']\nanalysed_sst = var[0, lat_range, lon_range] * var.attrs['scale_factor'] + var.attrs['add_offset']\n\nDraw a pretty picture\n\nfrom matplotlib import pyplot as plt\n\nplt.rcParams[\"figure.figsize\"] = [16, 8]\nplt.imshow(analysed_sst[::-1, :])\nNone\n\n\n\n\n\n\n\n\nIn a dozen lines of code and a few seconds, we have managed to fetch and visualize the 3.2 megabyte we needed from a 732 megabyte file using the original archive URL and no processing services", + "text": "imported on: 2024-07-05\n\nThis notebook is from NASA’s Zarr EOSDIS store notebook\n\n\nThe original source for this document is https://github.com/nasa/zarr-eosdis-store/blob/main/presentation/example.ipynb\n\n\nzarr-eosdis-store example\nInstall dependencies\n\nimport sys\n\n# zarr and zarr-eosdis-store, the main libraries being demoed\n!{sys.executable} -m pip install zarr zarr-eosdis-store\n\n# Notebook-specific libraries\n!{sys.executable} -m pip install matplotlib\n\nImportant: To run this, you must first create an Earthdata Login account (https://urs.earthdata.nasa.gov) and place your credentials in ~/.netrc e.g.:\n machine urs.earthdata.nasa.gov login YOUR_USER password YOUR_PASSWORD\nNever share or commit your password / .netrc file!\nBasic usage. After these lines, we work with ds as though it were a normal Zarr dataset\n\nimport zarr\nfrom eosdis_store import EosdisStore\n\nurl = 'https://archive.podaac.earthdata.nasa.gov/podaac-ops-cumulus-protected/MUR-JPL-L4-GLOB-v4.1/20210715090000-JPL-L4_GHRSST-SSTfnd-MUR-GLOB-v02.0-fv04.1.nc'\n\nds = zarr.open(EosdisStore(url))\n\nView the file’s variable structure\n\nprint(ds.tree())\n\n/\n ├── analysed_sst (1, 17999, 36000) int16\n ├── analysis_error (1, 17999, 36000) int16\n ├── dt_1km_data (1, 17999, 36000) int16\n ├── lat (17999,) float32\n ├── lon (36000,) float32\n ├── mask (1, 17999, 36000) int16\n ├── sea_ice_fraction (1, 17999, 36000) int16\n ├── sst_anomaly (1, 17999, 36000) int16\n └── time (1,) int32\n\n\nFetch the latitude and longitude arrays and determine start and end indices for our area of interest. In this case, we’re looking at the Great Lakes, which have a nice, recognizeable shape. Latitudes 41 to 49, longitudes -93 to 76.\n\nlats = ds['lat'][:]\nlons = ds['lon'][:]\nlat_range = slice(lats.searchsorted(41), lats.searchsorted(49))\nlon_range = slice(lons.searchsorted(-93), lons.searchsorted(-76))\n\nGet the analysed sea surface temperature variable over our area of interest and apply scale factor and offset from the file metadata. 
In a future release, scale factor and add offset will be automatically applied.\n\nvar = ds['analysed_sst']\nanalysed_sst = var[0, lat_range, lon_range] * var.attrs['scale_factor'] + var.attrs['add_offset']\n\nDraw a pretty picture\n\nfrom matplotlib import pyplot as plt\n\nplt.rcParams[\"figure.figsize\"] = [16, 8]\nplt.imshow(analysed_sst[::-1, :])\nNone\n\n\n\n\n\n\n\n\nIn a dozen lines of code and a few seconds, we have managed to fetch and visualize the 3.2 megabyte we needed from a 732 megabyte file using the original archive URL and no processing services", "crumbs": [ "Tutorials", "Cloud Optimized Examples", diff --git a/sitemap.xml b/sitemap.xml index 4b91e2bb..3f478b2b 100644 --- a/sitemap.xml +++ b/sitemap.xml @@ -2,462 +2,458 @@ https://podaac.github.io/tutorials/external/access-local-python.html - 2024-07-03T23:30:58.076Z + 2024-07-05T19:42:37.293Z https://podaac.github.io/tutorials/external/Direct_S3_Access_NetCDF.html - 2024-07-03T23:30:22.268Z + 2024-07-05T19:41:56.826Z https://podaac.github.io/tutorials/external/insitu_dataviz_demo.html - 2024-07-03T23:30:57.300Z + 2024-07-05T19:42:36.501Z https://podaac.github.io/tutorials/external/SWOT_to_kerchunk.html - 2024-07-03T23:30:59.240Z + 2024-07-05T19:42:38.297Z https://podaac.github.io/tutorials/external/access-cloud-python.html - 2024-07-03T23:30:57.940Z + 2024-07-05T19:42:37.133Z https://podaac.github.io/tutorials/external/zarr_access.html - 2024-07-03T23:30:54.484Z + 2024-07-05T19:42:33.797Z https://podaac.github.io/tutorials/external/cof-zarr-reformat.html - 2024-07-03T23:30:54.024Z + 2024-07-05T19:42:33.533Z https://podaac.github.io/tutorials/external/Introduction_to_xarray.html - 2024-07-03T23:30:55.548Z + 2024-07-05T19:42:34.973Z https://podaac.github.io/tutorials/external/NASA_Earthdata_Authentication.html - 2024-07-03T23:30:22.308Z + 2024-07-05T19:41:56.870Z https://podaac.github.io/tutorials/external/Subscriber.html - 2024-07-03T23:30:58.824Z + 2024-07-05T19:42:37.961Z https://podaac.github.io/tutorials/external/read_data.html - 2024-07-03T23:30:58.476Z + 2024-07-05T19:42:37.749Z https://podaac.github.io/tutorials/index.html - 2024-07-03T23:30:22.568Z + 2024-07-05T19:41:57.130Z https://podaac.github.io/tutorials/quarto_text/Advanced.html - 2024-07-03T23:30:23.212Z + 2024-07-05T19:41:57.726Z https://podaac.github.io/tutorials/quarto_text/DatasetSpecificExamples.html - 2024-07-03T23:30:23.212Z + 2024-07-05T19:41:57.726Z https://podaac.github.io/tutorials/quarto_text/TechGuides.html - 2024-07-03T23:30:23.212Z + 2024-07-05T19:41:57.726Z https://podaac.github.io/tutorials/quarto_text/Experimental.html - 2024-07-03T23:30:23.212Z + 2024-07-05T19:41:57.726Z https://podaac.github.io/tutorials/quarto_text/DataSubscriberDownloader.html - 2024-07-03T23:30:23.212Z + 2024-07-05T19:41:57.726Z https://podaac.github.io/tutorials/quarto_text/Dask_Coiled.html - 2024-07-03T23:30:23.212Z + 2024-07-05T19:41:57.726Z https://podaac.github.io/tutorials/quarto_text/ECCO.html - 2024-07-03T23:30:23.212Z + 2024-07-05T19:41:57.726Z https://podaac.github.io/tutorials/quarto_text/GHRSST.html - 2024-07-03T23:30:23.212Z + 2024-07-05T19:41:57.726Z https://podaac.github.io/tutorials/quarto_text/Webinars.html - 2024-07-03T23:30:23.212Z + 2024-07-05T19:41:57.726Z https://podaac.github.io/tutorials/quarto_text/HowTo.html - 2024-07-03T23:30:23.212Z + 2024-07-05T19:41:57.726Z https://podaac.github.io/tutorials/quarto_text/SMODE.html - 2024-07-03T23:30:23.212Z + 2024-07-05T19:41:57.726Z 
https://podaac.github.io/tutorials/quarto_text/CloudOptimizedExamples.html - 2024-07-03T23:30:23.212Z + 2024-07-05T19:41:57.726Z https://podaac.github.io/tutorials/notebooks/HUC Feature Translation Service Examples-updated-20210804.html - 2024-07-03T23:30:22.696Z + 2024-07-05T19:41:57.258Z https://podaac.github.io/tutorials/notebooks/harmony subsetting/Harmony L2 Subsetter.html - 2024-07-03T23:30:23.088Z + 2024-07-05T19:41:57.614Z https://podaac.github.io/tutorials/notebooks/DataStories/SWOTHR_Science_Application.html - 2024-07-03T23:30:22.656Z + 2024-07-05T19:41:57.218Z https://podaac.github.io/tutorials/notebooks/sentinel-6/S6_Cloud_Notebook.html - 2024-07-03T23:30:23.212Z + 2024-07-05T19:41:57.726Z https://podaac.github.io/tutorials/notebooks/Cloud L2SS subset and plot - JH.html - 2024-07-03T23:30:22.584Z + 2024-07-05T19:41:57.146Z https://podaac.github.io/tutorials/notebooks/MODIS_L2P_SST_DataCube.html - 2024-07-03T23:30:22.696Z + 2024-07-05T19:41:57.258Z https://podaac.github.io/tutorials/notebooks/l2-regridding/reprojection notebook.html - 2024-07-03T23:30:23.100Z + 2024-07-05T19:41:57.626Z https://podaac.github.io/tutorials/notebooks/Pre-SWOT_Numerical_Simulation_Demo.html - 2024-07-03T23:30:22.700Z + 2024-07-05T19:41:57.262Z https://podaac.github.io/tutorials/notebooks/podaac_cmr_s3_links.html - 2024-07-03T23:30:23.204Z + 2024-07-05T19:41:57.722Z https://podaac.github.io/tutorials/notebooks/Tutorials_TEMPLATE.html - 2024-07-03T23:30:22.704Z + 2024-07-05T19:41:57.266Z https://podaac.github.io/tutorials/notebooks/aws_lambda_sst/docs/documentation.html - 2024-07-03T23:30:22.704Z + 2024-07-05T19:41:57.266Z https://podaac.github.io/tutorials/notebooks/aws_lambda_sst/sst-global-mean-exploratory.html - 2024-07-03T23:30:22.712Z + 2024-07-05T19:41:57.274Z https://podaac.github.io/tutorials/notebooks/datasets/enso_MUR_tutorial_final.html - 2024-07-03T23:30:23.060Z + 2024-07-05T19:41:57.590Z https://podaac.github.io/tutorials/notebooks/datasets/SWOT_L4_DAWG_SOS_DISCHARGE_gauges.html - 2024-07-03T23:30:22.868Z + 2024-07-05T19:41:57.422Z https://podaac.github.io/tutorials/notebooks/datasets/DirectCloud_Access_SWOT_Oceanography.html - 2024-07-03T23:30:22.716Z + 2024-07-05T19:41:57.278Z https://podaac.github.io/tutorials/notebooks/datasets/SWOT_Raster_Notebook_local.html - 2024-07-03T23:30:23.048Z + 2024-07-05T19:41:57.578Z https://podaac.github.io/tutorials/notebooks/datasets/SWOT_L4_DAWG_SOS_DISCHARGE.html - 2024-07-03T23:30:22.868Z + 2024-07-05T19:41:57.422Z https://podaac.github.io/tutorials/notebooks/datasets/MUR_SST_Washington_Comparison.html - 2024-07-03T23:30:22.788Z + 2024-07-05T19:41:57.346Z https://podaac.github.io/tutorials/notebooks/datasets/OPERA_GIS_Notebook.html - 2024-07-03T23:30:22.808Z + 2024-07-05T19:41:57.366Z https://podaac.github.io/tutorials/notebooks/datasets/SWOT_quality_flag_demo.html - 2024-07-03T23:30:23.048Z + 2024-07-05T19:41:57.578Z https://podaac.github.io/tutorials/notebooks/datasets/SWOT_L4_DAWG_SOS_DISCHARGE_visualize.html - 2024-07-03T23:30:22.872Z + 2024-07-05T19:41:57.430Z https://podaac.github.io/tutorials/notebooks/Advanced_cloud/basic_dask.html - 2024-07-03T23:30:22.568Z + 2024-07-05T19:41:57.130Z https://podaac.github.io/tutorials/notebooks/Advanced_cloud/dask_delayed_01.html - 2024-07-03T23:30:22.584Z + 2024-07-05T19:41:57.146Z https://podaac.github.io/tutorials/notebooks/SWOT-EA-2021/Estuary_explore_inCloud_zarr.html - 2024-07-03T23:30:22.704Z + 2024-07-05T19:41:57.266Z 
https://podaac.github.io/tutorials/notebooks/meetings_workshops/swot_ea_hackweek_2022/HLS-WaterDetection-Local.html - 2024-07-03T23:30:23.104Z + 2024-07-05T19:41:57.630Z https://podaac.github.io/tutorials/notebooks/meetings_workshops/workshop_osm_2022/Cloud_DirectDownload_AmazonRiver_Estuary_Exploration.html - 2024-07-03T23:30:23.192Z + 2024-07-05T19:41:57.710Z https://podaac.github.io/tutorials/notebooks/meetings_workshops/workshop_osm_2022/ECCO_ssh_sst_corr.html - 2024-07-03T23:30:23.196Z + 2024-07-05T19:41:57.714Z https://podaac.github.io/tutorials/notebooks/meetings_workshops/swot_ea_workshop_sept2022/SWOTHR_s3Access.html - 2024-07-03T23:30:23.160Z + 2024-07-05T19:41:57.682Z https://podaac.github.io/tutorials/notebooks/PODAAC_Data_Subscriber.html - 2024-07-03T23:30:22.696Z + 2024-07-05T19:41:57.258Z https://podaac.github.io/tutorials/notebooks/GIS/GDAL_NetCDF_GeoTIFF.html - 2024-07-03T23:30:22.664Z + 2024-07-05T19:41:57.222Z https://podaac.github.io/tutorials/notebooks/GIS/SWOT_datetime_GIS.html - 2024-07-03T23:30:22.696Z + 2024-07-05T19:41:57.258Z https://podaac.github.io/tutorials/notebooks/GIS/MUR_SSTA_QGIS.html - 2024-07-03T23:30:22.664Z + 2024-07-05T19:41:57.222Z https://podaac.github.io/tutorials/notebooks/harmony_concatenation/Harmony_Subsetting_Concatenation.html - 2024-07-03T23:30:23.100Z - - - https://podaac.github.io/tutorials/mcc_api/metadata_compliance_checker_API.html - 2024-07-03T23:30:22.568Z + 2024-07-05T19:41:57.622Z https://podaac.github.io/tutorials/notebooks/harmony_concatenation/Harmony_Concatenation.html - 2024-07-03T23:30:23.092Z + 2024-07-05T19:41:57.618Z https://podaac.github.io/tutorials/notebooks/batch_download_podaac_data.html - 2024-07-03T23:30:22.712Z + 2024-07-05T19:41:57.274Z https://podaac.github.io/tutorials/notebooks/GIS/SWOTshp_CSVconversion.html - 2024-07-03T23:30:22.696Z + 2024-07-05T19:41:57.258Z https://podaac.github.io/tutorials/notebooks/GIS/SWOT_GISshapefiles.html - 2024-07-03T23:30:22.696Z + 2024-07-05T19:41:57.258Z https://podaac.github.io/tutorials/notebooks/GIS/Subscriber_nc_to_tif_SWOT.html - 2024-07-03T23:30:22.696Z + 2024-07-05T19:41:57.258Z https://podaac.github.io/tutorials/notebooks/meetings_workshops/arctic_2019.html - 2024-07-03T23:30:23.104Z + 2024-07-05T19:41:57.630Z https://podaac.github.io/tutorials/notebooks/meetings_workshops/workshop_osm_2022/S6_OPeNDAP_Access_Gridding.html - 2024-07-03T23:30:23.204Z + 2024-07-05T19:41:57.718Z https://podaac.github.io/tutorials/notebooks/meetings_workshops/workshop_osm_2022/CloudAWS_AmazonRiver_Estuary_Exploration.html - 2024-07-03T23:30:23.176Z + 2024-07-05T19:41:57.694Z https://podaac.github.io/tutorials/notebooks/meetings_workshops/swot_ea_hackweek_2022/HLS-WaterDetection-Cloud.html - 2024-07-03T23:30:23.104Z + 2024-07-05T19:41:57.630Z https://podaac.github.io/tutorials/notebooks/meetings_workshops/swot_ea_hackweek_2022/River_Heights_in_the_Cloud.html - 2024-07-03T23:30:23.160Z + 2024-07-05T19:41:57.682Z https://podaac.github.io/tutorials/notebooks/SWOT-EA-2021/Colocate_satellite_insitu_ocean.html - 2024-07-03T23:30:22.704Z + 2024-07-05T19:41:57.266Z https://podaac.github.io/tutorials/notebooks/Advanced_cloud/coiled_cluster_01.html - 2024-07-03T23:30:22.572Z + 2024-07-05T19:41:57.134Z https://podaac.github.io/tutorials/notebooks/Advanced_cloud/coiled_function_01.html - 2024-07-03T23:30:22.580Z + 2024-07-05T19:41:57.142Z https://podaac.github.io/tutorials/notebooks/datasets/SWOTHR_localmachine.html - 2024-07-03T23:30:22.832Z + 2024-07-05T19:41:57.390Z 
https://podaac.github.io/tutorials/notebooks/datasets/OPERA_GIS_Cloud.html - 2024-07-03T23:30:22.800Z + 2024-07-05T19:41:57.358Z https://podaac.github.io/tutorials/notebooks/datasets/Localmachine_SWOT_Oceanography.html - 2024-07-03T23:30:22.752Z + 2024-07-05T19:41:57.310Z https://podaac.github.io/tutorials/notebooks/datasets/SWOT_L4_DAWG_SOS_DISCHARGE_gauges_plot_all.html - 2024-07-03T23:30:22.868Z + 2024-07-05T19:41:57.426Z https://podaac.github.io/tutorials/notebooks/datasets/Hydrocron_SWOT_timeseries_examples.html - 2024-07-03T23:30:22.752Z + 2024-07-05T19:41:57.310Z https://podaac.github.io/tutorials/notebooks/datasets/SWOT_Raster_Notebook_cloud.html - 2024-07-03T23:30:22.984Z + 2024-07-05T19:41:57.530Z https://podaac.github.io/tutorials/notebooks/datasets/SWOTHR_s3Access.html - 2024-07-03T23:30:22.864Z + 2024-07-05T19:41:57.422Z https://podaac.github.io/tutorials/notebooks/datasets/smap_imerg_tutorial.html - 2024-07-03T23:30:23.088Z + 2024-07-05T19:41:57.614Z https://podaac.github.io/tutorials/notebooks/datasets/OISSS_L4_multimission_monthly_v1.html - 2024-07-03T23:30:22.796Z + 2024-07-05T19:41:57.350Z https://podaac.github.io/tutorials/notebooks/aws_lambda_sst/podaac-lambda-invoke-sst-global-mean.html - 2024-07-03T23:30:22.712Z + 2024-07-05T19:41:57.274Z https://podaac.github.io/tutorials/notebooks/SearchDownload_SWOTviaCMR.html - 2024-07-03T23:30:22.704Z + 2024-07-05T19:41:57.266Z https://podaac.github.io/tutorials/notebooks/s3/S3-Access.html - 2024-07-03T23:30:23.208Z + 2024-07-05T19:41:57.722Z https://podaac.github.io/tutorials/notebooks/SWORD_River_Demo.html - 2024-07-03T23:30:22.700Z + 2024-07-05T19:41:57.262Z https://podaac.github.io/tutorials/notebooks/PODAAC_CMR_Shapefile_Search_MODIS_UAT.html - 2024-07-03T23:30:22.696Z + 2024-07-05T19:41:57.258Z https://podaac.github.io/tutorials/notebooks/opendap/MUR-OPeNDAP.html - 2024-07-03T23:30:23.204Z + 2024-07-05T19:41:57.722Z https://podaac.github.io/tutorials/notebooks/podaac_cmr_tutorial.html - 2024-07-03T23:30:23.208Z + 2024-07-05T19:41:57.722Z https://podaac.github.io/tutorials/notebooks/sentinel-6/Access_Sentinel6_By_CyclePass.html - 2024-07-03T23:30:23.208Z + 2024-07-05T19:41:57.726Z https://podaac.github.io/tutorials/notebooks/sentinel-6/Access_Sentinel6_NRT.html - 2024-07-03T23:30:23.208Z + 2024-07-05T19:41:57.726Z https://podaac.github.io/tutorials/notebooks/DataStories/eof_example_ersst.html - 2024-07-03T23:30:22.664Z + 2024-07-05T19:41:57.222Z https://podaac.github.io/tutorials/notebooks/harmony subsetting/shapefile_subset.html - 2024-07-03T23:30:23.092Z + 2024-07-05T19:41:57.614Z https://podaac.github.io/tutorials/notebooks/Harmony API.html - 2024-07-03T23:30:22.696Z + 2024-07-05T19:41:57.258Z https://podaac.github.io/tutorials/quarto_text/ScienceDataStories.html - 2024-07-03T23:30:23.212Z + 2024-07-05T19:41:57.726Z https://podaac.github.io/tutorials/quarto_text/SWOT.html - 2024-07-03T23:30:23.212Z + 2024-07-05T19:41:57.726Z https://podaac.github.io/tutorials/quarto_text/Sentinel6MF.html - 2024-07-03T23:30:23.212Z + 2024-07-05T19:41:57.726Z https://podaac.github.io/tutorials/quarto_text/SMAP.html - 2024-07-03T23:30:23.212Z + 2024-07-05T19:41:57.726Z https://podaac.github.io/tutorials/quarto_text/CloudvsLocalWorkflows.html - 2024-07-03T23:30:23.212Z + 2024-07-05T19:41:57.726Z https://podaac.github.io/tutorials/quarto_text/GIS.html - 2024-07-03T23:30:23.212Z + 2024-07-05T19:41:57.726Z https://podaac.github.io/tutorials/quarto_text/OPERA.html - 2024-07-03T23:30:23.212Z + 2024-07-05T19:41:57.726Z 
https://podaac.github.io/tutorials/quarto_text/Questions.html - 2024-07-03T23:30:23.212Z + 2024-07-05T19:41:57.726Z https://podaac.github.io/tutorials/quarto_text/Contribute.html - 2024-07-03T23:30:23.212Z + 2024-07-05T19:41:57.726Z https://podaac.github.io/tutorials/quarto_text/Workshops.html - 2024-07-03T23:30:23.212Z + 2024-07-05T19:41:57.726Z https://podaac.github.io/tutorials/quarto_text/cheatsheet.html - 2024-07-03T23:30:23.212Z + 2024-07-05T19:41:57.726Z https://podaac.github.io/tutorials/quarto_text/Tutorials.html - 2024-07-03T23:30:23.212Z + 2024-07-05T19:41:57.726Z https://podaac.github.io/tutorials/external/VisualizeDopplerScattData.html - 2024-07-03T23:30:56.924Z + 2024-07-05T19:42:36.117Z https://podaac.github.io/tutorials/external/ECCO_cloud_direct_access_s3.html - 2024-07-03T23:30:57.496Z + 2024-07-05T19:42:36.729Z https://podaac.github.io/tutorials/external/Downloader.html - 2024-07-03T23:30:58.652Z + 2024-07-05T19:42:37.857Z https://podaac.github.io/tutorials/external/ECCO_download_data.html - 2024-07-03T23:30:57.780Z + 2024-07-05T19:42:36.937Z https://podaac.github.io/tutorials/external/access-local-opendap.html - 2024-07-03T23:30:58.172Z + 2024-07-05T19:42:37.405Z https://podaac.github.io/tutorials/external/SWOT_SSH_dashboard.html - 2024-07-03T23:31:03.073Z + 2024-07-05T19:42:42.041Z https://podaac.github.io/tutorials/external/July_2022_Earthdata_Webinar.html - 2024-07-03T23:30:55.872Z + 2024-07-05T19:42:35.189Z https://podaac.github.io/tutorials/external/earthdata_search.html - 2024-07-03T23:30:58.268Z + 2024-07-05T19:42:37.533Z https://podaac.github.io/tutorials/external/DownloadDopplerScattData.html - 2024-07-03T23:30:55.992Z + 2024-07-05T19:42:35.333Z https://podaac.github.io/tutorials/external/find_data_programmatically.html - 2024-07-03T23:30:58.372Z + 2024-07-05T19:42:37.641Z https://podaac.github.io/tutorials/external/zarr-eosdis-store.html - 2024-07-03T23:30:53.828Z + 2024-07-05T19:42:33.261Z