Skip to content

Re-Run Unverified PRs #2

Re-Run Unverified PRs

Re-Run Unverified PRs #2

GitHub Actions / Regression test results for uat failed Apr 28, 2024 in 0s

3 fail, 77 pass in 54m 11s

80 tests total:  77 ✅ passed   0 💤 skipped   3 ❌ failed
 1 suite,  1 file,  54m 11s ⏱️ total duration

Results for commit a81e4df.

Annotations

Check warning on line 0 in tests.verify_collection

See this annotation in the file changed.

@github-actions github-actions / Regression test results for uat

test_concatenate[C1256783391-POCLOUD] (tests.verify_collection) failed

test-results/uat_test_report.xml [took 21s]
Raw output
OSError: [Errno -51] NetCDF: Unknown file format: 'C1256783391-POCLOUD_merged.nc4'
collection_concept_id = 'C1256783391-POCLOUD'
harmony_env = <Environment.UAT: 3>
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfdWF0IiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...LYxyzmX5kruGmecTBH0oLAkmaBiJHqsuOHSUdInwX3m82oH20B1kE8Y17ULwkilNLfZchCfHMikRkFN48hGuCctO2qPYD3NGQ-xVWI4hO2xdPhDFhffAeg'

    @pytest.mark.timeout(600)
    def test_concatenate(collection_concept_id, harmony_env, bearer_token):
    
        max_results = 2
    
        harmony_client = harmony.Client(env=harmony_env, token=bearer_token)
        collection = harmony.Collection(id=collection_concept_id)
    
        request = harmony.Request(
            collection=collection,
            concatenate=True,
            max_results=max_results,
            skip_preview=True,
            format="application/x-netcdf4",
        )
    
        request.is_valid()
    
        print(harmony_client.request_as_curl(request))
    
        job1_id = harmony_client.submit(request)
    
        print(f'\n{job1_id}')
    
        print(harmony_client.status(job1_id))
    
        print('\nWaiting for the job to finish')
    
        results = harmony_client.result_json(job1_id)
    
        print('\nDownloading results:')
    
        futures = harmony_client.download_all(job1_id)
        file_names = [f.result() for f in futures]
        print('\nDone downloading.')
    
        filename = file_names[0]
    
        # Handle time dimension and variables dropping
>       merge_dataset = netCDF4.Dataset(filename, 'r')

verify_collection.py:475: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
src/netCDF4/_netCDF4.pyx:2469: in netCDF4._netCDF4.Dataset.__init__
    ???
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

>   ???
E   OSError: [Errno -51] NetCDF: Unknown file format: 'C1256783391-POCLOUD_merged.nc4'

src/netCDF4/_netCDF4.pyx:2028: OSError
--------------------------------- Captured Log ---------------------------------

Check warning on line 0 in tests.verify_collection

See this annotation in the file changed.

@github-actions github-actions / Regression test results for uat

test_concatenate[C1234724471-POCLOUD] (tests.verify_collection) failed

test-results/uat_test_report.xml [took 30s]
Raw output
KeyError: 'sea_surface_temperature_4um'
collection_concept_id = 'C1234724471-POCLOUD'
harmony_env = <Environment.UAT: 3>
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfdWF0IiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...LYxyzmX5kruGmecTBH0oLAkmaBiJHqsuOHSUdInwX3m82oH20B1kE8Y17ULwkilNLfZchCfHMikRkFN48hGuCctO2qPYD3NGQ-xVWI4hO2xdPhDFhffAeg'

    @pytest.mark.timeout(600)
    def test_concatenate(collection_concept_id, harmony_env, bearer_token):
    
        max_results = 2
    
        harmony_client = harmony.Client(env=harmony_env, token=bearer_token)
        collection = harmony.Collection(id=collection_concept_id)
    
        request = harmony.Request(
            collection=collection,
            concatenate=True,
            max_results=max_results,
            skip_preview=True,
            format="application/x-netcdf4",
        )
    
        request.is_valid()
    
        print(harmony_client.request_as_curl(request))
    
        job1_id = harmony_client.submit(request)
    
        print(f'\n{job1_id}')
    
        print(harmony_client.status(job1_id))
    
        print('\nWaiting for the job to finish')
    
        results = harmony_client.result_json(job1_id)
    
        print('\nDownloading results:')
    
        futures = harmony_client.download_all(job1_id)
        file_names = [f.result() for f in futures]
        print('\nDone downloading.')
    
        filename = file_names[0]
    
        # Handle time dimension and variables dropping
        merge_dataset = netCDF4.Dataset(filename, 'r')
    
        headers = {
            "Authorization": f"Bearer {bearer_token}"
        }
    
        original_files = merge_dataset.variables['subset_files']
        history_json = json.loads(merge_dataset.history_json)
        assert len(original_files) == max_results
    
        for url in history_json[0].get("derived_from"):
            local_file_name = os.path.basename(url)
            download_file(url, local_file_name, headers)
    
        for i, file in enumerate(original_files):
            origin_dataset = netCDF4.Dataset(file)
>           verify_groups(merge_dataset, origin_dataset, i, file=file)

verify_collection.py:491: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
verify_collection.py:417: in verify_groups
    verify_variables(merged_group, origin_group, subset_index, both_merged)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

merged_group = <class 'netCDF4._netCDF4.Dataset'>
root group (NETCDF4 data model, file format HDF5):
    Conventions: CF-1.7, ACDD-1...., time, nj, ni), int8 wind_speed(subset_index, time, nj, ni), int8 dt_analysis(subset_index, time, nj, ni)
    groups: 
origin_group = <class 'netCDF4._netCDF4.Dataset'>
root group (NETCDF4 data model, file format HDF5):
    Conventions: CF-1.7, ACDD-1....8 sses_standard_deviation_4um(time, nj, ni), int8 wind_speed(time, nj, ni), int8 dt_analysis(time, nj, ni)
    groups: 
subset_index = 1, both_merged = False

    def verify_variables(merged_group, origin_group, subset_index, both_merged):
        for var in origin_group.variables:
>           merged_var = merged_group.variables[var]
E           KeyError: 'sea_surface_temperature_4um'

verify_collection.py:386: KeyError
--------------------------------- Captured Log ---------------------------------

Check warning on line 0 in tests.verify_collection

See this annotation in the file changed.

@github-actions github-actions / Regression test results for uat

test_concatenate[C1238538224-POCLOUD] (tests.verify_collection) failed

test-results/uat_test_report.xml [took 22s]
Raw output
requests.exceptions.ConnectionError: ('Connection aborted.', RemoteDisconnected('Remote end closed connection without response'))
self = <urllib3.connectionpool.HTTPSConnectionPool object at 0x7f276f149390>
method = 'GET'
url = '/podaac-uat-cumulus-protected/JASON_CS_S6A_L2_ALT_HR_STD_OST_NRT_F/S6A_P4_2__HR_STD__NR_002_090_20180622T101035_20180622T102035_F00.nc'
body = None
headers = {'User-Agent': 'python-requests/2.31.0', 'Accept-Encoding': 'gzip, deflate', 'Accept': '*/*', 'Connection': 'keep-aliv...YxyzmX5kruGmecTBH0oLAkmaBiJHqsuOHSUdInwX3m82oH20B1kE8Y17ULwkilNLfZchCfHMikRkFN48hGuCctO2qPYD3NGQ-xVWI4hO2xdPhDFhffAeg'}
retries = Retry(total=0, connect=None, read=False, redirect=None, status=None)
redirect = False, assert_same_host = False
timeout = Timeout(connect=None, read=None, total=None), pool_timeout = None
release_conn = False, chunked = False, body_pos = None, preload_content = False
decode_content = False, response_kw = {}
parsed_url = Url(scheme=None, auth=None, host=None, port=None, path='/podaac-uat-cumulus-protected/JASON_CS_S6A_L2_ALT_HR_STD_OST_NRT_F/S6A_P4_2__HR_STD__NR_002_090_20180622T101035_20180622T102035_F00.nc', query=None, fragment=None)
destination_scheme = None, conn = None, release_this_conn = True
http_tunnel_required = False, err = None, clean_exit = False

    def urlopen(  # type: ignore[override]
        self,
        method: str,
        url: str,
        body: _TYPE_BODY | None = None,
        headers: typing.Mapping[str, str] | None = None,
        retries: Retry | bool | int | None = None,
        redirect: bool = True,
        assert_same_host: bool = True,
        timeout: _TYPE_TIMEOUT = _DEFAULT_TIMEOUT,
        pool_timeout: int | None = None,
        release_conn: bool | None = None,
        chunked: bool = False,
        body_pos: _TYPE_BODY_POSITION | None = None,
        preload_content: bool = True,
        decode_content: bool = True,
        **response_kw: typing.Any,
    ) -> BaseHTTPResponse:
        """
        Get a connection from the pool and perform an HTTP request. This is the
        lowest level call for making a request, so you'll need to specify all
        the raw details.
    
        .. note::
    
           More commonly, it's appropriate to use a convenience method
           such as :meth:`request`.
    
        .. note::
    
           `release_conn` will only behave as expected if
           `preload_content=False` because we want to make
           `preload_content=False` the default behaviour someday soon without
           breaking backwards compatibility.
    
        :param method:
            HTTP request method (such as GET, POST, PUT, etc.)
    
        :param url:
            The URL to perform the request on.
    
        :param body:
            Data to send in the request body, either :class:`str`, :class:`bytes`,
            an iterable of :class:`str`/:class:`bytes`, or a file-like object.
    
        :param headers:
            Dictionary of custom headers to send, such as User-Agent,
            If-None-Match, etc. If None, pool headers are used. If provided,
            these headers completely replace any pool-specific headers.
    
        :param retries:
            Configure the number of retries to allow before raising a
            :class:`~urllib3.exceptions.MaxRetryError` exception.
    
            If ``None`` (default) will retry 3 times, see ``Retry.DEFAULT``. Pass a
            :class:`~urllib3.util.retry.Retry` object for fine-grained control
            over different types of retries.
            Pass an integer number to retry connection errors that many times,
            but no other types of errors. Pass zero to never retry.
    
            If ``False``, then retries are disabled and any exception is raised
            immediately. Also, instead of raising a MaxRetryError on redirects,
            the redirect response will be returned.
    
        :type retries: :class:`~urllib3.util.retry.Retry`, False, or an int.
    
        :param redirect:
            If True, automatically handle redirects (status codes 301, 302,
            303, 307, 308). Each redirect counts as a retry. Disabling retries
            will disable redirect, too.
    
        :param assert_same_host:
            If ``True``, will make sure that the host of the pool requests is
            consistent else will raise HostChangedError. When ``False``, you can
            use the pool on an HTTP proxy and request foreign hosts.
    
        :param timeout:
            If specified, overrides the default timeout for this one
            request. It may be a float (in seconds) or an instance of
            :class:`urllib3.util.Timeout`.
    
        :param pool_timeout:
            If set and the pool is set to block=True, then this method will
            block for ``pool_timeout`` seconds and raise EmptyPoolError if no
            connection is available within the time period.
    
        :param bool preload_content:
            If True, the response's body will be preloaded into memory.
    
        :param bool decode_content:
            If True, will attempt to decode the body based on the
            'content-encoding' header.
    
        :param release_conn:
            If False, then the urlopen call will not release the connection
            back into the pool once a response is received (but will release if
            you read the entire contents of the response such as when
            `preload_content=True`). This is useful if you're not preloading
            the response's content immediately. You will need to call
            ``r.release_conn()`` on the response ``r`` to return the connection
            back into the pool. If None, it takes the value of ``preload_content``
            which defaults to ``True``.
    
        :param bool chunked:
            If True, urllib3 will send the body using chunked transfer
            encoding. Otherwise, urllib3 will send the body using the standard
            content-length form. Defaults to False.
    
        :param int body_pos:
            Position to seek to in file-like body in the event of a retry or
            redirect. Typically this won't need to be set because urllib3 will
            auto-populate the value when needed.
        """
        parsed_url = parse_url(url)
        destination_scheme = parsed_url.scheme
    
        if headers is None:
            headers = self.headers
    
        if not isinstance(retries, Retry):
            retries = Retry.from_int(retries, redirect=redirect, default=self.retries)
    
        if release_conn is None:
            release_conn = preload_content
    
        # Check host
        if assert_same_host and not self.is_same_host(url):
            raise HostChangedError(self, url, retries)
    
        # Ensure that the URL we're connecting to is properly encoded
        if url.startswith("/"):
            url = to_str(_encode_target(url))
        else:
            url = to_str(parsed_url.url)
    
        conn = None
    
        # Track whether `conn` needs to be released before
        # returning/raising/recursing. Update this variable if necessary, and
        # leave `release_conn` constant throughout the function. That way, if
        # the function recurses, the original value of `release_conn` will be
        # passed down into the recursive call, and its value will be respected.
        #
        # See issue #651 [1] for details.
        #
        # [1] <https://github.com/urllib3/urllib3/issues/651>
        release_this_conn = release_conn
    
        http_tunnel_required = connection_requires_http_tunnel(
            self.proxy, self.proxy_config, destination_scheme
        )
    
        # Merge the proxy headers. Only done when not using HTTP CONNECT. We
        # have to copy the headers dict so we can safely change it without those
        # changes being reflected in anyone else's copy.
        if not http_tunnel_required:
            headers = headers.copy()  # type: ignore[attr-defined]
            headers.update(self.proxy_headers)  # type: ignore[union-attr]
    
        # Must keep the exception bound to a separate variable or else Python 3
        # complains about UnboundLocalError.
        err = None
    
        # Keep track of whether we cleanly exited the except block. This
        # ensures we do proper cleanup in finally.
        clean_exit = False
    
        # Rewind body position, if needed. Record current position
        # for future rewinds in the event of a redirect/retry.
        body_pos = set_file_position(body, body_pos)
    
        try:
            # Request a connection from the queue.
            timeout_obj = self._get_timeout(timeout)
            conn = self._get_conn(timeout=pool_timeout)
    
            conn.timeout = timeout_obj.connect_timeout  # type: ignore[assignment]
    
            # Is this a closed/new connection that requires CONNECT tunnelling?
            if self.proxy is not None and http_tunnel_required and conn.is_closed:
                try:
                    self._prepare_proxy(conn)
                except (BaseSSLError, OSError, SocketTimeout) as e:
                    self._raise_timeout(
                        err=e, url=self.proxy.url, timeout_value=conn.timeout
                    )
                    raise
    
            # If we're going to release the connection in ``finally:``, then
            # the response doesn't need to know about the connection. Otherwise
            # it will also try to release it and we'll have a double-release
            # mess.
            response_conn = conn if not release_conn else None
    
            # Make the request on the HTTPConnection object
>           response = self._make_request(
                conn,
                method,
                url,
                timeout=timeout_obj,
                body=body,
                headers=headers,
                chunked=chunked,
                retries=retries,
                response_conn=response_conn,
                preload_content=preload_content,
                decode_content=decode_content,
                **response_kw,
            )

../../../../.cache/pypoetry/virtualenvs/concise-autotest-tfGktGnJ-py3.10/lib/python3.10/site-packages/urllib3/connectionpool.py:793: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
../../../../.cache/pypoetry/virtualenvs/concise-autotest-tfGktGnJ-py3.10/lib/python3.10/site-packages/urllib3/connectionpool.py:537: in _make_request
    response = conn.getresponse()
../../../../.cache/pypoetry/virtualenvs/concise-autotest-tfGktGnJ-py3.10/lib/python3.10/site-packages/urllib3/connection.py:466: in getresponse
    httplib_response = super().getresponse()
/opt/hostedtoolcache/Python/3.10.14/x64/lib/python3.10/http/client.py:1375: in getresponse
    response.begin()
/opt/hostedtoolcache/Python/3.10.14/x64/lib/python3.10/http/client.py:318: in begin
    version, status, reason = self._read_status()
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <http.client.HTTPResponse object at 0x7f2768f49c30>

    def _read_status(self):
        line = str(self.fp.readline(_MAXLINE + 1), "iso-8859-1")
        if len(line) > _MAXLINE:
            raise LineTooLong("status line")
        if self.debuglevel > 0:
            print("reply:", repr(line))
        if not line:
            # Presumably, the server closed the connection before
            # sending a valid response.
>           raise RemoteDisconnected("Remote end closed connection without"
                                     " response")
E           http.client.RemoteDisconnected: Remote end closed connection without response

/opt/hostedtoolcache/Python/3.10.14/x64/lib/python3.10/http/client.py:287: RemoteDisconnected

During handling of the above exception, another exception occurred:

self = <requests.adapters.HTTPAdapter object at 0x7f276f14a170>
request = <PreparedRequest [GET]>, stream = True
timeout = Timeout(connect=None, read=None, total=None), verify = True
cert = None, proxies = OrderedDict()

    def send(
        self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None
    ):
        """Sends PreparedRequest object. Returns Response object.
    
        :param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
        :param stream: (optional) Whether to stream the request content.
        :param timeout: (optional) How long to wait for the server to send
            data before giving up, as a float, or a :ref:`(connect timeout,
            read timeout) <timeouts>` tuple.
        :type timeout: float or tuple or urllib3 Timeout object
        :param verify: (optional) Either a boolean, in which case it controls whether
            we verify the server's TLS certificate, or a string, in which case it
            must be a path to a CA bundle to use
        :param cert: (optional) Any user-provided SSL certificate to be trusted.
        :param proxies: (optional) The proxies dictionary to apply to the request.
        :rtype: requests.Response
        """
    
        try:
            conn = self.get_connection(request.url, proxies)
        except LocationValueError as e:
            raise InvalidURL(e, request=request)
    
        self.cert_verify(conn, request.url, verify, cert)
        url = self.request_url(request, proxies)
        self.add_headers(
            request,
            stream=stream,
            timeout=timeout,
            verify=verify,
            cert=cert,
            proxies=proxies,
        )
    
        chunked = not (request.body is None or "Content-Length" in request.headers)
    
        if isinstance(timeout, tuple):
            try:
                connect, read = timeout
                timeout = TimeoutSauce(connect=connect, read=read)
            except ValueError:
                raise ValueError(
                    f"Invalid timeout {timeout}. Pass a (connect, read) timeout tuple, "
                    f"or a single float to set both timeouts to the same value."
                )
        elif isinstance(timeout, TimeoutSauce):
            pass
        else:
            timeout = TimeoutSauce(connect=timeout, read=timeout)
    
        try:
>           resp = conn.urlopen(
                method=request.method,
                url=url,
                body=request.body,
                headers=request.headers,
                redirect=False,
                assert_same_host=False,
                preload_content=False,
                decode_content=False,
                retries=self.max_retries,
                timeout=timeout,
                chunked=chunked,
            )

../../../../.cache/pypoetry/virtualenvs/concise-autotest-tfGktGnJ-py3.10/lib/python3.10/site-packages/requests/adapters.py:486: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
../../../../.cache/pypoetry/virtualenvs/concise-autotest-tfGktGnJ-py3.10/lib/python3.10/site-packages/urllib3/connectionpool.py:847: in urlopen
    retries = retries.increment(
../../../../.cache/pypoetry/virtualenvs/concise-autotest-tfGktGnJ-py3.10/lib/python3.10/site-packages/urllib3/util/retry.py:470: in increment
    raise reraise(type(error), error, _stacktrace)
../../../../.cache/pypoetry/virtualenvs/concise-autotest-tfGktGnJ-py3.10/lib/python3.10/site-packages/urllib3/util/util.py:38: in reraise
    raise value.with_traceback(tb)
../../../../.cache/pypoetry/virtualenvs/concise-autotest-tfGktGnJ-py3.10/lib/python3.10/site-packages/urllib3/connectionpool.py:793: in urlopen
    response = self._make_request(
../../../../.cache/pypoetry/virtualenvs/concise-autotest-tfGktGnJ-py3.10/lib/python3.10/site-packages/urllib3/connectionpool.py:537: in _make_request
    response = conn.getresponse()
../../../../.cache/pypoetry/virtualenvs/concise-autotest-tfGktGnJ-py3.10/lib/python3.10/site-packages/urllib3/connection.py:466: in getresponse
    httplib_response = super().getresponse()
/opt/hostedtoolcache/Python/3.10.14/x64/lib/python3.10/http/client.py:1375: in getresponse
    response.begin()
/opt/hostedtoolcache/Python/3.10.14/x64/lib/python3.10/http/client.py:318: in begin
    version, status, reason = self._read_status()
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <http.client.HTTPResponse object at 0x7f2768f49c30>

    def _read_status(self):
        line = str(self.fp.readline(_MAXLINE + 1), "iso-8859-1")
        if len(line) > _MAXLINE:
            raise LineTooLong("status line")
        if self.debuglevel > 0:
            print("reply:", repr(line))
        if not line:
            # Presumably, the server closed the connection before
            # sending a valid response.
>           raise RemoteDisconnected("Remote end closed connection without"
                                     " response")
E           urllib3.exceptions.ProtocolError: ('Connection aborted.', RemoteDisconnected('Remote end closed connection without response'))

/opt/hostedtoolcache/Python/3.10.14/x64/lib/python3.10/http/client.py:287: ProtocolError

During handling of the above exception, another exception occurred:

collection_concept_id = 'C1238538224-POCLOUD'
harmony_env = <Environment.UAT: 3>
bearer_token = 'eyJ0eXAiOiJKV1QiLCJvcmlnaW4iOiJFYXJ0aGRhdGEgTG9naW4iLCJzaWciOiJlZGxqd3RwdWJrZXlfdWF0IiwiYWxnIjoiUlMyNTYifQ.eyJ0eXBlIj...LYxyzmX5kruGmecTBH0oLAkmaBiJHqsuOHSUdInwX3m82oH20B1kE8Y17ULwkilNLfZchCfHMikRkFN48hGuCctO2qPYD3NGQ-xVWI4hO2xdPhDFhffAeg'

    @pytest.mark.timeout(600)
    def test_concatenate(collection_concept_id, harmony_env, bearer_token):
    
        max_results = 2
    
        harmony_client = harmony.Client(env=harmony_env, token=bearer_token)
        collection = harmony.Collection(id=collection_concept_id)
    
        request = harmony.Request(
            collection=collection,
            concatenate=True,
            max_results=max_results,
            skip_preview=True,
            format="application/x-netcdf4",
        )
    
        request.is_valid()
    
        print(harmony_client.request_as_curl(request))
    
        job1_id = harmony_client.submit(request)
    
        print(f'\n{job1_id}')
    
        print(harmony_client.status(job1_id))
    
        print('\nWaiting for the job to finish')
    
        results = harmony_client.result_json(job1_id)
    
        print('\nDownloading results:')
    
        futures = harmony_client.download_all(job1_id)
        file_names = [f.result() for f in futures]
        print('\nDone downloading.')
    
        filename = file_names[0]
    
        # Handle time dimension and variables dropping
        merge_dataset = netCDF4.Dataset(filename, 'r')
    
        headers = {
            "Authorization": f"Bearer {bearer_token}"
        }
    
        original_files = merge_dataset.variables['subset_files']
        history_json = json.loads(merge_dataset.history_json)
        assert len(original_files) == max_results
    
        for url in history_json[0].get("derived_from"):
            local_file_name = os.path.basename(url)
>           download_file(url, local_file_name, headers)

verify_collection.py:487: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
verify_collection.py:426: in download_file
    response = requests.get(url, stream=True, headers=headers)
../../../../.cache/pypoetry/virtualenvs/concise-autotest-tfGktGnJ-py3.10/lib/python3.10/site-packages/requests/api.py:73: in get
    return request("get", url, params=params, **kwargs)
../../../../.cache/pypoetry/virtualenvs/concise-autotest-tfGktGnJ-py3.10/lib/python3.10/site-packages/requests/api.py:59: in request
    return session.request(method=method, url=url, **kwargs)
../../../../.cache/pypoetry/virtualenvs/concise-autotest-tfGktGnJ-py3.10/lib/python3.10/site-packages/requests/sessions.py:589: in request
    resp = self.send(prep, **send_kwargs)
../../../../.cache/pypoetry/virtualenvs/concise-autotest-tfGktGnJ-py3.10/lib/python3.10/site-packages/requests/sessions.py:703: in send
    r = adapter.send(request, **kwargs)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = <requests.adapters.HTTPAdapter object at 0x7f276f14a170>
request = <PreparedRequest [GET]>, stream = True
timeout = Timeout(connect=None, read=None, total=None), verify = True
cert = None, proxies = OrderedDict()

    def send(
        self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None
    ):
        """Sends PreparedRequest object. Returns Response object.
    
        :param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
        :param stream: (optional) Whether to stream the request content.
        :param timeout: (optional) How long to wait for the server to send
            data before giving up, as a float, or a :ref:`(connect timeout,
            read timeout) <timeouts>` tuple.
        :type timeout: float or tuple or urllib3 Timeout object
        :param verify: (optional) Either a boolean, in which case it controls whether
            we verify the server's TLS certificate, or a string, in which case it
            must be a path to a CA bundle to use
        :param cert: (optional) Any user-provided SSL certificate to be trusted.
        :param proxies: (optional) The proxies dictionary to apply to the request.
        :rtype: requests.Response
        """
    
        try:
            conn = self.get_connection(request.url, proxies)
        except LocationValueError as e:
            raise InvalidURL(e, request=request)
    
        self.cert_verify(conn, request.url, verify, cert)
        url = self.request_url(request, proxies)
        self.add_headers(
            request,
            stream=stream,
            timeout=timeout,
            verify=verify,
            cert=cert,
            proxies=proxies,
        )
    
        chunked = not (request.body is None or "Content-Length" in request.headers)
    
        if isinstance(timeout, tuple):
            try:
                connect, read = timeout
                timeout = TimeoutSauce(connect=connect, read=read)
            except ValueError:
                raise ValueError(
                    f"Invalid timeout {timeout}. Pass a (connect, read) timeout tuple, "
                    f"or a single float to set both timeouts to the same value."
                )
        elif isinstance(timeout, TimeoutSauce):
            pass
        else:
            timeout = TimeoutSauce(connect=timeout, read=timeout)
    
        try:
            resp = conn.urlopen(
                method=request.method,
                url=url,
                body=request.body,
                headers=request.headers,
                redirect=False,
                assert_same_host=False,
                preload_content=False,
                decode_content=False,
                retries=self.max_retries,
                timeout=timeout,
                chunked=chunked,
            )
    
        except (ProtocolError, OSError) as err:
>           raise ConnectionError(err, request=request)
E           requests.exceptions.ConnectionError: ('Connection aborted.', RemoteDisconnected('Remote end closed connection without response'))

../../../../.cache/pypoetry/virtualenvs/concise-autotest-tfGktGnJ-py3.10/lib/python3.10/site-packages/requests/adapters.py:501: ConnectionError
--------------------------------- Captured Log ---------------------------------

Check notice on line 0 in .github

See this annotation in the file changed.

@github-actions github-actions / Regression test results for uat

80 tests found

There are 80 tests, see "Raw output" for the full list of tests.
Raw output
tests.verify_collection ‑ test_concatenate[C1234071416-POCLOUD]
tests.verify_collection ‑ test_concatenate[C1234208436-POCLOUD]
tests.verify_collection ‑ test_concatenate[C1234208438-POCLOUD]
tests.verify_collection ‑ test_concatenate[C1234724470-POCLOUD]
tests.verify_collection ‑ test_concatenate[C1234724471-POCLOUD]
tests.verify_collection ‑ test_concatenate[C1238538224-POCLOUD]
tests.verify_collection ‑ test_concatenate[C1238538225-POCLOUD]
tests.verify_collection ‑ test_concatenate[C1238538231-POCLOUD]
tests.verify_collection ‑ test_concatenate[C1238538240-POCLOUD]
tests.verify_collection ‑ test_concatenate[C1238543223-POCLOUD]
tests.verify_collection ‑ test_concatenate[C1238621087-POCLOUD]
tests.verify_collection ‑ test_concatenate[C1238621102-POCLOUD]
tests.verify_collection ‑ test_concatenate[C1238621111-POCLOUD]
tests.verify_collection ‑ test_concatenate[C1238621112-POCLOUD]
tests.verify_collection ‑ test_concatenate[C1238621115-POCLOUD]
tests.verify_collection ‑ test_concatenate[C1238621172-POCLOUD]
tests.verify_collection ‑ test_concatenate[C1238621176-POCLOUD]
tests.verify_collection ‑ test_concatenate[C1238621219-POCLOUD]
tests.verify_collection ‑ test_concatenate[C1238657959-POCLOUD]
tests.verify_collection ‑ test_concatenate[C1238657960-POCLOUD]
tests.verify_collection ‑ test_concatenate[C1238657961-POCLOUD]
tests.verify_collection ‑ test_concatenate[C1238658049-POCLOUD]
tests.verify_collection ‑ test_concatenate[C1238658051-POCLOUD]
tests.verify_collection ‑ test_concatenate[C1238658086-POCLOUD]
tests.verify_collection ‑ test_concatenate[C1238658392-POCLOUD]
tests.verify_collection ‑ test_concatenate[C1238687282-POCLOUD]
tests.verify_collection ‑ test_concatenate[C1238687534-POCLOUD]
tests.verify_collection ‑ test_concatenate[C1238687546-POCLOUD]
tests.verify_collection ‑ test_concatenate[C1240739526-POCLOUD]
tests.verify_collection ‑ test_concatenate[C1240739547-POCLOUD]
tests.verify_collection ‑ test_concatenate[C1240739577-POCLOUD]
tests.verify_collection ‑ test_concatenate[C1240739606-POCLOUD]
tests.verify_collection ‑ test_concatenate[C1240739611-POCLOUD]
tests.verify_collection ‑ test_concatenate[C1240739644-POCLOUD]
tests.verify_collection ‑ test_concatenate[C1240739686-POCLOUD]
tests.verify_collection ‑ test_concatenate[C1240739688-POCLOUD]
tests.verify_collection ‑ test_concatenate[C1240739691-POCLOUD]
tests.verify_collection ‑ test_concatenate[C1240739719-POCLOUD]
tests.verify_collection ‑ test_concatenate[C1240739726-POCLOUD]
tests.verify_collection ‑ test_concatenate[C1240739734-POCLOUD]
tests.verify_collection ‑ test_concatenate[C1240739764-POCLOUD]
tests.verify_collection ‑ test_concatenate[C1242274070-POCLOUD]
tests.verify_collection ‑ test_concatenate[C1242274079-POCLOUD]
tests.verify_collection ‑ test_concatenate[C1242735870-POCLOUD]
tests.verify_collection ‑ test_concatenate[C1244459498-POCLOUD]
tests.verify_collection ‑ test_concatenate[C1244810554-POCLOUD]
tests.verify_collection ‑ test_concatenate[C1245295751-POCLOUD]
tests.verify_collection ‑ test_concatenate[C1247485682-LARC_CLOUD]
tests.verify_collection ‑ test_concatenate[C1247485685-LARC_CLOUD]
tests.verify_collection ‑ test_concatenate[C1247485690-LARC_CLOUD]
tests.verify_collection ‑ test_concatenate[C1256122852-POCLOUD]
tests.verify_collection ‑ test_concatenate[C1256420924-POCLOUD]
tests.verify_collection ‑ test_concatenate[C1256420925-POCLOUD]
tests.verify_collection ‑ test_concatenate[C1256507988-POCLOUD]
tests.verify_collection ‑ test_concatenate[C1256507989-POCLOUD]
tests.verify_collection ‑ test_concatenate[C1256507990-POCLOUD]
tests.verify_collection ‑ test_concatenate[C1256783381-POCLOUD]
tests.verify_collection ‑ test_concatenate[C1256783382-POCLOUD]
tests.verify_collection ‑ test_concatenate[C1256783386-POCLOUD]
tests.verify_collection ‑ test_concatenate[C1256783388-POCLOUD]
tests.verify_collection ‑ test_concatenate[C1256783391-POCLOUD]
tests.verify_collection ‑ test_concatenate[C1258237266-POCLOUD]
tests.verify_collection ‑ test_concatenate[C1258237267-POCLOUD]
tests.verify_collection ‑ test_concatenate[C1259115166-POCLOUD]
tests.verify_collection ‑ test_concatenate[C1259115177-POCLOUD]
tests.verify_collection ‑ test_concatenate[C1259115178-POCLOUD]
tests.verify_collection ‑ test_concatenate[C1259966654-POCLOUD]
tests.verify_collection ‑ test_concatenate[C1261072645-POCLOUD]
tests.verify_collection ‑ test_concatenate[C1261072646-POCLOUD]
tests.verify_collection ‑ test_concatenate[C1261072648-POCLOUD]
tests.verify_collection ‑ test_concatenate[C1261072651-POCLOUD]
tests.verify_collection ‑ test_concatenate[C1261072654-POCLOUD]
tests.verify_collection ‑ test_concatenate[C1261072655-POCLOUD]
tests.verify_collection ‑ test_concatenate[C1261072656-POCLOUD]
tests.verify_collection ‑ test_concatenate[C1261072658-POCLOUD]
tests.verify_collection ‑ test_concatenate[C1261072659-POCLOUD]
tests.verify_collection ‑ test_concatenate[C1261072661-POCLOUD]
tests.verify_collection ‑ test_concatenate[C1261072662-POCLOUD]
tests.verify_collection ‑ test_concatenate[C1261591414-POCLOUD]
tests.verify_collection ‑ test_concatenate[C1261591569-POCLOUD]