Skip to content

Commit

Permalink
[gcloud] Add a setting for connect/read timeouts
Browse files Browse the repository at this point in the history
  • Loading branch information
mlazowik committed Mar 10, 2022
1 parent 06cf365 commit 7f10a3c
Show file tree
Hide file tree
Showing 2 changed files with 31 additions and 9 deletions.
16 changes: 16 additions & 0 deletions docs/backends/gcloud.rst
Original file line number Diff line number Diff line change
Expand Up @@ -204,6 +204,22 @@ Note: Default Google Compute Engine (GCE) Service accounts are
The ``GS_EXPIRATION`` value is handled by the underlying `Google library <https://googlecloudplatform.github.io/google-cloud-python/latest/storage/blobs.html#google.cloud.storage.blob.Blob.generate_signed_url>`_.
It supports `timedelta`, `datetime`, or `integer` seconds since epoch time.

``GS_TIMEOUT`` (optional: default is ``60``, float or tuple)

Connect/read timeout. The amount of time, in seconds, to wait for the connection to the server to be established, and between
bytes sent from the server. If a float is given it is applied to both timeouts; if a tuple is given, the first value is the
connect timeout and the second is the read timeout.

Note that the read timeout is not the same as a download timeout: it is the number of seconds that the client will wait
*between* bytes sent from the server. In the vast majority of cases, this is simply the time before the server sends the first byte.

See https://docs.python-requests.org/en/master/user/advanced/#timeouts

Sometimes requests can get stuck, so it is better to keep the timeout low — a couple of seconds. This means that a new request
(via a retry) will be made sooner. The default is higher in order to keep the behavior from before this setting was introduced.

Timed-out requests will be retried automatically when using a ``google-cloud-storage`` version that includes
https://github.com/googleapis/python-storage/pull/727.

Usage
-----
Expand Down
24 changes: 15 additions & 9 deletions storages/backends/gcloud.py
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,7 @@ def __init__(self, name, mode, storage):
self.mime_type = mimetypes.guess_type(name)[0]
self._mode = mode
self._storage = storage
self.blob = storage.bucket.get_blob(name)
self.blob = storage.bucket.get_blob(name, timeout=storage.timeout)
if not self.blob and 'w' in mode:
self.blob = Blob(
self.name, storage.bucket,
Expand All @@ -55,7 +55,7 @@ def _get_file(self):
)
if 'r' in self._mode:
self._is_dirty = False
self.blob.download_to_file(self._file)
self.blob.download_to_file(self._file, timeout=self._storage.timeout)
self._file.seek(0)
if self._storage.gzip and self.blob.content_encoding == 'gzip':
self._file = self._decompress_file(mode=self._mode, file=self._file)
Expand Down Expand Up @@ -87,7 +87,8 @@ def close(self):
blob_params = self._storage.get_object_parameters(self.name)
self.blob.upload_from_file(
self.file, rewind=True, content_type=self.mime_type,
predefined_acl=blob_params.get('acl', self._storage.default_acl))
predefined_acl=blob_params.get('acl', self._storage.default_acl),
timeout=self._storage.timeout)
self._file.close()
self._file = None

Expand Down Expand Up @@ -128,6 +129,7 @@ def get_default_settings(self):
# roll over.
"max_memory_size": setting('GS_MAX_MEMORY_SIZE', 0),
"blob_chunk_size": setting('GS_BLOB_CHUNK_SIZE'),
"timeout": setting('GS_TIMEOUT', 60)
}

@property
Expand Down Expand Up @@ -186,7 +188,10 @@ def _save(self, name, content):
for prop, val in blob_params.items():
setattr(file_object.blob, prop, val)

file_object.blob.upload_from_file(content, rewind=True, size=getattr(content, 'size', None), **upload_params)
file_object.blob.upload_from_file(content, rewind=True,
size=getattr(content, 'size', None),
timeout=self.timeout,
**upload_params)
return cleaned_name

def get_object_parameters(self, name):
Expand All @@ -209,20 +214,20 @@ def get_object_parameters(self, name):
def delete(self, name):
    """Delete the blob for ``name`` from the bucket.

    The name is cleaned and normalized first. A missing blob is not an
    error: ``NotFound`` from the API is swallowed so that deleting a
    nonexistent object is a no-op (matching Django storage semantics).
    """
    name = self._normalize_name(clean_name(name))
    try:
        # Pass the configured connect/read timeout (GS_TIMEOUT) so the
        # request cannot hang indefinitely.
        self.bucket.delete_blob(name, timeout=self.timeout)
    except NotFound:
        # Already gone (or never existed) — deletion is idempotent.
        pass

def exists(self, name):
    """Return ``True`` if ``name`` exists in the bucket.

    An empty ``name`` refers to the root element, i.e. the bucket
    itself, so existence of the bucket is checked instead of a blob.
    All API calls use the configured connect/read timeout (GS_TIMEOUT).
    """
    if not name:  # root element aka the bucket
        try:
            self.client.get_bucket(self.bucket, timeout=self.timeout)
            return True
        except NotFound:
            return False

    name = self._normalize_name(clean_name(name))
    # get_blob returns None for a missing object; coerce to bool.
    return bool(self.bucket.get_blob(name, timeout=self.timeout))

def listdir(self, name):
name = self._normalize_name(clean_name(name))
Expand All @@ -231,7 +236,8 @@ def listdir(self, name):
if name and not name.endswith('/'):
name += '/'

iterator = self.bucket.list_blobs(prefix=name, delimiter='/')
iterator = self.bucket.list_blobs(prefix=name, delimiter='/',
timeout=self.timeout)
blobs = list(iterator)
prefixes = iterator.prefixes

Expand All @@ -249,7 +255,7 @@ def listdir(self, name):

def _get_blob(self, name):
# Wrap google.cloud.storage's blob to raise if the file doesn't exist
blob = self.bucket.get_blob(name)
blob = self.bucket.get_blob(name, timeout=self.timeout)

if blob is None:
raise NotFound('File does not exist: {}'.format(name))
Expand Down

0 comments on commit 7f10a3c

Please sign in to comment.