diff --git a/awscrt/s3.py b/awscrt/s3.py
index 7509d14da..7e5574641 100644
--- a/awscrt/s3.py
+++ b/awscrt/s3.py
@@ -180,6 +180,11 @@ class S3Client(NativeResource):
             Gigabits per second (Gbps) that we are trying to reach.
             You can also use `get_recommended_throughput_target_gbps()` to get recommended value for your system.
             10.0 Gbps by default (may change in future)
+
+        memory_limit (Optional[int]): Memory limit, in bytes, on how much
+            memory the client can use to buffer data for requests.
+            Default values scale with target throughput and are currently
+            between 2 GiB and 8 GiB (may change in future)
     """
 
     __slots__ = ('shutdown_event', '_region')
@@ -195,7 +200,8 @@ def __init__(
             tls_connection_options=None,
             part_size=None,
             multipart_upload_threshold=None,
-            throughput_target_gbps=None):
+            throughput_target_gbps=None,
+            memory_limit=None):
         assert isinstance(bootstrap, ClientBootstrap) or bootstrap is None
         assert isinstance(region, str)
         assert isinstance(signing_config, AwsSigningConfig) or signing_config is None
@@ -236,6 +242,8 @@ def on_shutdown():
             multipart_upload_threshold = 0
         if throughput_target_gbps is None:
             throughput_target_gbps = 0
+        if memory_limit is None:
+            memory_limit = 0
 
         self._binding = _awscrt.s3_client_new(
             bootstrap,
@@ -248,6 +256,7 @@ def on_shutdown():
             part_size,
             multipart_upload_threshold,
             throughput_target_gbps,
+            memory_limit,
             s3_client_core)
 
     def make_request(
diff --git a/crt/aws-c-io b/crt/aws-c-io
index c9cb77747..df64f57fe 160000
--- a/crt/aws-c-io
+++ b/crt/aws-c-io
@@ -1 +1 @@
-Subproject commit c9cb77747d3fd2809cf3d9c43be7d5decc17e4b3
+Subproject commit df64f57feb63ab1a489ded86a87b756a48c46f35
diff --git a/crt/aws-c-s3 b/crt/aws-c-s3
index 83008e577..f961971f3 160000
--- a/crt/aws-c-s3
+++ b/crt/aws-c-s3
@@ -1 +1 @@
-Subproject commit 83008e577804643bc632ae4e603f36ab96219b9b
+Subproject commit f961971f3851782d40126f0597f007c73ba4ae0c
diff --git a/source/s3_client.c b/source/s3_client.c
index 0a5acb713..af2e88d53 100644
--- a/source/s3_client.c
+++ b/source/s3_client.c
@@ -249,10 +249,11 @@ PyObject *aws_py_s3_client_new(PyObject *self, PyObject *args) {
     uint64_t part_size;                  /* K */
     uint64_t multipart_upload_threshold; /* K */
     double throughput_target_gbps;       /* d */
+    uint64_t mem_limit;                  /* K */
     PyObject *py_core;                   /* O */
     if (!PyArg_ParseTuple(
             args,
-            "OOOOOs#iKKdO",
+            "OOOOOs#iKKdKO",
             &bootstrap_py,
             &signing_config_py,
             &credential_provider_py,
@@ -264,6 +265,7 @@ PyObject *aws_py_s3_client_new(PyObject *self, PyObject *args) {
             &part_size,
             &multipart_upload_threshold,
             &throughput_target_gbps,
+            &mem_limit,
             &py_core)) {
         return NULL;
     }
@@ -328,6 +330,7 @@ PyObject *aws_py_s3_client_new(PyObject *self, PyObject *args) {
         .tls_mode = tls_mode,
         .signing_config = signing_config,
         .part_size = part_size,
+        .memory_limit_in_bytes = mem_limit,
         .multipart_upload_threshold = multipart_upload_threshold,
         .tls_connection_options = tls_options,
         .throughput_target_gbps = throughput_target_gbps,
diff --git a/test/test_s3.py b/test/test_s3.py
index e3cb391a6..67e5a8c89 100644
--- a/test/test_s3.py
+++ b/test/test_s3.py
@@ -148,7 +148,7 @@ def full_path(self, filename):
         return os.path.join(self.rootdir, filename)
 
 
-def s3_client_new(secure, region, part_size=0, is_cancel_test=False):
+def s3_client_new(secure, region, part_size=0, is_cancel_test=False, mem_limit=None):
 
     if is_cancel_test:
         # for cancellation tests, make things slow, so it's less likely that
@@ -177,6 +177,7 @@ def s3_client_new(secure, region, part_size=0, is_cancel_test=False):
         signing_config=signing_config,
         tls_connection_options=tls_option,
         part_size=part_size,
+        memory_limit=mem_limit,
         throughput_target_gbps=throughput_target_gbps)
 
     return s3_client
@@ -313,10 +314,11 @@ def _test_s3_put_get_object(
             request,
             request_type,
             exception_name=None,
+            mem_limit=None,
             **kwargs,
     ):
-        s3_client = s3_client_new(False, self.region, 5 * MB)
+        s3_client = s3_client_new(False, self.region, 5 * MB, mem_limit=mem_limit)
 
         s3_request = s3_client.make_request(
             request=request,
             type=request_type,
@@ -342,6 +344,10 @@ def test_get_object(self):
         request = self._get_object_request(self.get_test_object_path)
         self._test_s3_put_get_object(request, S3RequestType.GET_OBJECT)
 
+    def test_get_object_mem_limit(self):
+        request = self._get_object_request(self.get_test_object_path)
+        self._test_s3_put_get_object(request, S3RequestType.GET_OBJECT, mem_limit=2 * GB)
+
     def test_put_object(self):
         put_body_stream = open(self.temp_put_obj_file_path, "rb")
         content_length = os.stat(self.temp_put_obj_file_path).st_size
@@ -349,6 +355,13 @@ def test_put_object(self):
         self._test_s3_put_get_object(request, S3RequestType.PUT_OBJECT)
         put_body_stream.close()
 
+    def test_put_object_mem_limit(self):
+        put_body_stream = open(self.temp_put_obj_file_path, "rb")
+        content_length = os.stat(self.temp_put_obj_file_path).st_size
+        request = self._put_object_request(put_body_stream, content_length)
+        self._test_s3_put_get_object(request, S3RequestType.PUT_OBJECT, mem_limit=2 * GB)
+        put_body_stream.close()
+
     def test_put_object_unknown_content_length(self):
         put_body_stream = open(self.temp_put_obj_file_path, "rb")
         content_length = os.stat(self.temp_put_obj_file_path).st_size
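
For reviewers who want to try the new parameter, here is a minimal Python usage sketch. It mirrors how the s3_client_new helper in test/test_s3.py builds a client; the region, part size, and default-chain credentials are placeholder choices, and only memory_limit is the parameter added by this diff.

from awscrt import io
from awscrt.auth import AwsCredentialsProvider
from awscrt.s3 import S3Client

GiB = 1024 ** 3

# Standard CRT bootstrapping; any equivalent ClientBootstrap works here.
event_loop_group = io.EventLoopGroup()
host_resolver = io.DefaultHostResolver(event_loop_group)
bootstrap = io.ClientBootstrap(event_loop_group, host_resolver)
credential_provider = AwsCredentialsProvider.new_default_chain(bootstrap)

# memory_limit caps, in bytes, how much memory the native client may use
# to buffer request data.
client = S3Client(
    bootstrap=bootstrap,
    region="us-west-2",  # placeholder region
    credential_provider=credential_provider,
    part_size=8 * 1024 * 1024,
    throughput_target_gbps=10.0,
    memory_limit=2 * GiB,
)

Note that leaving memory_limit as None maps to 0 at the binding layer (see the __init__ change above), which lets aws-c-s3 choose its own default, currently scaled with throughput_target_gbps per the new docstring.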