Bind out mem limiter (#529)
Co-authored-by: Waqar Ahmed Khan <[email protected]>
Co-authored-by: Michael Graeb <[email protected]>
3 people authored Nov 22, 2023
1 parent 4c5cc5e commit 4c3718a
Showing 5 changed files with 31 additions and 6 deletions.
11 changes: 10 additions & 1 deletion awscrt/s3.py
@@ -180,6 +180,11 @@ class S3Client(NativeResource):
             Gigabits per second (Gbps) that we are trying to reach.
             You can also use `get_recommended_throughput_target_gbps()` to get recommended value for your system.
             10.0 Gbps by default (may change in future)
+        memory_limit (Optional[int]): Memory limit, in bytes, of how much memory
+            client can use for buffering data for requests.
+            Default values scale with target throughput and are currently
+            between 2GiB and 8GiB (may change in future)
+
     """

     __slots__ = ('shutdown_event', '_region')
@@ -195,7 +200,8 @@ def __init__(
             tls_connection_options=None,
             part_size=None,
             multipart_upload_threshold=None,
-            throughput_target_gbps=None):
+            throughput_target_gbps=None,
+            memory_limit=None):
         assert isinstance(bootstrap, ClientBootstrap) or bootstrap is None
         assert isinstance(region, str)
         assert isinstance(signing_config, AwsSigningConfig) or signing_config is None
@@ -236,6 +242,8 @@ def on_shutdown():
             multipart_upload_threshold = 0
         if throughput_target_gbps is None:
             throughput_target_gbps = 0
+        if memory_limit is None:
+            memory_limit = 0

         self._binding = _awscrt.s3_client_new(
             bootstrap,
@@ -248,6 +256,7 @@ def on_shutdown():
             part_size,
             multipart_upload_threshold,
             throughput_target_gbps,
+            memory_limit,
             s3_client_core)

     def make_request(
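For orientation, here is a minimal sketch of how a caller might use the new parameter; the region, the credentials setup, and the 4 GiB figure are illustrative choices, not part of this commit:

    from awscrt.auth import AwsCredentialsProvider
    from awscrt.io import ClientBootstrap, DefaultHostResolver, EventLoopGroup
    from awscrt.s3 import S3Client

    # Standard CRT plumbing: event loop group, host resolver, bootstrap.
    event_loop_group = EventLoopGroup()
    host_resolver = DefaultHostResolver(event_loop_group)
    bootstrap = ClientBootstrap(event_loop_group, host_resolver)
    credential_provider = AwsCredentialsProvider.new_default_chain(bootstrap)

    # Cap the client's buffering memory at 4 GiB (illustrative). When
    # memory_limit is omitted, the Python layer passes 0 and the native
    # client picks a default that scales with throughput_target_gbps.
    client = S3Client(
        bootstrap=bootstrap,
        region="us-west-2",
        credential_provider=credential_provider,
        memory_limit=4 * 1024 * 1024 * 1024)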
5 changes: 4 additions & 1 deletion source/s3_client.c
@@ -249,10 +249,11 @@ PyObject *aws_py_s3_client_new(PyObject *self, PyObject *args) {
     uint64_t part_size;                  /* K */
     uint64_t multipart_upload_threshold; /* K */
     double throughput_target_gbps;       /* d */
+    uint64_t mem_limit;                  /* K */
     PyObject *py_core;                   /* O */
     if (!PyArg_ParseTuple(
             args,
-            "OOOOOs#iKKdO",
+            "OOOOOs#iKKdKO",
             &bootstrap_py,
             &signing_config_py,
             &credential_provider_py,
@@ -264,6 +265,7 @@ PyObject *aws_py_s3_client_new(PyObject *self, PyObject *args) {
             &part_size,
             &multipart_upload_threshold,
             &throughput_target_gbps,
+            &mem_limit,
             &py_core)) {
         return NULL;
     }
@@ -328,6 +330,7 @@ PyObject *aws_py_s3_client_new(PyObject *self, PyObject *args) {
         .tls_mode = tls_mode,
         .signing_config = signing_config,
         .part_size = part_size,
+        .memory_limit_in_bytes = mem_limit,
         .multipart_upload_threshold = multipart_upload_threshold,
         .tls_connection_options = tls_options,
         .throughput_target_gbps = throughput_target_gbps,
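One detail worth noting on the binding side: in PyArg_ParseTuple, the "K" format code converts a Python int to a C unsigned long long with no overflow checking, which is why mem_limit is declared uint64_t and the format string grows from "OOOOOs#iKKdO" to "OOOOOs#iKKdKO". Because "K" silently truncates out-of-range values, a guard on the Python side could reject values the native layer would misread; the check sketched below is hypothetical and not part of this commit:

    # Hypothetical guard (not in this commit): "K" truncates to 64 bits
    # without overflow checking, so validate before crossing the binding.
    if memory_limit is not None and not (0 <= memory_limit < 2**64):
        raise ValueError("memory_limit must fit in an unsigned 64-bit integer")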
17 changes: 15 additions & 2 deletions test/test_s3.py
@@ -148,7 +148,7 @@ def full_path(self, filename):
         return os.path.join(self.rootdir, filename)


-def s3_client_new(secure, region, part_size=0, is_cancel_test=False):
+def s3_client_new(secure, region, part_size=0, is_cancel_test=False, mem_limit=None):

     if is_cancel_test:
         # for cancellation tests, make things slow, so it's less likely that
@@ -177,6 +177,7 @@ def s3_client_new(secure, region, part_size=0, is_cancel_test=False):
         signing_config=signing_config,
         tls_connection_options=tls_option,
         part_size=part_size,
+        memory_limit=mem_limit,
         throughput_target_gbps=throughput_target_gbps)

     return s3_client
@@ -313,10 +314,11 @@ def _test_s3_put_get_object(
             request,
             request_type,
             exception_name=None,
+            mem_limit=None,
             **kwargs,
     ):

-        s3_client = s3_client_new(False, self.region, 5 * MB)
+        s3_client = s3_client_new(False, self.region, 5 * MB, mem_limit=mem_limit)
         s3_request = s3_client.make_request(
             request=request,
             type=request_type,
@@ -342,13 +344,24 @@ def test_get_object(self):
         request = self._get_object_request(self.get_test_object_path)
         self._test_s3_put_get_object(request, S3RequestType.GET_OBJECT)

+    def test_get_object_mem_limit(self):
+        request = self._get_object_request(self.get_test_object_path)
+        self._test_s3_put_get_object(request, S3RequestType.GET_OBJECT, mem_limit=2 * GB)
+
     def test_put_object(self):
         put_body_stream = open(self.temp_put_obj_file_path, "rb")
         content_length = os.stat(self.temp_put_obj_file_path).st_size
         request = self._put_object_request(put_body_stream, content_length)
         self._test_s3_put_get_object(request, S3RequestType.PUT_OBJECT)
         put_body_stream.close()

+    def test_put_object_mem_limit(self):
+        put_body_stream = open(self.temp_put_obj_file_path, "rb")
+        content_length = os.stat(self.temp_put_obj_file_path).st_size
+        request = self._put_object_request(put_body_stream, content_length)
+        self._test_s3_put_get_object(request, S3RequestType.PUT_OBJECT, mem_limit=2 * GB)
+        put_body_stream.close()
+
     def test_put_object_unknown_content_length(self):
         put_body_stream = open(self.temp_put_obj_file_path, "rb")
         content_length = os.stat(self.temp_put_obj_file_path).st_size
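The two new tests reuse _test_s3_put_get_object with a 2 GiB cap, so a full GET and PUT each run end to end under the limiter. To run just these tests locally, something like the following sketch should work, assuming the suite's usual credentials and test-bucket environment are configured:

    import unittest

    # Select only the new mem-limit tests by name pattern (Python 3.7+).
    loader = unittest.TestLoader()
    loader.testNamePatterns = ["*mem_limit*"]
    suite = loader.discover("test", pattern="test_s3.py")
    unittest.TextTestRunner(verbosity=2).run(suite)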
