Fix #13027 (daca@home does not handle well when results are too large) #6711

Merged 2 commits on Aug 18, 2024
17 changes: 13 additions & 4 deletions tools/donate-cpu-server.py
@@ -26,7 +26,7 @@
 # Version scheme (MAJOR.MINOR.PATCH) should orientate on "Semantic Versioning" https://semver.org/
 # Every change in this script should result in increasing the version number accordingly (exceptions may be cosmetic
 # changes)
-SERVER_VERSION = "1.3.55"
+SERVER_VERSION = "1.3.56"

 OLD_VERSION = '2.14.0'

@@ -1254,9 +1254,8 @@ def read_data(connection, cmd, pos_nl, max_data_size, check_done, cmd_name, time
     if data and (len(data) >= max_data_size):
         print_ts('Maximum allowed data ({} bytes) exceeded ({}).'.format(max_data_size, cmd_name))
-        data = None

-    if data and check_done and not data.endswith('\nDONE'):
+    elif data and check_done and not data.endswith('\nDONE'):
         print_ts('Incomplete data received ({}).'.format(cmd_name))
         data = None

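Note (not part of the diff): a minimal sketch of the revised read_data() contract under the PR's semantics. None is now returned only for incomplete data; an oversized payload is returned unchanged so the caller can tell the two failure modes apart by size. The helper name classify_payload is hypothetical.

# Sketch of the revised contract: read_data() no longer discards oversized
# payloads, so callers distinguish "incomplete" from "too large" themselves.
def classify_payload(data, max_data_size):
    """Hypothetical helper mirroring the caller-side checks."""
    if data is None:
        return 'incomplete'        # read_data() already logged the reason
    if len(data) >= max_data_size:
        return 'truncated'         # oversized: reject and clean up
    return 'ok'

# Example with the 1 MiB ceiling the 'write' command uses after this change.
assert classify_payload(None, 1024 * 1024) == 'incomplete'
assert classify_payload('x' * (1024 * 1024), 1024 * 1024) == 'truncated'
assert classify_payload('result\nDONE', 1024 * 1024) == 'ok'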
@@ -1334,9 +1333,10 @@ def server(server_address_port: int, packages: list, packageIndex: int, resultPa
             continue
         elif cmd.startswith('write\nftp://') or cmd.startswith('write\nhttp://'):
             t_start = time.perf_counter()
-            data = read_data(connection, cmd, pos_nl, max_data_size=2.5 * 1024 * 1024, check_done=True, cmd_name='write')
+            data = read_data(connection, cmd, pos_nl, max_data_size=1024 * 1024, check_done=True, cmd_name='write')
             if data is None:
                 continue
+            truncated_data = len(data) >= 1024 * 1024

             pos = data.find('\n')
             if pos == -1:
@@ -1376,6 +1376,10 @@ def server(server_address_port: int, packages: list, packageIndex: int, resultPa
                 print_ts('Unexpected old version. Ignoring result data.')
                 continue
             filename = os.path.join(resultPath, res.group(1))
+            if truncated_data:
+                print_ts('Data is too large. Removing result.')
+                os.remove(filename)
+                continue
             with open(filename, 'wt') as f:
                 f.write(strDateTime() + '\n' + data)
             # track latest added results..
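Note (not part of the diff): a condensed sketch of the write-path handling above. The helper name store_result and its parameters are hypothetical, and the os.path.exists() guard is added only so the sketch runs when no earlier result file exists; the server script itself uses print_ts() and strDateTime() for logging and timestamps.

import os

MAX_WRITE_SIZE = 1024 * 1024  # limit passed to read_data() for 'write'

def store_result(result_path, package_name, data, timestamp):
    """Hypothetical condensation of the write-path logic shown above."""
    filename = os.path.join(result_path, package_name)
    if len(data) >= MAX_WRITE_SIZE:
        # Truncated upload: drop it and remove any stale result for the package.
        if os.path.exists(filename):   # guard added for this sketch only
            os.remove(filename)
        return False
    with open(filename, 'wt') as f:
        f.write(timestamp + '\n' + data)
    return True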
@@ -1393,6 +1397,7 @@ def server(server_address_port: int, packages: list, packageIndex: int, resultPa
             data = read_data(connection, cmd, pos_nl, max_data_size=7 * 1024 * 1024, check_done=True, cmd_name='write_info')
             if data is None:
                 continue
+            truncated_data = len(data) >= 7 * 1024 * 1024

             pos = data.find('\n')
             if pos == -1:
@@ -1418,6 +1423,10 @@ def server(server_address_port: int, packages: list, packageIndex: int, resultPa
             if not os.path.exists(info_path):
                 os.mkdir(info_path)
             filename = info_path + '/' + res.group(1)
+            if truncated_data:
+                print_ts('Data is too large. Removing result.')
+                os.remove(filename)
+                continue
             with open(filename, 'wt') as f:
                 f.write(strDateTime() + '\n' + data)
             print_ts('write_info finished for {} ({} bytes / {}s)'.format(res.group(1), len(data), (time.perf_counter() - t_start)))
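Note (not part of the diff): the write and write_info paths now apply the same truncation check, differing only in the ceiling passed to read_data() (1 MiB vs. 7 MiB). A parameterized sketch with a hypothetical helper name:

WRITE_LIMIT = 1024 * 1024            # 'write' results
WRITE_INFO_LIMIT = 7 * 1024 * 1024   # 'write_info' results

def is_truncated(data, limit):
    # read_data() was called with max_data_size=limit, so reaching the limit
    # means the payload was cut off and must not be stored.
    return len(data) >= limit

print(is_truncated('x' * WRITE_LIMIT, WRITE_LIMIT))          # True
print(is_truncated('small result\nDONE', WRITE_INFO_LIMIT))  # False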