Fix #13027 (daca@home does not handle well when results are too large) (#6711)

If the uploaded result is too large, it is better to remove the old result on
disk. The old result would be invalid or deprecated in one way or another.
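
As a rough illustration of the idea (not the server's actual code), here is a minimal sketch using hypothetical names (store_result, MAX_RESULT_SIZE): data that reached the size limit is truncated and therefore unusable, so the stale result file is removed instead of being kept.

import os

# Hypothetical limit for this sketch; the actual server uses 1024 * 1024
# for 'write' and 7 * 1024 * 1024 for 'write_info'.
MAX_RESULT_SIZE = 1024 * 1024

def store_result(filename: str, data: str) -> None:
    # Data that hit the size limit is truncated and therefore unusable:
    # drop any previously stored result instead of leaving a stale file on disk.
    if len(data) >= MAX_RESULT_SIZE:
        print('Data is too large. Removing result.')
        if os.path.exists(filename):
            os.remove(filename)
        return
    # Otherwise overwrite the old result with the fresh data.
    with open(filename, 'wt') as f:
        f.write(data)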
danmar authored Aug 18, 2024
1 parent 7e1cec8 commit 8349fe2
Showing 1 changed file with 13 additions and 4 deletions.
tools/donate-cpu-server.py: 13 additions & 4 deletions
@@ -26,7 +26,7 @@
 # Version scheme (MAJOR.MINOR.PATCH) should orientate on "Semantic Versioning" https://semver.org/
 # Every change in this script should result in increasing the version number accordingly (exceptions may be cosmetic
 # changes)
-SERVER_VERSION = "1.3.55"
+SERVER_VERSION = "1.3.56"
 
 OLD_VERSION = '2.14.0'
 
@@ -1254,9 +1254,8 @@ def read_data(connection, cmd, pos_nl, max_data_size, check_done, cmd_name, time
 
     if data and (len(data) >= max_data_size):
         print_ts('Maximum allowed data ({} bytes) exceeded ({}).'.format(max_data_size, cmd_name))
-        data = None
 
-    if data and check_done and not data.endswith('\nDONE'):
+    elif data and check_done and not data.endswith('\nDONE'):
         print_ts('Incomplete data received ({}).'.format(cmd_name))
         data = None
 
@@ -1334,9 +1333,10 @@ def server(server_address_port: int, packages: list, packageIndex: int, resultPa
                 continue
         elif cmd.startswith('write\nftp://') or cmd.startswith('write\nhttp://'):
             t_start = time.perf_counter()
-            data = read_data(connection, cmd, pos_nl, max_data_size=2.5 * 1024 * 1024, check_done=True, cmd_name='write')
+            data = read_data(connection, cmd, pos_nl, max_data_size=1024 * 1024, check_done=True, cmd_name='write')
             if data is None:
                 continue
+            truncated_data = len(data) >= 1024 * 1024
 
             pos = data.find('\n')
             if pos == -1:
@@ -1376,6 +1376,10 @@ def server(server_address_port: int, packages: list, packageIndex: int, resultPa
                 print_ts('Unexpected old version. Ignoring result data.')
                 continue
             filename = os.path.join(resultPath, res.group(1))
+            if truncated_data:
+                print_ts('Data is too large. Removing result.')
+                os.remove(filename)
+                continue
             with open(filename, 'wt') as f:
                 f.write(strDateTime() + '\n' + data)
             # track latest added results..
@@ -1393,6 +1397,7 @@ def server(server_address_port: int, packages: list, packageIndex: int, resultPa
             data = read_data(connection, cmd, pos_nl, max_data_size=7 * 1024 * 1024, check_done=True, cmd_name='write_info')
             if data is None:
                 continue
+            truncated_data = len(data) >= 7 * 1024 * 1024
 
             pos = data.find('\n')
             if pos == -1:
@@ -1418,6 +1423,10 @@ def server(server_address_port: int, packages: list, packageIndex: int, resultPa
             if not os.path.exists(info_path):
                 os.mkdir(info_path)
             filename = info_path + '/' + res.group(1)
+            if truncated_data:
+                print_ts('Data is too large. Removing result.')
+                os.remove(filename)
+                continue
             with open(filename, 'wt') as f:
                 f.write(strDateTime() + '\n' + data)
             print_ts('write_info finished for {} ({} bytes / {}s)'.format(res.group(1), len(data), (time.perf_counter() - t_start)))