donate-cpu-server.py: added timing information to logging and adjusted some log messages #6362

Merged (3 commits) on Apr 29, 2024
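
The change in a nutshell: each HTTP request and each write command is now timed with time.perf_counter(), and the elapsed seconds are appended to the existing log lines. Below is a minimal standalone sketch of that pattern; print_ts is a simplified stand-in for the server's timestamped logger, and handle_request is a hypothetical placeholder for the URL handling in HttpClientThread.run().

import time
from datetime import datetime

def print_ts(msg: str) -> None:
    # simplified stand-in for the server's timestamped logger
    print('[{}] {}'.format(datetime.now().strftime('%Y-%m-%d %H:%M:%S'), msg), flush=True)

def handle_request(url: str) -> None:
    # hypothetical placeholder for the real URL handling
    time.sleep(0.1)

def timed_handle(url: str) -> None:
    t_start = time.perf_counter()  # start the clock once the request has been validated
    handle_request(url)
    # same log format the patch introduces: "<url> finished in <seconds>s"
    print_ts('{} finished in {}s'.format(url, (time.perf_counter() - t_start)))

if __name__ == '__main__':
    timed_handle('/latest.html')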
tools/donate-cpu-server.py (20 changes: 13 additions & 7 deletions)
@@ -26,7 +26,7 @@
 # Version scheme (MAJOR.MINOR.PATCH) should orientate on "Semantic Versioning" https://semver.org/
 # Every change in this script should result in increasing the version number accordingly (exceptions may be cosmetic
 # changes)
-SERVER_VERSION = "1.3.51"
+SERVER_VERSION = "1.3.52"
 
 OLD_VERSION = '2.14.0'
 
@@ -1085,7 +1085,7 @@ class HttpClientThread(Thread):
     def __init__(self, connection: socket.socket, cmd: str, resultPath: str, latestResults: list) -> None:
         Thread.__init__(self)
         self.connection = connection
-        self.cmd = cmd[:cmd.find('\r\n')]
+        self.cmd = cmd
         self.resultPath = resultPath
         self.infoPath = os.path.join(self.resultPath, 'info_output')
         self.latestResults = latestResults
@@ -1102,12 +1102,12 @@ def parse_req(cmd):
     def run(self):
         try:
             cmd = self.cmd
-            print_ts(cmd)
             url, queryParams = self.parse_req(cmd)
             if url is None:
                 print_ts('invalid request: {}'.format(cmd))
                 self.connection.close()
                 return
+            t_start = time.perf_counter()
             if url == '/':
                 html = overviewReport()
                 httpGetResponse(self.connection, html, 'text/html')
@@ -1204,6 +1204,7 @@ def run(self):
                 with open(filename, 'rt') as f:
                     data = f.read()
                 httpGetResponse(self.connection, data, 'text/plain')
+            print_ts('{} finished in {}s'.format(url, (time.perf_counter() - t_start)))
         except:
             tb = "".join(traceback.format_exception(sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2]))
             print_ts(tb)
@@ -1287,7 +1288,7 @@ def server(server_address_port: int, packages: list, packageIndex: int, resultPa
             continue
         pos_nl = cmd.find('\n')
         if pos_nl < 1:
-            print_ts('No newline found in data.')
+            print_ts("No newline found in data: '{}'".format(cmd))
             connection.close()
             continue
         firstLine = cmd[:pos_nl]
@@ -1296,8 +1297,11 @@ def server(server_address_port: int, packages: list, packageIndex: int, resultPa
             connection.close()
             continue
         if cmd.startswith('GET /'):
+            cmd = cmd[:cmd.find('\r\n')]
+            print_ts(cmd)
             newThread = HttpClientThread(connection, cmd, resultPath, latestResults)
             newThread.start()
+            continue
         elif cmd == 'GetCppcheckVersions\n':
             reply = 'head ' + OLD_VERSION
             print_ts('GetCppcheckVersions: ' + reply)
@@ -1321,6 +1325,7 @@ def server(server_address_port: int, packages: list, packageIndex: int, resultPa
             connection.close()
             continue
         elif cmd.startswith('write\nftp://') or cmd.startswith('write\nhttp://'):
+            t_start = time.perf_counter()
             data = read_data(connection, cmd, pos_nl, max_data_size=2.5 * 1024 * 1024, check_done=True, cmd_name='write')
             if data is None:
                 continue
@@ -1362,7 +1367,6 @@ def server(server_address_port: int, packages: list, packageIndex: int, resultPa
             if old_version_wrong:
                 print_ts('Unexpected old version. Ignoring result data.')
                 continue
-            print_ts('results added for package ' + res.group(1) + ' (' + str(len(data)) + ' bytes)')
             filename = os.path.join(resultPath, res.group(1))
             with open(filename, 'wt') as f:
                 f.write(strDateTime() + '\n' + data)
@@ -1374,8 +1378,10 @@ def server(server_address_port: int, packages: list, packageIndex: int, resultPa
                 f.write(' '.join(latestResults))
             # generate package.diff..
             generate_package_diff_statistics(filename)
+            print_ts('write finished for {} ({} bytes / {}s)'.format(res.group(1), len(data), (time.perf_counter() - t_start)))
             continue
         elif cmd.startswith('write_info\nftp://') or cmd.startswith('write_info\nhttp://'):
+            t_start = time.perf_counter()
             data = read_data(connection, cmd, pos_nl, max_data_size=7 * 1024 * 1024, check_done=True, cmd_name='write_info')
             if data is None:
                 continue
@@ -1400,13 +1406,13 @@ def server(server_address_port: int, packages: list, packageIndex: int, resultPa
             if url not in packages:
                 print_ts('Url is not in packages. Ignoring information data.')
                 continue
-            print_ts('adding info output for package ' + res.group(1) + ' (' + str(len(data)) + ' bytes)')
             info_path = resultPath + '/' + 'info_output'
             if not os.path.exists(info_path):
                 os.mkdir(info_path)
             filename = info_path + '/' + res.group(1)
             with open(filename, 'wt') as f:
                 f.write(strDateTime() + '\n' + data)
+            print_ts('write_info finished for {} ({} bytes / {}s)'.format(res.group(1), len(data), (time.perf_counter() - t_start)))
             continue
         elif cmd == 'getPackagesCount\n':
             packages_count = str(len(packages))
@@ -1421,7 +1427,7 @@ def server(server_address_port: int, packages: list, packageIndex: int, resultPa
                 connection.send(pkg.encode('utf-8', 'ignore'))
                 print_ts('getPackageIdx: ' + pkg)
             else:
-                print_ts('getPackageIdx: index is out of range')
+                print_ts('getPackageIdx: index {} is out of range'.format(request_idx))
             connection.close()
             continue
         elif cmd.startswith('write_nodata\nftp://'):
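
The write and write_info branches follow the same idea, but the clock is started before the payload is read, so the logged time covers the transfer as well as the file handling. A rough sketch of that shape, where read_payload and store_result are hypothetical placeholders for the server's read_data() and result-file writing:

import time

def read_payload(connection) -> str:
    # hypothetical placeholder for read_data(connection, cmd, pos_nl, ...)
    return 'cppcheck result data'

def store_result(package: str, data: str) -> None:
    # hypothetical placeholder for writing the result file and the package diff
    pass

def handle_write(connection, package: str) -> None:
    t_start = time.perf_counter()  # started before the payload transfer
    data = read_payload(connection)
    store_result(package, data)
    # matches the patch's format: package name, payload size, elapsed seconds
    print('write finished for {} ({} bytes / {}s)'.format(package, len(data), (time.perf_counter() - t_start)))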