-
Notifications
You must be signed in to change notification settings - Fork 0
/
stats.py
286 lines (249 loc) · 10.5 KB
/
stats.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
# Created by MapleShade20
# Intance folders here should be named like: Instance 4
import json
import os
import pandas as pd
import logging
import datetime
import shutil
# Mapping from SpeedrunIGT timeline event names (keys) to the column names
# used in stats_output.csv (values). The bastion/fortress re-labelling the
# values hint at is handled later, in read_record.
name_match = {'enter_nether': 'enter_nether', 'enter_bastion': 'goto_bastion',
              'enter_fortress': 'bart_travel', 'nether_travel': 'fight_blaze',
              'enter_stronghold': 'eye_spy', 'enter_end': 'locate_room',
              'kill_ender_dragon': 'kill_dragon'}
# Parallel lists, in dict insertion order: raw event names and csv columns.
old_names = [*name_match]
new_names = [*name_match.values()]
# Fetch config.json and pull out the settings this script needs.
# Fix: the old code caught every exception (including a missing KEY) and
# printed "Cannot find config.json", which was misleading; file errors and
# missing keys are now reported separately.
try:
    with open('./config.json', "r") as f:
        config = json.load(f)
except (OSError, json.JSONDecodeError):
    print('Cannot find config.json. Exiting...')
    exit()
try:
    py_dir = os.getcwd()                           # where the script was launched from
    mc_dir = config['mc_dir']                      # MultiMC 'instances' directory
    read_incomplete = config['read_incomplete']    # log saves skipped as incomplete
    ignore_lastrun = config['ignore_lastrun']      # re-read saves older than last run
    log_level = config['log_level']
    version = config['version']
    empty_bopping = config['empty_bopping']        # delete saves with an empty timeline
    no_blind_bopping = config['no_blind_bopping']  # delete runs that never blind-traveled
    replace_old_csv = config['replace_old_csv']
except KeyError as missing:
    print(f'config.json is missing the key {missing}. Exiting...')
    exit()
# Check ./output/stats_output.csv version by verifying if the columns are latest.
# If not, ask user to delete the old csv, and then exit.
if os.path.exists('./output/stats_output.csv'):
    with open('./output/stats_output.csv', 'r') as f:
        if f.readline().strip() != 'category,run_type,is_completed,final_igt_converted,date_converted,' + ','.join(new_names) + ',save_path,date,final_igt,final_rta':
            print('You have an outdated stats_output.csv. Please delete it. Exiting...')
            exit()
# Set up logging.
# No chdir has happened yet, so this pins stats.log to the directory the
# script was launched from (py_dir) before we later move into mc_dir.
os.chdir(py_dir)
logging.basicConfig(format='%(asctime)s [%(levelname)s] %(message)s', filename='stats.log', filemode='a', encoding='utf-8', level=log_level)
logging.info(f'Running at {os.getcwd()}')
# Log config used, so a log entry records the settings each run used.
logging.info(f'Config used: {config}')
# Check if stats_last_run.txt exists, and define last_run_time
# (unix timestamp of the previous run; 0 means "read every save").
if os.path.exists("stats_last_run.txt"):
    with open("./stats_last_run.txt", "r") as f:
        try:
            last_run_time = float(f.read())
        except ValueError:
            logging.error('stats_last_run.txt is broken.')
            print('Please delete stats_last_run.txt. Exiting...')
            exit()
else:
    logging.info("Cannot find stats_last_run.txt")
    last_run_time = 0
# Fix: validate the MultiMC path BEFORE changing into it — the old code
# called os.chdir(mc_dir) first, so a bad config value raised an unhandled
# error instead of printing this message.
if not mc_dir.endswith('instances'):
    print(f'Invalid path: {mc_dir}. Exiting...')
    exit()
# Change working directory to MultiMC
os.chdir(mc_dir)
# Define a function to check if a folder is new
def is_new_folder(folder, last_run_time):
# Get folder create time
folder_ctime = os.path.getctime(folder)
if last_run_time != 0:
logging.debug(f'Save \'{folder}\' created at: {datetime.datetime.fromtimestamp(folder_ctime)}. Last stats: {datetime.datetime.fromtimestamp(last_run_time)}.')
# If folder create time is later than last run time, return True
return folder_ctime > last_run_time
else:
return True
# Helper: format a millisecond duration for display.
def convert_millis(millis):
    """Convert *millis* (a millisecond count) to an 'H:MM:SS[.fff]' string.

    timedelta's str() gives 'H:MM:SS' or 'H:MM:SS.ffffff'; the seconds
    field is clipped to 6 characters to keep at most millisecond precision.
    """
    as_delta = datetime.timedelta(seconds=millis / 1000)
    hours, minutes, seconds = str(as_delta).split(':')
    return f'{hours}:{minutes}:{seconds[:6]}'
# Main record reader module
def read_record(record, old_names, new_names):
    """Turn a SpeedrunIGT record dict into a one-row DataFrame of split
    lengths in whole seconds, with columns named after *new_names*.

    Returns None when the run is missing enter_fortress, enter_bastion or
    nether_travel (i.e. it never blind-traveled), so callers can skip it.
    Events missing after that point produce -1 for that split and every
    later one.
    """
    # Timelines come as [{'name': ..., 'igt': ..., 'rta': ...}, ...];
    # pivot so each event name is a column, then keep only the igt row.
    timelines = pd.DataFrame(record["timelines"]).set_index("name").T
    timelines = timelines.drop(index='rta')
    required = ('enter_fortress', 'enter_bastion', 'nether_travel')
    if any(event not in timelines.columns for event in required):
        return None
    # Calculate per-split durations (seconds) from the cumulative igt stamps.
    splits = pd.DataFrame()
    for i, event in enumerate(old_names):
        if event not in timelines.columns:
            # Run died before this event: flag this and all later splits.
            for unreached in new_names[i:]:
                splits[unreached] = -1
            break
        if i == 0:
            splits[new_names[0]] = timelines[event] / 1000  # enter_nether
        elif timelines[event].item() < timelines[old_names[i - 1]].item():
            # Current stamp precedes the previous one — in practice the
            # fortress-before-bastion route. Re-label the two splits:
            #   bart_travel  = nether_travel - enter_bastion  (> 0)
            #   goto_bastion = enter_bastion - enter_fortress
            splits[new_names[i]] = (timelines[old_names[i + 1]] - timelines[old_names[i - 1]]) / 1000
            splits[new_names[i - 1]] = (timelines[old_names[i - 1]] - timelines[event]) / 1000
            # fight_blaze (= nether_travel - enter_fortress) is unchanged.
        else:
            splits[new_names[i]] = (timelines[event] - timelines[old_names[i - 1]]) / 1000
    return splits.astype(int)
# ------
# READ
# ------
# Walk every MultiMC instance folder, read each speedrun save's
# record.json, and accumulate the parsed runs into `data`.
resets = 0    # total atum reset counter summed over all instances
attempts = 0  # runs whose record.json matched the filters (incl. incomplete)
count = 0     # runs actually parsed into the DataFrame
data = None   # accumulated DataFrame of all parsed runs (None until the first)
for instance in os.listdir("."):
    # Accept 'Instance *'
    if not instance.startswith('Instance'):
        continue
    logging.info(f'Current folder: {instance}')
    # Read the reset count of atum mod.
    try:
        with open(f'./{instance}/.minecraft/config/atum/atum.properties', 'r') as f:
            contents = f.read()
        # Extract ssgAttempts and rsgAttempts values from contents.
        # NOTE(review): assumes those keys sit on the 4th- and 3rd-to-last
        # lines of atum.properties — confirm against the mod's file layout.
        ssg_attempts = int(contents.split('\n')[-4].split('=')[1])
        rsg_attempts = int(contents.split('\n')[-3].split('=')[1])
        resets += ssg_attempts + rsg_attempts
    except Exception:
        # Best-effort: a missing/odd atum.properties only costs the reset count.
        print('failed')
        pass
    for save in os.listdir(f"./{instance}/.minecraft/saves"):
        path_info = f"/{instance}/{save}"
        # Only atum-generated worlds count as runs.
        if not save.startswith("Random Speedrun") and not save.startswith("Set Speedrun"):
            continue
        #logging.debug(f"Start checking {path_info}")
        record_path = os.path.join(f"./{instance}/.minecraft/saves", save, "speedrunigt/record.json")
        save_path = os.path.join(f"./{instance}/.minecraft/saves", save)
        # Skip if record.json doesn't exist
        if not os.path.exists(record_path):
            logging.warning(f'Cannot find record in \'{record_path}\'.')
            continue
        with open(record_path, "r") as f:
            record = json.load(f)
        # Skip empty saves, and do world bopping (save deletion) if enabled
        if record["timelines"] == []:
            if empty_bopping:
                shutil.rmtree(save_path)
                logging.debug(f"Deleted empty save {path_info}.")
            continue
        # Skip old saves (created before the previous run of this script)
        if not ignore_lastrun:
            if not is_new_folder(save_path, last_run_time):
                continue
        # Main read record module
        logging.info(f"{path_info} matches!")
        attempts += 1
        df = read_record(record, old_names, new_names)
        if df is None:
            # Run never reached nether_travel: optionally log and/or delete it.
            if read_incomplete:
                logging.debug(f"[INC] {path_info} skipped for no \'nether_travel\'.")
            if no_blind_bopping:
                shutil.rmtree(save_path)
                logging.info(f"Deleted incomplete save {path_info}.")
            continue
        # date and igt conversion. date is divided by 1000 because the Python timestamp is in seconds
        # date is a millisecond timestamp that represents the number of milliseconds from 1970-1-1 0:0:0 (UTC) to a certain time point
        date_converted = datetime.datetime.fromtimestamp(record["date"] / 1000).strftime("%Y-%m-%d %H:%M")
        final_igt_converted = convert_millis(record["final_igt"])
        # Add the record's metadata columns alongside the split columns.
        df["category"] = record["category"]
        df["run_type"] = record["run_type"]
        df["final_igt"] = record["final_igt"]
        df["final_rta"] = record["final_rta"]
        df["date"] = record["date"]
        df["date_converted"] = date_converted
        df["final_igt_converted"] = final_igt_converted
        df["save_path"] = path_info
        df["is_completed"] = record["is_completed"]
        # Reorder columns to match the stats_output.csv header checked earlier.
        df = df[['category','run_type','is_completed','final_igt_converted',
                 'date_converted',*new_names,
                 'save_path','date','final_igt','final_rta']]
        count += 1
        logging.info(f"Record {path_info} successfully.")
        if data is not None:
            data = pd.concat([data, df], axis=0, ignore_index=True)
        else:
            data = df
# Make a subfolder of py_dir named 'output'
os.chdir(py_dir)
os.makedirs('output', exist_ok=True)
os.chdir('output')
# Save the data frame object to stats_output.csv file.
# Fix: the old code always appended with header=False, so a freshly created
# csv had no header line (breaking both the version check and the re-read
# below), and replace_old_csv logged "Replacing" but still appended.
# Now: create/replace writes a headed file; append stays header-less.
if data is None:
    print('No new runs detected.')
else:
    if not os.path.exists('stats_output.csv'):
        logging.info('Initiating stats_output.csv')
        data.to_csv("stats_output.csv", mode='w', index=False, header=True)
    elif replace_old_csv:
        logging.info('Replacing stats_output.csv')
        data.to_csv("stats_output.csv", mode='w', index=False, header=True)
    else:
        logging.info('Writing into stats_output.csv')
        data.to_csv("stats_output.csv", mode='a', index=False, header=False)
    data = pd.read_csv("stats_output.csv")
    # Sort the csv file by date ascendingly and then remove identical rows
    logging.info('Sorting stats_output.csv')
    data = data.sort_values(by='date', ascending=True)
    # Keep only the latest row recorded for each save folder.
    data = data.drop_duplicates(subset=['save_path'], keep='last')
    data.to_csv("stats_output.csv", mode='w', index=False, header=True)
# ----------
# TIME WRITE
# ----------
# Persist the current timestamp so the next run can skip already-read saves
# (read back at startup as last_run_time).
os.chdir(py_dir)
current_time = datetime.datetime.now().timestamp()
with open("stats_last_run.txt", "w") as f:
    f.write(str(float(current_time)))
logging.info(f"Execute time: {datetime.datetime.now()}, {current_time}")
logging.info(f"{attempts} runs are read. {count} runs are recorded. For atum, {resets} resets are found.")
print(f"Congrats! {attempts} runs are read. {count} runs are recorded.")
# There is a file: ./output/obs_display.txt
# It contains the following lines:
# Maple20 (He/Him)
# RSG pb 26:49
# FSG pb 21:43
# Resets 21459
# Runs 3604
# Update it in place: 'Runs' is incremented by this run's attempts,
# 'Resets' is overwritten with the freshly summed atum counters.
os.chdir('output')
with open('obs_display.txt', 'r') as f:
    lines = f.readlines()
# enumerate instead of range(len(...)): we need the index only to write back.
for i, line in enumerate(lines):
    if 'Runs' in line:
        runs = int(line.split()[-1]) + attempts
        lines[i] = f"Runs {runs}\n"
    elif 'Resets' in line:
        lines[i] = f"Resets {resets}\n"
with open('obs_display.txt', 'w') as f:
    f.writelines(lines)
print("obs_display.txt updated.")