-
Notifications
You must be signed in to change notification settings - Fork 0
/
remote_run.py
211 lines (165 loc) · 6.97 KB
/
remote_run.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
from os import path, makedirs
import torch.cuda
from struct import pack, unpack
from json import loads, dumps
import base64
import socket
import gdown
from termcolor import colored
from tqdm import tqdm
import time
import json
import gc
# Interactive configuration: ask the operator for the data-server address
# before anything else runs (these module-level names are used by __main__).
ip = input("Server ip>")
port = int(input("Server port>"))
# Local working layout: data/models holds downloaded model weights,
# data/videos holds the benchmark input videos.
main_dir = "data"
models_path = path.join(main_dir, "models")
videos_path = path.join(main_dir, "videos")
makedirs(models_path, exist_ok=True)
makedirs(videos_path, exist_ok=True)
def send_data(connection, data):
    """Send *data* over *connection* prefixed with its 4-byte LE length.

    Wire format: <u32 little-endian size><payload>.

    Fix: use sendall() instead of send() — a single send() may legally
    transmit only part of the buffer, silently truncating large payloads.
    """
    size_bytes = pack("<I", len(data))
    connection.sendall(size_bytes)
    connection.sendall(data)
def receive_data(connection):
    """Receive one length-prefixed message produced by send_data().

    Fixes over the original:
    - the 4-byte size header is read in a loop (a single recv(4) may
      return fewer than 4 bytes, yielding a garbage size);
    - an empty recv() (peer closed the connection) raises ConnectionError
      instead of spinning forever on b"".
    """
    size_bytes = b""
    while len(size_bytes) < 4:
        chunk = connection.recv(4 - len(size_bytes))
        if not chunk:
            raise ConnectionError("connection closed while reading size header")
        size_bytes += chunk
    size = unpack("<I", size_bytes)[0]
    data = b""
    while len(data) < size:
        chunk = connection.recv(size - len(data))
        if not chunk:
            raise ConnectionError("connection closed while reading payload")
        data += chunk
    return data
def send_string(connection, string):
    """UTF-8 encode *string* and ship it as one length-prefixed message."""
    encoded = string.encode("utf-8")
    send_data(connection, encoded)
def receive_string(connection):
    """Receive one length-prefixed message and decode it as UTF-8 text."""
    raw = receive_data(connection)
    return raw.decode("utf-8")
def send_json(conn, json):
    """Serialize *json* (any dumps-able object) and send it as a string.

    NOTE(review): the parameter name shadows the imported ``json`` module
    inside this scope — harmless here, but worth renaming at the call
    sites some day.
    """
    serialized = dumps(json)
    send_string(conn, serialized)
def receive_json(conn):
    """Receive one length-prefixed string and parse it as JSON."""
    text = receive_string(conn)
    return loads(text)
def receive_file(conn, file_path):
    """Receive one file from the server and write it into *file_path*.

    Wire format: <u32 little-endian JSON length><JSON body>, where the
    JSON object carries {"name": <filename>, "content": <base64 bytes>}.
    Replies with the literal acknowledgement b"DO IT" once the file is
    written (protocol token the server expects — do not change).

    Fix: the original header loop did ``f_len_byte += conn.recv(4)`` until
    ``len != 4`` — a short first read could overshoot 4 bytes, swallowing
    payload bytes into the header and hanging the body loop. Each recv now
    requests only the missing bytes, and a closed connection raises
    ConnectionError instead of looping forever.
    """
    # Read exactly 4 bytes of length header.
    f_len_byte = b""
    while len(f_len_byte) < 4:
        chunk = conn.recv(4 - len(f_len_byte))
        if not chunk:
            raise ConnectionError("connection closed while reading file header")
        f_len_byte += chunk
    json_len = unpack("<I", f_len_byte)[0]
    # Read exactly json_len bytes of JSON body.
    json_b = b""
    while len(json_b) < json_len:
        chunk = conn.recv(json_len - len(json_b))
        if not chunk:
            raise ConnectionError("connection closed while reading file body")
        json_b += chunk
    data_name = loads(json_b.decode("utf-8"))
    # Unpack the JSON envelope.
    name = data_name["name"]
    content = base64.b64decode(data_name["content"])
    # Write the decoded file to disk.
    with open(path.join(file_path, name), "wb") as file:
        file.write(content)
    # sendall(): a plain send() may transmit only part of the ack.
    conn.sendall("DO IT".encode("utf-8"))
def ask_file(conn, file_path, asked_file, ftype="file"):
    """Request *asked_file* from the server and save it under *file_path*.

    ``ftype`` now defaults to "file" so the bootstrap call site that
    passes only three arguments (fetching test.py at startup) no longer
    raises TypeError.
    NOTE(review): the exact ftype value the server expects for plain
    files is not visible from this file — confirm "file" against the
    server's protocol handler.
    """
    send_json(conn, {"type": "ask_files", "filename": asked_file, "ftype": ftype})
    receive_file(conn, file_path)
def ask(conn, type: str):
    """Send a bare {"type": ...} request and return the decoded JSON reply."""
    request = {"type": type}
    send_json(conn, request)
    return receive_json(conn)
if __name__ == "__main__":
    system_name = input("System Name>")
    print("Try to find test.py and connect to data-server")
    try:
        from test import bench_model, parse_model_name, print_machine_info
        print(colored("File loaded", "green"))
        sock = socket.socket()
        sock.connect((ip, port))
    except ModuleNotFoundError:
        # test.py is not present locally: connect first, fetch it from the
        # data-server, then import it.
        print("File not found")
        sock = socket.socket()
        sock.connect((ip, port))
        # Relies on ask_file's ftype parameter having a default value.
        ask_file(sock, "", "test.py")
        print("File downloaded")
        from test import bench_model, parse_model_name, print_machine_info
        print("File imported")
    print_machine_info()
    from ultralytics import YOLO

    def _report_results(model, res):
        """Append results to the local backup file, then push them to the
        server, retrying up to 5 times (1 s apart) on failure. Extracted:
        this code was duplicated verbatim for the CUDA and CPU passes."""
        with open("backup.txt", "a") as fd:
            fd.write(json.dumps({model.ckpt_path: res}) + "\n")
        attempts = 0
        while attempts < 5:
            try:
                send_json(sock, {"type": "send_stats",
                                 "save_name": f"{system_name}.csv",
                                 "results": {model.ckpt_path: res}})
                break
            except Exception:  # was a bare except: don't swallow KeyboardInterrupt
                print(colored("Can't send data", "red"))
                attempts += 1
                time.sleep(1)

    # Ask the server which models exist and let the operator pick which
    # model types to benchmark.
    models = ask(sock, "get_models")
    print("Got models list")
    want_models = []     # models missing locally -> must be downloaded
    to_test_models = []  # every selected model name -> will be benchmarked
    for model_type in models.keys():
        model_use = input(f"Do you want to test {colored(model_type, 'yellow')} models? (y - yes, other - not): ")
        if model_use.lower() == "y":
            for model_name in models[model_type]:
                # model_name looks like (name, gdrive_id, is_folder) — see
                # the indexing below; confirm against the server side.
                to_test_models.append(model_name[0])
                local = path.join(models_path, model_name[0])
                if not path.isfile(local) and not path.isdir(local):
                    want_models.append(model_name)
    if len(want_models) != 0:
        print(f"We need to download models: {', '.join([i[0] for i in want_models])}")
    for model_name in want_models:
        print(model_name)
        if not model_name[2]:  # single weight file on Google Drive
            gdown.download(id=model_name[1], output=path.join(models_path, model_name[0]))
        else:  # whole folder on Google Drive
            gdown.download_folder(id=model_name[1], output=path.join(models_path, model_name[0]))
        print(colored(f"We downloaded {model_name}", "green"))
    videos = ask(sock, "get_videos")
    if len(videos) != 0:
        print(f"We need to download videos: {', '.join([i[0] for i in videos])} ({len(videos)})")
    for video in videos:
        if not path.isfile(path.join(videos_path, video[0])):
            gdown.download(id=video[1], output=path.join(videos_path, video[0]))
            print(colored(f"Downloaded {video}", "green"))
    print(colored("All models and videos downloaded.", "green"))
    print(colored(f"Going to test: {len(to_test_models)} models"))
    for video in videos:
        for model_number, model_name in enumerate(to_test_models):
            if torch.cuda.is_available() and model_name[-3:] == ".pt":
                # GPU pass: only .pt checkpoints are run on CUDA.
                model = YOLO(path.join(models_path, model_name))
                model.to("cuda")
                torch.cuda.set_device(0)
                print(model_name, "cuda")
                # NOTE(review): inside this branch model_name always ends
                # with ".pt", so parse_model_name is never reached here and
                # args is always just ("cuda",). Preserved as-is.
                if not model_name.endswith(".pt"):  # base models don't have args
                    args = parse_model_name(model_name, models_path)
                else:
                    args = ()
                args = tuple(list(args) + ["cuda"])
                res = bench_model(model, path.join(videos_path, video[0]), args)
                print(colored(f"Model test results: \n{res}", "green"))
                _report_results(model, res)
                # Clean system after inference.
                del model
                torch.cuda.empty_cache()
                gc.collect()
            # CPU pass (always runs, even right after the CUDA pass).
            model = YOLO(path.join(models_path, model_name))
            print(model_name, "non cuda")
            if not model_name.endswith(".pt"):  # base models don't have args
                args = parse_model_name(model_name, models_path)
            else:
                args = ()
            res = bench_model(model, path.join(videos_path, video[0]), args)
            print(colored(f"Model test results: \n{res}", "green"))
            _report_results(model, res)
            print(f"Tested: {colored(str(model_number + 1), 'red')}/{colored(str(len(to_test_models)), 'green')}")
            # Clean system after inference.
            del model
            torch.cuda.empty_cache()
            gc.collect()
    sock.close()