diff --git a/requirements-dev-ext.txt b/requirements-dev-ext.txt
index 45f1e066..47bb1312 100644
--- a/requirements-dev-ext.txt
+++ b/requirements-dev-ext.txt
@@ -1,2 +1,3 @@
 polib==1.2.0
-pyinstaller==6.1.0
\ No newline at end of file
+pyinstaller==6.1.0
+pip-tools==7.4.1
\ No newline at end of file
diff --git a/requirements-dev.txt b/requirements-dev.txt
index 1913902d..98f3e3dd 100644
--- a/requirements-dev.txt
+++ b/requirements-dev.txt
@@ -9,4 +9,5 @@ pydantic==1.10.14
 scipy==1.13.0
 numpy==1.26.4
 shapely==2.0.4
-pyclipper==1.3.0.post5
\ No newline at end of file
+pyclipper==1.3.0.post5
+pytz==2024.1
\ No newline at end of file
diff --git a/requirements-prod.txt b/requirements-prod.txt
index 7355628d..35b5e1e9 100644
--- a/requirements-prod.txt
+++ b/requirements-prod.txt
@@ -4,7 +4,6 @@
 #
 # pip-compile --annotation-style=line --index-url=https://pypi.tuna.tsinghua.edu.cn/simple --output-file=requirements-prod.txt requirements-dev.txt
 #
---index-url https://pypi.tuna.tsinghua.edu.cn/simple
 
 anyio==4.4.0              # via httpx, starlette, watchfiles
 arrow==1.3.0              # via cookiecutter
@@ -61,6 +60,7 @@ python-dotenv==1.0.1      # via uvicorn
 python-multipart==0.0.9   # via fastapi
 python-slugify==8.0.4     # via cookiecutter
 pytweening==1.2.0         # via pyautogui
+pytz==2024.1              # via -r requirements-dev.txt
 pyyaml==6.0.1             # via cookiecutter, uvicorn
 qrcode==7.4.2             # via flet
 repath==0.9.0             # via flet-core
diff --git a/src/sr/performance_recorder.py b/src/sr/performance_recorder.py
index 95fc9b7a..ffa8abd4 100644
--- a/src/sr/performance_recorder.py
+++ b/src/sr/performance_recorder.py
@@ -1,7 +1,6 @@
 import time
 import os
 import yaml
-import psutil
 
 from basic import os_utils
 from basic.log_utils import log
@@ -92,11 +91,6 @@ def save_performance_record():
     """
     path = os.path.join(os_utils.get_path_under_work_dir('.log'), 'performance.txt')
     data = {}
-    data['cpu_frequency'] = f"{psutil.cpu_freq().current}MHz"
-    data['cpu_count'] = psutil.cpu_count()
-    memory_info = psutil.virtual_memory()
-    data['memory_total'] = f"{memory_info.total / (1024.0 ** 3)} GB"
-    data['memory_used'] = f"{memory_info.used / (1024.0 ** 3)} GB"
 
     for k, v in recorder.record_map.items():
         data['time_%s' % k] = v.avg
diff --git a/src/sryolo/detector.py b/src/sryolo/detector.py
index 3f782bba..eb045224 100644
--- a/src/sryolo/detector.py
+++ b/src/sryolo/detector.py
@@ -1,14 +1,15 @@
-import os
 import time
-import urllib.request
-import zipfile
-from typing import Optional, List, Tuple
+import csv
 
 import cv2
 import numpy as np
 import onnxruntime as ort
-import pandas as pd
+import os
+import urllib.request
+import zipfile
 from cv2.typing import MatLike
+from typing import Optional, List, Tuple
+
 from basic.log_utils import log
 
 
@@ -191,14 +192,16 @@ def load_detect_classes(self, model_dir_path: str):
         :return:
         """
         csv_path = os.path.join(model_dir_path, 'labels.csv')
-        labels_df = pd.read_csv(csv_path, encoding='utf-8')
-        self.idx_2_class = {}
-        self.class_2_idx = {}
-        self.cate_2_idx = {}
-        for _, row in labels_df.iterrows():
-            self.idx_2_class[row['idx']] = DetectClass(row['idx'], row['label'], row['cate'])
-            self.class_2_idx[row['label']] = row['idx']
-            self.cate_2_idx[row['cate']] = row['idx']
+        with open(csv_path, mode='r', encoding='utf-8') as file:
+            csv_reader = csv.reader(file)
+            for row in csv_reader:
+                if row[0] != 'idx':
+                    idx = int(row[0])
+                    label = row[1]
+                    cate = row[2]
+                    self.idx_2_class[idx] = DetectClass(idx, label, cate)
+                    self.class_2_idx[label] = idx
+                    self.cate_2_idx[cate] = idx
 
     def detect(self, image: MatLike, conf: float = 0.7,
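
For reviewers who want to sanity-check the pandas-to-csv swap in `load_detect_classes` outside the project, here is a minimal standalone sketch. It assumes the `labels.csv` layout implied by the diff (an `idx,label,cate` header row followed by data rows); `DetectClass` and `load_labels` below are hypothetical stand-ins for illustration, not the project's actual class or method.

```python
import csv
from dataclasses import dataclass


@dataclass
class DetectClass:
    # Stand-in for the project's DetectClass; only the three fields the loader uses.
    idx: int
    label: str
    cate: str


def load_labels(csv_path: str):
    # Parse labels.csv the same way the patched load_detect_classes does:
    # skip the header row, then build idx/label/cate lookup tables.
    idx_2_class, class_2_idx, cate_2_idx = {}, {}, {}
    with open(csv_path, mode='r', encoding='utf-8') as file:
        for row in csv.reader(file):
            if row[0] != 'idx':  # skip the header row
                idx = int(row[0])
                label = row[1]
                cate = row[2]
                idx_2_class[idx] = DetectClass(idx, label, cate)
                class_2_idx[label] = idx
                cate_2_idx[cate] = idx
    return idx_2_class, class_2_idx, cate_2_idx


if __name__ == '__main__':
    # Tiny self-contained demo with fabricated example rows.
    import os
    import tempfile
    path = os.path.join(tempfile.mkdtemp(), 'labels.csv')
    with open(path, 'w', encoding='utf-8') as f:
        f.write("idx,label,cate\n0,hero,character\n1,chest,item\n")
    idx_2_class, class_2_idx, cate_2_idx = load_labels(path)
    print(idx_2_class[0], class_2_idx['chest'], cate_2_idx['item'])
```

Building the mappings into freshly created dicts, as the removed pandas version did with `self.idx_2_class = {}` before iterating, also avoids keeping stale entries around if the loader is ever called a second time.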