From c083d2ef98c7bfcba08ae1b68b7d1178671d664d Mon Sep 17 00:00:00 2001 From: Adrien Date: Thu, 16 Nov 2023 21:24:34 +0100 Subject: [PATCH] Format, lint, check, test (#296) * same test prodedure (1 line) for CI and devs * added project for quick dev onboarding * added packaging with pyproject.toml * added and configured black * added and configured pyright * added a few type hints * applied formatting --- .github/workflows/ci.yml | 57 +- .gitignore | 3 + Dockerfile-test | 5 + comptages/__init__.py | 11 +- comptages/chart/chart_dialog.py | 285 ++++--- comptages/comptages.py | 422 ++++++----- comptages/config/config_creator.py | 49 +- comptages/core/bulk_create_manager.py | 4 +- comptages/core/definitions.py | 276 +++---- comptages/core/delete_dialog.py | 2 +- comptages/core/filter_dialog.py | 25 +- comptages/core/importer.py | 371 ++++----- comptages/core/importer_task.py | 30 +- comptages/core/layers.py | 705 ++++++++++-------- comptages/core/report.py | 556 ++++++-------- comptages/core/report_task.py | 33 +- comptages/core/settings.py | 54 +- comptages/core/statistics.py | 332 +++++---- comptages/core/utils.py | 26 +- comptages/core/yearly_report_dialog.py | 2 +- comptages/datamodel/apps.py | 11 +- .../management/commands/importdata.py | 46 +- .../datamodel/management/commands/tjmreset.py | 10 +- .../datamodel/migrations/0001_initial.py | 1 - .../datamodel/migrations/0002_search_path.py | 12 +- .../migrations/0003_auto_20210820_0626.py | 7 +- .../migrations/0004_delete_basetjmok.py | 5 +- .../migrations/0005_auto_20210820_0843.py | 11 +- .../migrations/0006_auto_20210820_1115.py | 19 +- .../migrations/0007_alter_classcategory_id.py | 11 +- .../migrations/0008_auto_20210820_1143.py | 51 +- .../migrations/0009_auto_20210820_1143.py | 27 +- comptages/datamodel/migrations/0010_tjm.py | 29 +- .../datamodel/migrations/0011_tjm_week_day.py | 7 +- .../migrations/0012_remove_tjm_day.py | 7 +- .../migrations/0013_auto_20211001_0643.py | 11 +- 
.../datamodel/migrations/0014_count_tjm.py | 7 +- comptages/datamodel/migrations/0015_sector.py | 16 +- .../migrations/0016_alter_sector_geometry.py | 11 +- .../0017_alter_countdetail_id_count.py | 13 +- .../0018_alter_countdetail_id_category.py | 14 +- .../migrations/0019_alter_tjm_count.py | 13 +- .../datamodel/migrations/0020_delete_tjm.py | 5 +- .../migrations/0021_alter_count_tjm.py | 7 +- .../migrations/0022_auto_20211217_0624.py | 105 ++- .../migrations/0023_installation_alias.py | 7 +- .../migrations/0024_category_trash.py | 7 +- .../migrations/0025_auto_20220204_1353.py | 17 +- .../migrations/0026_alter_modelclass_id.py | 11 +- .../datamodel/migrations/0027_default_uuid.py | 23 +- .../datamodel/migrations/0028_municipality.py | 16 +- comptages/datamodel/models.py | 170 +++-- comptages/ics/ics_importer.py | 40 +- comptages/plan/plan_creator.py | 136 ++-- comptages/qgissettingmanager | 2 +- comptages/report/yearly_report_bike.py | 317 ++++---- comptages/test/test_import.py | 31 +- comptages/test/test_report.py | 9 +- comptages/test/test_statistics.py | 201 ++--- comptages/test/utils.py | 4 +- comptages/ui/resources.py | 13 +- docker-compose.yml | 18 +- manage.py | 4 +- pyproject.toml | 42 ++ requirements.txt | 2 +- test_data/dev.qgz | Bin 0 -> 33791 bytes 66 files changed, 2578 insertions(+), 2196 deletions(-) create mode 100644 Dockerfile-test create mode 100644 pyproject.toml create mode 100644 test_data/dev.qgz diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index db12ed82..a8c9dd18 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -1,39 +1,44 @@ -name: Docker +name: Lint, Check, Build, Test + +concurrency: + group: ${{ github.ref }} + cancel-in-progress: true on: push: - branches: - - '*' - - pull_request: branches: - master + pull_request: jobs: - test: - name: Test - runs-on: ubuntu-20.04 - + build_package_lint_check: + name: Build, package, lint, check + runs-on: ubuntu-22.04 steps: - - uses: actions/checkout@v2 
- - - name: Install requirements + - uses: actions/checkout@v4 + - uses: actions/setup-python@v4 + with: + python-version: "3.10" + cache: "pip" + - name: Setup, build, install run: | - pip3 install -r requirements.txt + pip install build + python -m build . + pip install ./dist/comptages-0.1-py3-none-any.whl[check] + - name: Package + run: qgis-plugin-ci package 'test' + - name: Lint + run: black . --check - - name: Install QGIS - run: | - sudo apt update && sudo apt install -y qgis + # - name: Check + # run: pyright . + test: + name: Test + runs-on: ubuntu-22.04 + steps: + - uses: actions/checkout@v4 - name: Start docker stack - run: docker-compose up --build -d - - - name: Wait a little bit - run: sleep 5 - - - name: Running migrations - run: python manage.py migrate - + run: docker compose up db -d - name: Running tests - run: | - python manage.py test comptages.test -v2 + run: docker compose run qgis_tester diff --git a/.gitignore b/.gitignore index 3ff38d28..58153e7b 100644 --- a/.gitignore +++ b/.gitignore @@ -4,3 +4,6 @@ comptages/__pycache__/ .docker/tests/ .docker/tests/screenshot.png .env +.vscode/settings.json +shell.nix +Pipfile \ No newline at end of file diff --git a/Dockerfile-test b/Dockerfile-test new file mode 100644 index 00000000..b8a5039c --- /dev/null +++ b/Dockerfile-test @@ -0,0 +1,5 @@ +FROM opengisch/qgis:3.28.0-jammy +RUN apt update && apt install iputils-ping glibc-tools libpq-dev python3-pip -y +COPY requirements.txt . +RUN pip3 install -r requirements.txt +WORKDIR /OpenComptage \ No newline at end of file diff --git a/comptages/__init__.py b/comptages/__init__.py index 3f769702..ca5dcd5e 100644 --- a/comptages/__init__.py +++ b/comptages/__init__.py @@ -15,8 +15,8 @@ def prepare_django(default_db=None, **additional_settings): # (i.e. 
the command is lauched from the QGIS python console), we # use the one in the plugin settings if not default_db: - from comptages.core.settings import Settings as PluginSettings + plugin_settings = PluginSettings() default_db = { "ENGINE": "django.contrib.gis.db.backends.postgis", @@ -40,12 +40,12 @@ def prepare_django(default_db=None, **additional_settings): additional_settings["SPATIALITE_LIBRARY_PATH"] = SPATIALITE_LIBRARY_PATH_ENV django_settings.configure( - BASE_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), os.pardir), + BASE_DIR=os.path.join(os.path.dirname(os.path.abspath(__file__)), os.pardir), DATABASES={"default": default_db}, - INSTALLED_APPS=('comptages.datamodel.apps.ComptagesConfig',), + INSTALLED_APPS=("comptages.datamodel.apps.ComptagesConfig",), USE_TZ=True, - TIME_ZONE='Europe/Zurich', - SECRET_KEY='09n+dhzh+02+_#$!1+8h-&(s-wbda#0*2mrv@lx*y#&fzlv&l)', + TIME_ZONE="Europe/Zurich", + SECRET_KEY="09n+dhzh+02+_#$!1+8h-&(s-wbda#0*2mrv@lx*y#&fzlv&l)", **additional_settings ) django.setup() @@ -59,4 +59,5 @@ def classFactory(iface): """ prepare_django() from .comptages import Comptages + return Comptages(iface) diff --git a/comptages/chart/chart_dialog.py b/comptages/chart/chart_dialog.py index af7e7c3d..f3b6b7c0 100644 --- a/comptages/chart/chart_dialog.py +++ b/comptages/chart/chart_dialog.py @@ -8,16 +8,15 @@ from qgis.PyQt.QtWidgets import QDockWidget, QListWidgetItem, QTabWidget from qgis.core import QgsMessageLog, Qgis -from comptages.core.utils import get_ui_class, push_warning, push_info +from comptages.core.utils import get_ui_class, push_info from comptages.ui.resources import * from comptages.core import statistics, definitions from comptages.datamodel import models -FORM_CLASS = get_ui_class('chart_dock.ui') +FORM_CLASS = get_ui_class("chart_dock.ui") class ChartDock(QDockWidget, FORM_CLASS): - def __init__(self, iface, layers, parent=None): QDockWidget.__init__(self, parent) self.setupUi(self) @@ -25,19 +24,29 @@ def 
__init__(self, iface, layers, parent=None): self.count = None self.sensor = None - def set_attributes(self, count): + def set_attributes(self, count: models.Count): self.count = count - self.setWindowTitle("Comptage: {}, installation: {}".format(count.id, count.id_installation.name)) + self.setWindowTitle( + "Comptage: {}, installation: {}".format( + count.id, count.id_installation.name + ) + ) # Exit and show message if there are no data to show if not models.CountDetail.objects.filter(id_count=count).exists(): self.hide() - push_info("Installation {}: Il n'y a pas de données à montrer pour " - "le comptage {}".format(count.id_installation.name, count.id)) + push_info( + "Installation {}: Il n'y a pas de données à montrer pour " + "le comptage {}".format(count.id_installation.name, count.id) + ) QgsMessageLog.logMessage( - '{} - Generate chart action : No data for count {}'.format(datetime.now(), count.id), - 'Comptages', Qgis.Info) + "{} - Generate chart action : No data for count {}".format( + datetime.now(), count.id + ), + "Comptages", + Qgis.Info, + ) return self.show() @@ -49,12 +58,12 @@ def set_attributes(self, count): id_count=self.count, ) - self.startDate.setDateTime(qs.earliest('timestamp').timestamp) - self.endDate.setDateTime(qs.latest('timestamp').timestamp) + self.startDate.setDateTime(qs.earliest("timestamp").timestamp) + self.endDate.setDateTime(qs.latest("timestamp").timestamp) self._create_tabs(count) - def _create_tabs(self, count): + def _create_tabs(self, count: models.Count): try: self.tabWidget.currentChanged.disconnect(self.current_tab_changed) except Exception: @@ -64,17 +73,20 @@ def _create_tabs(self, count): self.tabWidget.currentChanged.connect(self.current_tab_changed) # We do by section and not by count because of special cases. 
- sections = models.Section.objects.filter(lane__id_installation__count=count).distinct() + sections = models.Section.objects.filter( + lane__id_installation__count=count + ).distinct() for section in sections: tab = ChartTab() self.tabWidget.addTab(tab, section.id) self._populate_tab(tab, section, count) - def _populate_tab(self, tab, section, count): - + def _populate_tab(self, tab, section: models.Section, count: models.Count): # Check if there is data to be validated approval_process = False - if models.CountDetail.objects.filter(id_count=count, import_status=definitions.IMPORT_STATUS_QUARANTINE).exists(): + if models.CountDetail.objects.filter( + id_count=count, import_status=definitions.IMPORT_STATUS_QUARANTINE + ).exists(): approval_process = True # Remove previous items try: @@ -82,7 +94,10 @@ def _populate_tab(self, tab, section, count): except Exception: pass QgsMessageLog.logMessage( - '{} - Debug_GL: Graphique démarré'.format(datetime.now()), 'Comptages', Qgis.Info) + "{} - Debug_GL: Graphique démarré".format(datetime.now()), + "Comptages", + Qgis.Info, + ) for i in range(tab.chartList.count()): tab.chartList.takeItem(0) @@ -97,19 +112,21 @@ def _populate_tab(self, tab, section, count): tab.buttonValidate.hide() tab.buttonRefuse.hide() - sensor_type = count.id_sensor_type + sensor_type: models.SensorType = count.id_sensor_type lanes = models.Lane.objects.filter(id_section=section) - directions = lanes.values('direction').distinct().values_list('direction', flat=True) + directions = ( + lanes.values("direction").distinct().values_list("direction", flat=True) + ) start = self.startDate.date().toPyDate() end = self.endDate.date().toPyDate() + timedelta(days=1) - if(sensor_type.name == 'Boucle'): + if sensor_type.name == "Boucle": # By lane for i, lane in enumerate(lanes): tab.chartList.addItem( - QListWidgetItem('Par heure, voie {}'.format( - lane.number))) + QListWidgetItem("Par heure, voie {}".format(lane.number)) + ) tab.charts.append( ChartTime( 
count=count, @@ -117,14 +134,15 @@ def _populate_tab(self, tab, section, count): lane=lane, start=start, end=end, - ).get_div()) + ).get_div() + ) else: # By direction for i, direction in enumerate(directions): tab.chartList.addItem( - QListWidgetItem('Par heure, direction {}'.format( - direction))) + QListWidgetItem("Par heure, direction {}".format(direction)) + ) tab.charts.append( ChartTime( count=count, @@ -132,32 +150,35 @@ def _populate_tab(self, tab, section, count): direction=direction, start=start, end=end, - ).get_div()) + ).get_div() + ) - tab.chartList.addItem(QListWidgetItem('Par catégorie')) + tab.chartList.addItem(QListWidgetItem("Par catégorie")) tab.charts.append( ChartCat( count=count, section=section, start=start, end=end, - ).get_div()) + ).get_div() + ) - tab.chartList.addItem(QListWidgetItem('Par vitesse')) + tab.chartList.addItem(QListWidgetItem("Par vitesse")) tab.charts.append( ChartSpeed( count=count, section=section, start=start, end=end, - ).get_div()) + ).get_div() + ) - if(sensor_type.name == 'Boucle'): + if sensor_type.name == "Boucle": # By lane for i, lane in enumerate(lanes): tab.chartList.addItem( - QListWidgetItem('Par TJM, voie {}'.format( - lane.number))) + QListWidgetItem("Par TJM, voie {}".format(lane.number)) + ) tab.charts.append( ChartTjm( count=count, @@ -165,14 +186,15 @@ def _populate_tab(self, tab, section, count): lane=lane, start=start, end=end, - ).get_div()) + ).get_div() + ) else: # By direction for i, direction in enumerate(directions): tab.chartList.addItem( - QListWidgetItem('Par TJM, direction {}'.format( - direction))) + QListWidgetItem("Par TJM, direction {}".format(direction)) + ) tab.charts.append( ChartTjm( count=count, @@ -180,17 +202,18 @@ def _populate_tab(self, tab, section, count): direction=direction, start=start, end=end, - ).get_div()) + ).get_div() + ) - tab.chartList.addItem( - QListWidgetItem('Par TJM total')) + tab.chartList.addItem(QListWidgetItem("Par TJM total")) tab.charts.append( 
ChartTjm( count=count, section=section, start=start, end=end, - ).get_div()) + ).get_div() + ) self.layers.select_and_zoom_on_section_of_count(count.id) if tab.chartList.currentRow() == 0: @@ -199,18 +222,22 @@ def _populate_tab(self, tab, section, count): tab.chartList.setCurrentRow(0) QgsMessageLog.logMessage( - '{} - Debug_GL: Graphique terminé'.format(datetime.now()), 'Comptages', Qgis.Info) + "{} - Debug_GL: Graphique terminé".format(datetime.now()), + "Comptages", + Qgis.Info, + ) def chart_selection_changed(self, row): tab = self.tabWidget.currentWidget() tab.webView.setHtml(tab.charts[row]) - def current_tab_changed(self, index): + def current_tab_changed(self, index: int): tab = self.tabWidget.currentWidget() if tab.chartList.currentRow() == 0: self.chart_selection_changed(0) def set_dates(self): + assert self.count self._create_tabs(self.count) qs = models.CountDetail.objects.filter( @@ -218,13 +245,13 @@ def set_dates(self): ) start = self.startDate.date().toPyDate() - if not start == qs.earliest('timestamp').timestamp.date(): + if not start == qs.earliest("timestamp").timestamp.date(): self.startDate.setStyleSheet("background-color:orange;") else: self.startDate.setStyleSheet("background-color:white;") end = self.endDate.date().toPyDate() - if not end == qs.latest('timestamp').timestamp.date(): + if not end == qs.latest("timestamp").timestamp.date(): self.endDate.setStyleSheet("background-color:orange;") else: self.endDate.setStyleSheet("background-color:white;") @@ -234,18 +261,19 @@ def reset_dates(self): id_count=self.count, ) - self.startDate.setDateTime(qs.earliest('timestamp').timestamp) - self.endDate.setDateTime(qs.latest('timestamp').timestamp) + self.startDate.setDateTime(qs.earliest("timestamp").timestamp) + self.endDate.setDateTime(qs.latest("timestamp").timestamp) self.startDate.setStyleSheet("background-color:white;") self.endDate.setStyleSheet("background-color:white") + assert self.count self._create_tabs(self.count) def 
validate_count(self, section): QgsMessageLog.logMessage( - '{} - Accept data started'.format(datetime.now()), - 'Comptages', Qgis.Info) + "{} - Accept data started".format(datetime.now()), "Comptages", Qgis.Info + ) tab = self.tabWidget.currentWidget() @@ -253,6 +281,7 @@ def validate_count(self, section): # start = self.startDate.date().toPyDate() # end = self.endDate.date().toPyDate() + timedelta(days=1) + assert self.count start = self.count.start_process_date end = self.count.end_process_date + timedelta(days=1) @@ -263,8 +292,7 @@ def validate_count(self, section): timestamp__lt=end, ) - qs.update( - import_status=definitions.IMPORT_STATUS_DEFINITIVE) + qs.update(import_status=definitions.IMPORT_STATUS_DEFINITIVE) # Delete not imported data of the count models.CountDetail.objects.filter( @@ -278,21 +306,22 @@ def validate_count(self, section): self.count, section=None, status=definitions.IMPORT_STATUS_DEFINITIVE, - exclude_trash=True) + exclude_trash=True, + ) self.count.tjm = tjm - self.count.save(update_fields=['tjm']) + self.count.save(update_fields=["tjm"]) self.show_next_quarantined_chart() QgsMessageLog.logMessage( - '{} - Accept data ended'.format(datetime.now()), - 'Comptages', Qgis.Info) + "{} - Accept data ended".format(datetime.now()), "Comptages", Qgis.Info + ) def refuse_count(self, section): QgsMessageLog.logMessage( - '{} - Reject data started'.format(datetime.now()), - 'Comptages', Qgis.Info) + "{} - Reject data started".format(datetime.now()), "Comptages", Qgis.Info + ) tab = self.tabWidget.currentWidget() @@ -310,13 +339,15 @@ def refuse_count(self, section): self.show_next_quarantined_chart() QgsMessageLog.logMessage( - '{} - Reject data ended'.format(datetime.now()), - 'Comptages', Qgis.Info) + "{} - Reject data ended".format(datetime.now()), "Comptages", Qgis.Info + ) def show_next_quarantined_chart(self): QgsMessageLog.logMessage( - '{} - Generate validation chart started'.format(datetime.now()), - 'Comptages', Qgis.Info) + "{} - Generate 
validation chart started".format(datetime.now()), + "Comptages", + Qgis.Info, + ) quarantined_counts = models.Count.objects.filter( countdetail__import_status=definitions.IMPORT_STATUS_QUARANTINE @@ -325,29 +356,35 @@ def show_next_quarantined_chart(self): self.hide() push_info("Il n'y a pas de données à valider") QgsMessageLog.logMessage( - '{} - Generate validation chart ended : No data to validate'.format(datetime.now()), - 'Comptages', Qgis.Info) + "{} - Generate validation chart ended : No data to validate".format( + datetime.now() + ), + "Comptages", + Qgis.Info, + ) return self.set_attributes(quarantined_counts[0]) self.show() QgsMessageLog.logMessage( - '{} - Generate validation chart ended'.format(datetime.now()), - 'Comptages', Qgis.Info) + "{} - Generate validation chart ended".format(datetime.now()), + "Comptages", + Qgis.Info, + ) -TAB_CLASS = get_ui_class('chart_tab.ui') +TAB_CLASS = get_ui_class("chart_tab.ui") -class ChartTab(QTabWidget, TAB_CLASS): +class ChartTab(QTabWidget, TAB_CLASS): def __init__(self, parent=None): QTabWidget.__init__(self, parent) self.setupUi(self) self.charts = [] -class Chart(): +class Chart: def __init__(self, count, section, lane=None, direction=None, start=None, end=None): self.count = count self.section = section @@ -359,9 +396,9 @@ def __init__(self, count, section, lane=None, direction=None, start=None, end=No def get_div(self): pass + class ChartTjm(Chart): def get_div(self): - df, mean = statistics.get_day_data( self.count, self.section, @@ -369,27 +406,27 @@ def get_div(self): self.direction, start=self.start, end=self.end, - exclude_trash=True + exclude_trash=True, ) if df.empty: return - labels = {'tj': 'Véhicules', 'date': 'Jour', 'import_status': 'État'} + labels = {"tj": "Véhicules", "date": "Jour", "import_status": "État"} fig = px.bar( df, - x='date', - y='tj', + x="date", + y="tj", title="Véhicules par jour", labels=labels, - color='import_status', + color="import_status", ) fig.update_layout( - xaxis = 
dict( - tickmode = 'auto', - tickangle = -45, + xaxis=dict( + tickmode="auto", + tickangle=-45, ) ) @@ -400,13 +437,11 @@ def get_div(self): line_color="red", annotation_text=int(mean), ) - return plotly.offline.plot(fig, output_type='div') + return plotly.offline.plot(fig, output_type="div") class ChartTime(Chart): - def get_div(self): - df = statistics.get_time_data( self.count, self.section, @@ -419,39 +454,43 @@ def get_div(self): if df.empty: return - title = 'Véhicules par heure' + title = "Véhicules par heure" if self.lane is not None: - title = 'Véhicules par heure, voie {}'.format(self.lane.number) + title = "Véhicules par heure, voie {}".format(self.lane.number) elif self.direction is not None: - title = 'Véhicules par heure, direction {}'.format(self.direction) + title = "Véhicules par heure, direction {}".format(self.direction) - labels = {'thm': 'Véhicules', 'date': 'Jour', 'hour': 'Heure', 'import_status': 'État'} + labels = { + "thm": "Véhicules", + "date": "Jour", + "hour": "Heure", + "import_status": "État", + } fig = px.line( df, - x='hour', - y='thm', - color='date', - render_mode='svg', + x="hour", + y="thm", + color="date", + render_mode="svg", labels=labels, - line_dash='import_status', - title=title) + line_dash="import_status", + title=title, + ) fig.update_layout( - xaxis = dict( - tickmode = 'array', - tickvals = [x for x in range(24)], - ticktext = [f"{x}h-{x+1}h" for x in range(24)], - tickangle = -45, + xaxis=dict( + tickmode="array", + tickvals=[x for x in range(24)], + ticktext=[f"{x}h-{x+1}h" for x in range(24)], + tickangle=-45, ) ) - return plotly.offline.plot(fig, output_type='div') + return plotly.offline.plot(fig, output_type="div") class ChartCat(Chart): - def get_div(self): - df_existing = statistics.get_category_data( self.count, self.section, @@ -480,42 +519,46 @@ def get_div(self): specs = [[]] for i in range(num_of_charts): - specs[0].append({'type': 'domain'}) + specs[0].append({"type": "domain"}) fig = 
make_subplots(rows=1, cols=num_of_charts, specs=specs) if not df_existing.empty: fig.add_trace( go.Pie( - values = df_existing['value'], - labels = df_existing['cat_name_code'], - textposition='inside', - textinfo='label+percent', + values=df_existing["value"], + labels=df_existing["cat_name_code"], + textposition="inside", + textinfo="label+percent", title="Existant", - name="Existant"), - 1, 1) + name="Existant", + ), + 1, + 1, + ) if not df_new.empty: fig.add_trace( go.Pie( - values= df_new['value'], - labels = df_new['cat_name_code'], - textposition='inside', - textinfo='label+percent', - title='Nouveau', - name="Nouveau"), - 1, num_of_charts) + values=df_new["value"], + labels=df_new["cat_name_code"], + textposition="inside", + textinfo="label+percent", + title="Nouveau", + name="Nouveau", + ), + 1, + num_of_charts, + ) fig.update_traces(hoverinfo="label+percent+name+value") fig.update_layout(title_text="Véhicules groupés par catégorie") - return plotly.offline.plot(fig, output_type='div') + return plotly.offline.plot(fig, output_type="div") class ChartSpeed(Chart): - def get_div(self): - df = statistics.get_speed_data( self.count, self.section, @@ -526,17 +569,17 @@ def get_div(self): if df.empty: return - labels = {'times': 'Véhicules', 'bins': 'Vitesse', 'import_status': 'État'} + labels = {"times": "Véhicules", "bins": "Vitesse", "import_status": "État"} fig = px.bar( df, - x='speed', - y='times', + x="speed", + y="times", title="Véhicules groupés par vitesse", - text='times', + text="times", labels=labels, - barmode='group', - color='import_status', + barmode="group", + color="import_status", ) - return plotly.offline.plot(fig, output_type='div') + return plotly.offline.plot(fig, output_type="div") diff --git a/comptages/comptages.py b/comptages/comptages.py index ca8114cd..06fc2e47 100644 --- a/comptages/comptages.py +++ b/comptages/comptages.py @@ -7,8 +7,12 @@ from qgis.PyQt.QtWidgets import QAction, QFileDialog, QMessageBox from qgis.PyQt.QtCore 
import QObject, Qt, QDateTime from qgis.core import ( - QgsMessageLog, Qgis, QgsApplication, QgsExpressionContextUtils, - QgsProject) + QgsMessageLog, + Qgis, + QgsApplication, + QgsExpressionContextUtils, + QgsProject, +) from qgis.utils import qgsfunction, plugins from comptages.core.settings import Settings, SettingsDialog @@ -29,7 +33,6 @@ class Comptages(QObject): - def __init__(self, iface): QObject.__init__(self) @@ -50,85 +53,68 @@ def __init__(self, iface): def initGui(self): self.connect_db_action = QAction( - QIcon(':/plugins/Comptages/images/power.png'), - 'Connection DB', - self.iface.mainWindow() + QIcon(":/plugins/Comptages/images/power.png"), + "Connection DB", + self.iface.mainWindow(), ) self.create_new_action = QAction( - QIcon(':/plugins/Comptages/images/measure.png'), - 'Créer un nouveau comptage', - None + QIcon(":/plugins/Comptages/images/measure.png"), + "Créer un nouveau comptage", + None, ) self.select_edit_action = QAction( - QIcon(':/plugins/Comptages/images/select_edit.png'), - 'Modifier comptage', - None + QIcon(":/plugins/Comptages/images/select_edit.png"), + "Modifier comptage", + None, ) self.import_files_action = QAction( - QIcon(':/plugins/Comptages/images/import.png'), - 'Importation', - None + QIcon(":/plugins/Comptages/images/import.png"), "Importation", None ) self.validate_imported_files = QAction( - QIcon(':/plugins/Comptages/images/validate.png'), - 'Validation', - None + QIcon(":/plugins/Comptages/images/validate.png"), "Validation", None ) self.filter_action = QAction( - QIcon(':/plugins/Comptages/images/filter.png'), - 'Filtrer', - None + QIcon(":/plugins/Comptages/images/filter.png"), "Filtrer", None ) self.yearly_report_action = QAction( - QIcon(':/plugins/Comptages/images/filled_file.png'), - 'Rapport annuel', - None + QIcon(":/plugins/Comptages/images/filled_file.png"), "Rapport annuel", None ) self.import_ics_action = QAction( - QIcon(':/plugins/Comptages/images/calendar.png'), - 'Importer fichier ics', - None + 
QIcon(":/plugins/Comptages/images/calendar.png"), + "Importer fichier ics", + None, ) self.settings_action = QAction( - QIcon(':/plugins/Comptages/images/settings.png'), - 'Réglages', - None + QIcon(":/plugins/Comptages/images/settings.png"), "Réglages", None ) - self.connect_db_action.triggered.connect( - self.do_connect_db_action) + self.connect_db_action.triggered.connect(self.do_connect_db_action) - self.create_new_action.triggered.connect( - self.do_create_new_action) + self.create_new_action.triggered.connect(self.do_create_new_action) - self.select_edit_action.triggered.connect( - self.do_select_edit_action) + self.select_edit_action.triggered.connect(self.do_select_edit_action) - self.import_files_action.triggered.connect( - self.do_import_files_action) + self.import_files_action.triggered.connect(self.do_import_files_action) self.validate_imported_files.triggered.connect( - self.do_validate_imported_files_action) + self.do_validate_imported_files_action + ) - self.filter_action.triggered.connect( - self.do_filter_action) + self.filter_action.triggered.connect(self.do_filter_action) - self.yearly_report_action.triggered.connect( - self.do_yearly_report_action) + self.yearly_report_action.triggered.connect(self.do_yearly_report_action) - self.import_ics_action.triggered.connect( - self.do_import_ics_action) + self.import_ics_action.triggered.connect(self.do_import_ics_action) - self.settings_action.triggered.connect( - self.do_settings_action) + self.settings_action.triggered.connect(self.do_settings_action) self.create_new_action.setEnabled(False) self.select_edit_action.setEnabled(False) @@ -138,19 +124,19 @@ def initGui(self): self.yearly_report_action.setEnabled(False) self.import_ics_action.setEnabled(False) - self.iface.addPluginToMenu('Comptages', self.connect_db_action) - self.iface.addPluginToMenu('Comptages', self.create_new_action) - self.iface.addPluginToMenu('Comptages', self.select_edit_action) - self.iface.addPluginToMenu('Comptages', 
self.import_files_action) - self.iface.addPluginToMenu('Comptages', self.validate_imported_files) - self.iface.addPluginToMenu('Comptages', self.filter_action) - self.iface.addPluginToMenu('Comptages', self.yearly_report_action) - self.iface.addPluginToMenu('Comptages', self.import_ics_action) - self.iface.addPluginToMenu('Comptages', self.settings_action) + self.iface.addPluginToMenu("Comptages", self.connect_db_action) + self.iface.addPluginToMenu("Comptages", self.create_new_action) + self.iface.addPluginToMenu("Comptages", self.select_edit_action) + self.iface.addPluginToMenu("Comptages", self.import_files_action) + self.iface.addPluginToMenu("Comptages", self.validate_imported_files) + self.iface.addPluginToMenu("Comptages", self.filter_action) + self.iface.addPluginToMenu("Comptages", self.yearly_report_action) + self.iface.addPluginToMenu("Comptages", self.import_ics_action) + self.iface.addPluginToMenu("Comptages", self.settings_action) - self.toolbar = self.iface.addToolBar('Comptages') - self.toolbar.setObjectName('Comptages') - self.toolbar.setToolTip('Comptages toolbar') + self.toolbar = self.iface.addToolBar("Comptages") + self.toolbar.setObjectName("Comptages") + self.toolbar.setToolTip("Comptages toolbar") self.toolbar.addAction(self.connect_db_action) self.toolbar.addSeparator() @@ -165,13 +151,13 @@ def initGui(self): self.toolbar.addAction(self.settings_action) def unload(self): - self.iface.removePluginMenu('Comptages', self.connect_db_action) - self.iface.removePluginMenu('Comptages', self.create_new_action) - self.iface.removePluginMenu('Comptages', self.select_edit_action) - self.iface.removePluginMenu('Comptages', self.filter_action) - self.iface.removePluginMenu('Comptages', self.yearly_report_action) - self.iface.removePluginMenu('Comptages', self.import_ics_action) - self.iface.removePluginMenu('Comptages', self.settings_action) + self.iface.removePluginMenu("Comptages", self.connect_db_action) + self.iface.removePluginMenu("Comptages", 
self.create_new_action) + self.iface.removePluginMenu("Comptages", self.select_edit_action) + self.iface.removePluginMenu("Comptages", self.filter_action) + self.iface.removePluginMenu("Comptages", self.yearly_report_action) + self.iface.removePluginMenu("Comptages", self.import_ics_action) + self.iface.removePluginMenu("Comptages", self.settings_action) del self.connect_db_action del self.create_new_action @@ -189,31 +175,34 @@ def do_connect_db_action(self): def do_create_new_action(self): if self.tm.countActiveTasks() > 0: - push_info(("Veuillez patienter jusqu'à ce que l'importation " - "soit terminée.")) + push_info( + ("Veuillez patienter jusqu'à ce que l'importation " "soit terminée.") + ) return self.layers.create_count() def do_select_edit_action(self): if self.tm.countActiveTasks() > 0: - push_info(("Veuillez patienter jusqu'à ce que l'importation " - "soit terminée.")) + push_info( + ("Veuillez patienter jusqu'à ce que l'importation " "soit terminée.") + ) return self.layers.edit_count() def do_import_files_action(self): if self.tm.countActiveTasks() > 0: - push_info(("Veuillez patienter jusqu'à ce que l'importation " - "soit terminée.")) + push_info( + ("Veuillez patienter jusqu'à ce que l'importation " "soit terminée.") + ) return file_dialog = QFileDialog() - title = 'Importer' - path = self.settings.value('data_import_directory') + title = "Importer" + path = self.settings.value("data_import_directory") files = QFileDialog.getOpenFileNames( - file_dialog, title, path, - "Data file (*.A?? *.aV? *.I?? *.V?? *.txt)")[0] + file_dialog, title, path, "Data file (*.A?? *.aV? *.I?? *.V?? 
*.txt)" + )[0] - self.tm.allTasksFinished.connect(partial(self.all_tasks_finished, 'import')) + self.tm.allTasksFinished.connect(partial(self.all_tasks_finished, "import")) tasks = [] for file_path in files: @@ -224,19 +213,21 @@ def do_import_files_action(self): def import_file(self, file_path, count_id=None): QgsMessageLog.logMessage( - '{} - Prepare import file {} started'.format( - datetime.now(), os.path.basename(file_path)), - 'Comptages', Qgis.Info) + "{} - Prepare import file {} started".format( + datetime.now(), os.path.basename(file_path) + ), + "Comptages", + Qgis.Info, + ) # Manage binary files - with open(file_path, 'rb') as fd: + with open(file_path, "rb") as fd: file_head = fd.read(24) - if file_head == b'Golden River Traffic Ltd': # is a binary file - formatter = self.layers.get_formatter_name('GoldenRiver') + if file_head == b"Golden River Traffic Ltd": # is a binary file + formatter = self.layers.get_formatter_name("GoldenRiver") file_path_formatted = "{}_for".format(file_path) - os.system("{} {} {}".format( - formatter, file_path, file_path_formatted)) + os.system("{} {} {}".format(formatter, file_path, file_path_formatted)) file_path = file_path_formatted if count_id: @@ -247,46 +238,65 @@ def import_file(self, file_path, count_id=None): if not count: QgsMessageLog.logMessage( "Impossible de trouver le comptage associé {}".format( - file_path,), 'Comptages', Qgis.Critical) + file_path, + ), + "Comptages", + Qgis.Critical, + ) return QgsMessageLog.logMessage( - '{} - Prepare import file {}'.format( - datetime.now(), os.path.basename(file_path)), - 'Comptages', Qgis.Info) + "{} - Prepare import file {}".format( + datetime.now(), os.path.basename(file_path) + ), + "Comptages", + Qgis.Info, + ) QgsMessageLog.logMessage( - '{} - Import file {} started'.format( - datetime.now(), os.path.basename(file_path)), - 'Comptages', Qgis.Info) + "{} - Import file {} started".format( + datetime.now(), os.path.basename(file_path) + ), + "Comptages", + Qgis.Info, 
+ ) task = importer_task.ImporterTask(file_path, count) return task - def all_tasks_finished(self, task='import'): + def all_tasks_finished(self, task="import"): # Check if actually all tasks are finished because apparently it doesn't # work the same on all systems if not self.tm.countActiveTasks() == 0: QgsMessageLog.logMessage( - '{} - all_tasks_finished signal raised, but active tasks still exist, ignoring it'.format(datetime.now()), - 'Comptages', Qgis.Warning) + "{} - all_tasks_finished signal raised, but active tasks still exist, ignoring it".format( + datetime.now() + ), + "Comptages", + Qgis.Warning, + ) return self.tm.allTasksFinished.disconnect() - push_info(('Toutes les tâches sont terminées. Consultez le journal ' - 'pour plus de détails.')) + push_info( + ( + "Toutes les tâches sont terminées. Consultez le journal " + "pour plus de détails." + ) + ) QgsMessageLog.logMessage( - '{} - All tasks ended'.format(datetime.now()), - 'Comptages', Qgis.Info) + "{} - All tasks ended".format(datetime.now()), "Comptages", Qgis.Info + ) - if task == 'import': + if task == "import": self.chart_dock.show_next_quarantined_chart() def do_validate_imported_files_action(self): if self.tm.countActiveTasks() > 0: - push_info(("Veuillez patienter jusqu'à ce que l'importation " - "soit terminée.")) + push_info( + ("Veuillez patienter jusqu'à ce que l'importation " "soit terminée.") + ) return self.chart_dock.show_next_quarantined_chart() @@ -326,8 +336,8 @@ def do_filter_action(self): self.filter_sector = dlg.sector.currentIndex() self.layers.apply_filter( - dlg.start_date.dateTime().toString('yyyy-MM-dd'), - dlg.end_date.dateTime().toString('yyyy-MM-dd'), + dlg.start_date.dateTime().toString("yyyy-MM-dd"), + dlg.end_date.dateTime().toString("yyyy-MM-dd"), dlg.installation.currentIndex(), dlg.sensor.currentIndex(), [self.filter_tjm[0], self.filter_tjm[1]], @@ -335,26 +345,38 @@ def do_filter_action(self): dlg.sector.currentData(), ) - if (not dlg.start_date.dateTime()) and 
(not dlg.end_date.dateTime()) and (dlg.installation.currentIndex() == 0) and \ - (dlg.sensor.currentIndex() == 0) and (dlg.tjm.lowerValue() == 0) and (dlg.tjm.upperValue() == 30000) and \ - (dlg.axe.currentText() == 'Tous') and (dlg.sector.currentText() == 'Tous'): + if ( + (not dlg.start_date.dateTime()) + and (not dlg.end_date.dateTime()) + and (dlg.installation.currentIndex() == 0) + and (dlg.sensor.currentIndex() == 0) + and (dlg.tjm.lowerValue() == 0) + and (dlg.tjm.upperValue() == 30000) + and (dlg.axe.currentText() == "Tous") + and (dlg.sector.currentText() == "Tous") + ): self.filter_action.setIcon( - QIcon(':/plugins/Comptages/images/filter.png')) + QIcon(":/plugins/Comptages/images/filter.png") + ) else: self.filter_action.setIcon( - QIcon(':/plugins/Comptages/images/filter_active.png')) + QIcon(":/plugins/Comptages/images/filter_active.png") + ) def do_yearly_report_action(self): QgsMessageLog.logMessage( - '{} - Generate yearly report action started'.format(datetime.now()), - 'Comptages', Qgis.Info) + "{} - Generate yearly report action started".format(datetime.now()), + "Comptages", + Qgis.Info, + ) if self.tm.countActiveTasks() > 0: - push_info(("Veuillez patienter jusqu'à ce que l'importation " - "soit terminée.")) + push_info( + ("Veuillez patienter jusqu'à ce que l'importation " "soit terminée.") + ) return - layer = self.layers.layers['section'] + layer = self.layers.layers["section"] selected_count = layer.selectedFeatureCount() if selected_count == 0: @@ -366,7 +388,7 @@ def do_yearly_report_action(self): else: selected_feature = next(layer.getSelectedFeatures()) - section_id = selected_feature.attribute('id') + section_id = selected_feature.attribute("id") classes = self.layers.get_classes_of_section(section_id) dlg = YearlyReportDialog(self.iface) @@ -378,25 +400,34 @@ def do_yearly_report_action(self): clazz = dlg.classi.currentText() file_dialog = QFileDialog() - title = 'Exporter un rapport' - path = 
self.settings.value('report_export_directory') - file_path = QFileDialog.getExistingDirectory( - file_dialog, title, path) + title = "Exporter un rapport" + path = self.settings.value("report_export_directory") + file_path = QFileDialog.getExistingDirectory(file_dialog, title, path) if not file_path: QgsMessageLog.logMessage( - '{} - Generate yearly report action ended: No file_path given'.format(datetime.now()), - 'Comptages', Qgis.Info) + "{} - Generate yearly report action ended: No file_path given".format( + datetime.now() + ), + "Comptages", + Qgis.Info, + ) return QgsMessageLog.logMessage( - '{} - Generate yearly report action can really begin now for count {} with file_path: {}'.format( - datetime.now(), selected_count, file_path), 'Comptages', Qgis.Info) + "{} - Generate yearly report action can really begin now for count {} with file_path: {}".format( + datetime.now(), selected_count, file_path + ), + "Comptages", + Qgis.Info, + ) if clazz.startswith("SPCH-MD"): yrb = YearlyReportBike(file_path, year, section_id) yrb.run() else: - self.tm.allTasksFinished.connect(partial(self.all_tasks_finished, report)) + self.tm.allTasksFinished.connect( + partial(self.all_tasks_finished, report) + ) # FIXME `report` is a mystery. 
# TODO: consider the chosed class too self.tm.addTask( @@ -404,8 +435,9 @@ def do_yearly_report_action(self): file_path=file_path, template="yearly", year=year, - section_id=section_id - )) + section_id=section_id, + ) + ) # TODO: check if there are comptages for this section and year def do_import_ics_action(self): @@ -418,131 +450,156 @@ def do_export_configuration_action(self, count_id): config_creator = ConfigCreatorCmd(self.layers, count_id) config_creator.set_section_commands() - installation_name = self.layers.get_installation_name_of_count( - count_id) + installation_name = self.layers.get_installation_name_of_count(count_id) file_dialog = QFileDialog() - file_dialog.setDefaultSuffix('*.CMD') - title = 'Exporter la configuration' + file_dialog.setDefaultSuffix("*.CMD") + title = "Exporter la configuration" path = os.path.join( - self.settings.value('config_export_directory'), - "{}.CMD".format(installation_name)) + self.settings.value("config_export_directory"), + "{}.CMD".format(installation_name), + ) file = QFileDialog.getSaveFileName( - file_dialog, title, path, "Config file (*.CMD)")[0] + file_dialog, title, path, "Config file (*.CMD)" + )[0] if not file: return config_creator.write_file(file) - push_info('Written config file {}'.format(file)) + push_info("Written config file {}".format(file)) def do_import_single_file_action(self, count_id): file_dialog = QFileDialog() - title = 'Importation' - path = self.settings.value('data_import_directory') + title = "Importation" + path = self.settings.value("data_import_directory") file_path = QFileDialog.getOpenFileName( - file_dialog, title, path, - "Data file (*.A?? *.aV? *.I?? *.V?? *.txt)")[0] + file_dialog, title, path, "Data file (*.A?? *.aV? *.I?? *.V?? 
*.txt)" + )[0] if not file_path: return - self.tm.allTasksFinished.connect(partial(self.all_tasks_finished, 'import')) + self.tm.allTasksFinished.connect(partial(self.all_tasks_finished, "import")) self.tm.addTask(self.import_file(file_path, count_id)) def do_generate_report_action(self, count_id): QgsMessageLog.logMessage( - '{} - Generate report action started'.format(datetime.now()), - 'Comptages', Qgis.Info) + "{} - Generate report action started".format(datetime.now()), + "Comptages", + Qgis.Info, + ) count = models.Count.objects.get(id=count_id) if self.tm.countActiveTasks() > 0: - push_info(("Veuillez patienter jusqu'à ce que l'importation " - "soit terminée.")) + push_info( + ("Veuillez patienter jusqu'à ce que l'importation " "soit terminée.") + ) return # Show message if there are no data to process if not models.CountDetail.objects.filter(id_count=count).exists(): - push_info("Installation {}: Il n'y a pas de données à traiter pour " - "le comptage {}".format( - count.id_installation.name, - count.id)) + push_info( + "Installation {}: Il n'y a pas de données à traiter pour " + "le comptage {}".format(count.id_installation.name, count.id) + ) QgsMessageLog.logMessage( - '{} - Generate report action ended: No data for count {}'.format(datetime.now(), count.id), - 'Comptages', Qgis.Info) + "{} - Generate report action ended: No data for count {}".format( + datetime.now(), count.id + ), + "Comptages", + Qgis.Info, + ) return file_dialog = QFileDialog() - title = 'Exporter un rapport' - path = self.settings.value('report_export_directory') - file_path = QFileDialog.getExistingDirectory( - file_dialog, title, path) + title = "Exporter un rapport" + path = self.settings.value("report_export_directory") + file_path = QFileDialog.getExistingDirectory(file_dialog, title, path) if not file_path: QgsMessageLog.logMessage( - '{} - Generate report action ended: No file_path given'.format(datetime.now()), - 'Comptages', Qgis.Info) + "{} - Generate report action ended: 
No file_path given".format( + datetime.now() + ), + "Comptages", + Qgis.Info, + ) return QgsMessageLog.logMessage( - '{} - Generate report action can really begin now for count {} with file_path: {}'.format( - datetime.now(), count.id, file_path), 'Comptages', Qgis.Info) + "{} - Generate report action can really begin now for count {} with file_path: {}".format( + datetime.now(), count.id, file_path + ), + "Comptages", + Qgis.Info, + ) - self.tm.allTasksFinished.connect(partial(self.all_tasks_finished, 'report')) + self.tm.allTasksFinished.connect(partial(self.all_tasks_finished, "report")) self.tm.addTask( report_task.ReportTask( file_path=file_path, count=count, - )) + ) + ) def do_export_plan_action(self, count_id): count = models.Count.objects.get(id=count_id) plan_creator = PlanCreator() file_dialog = QFileDialog() - file_dialog.setDefaultSuffix('*.PDF') - title = 'Exporter plan de pose' + file_dialog.setDefaultSuffix("*.PDF") + title = "Exporter plan de pose" path = os.path.join( - self.settings.value('config_export_directory'), - "{}.pdf".format("plan_de_pose")) + self.settings.value("config_export_directory"), + "{}.pdf".format("plan_de_pose"), + ) file = QFileDialog.getSaveFileName( - file_dialog, title, path, "Config file (*.PDF)")[0] + file_dialog, title, path, "Config file (*.PDF)" + )[0] if not file: return # Highlight the current sections and installation in the layout previous_highlightes_sections = self.layers.highlighted_sections - self.layers.highlighted_sections = \ - self.layers.get_section_ids_of_count(count_id) + self.layers.highlighted_sections = self.layers.get_section_ids_of_count( + count_id + ) QgsExpressionContextUtils.setProjectVariable( - QgsProject.instance(), 'highlighted_installation', - self.layers.get_installation_name_of_count(count_id)) + QgsProject.instance(), + "highlighted_installation", + self.layers.get_installation_name_of_count(count_id), + ) plan_creator.export_pdf(count, file) self.layers.highlighted_sections = 
previous_highlightes_sections QgsExpressionContextUtils.setProjectVariable( - QgsProject.instance(), 'highlighted_installation', - '') - self.layers.layers['section'].triggerRepaint() + QgsProject.instance(), "highlighted_installation", "" + ) + self.layers.layers["section"].triggerRepaint() def do_generate_chart_action(self, count_id): QgsMessageLog.logMessage( - '{} - Generate chart action started'.format(datetime.now()), - 'Comptages', Qgis.Info) + "{} - Generate chart action started".format(datetime.now()), + "Comptages", + Qgis.Info, + ) if self.tm.countActiveTasks() > 0: - push_info(("Veuillez patienter jusqu'à ce que l'importation " - "soit terminée.")) + push_info( + ("Veuillez patienter jusqu'à ce que l'importation " "soit terminée.") + ) return count = models.Count.objects.get(id=count_id) self.chart_dock.set_attributes(count) QgsMessageLog.logMessage( - '{} - Generate chart action ended'.format(datetime.now()), - 'Comptages', Qgis.Info) - + "{} - Generate chart action ended".format(datetime.now()), + "Comptages", + Qgis.Info, + ) def do_delete_data_action(self, count_id): dlg = DeleteDialog(self.iface) @@ -604,15 +661,14 @@ def is_highlighted(feature, parent): count""" # Call the method of the current instance of the plugin - return plugins['comptages'].is_section_highlighted( - feature.attribute('id')) + return plugins["comptages"].is_section_highlighted(feature.attribute("id")) @qgsfunction(args="auto", group="Comptages") def check_dates(feature, parent): """Used by count layer to show if a count was during a special period""" - return plugins['comptages'].layers.check_dates( - feature.attribute('start_process_date'), - feature.attribute('end_process_date') + return plugins["comptages"].layers.check_dates( + feature.attribute("start_process_date"), + feature.attribute("end_process_date"), ) diff --git a/comptages/config/config_creator.py b/comptages/config/config_creator.py index e457859e..7596b84e 100644 --- a/comptages/config/config_creator.py +++ 
b/comptages/config/config_creator.py @@ -2,7 +2,6 @@ class ConfigCreator(metaclass=abc.ABCMeta): - def __init__(self, layers, count_id): self.layers = layers self.count_id = count_id @@ -12,37 +11,34 @@ def write_file(self, file): class ConfigCreatorCmd(ConfigCreator): - commands = dict() def set_section_commands(self): sections = self.layers.get_sections_of_count(self.count_id) self.set_command( - 'SITE', - self.layers.get_installation_name_of_count(self.count_id)) - self.set_command('LOCATION', sections[0].attribute('name')) - self.set_command( - 'FILENAME', - self.layers.get_installation_name_of_count(self.count_id)) + "SITE", self.layers.get_installation_name_of_count(self.count_id) + ) + self.set_command("LOCATION", sections[0].attribute("name")) self.set_command( - 'CLASS', - self.layers.get_class_name_of_count(self.count_id)) - self.set_command('LPLENS', '200') + "FILENAME", self.layers.get_installation_name_of_count(self.count_id) + ) + self.set_command("CLASS", self.layers.get_class_name_of_count(self.count_id)) + self.set_command("LPLENS", "200") lanes = self.layers.get_lanes_of_count(self.count_id) sensor_type = self.layers.get_sensor_type_of_count(self.count_id) - channels_str = '' - sensors_str = '' + channels_str = "" + sensors_str = "" carriageway_nr = 0 sensor_length = None for i, lane in enumerate(lanes): - channels_str += '{} '.format(i+1) - if sensor_type.attribute('name') == 'Boucle': - sensors_str += '{} '.format('LL') + channels_str += "{} ".format(i + 1) + if sensor_type.attribute("name") == "Boucle": + sensors_str += "{} ".format("LL") else: - sensors_str += '{} '.format('TT') + sensors_str += "{} ".format("TT") if self.layers.check_sensor_of_lane(lane.id()): carriageway_nr += 1 @@ -51,17 +47,18 @@ def set_section_commands(self): if length: sensor_length = length - self.set_command('CHANNELS', channels_str) - self.set_command('SENSORS', sensors_str) + self.set_command("CHANNELS", channels_str) + self.set_command("SENSORS", sensors_str) 
self.set_command( - 'CARRIAGEWAY', - ' '.join('1' * carriageway_nr) + ' ' + ' '.join('0' * (8 - carriageway_nr))) + "CARRIAGEWAY", + " ".join("1" * carriageway_nr) + " " + " ".join("0" * (8 - carriageway_nr)), + ) if sensor_length and sensor_length < 6: - self.set_command('LPSEPS', '300') + self.set_command("LPSEPS", "300") else: - self.set_command('LPSEPS', '500') + self.set_command("LPSEPS", "500") def set_command(self, command, value): self.commands[command] = value @@ -70,7 +67,7 @@ def set_predefined_config(self): return self.layers.get_predefined_config_from_count(self.count_id) def write_file(self, file): - with open(file, 'w') as f: - f.write('{}\n'.format(self.set_predefined_config())) + with open(file, "w") as f: + f.write("{}\n".format(self.set_predefined_config())) for command in self.commands.keys(): - f.write('{} = {}\n'.format(command, self.commands[command])) + f.write("{} = {}\n".format(command, self.commands[command])) diff --git a/comptages/core/bulk_create_manager.py b/comptages/core/bulk_create_manager.py index dfe9dd35..65ccc6eb 100644 --- a/comptages/core/bulk_create_manager.py +++ b/comptages/core/bulk_create_manager.py @@ -2,7 +2,7 @@ from django.apps import apps -class BulkCreateManager(object): +class BulkCreateManager: """ This helper class keeps track of ORM objects to be created for multiple model classes, and automatically creates those objects with `bulk_create` @@ -12,7 +12,7 @@ class BulkCreateManager(object): call `done()` to ensure the final set of objects is created for all models. 
""" - def __init__(self, chunk_size=100): + def __init__(self, chunk_size: int = 100): self._create_queues = defaultdict(list) self.chunk_size = chunk_size diff --git a/comptages/core/definitions.py b/comptages/core/definitions.py index 8beeff23..93298a83 100644 --- a/comptages/core/definitions.py +++ b/comptages/core/definitions.py @@ -4,157 +4,157 @@ # Definitions of the layers LAYER_DEFINITIONS = { - 'installation': { - 'table': 'installation', - 'display_name': 'installation', - 'geometry': 'geometry', - 'sql': '', - 'id': 'id', - 'epsg': 'EPSG:2056', - 'legend': True + "installation": { + "table": "installation", + "display_name": "installation", + "geometry": "geometry", + "sql": "", + "id": "id", + "epsg": "EPSG:2056", + "legend": True, }, - 'sensor': { - 'table': 'sensor', - 'display_name': 'capteur', - 'geometry': 'geometry', - 'sql': '', - 'id': 'id', - 'epsg': 'EPSG:2056', - 'legend': True + "sensor": { + "table": "sensor", + "display_name": "capteur", + "geometry": "geometry", + "sql": "", + "id": "id", + "epsg": "EPSG:2056", + "legend": True, }, - 'section': { - 'table': 'section', - 'display_name': 'troncon', - 'geometry': 'geometry', - 'sql': '', - 'id': 'id', - 'epsg': 'EPSG:2056', - 'legend': True + "section": { + "table": "section", + "display_name": "troncon", + "geometry": "geometry", + "sql": "", + "id": "id", + "epsg": "EPSG:2056", + "legend": True, }, - 'municipality': { - 'table': 'municipality', - 'display_name': 'commune', - 'geometry': 'geometry', - 'sql': '', - 'id': 'id', - 'epsg': 'EPSG:2056', - 'legend': True + "municipality": { + "table": "municipality", + "display_name": "commune", + "geometry": "geometry", + "sql": "", + "id": "id", + "epsg": "EPSG:2056", + "legend": True, }, - 'sector': { - 'table': 'sector', - 'display_name': 'secteur', - 'geometry': 'geometry', - 'sql': '', - 'id': 'id', - 'epsg': 'EPSG:2056', - 'legend': True + "sector": { + "table": "sector", + "display_name": "secteur", + "geometry": "geometry", + "sql": "", 
+ "id": "id", + "epsg": "EPSG:2056", + "legend": True, }, - 'brand': { - 'table': 'brand', - 'display_name': 'marque', - 'geometry': None, - 'sql': '', - 'id': 'id', - 'epsg': 'EPSG:2056', - 'legend': False + "brand": { + "table": "brand", + "display_name": "marque", + "geometry": None, + "sql": "", + "id": "id", + "epsg": "EPSG:2056", + "legend": False, }, - 'model': { - 'table': 'model', - 'display_name': 'model', - 'geometry': None, - 'sql': '', - 'id': 'id', - 'epsg': 'EPSG:2056', - 'legend': False + "model": { + "table": "model", + "display_name": "model", + "geometry": None, + "sql": "", + "id": "id", + "epsg": "EPSG:2056", + "legend": False, }, - 'device': { - 'table': 'device', - 'display_name': 'automate', - 'geometry': None, - 'sql': '', - 'id': 'id', - 'epsg': 'EPSG:2056', - 'legend': False + "device": { + "table": "device", + "display_name": "automate", + "geometry": None, + "sql": "", + "id": "id", + "epsg": "EPSG:2056", + "legend": False, }, - 'class': { - 'table': 'class', - 'display_name': 'classification', - 'geometry': None, - 'sql': '', - 'id': 'id', - 'epsg': 'EPSG:2056', - 'legend': False + "class": { + "table": "class", + "display_name": "classification", + "geometry": None, + "sql": "", + "id": "id", + "epsg": "EPSG:2056", + "legend": False, }, - 'category': { - 'table': 'category', - 'display_name': 'categorie', - 'geometry': None, - 'sql': '', - 'id': 'id', - 'epsg': 'EPSG:2056', - 'legend': False + "category": { + "table": "category", + "display_name": "categorie", + "geometry": None, + "sql": "", + "id": "id", + "epsg": "EPSG:2056", + "legend": False, }, -# 'count_detail': { -# 'table': 'count_detail', -# 'display_name': 'comptage_detail', -# 'geometry': None, -# 'sql': '', -# 'id': 'id', -# 'epsg': 'EPSG:2056', -# 'legend': False -# }, -# 'count_aggregate': { -# 'table': 'count_aggregate', -# 'display_name': 'comptage_aggrege', -# 'geometry': None, -# 'sql': '', -# 'id': 'id', -# 'epsg': 'EPSG:2056', -# 'legend': False -# }, - 
'damage_log': { - 'table': 'damage_log', - 'display_name': 'journal_panne', - 'geometry': None, - 'sql': '', - 'id': 'id', - 'epsg': 'EPSG:2056', - 'legend': False + # 'count_detail': { + # 'table': 'count_detail', + # 'display_name': 'comptage_detail', + # 'geometry': None, + # 'sql': '', + # 'id': 'id', + # 'epsg': 'EPSG:2056', + # 'legend': False + # }, + # 'count_aggregate': { + # 'table': 'count_aggregate', + # 'display_name': 'comptage_aggrege', + # 'geometry': None, + # 'sql': '', + # 'id': 'id', + # 'epsg': 'EPSG:2056', + # 'legend': False + # }, + "damage_log": { + "table": "damage_log", + "display_name": "journal_panne", + "geometry": None, + "sql": "", + "id": "id", + "epsg": "EPSG:2056", + "legend": False, }, - 'special_period': { - 'table': 'special_period', - 'display_name': 'periode_speciale', - 'geometry': None, - 'sql': '', - 'id': 'id', - 'epsg': 'EPSG:2056', - 'legend': False + "special_period": { + "table": "special_period", + "display_name": "periode_speciale", + "geometry": None, + "sql": "", + "id": "id", + "epsg": "EPSG:2056", + "legend": False, }, - 'count': { - 'table': 'count', - 'display_name': 'comptage', - 'geometry': None, - 'sql': '', - 'id': 'id', - 'epsg': 'EPSG:2056', - 'legend': True + "count": { + "table": "count", + "display_name": "comptage", + "geometry": None, + "sql": "", + "id": "id", + "epsg": "EPSG:2056", + "legend": True, }, - 'sensor_type': { - 'table': 'sensor_type', - 'display_name': 'type_capteur', - 'geometry': None, - 'sql': '', - 'id': 'id', - 'epsg': 'EPSG:2056', - 'legend': False + "sensor_type": { + "table": "sensor_type", + "display_name": "type_capteur", + "geometry": None, + "sql": "", + "id": "id", + "epsg": "EPSG:2056", + "legend": False, }, - 'lane': { - 'table': 'lane', - 'display_name': 'voie', - 'geometry': None, - 'sql': '', - 'id': 'id', - 'epsg': 'EPSG:2056', - 'legend': False + "lane": { + "table": "lane", + "display_name": "voie", + "geometry": None, + "sql": "", + "id": "id", + "epsg": 
"EPSG:2056", + "legend": False, }, } diff --git a/comptages/core/delete_dialog.py b/comptages/core/delete_dialog.py index 1569bb06..469127fa 100644 --- a/comptages/core/delete_dialog.py +++ b/comptages/core/delete_dialog.py @@ -1,7 +1,7 @@ from qgis.PyQt.QtWidgets import QDialog from comptages.core.utils import get_ui_class -FORM_CLASS = get_ui_class('delete.ui') +FORM_CLASS = get_ui_class("delete.ui") class DeleteDialog(QDialog, FORM_CLASS): diff --git a/comptages/core/filter_dialog.py b/comptages/core/filter_dialog.py index 9d39b2a3..e05f5b75 100644 --- a/comptages/core/filter_dialog.py +++ b/comptages/core/filter_dialog.py @@ -1,9 +1,15 @@ -from qgis.PyQt.QtWidgets import QDialog, QCompleter, QComboBox, QSlider, QDialogButtonBox +from qgis.PyQt.QtWidgets import ( + QDialog, + QCompleter, + QComboBox, + QSlider, + QDialogButtonBox, +) from qgis.PyQt.QtCore import Qt from comptages.core.utils import get_ui_class from comptages.datamodel import models -FORM_CLASS = get_ui_class('filter.ui') +FORM_CLASS = get_ui_class("filter.ui") class FilterDialog(QDialog, FORM_CLASS): @@ -25,17 +31,19 @@ def update_tjm_labels(min, max): self.tjm.rangeChanged.connect(update_tjm_labels) # Populate axe filter - self.axe.addItem('Tous', None) + self.axe.addItem("Tous", None) self.axe.completer().setCompletionMode(QCompleter.PopupCompletion) self.axe.completer().setFilterMode(Qt.MatchContains) self.axe.setInsertPolicy(QComboBox.NoInsert) - for i in models.Section.objects.all().distinct('owner', 'road').order_by('owner'): - self.axe.addItem(str(i.owner) + ':' + str(i.road), (i.owner, i.road)) + for i in ( + models.Section.objects.all().distinct("owner", "road").order_by("owner") + ): + self.axe.addItem(str(i.owner) + ":" + str(i.road), (i.owner, i.road)) # Populate sector filter - self.sector.addItem('Tous', None) - for i in models.Sector.objects.all().order_by('id'): + self.sector.addItem("Tous", None) + for i in models.Sector.objects.all().order_by("id"): 
self.sector.addItem(str(i.id), i.id) def reset_dialog(button): @@ -47,5 +55,4 @@ def reset_dialog(button): self.axe.setCurrentIndex(0) self.sector.setCurrentIndex(0) - self.buttons.button( - QDialogButtonBox.Reset).clicked.connect(reset_dialog) + self.buttons.button(QDialogButtonBox.Reset).clicked.connect(reset_dialog) diff --git a/comptages/core/importer.py b/comptages/core/importer.py index ea7f2e73..5bdf251f 100644 --- a/comptages/core/importer.py +++ b/comptages/core/importer.py @@ -1,3 +1,4 @@ +from typing import Callable, Dict, Iterator, List, Optional import pytz import os from datetime import datetime, timedelta @@ -12,20 +13,22 @@ def simple_print_callback(progress): print(f"Importing... {progress}%") -def import_file(file_path, count, callback_progress=simple_print_callback): - +def import_file( + file_path: str, count: models.Count, callback_progress=simple_print_callback +): file_format = get_file_format(file_path) file_header = _parse_file_header(file_path) if file_format == "VBV-1": _parse_and_write(file_path, count, _parse_line_vbv1, callback_progress) elif file_format == "INT-2": - interval = int(file_header['INTERVAL']) + interval = int(file_header["INTERVAL"]) intspec = get_intspec(file_header) data_header = _parse_data_header(file_path) cat_bins = _populate_category_dict(count) _parse_and_write( - file_path, count, + file_path, + count, _parse_line_int2, callback_progress, interval=interval, @@ -41,7 +44,14 @@ def import_file(file_path, count, callback_progress=simple_print_callback): raise NotImplementedError("file format not recognized") -def _parse_and_write(file_path, count, line_parser, callback_progress, from_aggregate=False, **kwargs): +def _parse_and_write( + file_path: str, + count: models.Count, + line_parser: Callable, + callback_progress, + from_aggregate: bool = False, + **kwargs, +): basename = os.path.basename(file_path) bulk_mgr = BulkCreateManager(chunk_size=1000) lanes = _populate_lane_dict(count) @@ -63,23 +73,31 @@ def 
_parse_and_write(file_path, count, line_parser, callback_progress, from_aggr previous_progress = progress for row in rows: - category = cat_bins[row['category']] if row['category'] is not None else None - id_lane = lanes[int(row['lane'])] if row['lane'] else directions[int(row['direction'])] + category = ( + cat_bins[row["category"]] + if row["category"] is not None + else None + ) + id_lane = ( + lanes[int(row["lane"])] + if row["lane"] + else directions[int(row["direction"])] + ) bulk_mgr.add( models.CountDetail( - numbering=row['numbering'], - timestamp=row['timestamp'], - distance_front_front=row['distance_front_front'], - distance_front_back=row['distance_front_back'], - speed=row['speed'], - length=row['length'], - height=row['height'], + numbering=row["numbering"], + timestamp=row["timestamp"], + distance_front_front=row["distance_front_front"], + distance_front_back=row["distance_front_back"], + speed=row["speed"], + length=row["length"], + height=row["height"], file_name=basename, import_status=definitions.IMPORT_STATUS_QUARANTINE, id_lane_id=id_lane, id_count_id=count.id, id_category_id=category, - times=row['times'], + times=row["times"], from_aggregate=from_aggregate, ) ) @@ -90,83 +108,82 @@ def _parse_and_write(file_path, count, line_parser, callback_progress, from_aggr bulk_mgr.done() -def _parse_line_vbv1(line, **kwargs): - - if line.startswith('* '): +def _parse_line_vbv1(line: str, **kwargs) -> Optional[List[Dict]]: + if line.startswith("* "): return None - parsed_line = None - tz = pytz.timezone('Europe/Zurich') + parsed_line = {} + tz = pytz.timezone("Europe/Zurich") try: - parsed_line = dict() - parsed_line['numbering'] = line[0:6] - parsed_line['timestamp'] = tz.localize(datetime.strptime( - "{}0000".format(line[7:24]), "%d%m%y %H%M %S %f")) - parsed_line['reserve_code'] = line[25:31] - parsed_line['lane'] = int(line[32:34]) - parsed_line['direction'] = int(line[35:36]) + parsed_line["numbering"] = line[0:6] + parsed_line["timestamp"] = 
tz.localize( + datetime.strptime("{}0000".format(line[7:24]), "%d%m%y %H%M %S %f") + ) + parsed_line["reserve_code"] = line[25:31] + parsed_line["lane"] = int(line[32:34]) + parsed_line["direction"] = int(line[35:36]) # Default values that are used in case some values are missed from a line - parsed_line['distance_front_front'] = 0 - parsed_line['distance_front_back'] = 0 - parsed_line['speed'] = -1 - parsed_line['length'] = 0 - parsed_line['category'] = 0 - parsed_line['height'] = 'NA' - parsed_line['times'] = 1 - - parsed_line['distance_front_front'] = float(line[37:41]) - parsed_line['distance_front_back'] = float(line[42:46]) - parsed_line['speed'] = int(line[47:50]) - parsed_line['length'] = int(line[52:56]) - parsed_line['category'] = int(line[60:62].strip()) - parsed_line['height'] = line[63:65].strip() + parsed_line["distance_front_front"] = 0 + parsed_line["distance_front_back"] = 0 + parsed_line["speed"] = -1 + parsed_line["length"] = 0 + parsed_line["category"] = 0 + parsed_line["height"] = "NA" + parsed_line["times"] = 1 + + parsed_line["distance_front_front"] = float(line[37:41]) + parsed_line["distance_front_back"] = float(line[42:46]) + parsed_line["speed"] = int(line[47:50]) + parsed_line["length"] = int(line[52:56]) + parsed_line["category"] = int(line[60:62].strip()) + parsed_line["height"] = line[63:65].strip() # If the speed of a vehicle is 0, we put it in the category 0 - if parsed_line['speed'] == 0: - parsed_line['category'] = 0 + if parsed_line["speed"] == 0: + parsed_line["category"] = 0 # If the speed of a vehicle is greater than 3*max_speed or 150km/h # TODO: get actual speed limit of the section - if parsed_line['speed'] > 150: - parsed_line['category'] = 0 + if parsed_line["speed"] > 150: + parsed_line["category"] = 0 except ValueError: - if 'lane' not in parsed_line: + if "lane" not in parsed_line: return None - if 'direction' not in parsed_line: + if "direction" not in parsed_line: return None return [parsed_line] -def 
_parse_line_mc(line, **kwargs): - if not line.startswith('20'): +def _parse_line_mc(line: str, **kwargs) -> Optional[List[Dict]]: + if not line.startswith("20"): return None - parsed_line = None + parsed_line = {} try: - parsed_line = dict() - tz = pytz.timezone('Europe/Zurich') + tz = pytz.timezone("Europe/Zurich") # TODO: numbering numbering = 1 - parsed_line['numbering'] = numbering - parsed_line['timestamp'] = tz.localize(datetime.strptime( - line[0:19], "%Y-%m-%d %H:%M:%S")) + parsed_line["numbering"] = numbering + parsed_line["timestamp"] = tz.localize( + datetime.strptime(line[0:19], "%Y-%m-%d %H:%M:%S") + ) # On MetroCount files, the direction is 0-1 instead of 1-2 - parsed_line['lane'] = None - parsed_line['direction'] = int(line[22:23]) + 1 - parsed_line['distance_front_front'] = float(line[24:31]) - if parsed_line['distance_front_front'] > 99.9: - parsed_line['distance_front_front'] = 99.9 - parsed_line['distance_front_back'] = float(line[31:38]) - if parsed_line['distance_front_back'] > 99.9: - parsed_line['distance_front_back'] = 99.9 - parsed_line['speed'] = int(float(line[39:44])) - parsed_line['length'] = int(float(line[44:50])) - parsed_line['category'] = int(line[51:54].strip()) - parsed_line['height'] = '' - parsed_line['times'] = 1 + parsed_line["lane"] = None + parsed_line["direction"] = int(line[22:23]) + 1 + parsed_line["distance_front_front"] = float(line[24:31]) + if parsed_line["distance_front_front"] > 99.9: + parsed_line["distance_front_front"] = 99.9 + parsed_line["distance_front_back"] = float(line[31:38]) + if parsed_line["distance_front_back"] > 99.9: + parsed_line["distance_front_back"] = 99.9 + parsed_line["speed"] = int(float(line[39:44])) + parsed_line["length"] = int(float(line[44:50])) + parsed_line["category"] = int(line[51:54].strip()) + parsed_line["height"] = "" + parsed_line["times"] = 1 except ValueError as e: # QgsMessageLog.logMessage( # 'ValueError: {}'.format(e), 'Comptages', Qgis.Info) @@ -176,160 +193,165 @@ def 
_parse_line_mc(line, **kwargs): return [parsed_line] -def _parse_line_int2(line, **kwargs): - if line.startswith('* '): + +def _parse_line_int2(line, **kwargs) -> Iterator[Optional[Dict]]: + if line.startswith("* "): return None - parsed_line = dict() - tz = pytz.timezone('Europe/Zurich') + parsed_line = {} + tz = pytz.timezone("Europe/Zurich") # TODO: numbering numbering = 1 - parsed_line['numbering'] = numbering + parsed_line["numbering"] = numbering # In the data files midnight is 2400 of the current day # instead of 0000 of the next day - if line[7:9] == '24': - line = line[:7] + '00' + line[9:] - end = tz.localize(datetime.strptime( - "{}".format(line[0:11]), "%d%m%y %H%M")) + if line[7:9] == "24": + line = line[:7] + "00" + line[9:] + end = tz.localize(datetime.strptime("{}".format(line[0:11]), "%d%m%y %H%M")) end += timedelta(days=1) else: - end = tz.localize(datetime.strptime( - "{}".format(line[0:11]), "%d%m%y %H%M")) - - parsed_line['end'] = end - parsed_line['start'] = parsed_line['end'] - timedelta( - minutes=kwargs['interval']) - parsed_line['channel'] = line[12:13] - parsed_line['reserve_code'] = line[14:16] - parsed_line['info_code'] = line[17:19] - - parsed_line['timestamp'] = parsed_line['start'] - - parsed_line['distance_front_front'] = None - parsed_line['distance_front_back'] = None - parsed_line['speed'] = None - parsed_line['length'] = None - parsed_line['height'] = None - parsed_line['category'] = None - parsed_line['lane'] = parsed_line['channel'] - parsed_line['times'] = 1 - - intspec = kwargs['intspec'] - row_type = intspec[int(parsed_line['info_code']) - 1] + end = tz.localize(datetime.strptime("{}".format(line[0:11]), "%d%m%y %H%M")) + + parsed_line["end"] = end + parsed_line["start"] = parsed_line["end"] - timedelta(minutes=kwargs["interval"]) + parsed_line["channel"] = line[12:13] + parsed_line["reserve_code"] = line[14:16] + parsed_line["info_code"] = line[17:19] + + parsed_line["timestamp"] = parsed_line["start"] + + 
parsed_line["distance_front_front"] = None + parsed_line["distance_front_back"] = None + parsed_line["speed"] = None + parsed_line["length"] = None + parsed_line["height"] = None + parsed_line["category"] = None + parsed_line["lane"] = parsed_line["channel"] + parsed_line["times"] = 1 + + intspec = kwargs["intspec"] + row_type = intspec[int(parsed_line["info_code"]) - 1] bins = _get_int_bins( - kwargs['file_header'], - kwargs['data_header'], - kwargs['intspec'], - kwargs['categories'], - row_type) - if row_type == 'SPD': + kwargs["file_header"], + kwargs["data_header"], + kwargs["intspec"], + kwargs["categories"], + row_type, + ) + if row_type == "SPD": for i, data in enumerate(line[20:].split()): if int(data) == 0: continue speed_low = bins[i] speed = int(int(speed_low) + 5) - parsed_line['speed'] = speed - parsed_line['times'] = int(data) + parsed_line["speed"] = speed + parsed_line["times"] = int(data) yield parsed_line - elif row_type == 'LEN': + elif row_type == "LEN": for i, data in enumerate(line[20:].split()): if int(data) == 0: continue lenght_low = bins[i] lenght_high = bins[i + 1] lenght = int(int(lenght_low) + int(lenght_high) / 2) - parsed_line['lenght'] = lenght - parsed_line['times'] = int(data) + parsed_line["lenght"] = lenght + parsed_line["times"] = int(data) yield parsed_line - elif row_type == 'CLS': + elif row_type == "CLS": for i, data in enumerate(line[20:].split()): if int(data) == 0: continue - parsed_line['category'] = i + 1 - parsed_line['times'] = int(data) + parsed_line["category"] = i + 1 + parsed_line["times"] = int(data) yield parsed_line - elif row_type == 'SDS': + elif row_type == "SDS": # Insert the values in the SPD table and only the # mean and the deviation in the SDS table pass - elif row_type == 'DRN': + elif row_type == "DRN": pass - elif row_type == 'CNT': + elif row_type == "CNT": pass return None -def _get_int_bins(file_header, data_header, intspec, categories, code): +def _get_int_bins(file_header, data_header, intspec, 
categories, code) -> List: """Returns an array with the bins if they exist, or the number of columns of this data type""" values = [] - if code == 'SPD' or code == 'SDS': - values = file_header['SPDBINS'].split() - elif code == 'LEN': - values = file_header['LENBINS'].split() - elif code == 'CLS': + if code == "SPD" or code == "SDS": + values = file_header["SPDBINS"].split() + elif code == "LEN": + values = file_header["LENBINS"].split() + elif code == "CLS": values = list(categories.values())[1:] # [1:] is because 0 is trash else: values = data_header[intspec.index(code)] return values -def _parse_file_header(file_path): +def _parse_file_header(file_path: str): file_header = dict() tz = pytz.timezone("Europe/Zurich") with open(file_path, encoding=get_file_encoding(file_path)) as f: for line in f: # Marksmann - if line.startswith('* ') and not line.startswith('* HEAD '): + if line.startswith("* ") and not line.startswith("* HEAD "): line = line[2:] - splitted = line.split('=', 1) + splitted = line.split("=", 1) if len(splitted) > 1: key = splitted[0].strip() value = splitted[1].strip() - if key == 'CLASS' and value == 'SPECIAL10': - value = 'SWISS10' - if key in ['STARTREC', 'STOPREC']: - value = tz.localize( - datetime.strptime(value, "%H:%M %d/%m/%y")) + if key == "CLASS" and value == "SPECIAL10": + value = "SWISS10" + if key in ["STARTREC", "STOPREC"]: + value = tz.localize(datetime.strptime(value, "%H:%M %d/%m/%y")) file_header[key] = value # MetroCount - elif line.startswith('MetroCount'): - file_header['FORMAT'] = 'MC' - elif line.startswith('Place'): - file_header['SITE'] = line[ - line.find('[') + 1:line.find(']')].replace('-', '') - elif line.startswith('20') and file_header['FORMAT'] == 'MC' and 'STARTREC' not in file_header: - file_header['STARTREC'] = tz.localize( - datetime.strptime(line[:19], "%Y-%m-%d %H:%M:%S")) - elif line.startswith('20') and file_header['FORMAT'] == 'MC': - file_header['STOPREC'] = tz.localize( - datetime.strptime(line[:19], 
"%Y-%m-%d %H:%M:%S")) - elif line.startswith('Type de Cat') and file_header['FORMAT'] == 'MC': - file_header['CLASS'] = line[line.find('(') + 1:line.find(')')] - if file_header['CLASS'] == 'Euro13': - file_header['CLASS'] = 'EUR13' - elif file_header['CLASS'] == 'NZTA2011': - file_header['CLASS'] = 'NZ13' - elif file_header['CLASS'][:5] == 'FHWA ': - file_header['CLASS'] = 'FHWA13' - elif file_header['CLASS'] == 'CAT-Cycle_dist-empat': - file_header['CLASS'] = 'SPCH-MD 5C' + elif line.startswith("MetroCount"): + file_header["FORMAT"] = "MC" + elif line.startswith("Place"): + file_header["SITE"] = line[line.find("[") + 1 : line.find("]")].replace( + "-", "" + ) + elif ( + line.startswith("20") + and file_header["FORMAT"] == "MC" + and "STARTREC" not in file_header + ): + file_header["STARTREC"] = tz.localize( + datetime.strptime(line[:19], "%Y-%m-%d %H:%M:%S") + ) + elif line.startswith("20") and file_header["FORMAT"] == "MC": + file_header["STOPREC"] = tz.localize( + datetime.strptime(line[:19], "%Y-%m-%d %H:%M:%S") + ) + elif line.startswith("Type de Cat") and file_header["FORMAT"] == "MC": + file_header["CLASS"] = line[line.find("(") + 1 : line.find(")")] + if file_header["CLASS"] == "Euro13": + file_header["CLASS"] = "EUR13" + elif file_header["CLASS"] == "NZTA2011": + file_header["CLASS"] = "NZ13" + elif file_header["CLASS"][:5] == "FHWA ": + file_header["CLASS"] = "FHWA13" + elif file_header["CLASS"] == "CAT-Cycle_dist-empat": + file_header["CLASS"] = "SPCH-MD 5C" return file_header -def _parse_data_header(file_path): +def _parse_data_header(file_path: str) -> List: data_header = [] with open(file_path, encoding=get_file_encoding(file_path)) as f: for line in f: - if line.startswith('* HEAD '): + if line.startswith("* HEAD "): start_char = 20 i = 0 while True: - if not line[start_char:start_char + 4] == '': + if not line[start_char : start_char + 4] == "": i += 1 start_char += 5 else: @@ -338,12 +360,10 @@ def _parse_data_header(file_path): return data_header 
-def _populate_lane_dict(count): +def _populate_lane_dict(count: models.Count) -> Dict[int, int]: # e.g. lanes = {1: 435, 2: 436} - lanes = models.Lane.objects.filter( - id_installation__count=count - ).order_by("number") + lanes = models.Lane.objects.filter(id_installation__count=count).order_by("number") return {x.number: x.id for x in lanes} @@ -352,9 +372,9 @@ def _populate_direction_dict(count): # e.g. lanes = {1: 406, 2: 408} # It will return only one lane per direction - directions = models.Lane.objects.filter( - id_installation__count=count - ).order_by("-number") + directions = models.Lane.objects.filter(id_installation__count=count).order_by( + "-number" + ) return {x.direction: x.id for x in directions} @@ -374,7 +394,7 @@ def _populate_category_dict(count): # e.g. categories = {0: 922, 1: 22, 2: 23, 3: 24, 4: 25, 5: 26, 6: 27, 7: 28, 8: 29, 9: 30, 10: 31} categories = models.Category.objects.filter( classcategory__id_class__name=class_name - ).order_by('code') + ).order_by("code") return {x.code: x.id for x in categories} @@ -394,14 +414,14 @@ def get_file_format(file_path): def get_file_encoding(file_path): """Guess the right file encoding.""" - with open(file_path, 'r', encoding='utf-8') as f: + with open(file_path, "r", encoding="utf-8") as f: try: for line in f: pass except UnicodeDecodeError: - return 'ISO-8859-1' + return "ISO-8859-1" else: - return 'utf-8' + return "utf-8" def get_line_count(file_path): @@ -412,9 +432,9 @@ def get_line_count(file_path): def get_intspec(file_header): intspec = [] - for i, code in enumerate(file_header['INTSPEC'].split('+')): - if code.strip() not in ['SPD', 'SDS', 'LEN', 'CLS', 'CNT', 'DRN']: - raise NotImplementedError('{}'.format(code.strip())) + for i, code in enumerate(file_header["INTSPEC"].split("+")): + if code.strip() not in ["SPD", "SDS", "LEN", "CLS", "CNT", "DRN"]: + raise NotImplementedError("{}".format(code.strip())) # the key corrpespond to the value in the data row intspec.append(code.strip()) 
return intspec @@ -426,12 +446,17 @@ def guess_count(file_path): header = _parse_file_header(file_path) result = models.Count.objects.filter( - Q(id_installation__name=header['SITE']) | Q(id_installation__alias=header['SITE'])) + Q(id_installation__name=header["SITE"]) + | Q(id_installation__alias=header["SITE"]) + ) result = result.filter( id_installation__active=True, - id_class__name=header['CLASS'], - start_service_date__lte=header['STARTREC'], - end_service_date__gte=header['STOPREC'] - timedelta(seconds=1), # To manage datetimes like 01.01.2022 00.00 that should be equal to 31.12.2021 + id_class__name=header["CLASS"], + start_service_date__lte=header["STARTREC"], + end_service_date__gte=header["STOPREC"] + - timedelta( + seconds=1 + ), # To manage datetimes like 01.01.2022 00.00 that should be equal to 31.12.2021 ) if len(result) > 0: diff --git a/comptages/core/importer_task.py b/comptages/core/importer_task.py index cf00f6fd..58c1b511 100644 --- a/comptages/core/importer_task.py +++ b/comptages/core/importer_task.py @@ -1,40 +1,46 @@ import os from datetime import datetime +from typing import Any from qgis.core import QgsTask, Qgis, QgsMessageLog +from comptages.datamodel import models from comptages.core import importer class ImporterTask(QgsTask): - - def __init__(self, file_path, count): + def __init__(self, file_path: str, count: models.Count): self.basename = os.path.basename(file_path) - super().__init__( - 'Importation fichier {}'.format(self.basename)) + super().__init__("Importation fichier {}".format(self.basename)) self.file_path = file_path self.count = count def run(self): try: - importer.import_file(self.file_path, self.count, callback_progress=self.setProgress) + importer.import_file( + self.file_path, self.count, callback_progress=self.setProgress + ) return True except Exception as e: self.exception = e raise e # return False - def finished(self, result): + def finished(self, result: Any): if result: QgsMessageLog.logMessage( - '{} - Import 
file {} ended'.format( - datetime.now(), self.basename), - 'Comptages', Qgis.Info) + "{} - Import file {} ended".format(datetime.now(), self.basename), + "Comptages", + Qgis.Info, + ) else: QgsMessageLog.logMessage( - '{} - Import file {} ended with errors: {}'.format( - datetime.now(), self.basename, self.exception), - 'Comptages', Qgis.Info) + "{} - Import file {} ended with errors: {}".format( + datetime.now(), self.basename, self.exception + ), + "Comptages", + Qgis.Info, + ) diff --git a/comptages/core/layers.py b/comptages/core/layers.py index b5ecc4dd..90eaf26b 100644 --- a/comptages/core/layers.py +++ b/comptages/core/layers.py @@ -1,12 +1,18 @@ import os +from typing import Union -from qgis.PyQt.QtCore import QObject, QVariant -from qgis.PyQt.QtSql import QSqlDatabase, QSqlQuery +from qgis.PyQt.QtCore import QObject +from qgis.PyQt.QtSql import QSqlQuery from qgis.core import ( - QgsProject, QgsEditorWidgetSetup, QgsVectorLayer, - QgsCoordinateReferenceSystem, QgsDataSourceUri, - QgsAction, QgsFeatureRequest, QgsExpressionContextUtils, - QgsField, QgsVectorLayerJoinInfo) + QgsProject, + QgsEditorWidgetSetup, + QgsVectorLayer, + QgsCoordinateReferenceSystem, + QgsDataSourceUri, + QgsAction, + QgsFeatureRequest, + QgsExpressionContextUtils, +) from qgis.utils import iface from comptages.core.definitions import LAYER_DEFINITIONS @@ -30,35 +36,34 @@ def __init__(self): def load_layers(self): settings = Settings() - group_comptages = QgsProject.instance().layerTreeRoot().findGroup( - 'Comptages') - group_extra = QgsProject.instance().layerTreeRoot().findGroup( - 'Extra') + group_comptages = QgsProject.instance().layerTreeRoot().findGroup("Comptages") + group_extra = QgsProject.instance().layerTreeRoot().findGroup("Extra") if group_comptages is None: - group_comptages = QgsProject.instance().layerTreeRoot().addGroup( - 'Comptages') + group_comptages = ( + QgsProject.instance().layerTreeRoot().addGroup("Comptages") + ) if group_extra is None and 
settings.value("extra_layers"): - group_extra = group_comptages.addGroup('Extra') + group_extra = group_comptages.addGroup("Extra") for key in LAYER_DEFINITIONS: layer_definition = LAYER_DEFINITIONS[key] if not QgsProject.instance().mapLayersByName( - layer_definition['display_name']): - + layer_definition["display_name"] + ): layer = self.load_layer( - 'comptages', # Schema - layer_definition['table'], - layer_definition['geometry'], - layer_definition['sql'], - layer_definition['display_name'], - layer_definition['id'], - layer_definition['epsg'], + "comptages", # Schema + layer_definition["table"], + layer_definition["geometry"], + layer_definition["sql"], + layer_definition["display_name"], + layer_definition["id"], + layer_definition["epsg"], ) - if layer_definition['legend']: + if layer_definition["legend"]: group_comptages.addLayer(layer) elif settings.value("extra_layers"): group_extra.addLayer(layer) @@ -70,21 +75,24 @@ def load_layers(self): self.create_virtual_fields() self.create_joins() self.create_relations() - iface.setActiveLayer(self.layers['section']) + iface.setActiveLayer(self.layers["section"]) self.populate_list_of_highlighted_sections() - self.layers['count'].featureAdded.connect(self.on_count_added) + self.layers["count"].featureAdded.connect(self.on_count_added) from qgis.core import QgsExpressionContextUtils + QgsExpressionContextUtils.setProjectVariable( - QgsProject.instance(), 'highlighted_installation', '') + QgsProject.instance(), "highlighted_installation", "" + ) def apply_qml_styles(self): for key in LAYER_DEFINITIONS: current_dir = os.path.dirname(os.path.abspath(__file__)) qml_file_path = os.path.join( - current_dir, os.pardir, 'qml', '{}.qml'.format(key)) + current_dir, os.pardir, "qml", "{}.qml".format(key) + ) self.layers[key].loadNamedStyle(qml_file_path) def create_virtual_fields(self): @@ -104,141 +112,166 @@ def create_relations(self): # QgsProject.instance().relationManager().addRelation(rel) widget = 
QgsEditorWidgetSetup( - 'ValueRelation', + "ValueRelation", { - 'AllowMulti': False, - 'AllowNull': False, - 'FilterExpression': '', - 'Key': 'id', - 'Layer': self.layers['installation'].id(), - 'OrderByValue': False, - 'UseCompleter': False, - 'Value': 'name' - } - ) - data_provider = self.layers['count'].dataProvider() - index = data_provider.fieldNameIndex('id_installation') - self.layers['count'].setEditorWidgetSetup(index, widget) - - classes_1 = "("+",".join("'"+str(i)+"'" for i in self.get_classes_by_sensor_type(1))+")" - classes_2 = "("+",".join("'"+str(i)+"'" for i in self.get_classes_by_sensor_type(2))+")" - classes_3 = "("+",".join("'"+str(i)+"'" for i in self.get_classes_by_sensor_type(3))+")" + "AllowMulti": False, + "AllowNull": False, + "FilterExpression": "", + "Key": "id", + "Layer": self.layers["installation"].id(), + "OrderByValue": False, + "UseCompleter": False, + "Value": "name", + }, + ) + data_provider = self.layers["count"].dataProvider() + index = data_provider.fieldNameIndex("id_installation") + self.layers["count"].setEditorWidgetSetup(index, widget) + + classes_1 = ( + "(" + + ",".join("'" + str(i) + "'" for i in self.get_classes_by_sensor_type(1)) + + ")" + ) + classes_2 = ( + "(" + + ",".join("'" + str(i) + "'" for i in self.get_classes_by_sensor_type(2)) + + ")" + ) + classes_3 = ( + "(" + + ",".join("'" + str(i) + "'" for i in self.get_classes_by_sensor_type(3)) + + ")" + ) widget = QgsEditorWidgetSetup( - 'ValueRelation', + "ValueRelation", { - 'AllowMulti': False, - 'AllowNull': True, - 'FilterExpression': - f""" + "AllowMulti": False, + "AllowNull": True, + "FilterExpression": f""" CASE WHEN current_value('id_sensor_type') = 1 THEN \"id\" IN {classes_1} WHEN current_value('id_sensor_type') = 2 THEN \"id\" IN {classes_2} WHEN current_value('id_sensor_type') = 3 THEN \"id\" IN {classes_3} ELSE \"id\" END""", - 'Key': 'id', - 'Layer': self.layers['class'].id(), - 'OrderByValue': False, - 'UseCompleter': False, - 'Value': 'name' - } 
+ "Key": "id", + "Layer": self.layers["class"].id(), + "OrderByValue": False, + "UseCompleter": False, + "Value": "name", + }, ) - data_provider = self.layers['count'].dataProvider() - index = data_provider.fieldNameIndex('id_class') - self.layers['count'].setEditorWidgetSetup(index, widget) + data_provider = self.layers["count"].dataProvider() + index = data_provider.fieldNameIndex("id_class") + self.layers["count"].setEditorWidgetSetup(index, widget) widget = QgsEditorWidgetSetup( - 'ValueRelation', + "ValueRelation", { - 'AllowMulti': False, - 'AllowNull': False, - 'FilterExpression': '', - 'Key': 'id', - 'Layer': self.layers['sensor_type'].id(), - 'OrderByValue': False, - 'UseCompleter': False, - 'Value': 'name' - } - ) - data_provider = self.layers['count'].dataProvider() - index = data_provider.fieldNameIndex('id_sensor_type') - self.layers['count'].setEditorWidgetSetup(index, widget) + "AllowMulti": False, + "AllowNull": False, + "FilterExpression": "", + "Key": "id", + "Layer": self.layers["sensor_type"].id(), + "OrderByValue": False, + "UseCompleter": False, + "Value": "name", + }, + ) + data_provider = self.layers["count"].dataProvider() + index = data_provider.fieldNameIndex("id_sensor_type") + self.layers["count"].setEditorWidgetSetup(index, widget) widget = QgsEditorWidgetSetup( - 'ValueRelation', + "ValueRelation", { - 'AllowMulti': False, - 'AllowNull': False, - 'FilterExpression': '', - 'Key': 'id', - 'Layer': self.layers['device'].id(), - 'OrderByValue': False, - 'UseCompleter': False, - 'Value': 'name' - } - ) - data_provider = self.layers['count'].dataProvider() - index = data_provider.fieldNameIndex('id_device') - self.layers['count'].setEditorWidgetSetup(index, widget) - - models_1 = "("+",".join("'"+str(i)+"'" for i in self.get_models_by_sensor_type(1))+")" - models_2 = "("+",".join("'"+str(i)+"'" for i in self.get_models_by_sensor_type(2))+")" - models_3 = "("+",".join("'"+str(i)+"'" for i in self.get_models_by_sensor_type(3))+")" + 
"AllowMulti": False, + "AllowNull": False, + "FilterExpression": "", + "Key": "id", + "Layer": self.layers["device"].id(), + "OrderByValue": False, + "UseCompleter": False, + "Value": "name", + }, + ) + data_provider = self.layers["count"].dataProvider() + index = data_provider.fieldNameIndex("id_device") + self.layers["count"].setEditorWidgetSetup(index, widget) + + models_1 = ( + "(" + + ",".join("'" + str(i) + "'" for i in self.get_models_by_sensor_type(1)) + + ")" + ) + models_2 = ( + "(" + + ",".join("'" + str(i) + "'" for i in self.get_models_by_sensor_type(2)) + + ")" + ) + models_3 = ( + "(" + + ",".join("'" + str(i) + "'" for i in self.get_models_by_sensor_type(3)) + + ")" + ) widget = QgsEditorWidgetSetup( - 'ValueRelation', + "ValueRelation", { - 'AllowMulti': False, - 'AllowNull': False, - 'FilterExpression': - f""" + "AllowMulti": False, + "AllowNull": False, + "FilterExpression": f""" CASE WHEN current_value('id_sensor_type') = 1 THEN \"id\" IN {models_1} WHEN current_value('id_sensor_type') = 2 THEN \"id\" IN {models_2} WHEN current_value('id_sensor_type') = 3 THEN \"id\" IN {models_3} ELSE \"id\" END""", - 'Key': 'id', - 'Layer': self.layers['model'].id(), - 'OrderByValue': False, - 'UseCompleter': False, - 'Value': 'name' - } + "Key": "id", + "Layer": self.layers["model"].id(), + "OrderByValue": False, + "UseCompleter": False, + "Value": "name", + }, ) - data_provider = self.layers['count'].dataProvider() - index = data_provider.fieldNameIndex('id_model') - self.layers['count'].setEditorWidgetSetup(index, widget) + data_provider = self.layers["count"].dataProvider() + index = data_provider.fieldNameIndex("id_model") + self.layers["count"].setEditorWidgetSetup(index, widget) model_objs = models.Model.objects.all() filter_expression = "CASE " for model in model_objs: - devices = list(models.Device.objects.filter(id_model__id = model.id).values_list('id', flat=True)) + devices = list( + models.Device.objects.filter(id_model__id=model.id).values_list( 
+ "id", flat=True + ) + ) if not devices: devices = " " devices_exp = "(" + ",".join("'" + str(i) + "'" for i in devices) + ")" filter_expression += f" WHEN current_value('id_model') = {model.id} THEN \"id\" IN {devices_exp} " - filter_expression += " ELSE \"id\" END" + filter_expression += ' ELSE "id" END' widget = QgsEditorWidgetSetup( - 'ValueRelation', + "ValueRelation", { - 'AllowMulti': False, - 'AllowNull': False, - 'FilterExpression': filter_expression, - 'Key': 'id', - 'Layer': self.layers['device'].id(), - 'OrderByValue': False, - 'UseCompleter': False, - 'Value': 'name' - } - ) - data_provider = self.layers['count'].dataProvider() - index = data_provider.fieldNameIndex('id_device') - self.layers['count'].setEditorWidgetSetup(index, widget) + "AllowMulti": False, + "AllowNull": False, + "FilterExpression": filter_expression, + "Key": "id", + "Layer": self.layers["device"].id(), + "OrderByValue": False, + "UseCompleter": False, + "Value": "name", + }, + ) + data_provider = self.layers["count"].dataProvider() + index = data_provider.fieldNameIndex("id_device") + self.layers["count"].setEditorWidgetSetup(index, widget) def load_layer( - self, schema, layer_name, geometry, sql, display_name, id_col='', - epsg=None): - + self, schema, layer_name, geometry, sql, display_name, id_col="", epsg=None + ): settings = Settings() uri = QgsDataSourceUri() uri.setConnection( @@ -246,7 +279,7 @@ def load_layer( str(settings.value("db_port")), settings.value("db_name"), settings.value("db_username"), - settings.value("db_password") + settings.value("db_password"), ) uri.setDataSource(schema, layer_name, geometry, sql, id_col) @@ -260,66 +293,77 @@ def load_layer( return layer def add_layer_actions(self): - action_manager = self.layers['count'].actions() + action_manager = self.layers["count"].actions() action_manager.clearActions() action = QgsAction( QgsAction.GenericPython, - 'Exporter la configuration', - ("from qgis.utils import plugins\n" - 
"plugins['comptages'].do_export_configuration_action([% attribute( $currentfeature, 'id' ) %])") + "Exporter la configuration", + ( + "from qgis.utils import plugins\n" + "plugins['comptages'].do_export_configuration_action([% attribute( $currentfeature, 'id' ) %])" + ), ) - action.setActionScopes(['Feature']) + action.setActionScopes(["Feature"]) action_manager.addAction(action) action = QgsAction( QgsAction.GenericPython, - 'Importation', - ("from qgis.utils import plugins\n" - "plugins['comptages'].do_import_single_file_action([% attribute( $currentfeature, 'id' ) %])") + "Importation", + ( + "from qgis.utils import plugins\n" + "plugins['comptages'].do_import_single_file_action([% attribute( $currentfeature, 'id' ) %])" + ), ) - action.setActionScopes(['Feature']) + action.setActionScopes(["Feature"]) action_manager.addAction(action) action = QgsAction( QgsAction.GenericPython, - 'Creer un rapport', - ("from qgis.utils import plugins\n" - "plugins['comptages'].do_generate_report_action([% attribute( $currentfeature, 'id' ) %])") + "Creer un rapport", + ( + "from qgis.utils import plugins\n" + "plugins['comptages'].do_generate_report_action([% attribute( $currentfeature, 'id' ) %])" + ), ) - action.setActionScopes(['Feature']) + action.setActionScopes(["Feature"]) action_manager.addAction(action) action = QgsAction( QgsAction.GenericPython, - 'Creer un plan', - ("from qgis.utils import plugins\n" - "plugins['comptages'].do_export_plan_action([% attribute( $currentfeature, 'id' ) %])") + "Creer un plan", + ( + "from qgis.utils import plugins\n" + "plugins['comptages'].do_export_plan_action([% attribute( $currentfeature, 'id' ) %])" + ), ) - action.setActionScopes(['Feature']) + action.setActionScopes(["Feature"]) action_manager.addAction(action) action = QgsAction( QgsAction.GenericPython, - 'Générer les graphiques', - ("from qgis.utils import plugins\n" - "plugins['comptages'].do_generate_chart_action([% attribute( $currentfeature, 'id' ) %])") + "Générer les 
graphiques", + ( + "from qgis.utils import plugins\n" + "plugins['comptages'].do_generate_chart_action([% attribute( $currentfeature, 'id' ) %])" + ), ) - action.setActionScopes(['Feature']) + action.setActionScopes(["Feature"]) action_manager.addAction(action) action = QgsAction( QgsAction.GenericPython, - 'Effacer des données', - ("from qgis.utils import plugins\n" - "plugins['comptages'].do_delete_data_action([% attribute( $currentfeature, 'id' ) %])") + "Effacer des données", + ( + "from qgis.utils import plugins\n" + "plugins['comptages'].do_delete_data_action([% attribute( $currentfeature, 'id' ) %])" + ), ) - action.setActionScopes(['Feature']) + action.setActionScopes(["Feature"]) action_manager.addAction(action) def create_count(self): - - layer = self.layers['section'] + layer = self.layers["section"] selected_count = layer.selectedFeatureCount() if selected_count == 0: @@ -331,18 +375,19 @@ def create_count(self): else: selected_feature = next(layer.getSelectedFeatures()) - lanes = self.get_lanes_of_section(selected_feature.attribute('id')) - installation = self.get_installation_of_lane( - next(lanes).attribute('id')) + lanes = self.get_lanes_of_section(selected_feature.attribute("id")) + installation = self.get_installation_of_lane(next(lanes).attribute("id")) # Save the id of the installation related to the selected section # so we can use in the count form to automatically select the # installation in the combobox QgsExpressionContextUtils.setProjectVariable( QgsProject.instance(), - 'selected_installation', installation.attribute('id')) - self.layers['count'].startEditing() - iface.setActiveLayer(self.layers['count']) + "selected_installation", + installation.attribute("id"), + ) + self.layers["count"].startEditing() + iface.setActiveLayer(self.layers["count"]) iface.actionAddFeature().trigger() def on_count_added(self): @@ -350,13 +395,13 @@ def on_count_added(self): Refresh the map""" self.populate_list_of_highlighted_sections() - 
self.layers['section'].triggerRepaint() + self.layers["section"].triggerRepaint() def edit_count(self): """Open attribute table of count filtered with only the features related to the selected section""" - layer = self.layers['section'] + layer = self.layers["section"] selected_count = layer.selectedFeatureCount() if selected_count == 0: @@ -367,11 +412,10 @@ def edit_count(self): return else: selected_feature = next(layer.getSelectedFeatures()) - counts = self.get_counts_of_section( - selected_feature.attribute('id')) + counts = self.get_counts_of_section(selected_feature.attribute("id")) ids = [] for c in counts: - ids.append(c.attribute('id')) + ids.append(c.attribute("id")) self.open_count_attribute_table_and_filter(ids) def open_count_attribute_table_and_filter(self, count_ids): @@ -381,18 +425,16 @@ def open_count_attribute_table_and_filter(self, count_ids): return iface.showAttributeTable( - self.layers['count'], - '"id" in ({})'.format(", ".join(map(str, count_ids)))) + self.layers["count"], '"id" in ({})'.format(", ".join(map(str, count_ids))) + ) def get_counts_of_section(self, section_id): """Return a list of all count features related with the passed section""" try: lanes = self.get_lanes_of_section(section_id) - installation = self.get_installation_of_lane( - next(lanes).attribute('id')) - counts = self.get_counts_of_installation( - installation.attribute('id')) + installation = self.get_installation_of_lane(next(lanes).attribute("id")) + counts = self.get_counts_of_installation(installation.attribute("id")) except StopIteration: return [] @@ -404,10 +446,8 @@ def get_counts_of_section_by_year(self, section_id, year): try: lanes = self.get_lanes_of_section(section_id) - installation = self.get_installation_of_lane( - next(lanes).attribute('id')) - counts = self.get_counts_of_installation( - installation.attribute('id')) + installation = self.get_installation_of_lane(next(lanes).attribute("id")) + counts = 
self.get_counts_of_installation(installation.attribute("id")) except StopIteration: return [] @@ -417,19 +457,19 @@ def get_counts_of_section_by_year(self, section_id, year): def get_lanes_of_section(self, section_id): """Return a list of the lane features of the passed section""" request = QgsFeatureRequest().setFilterExpression( - '"id_section" = \'{}\''.format(section_id) + "\"id_section\" = '{}'".format(section_id) ) - return self.layers['lane'].getFeatures(request) + return self.layers["lane"].getFeatures(request) def get_installation_of_lane(self, lane_id): """Return the installation feature of the passes lane""" - lane = next(self.layers['lane'].getFeatures( - '"id"={}'.format(lane_id))) - installation_id = lane.attribute('id_installation') + lane = next(self.layers["lane"].getFeatures('"id"={}'.format(lane_id))) + installation_id = lane.attribute("id_installation") - return next(self.layers['installation'].getFeatures( - '"id"={}'.format(installation_id))) + return next( + self.layers["installation"].getFeatures('"id"={}'.format(installation_id)) + ) def get_counts_of_installation(self, installation_id): """Return a list of count features related with the passsed @@ -439,7 +479,7 @@ def get_counts_of_installation(self, installation_id): '"id_installation" = {}'.format(installation_id) ) - return self.layers['count'].getFeatures(request) + return self.layers["count"].getFeatures(request) def is_section_highlighted(self, section_id): """Return if the passed section has related counts with the current @@ -450,8 +490,15 @@ def is_section_highlighted(self, section_id): return False def populate_list_of_highlighted_sections( - self, start_date=None, end_date=None, permanent=None, - sensor_type_id=None, tjm=None, axe=None, sector=None): + self, + start_date=None, + end_date=None, + permanent=None, + sensor_type_id=None, + tjm=None, + axe=None, + sector=None, + ): """Return a list of highlighted sections. 
Directly on the db for performances""" @@ -462,8 +509,7 @@ def populate_list_of_highlighted_sections( wheres = [] if start_date: - wheres.append( - "c.start_process_date >= '{}'::date".format(start_date)) + wheres.append("c.start_process_date >= '{}'::date".format(start_date)) if end_date: wheres.append("c.end_process_date <= '{}'::date".format(end_date)) if permanent is not None: @@ -479,26 +525,31 @@ def populate_list_of_highlighted_sections( wheres.append("s.owner = '{}' and s.road = '{}'".format(axe[0], axe[1])) if sector: - wheres.append("ST_Intersects(s.geometry, sec.geometry) and sec.id = {}".format(sector)) + wheres.append( + "ST_Intersects(s.geometry, sec.geometry) and sec.id = {}".format(sector) + ) - where_str = '' + where_str = "" if wheres: where_str = "where " + " and ".join(wheres) - query_str = ("select distinct l.id_section from comptages.lane as l " - "inner join comptages.installation as i on " - "(l.id_installation = i.id) inner join " - "comptages.count as c on (i.id = c.id_installation) " - "inner join comptages.section as s on" - "(l.id_section = s.id), comptages.sector as sec " - "{};".format(where_str)) + query_str = ( + "select distinct l.id_section from comptages.lane as l " + "inner join comptages.installation as i on " + "(l.id_installation = i.id) inner join " + "comptages.count as c on (i.id = c.id_installation) " + "inner join comptages.section as s on" + "(l.id_section = s.id), comptages.sector as sec " + "{};".format(where_str) + ) query.exec_(query_str) while query.next(): self.highlighted_sections.append(str(query.value(0)).strip()) def apply_filter( - self, start_date, end_date, installation_choice, sensor_choice, tjm, axe, sector): + self, start_date, end_date, installation_choice, sensor_choice, tjm, axe, sector + ): if installation_choice == 0: permanent = None elif installation_choice == 1: @@ -509,13 +560,14 @@ def apply_filter( if sensor_choice == 0: sensor_type_id = None elif sensor_choice == 1: - sensor_type_id = 
self.get_sensor_type_id('Boucle') + sensor_type_id = self.get_sensor_type_id("Boucle") elif sensor_choice == 2: - sensor_type_id = self.get_sensor_type_id('Tube') + sensor_type_id = self.get_sensor_type_id("Tube") self.populate_list_of_highlighted_sections( - start_date, end_date, permanent, sensor_type_id, tjm, axe, sector) - self.layers['section'].triggerRepaint() + start_date, end_date, permanent, sensor_type_id, tjm, axe, sector + ) + self.layers["section"].triggerRepaint() def is_connected(self): """Return if the plugin is connected to the database""" @@ -532,27 +584,26 @@ def close_db_connection(self): self.db.close() self.db = None - def get_installation_name_of_count(self, count_id): - return self.get_installation_of_count(count_id).attribute('name') + def get_installation_name_of_count(self, count_id: str): + return self.get_installation_of_count(count_id).attribute("name") def get_installation_of_count(self, count_id): count = self.get_count(count_id) - installation = self.get_installation( - count.attribute('id_installation')) + installation = self.get_installation(count.attribute("id_installation")) return installation - def get_sections_of_count(self, count_id): + def get_sections_of_count(self, count_id: str): """Return the sections related to a count""" count = self.get_count(count_id) - installation_id = count.attribute('id_installation') + installation_id = count.attribute("id_installation") lanes = self.get_lanes_of_installation(installation_id) # Get only distinct section ids section_ids = set() for lane in lanes: - section_ids.add(lane.attribute('id_section')) + section_ids.add(lane.attribute("id_section")) sections = [] for section_id in section_ids: @@ -564,19 +615,18 @@ def get_section_ids_of_count(self, count_id): """Return the section ids related to a count""" count = self.get_count(count_id) - installation_id = count.attribute('id_installation') + installation_id = count.attribute("id_installation") lanes = 
self.get_lanes_of_installation(installation_id) # Get only distinct section ids section_ids = set() for lane in lanes: - section_ids.add(lane.attribute('id_section')) + section_ids.add(lane.attribute("id_section")) return list(section_ids) def get_sections_with_data_of_count(self, count_id, status): - self.init_db_connection() query = QSqlQuery(self.db) @@ -593,7 +643,8 @@ def get_sections_with_data_of_count(self, count_id, status): "join comptages.lane as lan " "on lan.id = cou.id_lane " "where cou.id_count = {0} " - "and import_status = {1} ".format(count_id, status)) + "and import_status = {1} ".format(count_id, status) + ) result = [] query.exec_(query_str) @@ -601,104 +652,97 @@ def get_sections_with_data_of_count(self, count_id, status): result.append(query.value(0)) return result - def get_count(self, count_id): + def get_count(self, count_id: str): """Return the count feature""" - request = QgsFeatureRequest().setFilterExpression( - '"id" = {}'.format(count_id) - ) - - return next(self.layers['count'].getFeatures(request)) + request = QgsFeatureRequest().setFilterExpression('"id" = {}'.format(count_id)) - def get_installation(self, installation_id): + return next(self.layers["count"].getFeatures(request)) + def get_installation(self, installation_id: str): request = QgsFeatureRequest().setFilterExpression( '"id" = {}'.format(installation_id) ) - return next(self.layers['installation'].getFeatures(request)) - - def get_lanes_of_installation(self, installation_id): + return next(self.layers["installation"].getFeatures(request)) + def get_lanes_of_installation(self, installation_id: str): request = QgsFeatureRequest().setFilterExpression( '"id_installation" = {}'.format(installation_id) ) - return self.layers['lane'].getFeatures(request) + return self.layers["lane"].getFeatures(request) def get_section(self, section_id): request = QgsFeatureRequest().setFilterExpression( '"id" = {}'.format(section_id) ) - return 
next(self.layers['section'].getFeatures(request)) - + return next(self.layers["section"].getFeatures(request)) - def is_data_aggregate(self, count_id): + def is_data_aggregate(self, count_id: str): self.init_db_connection() query = QSqlQuery(self.db) query_str = ( "select id from comptages.count_aggregate " - "where id_count = {}".format(count_id)) + "where id_count = {}".format(count_id) + ) query.exec_(query_str) if query.next(): return True return False - def is_data_detail(self, count_id): + def is_data_detail(self, count_id: str): self.init_db_connection() query = QSqlQuery(self.db) query_str = ( "select id from comptages.count_detail " - "where id_count = {}".format(count_id)) + "where id_count = {}".format(count_id) + ) query.exec_(query_str) if query.next(): return True return False - def select_and_zoom_on_section_of_count(self, count_id): + def select_and_zoom_on_section_of_count(self, count_id: str): sections = self.get_sections_of_count(count_id) - layer = self.layers['section'] + layer = self.layers["section"] layer.selectByIds([x.id() for x in sections]) iface.setActiveLayer(layer) iface.actionZoomToSelected().trigger() - def get_predefined_config_from_count(self, count_id): count = self.get_count(count_id) - model = self.get_model(count.attribute('id_model')) + model = self.get_model(count.attribute("id_model")) - return model.attribute('configuration') + return model.attribute("configuration") def get_model(self, model_id): - request = QgsFeatureRequest().setFilterExpression( - '"id" = {}'.format(model_id)) + request = QgsFeatureRequest().setFilterExpression('"id" = {}'.format(model_id)) - return next(self.layers['model'].getFeatures(request)) + return next(self.layers["model"].getFeatures(request)) def get_class_name_of_count(self, count_id): count = self.get_count(count_id) - clazz = self.get_class(count.attribute('id_class')) - - return clazz.attribute('name') + clazz = self.get_class(count.attribute("id_class")) - def get_class(self, class_id): 
- request = QgsFeatureRequest().setFilterExpression( - '"id" = {}'.format(class_id)) + return clazz.attribute("name") - return next(self.layers['class'].getFeatures(request)) + def get_class(self, class_id: str): + request = QgsFeatureRequest().setFilterExpression('"id" = {}'.format(class_id)) - def get_lanes_of_count(self, count_id): + return next(self.layers["class"].getFeatures(request)) + def get_lanes_of_count(self, count_id: str): return self.get_lanes_of_installation( - self.get_installation_of_count(count_id).attribute('id')) - - def get_lanes_dict(self, count_id): + self.get_installation_of_count(count_id).attribute("id") + ) + def get_lanes_dict(self, count_id: str): # Cached values if count_id in self.lanes_cache: return self.lanes_cache[count_id] @@ -707,62 +751,62 @@ def get_lanes_dict(self, count_id): result = dict() for lane in lanes: - result[lane.attribute('number')] = lane.attribute('id') + result[lane.attribute("number")] = lane.attribute("id") self.lanes_cache[count_id] = result return result def invalidate_lanes_cache(self): - """ To be called after an import is finished""" + """To be called after an import is finished""" self.lanes_cache = dict() - def get_sensor_type_of_count(self, count_id): - sensor_type_id = self.get_count(count_id).attribute('id_sensor_type') + def get_sensor_type_of_count(self, count_id: str): + sensor_type_id = self.get_count(count_id).attribute("id_sensor_type") return self.get_sensor_type(sensor_type_id) - def get_sensor_type(self, sensor_type_id): + def get_sensor_type(self, sensor_type_id: str): """Return the sensor_type feature""" request = QgsFeatureRequest().setFilterExpression( '"id" = {}'.format(sensor_type_id) ) - return next(self.layers['sensor_type'].getFeatures(request)) + return next(self.layers["sensor_type"].getFeatures(request)) - def get_sensor_type_id(self, sensor_type): + def get_sensor_type_id(self, sensor_type: str): request = QgsFeatureRequest().setFilterExpression( - '"name" = 
\'{}\''.format(sensor_type) + "\"name\" = '{}'".format(sensor_type) ) - return next(self.layers['sensor_type'].getFeatures( - request)).attribute('id') + return next(self.layers["sensor_type"].getFeatures(request)).attribute("id") def write_special_period( - self, start_date, end_date, description, entity, influence): + self, start_date, end_date, description, entity, influence + ): """Insert into special_period only if it is not altready present""" self.init_db_connection() query = QSqlQuery(self.db) - query_str = ("INSERT INTO comptages.special_period " - "(start_date, end_date, description, entity, influence) " - "SELECT '{0}', '{1}', '{2}', '{3}', '{4}' " - "WHERE NOT EXISTS ( " - "SELECT id FROM comptages.special_period WHERE " - "start_date = '{0}' AND end_date = '{1}' AND " - "description = '{2}' AND entity = '{3}' AND " - "influence = '{4}');".format( - start_date, - end_date, - description, - entity, - influence)) + query_str = ( + "INSERT INTO comptages.special_period " + "(start_date, end_date, description, entity, influence) " + "SELECT '{0}', '{1}', '{2}', '{3}', '{4}' " + "WHERE NOT EXISTS ( " + "SELECT id FROM comptages.special_period WHERE " + "start_date = '{0}' AND end_date = '{1}' AND " + "description = '{2}' AND entity = '{3}' AND " + "influence = '{4}');".format( + start_date, end_date, description, entity, influence + ) + ) query.exec_(query_str) def get_special_period(self, special_period_id): request = QgsFeatureRequest().setFilterExpression( - '"id" = {}'.format(special_period_id)) + '"id" = {}'.format(special_period_id) + ) - return next(self.layers['special_period'].getFeatures(request)) + return next(self.layers["special_period"].getFeatures(request)) def get_special_period_overlaps(self, start_date, end_date): """Return the ids of the special periods thats overlaps the @@ -775,8 +819,8 @@ def get_special_period_overlaps(self, start_date, end_date): "select * from comptages.special_period where " "('{0}' >= start_date AND '{0}' < 
end_date) OR " "('{1}' >= start_date AND '{1}' < end_date) OR " - "('{0}' < start_date AND '{1}' >= end_date); ".format( - start_date, end_date)) + "('{0}' < start_date AND '{1}' >= end_date); ".format(start_date, end_date) + ) result = [] query.exec_(query_str) @@ -790,29 +834,30 @@ def check_dates(self, start_date, end_date): during the count dates""" special_periods = self.get_special_period_overlaps( - start_date.toString('yyyy-MM-dd'), - end_date.toString('yyyy-MM-dd')) + start_date.toString("yyyy-MM-dd"), end_date.toString("yyyy-MM-dd") + ) result = [] for special_period_id in special_periods: special_period = self.get_special_period(special_period_id) - start = special_period.attribute( - 'start_date').toString('dd.MM.yyyy') - end = special_period.attribute( - 'end_date').addDays(-1).toString('dd.MM.yyyy') + start = special_period.attribute("start_date").toString("dd.MM.yyyy") + end = ( + special_period.attribute("end_date").addDays(-1).toString("dd.MM.yyyy") + ) if start == end: - result.append('{} ({})'.format( - special_period.attribute('description'), - start)) + result.append( + "{} ({})".format(special_period.attribute("description"), start) + ) else: - result.append('{} ({}-{})'.format( - special_period.attribute('description'), - start, - end)) - return '; '.join(result) + result.append( + "{} ({}-{})".format( + special_period.attribute("description"), start, end + ) + ) + return "; ".join(result) - def count_contains_data(self, count_id): + def count_contains_data(self, count_id: str): self.init_db_connection() query = QSqlQuery(self.db) @@ -820,22 +865,22 @@ def count_contains_data(self, count_id): "select id from comptages.count_aggregate where id_count = {0} " "and import_status = {1} " "union select id from comptages.count_detail where id_count = {0} " - "and import_status = {1}; ".format( - count_id, self.IMPORT_STATUS_DEFINITIVE)) + "and import_status = {1}; ".format(count_id, self.IMPORT_STATUS_DEFINITIVE) + ) query.exec_(query_str) if 
query.next(): return True return False - def get_type_of_aggregate_count(self, count_id, import_status): + def get_type_of_aggregate_count(self, count_id: str, import_status): self.init_db_connection() query = QSqlQuery(self.db) query_str = ( "select distinct(type) from comptages.count_aggregate where " - "id_count = {} and import_status = {};".format( - count_id, import_status)) + "id_count = {} and import_status = {};".format(count_id, import_status) + ) result = [] query.exec_(query_str) @@ -844,8 +889,8 @@ def get_type_of_aggregate_count(self, count_id, import_status): return result def get_characteristic_speeds( - self, count_id, hour, direction, start_timestamp, end_timestamp, - section_id): + self, count_id, hour, direction, start_timestamp, end_timestamp, section_id: str + ): self.init_db_connection() query = QSqlQuery(self.db) result = [] @@ -857,8 +902,9 @@ def get_characteristic_speeds( "and lan.id_section = '{}' " "and date_part('hour', det.timestamp) = {} " "and det.timestamp>='{}' and det.timestamp<'{}';".format( - count_id, direction, section_id, hour, start_timestamp, - end_timestamp)) + count_id, direction, section_id, hour, start_timestamp, end_timestamp + ) + ) query.exec_(query_str) if query.next(): @@ -884,14 +930,21 @@ def get_characteristic_speeds( "order by speed " "offset ({}-1) rows " "fetch next 1 rows only;".format( - count_id, direction, section_id, hour, start_timestamp, - end_timestamp, i)) + count_id, + direction, + section_id, + hour, + start_timestamp, + end_timestamp, + i, + ) + ) query.exec_(query_str) query.next() if query.value(0) and query.value(0) >= 1: result.append(query.value(0)) else: - result.append('NA') + result.append("NA") query_str = ( "select coalesce(avg(det.speed), 0) from " @@ -901,56 +954,59 @@ def get_characteristic_speeds( "and lan.id_section = '{}' " "and date_part('hour', det.timestamp) = {} " "and det.timestamp>='{}' and det.timestamp<'{}';".format( - count_id, direction, section_id, hour, start_timestamp, 
- end_timestamp)) + count_id, direction, section_id, hour, start_timestamp, end_timestamp + ) + ) query.exec_(query_str) query.next() if query.value(0) and query.value(0) >= 1: result.append(query.value(0)) else: - result.append('NA') + result.append("NA") return result - def get_formatter_name(self, model_name): + def get_formatter_name(self, model_name: str): request = QgsFeatureRequest().setFilterExpression( - '"name" = \'{}\''.format(model_name) + "\"name\" = '{}'".format(model_name) + ) + return next(self.layers["brand"].getFeatures(request)).attribute( + "formatter_name" ) - return next(self.layers['brand'].getFeatures( - request)).attribute('formatter_name') - def get_classes_of_section(self, section_id): + def get_classes_of_section(self, section_id: str): result = set() counts = self.get_counts_of_section(section_id) for count in counts: - result.add(self.get_class_name_of_count(count.attribute('id'))) + result.add(self.get_class_name_of_count(count.attribute("id"))) return result - def check_sensor_of_lane(self, lane_id): - """ Check id a lane is registered in the sensor table""" + def check_sensor_of_lane(self, lane_id: str): + """Check id a lane is registered in the sensor table""" self.init_db_connection() query = QSqlQuery(self.db) - query_str = ( - "select id from comptages.sensor where id_lane = {0};".format( - lane_id)) + query_str = "select id from comptages.sensor where id_lane = {0};".format( + lane_id + ) query.exec_(query_str) if query.next(): return True return False - def get_sensor_length(self, lane_id): - """ Get length of geometry in the sensor table""" + def get_sensor_length(self, lane_id: str): + """Get length of geometry in the sensor table""" self.init_db_connection() query = QSqlQuery(self.db) - query_str = ( - "select ST_LENGTH(geometry) from comptages.sensor where id_lane = {};".format(lane_id)) + query_str = "select ST_LENGTH(geometry) from comptages.sensor where id_lane = {};".format( + lane_id + ) query.exec_(query_str) @@ 
-958,19 +1014,16 @@ def get_sensor_length(self, lane_id): return query.value(0) return None - def get_models_by_sensor_type(self, sensor_type): - qs = models.SensorTypeModel.objects.filter( - id_sensor_type=sensor_type) + def get_models_by_sensor_type(self, sensor_type: models.SensorType): + qs = models.SensorTypeModel.objects.filter(id_sensor_type=sensor_type) result = [] for i in qs: result.append(i.id_model.id) return result - - def get_classes_by_sensor_type(self, sensor_type): - qs = models.SensorTypeClass.objects.filter( - id_sensor_type=sensor_type) + def get_classes_by_sensor_type(self, sensor_type: models.SensorType): + qs = models.SensorTypeClass.objects.filter(id_sensor_type=sensor_type) result = [] for i in qs: diff --git a/comptages/core/report.py b/comptages/core/report.py index 9b9657d0..3beb7880 100644 --- a/comptages/core/report.py +++ b/comptages/core/report.py @@ -11,33 +11,35 @@ def simple_print_callback(progress): print(f"Generating report... {progress}%") -def prepare_reports(file_path, count=None, year=None, template='default', section_id=None, callback_progress=simple_print_callback): +def prepare_reports( + file_path, + count=None, + year=None, + template="default", + section_id=None, + callback_progress=simple_print_callback, +): current_dir = os.path.dirname(os.path.abspath(__file__)) - if template == 'default': - template_name = 'template.xlsx' - template_path = os.path.join( - current_dir, - os.pardir, - 'report', - template_name) + if template == "default": + template_name = "template.xlsx" + template_path = os.path.join(current_dir, os.pardir, "report", template_name) _prepare_default_reports(file_path, count, template_path, callback_progress) - elif template == 'yearly': - template_name = 'template_yearly.xlsx' - template_path = os.path.join( - current_dir, - os.pardir, - 'report', - template_name) - _prepare_yearly_report(file_path, year, template_path, section_id, callback_progress) - elif template == 'yearly_bike': + elif template 
== "yearly": + template_name = "template_yearly.xlsx" + template_path = os.path.join(current_dir, os.pardir, "report", template_name) + _prepare_yearly_report( + file_path, year, template_path, section_id, callback_progress + ) + elif template == "yearly_bike": pass def _prepare_default_reports(file_path, count, template_path, callback_progress): - # We do by section and not by count because of special cases. - sections = models.Section.objects.filter(lane__id_installation__count=count).distinct() + sections = models.Section.objects.filter( + lane__id_installation__count=count + ).distinct() mondays_qty = len(list(_mondays_of_count(count))) mondays = _mondays_of_count(count) @@ -53,18 +55,20 @@ def _prepare_default_reports(file_path, count, template_path, callback_progress) _data_category(count, section, monday, workbook) _remove_useless_sheets(count, workbook) output = os.path.join( - file_path, '{}_{}_r.xlsx'.format( - section.id, - monday.strftime("%Y%m%d"))) + file_path, "{}_{}_r.xlsx".format(section.id, monday.strftime("%Y%m%d")) + ) workbook.save(filename=output) -def _prepare_yearly_report(file_path, year, template_path, section_id, callback_progress): - +def _prepare_yearly_report( + file_path, year, template_path, section_id, callback_progress +): section = models.Section.objects.get(id__contains=section_id) # Get first count to be used as example - count_qs = models.Count.objects.filter(id_installation__lane__id_section=section, start_process_date__year=year) + count_qs = models.Count.objects.filter( + id_installation__lane__id_section=section, start_process_date__year=year + ) if not count_qs: return count = count_qs[0] @@ -76,10 +80,7 @@ def _prepare_yearly_report(file_path, year, template_path, section_id, callback_ _data_speed_yearly(count, section, year, workbook) _data_category_yearly(count, section, year, workbook) _remove_useless_sheets(count, workbook) - output = os.path.join( - file_path, '{}_{}_r.xlsx'.format( - section.id, - year)) + output = 
os.path.join(file_path, "{}_{}_r.xlsx".format(section.id, year)) workbook.save(filename=output) @@ -91,7 +92,7 @@ def _mondays_of_count(count): end = count.end_process_date # Monday of first week - monday = (start - timedelta(days=start.weekday())) + monday = start - timedelta(days=start.weekday()) yield monday while True: @@ -102,104 +103,95 @@ def _mondays_of_count(count): def _data_count(count, section, monday, workbook): - ws = workbook['Data_count'] - ws['B3'] = ( - 'Poste de comptage : {} Axe : {}:{}{} ' - 'PR {} + {} m à PR {} + {} m').format( - section.id, - section.owner, - section.road, - section.way, - section.start_pr, - int(section.start_dist), - section.end_pr, - int(section.end_dist) + ws = workbook["Data_count"] + ws["B3"] = ( + "Poste de comptage : {} Axe : {}:{}{} " "PR {} + {} m à PR {} + {} m" + ).format( + section.id, + section.owner, + section.road, + section.way, + section.start_pr, + int(section.start_dist), + section.end_pr, + int(section.end_dist), ) - ws['B4'] = 'Periode de comptage du {} au {}'.format( + ws["B4"] = "Periode de comptage du {} au {}".format( monday.strftime("%d/%m/%Y"), (monday + timedelta(days=7)).strftime("%d/%m/%Y"), ) - ws['B5'] = 'Comptage {}'.format( - monday.strftime("%Y") - ) - ws['B6'] = 'Type de capteur : {}'.format( - count.id_sensor_type.name - ) - ws['B7'] = 'Modèle : {}'.format( - count.id_model.name) - ws['B8'] = 'Classification : {}'.format(count.id_class.name) + ws["B5"] = "Comptage {}".format(monday.strftime("%Y")) + ws["B6"] = "Type de capteur : {}".format(count.id_sensor_type.name) + ws["B7"] = "Modèle : {}".format(count.id_model.name) + ws["B8"] = "Classification : {}".format(count.id_class.name) - ws['B9'] = 'Comptage véhicule par véhicule' + ws["B9"] = "Comptage véhicule par véhicule" if _is_aggregate(count): - ws['B9'] = 'Comptage par interval' + ws["B9"] = "Comptage par interval" - special_periods = statistics.get_special_periods( - monday, - monday + timedelta(days=6)) + special_periods = 
statistics.get_special_periods(monday, monday + timedelta(days=6)) texts = [] for i in special_periods: texts.append(f"{i.start_date} - {i.end_date}: {i.description}") - ws['B10'] = 'Periode speciales : {}'.format( - ", ".join(texts) - ) + ws["B10"] = "Periode speciales : {}".format(", ".join(texts)) - ws['B11'] = section.place_name + ws["B11"] = section.place_name if count.remarks: - ws['B12'] = 'Remarque : {}'.format(count.remarks) + ws["B12"] = "Remarque : {}".format(count.remarks) - lanes = models.Lane.objects.filter(id_section=section).order_by('direction') + lanes = models.Lane.objects.filter(id_section=section).order_by("direction") if lanes: - ws['B13'] = lanes[0].direction_desc + ws["B13"] = lanes[0].direction_desc if len(lanes) > 1: - ws['B14'] = lanes[1].direction_desc + ws["B14"] = lanes[1].direction_desc def _data_count_yearly(count, section, year, workbook): - ws = workbook['Data_count'] - ws['B3'] = ( - 'Poste de comptage : {} Axe : {}:{}{} ' - 'PR {} + {} m à PR {} + {} m').format( - section.id, - section.owner, - section.road, - section.way, - section.start_pr, - int(section.start_dist), - section.end_pr, - int(section.end_dist) + ws = workbook["Data_count"] + ws["B3"] = ( + "Poste de comptage : {} Axe : {}:{}{} " "PR {} + {} m à PR {} + {} m" + ).format( + section.id, + section.owner, + section.road, + section.way, + section.start_pr, + int(section.start_dist), + section.end_pr, + int(section.end_dist), ) - ws['B4'] = 'Periode de comptage du 01/01/{0} au 31/12/{0}'.format(year) + ws["B4"] = "Periode de comptage du 01/01/{0} au 31/12/{0}".format(year) - ws['B5'] = 'Comptage {}'.format(year) + ws["B5"] = "Comptage {}".format(year) - ws['B6'] = 'Type de capteur : {}'.format(count.id_sensor_type.name) + ws["B6"] = "Type de capteur : {}".format(count.id_sensor_type.name) - ws['B7'] = 'Modèle : {}'.format(count.id_model.name) + ws["B7"] = "Modèle : {}".format(count.id_model.name) - ws['B8'] = 'Classification : {}'.format(count.id_class.name) + ws["B8"] 
= "Classification : {}".format(count.id_class.name) - ws['B9'] = 'Comptage véhicule par véhicule' + ws["B9"] = "Comptage véhicule par véhicule" if _is_aggregate(count): - ws['B9'] = 'Comptage par intervale' + ws["B9"] = "Comptage par intervale" - ws['B11'] = section.place_name + ws["B11"] = section.place_name if count.remarks: - ws['B12'] = 'Remarque : {}'.format(count.remarks) + ws["B12"] = "Remarque : {}".format(count.remarks) - lanes = models.Lane.objects.filter(id_section=section).order_by('direction') + lanes = models.Lane.objects.filter(id_section=section).order_by("direction") if lanes: - ws['B13'] = lanes[0].direction_desc + ws["B13"] = lanes[0].direction_desc if len(lanes) > 1: - ws['B14'] = lanes[1].direction_desc + ws["B14"] = lanes[1].direction_desc def _data_day(count, section, monday, workbook): - ws = workbook['Data_day'] + ws = workbook["Data_day"] # Total row_offset = 65 @@ -209,27 +201,36 @@ def _data_day(count, section, monday, workbook): count, section, start=monday + timedelta(days=i), - end=monday + timedelta(days=i + 1) + end=monday + timedelta(days=i + 1), ) for row in df.itertuples(): - ws.cell( - row=row_offset + row.hour, - column=col_offset + i, - value=row.thm - ) + ws.cell(row=row_offset + row.hour, column=col_offset + i, value=row.thm) # Monthly coefficients row_offset = 2 col_offset = 2 - monthly_coefficients = [0.93, 0.96, 1.00, 1.02, 1.01, 1.04, 0.98, 0.98, 1.04, 1.03, 1.02, 0.98] + monthly_coefficients = [ + 0.93, + 0.96, + 1.00, + 1.02, + 1.01, + 1.04, + 0.98, + 0.98, + 1.04, + 1.03, + 1.02, + 0.98, + ] for i in range(7): day = monday + timedelta(days=i) ws.cell( row=row_offset, column=col_offset + i, - value=monthly_coefficients[day.month-1] + value=monthly_coefficients[day.month - 1], ) # Direction 1 @@ -241,15 +242,11 @@ def _data_day(count, section, monday, workbook): section, start=monday + timedelta(days=i), end=monday + timedelta(days=i + 1), - direction=1 + direction=1, ) for row in df.itertuples(): - ws.cell( - 
row=row_offset + row.hour, - column=col_offset + i, - value=row.thm - ) + ws.cell(row=row_offset + row.hour, column=col_offset + i, value=row.thm) # Light heavy direction 1 row_offset = 30 @@ -260,18 +257,10 @@ def _data_day(count, section, monday, workbook): section, start=monday + timedelta(days=i), end=monday + timedelta(days=i + 1), - direction=1 - ) - ws.cell( - row=row_offset, - column=col_offset + i, - value=light.get(True, 0) - ) - ws.cell( - row=row_offset + 1, - column=col_offset + i, - value=light.get(False, 0) + direction=1, ) + ws.cell(row=row_offset, column=col_offset + i, value=light.get(True, 0)) + ws.cell(row=row_offset + 1, column=col_offset + i, value=light.get(False, 0)) # Direction 2 row_offset = 35 @@ -282,15 +271,11 @@ def _data_day(count, section, monday, workbook): section, start=monday + timedelta(days=i), end=monday + timedelta(days=i + 1), - direction=2 + direction=2, ) for row in df.itertuples(): - ws.cell( - row=row_offset + row.hour, - column=col_offset + i, - value=row.thm - ) + ws.cell(row=row_offset + row.hour, column=col_offset + i, value=row.thm) # Light heavy direction 2 row_offset = 60 @@ -301,131 +286,115 @@ def _data_day(count, section, monday, workbook): section, start=monday + timedelta(days=i), end=monday + timedelta(days=i + 1), - direction=2 - ) - ws.cell( - row=row_offset, - column=col_offset + i, - value=light.get(True, 0) - ) - ws.cell( - row=row_offset + 1, - column=col_offset + i, - value=light.get(False, 0) + direction=2, ) + ws.cell(row=row_offset, column=col_offset + i, value=light.get(True, 0)) + ws.cell(row=row_offset + 1, column=col_offset + i, value=light.get(False, 0)) def _data_day_yearly(count, section, year, workbook): - ws = workbook['Data_day'] + ws = workbook["Data_day"] # Total row_offset = 5 col_offset = 2 - df = statistics.get_time_data_yearly( - year, section) + df = statistics.get_time_data_yearly(year, section) for i in range(7): - day_df = df[df['date'] == i] + day_df = df[df["date"] == i] for 
row in day_df.itertuples(): - ws.cell( - row=row_offset + row.hour, - column=col_offset + i, - value=row.thm - ) + ws.cell(row=row_offset + row.hour, column=col_offset + i, value=row.thm) # Monthly coefficients row_offset = 31 col_offset = 2 - monthly_coefficients = [0.93, 0.96, 1.00, 1.02, 1.01, 1.04, 0.98, 0.98, 1.04, 1.03, 1.02, 0.98] + monthly_coefficients = [ + 0.93, + 0.96, + 1.00, + 1.02, + 1.01, + 1.04, + 0.98, + 0.98, + 1.04, + 1.03, + 1.02, + 0.98, + ] for i in range(7): ws.cell( row=row_offset, column=col_offset + i, # FIXME: calculate actual coefficients - value=1 + value=1, ) # Direction 1 row_offset = 35 col_offset = 2 - df = statistics.get_time_data_yearly( - year, section, direction=1) + df = statistics.get_time_data_yearly(year, section, direction=1) for i in range(7): - day_df = df[df['date'] == i] + day_df = df[df["date"] == i] for row in day_df.itertuples(): - ws.cell( - row=row_offset + row.hour, - column=col_offset + i, - value=row.thm - ) + ws.cell(row=row_offset + row.hour, column=col_offset + i, value=row.thm) # Light heavy direction 1 row_offset = 61 col_offset = 2 df = statistics.get_light_numbers_yearly( - section, - start=datetime(year, 1, 1), - end=datetime(year + 1, 1, 1), - direction=1 + section, start=datetime(year, 1, 1), end=datetime(year + 1, 1, 1), direction=1 ) for i in range(7): ws.cell( row=row_offset, column=col_offset + i, - value=int(df[df['date'] == i][df['id_category__light'] == True].value), + value=int(df[df["date"] == i][df["id_category__light"] == True].value), ) ws.cell( row=row_offset + 1, column=col_offset + i, - value=int(df[df['date'] == i][df['id_category__light'] == False].value), + value=int(df[df["date"] == i][df["id_category__light"] == False].value), ) # Direction 2 row_offset = 66 col_offset = 2 - df = statistics.get_time_data_yearly( - year, section, direction=2) + df = statistics.get_time_data_yearly(year, section, direction=2) for i in range(7): - day_df = df[df['date'] == i] + day_df = df[df["date"] 
== i] for row in day_df.itertuples(): - ws.cell( - row=row_offset + row.hour, - column=col_offset + i, - value=row.thm - ) + ws.cell(row=row_offset + row.hour, column=col_offset + i, value=row.thm) # Light heavy direction 2 row_offset = 92 col_offset = 2 df = statistics.get_light_numbers_yearly( - section, - start=datetime(year, 1, 1), - end=datetime(year + 1, 1, 1), - direction=2 + section, start=datetime(year, 1, 1), end=datetime(year + 1, 1, 1), direction=2 ) for i in range(7): ws.cell( row=row_offset, column=col_offset + i, - value=int(df[df['date'] == i][df['id_category__light'] == True].value), + value=int(df[df["date"] == i][df["id_category__light"] == True].value), ) ws.cell( row=row_offset + 1, column=col_offset + i, - value=int(df[df['date'] == i][df['id_category__light'] == False].value), + value=int(df[df["date"] == i][df["id_category__light"] == False].value), ) def _data_month_yearly(count, section, year, workbook): - ws = workbook['Data_month'] + ws = workbook["Data_month"] start = datetime(year, 1, 1) end = datetime(year + 1, 1, 1) @@ -435,15 +404,11 @@ def _data_month_yearly(count, section, year, workbook): col_offset = 2 for col in df.itertuples(): - ws.cell( - row=row_offset, - column=col_offset + col.Index, - value=col.tm - ) + ws.cell(row=row_offset, column=col_offset + col.Index, value=col.tm) def _data_speed(count, section, monday, workbook): - ws = workbook['Data_speed'] + ws = workbook["Data_speed"] speed_ranges = [ (0, 10), @@ -494,11 +459,7 @@ def _data_speed(count, section, monday, workbook): ) for row in res: - ws.cell( - row=row_offset + row[0], - column=col_offset + i, - value=row[1] - ) + ws.cell(row=row_offset + row[0], column=col_offset + i, value=row[1]) if not _is_aggregate(count): # Characteristic speed direction 1 @@ -511,13 +472,11 @@ def _data_speed(count, section, monday, workbook): direction=1, start=monday, end=monday + timedelta(days=7), - v=v + v=v, ) for row in df.itertuples(): ws.cell( - row=row_offset + row.Index, - 
column=col_offset + i, - value=row.speed + row=row_offset + row.Index, column=col_offset + i, value=row.speed ) # Average speed direction 1 @@ -525,17 +484,10 @@ def _data_speed(count, section, monday, workbook): col_offset = 19 df = statistics.get_average_speed_by_hour( - count, - section, - direction=1, - start=monday, - end=monday + timedelta(days=7)) + count, section, direction=1, start=monday, end=monday + timedelta(days=7) + ) for row in df.itertuples(): - ws.cell( - row=row_offset + row.Index, - column=col_offset, - value=row.speed - ) + ws.cell(row=row_offset + row.Index, column=col_offset, value=row.speed) # Direction 2 row_offset = 33 @@ -552,11 +504,7 @@ def _data_speed(count, section, monday, workbook): ) for row in res: - ws.cell( - row=row_offset + row[0], - column=col_offset + i, - value=row[1] - ) + ws.cell(row=row_offset + row[0], column=col_offset + i, value=row[1]) if not _is_aggregate(count): # Characteristic speed direction 2 @@ -569,13 +517,11 @@ def _data_speed(count, section, monday, workbook): direction=2, start=monday, end=monday + timedelta(days=7), - v=v + v=v, ) for row in df.itertuples(): ws.cell( - row=row_offset + row.Index, - column=col_offset + i, - value=row.speed + row=row_offset + row.Index, column=col_offset + i, value=row.speed ) # Average speed direction 1 @@ -583,21 +529,14 @@ def _data_speed(count, section, monday, workbook): col_offset = 19 df = statistics.get_average_speed_by_hour( - count, - section, - direction=2, - start=monday, - end=monday + timedelta(days=7)) + count, section, direction=2, start=monday, end=monday + timedelta(days=7) + ) for row in df.itertuples(): - ws.cell( - row=row_offset + row.Index, - column=col_offset, - value=row.speed - ) + ws.cell(row=row_offset + row.Index, column=col_offset, value=row.speed) def _data_speed_yearly(count, section, year, workbook): - ws = workbook['Data_speed'] + ws = workbook["Data_speed"] start = datetime(year, 1, 1) end = datetime(year + 1, 1, 1) @@ -650,11 +589,7 @@ 
def _data_speed_yearly(count, section, year, workbook): ) for row in res: - ws.cell( - row=row_offset + row[0], - column=col_offset + i, - value=row[1] - ) + ws.cell(row=row_offset + row[0], column=col_offset + i, value=row[1]) if not _is_aggregate(count): # Characteristic speed direction 1 @@ -662,18 +597,11 @@ def _data_speed_yearly(count, section, year, workbook): col_offset = 16 for i, v in enumerate(characteristic_speeds): df = statistics.get_characteristic_speed_by_hour( - None, - section, - direction=1, - start=start, - end=end, - v=v + None, section, direction=1, start=start, end=end, v=v ) for row in df.itertuples(): ws.cell( - row=row_offset + row.Index, - column=col_offset + i, - value=row.speed + row=row_offset + row.Index, column=col_offset + i, value=row.speed ) # Average speed direction 1 @@ -688,11 +616,7 @@ def _data_speed_yearly(count, section, year, workbook): end=end, ) for row in df.itertuples(): - ws.cell( - row=row_offset + row.Index, - column=col_offset, - value=row.speed - ) + ws.cell(row=row_offset + row.Index, column=col_offset, value=row.speed) # Direction 2 row_offset = 33 @@ -709,11 +633,7 @@ def _data_speed_yearly(count, section, year, workbook): ) for row in res: - ws.cell( - row=row_offset + row[0], - column=col_offset + i, - value=row[1] - ) + ws.cell(row=row_offset + row[0], column=col_offset + i, value=row[1]) if not _is_aggregate(count): # Characteristic speed direction 2 @@ -721,18 +641,11 @@ def _data_speed_yearly(count, section, year, workbook): col_offset = 16 for i, v in enumerate(characteristic_speeds): df = statistics.get_characteristic_speed_by_hour( - count, - section, - direction=2, - start=start, - end=end, - v=v + count, section, direction=2, start=start, end=end, v=v ) for row in df.itertuples(): ws.cell( - row=row_offset + row.Index, - column=col_offset + i, - value=row.speed + row=row_offset + row.Index, column=col_offset + i, value=row.speed ) # Average speed direction 1 @@ -747,17 +660,17 @@ def 
_data_speed_yearly(count, section, year, workbook): end=end, ) for row in df.itertuples(): - ws.cell( - row=row_offset + row.Index, - column=col_offset, - value=row.speed - ) + ws.cell(row=row_offset + row.Index, column=col_offset, value=row.speed) def _data_category(count, section, monday, workbook): - ws = workbook['Data_category'] + ws = workbook["Data_category"] - categories = models.Category.objects.filter(countdetail__id_count=count).distinct().order_by('code') + categories = ( + models.Category.objects.filter(countdetail__id_count=count) + .distinct() + .order_by("code") + ) # Direction 1 row_offset = 5 @@ -775,13 +688,11 @@ def _data_category(count, section, monday, workbook): for row in res: row_num = row_offset + row[0] col_num = col_offset + _t_cat(count, category.code) - value = ws.cell(row_num, col_num).value + row[1] # Add to previous value because with class convertions multiple categories can converge into a single one + value = ( + ws.cell(row_num, col_num).value + row[1] + ) # Add to previous value because with class convertions multiple categories can converge into a single one - ws.cell( - row=row_num, - column=col_num, - value=value - ) + ws.cell(row=row_num, column=col_num, value=value) # Direction 2 row_offset = 33 @@ -799,21 +710,23 @@ def _data_category(count, section, monday, workbook): for row in res: row_num = row_offset + row[0] col_num = col_offset + _t_cat(count, category.code) - value = ws.cell(row_num, col_num).value + row[1] # Add to previous value because with class convertions multiple categories can converge into a single one + value = ( + ws.cell(row_num, col_num).value + row[1] + ) # Add to previous value because with class convertions multiple categories can converge into a single one - ws.cell( - row=row_num, - column=col_num, - value=value - ) + ws.cell(row=row_num, column=col_num, value=value) def _data_category_yearly(count, section, year, workbook): - ws = workbook['Data_category'] + ws = workbook["Data_category"] start 
= datetime(year, 1, 1)
     end = datetime(year + 1, 1, 1)
 
-    categories = models.Category.objects.filter(countdetail__id_count=count).distinct().order_by('code')
+    categories = (
+        models.Category.objects.filter(countdetail__id_count=count)
+        .distinct()
+        .order_by("code")
+    )
 
     # Direction 1
     row_offset = 5
@@ -831,13 +744,11 @@ def _data_category_yearly(count, section, year, workbook):
         for row in res:
             row_num = row_offset + row[0]
             col_num = col_offset + _t_cat(count, category.code)
-            value = ws.cell(row_num, col_num).value + row[1]  # Add to previous value because with class convertions multiple categories can converge into a single one
+            value = (
+                ws.cell(row_num, col_num).value + row[1]
+            )  # Add to previous value because with class conversions multiple categories can converge into a single one
 
-            ws.cell(
-                row=row_num,
-                column=col_num,
-                value=value
-            )
+            ws.cell(row=row_num, column=col_num, value=value)
 
     # Direction 2
     row_offset = 33
@@ -855,74 +766,73 @@ def _data_category_yearly(count, section, year, workbook):
         for row in res:
             row_num = row_offset + row[0]
             col_num = col_offset + _t_cat(count, category.code)
-            value = ws.cell(row_num, col_num).value + row[1]  # Add to previous value because with class convertions multiple categories can converge into a single one
+            value = (
+                ws.cell(row_num, col_num).value + row[1]
+            )  # Add to previous value because with class conversions multiple categories can converge into a single one
 
-            ws.cell(
-                row=row_num,
-                column=col_num,
-                value=value
-            )
+            ws.cell(row=row_num, column=col_num, value=value)
 
 
 def _remove_useless_sheets(count, workbook):
+    return  # FIXME: this early return disables all sheet removal below -- confirm it is intentional
    class_name = _t_cl(count.id_class.name)
 
-    if class_name == 'SWISS10':
-        workbook.remove_sheet(workbook['SWISS7_H'])
-        workbook.remove_sheet(workbook['SWISS7_G'])
-        workbook.remove_sheet(workbook['EUR6_H'])
-        workbook.remove_sheet(workbook['EUR6_G'])
-    elif class_name == 'SWISS7':
-        workbook.remove_sheet(workbook['SWISS10_H'])
-        workbook.remove_sheet(workbook['SWISS10_G'])
-        
workbook.remove_sheet(workbook['EUR6_H']) - workbook.remove_sheet(workbook['EUR6_G']) - elif class_name == 'EUR6': - workbook.remove_sheet(workbook['SWISS10_H']) - workbook.remove_sheet(workbook['SWISS10_G']) - workbook.remove_sheet(workbook['SWISS7_H']) - workbook.remove_sheet(workbook['SWISS7_G']) - elif class_name == 'Volume1': - workbook.remove_sheet(workbook['SWISS7_H']) - workbook.remove_sheet(workbook['SWISS7_G']) - workbook.remove_sheet(workbook['SWISS10_H']) - workbook.remove_sheet(workbook['SWISS10_G']) - workbook.remove_sheet(workbook['EUR6_H']) - workbook.remove_sheet(workbook['EUR6_G']) + if class_name == "SWISS10": + workbook.remove_sheet(workbook["SWISS7_H"]) + workbook.remove_sheet(workbook["SWISS7_G"]) + workbook.remove_sheet(workbook["EUR6_H"]) + workbook.remove_sheet(workbook["EUR6_G"]) + elif class_name == "SWISS7": + workbook.remove_sheet(workbook["SWISS10_H"]) + workbook.remove_sheet(workbook["SWISS10_G"]) + workbook.remove_sheet(workbook["EUR6_H"]) + workbook.remove_sheet(workbook["EUR6_G"]) + elif class_name == "EUR6": + workbook.remove_sheet(workbook["SWISS10_H"]) + workbook.remove_sheet(workbook["SWISS10_G"]) + workbook.remove_sheet(workbook["SWISS7_H"]) + workbook.remove_sheet(workbook["SWISS7_G"]) + elif class_name == "Volume1": + workbook.remove_sheet(workbook["SWISS7_H"]) + workbook.remove_sheet(workbook["SWISS7_G"]) + workbook.remove_sheet(workbook["SWISS10_H"]) + workbook.remove_sheet(workbook["SWISS10_G"]) + workbook.remove_sheet(workbook["EUR6_H"]) + workbook.remove_sheet(workbook["EUR6_G"]) if _is_aggregate(count): - workbook.remove_sheet(workbook['Vit_Hd']) + workbook.remove_sheet(workbook["Vit_Hd"]) else: - workbook.remove_sheet(workbook['Vit_H']) + workbook.remove_sheet(workbook["Vit_H"]) def _t_cl(class_name): """Translate class name""" - if class_name == 'FHWA13': - return 'SWISS7' + if class_name == "FHWA13": + return "SWISS7" if class_name is None: - return 'Volume1' + return "Volume1" - if class_name == 'SPCH13': - return 
'SWISS7' + if class_name == "SPCH13": + return "SWISS7" return class_name def _t_cat(count, cat_id): """Convert categories of a class into the ones of another class e.g. - FHWA13 should be converted in SWISS7 in order to fill the - report cells + FHWA13 should be converted in SWISS7 in order to fill the + report cells """ - if count.id_class.name == 'ARXCycle13': + if count.id_class.name == "ARXCycle13": # FIXME: implement real conversiont between ARX Cycle and SWISS7 or 10 new_hour = [0] * 7 return new_hour - if count.id_class.name == 'FHWA13': + if count.id_class.name == "FHWA13": conv = { 0: 0, 1: 2, @@ -942,7 +852,7 @@ def _t_cat(count, cat_id): } return conv[cat_id] - if count.id_class.name == 'SPCH13': + if count.id_class.name == "SPCH13": conv = { 0: 0, 1: 2, @@ -961,7 +871,7 @@ def _t_cat(count, cat_id): } return conv[cat_id] - if count.id_class.name == 'EUR6': + if count.id_class.name == "EUR6": conv = { 0: 0, 1: 2, @@ -977,10 +887,10 @@ def _t_cat(count, cat_id): def _is_aggregate(count): - from_aggregate = models. \ - CountDetail.objects. 
\ - filter(id_count=count) \ - .distinct('from_aggregate') \ - .values('from_aggregate')[0]['from_aggregate'] + from_aggregate = ( + models.CountDetail.objects.filter(id_count=count) + .distinct("from_aggregate") + .values("from_aggregate")[0]["from_aggregate"] + ) return from_aggregate diff --git a/comptages/core/report_task.py b/comptages/core/report_task.py index 57452873..8d9bdd74 100644 --- a/comptages/core/report_task.py +++ b/comptages/core/report_task.py @@ -7,11 +7,11 @@ class ReportTask(QgsTask): - - def __init__(self, file_path, count=None, year=None, template="default", section_id=None): + def __init__( + self, file_path, count=None, year=None, template="default", section_id=None + ): self.basename = os.path.basename(file_path) - super().__init__( - 'Génération du rapport: {}'.format(self.basename)) + super().__init__("Génération du rapport: {}".format(self.basename)) self.count = count self.file_path = file_path @@ -21,7 +21,14 @@ def __init__(self, file_path, count=None, year=None, template="default", section def run(self): try: - report.prepare_reports(self.file_path, self.count, self.year, self.template, self.section_id, callback_progress=self.setProgress) + report.prepare_reports( + self.file_path, + self.count, + self.year, + self.template, + self.section_id, + callback_progress=self.setProgress, + ) return True except Exception as e: self.exception = e @@ -31,12 +38,16 @@ def run(self): def finished(self, result): if result: QgsMessageLog.logMessage( - '{} - Report generation {} ended'.format( - datetime.now(), self.basename), - 'Comptages', Qgis.Info) + "{} - Report generation {} ended".format(datetime.now(), self.basename), + "Comptages", + Qgis.Info, + ) else: QgsMessageLog.logMessage( - '{} - Report generation {} ended with errors: {}'.format( - datetime.now(), self.basename, self.exception), - 'Comptages', Qgis.Info) + "{} - Report generation {} ended with errors: {}".format( + datetime.now(), self.basename, self.exception + ), + 
"Comptages", + Qgis.Info, + ) diff --git a/comptages/core/settings.py b/comptages/core/settings.py index a8155fe6..d37bf3a5 100644 --- a/comptages/core/settings.py +++ b/comptages/core/settings.py @@ -10,51 +10,35 @@ class Settings(SettingManager): def __init__(self): - SettingManager.__init__(self, 'Comptages') - - self.add_setting( - String("db_host", Scope.Global, 'comptages-db')) - self.add_setting( - String("db_name", Scope.Global, 'comptages')) - self.add_setting( - Integer("db_port", Scope.Global, 5432)) - self.add_setting( - String("db_username", Scope.Global, 'postgres')) - self.add_setting( - String("db_password", Scope.Global, 'postgres')) - self.add_setting( - Bool("extra_layers", Scope.Global, False)) - self.add_setting( - String("config_export_directory", Scope.Global, '/')) - self.add_setting( - String("plan_export_directory", Scope.Global, '/')) - self.add_setting( - String("data_import_directory", Scope.Global, '/')) - self.add_setting( - String("picture_directory", Scope.Global, '/')) - self.add_setting( - String("report_export_directory", Scope.Global, '/')) - - -def get_ui_class(ui_file): + SettingManager.__init__(self, "Comptages") + + self.add_setting(String("db_host", Scope.Global, "comptages-db")) + self.add_setting(String("db_name", Scope.Global, "comptages")) + self.add_setting(Integer("db_port", Scope.Global, 5432)) + self.add_setting(String("db_username", Scope.Global, "postgres")) + self.add_setting(String("db_password", Scope.Global, "postgres")) + self.add_setting(Bool("extra_layers", Scope.Global, False)) + self.add_setting(String("config_export_directory", Scope.Global, "/")) + self.add_setting(String("plan_export_directory", Scope.Global, "/")) + self.add_setting(String("data_import_directory", Scope.Global, "/")) + self.add_setting(String("picture_directory", Scope.Global, "/")) + self.add_setting(String("report_export_directory", Scope.Global, "/")) + + +def get_ui_class(ui_file: str): """Get UI Python class from .ui file. 
Can be filename.ui or subdirectory/filename.ui :param ui_file: The file of the ui in svir.ui :type ui_file: str """ - os.path.sep.join(ui_file.split('/')) + os.path.sep.join(ui_file.split("/")) ui_file_path = os.path.abspath( - os.path.join( - os.path.dirname(__file__), - os.pardir, - 'ui', - ui_file - ) + os.path.join(os.path.dirname(__file__), os.pardir, "ui", ui_file) ) return loadUiType(ui_file_path)[0] -FORM_CLASS = get_ui_class('settings_dialog.ui') +FORM_CLASS = get_ui_class("settings_dialog.ui") class SettingsDialog(QDialog, FORM_CLASS, SettingDialog): diff --git a/comptages/core/statistics.py b/comptages/core/statistics.py index 5355ed4a..01398326 100644 --- a/comptages/core/statistics.py +++ b/comptages/core/statistics.py @@ -4,15 +4,21 @@ from django.db.models import F, CharField, Value, Q from django.db.models import Sum -from django.db.models.functions import ( - ExtractHour, Trunc, Concat) +from django.db.models.functions import ExtractHour, Trunc, Concat from comptages.core import definitions from comptages.datamodel import models -def get_time_data(count, section, lane=None, direction=None, start=None, end=None, exclude_trash=False): - +def get_time_data( + count: models.Count, + section: models.Section, + lane=None, + direction=None, + start=None, + end=None, + exclude_trash=False, +): if not start: start = count.start_process_date if not end: @@ -24,7 +30,8 @@ def get_time_data(count, section, lane=None, direction=None, start=None, end=Non id_count=count, id_lane__id_section=section, id_category__isnull=False, - timestamp__gte=start, timestamp__lt=end + timestamp__gte=start, + timestamp__lt=end, ) if lane is not None: @@ -37,17 +44,19 @@ def get_time_data(count, section, lane=None, direction=None, start=None, end=Non qs = qs.exclude(id_category__trash=True) # Vehicles by day and hour - qs = qs.annotate(date=Trunc('timestamp', 'day'), hour=ExtractHour('timestamp')) \ - .order_by('hour') \ - .values('date', 'hour') \ - .order_by('-date', 'hour') \ - 
.annotate(thm=Sum('times')) \ - .values('import_status', 'date', 'hour', 'thm') + qs = ( + qs.annotate(date=Trunc("timestamp", "day"), hour=ExtractHour("timestamp")) + .order_by("hour") + .values("date", "hour") + .order_by("-date", "hour") + .annotate(thm=Sum("times")) + .values("import_status", "date", "hour", "thm") + ) df = pd.DataFrame.from_records(qs) if not df.empty: - df['date'] = df['date'].dt.strftime('%a %d.%m.%Y') - df['import_status'].replace({0: 'Existant', 1: 'Nouveau'}, inplace=True) + df["date"] = df["date"].dt.strftime("%a %d.%m.%Y") + df["import_status"].replace({0: "Existant", 1: "Nouveau"}, inplace=True) return df @@ -61,7 +70,8 @@ def get_time_data_yearly(year, section, lane=None, direction=None): qs = models.CountDetail.objects.filter( id_lane__id_section=section, id_category__isnull=False, - timestamp__gte=start, timestamp__lt=end + timestamp__gte=start, + timestamp__lt=end, ) if lane is not None: @@ -71,22 +81,32 @@ def get_time_data_yearly(year, section, lane=None, direction=None): qs = qs.filter(id_lane__direction=direction) # Vehicles by day and hour - qs = qs.annotate(date=Trunc('timestamp', 'day'), hour=ExtractHour('timestamp')) \ - .order_by('hour') \ - .values('date', 'hour') \ - .order_by('date', 'hour') \ - .annotate(thm=Sum('times')) \ - .values('import_status', 'date', 'hour', 'thm') + qs = ( + qs.annotate(date=Trunc("timestamp", "day"), hour=ExtractHour("timestamp")) + .order_by("hour") + .values("date", "hour") + .order_by("date", "hour") + .annotate(thm=Sum("times")) + .values("import_status", "date", "hour", "thm") + ) df = pd.DataFrame.from_records(qs) - df = df.groupby([df['date'].dt.dayofweek, 'hour']).thm.sum() + df = df.groupby([df["date"].dt.dayofweek, "hour"]).thm.sum() df = df.reset_index() return df -def get_day_data(count, section=None, lane=None, direction=None, status=None, exclude_trash=False, start=None, end=None): - +def get_day_data( + count, + section=None, + lane=None, + direction=None, + status=None, + 
exclude_trash=False, + start=None, + end=None, +): if not start: start = count.start_process_date if not end: @@ -95,23 +115,19 @@ def get_day_data(count, section=None, lane=None, direction=None, status=None, ex qs = models.CountDetail.objects.filter( id_count=count, id_category__isnull=False, - timestamp__gte=start, timestamp__lt=end + timestamp__gte=start, + timestamp__lt=end, ) if exclude_trash: qs = qs.exclude(id_category__trash=True) - # Can be None if we are calculating the total TJM of a special case's count if section is not None: - qs = qs.filter( - id_lane__id_section=section - ) + qs = qs.filter(id_lane__id_section=section) if status is not None: - qs = qs.filter( - import_status=status - ) + qs = qs.filter(import_status=status) if lane is not None: qs = qs.filter(id_lane=lane) @@ -119,24 +135,27 @@ def get_day_data(count, section=None, lane=None, direction=None, status=None, ex if direction is not None: qs = qs.filter(id_lane__direction=direction) - qs = qs.annotate(date=Trunc('timestamp', 'day')) \ - .order_by('date') \ - .values('date', 'import_status') \ - .annotate(tj=Sum('times')) \ - .values('date', 'tj', 'import_status') + qs = ( + qs.annotate(date=Trunc("timestamp", "day")) + .order_by("date") + .values("date", "import_status") + .annotate(tj=Sum("times")) + .values("date", "tj", "import_status") + ) df = pd.DataFrame.from_records(qs) mean = 0 if not df.empty: mean = df["tj"].mean() - df['import_status'].replace({0: 'Existant', 1: 'Nouveau'}, inplace=True) + df["import_status"].replace({0: "Existant", 1: "Nouveau"}, inplace=True) return df, int(mean) -def get_category_data(count, section, status=definitions.IMPORT_STATUS_DEFINITIVE, start=None, end=None): - +def get_category_data( + count, section, status=definitions.IMPORT_STATUS_DEFINITIVE, start=None, end=None +): if not start: start = count.start_process_date if not end: @@ -147,29 +166,33 @@ def get_category_data(count, section, status=definitions.IMPORT_STATUS_DEFINITIV 
id_lane__id_section=section, id_category__isnull=False, import_status=status, - timestamp__gte=start, timestamp__lt=end + timestamp__gte=start, + timestamp__lt=end, ) - qs = qs.annotate(cat_name=F('id_category__name')) \ - .annotate(cat_code=F('id_category__code')) \ - .annotate( - cat_name_code=Concat( - F('id_category__name'), - Value(' ('), - F('id_category__code'), - Value(')'), - output_field=CharField())) \ - .values('cat_name', 'cat_code', 'cat_name_code') \ - .annotate(value=Sum('times')) \ - .order_by('cat_code') \ - .values('cat_name', 'cat_code', 'cat_name_code', 'value') + qs = ( + qs.annotate(cat_name=F("id_category__name")) + .annotate(cat_code=F("id_category__code")) + .annotate( + cat_name_code=Concat( + F("id_category__name"), + Value(" ("), + F("id_category__code"), + Value(")"), + output_field=CharField(), + ) + ) + .values("cat_name", "cat_code", "cat_name_code") + .annotate(value=Sum("times")) + .order_by("cat_code") + .values("cat_name", "cat_code", "cat_name_code", "value") + ) df = pd.DataFrame.from_records(qs) return df def get_speed_data(count, section, exclude_trash=False, start=None, end=None): - if not start: start = count.start_process_date if not end: @@ -179,51 +202,53 @@ def get_speed_data(count, section, exclude_trash=False, start=None, end=None): id_count=count, id_lane__id_section=section, speed__isnull=False, - timestamp__gte=start, timestamp__lt=end + timestamp__gte=start, + timestamp__lt=end, ) if exclude_trash: qs = qs.exclude(id_category__trash=True) - df = pd.DataFrame.from_records(qs.values('speed', 'times', 'import_status')) + df = pd.DataFrame.from_records(qs.values("speed", "times", "import_status")) if df.empty: return df df = df.groupby( - ['import_status', - pd.cut( - df['speed'], - bins=[0, 10, 20, 30, 40, 50, 60, 70, 80, 90, 100, 110, 120, 999], - labels=[ - '0-10', - '10-20', - '20-30', - '30-40', - '40-50', - '50-60', - '60-70', - '70-80', - '80-90', - '90-100', - '100-110', - '110-120', - '120-999', - ], - 
right=False, # Don't include rightmost edge (e.g. bin 10-20 is actually 10-19.9999999...) - ) - ]).sum('times') - - df = df.rename(columns={'speed': 'speedNP'}) - - df = df.reset_index(col_fill='NPLA_') - df['import_status'].replace({0: 'Existant', 1: 'Nouveau'}, inplace=True) + [ + "import_status", + pd.cut( + df["speed"], + bins=[0, 10, 20, 30, 40, 50, 60, 70, 80, 90, 100, 110, 120, 999], + labels=[ + "0-10", + "10-20", + "20-30", + "30-40", + "40-50", + "50-60", + "60-70", + "70-80", + "80-90", + "90-100", + "100-110", + "110-120", + "120-999", + ], + right=False, # Don't include rightmost edge (e.g. bin 10-20 is actually 10-19.9999999...) + ), + ] + ).sum("times") + + df = df.rename(columns={"speed": "speedNP"}) + + df = df.reset_index(col_fill="NPLA_") + df["import_status"].replace({0: "Existant", 1: "Nouveau"}, inplace=True) return df def get_light_numbers(count, section, lane=None, direction=None, start=None, end=None): - if not start: start = count.start_process_date if not end: @@ -233,7 +258,8 @@ def get_light_numbers(count, section, lane=None, direction=None, start=None, end id_count=count, id_lane__id_section=section, id_category__isnull=False, - timestamp__gte=start, timestamp__lt=end + timestamp__gte=start, + timestamp__lt=end, ) if lane is not None: @@ -242,9 +268,11 @@ def get_light_numbers(count, section, lane=None, direction=None, start=None, end if direction is not None: qs = qs.filter(id_lane__direction=direction) - qs = qs.values('id_category__light') \ - .annotate(value=Sum('times')) \ - .values_list('id_category__light', 'value') + qs = ( + qs.values("id_category__light") + .annotate(value=Sum("times")) + .values_list("id_category__light", "value") + ) res = {} for r in qs: @@ -253,11 +281,11 @@ def get_light_numbers(count, section, lane=None, direction=None, start=None, end def get_light_numbers_yearly(section, lane=None, direction=None, start=None, end=None): - qs = models.CountDetail.objects.filter( id_lane__id_section=section, 
id_category__isnull=False, - timestamp__gte=start, timestamp__lt=end + timestamp__gte=start, + timestamp__lt=end, ) if lane is not None: @@ -266,17 +294,25 @@ def get_light_numbers_yearly(section, lane=None, direction=None, start=None, end if direction is not None: qs = qs.filter(id_lane__direction=direction) - qs = qs.annotate(date=Trunc('timestamp', 'day')) - qs = qs.values('date', 'id_category__light').annotate(value=Sum('times')) + qs = qs.annotate(date=Trunc("timestamp", "day")) + qs = qs.values("date", "id_category__light").annotate(value=Sum("times")) df = pd.DataFrame.from_records(qs) - df = df.groupby([df['date'].dt.dayofweek, 'id_category__light']).value.sum() + df = df.groupby([df["date"].dt.dayofweek, "id_category__light"]).value.sum() return df.reset_index() -def get_speed_data_by_hour(count, section, lane=None, direction=None, start=None, end=None, speed_low=0, speed_high=15): - +def get_speed_data_by_hour( + count, + section, + lane=None, + direction=None, + start=None, + end=None, + speed_low=0, + speed_high=15, +): if not start: start = count.start_process_date if not end: @@ -286,7 +322,8 @@ def get_speed_data_by_hour(count, section, lane=None, direction=None, start=None id_lane__id_section=section, speed__gte=speed_low, speed__lt=speed_high, - timestamp__gte=start, timestamp__lt=end + timestamp__gte=start, + timestamp__lt=end, ) if count is not None: @@ -298,16 +335,20 @@ def get_speed_data_by_hour(count, section, lane=None, direction=None, start=None if direction is not None: qs = qs.filter(id_lane__direction=direction) - qs = qs.annotate(hour=ExtractHour('timestamp')) \ - .values('hour') \ - .annotate(value=Sum('times')) \ - .values('hour', 'value') \ - .values_list('hour', 'value') + qs = ( + qs.annotate(hour=ExtractHour("timestamp")) + .values("hour") + .annotate(value=Sum("times")) + .values("hour", "value") + .values_list("hour", "value") + ) return qs -def get_characteristic_speed_by_hour(count, section, lane=None, direction=None, 
start=None, end=None, v=0.15): +def get_characteristic_speed_by_hour( + count, section, lane=None, direction=None, start=None, end=None, v=0.15 +): if not start: start = count.start_process_date if not end: @@ -316,7 +357,8 @@ def get_characteristic_speed_by_hour(count, section, lane=None, direction=None, qs = models.CountDetail.objects.filter( id_lane__id_section=section, speed__isnull=False, - timestamp__gte=start, timestamp__lt=end + timestamp__gte=start, + timestamp__lt=end, ) if count is not None: @@ -328,18 +370,22 @@ def get_characteristic_speed_by_hour(count, section, lane=None, direction=None, if direction is not None: qs = qs.filter(id_lane__direction=direction) - qs = qs.annotate(hour=ExtractHour('timestamp')) \ - .order_by('hour', 'speed') \ - .values('hour', 'speed') + qs = ( + qs.annotate(hour=ExtractHour("timestamp")) + .order_by("hour", "speed") + .values("hour", "speed") + ) - df = pd.DataFrame.from_records(qs.values('hour', 'speed')) + df = pd.DataFrame.from_records(qs.values("hour", "speed")) if not df.empty: - df = df.set_index('hour') - df = df.groupby('hour').quantile(v, interpolation='lower') + df = df.set_index("hour") + df = df.groupby("hour").quantile(v, interpolation="lower") return df -def get_average_speed_by_hour(count, section, lane=None, direction=None, start=None, end=None, v=0.15): +def get_average_speed_by_hour( + count, section, lane=None, direction=None, start=None, end=None, v=0.15 +): if not start: start = count.start_process_date if not end: @@ -348,7 +394,8 @@ def get_average_speed_by_hour(count, section, lane=None, direction=None, start=N qs = models.CountDetail.objects.filter( id_lane__id_section=section, speed__isnull=False, - timestamp__gte=start, timestamp__lt=end + timestamp__gte=start, + timestamp__lt=end, ) if count is not None: @@ -360,20 +407,29 @@ def get_average_speed_by_hour(count, section, lane=None, direction=None, start=N if direction is not None: qs = qs.filter(id_lane__direction=direction) - qs = 
qs.annotate(hour=ExtractHour('timestamp')) \ - .order_by('hour', 'speed') \ - .values('hour', 'speed') + qs = ( + qs.annotate(hour=ExtractHour("timestamp")) + .order_by("hour", "speed") + .values("hour", "speed") + ) - df = pd.DataFrame.from_records(qs.values('hour', 'speed')) + df = pd.DataFrame.from_records(qs.values("hour", "speed")) if not df.empty: - df = df.set_index('hour') - df = df.groupby('hour').mean('speed') + df = df.set_index("hour") + df = df.groupby("hour").mean("speed") return df -def get_category_data_by_hour(count, section, category, lane=None, direction=None, start=None, end=None): - +def get_category_data_by_hour( + count: models.Count, + section: models.Section, + category: models.Category, + lane=None, + direction=None, + start=None, + end=None, +): if not start: start = count.start_process_date if not end: @@ -382,7 +438,8 @@ def get_category_data_by_hour(count, section, category, lane=None, direction=Non qs = models.CountDetail.objects.filter( id_lane__id_section=section, id_category=category, - timestamp__gte=start, timestamp__lt=end + timestamp__gte=start, + timestamp__lt=end, ) if count is not None: @@ -394,36 +451,37 @@ def get_category_data_by_hour(count, section, category, lane=None, direction=Non if direction is not None: qs = qs.filter(id_lane__direction=direction) - qs = qs.annotate(hour=ExtractHour('timestamp')) \ - .values('hour', 'times') \ - .annotate(value=Sum('times')) \ - .values('hour', 'value') \ - .values_list('hour', 'value') + qs = ( + qs.annotate(hour=ExtractHour("timestamp")) + .values("hour", "times") + .annotate(value=Sum("times")) + .values("hour", "value") + .values_list("hour", "value") + ) return qs def get_special_periods(first_day, last_day): qs = models.SpecialPeriod.objects.filter( - Q( - (Q(start_date__lte=first_day) & \ - Q(end_date__gte=last_day))) | \ - (Q(start_date__lte=last_day) & \ - Q(end_date__gte=first_day))) + Q((Q(start_date__lte=first_day) & Q(end_date__gte=last_day))) + | 
(Q(start_date__lte=last_day) & Q(end_date__gte=first_day)) + ) return qs def get_month_data(section, start, end): qs = models.CountDetail.objects.filter( - id_lane__id_section=section, - timestamp__gte=start, timestamp__lt=end + id_lane__id_section=section, timestamp__gte=start, timestamp__lt=end ) - qs = qs.annotate(month=Trunc('timestamp', 'month')) \ - .order_by('month') \ - .values('month', 'import_status') \ - .annotate(tm=Sum('times')) \ - .values('month', 'tm', 'import_status') + qs = ( + qs.annotate(month=Trunc("timestamp", "month")) + .order_by("month") + .values("month", "import_status") + .annotate(tm=Sum("times")) + .values("month", "tm", "import_status") + ) df = pd.DataFrame.from_records(qs) return df diff --git a/comptages/core/utils.py b/comptages/core/utils.py index 9ea40c59..8f9b6ea8 100644 --- a/comptages/core/utils.py +++ b/comptages/core/utils.py @@ -18,39 +18,33 @@ def get_ui_class(ui_file): :param ui_file: The file of the ui in svir.ui :type ui_file: str """ - os.path.sep.join(ui_file.split('/')) + os.path.sep.join(ui_file.split("/")) ui_file_path = os.path.abspath( - os.path.join( - os.path.dirname(__file__), - os.pardir, - 'ui', - ui_file - ) + os.path.join(os.path.dirname(__file__), os.pardir, "ui", ui_file) ) return loadUiType(ui_file_path)[0] -def push_info(message): - iface.messageBar().pushInfo('Comptages', message) +def push_info(message: str): + iface.messageBar().pushInfo("Comptages", message) -def push_warning(message): - iface.messageBar().pushMessage('Comptages', message, Qgis.Warning, 0) +def push_warning(message: str): + iface.messageBar().pushMessage("Comptages", message, Qgis.Warning, 0) -def push_error(message): +def push_error(message: str): # iface.messageBar().pushCritical('Comptages', message) - iface.messageBar().pushMessage('Comptages', message, Qgis.Critical, 0) + iface.messageBar().pushMessage("Comptages", message, Qgis.Critical, 0) -def create_progress_bar(message): - +def create_progress_bar(message: str): 
progress_widget = QProgressBar() progress_widget.setMaximum(100) progress_widget.setAlignment(Qt.AlignLeft | Qt.AlignVCenter) message_bar = iface.messageBar().createMessage(message) message_bar.setWidget(progress_widget) - iface.messageBar().pushMessage('') + iface.messageBar().pushMessage("") iface.messageBar().pushWidget(message_bar) return progress_widget diff --git a/comptages/core/yearly_report_dialog.py b/comptages/core/yearly_report_dialog.py index d2544e8a..67104c7d 100644 --- a/comptages/core/yearly_report_dialog.py +++ b/comptages/core/yearly_report_dialog.py @@ -1,7 +1,7 @@ from qgis.PyQt.QtWidgets import QDialog from comptages.core.utils import get_ui_class -FORM_CLASS = get_ui_class('yearly_report.ui') +FORM_CLASS = get_ui_class("yearly_report.ui") class YearlyReportDialog(QDialog, FORM_CLASS): diff --git a/comptages/datamodel/apps.py b/comptages/datamodel/apps.py index 95f1a06f..eb589411 100644 --- a/comptages/datamodel/apps.py +++ b/comptages/datamodel/apps.py @@ -3,6 +3,7 @@ from django.db import connection + def move_tables_to_schemas(sender, **kwargs): """ This moves all tables of this model to the app's schema @@ -11,19 +12,21 @@ def move_tables_to_schemas(sender, **kwargs): with connection.cursor() as cursor: # If we are testing on the django autogenerated test database, we don't # change the schema of the tables - if cursor.db.settings_dict.get('NAME').startswith('test_'): + if cursor.db.settings_dict.get("NAME").startswith("test_"): return cursor.execute(f"CREATE SCHEMA IF NOT EXISTS {app.label};") for model in app.get_models(): - query = f'ALTER TABLE IF EXISTS {model._meta.db_table} SET SCHEMA {app.label};' + query = ( + f"ALTER TABLE IF EXISTS {model._meta.db_table} SET SCHEMA {app.label};" + ) print(query) cursor.execute(query) class ComptagesConfig(AppConfig): - name = 'comptages.datamodel' - label = 'comptages' + name = "comptages.datamodel" + label = "comptages" def ready(self): post_migrate.connect(move_tables_to_schemas) diff --git 
a/comptages/datamodel/management/commands/importdata.py b/comptages/datamodel/management/commands/importdata.py index f94471b4..36a97d03 100644 --- a/comptages/datamodel/management/commands/importdata.py +++ b/comptages/datamodel/management/commands/importdata.py @@ -5,9 +5,24 @@ from django.core.management.base import BaseCommand from ...models import ( - Section, Lane, Brand, Category, Class, ClassCategory, Device, Installation, - Model, ModelClass, SensorType, SensorTypeClass, SensorTypeInstallation, - SensorTypeModel, Count, Sector, Municipality) + Section, + Lane, + Brand, + Category, + Class, + ClassCategory, + Device, + Installation, + Model, + ModelClass, + SensorType, + SensorTypeClass, + SensorTypeInstallation, + SensorTypeModel, + Count, + Sector, + Municipality, +) logger = logging.getLogger("main") @@ -19,7 +34,6 @@ def add_arguments(self, parser): parser.add_argument("--clear", action="store_true", help="Delete existing data") def handle(self, *args, **options): - if options["clear"]: print("Deleting...") try: @@ -54,7 +68,9 @@ def handle(self, *args, **options): self.import_model_classes(self.file_path("model_class.csv")) self.import_sensor_types(self.file_path("sensor_type.csv")) self.import_sensor_type_classes(self.file_path("sensor_type_class.csv")) - self.import_sensor_type_installations(self.file_path("sensor_type_installation.csv")) + self.import_sensor_type_installations( + self.file_path("sensor_type_installation.csv") + ) self.import_sensor_type_models(self.file_path("sensor_type_model.csv")) self.import_devices(self.file_path("device.csv")) self.import_sectors(self.file_path("sector.csv")) @@ -62,7 +78,9 @@ def handle(self, *args, **options): print("🚓") def file_path(self, filename): - return os.path.join(os.path.dirname(__file__),'..','..','..','basedata',filename) + return os.path.join( + os.path.dirname(__file__), "..", "..", "..", "basedata", filename + ) def import_sections(self, csv_file): print("Importing sections...") @@ -82,8 
+100,12 @@ def import_sections(self, csv_file): start_dist=Decimal(feat["start_dist"].value), end_dist=Decimal(feat["end_dist"].value), place_name=feat["place_name"], - start_validity=feat["start_validity"].value, # TODO : probably needs cast do datetime - end_validity=feat["end_validity"].value, # TODO : probably needs cast do datetime + start_validity=feat[ + "start_validity" + ].value, # TODO : probably needs cast do datetime + end_validity=feat[ + "end_validity" + ].value, # TODO : probably needs cast do datetime ) ) Section.objects.bulk_create(sections) @@ -101,7 +123,7 @@ def import_brands(self, csv_file): Brand( id=Decimal(feat["id"].value), name=feat["name"], - formatter_name=feat["formatter_name"] + formatter_name=feat["formatter_name"], ) ) Brand.objects.bulk_create(brands) @@ -174,7 +196,8 @@ def import_installations(self, csv_file): Installation( geometry=feat.geom.wkt, id=Decimal(feat["id"].value), - permanent=str(feat["permanent"]).lower() in ("yes", "true", "t", "1"), + permanent=str(feat["permanent"]).lower() + in ("yes", "true", "t", "1"), name=feat["name"], picture=feat["picture"], active=str(feat["active"]).lower() in ("yes", "true", "t", "1"), @@ -253,7 +276,8 @@ def import_sensor_types(self, csv_file): SensorType( id=Decimal(feat["id"].value), name=feat["name"], - permanent=str(feat["permanent"]).lower() in ("yes", "true", "t", "1"), + permanent=str(feat["permanent"]).lower() + in ("yes", "true", "t", "1"), ) ) SensorType.objects.bulk_create(objects) diff --git a/comptages/datamodel/management/commands/tjmreset.py b/comptages/datamodel/management/commands/tjmreset.py index 9e1bb489..e25252b4 100644 --- a/comptages/datamodel/management/commands/tjmreset.py +++ b/comptages/datamodel/management/commands/tjmreset.py @@ -17,11 +17,11 @@ def add_arguments(self, parser): def handle(self, *args, **options): counts = models.Count.objects.all() - if options['min_id']: - counts = counts.filter(id__gte=options['min_id']) + if options["min_id"]: + counts = 
counts.filter(id__gte=options["min_id"]) - if options['max_id']: - counts = counts.filter(id__lte=options['max_id']) + if options["max_id"]: + counts = counts.filter(id__lte=options["max_id"]) for i, count in enumerate(counts): print(f"{i+1} of {len(counts)} - Calculate TJM of count {count.id}") @@ -29,4 +29,4 @@ def handle(self, *args, **options): df, tjm = statistics.get_day_data(count) count.tjm = tjm - count.save(update_fields=['tjm']) + count.save(update_fields=["tjm"]) diff --git a/comptages/datamodel/migrations/0001_initial.py b/comptages/datamodel/migrations/0001_initial.py index 629f528b..18e7a802 100644 --- a/comptages/datamodel/migrations/0001_initial.py +++ b/comptages/datamodel/migrations/0001_initial.py @@ -6,7 +6,6 @@ class Migration(migrations.Migration): - initial = True dependencies = [] diff --git a/comptages/datamodel/migrations/0002_search_path.py b/comptages/datamodel/migrations/0002_search_path.py index e5c0cf4f..2757f380 100644 --- a/comptages/datamodel/migrations/0002_search_path.py +++ b/comptages/datamodel/migrations/0002_search_path.py @@ -3,17 +3,17 @@ from django.db import migrations from django.db import connection + def alter_search_path(apps, schema_editor): with connection.cursor() as cursor: - cursor.execute(f"ALTER DATABASE {connection.settings_dict['NAME']} SET search_path TO comptages,transfer,public,topology;") + cursor.execute( + f"ALTER DATABASE {connection.settings_dict['NAME']} SET search_path TO comptages,transfer,public,topology;" + ) class Migration(migrations.Migration): - dependencies = [ - ('comptages', '0001_initial'), + ("comptages", "0001_initial"), ] - operations = [ - migrations.RunPython(alter_search_path) - ] + operations = [migrations.RunPython(alter_search_path)] diff --git a/comptages/datamodel/migrations/0003_auto_20210820_0626.py b/comptages/datamodel/migrations/0003_auto_20210820_0626.py index c6155fe8..f3b7ca49 100644 --- a/comptages/datamodel/migrations/0003_auto_20210820_0626.py +++ 
b/comptages/datamodel/migrations/0003_auto_20210820_0626.py @@ -4,14 +4,13 @@ class Migration(migrations.Migration): - dependencies = [ - ('comptages', '0002_search_path'), + ("comptages", "0002_search_path"), ] operations = [ migrations.RemoveField( - model_name='category', - name='id_category', + model_name="category", + name="id_category", ), ] diff --git a/comptages/datamodel/migrations/0004_delete_basetjmok.py b/comptages/datamodel/migrations/0004_delete_basetjmok.py index d3150da4..318e5330 100644 --- a/comptages/datamodel/migrations/0004_delete_basetjmok.py +++ b/comptages/datamodel/migrations/0004_delete_basetjmok.py @@ -4,13 +4,12 @@ class Migration(migrations.Migration): - dependencies = [ - ('comptages', '0003_auto_20210820_0626'), + ("comptages", "0003_auto_20210820_0626"), ] operations = [ migrations.DeleteModel( - name='BaseTjmOk', + name="BaseTjmOk", ), ] diff --git a/comptages/datamodel/migrations/0005_auto_20210820_0843.py b/comptages/datamodel/migrations/0005_auto_20210820_0843.py index 5a18b699..5d52d207 100644 --- a/comptages/datamodel/migrations/0005_auto_20210820_0843.py +++ b/comptages/datamodel/migrations/0005_auto_20210820_0843.py @@ -4,20 +4,19 @@ class Migration(migrations.Migration): - dependencies = [ - ('comptages', '0004_delete_basetjmok'), + ("comptages", "0004_delete_basetjmok"), ] operations = [ migrations.AddField( - model_name='countdetail', - name='from_aggregate', + model_name="countdetail", + name="from_aggregate", field=models.BooleanField(default=False), ), migrations.AddField( - model_name='countdetail', - name='times', + model_name="countdetail", + name="times", field=models.IntegerField(default=1), ), ] diff --git a/comptages/datamodel/migrations/0006_auto_20210820_1115.py b/comptages/datamodel/migrations/0006_auto_20210820_1115.py index 354d5ee7..0ee56b8b 100644 --- a/comptages/datamodel/migrations/0006_auto_20210820_1115.py +++ b/comptages/datamodel/migrations/0006_auto_20210820_1115.py @@ -6,24 +6,27 @@ class 
Migration(migrations.Migration): - dependencies = [ - ('comptages', '0005_auto_20210820_0843'), + ("comptages", "0005_auto_20210820_0843"), ] operations = [ migrations.AddField( - model_name='classcategory', - name='id', + model_name="classcategory", + name="id", field=models.UUIDField(default=uuid.uuid4, editable=False), ), migrations.AlterField( - model_name='classcategory', - name='id_class', - field=models.ForeignKey(db_column='id_class', on_delete=django.db.models.deletion.DO_NOTHING, to='comptages.class'), + model_name="classcategory", + name="id_class", + field=models.ForeignKey( + db_column="id_class", + on_delete=django.db.models.deletion.DO_NOTHING, + to="comptages.class", + ), ), migrations.AlterUniqueTogether( - name='classcategory', + name="classcategory", unique_together=set(), ), ] diff --git a/comptages/datamodel/migrations/0007_alter_classcategory_id.py b/comptages/datamodel/migrations/0007_alter_classcategory_id.py index 8d6531da..8b3d4365 100644 --- a/comptages/datamodel/migrations/0007_alter_classcategory_id.py +++ b/comptages/datamodel/migrations/0007_alter_classcategory_id.py @@ -5,15 +5,16 @@ class Migration(migrations.Migration): - dependencies = [ - ('comptages', '0006_auto_20210820_1115'), + ("comptages", "0006_auto_20210820_1115"), ] operations = [ migrations.AlterField( - model_name='classcategory', - name='id', - field=models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False), + model_name="classcategory", + name="id", + field=models.UUIDField( + default=uuid.uuid4, editable=False, primary_key=True, serialize=False + ), ), ] diff --git a/comptages/datamodel/migrations/0008_auto_20210820_1143.py b/comptages/datamodel/migrations/0008_auto_20210820_1143.py index 482ba946..2a9a16cf 100644 --- a/comptages/datamodel/migrations/0008_auto_20210820_1143.py +++ b/comptages/datamodel/migrations/0008_auto_20210820_1143.py @@ -6,52 +6,63 @@ class Migration(migrations.Migration): - dependencies = [ - ('comptages', 
'0007_alter_classcategory_id'), + ("comptages", "0007_alter_classcategory_id"), ] operations = [ migrations.AddField( - model_name='sensortypeclass', - name='id', + model_name="sensortypeclass", + name="id", field=models.UUIDField(default=uuid.uuid4, editable=False), ), migrations.AddField( - model_name='sensortypeinstallation', - name='id', + model_name="sensortypeinstallation", + name="id", field=models.UUIDField(default=uuid.uuid4, editable=False), ), migrations.AddField( - model_name='sensortypemodel', - name='id', + model_name="sensortypemodel", + name="id", field=models.UUIDField(default=uuid.uuid4, editable=False), ), migrations.AlterField( - model_name='sensortypeclass', - name='id_sensor_type', - field=models.ForeignKey(db_column='id_sensor_type', on_delete=django.db.models.deletion.DO_NOTHING, to='comptages.sensortype'), + model_name="sensortypeclass", + name="id_sensor_type", + field=models.ForeignKey( + db_column="id_sensor_type", + on_delete=django.db.models.deletion.DO_NOTHING, + to="comptages.sensortype", + ), ), migrations.AlterField( - model_name='sensortypeinstallation', - name='id_sensor_type', - field=models.ForeignKey(db_column='id_sensor_type', on_delete=django.db.models.deletion.DO_NOTHING, to='comptages.sensortype'), + model_name="sensortypeinstallation", + name="id_sensor_type", + field=models.ForeignKey( + db_column="id_sensor_type", + on_delete=django.db.models.deletion.DO_NOTHING, + to="comptages.sensortype", + ), ), migrations.AlterField( - model_name='sensortypemodel', - name='id_sensor_type', - field=models.ForeignKey(db_column='id_sensor_type', on_delete=django.db.models.deletion.DO_NOTHING, to='comptages.sensortype'), + model_name="sensortypemodel", + name="id_sensor_type", + field=models.ForeignKey( + db_column="id_sensor_type", + on_delete=django.db.models.deletion.DO_NOTHING, + to="comptages.sensortype", + ), ), migrations.AlterUniqueTogether( - name='sensortypeclass', + name="sensortypeclass", unique_together=set(), ), 
migrations.AlterUniqueTogether( - name='sensortypeinstallation', + name="sensortypeinstallation", unique_together=set(), ), migrations.AlterUniqueTogether( - name='sensortypemodel', + name="sensortypemodel", unique_together=set(), ), ] diff --git a/comptages/datamodel/migrations/0009_auto_20210820_1143.py b/comptages/datamodel/migrations/0009_auto_20210820_1143.py index cb0fbb0c..f7f6a121 100644 --- a/comptages/datamodel/migrations/0009_auto_20210820_1143.py +++ b/comptages/datamodel/migrations/0009_auto_20210820_1143.py @@ -5,25 +5,30 @@ class Migration(migrations.Migration): - dependencies = [ - ('comptages', '0008_auto_20210820_1143'), + ("comptages", "0008_auto_20210820_1143"), ] operations = [ migrations.AlterField( - model_name='sensortypeclass', - name='id', - field=models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False), + model_name="sensortypeclass", + name="id", + field=models.UUIDField( + default=uuid.uuid4, editable=False, primary_key=True, serialize=False + ), ), migrations.AlterField( - model_name='sensortypeinstallation', - name='id', - field=models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False), + model_name="sensortypeinstallation", + name="id", + field=models.UUIDField( + default=uuid.uuid4, editable=False, primary_key=True, serialize=False + ), ), migrations.AlterField( - model_name='sensortypemodel', - name='id', - field=models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False), + model_name="sensortypemodel", + name="id", + field=models.UUIDField( + default=uuid.uuid4, editable=False, primary_key=True, serialize=False + ), ), ] diff --git a/comptages/datamodel/migrations/0010_tjm.py b/comptages/datamodel/migrations/0010_tjm.py index 69f9013e..735bfec1 100644 --- a/comptages/datamodel/migrations/0010_tjm.py +++ b/comptages/datamodel/migrations/0010_tjm.py @@ -5,23 +5,34 @@ class Migration(migrations.Migration): - dependencies = [ - ('comptages', 
'0009_auto_20210820_1143'), + ("comptages", "0009_auto_20210820_1143"), ] operations = [ migrations.CreateModel( - name='Tjm', + name="Tjm", fields=[ - ('id', models.BigAutoField(primary_key=True, serialize=False)), - ('day', models.DateField()), - ('value', models.DecimalField(decimal_places=2, max_digits=10)), - ('count', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='comptages.count')), - ('lane', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='comptages.lane')), + ("id", models.BigAutoField(primary_key=True, serialize=False)), + ("day", models.DateField()), + ("value", models.DecimalField(decimal_places=2, max_digits=10)), + ( + "count", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + to="comptages.count", + ), + ), + ( + "lane", + models.ForeignKey( + on_delete=django.db.models.deletion.DO_NOTHING, + to="comptages.lane", + ), + ), ], options={ - 'db_table': 'tjm', + "db_table": "tjm", }, ), ] diff --git a/comptages/datamodel/migrations/0011_tjm_week_day.py b/comptages/datamodel/migrations/0011_tjm_week_day.py index 6f13bdec..4f0e597e 100644 --- a/comptages/datamodel/migrations/0011_tjm_week_day.py +++ b/comptages/datamodel/migrations/0011_tjm_week_day.py @@ -4,15 +4,14 @@ class Migration(migrations.Migration): - dependencies = [ - ('comptages', '0010_tjm'), + ("comptages", "0010_tjm"), ] operations = [ migrations.AddField( - model_name='tjm', - name='week_day', + model_name="tjm", + name="week_day", field=models.SmallIntegerField(default=0), ), ] diff --git a/comptages/datamodel/migrations/0012_remove_tjm_day.py b/comptages/datamodel/migrations/0012_remove_tjm_day.py index 080af971..a5062633 100644 --- a/comptages/datamodel/migrations/0012_remove_tjm_day.py +++ b/comptages/datamodel/migrations/0012_remove_tjm_day.py @@ -4,14 +4,13 @@ class Migration(migrations.Migration): - dependencies = [ - ('comptages', '0011_tjm_week_day'), + ("comptages", "0011_tjm_week_day"), ] operations = [ 
migrations.RemoveField( - model_name='tjm', - name='day', + model_name="tjm", + name="day", ), ] diff --git a/comptages/datamodel/migrations/0013_auto_20211001_0643.py b/comptages/datamodel/migrations/0013_auto_20211001_0643.py index 82a02a91..e33bfd63 100644 --- a/comptages/datamodel/migrations/0013_auto_20211001_0643.py +++ b/comptages/datamodel/migrations/0013_auto_20211001_0643.py @@ -4,19 +4,18 @@ class Migration(migrations.Migration): - dependencies = [ - ('comptages', '0012_remove_tjm_day'), + ("comptages", "0012_remove_tjm_day"), ] operations = [ migrations.RemoveField( - model_name='tjm', - name='week_day', + model_name="tjm", + name="week_day", ), migrations.AddField( - model_name='tjm', - name='day', + model_name="tjm", + name="day", field=models.DateField(null=True), ), ] diff --git a/comptages/datamodel/migrations/0014_count_tjm.py b/comptages/datamodel/migrations/0014_count_tjm.py index 8760dc4c..14af225e 100644 --- a/comptages/datamodel/migrations/0014_count_tjm.py +++ b/comptages/datamodel/migrations/0014_count_tjm.py @@ -4,15 +4,14 @@ class Migration(migrations.Migration): - dependencies = [ - ('comptages', '0013_auto_20211001_0643'), + ("comptages", "0013_auto_20211001_0643"), ] operations = [ migrations.AddField( - model_name='count', - name='tjm', + model_name="count", + name="tjm", field=models.DecimalField(decimal_places=2, max_digits=10, null=True), ), ] diff --git a/comptages/datamodel/migrations/0015_sector.py b/comptages/datamodel/migrations/0015_sector.py index 797020eb..a5a41851 100644 --- a/comptages/datamodel/migrations/0015_sector.py +++ b/comptages/datamodel/migrations/0015_sector.py @@ -5,20 +5,24 @@ class Migration(migrations.Migration): - dependencies = [ - ('comptages', '0014_count_tjm'), + ("comptages", "0014_count_tjm"), ] operations = [ migrations.CreateModel( - name='Sector', + name="Sector", fields=[ - ('id', models.BigAutoField(primary_key=True, serialize=False)), - ('geometry', 
django.contrib.gis.db.models.fields.GeometryField(blank=True, null=True, srid=2056)), + ("id", models.BigAutoField(primary_key=True, serialize=False)), + ( + "geometry", + django.contrib.gis.db.models.fields.GeometryField( + blank=True, null=True, srid=2056 + ), + ), ], options={ - 'db_table': 'sector', + "db_table": "sector", }, ), ] diff --git a/comptages/datamodel/migrations/0016_alter_sector_geometry.py b/comptages/datamodel/migrations/0016_alter_sector_geometry.py index 95fe42e3..c639aa45 100644 --- a/comptages/datamodel/migrations/0016_alter_sector_geometry.py +++ b/comptages/datamodel/migrations/0016_alter_sector_geometry.py @@ -5,15 +5,16 @@ class Migration(migrations.Migration): - dependencies = [ - ('comptages', '0015_sector'), + ("comptages", "0015_sector"), ] operations = [ migrations.AlterField( - model_name='sector', - name='geometry', - field=django.contrib.gis.db.models.fields.PolygonField(blank=True, null=True, srid=2056), + model_name="sector", + name="geometry", + field=django.contrib.gis.db.models.fields.PolygonField( + blank=True, null=True, srid=2056 + ), ), ] diff --git a/comptages/datamodel/migrations/0017_alter_countdetail_id_count.py b/comptages/datamodel/migrations/0017_alter_countdetail_id_count.py index a34c296b..5dcfd3cf 100644 --- a/comptages/datamodel/migrations/0017_alter_countdetail_id_count.py +++ b/comptages/datamodel/migrations/0017_alter_countdetail_id_count.py @@ -5,15 +5,18 @@ class Migration(migrations.Migration): - dependencies = [ - ('comptages', '0016_alter_sector_geometry'), + ("comptages", "0016_alter_sector_geometry"), ] operations = [ migrations.AlterField( - model_name='countdetail', - name='id_count', - field=models.ForeignKey(db_column='id_count', on_delete=django.db.models.deletion.CASCADE, to='comptages.count'), + model_name="countdetail", + name="id_count", + field=models.ForeignKey( + db_column="id_count", + on_delete=django.db.models.deletion.CASCADE, + to="comptages.count", + ), ), ] diff --git 
a/comptages/datamodel/migrations/0018_alter_countdetail_id_category.py b/comptages/datamodel/migrations/0018_alter_countdetail_id_category.py index 4b98aca6..95f39803 100644 --- a/comptages/datamodel/migrations/0018_alter_countdetail_id_category.py +++ b/comptages/datamodel/migrations/0018_alter_countdetail_id_category.py @@ -5,15 +5,19 @@ class Migration(migrations.Migration): - dependencies = [ - ('comptages', '0017_alter_countdetail_id_count'), + ("comptages", "0017_alter_countdetail_id_count"), ] operations = [ migrations.AlterField( - model_name='countdetail', - name='id_category', - field=models.ForeignKey(db_column='id_category', null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='comptages.category'), + model_name="countdetail", + name="id_category", + field=models.ForeignKey( + db_column="id_category", + null=True, + on_delete=django.db.models.deletion.DO_NOTHING, + to="comptages.category", + ), ), ] diff --git a/comptages/datamodel/migrations/0019_alter_tjm_count.py b/comptages/datamodel/migrations/0019_alter_tjm_count.py index d25e0872..b3771287 100644 --- a/comptages/datamodel/migrations/0019_alter_tjm_count.py +++ b/comptages/datamodel/migrations/0019_alter_tjm_count.py @@ -5,15 +5,18 @@ class Migration(migrations.Migration): - dependencies = [ - ('comptages', '0018_alter_countdetail_id_category'), + ("comptages", "0018_alter_countdetail_id_category"), ] operations = [ migrations.AlterField( - model_name='tjm', - name='count', - field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='tjms', to='comptages.count'), + model_name="tjm", + name="count", + field=models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name="tjms", + to="comptages.count", + ), ), ] diff --git a/comptages/datamodel/migrations/0020_delete_tjm.py b/comptages/datamodel/migrations/0020_delete_tjm.py index d1e89547..6788cf2f 100644 --- a/comptages/datamodel/migrations/0020_delete_tjm.py +++ 
b/comptages/datamodel/migrations/0020_delete_tjm.py @@ -4,13 +4,12 @@ class Migration(migrations.Migration): - dependencies = [ - ('comptages', '0019_alter_tjm_count'), + ("comptages", "0019_alter_tjm_count"), ] operations = [ migrations.DeleteModel( - name='Tjm', + name="Tjm", ), ] diff --git a/comptages/datamodel/migrations/0021_alter_count_tjm.py b/comptages/datamodel/migrations/0021_alter_count_tjm.py index 45c3702b..295350a5 100644 --- a/comptages/datamodel/migrations/0021_alter_count_tjm.py +++ b/comptages/datamodel/migrations/0021_alter_count_tjm.py @@ -4,15 +4,14 @@ class Migration(migrations.Migration): - dependencies = [ - ('comptages', '0020_delete_tjm'), + ("comptages", "0020_delete_tjm"), ] operations = [ migrations.AlterField( - model_name='count', - name='tjm', + model_name="count", + name="tjm", field=models.IntegerField(null=True), ), ] diff --git a/comptages/datamodel/migrations/0022_auto_20211217_0624.py b/comptages/datamodel/migrations/0022_auto_20211217_0624.py index e05b1bf8..c17888ce 100644 --- a/comptages/datamodel/migrations/0022_auto_20211217_0624.py +++ b/comptages/datamodel/migrations/0022_auto_20211217_0624.py @@ -9,7 +9,7 @@ def get_console_logger(): logger = logging.getLogger(__name__) - handler = logging.FileHandler('migration_0022.log') + handler = logging.FileHandler("migration_0022.log") logger.addHandler(handler) logger.setLevel(logging.INFO) @@ -21,25 +21,31 @@ def migrate_data(apps, schema_editor): logger = get_console_logger() count_details = [] chunk_size = 1000 - local_tz = pytz.timezone('Europe/Zurich') + local_tz = pytz.timezone("Europe/Zurich") start_time = datetime.datetime.now() # We can't import the models directly as it may be a newer # version than this migration expects. We use the historical version. 
- CountAggregateValueCls = apps.get_model('comptages', 'CountAggregateValueCls') - CountAggregateValueCnt = apps.get_model('comptages', 'CountAggregateValueCnt') - CountAggregateValueDrn = apps.get_model('comptages', 'CountAggregateValueDrn') - CountAggregateValueLen = apps.get_model('comptages', 'CountAggregateValueLen') - CountAggregateValueSpd = apps.get_model('comptages', 'CountAggregateValueSpd') - CountDetail = apps.get_model('comptages', 'CountDetail') + CountAggregateValueCls = apps.get_model("comptages", "CountAggregateValueCls") + CountAggregateValueCnt = apps.get_model("comptages", "CountAggregateValueCnt") + CountAggregateValueDrn = apps.get_model("comptages", "CountAggregateValueDrn") + CountAggregateValueLen = apps.get_model("comptages", "CountAggregateValueLen") + CountAggregateValueSpd = apps.get_model("comptages", "CountAggregateValueSpd") + CountDetail = apps.get_model("comptages", "CountDetail") ## CLS - logger.info(f'{datetime.datetime.now()} Start with count_aggregate_value_cls table') + logger.info(f"{datetime.datetime.now()} Start with count_aggregate_value_cls table") num = 0 while CountAggregateValueCls.objects.all().exists(): with transaction.atomic(): # Cannot delete using a normal slice on all() objects like this qs = CountAggregateValueCls.objects.all()[:100] - qs = CountAggregateValueCls.objects.filter(id__in=list(CountAggregateValueCls.objects.values_list('pk', flat=True)[:chunk_size])) + qs = CountAggregateValueCls.objects.filter( + id__in=list( + CountAggregateValueCls.objects.values_list("pk", flat=True)[ + :chunk_size + ] + ) + ) for i in qs: count_details.append( CountDetail( @@ -59,14 +65,22 @@ def migrate_data(apps, schema_editor): qs.delete() num += chunk_size now = datetime.datetime.now() - logger.info(f'{now} Table CLS (1/5), migrated {num} rows, total elapsed time {now - start_time}') + logger.info( + f"{now} Table CLS (1/5), migrated {num} rows, total elapsed time {now - start_time}" + ) ## CNT - 
logger.info(f'{datetime.datetime.now()} Start with count_aggregate_value_cnt table') + logger.info(f"{datetime.datetime.now()} Start with count_aggregate_value_cnt table") num = 0 while CountAggregateValueCnt.objects.all().exists(): with transaction.atomic(): - qs = CountAggregateValueCnt.objects.filter(id__in=list(CountAggregateValueCnt.objects.values_list('pk', flat=True)[:chunk_size])) + qs = CountAggregateValueCnt.objects.filter( + id__in=list( + CountAggregateValueCnt.objects.values_list("pk", flat=True)[ + :chunk_size + ] + ) + ) for i in qs: count_details.append( CountDetail( @@ -85,14 +99,22 @@ def migrate_data(apps, schema_editor): qs.delete() num += chunk_size now = datetime.datetime.now() - logger.info(f'{now} Table CNT (2/5), migrated {num} rows, total elapsed time {now - start_time}') + logger.info( + f"{now} Table CNT (2/5), migrated {num} rows, total elapsed time {now - start_time}" + ) ## DRN - logger.info(f'{datetime.datetime.now()} Start with count_aggregate_value_drn table') + logger.info(f"{datetime.datetime.now()} Start with count_aggregate_value_drn table") num = 0 while CountAggregateValueDrn.objects.all().exists(): with transaction.atomic(): - qs = CountAggregateValueDrn.objects.filter(id__in=list(CountAggregateValueDrn.objects.values_list('pk', flat=True)[:chunk_size])) + qs = CountAggregateValueDrn.objects.filter( + id__in=list( + CountAggregateValueDrn.objects.values_list("pk", flat=True)[ + :chunk_size + ] + ) + ) for i in qs: count_details.append( CountDetail( @@ -111,14 +133,22 @@ def migrate_data(apps, schema_editor): qs.delete() num += chunk_size now = datetime.datetime.now() - logger.info(f'{now} Table DRN (3/5), migrated {num} rows, total elapsed time {now - start_time}') + logger.info( + f"{now} Table DRN (3/5), migrated {num} rows, total elapsed time {now - start_time}" + ) ## LEN - logger.info(f'{datetime.datetime.now()} Start with count_aggregate_value_len table') + logger.info(f"{datetime.datetime.now()} Start with 
count_aggregate_value_len table") num = 0 while CountAggregateValueLen.objects.all().exists(): with transaction.atomic(): - qs = CountAggregateValueLen.objects.filter(id__in=list(CountAggregateValueLen.objects.values_list('pk', flat=True)[:chunk_size])) + qs = CountAggregateValueLen.objects.filter( + id__in=list( + CountAggregateValueLen.objects.values_list("pk", flat=True)[ + :chunk_size + ] + ) + ) for i in qs: count_details.append( CountDetail( @@ -138,14 +168,22 @@ def migrate_data(apps, schema_editor): qs.delete() num += chunk_size now = datetime.datetime.now() - logger.info(f'{now} Table LEN (4/5), migrated {num} rows, total elapsed time {now - start_time}') + logger.info( + f"{now} Table LEN (4/5), migrated {num} rows, total elapsed time {now - start_time}" + ) ## SPD - logger.info(f'{datetime.datetime.now()} Start with count_aggregate_value_spd table') + logger.info(f"{datetime.datetime.now()} Start with count_aggregate_value_spd table") num = 0 while CountAggregateValueSpd.objects.all().exists(): with transaction.atomic(): - qs = CountAggregateValueSpd.objects.filter(id__in=list(CountAggregateValueSpd.objects.values_list('pk', flat=True)[:chunk_size])) + qs = CountAggregateValueSpd.objects.filter( + id__in=list( + CountAggregateValueSpd.objects.values_list("pk", flat=True)[ + :chunk_size + ] + ) + ) for i in qs: count_details.append( CountDetail( @@ -165,25 +203,28 @@ def migrate_data(apps, schema_editor): qs.delete() num += chunk_size now = datetime.datetime.now() - logger.info(f'{now} Table SPD (5/5), migrated {num} rows, total elapsed time {now - start_time}') + logger.info( + f"{now} Table SPD (5/5), migrated {num} rows, total elapsed time {now - start_time}" + ) now = datetime.datetime.now() - logger.info(f'{now} All tables migrated! Total elapsed time {now - start_time}') + logger.info(f"{now} All tables migrated! 
Total elapsed time {now - start_time}") + class Migration(migrations.Migration): atomic = False dependencies = [ - ('comptages', '0021_alter_count_tjm'), + ("comptages", "0021_alter_count_tjm"), ] operations = [ migrations.RunPython(migrate_data), - migrations.DeleteModel(name='CountAggregateValueCls'), - migrations.DeleteModel(name='CountAggregateValueCnt'), - migrations.DeleteModel(name='CountAggregateValueDrn'), - migrations.DeleteModel(name='CountAggregateValueLen'), - migrations.DeleteModel(name='CountAggregateValueSpd'), - migrations.DeleteModel(name='CountAggregateValueSds'), - migrations.DeleteModel(name='CountAggregate'), + migrations.DeleteModel(name="CountAggregateValueCls"), + migrations.DeleteModel(name="CountAggregateValueCnt"), + migrations.DeleteModel(name="CountAggregateValueDrn"), + migrations.DeleteModel(name="CountAggregateValueLen"), + migrations.DeleteModel(name="CountAggregateValueSpd"), + migrations.DeleteModel(name="CountAggregateValueSds"), + migrations.DeleteModel(name="CountAggregate"), ] diff --git a/comptages/datamodel/migrations/0023_installation_alias.py b/comptages/datamodel/migrations/0023_installation_alias.py index 5f112183..d8c92c48 100644 --- a/comptages/datamodel/migrations/0023_installation_alias.py +++ b/comptages/datamodel/migrations/0023_installation_alias.py @@ -4,15 +4,14 @@ class Migration(migrations.Migration): - dependencies = [ - ('comptages', '0022_auto_20211217_0624'), + ("comptages", "0022_auto_20211217_0624"), ] operations = [ migrations.AddField( - model_name='installation', - name='alias', + model_name="installation", + name="alias", field=models.CharField(blank=True, max_length=200, null=True), ), ] diff --git a/comptages/datamodel/migrations/0024_category_trash.py b/comptages/datamodel/migrations/0024_category_trash.py index 0ad82b61..187b7883 100644 --- a/comptages/datamodel/migrations/0024_category_trash.py +++ b/comptages/datamodel/migrations/0024_category_trash.py @@ -4,15 +4,14 @@ class 
Migration(migrations.Migration): - dependencies = [ - ('comptages', '0023_installation_alias'), + ("comptages", "0023_installation_alias"), ] operations = [ migrations.AddField( - model_name='category', - name='trash', + model_name="category", + name="trash", field=models.BooleanField(default=False), ), ] diff --git a/comptages/datamodel/migrations/0025_auto_20220204_1353.py b/comptages/datamodel/migrations/0025_auto_20220204_1353.py index 040d2347..576919bc 100644 --- a/comptages/datamodel/migrations/0025_auto_20220204_1353.py +++ b/comptages/datamodel/migrations/0025_auto_20220204_1353.py @@ -6,20 +6,23 @@ class Migration(migrations.Migration): - dependencies = [ - ('comptages', '0024_category_trash'), + ("comptages", "0024_category_trash"), ] operations = [ migrations.AddField( - model_name='modelclass', - name='id', + model_name="modelclass", + name="id", field=models.UUIDField(default=uuid.uuid4, editable=False), ), migrations.AlterField( - model_name='modelclass', - name='id_model', - field=models.ForeignKey(db_column='id_model', on_delete=django.db.models.deletion.DO_NOTHING, to='comptages.model'), + model_name="modelclass", + name="id_model", + field=models.ForeignKey( + db_column="id_model", + on_delete=django.db.models.deletion.DO_NOTHING, + to="comptages.model", + ), ), ] diff --git a/comptages/datamodel/migrations/0026_alter_modelclass_id.py b/comptages/datamodel/migrations/0026_alter_modelclass_id.py index 6045b8c2..88d06c68 100644 --- a/comptages/datamodel/migrations/0026_alter_modelclass_id.py +++ b/comptages/datamodel/migrations/0026_alter_modelclass_id.py @@ -5,15 +5,16 @@ class Migration(migrations.Migration): - dependencies = [ - ('comptages', '0025_auto_20220204_1353'), + ("comptages", "0025_auto_20220204_1353"), ] operations = [ migrations.AlterField( - model_name='modelclass', - name='id', - field=models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False), + model_name="modelclass", + name="id", + 
field=models.UUIDField( + default=uuid.uuid4, editable=False, primary_key=True, serialize=False + ), ), ] diff --git a/comptages/datamodel/migrations/0027_default_uuid.py b/comptages/datamodel/migrations/0027_default_uuid.py index df7761a0..4f379546 100644 --- a/comptages/datamodel/migrations/0027_default_uuid.py +++ b/comptages/datamodel/migrations/0027_default_uuid.py @@ -4,26 +4,25 @@ class Migration(migrations.Migration): - dependencies = [ - ('comptages', '0026_alter_modelclass_id'), + ("comptages", "0026_alter_modelclass_id"), ] operations = [ migrations.RunSQL('CREATE EXTENSION IF NOT EXISTS "uuid-ossp";'), migrations.RunSQL( - 'ALTER TABLE class_category ALTER COLUMN id SET DEFAULT uuid_generate_v4();' - ), + "ALTER TABLE class_category ALTER COLUMN id SET DEFAULT uuid_generate_v4();" + ), migrations.RunSQL( - 'ALTER TABLE model_class ALTER COLUMN id SET DEFAULT uuid_generate_v4();' - ), + "ALTER TABLE model_class ALTER COLUMN id SET DEFAULT uuid_generate_v4();" + ), migrations.RunSQL( - 'ALTER TABLE sensor_type_class ALTER COLUMN id SET DEFAULT uuid_generate_v4();' - ), + "ALTER TABLE sensor_type_class ALTER COLUMN id SET DEFAULT uuid_generate_v4();" + ), migrations.RunSQL( - 'ALTER TABLE sensor_type_installation ALTER COLUMN id SET DEFAULT uuid_generate_v4();' - ), + "ALTER TABLE sensor_type_installation ALTER COLUMN id SET DEFAULT uuid_generate_v4();" + ), migrations.RunSQL( - 'ALTER TABLE sensor_type_model ALTER COLUMN id SET DEFAULT uuid_generate_v4();' - ), + "ALTER TABLE sensor_type_model ALTER COLUMN id SET DEFAULT uuid_generate_v4();" + ), ] diff --git a/comptages/datamodel/migrations/0028_municipality.py b/comptages/datamodel/migrations/0028_municipality.py index 2ff81133..bd03c7d2 100644 --- a/comptages/datamodel/migrations/0028_municipality.py +++ b/comptages/datamodel/migrations/0028_municipality.py @@ -5,21 +5,23 @@ class Migration(migrations.Migration): - dependencies = [ - ('comptages', '0027_default_uuid'), + ("comptages", 
"0027_default_uuid"), ] operations = [ migrations.CreateModel( - name='Municipality', + name="Municipality", fields=[ - ('id', models.BigAutoField(primary_key=True, serialize=False)), - ('geometry', django.contrib.gis.db.models.fields.MultiPolygonField(srid=2056)), - ('name', models.TextField()), + ("id", models.BigAutoField(primary_key=True, serialize=False)), + ( + "geometry", + django.contrib.gis.db.models.fields.MultiPolygonField(srid=2056), + ), + ("name", models.TextField()), ], options={ - 'db_table': 'municipality', + "db_table": "municipality", }, ), ] diff --git a/comptages/datamodel/models.py b/comptages/datamodel/models.py index a53d0b3a..51921669 100644 --- a/comptages/datamodel/models.py +++ b/comptages/datamodel/models.py @@ -12,7 +12,7 @@ class Brand(models.Model): formatter_name = models.TextField(blank=True, null=True) class Meta: - db_table = 'brand' + db_table = "brand" class Category(models.Model): @@ -23,7 +23,7 @@ class Category(models.Model): trash = models.BooleanField(default=False) class Meta: - db_table = 'category' + db_table = "category" class Class(models.Model): @@ -32,24 +32,28 @@ class Class(models.Model): description = models.TextField() class Meta: - db_table = 'class' + db_table = "class" class ClassCategory(models.Model): id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False) - id_class = models.ForeignKey(Class, models.DO_NOTHING, db_column='id_class') - id_category = models.ForeignKey(Category, models.DO_NOTHING, db_column='id_category') + id_class = models.ForeignKey(Class, models.DO_NOTHING, db_column="id_class") + id_category = models.ForeignKey( + Category, models.DO_NOTHING, db_column="id_category" + ) class Meta: - db_table = 'class_category' + db_table = "class_category" class CoreBuilding(models.Model): - structure_ptr = models.OneToOneField('CoreStructure', models.DO_NOTHING, primary_key=True) + structure_ptr = models.OneToOneField( + "CoreStructure", models.DO_NOTHING, primary_key=True + ) 
stories_count = models.IntegerField() class Meta: - db_table = 'core_building' + db_table = "core_building" class CoreStructure(models.Model): @@ -59,7 +63,7 @@ class CoreStructure(models.Model): label = models.CharField(max_length=255) class Meta: - db_table = 'core_structure' + db_table = "core_structure" class Count(models.Model): @@ -75,23 +79,35 @@ class Count(models.Model): valid = models.BooleanField(blank=True, null=True) dysfunction = models.BooleanField(blank=True, null=True) remarks = models.TextField(blank=True, null=True) - id_model = models.ForeignKey('Model', models.DO_NOTHING, db_column='id_model') - id_device = models.ForeignKey('Device', models.DO_NOTHING, db_column='id_device', blank=True, null=True) - id_sensor_type = models.ForeignKey('SensorType', models.DO_NOTHING, db_column='id_sensor_type') - id_class = models.ForeignKey(Class, models.DO_NOTHING, db_column='id_class', blank=True, null=True) - id_installation = models.ForeignKey('Installation', models.DO_NOTHING, db_column='id_installation') + id_model = models.ForeignKey("Model", models.DO_NOTHING, db_column="id_model") + id_device = models.ForeignKey( + "Device", models.DO_NOTHING, db_column="id_device", blank=True, null=True + ) + id_sensor_type = models.ForeignKey( + "SensorType", models.DO_NOTHING, db_column="id_sensor_type" + ) + id_class = models.ForeignKey( + Class, models.DO_NOTHING, db_column="id_class", blank=True, null=True + ) + id_installation = models.ForeignKey( + "Installation", models.DO_NOTHING, db_column="id_installation" + ) tjm = models.IntegerField(null=True) class Meta: - db_table = 'count' + db_table = "count" class CountDetail(models.Model): id = models.BigAutoField(primary_key=True) numbering = models.IntegerField() timestamp = models.DateTimeField() - distance_front_front = models.DecimalField(max_digits=3, decimal_places=1, blank=True, null=True) - distance_front_back = models.DecimalField(max_digits=3, decimal_places=1, blank=True, null=True) + 
distance_front_front = models.DecimalField( + max_digits=3, decimal_places=1, blank=True, null=True + ) + distance_front_back = models.DecimalField( + max_digits=3, decimal_places=1, blank=True, null=True + ) speed = models.SmallIntegerField(blank=True, null=True) length = models.SmallIntegerField(blank=True, null=True) height = models.CharField(max_length=2, blank=True, null=True) @@ -99,14 +115,16 @@ class CountDetail(models.Model): wrong_way = models.BooleanField(blank=True, null=True) file_name = models.TextField() import_status = models.SmallIntegerField() - id_lane = models.ForeignKey('Lane', models.DO_NOTHING, db_column='id_lane') - id_count = models.ForeignKey(Count, models.CASCADE, db_column='id_count') - id_category = models.ForeignKey(Category, models.DO_NOTHING, db_column='id_category', null=True) + id_lane = models.ForeignKey("Lane", models.DO_NOTHING, db_column="id_lane") + id_count = models.ForeignKey(Count, models.CASCADE, db_column="id_count") + id_category = models.ForeignKey( + Category, models.DO_NOTHING, db_column="id_category", null=True + ) times = models.IntegerField(default=1) from_aggregate = models.BooleanField(default=False) class Meta: - db_table = 'count_detail' + db_table = "count_detail" class DamageLog(models.Model): @@ -114,10 +132,10 @@ class DamageLog(models.Model): start_date = models.DateField() end_date = models.DateField() description = models.TextField() - id_device = models.ForeignKey('Device', models.DO_NOTHING, db_column='id_device') + id_device = models.ForeignKey("Device", models.DO_NOTHING, db_column="id_device") class Meta: - db_table = 'damage_log' + db_table = "damage_log" class Device(models.Model): @@ -125,10 +143,10 @@ class Device(models.Model): serial = models.TextField(blank=True, null=True) purchase_date = models.DateField(blank=True, null=True) name = models.TextField() - id_model = models.ForeignKey('Model', models.DO_NOTHING, db_column='id_model') + id_model = models.ForeignKey("Model", models.DO_NOTHING, 
db_column="id_model") class Meta: - db_table = 'device' + db_table = "device" class Installation(models.Model): @@ -143,7 +161,7 @@ class Installation(models.Model): active = models.BooleanField() class Meta: - db_table = 'installation' + db_table = "installation" @property def municipality(self): @@ -158,11 +176,17 @@ class Lane(models.Model): number = models.SmallIntegerField() direction = models.SmallIntegerField() direction_desc = models.TextField(blank=True, null=True) - id_installation = models.ForeignKey(Installation, models.DO_NOTHING, db_column='id_installation', blank=True, null=True) - id_section = models.ForeignKey('Section', models.DO_NOTHING, db_column='id_section') + id_installation = models.ForeignKey( + Installation, + models.DO_NOTHING, + db_column="id_installation", + blank=True, + null=True, + ) + id_section = models.ForeignKey("Section", models.DO_NOTHING, db_column="id_section") class Meta: - db_table = 'lane' + db_table = "lane" class Model(models.Model): @@ -170,20 +194,20 @@ class Model(models.Model): name = models.TextField() card_name = models.TextField(blank=True, null=True) configuration = models.TextField(blank=True, null=True) - id_brand = models.ForeignKey(Brand, models.DO_NOTHING, db_column='id_brand') + id_brand = models.ForeignKey(Brand, models.DO_NOTHING, db_column="id_brand") class Meta: - db_table = 'model' + db_table = "model" class ModelClass(models.Model): id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False) - id_model = models.ForeignKey(Model, models.DO_NOTHING, db_column='id_model') - id_class = models.ForeignKey(Class, models.DO_NOTHING, db_column='id_class') + id_model = models.ForeignKey(Model, models.DO_NOTHING, db_column="id_model") + id_class = models.ForeignKey(Class, models.DO_NOTHING, db_column="id_class") class Meta: - db_table = 'model_class' - unique_together = (('id_model', 'id_class'),) + db_table = "model_class" + unique_together = (("id_model", "id_class"),) class 
Section(models.Model): @@ -196,31 +220,47 @@ class Section(models.Model): way = models.CharField(max_length=1, blank=True, null=True) start_pr = models.TextField(blank=True, null=True) end_pr = models.TextField(blank=True, null=True) - start_dist = models.DecimalField(max_digits=1000, decimal_places=500, blank=True, null=True) - end_dist = models.DecimalField(max_digits=1000, decimal_places=500, blank=True, null=True) + start_dist = models.DecimalField( + max_digits=1000, decimal_places=500, blank=True, null=True + ) + end_dist = models.DecimalField( + max_digits=1000, decimal_places=500, blank=True, null=True + ) place_name = models.TextField(blank=True, null=True) geometry = models.LineStringField(srid=2056) start_validity = models.DateField(blank=True, null=True) end_validity = models.DateField(blank=True, null=True) class Meta: - db_table = 'section' + db_table = "section" class Sensor(models.Model): id = models.BigAutoField(primary_key=True) - id_lane = models.ForeignKey(Lane, models.DO_NOTHING, db_column='id_lane', blank=True, null=True) - id_sensor_type = models.ForeignKey('SensorType', models.DO_NOTHING, db_column='id_sensor_type', blank=True, null=True) + id_lane = models.ForeignKey( + Lane, models.DO_NOTHING, db_column="id_lane", blank=True, null=True + ) + id_sensor_type = models.ForeignKey( + "SensorType", + models.DO_NOTHING, + db_column="id_sensor_type", + blank=True, + null=True, + ) start_pr = models.TextField(blank=True, null=True) end_pr = models.TextField(blank=True, null=True) - start_dist = models.DecimalField(max_digits=1000, decimal_places=500, blank=True, null=True) - end_dist = models.DecimalField(max_digits=1000, decimal_places=500, blank=True, null=True) + start_dist = models.DecimalField( + max_digits=1000, decimal_places=500, blank=True, null=True + ) + end_dist = models.DecimalField( + max_digits=1000, decimal_places=500, blank=True, null=True + ) start_service_date = models.DateField(blank=True, null=True) end_service_date = 
models.DateField(blank=True, null=True) geometry = models.GeometryField(blank=True, null=True, srid=2056) class Meta: - db_table = 'sensor' + db_table = "sensor" class SensorType(models.Model): @@ -229,44 +269,56 @@ class SensorType(models.Model): permanent = models.BooleanField(blank=True, null=True) class Meta: - db_table = 'sensor_type' + db_table = "sensor_type" class SensorTypeClass(models.Model): id = models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True) - id_sensor_type = models.ForeignKey(SensorType, models.DO_NOTHING, db_column='id_sensor_type') - id_class = models.ForeignKey(Class, models.DO_NOTHING, db_column='id_class') + id_sensor_type = models.ForeignKey( + SensorType, models.DO_NOTHING, db_column="id_sensor_type" + ) + id_class = models.ForeignKey(Class, models.DO_NOTHING, db_column="id_class") class Meta: - db_table = 'sensor_type_class' + db_table = "sensor_type_class" class SensorTypeInstallation(models.Model): id = models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True) - id_sensor_type = models.ForeignKey(SensorType, models.DO_NOTHING, db_column='id_sensor_type') - id_installation = models.ForeignKey(Installation, models.DO_NOTHING, db_column='id_installation') + id_sensor_type = models.ForeignKey( + SensorType, models.DO_NOTHING, db_column="id_sensor_type" + ) + id_installation = models.ForeignKey( + Installation, models.DO_NOTHING, db_column="id_installation" + ) class Meta: - db_table = 'sensor_type_installation' + db_table = "sensor_type_installation" class SensorTypeModel(models.Model): id = models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True) - id_sensor_type = models.ForeignKey(SensorType, models.DO_NOTHING, db_column='id_sensor_type') - id_model = models.ForeignKey(Model, models.DO_NOTHING, db_column='id_model') + id_sensor_type = models.ForeignKey( + SensorType, models.DO_NOTHING, db_column="id_sensor_type" + ) + id_model = models.ForeignKey(Model, models.DO_NOTHING, 
db_column="id_model") class Meta: - db_table = 'sensor_type_model' + db_table = "sensor_type_model" class SensorTypeSection(models.Model): id = models.BigAutoField(primary_key=True) geometry = models.GeometryField(blank=True, null=True, srid=2056) - id_sensor_type = models.ForeignKey(SensorType, models.DO_NOTHING, db_column='id_sensor_type', blank=True, null=True) - id_section = models.ForeignKey(Section, models.DO_NOTHING, db_column='id_section', blank=True, null=True) + id_sensor_type = models.ForeignKey( + SensorType, models.DO_NOTHING, db_column="id_sensor_type", blank=True, null=True + ) + id_section = models.ForeignKey( + Section, models.DO_NOTHING, db_column="id_section", blank=True, null=True + ) class Meta: - db_table = 'sensor_type_section' + db_table = "sensor_type_section" class SpecialPeriod(models.Model): @@ -278,7 +330,7 @@ class SpecialPeriod(models.Model): influence = models.TextField(blank=True, null=True) class Meta: - db_table = 'special_period' + db_table = "special_period" class Sector(models.Model): @@ -286,7 +338,7 @@ class Sector(models.Model): geometry = models.PolygonField(blank=True, null=True, srid=2056) class Meta: - db_table = 'sector' + db_table = "sector" class Municipality(models.Model): @@ -295,4 +347,4 @@ class Municipality(models.Model): name = models.TextField(blank=False, null=False) class Meta: - db_table = 'municipality' + db_table = "municipality" diff --git a/comptages/ics/ics_importer.py b/comptages/ics/ics_importer.py index 0b7045cf..ce584cf9 100644 --- a/comptages/ics/ics_importer.py +++ b/comptages/ics/ics_importer.py @@ -5,17 +5,18 @@ from comptages.core.utils import push_info -class IcsImporter(): +class IcsImporter: def __init__(self, layers): self.layers = layers self.ask_for_file() def ask_for_file(self): file_dialog = QFileDialog() - title = 'Importer fichier ics' - path = '/' + title = "Importer fichier ics" + path = "/" file_path = QFileDialog.getOpenFileName( - file_dialog, title, path, "Data file (*.ICS 
*.ics)")[0] + file_dialog, title, path, "Data file (*.ICS *.ics)" + )[0] if not file_path: return @@ -23,33 +24,28 @@ def ask_for_file(self): self.import_file(file_path) def import_file(self, file_path): - ics = open(file_path, 'rb') + ics = open(file_path, "rb") cal = icalendar.Calendar.from_ical(ics.read()) ics.close() - for event in cal.walk('vevent'): - - if 'DTSTART' not in event and 'DTEND' not in event: + for event in cal.walk("vevent"): + if "DTSTART" not in event and "DTEND" not in event: continue - summary = '' - if 'SUMMARY' in event: - summary = str(event['SUMMARY']) + summary = "" + if "SUMMARY" in event: + summary = str(event["SUMMARY"]) - location = '' - if 'LOCATION' in event: - location = str(event['LOCATION']) + location = "" + if "LOCATION" in event: + location = str(event["LOCATION"]) self.layers.write_special_period( - event['DTSTART'].dt, - event['DTEND'].dt, - summary, - location, - '') + event["DTSTART"].dt, event["DTEND"].dt, summary, location, "" + ) # str(event['PRIORITY']) # event['CATEGORIES'].cats - push_info('Importation terminée') - QgsMessageLog.logMessage( - 'Import ics finished', 'Comptages', Qgis.Info) + push_info("Importation terminée") + QgsMessageLog.logMessage("Import ics finished", "Comptages", Qgis.Info) diff --git a/comptages/plan/plan_creator.py b/comptages/plan/plan_creator.py index a3a7097c..d3c26c15 100644 --- a/comptages/plan/plan_creator.py +++ b/comptages/plan/plan_creator.py @@ -1,106 +1,98 @@ import os -from qgis.core import ( - QgsPrintLayout, QgsProject, QgsReadWriteContext, - QgsLayoutExporter) +from qgis.core import QgsPrintLayout, QgsProject, QgsReadWriteContext, QgsLayoutExporter from qgis.PyQt.QtXml import QDomDocument from qgis.utils import iface from comptages.core.settings import Settings from comptages.datamodel import models -class PlanCreator(): +class PlanCreator: def __init__(self): self.settings = Settings() - def export_pdf(self, count, file_name): - + def export_pdf(self, count: models.Count, 
file_name: str): current_dir = os.path.dirname(os.path.abspath(__file__)) - qpt_file_path = os.path.join( - current_dir, os.pardir, 'qml', 'plan.qpt') - self.layout = PlanCreator.create_layout_from_template( - qpt_file_path) + qpt_file_path = os.path.join(current_dir, os.pardir, "qml", "plan.qpt") + self.layout = PlanCreator.create_layout_from_template(qpt_file_path) self.set_fields(count) canvas = iface.mapCanvas() - map_item = self.layout.itemById('map') + map_item = self.layout.itemById("map") map_item.setExtent(canvas.extent()) exporter = QgsLayoutExporter(self.layout) - exporter.exportToPdf( - file_name, exporter.PdfExportSettings()) + exporter.exportToPdf(file_name, exporter.PdfExportSettings()) - def set_fields(self, count): - section = models.Section.objects.filter(lane__id_installation__count=count).distinct()[0] + def set_fields(self, count: models.Count): + section = models.Section.objects.filter( + lane__id_installation__count=count + ).distinct()[0] - self.set_text_item('f_01', count.id_installation.name) + self.set_text_item("f_01", count.id_installation.name) municipality = count.id_installation.municipality - municipality_name = '' + municipality_name = "" if municipality: municipality_name = municipality.name - self.set_text_item('f_03', municipality_name) - self.set_text_item('f_04', section.owner) - self.set_text_item('f_05', section.road) - self.set_text_item('f_06', section.way) - self.set_text_item( - 'f_07', - '{} + {} m'.format( - section.start_pr, - round(section.start_dist, 3))) - self.set_text_item( - 'f_08', - '{} + {} m'.format( - section.end_pr, - round(section.end_dist, 3))) - self.set_text_item('f_09', section.place_name) + self.set_text_item("f_03", municipality_name) + self.set_text_item("f_04", section.owner) + self.set_text_item("f_05", section.road) + self.set_text_item("f_06", section.way) self.set_text_item( - 'f_10', - count.start_process_date.strftime( - '%d.%m.%Y (%A)')) - self.set_text_item( - 'f_11', - 
count.end_process_date.strftime( - '%d.%m.%Y (%A)')) - self.set_text_item('f_14', '') - self.set_text_item('f_15', '') - - direction_desc = '' - if models.Lane.objects.filter(id_installation__count=count, direction=1).first(): - direction_desc = models.Lane.objects.filter(id_installation__count=count, direction=1).first().direction_desc - self.set_text_item( - 'f_30', - direction_desc) - - direction_desc = '' - if models.Lane.objects.filter(id_installation__count=count, direction=2).first(): - direction_desc = models.Lane.objects.filter(id_installation__count=count, direction=2).first().direction_desc + "f_07", "{} + {} m".format(section.start_pr, round(section.start_dist, 3)) + ) self.set_text_item( - 'f_31', - direction_desc) + "f_08", "{} + {} m".format(section.end_pr, round(section.end_dist, 3)) + ) + self.set_text_item("f_09", section.place_name) + self.set_text_item("f_10", count.start_process_date.strftime("%d.%m.%Y (%A)")) + self.set_text_item("f_11", count.end_process_date.strftime("%d.%m.%Y (%A)")) + self.set_text_item("f_14", "") + self.set_text_item("f_15", "") + + direction_desc = "" + if models.Lane.objects.filter( + id_installation__count=count, direction=1 + ).first(): + direction_desc = ( + models.Lane.objects.filter(id_installation__count=count, direction=1) + .first() + .direction_desc + ) + self.set_text_item("f_30", direction_desc) + + direction_desc = "" + if models.Lane.objects.filter( + id_installation__count=count, direction=2 + ).first(): + direction_desc = ( + models.Lane.objects.filter(id_installation__count=count, direction=2) + .first() + .direction_desc + ) + self.set_text_item("f_31", direction_desc) # Page 2 - self.set_text_item('f_17', 'Campagne de comptage') + self.set_text_item("f_17", "Campagne de comptage") self.set_text_item( - 'f_18', - 'Pose {}'.format(count.start_put_date.strftime( - '%A %d.%m.%Y'))) + "f_18", "Pose {}".format(count.start_put_date.strftime("%A %d.%m.%Y")) + ) self.set_text_item( - 'f_19', - 'Dépose 
{}'.format(count.end_put_date.strftime( - '%A %d.%m.%Y'))) - self.set_text_item('f_20', section.place_name) - self.set_text_item('f_21', count.id_installation.name) - self.set_text_item('f_22', '') - self.set_text_item('f_23', '') - - self.set_picture_item('picture_1', count.id_installation.picture) + "f_19", "Dépose {}".format(count.end_put_date.strftime("%A %d.%m.%Y")) + ) + self.set_text_item("f_20", section.place_name) + self.set_text_item("f_21", count.id_installation.name) + self.set_text_item("f_22", "") + self.set_text_item("f_23", "") + + self.set_picture_item("picture_1", count.id_installation.picture) current_dir = os.path.dirname(os.path.abspath(__file__)) self.set_picture_item( - 'logo', - os.path.join(current_dir, os.pardir, 'images', 'logo_ne.png')) + "logo", os.path.join(current_dir, os.pardir, "images", "logo_ne.png") + ) def set_text_item(self, name, text): self.layout.itemById(name).setText(text) @@ -109,16 +101,14 @@ def set_picture_item(self, name, file_name): if not file_name: return - picture_path = os.path.join( - self.settings.value('picture_directory'), file_name) + picture_path = os.path.join(self.settings.value("picture_directory"), file_name) self.layout.itemById(name).setPicturePath(picture_path) @staticmethod def create_layout_from_template(template_filename): layout = QgsPrintLayout(QgsProject().instance()) document = QDomDocument() - with open(os.path.join('data', 'general', - template_filename)) as template_file: + with open(os.path.join("data", "general", template_filename)) as template_file: template_content = template_file.read() document.setContent(template_content) layout.loadFromTemplate(document, QgsReadWriteContext()) diff --git a/comptages/qgissettingmanager b/comptages/qgissettingmanager index 820aec39..565a0bcd 160000 --- a/comptages/qgissettingmanager +++ b/comptages/qgissettingmanager @@ -1 +1 @@ -Subproject commit 820aec39b70ef83626658730f796d5008c20f1dd +Subproject commit 565a0bcd496ec3e42b3e39e06abb7cd24b8af48d diff 
--git a/comptages/report/yearly_report_bike.py b/comptages/report/yearly_report_bike.py index 3112a35c..c65934e9 100644 --- a/comptages/report/yearly_report_bike.py +++ b/comptages/report/yearly_report_bike.py @@ -1,4 +1,3 @@ - import os from string import ascii_uppercase @@ -6,7 +5,12 @@ from django.db.models import Sum, Avg, Max, Count from django.db.models.functions import Cast from django.db.models.fields import DateField -from django.db.models.functions import ExtractIsoWeekDay, ExtractHour, ExtractMonth, ExtractDay +from django.db.models.functions import ( + ExtractIsoWeekDay, + ExtractHour, + ExtractMonth, + ExtractDay, +) from openpyxl import load_workbook @@ -14,7 +18,7 @@ from comptages.datamodel.models import CountDetail, Section, Lane -class YearlyReportBike(): +class YearlyReportBike: def __init__(self, file_path, year, section_id): # TODO: pass section or section id? @@ -23,7 +27,6 @@ def __init__(self, file_path, year, section_id): self.section_id = section_id def values_by_direction(self): - # Get all the count details for section and the year qs = CountDetail.objects.filter( id_lane__id_section__id=self.section_id, @@ -33,11 +36,12 @@ def values_by_direction(self): ) # Total by day of the week (0->monday, 7->sunday) and by direction - result = qs.annotate(weekday=ExtractIsoWeekDay('timestamp')) \ - .values('weekday') \ - .annotate(total=Sum('times')) \ - .values('weekday', 'id_lane__direction', 'total') - + result = ( + qs.annotate(weekday=ExtractIsoWeekDay("timestamp")) + .values("weekday") + .annotate(total=Sum("times")) + .values("weekday", "id_lane__direction", "total") + ) def values_by_day_and_hour(self): # Get all the count details for section and the year @@ -51,11 +55,13 @@ def values_by_day_and_hour(self): # real days (with sum) and then aggregate by weekday (with average) # Total by day of the week (0->monday, 6->sunday) and by hour (0->23) - result = qs.annotate(weekday=ExtractIsoWeekDay('timestamp')) \ - 
.annotate(hour=ExtractHour('timestamp')) \ - .values('weekday', 'hour') \ - .annotate(tjm=Sum('times') / 51) \ - .values('weekday', 'hour', 'tjm') + result = ( + qs.annotate(weekday=ExtractIsoWeekDay("timestamp")) + .annotate(hour=ExtractHour("timestamp")) + .values("weekday", "hour") + .annotate(tjm=Sum("times") / 51) + .values("weekday", "hour", "tjm") + ) return result @@ -72,10 +78,12 @@ def values_by_hour_and_direction(self, direction, weekdays=[0, 1, 2, 3, 4, 5, 6] # TODO: don't divide by 365 # Total by hour (0->23) - result = qs.annotate(hour=ExtractHour('timestamp')) \ - .values('hour') \ - .annotate(tjm=Sum('times') / 365) \ - .values('hour', 'tjm') + result = ( + qs.annotate(hour=ExtractHour("timestamp")) + .values("hour") + .annotate(tjm=Sum("times") / 365) + .values("hour", "tjm") + ) return result @@ -91,11 +99,13 @@ def values_by_day_and_month(self): # real days (with sum) and then aggregate by weekday (with average) # Total by day of the week (0->monday, 6->sunday) and by month (1->12) - result = qs.annotate(weekday=ExtractIsoWeekDay('timestamp')) \ - .annotate(month=ExtractMonth('timestamp')) \ - .values('weekday', 'month') \ - .annotate(tjm=Sum('times') / 12) \ - .values('weekday', 'month', 'tjm') + result = ( + qs.annotate(weekday=ExtractIsoWeekDay("timestamp")) + .annotate(month=ExtractMonth("timestamp")) + .values("weekday", "month") + .annotate(tjm=Sum("times") / 12) + .values("weekday", "month", "tjm") + ) return result @@ -108,10 +118,12 @@ def values_by_day(self): ) # Group by date - result = qs.annotate(date=Cast('timestamp', DateField())) \ - .values('date') \ - .annotate(tjm=Sum('times')) \ - .values('date', 'tjm') + result = ( + qs.annotate(date=Cast("timestamp", DateField())) + .values("date") + .annotate(tjm=Sum("times")) + .values("date", "tjm") + ) return result @@ -126,10 +138,12 @@ def values_by_day_of_week(self): # real days (with sum) and then aggregate by weekday (with average) # Group by day of the week (0->monday, 7->sunday) - 
result = qs.annotate(weekday=ExtractIsoWeekDay('timestamp')) \ - .values('weekday') \ - .annotate(tjm=Sum('times') / 51) \ - .values('weekday', 'tjm') + result = ( + qs.annotate(weekday=ExtractIsoWeekDay("timestamp")) + .values("weekday") + .annotate(tjm=Sum("times") / 51) + .values("weekday", "tjm") + ) return result @@ -141,11 +155,15 @@ def values_by_class(self): import_status=definitions.IMPORT_STATUS_DEFINITIVE, ) - result = qs.annotate(res=Sum('times')).values('res').values('id_category__code').annotate(tjm=Count('id_category__code')) + result = ( + qs.annotate(res=Sum("times")) + .values("res") + .values("id_category__code") + .annotate(tjm=Count("id_category__code")) + ) return result def tjm_direction_bike(self, categories, direction, weekdays=[0, 1, 2, 3, 4, 5, 6]): - qs = CountDetail.objects.filter( id_lane__id_section__id=self.section_id, timestamp__year=self.year, @@ -156,74 +174,86 @@ def tjm_direction_bike(self, categories, direction, weekdays=[0, 1, 2, 3, 4, 5, ) # TODO: avoid the division? 
- return qs.aggregate(res=Sum('times'))['res'] / 365 + return qs.aggregate(res=Sum("times"))["res"] / 365 def total(self, categories=[1]): - qs = CountDetail.objects.filter( timestamp__year=self.year, id_category__code__in=categories, import_status=definitions.IMPORT_STATUS_DEFINITIVE, ) - return qs.aggregate(res=Sum('times'))['res'] + return qs.aggregate(res=Sum("times"))["res"] def max_day(self, categories=[1]): + qs = ( + CountDetail.objects.filter( + timestamp__year=self.year, + id_category__code__in=categories, + import_status=definitions.IMPORT_STATUS_DEFINITIVE, + ) + .annotate(date=Cast("timestamp", DateField())) + .values("date") + .annotate(total=Sum("times")) + .order_by("-total") + ) - qs = CountDetail.objects.filter( - timestamp__year=self.year, - id_category__code__in=categories, - import_status=definitions.IMPORT_STATUS_DEFINITIVE, - ).annotate( - date=Cast('timestamp', DateField())).values('date').annotate(total=Sum('times')).order_by('-total') - - return qs[0]['total'], qs[0]['date'] + return qs[0]["total"], qs[0]["date"] def max_month(self, categories=[1]): + qs = ( + CountDetail.objects.filter( + timestamp__year=self.year, + id_category__code__in=categories, + import_status=definitions.IMPORT_STATUS_DEFINITIVE, + ) + .annotate(month=ExtractMonth("timestamp")) + .values("month") + .annotate(total=Sum("times")) + .order_by("-total") + ) - qs = CountDetail.objects.filter( - timestamp__year=self.year, - id_category__code__in=categories, - import_status=definitions.IMPORT_STATUS_DEFINITIVE, - ).annotate( - month=ExtractMonth('timestamp')).values('month').annotate(total=Sum('times')).order_by('-total') - - return qs[0]['total'], qs[0]['month'] + return qs[0]["total"], qs[0]["month"] def min_month(self, categories=[1]): + qs = ( + CountDetail.objects.filter( + timestamp__year=self.year, + id_category__code__in=categories, + import_status=definitions.IMPORT_STATUS_DEFINITIVE, + ) + .annotate(month=ExtractMonth("timestamp")) + .values("month") + 
.annotate(total=Sum("times")) + .order_by("total") + ) - qs = CountDetail.objects.filter( - timestamp__year=self.year, - id_category__code__in=categories, - import_status=definitions.IMPORT_STATUS_DEFINITIVE, - ).annotate( - month=ExtractMonth('timestamp')).values('month').annotate(total=Sum('times')).order_by('total') - - return qs[0]['total'], qs[0]['month'] + return qs[0]["total"], qs[0]["month"] def run(self): current_dir = os.path.dirname(os.path.abspath(__file__)) - template = os.path.join(current_dir, 'template_yearly_bike.xlsx') + template = os.path.join(current_dir, "template_yearly_bike.xlsx") workbook = load_workbook(filename=template) - ws = workbook['Data_count'] + ws = workbook["Data_count"] section = Section.objects.get(id=self.section_id) - ws['B3'] = ('Poste de comptage : {} Axe : {}:{}{} ' - 'PR {} + {} m à PR {} + {} m').format( - section.id, - section.owner, - section.road, - section.way, - section.start_pr, - int(round(section.start_dist)), - section.end_pr, - int(round(section.end_dist))) - - ws['B4'] = 'Periode de comptage du 01/01/{0} au 31/12/{0}'.format( - self.year) - - ws['B5'] = 'Comptage {}'.format(self.year) + ws["B3"] = ( + "Poste de comptage : {} Axe : {}:{}{} " "PR {} + {} m à PR {} + {} m" + ).format( + section.id, + section.owner, + section.road, + section.way, + section.start_pr, + int(round(section.start_dist)), + section.end_pr, + int(round(section.end_dist)), + ) + + ws["B4"] = "Periode de comptage du 01/01/{0} au 31/12/{0}".format(self.year) + + ws["B5"] = "Comptage {}".format(self.year) # Get one count for the section and the year to get the base data count_detail = CountDetail.objects.filter( @@ -233,31 +263,32 @@ def run(self): )[0] count = count_detail.id_count - ws['B6'] = 'Type de capteur : {}'.format(count.id_sensor_type.name) - ws['B7'] = 'Modèle : {}'.format(count.id_model.name) - ws['B8'] = 'Classification : {}'.format(count.id_class.name) - ws['B9'] = 'Comptage véhicule par véhicule' + ws["B6"] = "Type de capteur 
: {}".format(count.id_sensor_type.name) + ws["B7"] = "Modèle : {}".format(count.id_model.name) + ws["B8"] = "Classification : {}".format(count.id_class.name) + ws["B9"] = "Comptage véhicule par véhicule" - ws['B12'] = 'Remarque : {}'.format(count.remarks) + ws["B12"] = "Remarque : {}".format(count.remarks) lanes = Lane.objects.filter(id_installation=count.id_installation) - ws['B13'] = lanes[0].direction_desc - if(len(lanes) > 1): - ws['B14'] = lanes[1].direction_desc + ws["B13"] = lanes[0].direction_desc + if len(lanes) > 1: + ws["B14"] = lanes[1].direction_desc - ws['B11'] = lanes[0].id_section.place_name + ws["B11"] = lanes[0].id_section.place_name - ws = workbook['AN_TE'] + ws = workbook["AN_TE"] row_offset = 14 column_offset = 1 data = self.values_by_day_and_hour() for i in data: ws.cell( - row=i['hour']+row_offset, - column=i['weekday']+column_offset, - value=i['tjm']) + row=i["hour"] + row_offset, + column=i["weekday"] + column_offset, + value=i["tjm"], + ) row_offset = 47 column_offset = 1 @@ -265,77 +296,62 @@ def run(self): data = self.values_by_day_and_month() for i in data: ws.cell( - row=i['month']+row_offset, - column=i['weekday']+column_offset, - value=i['tjm']) + row=i["month"] + row_offset, + column=i["weekday"] + column_offset, + value=i["tjm"], + ) - ws = workbook['CV_LV'] + ws = workbook["CV_LV"] - ws['F11'] = self.tjm_direction_bike([1], 1, weekdays=[0, 1, 2, 3, 4]) - ws['G11'] = self.tjm_direction_bike([1], 2, weekdays=[0, 1, 2, 3, 4]) - ws['H11'] = self.tjm_direction_bike([2, 3, 4, 5], 1, weekdays=[0, 1, 2, 3, 4]) - ws['I11'] = self.tjm_direction_bike([2, 3, 4, 5], 2, weekdays=[0, 1, 2, 3, 4]) + ws["F11"] = self.tjm_direction_bike([1], 1, weekdays=[0, 1, 2, 3, 4]) + ws["G11"] = self.tjm_direction_bike([1], 2, weekdays=[0, 1, 2, 3, 4]) + ws["H11"] = self.tjm_direction_bike([2, 3, 4, 5], 1, weekdays=[0, 1, 2, 3, 4]) + ws["I11"] = self.tjm_direction_bike([2, 3, 4, 5], 2, weekdays=[0, 1, 2, 3, 4]) - ws['F12'] = self.tjm_direction_bike([1], 1) - 
ws['G12'] = self.tjm_direction_bike([1], 2) - ws['H12'] = self.tjm_direction_bike([2, 3, 4, 5], 1) - ws['I12'] = self.tjm_direction_bike([2, 3, 4, 5], 2) + ws["F12"] = self.tjm_direction_bike([1], 1) + ws["G12"] = self.tjm_direction_bike([1], 2) + ws["H12"] = self.tjm_direction_bike([2, 3, 4, 5], 1) + ws["I12"] = self.tjm_direction_bike([2, 3, 4, 5], 2) - ws['J35'] = self.total() - ws['J39'] = self.max_day()[0] - ws['K39'] = self.max_day()[1] + ws["J35"] = self.total() + ws["J39"] = self.max_day()[0] + ws["K39"] = self.max_day()[1] - ws['J40'] = self.max_month()[0] - ws['K40'] = self.max_month()[1] + ws["J40"] = self.max_month()[0] + ws["K40"] = self.max_month()[1] - ws['J41'] = self.min_month()[0] - ws['k41'] = self.min_month()[1] + ws["J41"] = self.min_month()[0] + ws["k41"] = self.min_month()[1] - ws = workbook['Data_year'] + ws = workbook["Data_year"] row_offset = 4 column_offset = 1 data = self.values_by_day() row = row_offset for i in data: - ws.cell( - row=row, - column=column_offset, - value=i['date'] - ) - ws.cell( - row=row, - column=column_offset + 1, - value=i['tjm'] - ) + ws.cell(row=row, column=column_offset, value=i["date"]) + ws.cell(row=row, column=column_offset + 1, value=i["tjm"]) row += 1 - ws = workbook['Data_week'] + ws = workbook["Data_week"] row_offset = 4 column_offset = 2 data = self.values_by_day_of_week() row = row_offset for i in data: - ws.cell( - row=row, - column=column_offset, - value=i['tjm'] - ) + ws.cell(row=row, column=column_offset, value=i["tjm"]) row += 1 - ws = workbook['Data_hour'] + ws = workbook["Data_hour"] row_offset = 5 column_offset = 3 data = self.values_by_hour_and_direction(1) row = row_offset for i in data: - ws.cell( - row=row, - column=column_offset, - value=i['tjm'] - ) + ws.cell(row=row, column=column_offset, value=i["tjm"]) row += 1 row_offset = 5 @@ -344,11 +360,7 @@ def run(self): data = self.values_by_hour_and_direction(2) row = row_offset for i in data: - ws.cell( - row=row, - column=column_offset, - 
value=i['tjm'] - ) + ws.cell(row=row, column=column_offset, value=i["tjm"]) row += 1 # Weekend days only @@ -358,11 +370,7 @@ def run(self): data = self.values_by_hour_and_direction(1, [5, 6]) row = row_offset for i in data: - ws.cell( - row=row, - column=column_offset, - value=i['tjm'] - ) + ws.cell(row=row, column=column_offset, value=i["tjm"]) row += 1 row_offset = 37 @@ -371,35 +379,28 @@ def run(self): data = self.values_by_hour_and_direction(2, [5, 6]) row = row_offset for i in data: - ws.cell( - row=row, - column=column_offset, - value=i['tjm'] - ) + ws.cell(row=row, column=column_offset, value=i["tjm"]) row += 1 - ws = workbook['Data_class'] + ws = workbook["Data_class"] row_offset = 4 column_offset = 2 data = self.values_by_class() row = row_offset for i in data: - ws.cell( - row=row, - column=column_offset, - value=i['tjm'] - ) + ws.cell(row=row, column=column_offset, value=i["tjm"]) row += 1 - ws = workbook['AN_GR'] - ws.print_area = 'A1:Z62' + ws = workbook["AN_GR"] + ws.print_area = "A1:Z62" - ws = workbook['CAT'] - ws.print_area = 'A1:Z62' + ws = workbook["CAT"] + ws.print_area = "A1:Z62" # Save the file output = os.path.join( - self.file_path, '{}_{}_r.xlsx'.format(self.section_id, self.year)) + self.file_path, "{}_{}_r.xlsx".format(self.section_id, self.year) + ) workbook.save(filename=output) diff --git a/comptages/test/test_import.py b/comptages/test/test_import.py index 1392e580..6f438c09 100644 --- a/comptages/test/test_import.py +++ b/comptages/test/test_import.py @@ -9,7 +9,6 @@ class ImportTest(TransactionTestCase): - @classmethod def setUpClass(cls): pass @@ -40,9 +39,7 @@ def test_import_vbv1(self): id_installation=installation, ) - importer.import_file( - utils.test_data_path("00056520.V01"), - count) + importer.import_file(utils.test_data_path("00056520.V01"), count) self.assertEqual(models.CountDetail.objects.count(), 18114) @@ -75,9 +72,7 @@ def test_import_mc(self): id_installation=installation, ) - importer.import_file( - 
utils.test_data_path("64210836_TCHO-Capitaine.txt"), - count) + importer.import_file(utils.test_data_path("64210836_TCHO-Capitaine.txt"), count) self.assertEqual(models.CountDetail.objects.count(), 25867) @@ -110,9 +105,7 @@ def test_import_int2(self): id_installation=installation, ) - importer.import_file( - utils.test_data_path("10020260.A01"), - count) + importer.import_file(utils.test_data_path("10020260.A01"), count) tz = pytz.timezone("Europe/Zurich") @@ -144,8 +137,8 @@ def test_import_simple_int2(self): ) importer.import_file( - utils.test_data_path("simple_aggregate_multi_spec.i00"), - count) + utils.test_data_path("simple_aggregate_multi_spec.i00"), count + ) self.assertEqual(models.CountDetail.objects.count(), 52) @@ -184,7 +177,19 @@ def test_cat_bin(self): self.assertEqual( importer._populate_category_dict(count), - {0: 922, 1: 22, 2: 23, 3: 24, 4: 25, 5: 26, 6: 27, 7: 28, 8: 29, 9: 30, 10: 31} + { + 0: 922, + 1: 22, + 2: 23, + 3: 24, + 4: 25, + 5: 26, + 6: 27, + 7: 28, + 8: 29, + 9: 30, + 10: 31, + }, ) def test_lane_dict(self): diff --git a/comptages/test/test_report.py b/comptages/test/test_report.py index 4ae48665..b508653b 100644 --- a/comptages/test/test_report.py +++ b/comptages/test/test_report.py @@ -9,7 +9,6 @@ class ImportTest(TransactionTestCase): - @classmethod def setUpClass(cls): pass @@ -42,11 +41,7 @@ def test_report(self): id_installation=installation, ) - importer.import_file( - utils.test_data_path("00056520.V01"), - count) - importer.import_file( - utils.test_data_path("00056520.V02"), - count) + importer.import_file(utils.test_data_path("00056520.V01"), count) + importer.import_file(utils.test_data_path("00056520.V02"), count) report.prepare_reports("/tmp/", count) diff --git a/comptages/test/test_statistics.py b/comptages/test/test_statistics.py index ae69dc44..60a2bc4c 100644 --- a/comptages/test/test_statistics.py +++ b/comptages/test/test_statistics.py @@ -9,7 +9,6 @@ class StatisticsTest(TransactionTestCase): - @classmethod 
def setUpClass(cls): pass @@ -42,112 +41,140 @@ def test_time_data(self): id_installation=installation, ) - importer.import_file( - utils.test_data_path("00056520.V01"), - count) + importer.import_file(utils.test_data_path("00056520.V01"), count) self.assertEqual(models.CountDetail.objects.count(), 18114) - section = models.Section.objects.filter(lane__id_installation__count=count).distinct()[0] + section = models.Section.objects.filter( + lane__id_installation__count=count + ).distinct()[0] df = statistics.get_time_data(count, section) - self.assertEqual(df['thm'][0], 305) - self.assertEqual(df['thm'][1], 1306) + self.assertEqual(df["thm"][0], 305) + self.assertEqual(df["thm"][1], 1306) df, mean = statistics.get_day_data(count, section, direction=1) - self.assertEqual(df['tj'][0], 9871) + self.assertEqual(df["tj"][0], 9871) df, mean = statistics.get_day_data(count, section, direction=2) self.assertEqual(mean, 8243) - df = statistics.get_category_data(count, section, status=definitions.IMPORT_STATUS_QUARANTINE) - self.assertEqual(df['value'][0], 1) - self.assertEqual(df['value'][1], 93) - self.assertEqual(df['value'][2], 1) - self.assertEqual(df['value'][3], 17315) - self.assertEqual(df['value'][4], 16) - self.assertEqual(df['value'][5], 570) - self.assertEqual(df['value'][6], 15) - self.assertEqual(df['value'][7], 4) - self.assertEqual(df['value'][8], 70) - self.assertEqual(df['value'][9], 12) - self.assertEqual(df['value'][10], 17) + df = statistics.get_category_data( + count, section, status=definitions.IMPORT_STATUS_QUARANTINE + ) + self.assertEqual(df["value"][0], 1) + self.assertEqual(df["value"][1], 93) + self.assertEqual(df["value"][2], 1) + self.assertEqual(df["value"][3], 17315) + self.assertEqual(df["value"][4], 16) + self.assertEqual(df["value"][5], 570) + self.assertEqual(df["value"][6], 15) + self.assertEqual(df["value"][7], 4) + self.assertEqual(df["value"][8], 70) + self.assertEqual(df["value"][9], 12) + self.assertEqual(df["value"][10], 17) df = 
statistics.get_speed_data(count, section) - self.assertEqual(df['times'][0], 0) - self.assertEqual(df['times'][1], 1) - self.assertEqual(df['times'][2], 13) - self.assertEqual(df['times'][3], 638) - self.assertEqual(df['times'][4], 11331) - self.assertEqual(df['times'][5], 5792) - self.assertEqual(df['times'][6], 304) - self.assertEqual(df['times'][7], 29) - self.assertEqual(df['times'][8], 5) - self.assertEqual(df['times'][9], 0) - self.assertEqual(df['times'][10], 0) - self.assertEqual(df['times'][11], 0) - self.assertEqual(df['times'][12], 1) + self.assertEqual(df["times"][0], 0) + self.assertEqual(df["times"][1], 1) + self.assertEqual(df["times"][2], 13) + self.assertEqual(df["times"][3], 638) + self.assertEqual(df["times"][4], 11331) + self.assertEqual(df["times"][5], 5792) + self.assertEqual(df["times"][6], 304) + self.assertEqual(df["times"][7], 29) + self.assertEqual(df["times"][8], 5) + self.assertEqual(df["times"][9], 0) + self.assertEqual(df["times"][10], 0) + self.assertEqual(df["times"][11], 0) + self.assertEqual(df["times"][12], 1) def test_special_period(self): # Add a special period models.SpecialPeriod.objects.create( - start_date=datetime(2020, 1, 1), - end_date=datetime(2020, 1, 31) + start_date=datetime(2020, 1, 1), end_date=datetime(2020, 1, 31) ) self.assertEqual( - len(statistics.get_special_periods( - datetime(2020, 1, 1), - datetime(2020, 1, 1))), - 1) + len( + statistics.get_special_periods( + datetime(2020, 1, 1), datetime(2020, 1, 1) + ) + ), + 1, + ) self.assertEqual( - len(statistics.get_special_periods( - datetime(2020, 1, 1), - datetime(2020, 1, 31))), - 1) + len( + statistics.get_special_periods( + datetime(2020, 1, 1), datetime(2020, 1, 31) + ) + ), + 1, + ) self.assertEqual( - len(statistics.get_special_periods( - datetime(2020, 1, 31), - datetime(2020, 1, 31))), - 1) + len( + statistics.get_special_periods( + datetime(2020, 1, 31), datetime(2020, 1, 31) + ) + ), + 1, + ) self.assertEqual( - len(statistics.get_special_periods( - 
datetime(2019, 12, 25), - datetime(2020, 1, 1))), - 1) + len( + statistics.get_special_periods( + datetime(2019, 12, 25), datetime(2020, 1, 1) + ) + ), + 1, + ) self.assertEqual( - len(statistics.get_special_periods( - datetime(2019, 12, 25), - datetime(2020, 1, 10))), - 1) + len( + statistics.get_special_periods( + datetime(2019, 12, 25), datetime(2020, 1, 10) + ) + ), + 1, + ) self.assertEqual( - len(statistics.get_special_periods( - datetime(2019, 12, 25), - datetime(2019, 12, 26))), - 0) + len( + statistics.get_special_periods( + datetime(2019, 12, 25), datetime(2019, 12, 26) + ) + ), + 0, + ) self.assertEqual( - len(statistics.get_special_periods( - datetime(2021, 12, 25), - datetime(2021, 12, 26))), - 0) + len( + statistics.get_special_periods( + datetime(2021, 12, 25), datetime(2021, 12, 26) + ) + ), + 0, + ) self.assertEqual( - len(statistics.get_special_periods( - datetime(2020, 1, 10), - datetime(2020, 1, 15))), - 1) + len( + statistics.get_special_periods( + datetime(2020, 1, 10), datetime(2020, 1, 15) + ) + ), + 1, + ) self.assertEqual( - len(statistics.get_special_periods( - datetime(2020, 1, 10), - datetime(2020, 2, 15))), - 1) + len( + statistics.get_special_periods( + datetime(2020, 1, 10), datetime(2020, 2, 15) + ) + ), + 1, + ) def test_light_numbers(self): # Create count and import some data @@ -172,11 +199,11 @@ def test_light_numbers(self): id_installation=installation, ) - importer.import_file( - utils.test_data_path("00056365.A00"), - count) + importer.import_file(utils.test_data_path("00056365.A00"), count) - section = models.Section.objects.filter(lane__id_installation__count=count).distinct()[0] + section = models.Section.objects.filter( + lane__id_installation__count=count + ).distinct()[0] res = statistics.get_light_numbers( count, section, @@ -193,8 +220,8 @@ def test_light_numbers(self): end=tz.localize(datetime(2017, 3, 28)), ) - self.assertEqual(res['thm'][0], 252) - self.assertEqual(res['thm'][1], 255) + 
self.assertEqual(res["thm"][0], 252) + self.assertEqual(res["thm"][1], 255) monday = tz.localize(datetime(2017, 3, 27)) @@ -233,18 +260,12 @@ def test_get_speed_data_empty(self): id_installation=installation, ) - importer.import_file( - utils.test_data_path("53409999.V04"), - count) + importer.import_file(utils.test_data_path("53409999.V04"), count) # This is a special case and there are no data for the first 2 sections - sections = models.Section.objects.filter(lane__id_installation__count=count).distinct() - self.assertTrue( - statistics.get_speed_data( - count, - sections[0]).empty) - - self.assertFalse( - statistics.get_speed_data( - count, - sections[2]).empty) + sections = models.Section.objects.filter( + lane__id_installation__count=count + ).distinct() + self.assertTrue(statistics.get_speed_data(count, sections[0]).empty) + + self.assertFalse(statistics.get_speed_data(count, sections[2]).empty) diff --git a/comptages/test/utils.py b/comptages/test/utils.py index 5db68a4a..0f94bf7b 100644 --- a/comptages/test/utils.py +++ b/comptages/test/utils.py @@ -9,9 +9,7 @@ def test_data_path(file_path): """Return the path of file in the directory with the test data.""" path = os.path.join( - os.path.dirname(os.path.realpath(__file__)), - 'test_data/', - file_path + os.path.dirname(os.path.realpath(__file__)), "test_data/", file_path ) return path diff --git a/comptages/ui/resources.py b/comptages/ui/resources.py index 52493b2f..43284472 100644 --- a/comptages/ui/resources.py +++ b/comptages/ui/resources.py @@ -3035,7 +3035,7 @@ \x00\x00\x01\x69\x94\xac\x9a\x0e\ " -qt_version = [int(v) for v in QtCore.qVersion().split('.')] +qt_version = [int(v) for v in QtCore.qVersion().split(".")] if qt_version < [5, 8, 0]: rcc_version = 1 qt_resource_struct = qt_resource_struct_v1 @@ -3043,10 +3043,17 @@ rcc_version = 2 qt_resource_struct = qt_resource_struct_v2 + def qInitResources(): - QtCore.qRegisterResourceData(rcc_version, qt_resource_struct, qt_resource_name, 
qt_resource_data) + QtCore.qRegisterResourceData( + rcc_version, qt_resource_struct, qt_resource_name, qt_resource_data + ) + def qCleanupResources(): - QtCore.qUnregisterResourceData(rcc_version, qt_resource_struct, qt_resource_name, qt_resource_data) + QtCore.qUnregisterResourceData( + rcc_version, qt_resource_struct, qt_resource_name, qt_resource_data + ) + qInitResources() diff --git a/docker-compose.yml b/docker-compose.yml index 7b03a154..2bf562e0 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,12 +1,11 @@ -version: '3.7' - services: qgis: + depends_on: [db] build: ./ volumes: - /tmp/.X11-unix:/tmp/.X11-unix ro - - ./comptages:/root/.local/share/QGIS/QGIS3/profiles/default/python/plugins/comptages/ - - ./test_data:/test_data + - ${PWD}/comptages:/root/.local/share/QGIS/QGIS3/profiles/default/python/plugins/comptages/ + - ${PWD}/test_data:/test_data command: - qgis environment: @@ -14,6 +13,17 @@ services: privileged: true network_mode: host + qgis_tester: + profiles: [local_test] + depends_on: [db] + build: + dockerfile: ./Dockerfile-test + environment: + LOCAL_TEST: 1 + command: sh -c 'xvfb-run python3 manage.py migrate && python3 manage.py test comptages.test -v2' + volumes: + - ${PWD}:/OpenComptage + db: image: postgis/postgis:12-2.5 ports: diff --git a/manage.py b/manage.py index b6305e9a..c756bc78 100644 --- a/manage.py +++ b/manage.py @@ -1,13 +1,13 @@ +import os import sys from comptages import prepare_django from django.core.management import execute_from_command_line if __name__ == "__main__": - default_db = { "ENGINE": "django.contrib.gis.db.backends.postgis", - "HOST": "localhost", + "HOST": "db" if os.environ.get("LOCAL_TEST") == "1" else "localhost", "PORT": "5432", "NAME": "comptages", "USER": "postgres", diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 00000000..851484cf --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,42 @@ +[build-system] +requires = ["setuptools"] +build-backend = "setuptools.build_meta" + 
+[project] +name = "comptages" +version = "0.1" +dynamic = ["dependencies"] + +[project.optional-dependencies] +check = ["pyright>=1.1.36", "black>=23.11.0", "qgis-plugin-ci>=2.8.1"] + +[tool.black] +extend-exclude = ''' +/( + # The following are specific to Black, you probably don't want those. + | qgissettingmanager +)/ +''' + +[tool.qgis-plugin-ci] +plugin_path = "comptages" +github_organization_slug = "opengisch" +project_slug = "OpenComptage" + +[tool.pyright] +pythonVersion = "3.10" +exclude = [ + "**/nodes_modules", + "**/__pycache__", + "**/migrations", + "**/qgissettingmanager", + "**/test", +] + +reportMissingImports = false +useLibraryCodeForTypes = true +[tool.setuptools] +package-dir = { "" = "comptages" } + +[tool.setuptools.dynamic] +dependencies = { file = ["requirements.txt"] } diff --git a/requirements.txt b/requirements.txt index 3414ae9b..48608fd0 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,5 +1,5 @@ nose2==0.8.0 -psycopg2-binary==2.8.6 +psycopg2-binary==2.8.6 # on Debian derivates this requires `libpq-dev` icalendar==4.0.3 openpyxl==3.1.2 django==3.2.15 diff --git a/test_data/dev.qgz b/test_data/dev.qgz new file mode 100644 index 0000000000000000000000000000000000000000..4eecac9565ff61407731bab65b173bce66ac6e02 GIT binary patch literal 33791 zcmbTdW0Yo3llI$HUAArGF0;$FZQHhOyQ<5!ZQEv-ZTs~9nP=WL^PZWt&N?4;4(lbN|uFcjx0^MVcb7M6{E65&KbaVFq{+(j=6$3R$}XuXGh zn1(qh2(pS|Zx#Lq85o#ioE0Z0DlnAKVRxVUb?CkX?BMlNQq6FTV0SC^=H~6_tS8nM zEXQ&L|3F95o0_HQe#gV3>g&qP?(OU0j2mCQA_vF&!>P|ZCp#xc8@_s4hG%6vo`4{R z#FeE@#kS&i%=$UAk#-Gr7v6_Td;NS>PL59Z_sj0W;o8Vd6D6d#LdAsd$F1n++tI}y z3PjH{r2Vy(qe4Z?nP}?ujI6pw+Qu>0}pNW5efV)9H8KZ%$X*~KMw3IVF%HMl)c%^;gTDrP1xMpa~d4)<~6I# z)uT45DUQM(SC{pSoUqcDr`jxEYo$`8nn!cGPfl?CFr@3%(F$b0dfESoSrXq_#B_R| zBbZ-!DxQLAGBt;j24^$B2wW9DcUxO)#ih`g>0zlMM6sP1{ZVgbaPYSCLP2K;Ed@`S z2lQ!u=U@Se%~lx&32WyyYQ87}KGoYV0b7zY@Z^U<#>U3|xHu*wfioi^_z6FP+N(e0 zdEeeKqpns;2(~~p6WRyHtzOL_MLpfn?QzEaTL!AJ0 
zdigViat_=+OWls%K=F8Ewxa{KeVxCffMqd8CG`UE7|nV%34!@Q%!r2Dw&d{`M0UyR`vy&aAOk~=>6SQ zJK%ZSBZ0O0)yc|8r&BAFmFaFnE(BkElBA2Bw{0)9L*`@t6KTTBsFytHfWimr^)nMK zKKpji@`PW6N9YE)i$1WvZ>MSAizE)bKg)*n+Q{aDj?)mA-NPwSM))|HZN%fM{}{7O ztWJ0L}lAJGf!YxWXObM@V5&JF1HI-lsp&-+F)D5uJ-_|=Kh=-qp$r2 zwz-&i^oD@yY_shzVab>BFdmljAeGKNqMzr0Rl~)qXAXos=UY({&M2ULrm|V3K80?) zPlkOXs_JjDm4*EV!ulR-l1}@8*BPrRL~yPP{C;!13g#s@uAKpUA7V=n7OUY$uxIpg zLw+XADII=PaTld^Ih>!iI`X_1?^t6tAxwWCm~&gSP}eqGHK)9jQ+&Lw-Do(_udp`X zs$NqVDSrBVw)-&=b)5QSJs5>PTSX{UPX@CYv(7SnR?5VztR6ha$rw@)V-DM~6~9Rs zQH%lQ4!LDPi`#(6rqgftSiI0C8Dco3Fj;2-$7wByX+id3*m!U|JKIkD8zCKo`1So- z)r^WH2eUUK^p1Z*?GBg&eF)=tk9E$e;iuqIQgHtV8C2nRDj8&@Nx+SzI7q$z3pd8ekB%~kM;oK48Q zU)#lm%*&px_WE_UqBgpS+-X%6lAn`gVW4Sg$0zY<0zqB0jcUASno39Q3#&p{{>z!H zFyT%$)hnBCbE`6SRi>tt^WA@98uRHV(%!8!fwXmMHtgIkxaZxiPgY2XRP;M@#<@48 zR~jNms}&oGPoccg{d{61GGkz!jMn{b0Nx#u^joMxqMfSj-%q2U*T4`EC3U>fqxDL? zK5aTST;f9EblVy^)O0h6;`Jkoe}787VA#+bD_i-jN5u*pRbwBV3wV=;EYnV*0DWAF zxJFMKy_s39EJj7Zi9)|4UU7D;C_UR+S{VZq>~jU8Sjf2tVdUEsoRt=4txEhnHzE4) zMOtxG=#Q#bLcjxk|C{vFkin?8*yh8@6<(TK>4}HtLUIVvxkb&2<#T&E*te>Fb>Xf3 z9{5LGa?1LpGq!|mJZe|b6G!>_!g~3d^s)A(h|rcDK-LdZ)J zqt_4PD$pgl4vm(AmWPyiYsw6&mUxG()|~^**=OQzoaVQwgX^vr61cSK)}s3r)>*ySTF&EquYxd>Cqt)d31W z+c^OFv+SX>L+r>1=Xgx3-+O^^hVV(>?{<3qw273QQ$icNITpER*Ft9q8y}MQ>Bd0^ zgKSrNKeTq6L7ltYJCCCi>duLCuSDudH$Lazi9|wvOLNJPrMbEGAP{H0A7Hc`D#bQ3 zI&Y@pcdTLjEI~Vm-80@rzbw}Wfn{;vWOuCTM*6OhzB0S)DUL8X@**fB zb)}D6HbnoRZsjhGaX#N#@DkOcX3wmNu{^hK?j~2)Qv|+;y-maKtafH`~WM;ayv>rI2dqwUJ#5uiTL3}a~%@JADos& zi?dQ#W|=U9q}`TUYmLjR^^tYo}u9nXqAQc!2ncQ&U2E~Y&2#}uJ~nL7`%hojc(a*s4R>f6|sb9Pe~{%B1{p{o~nMT z48j+sfB6;~39GfdM71fmVQy`VSBQq-RV*7AnYbuJM zH+sy3E+^sBLeA2!|7nR;+}b`>sZ*9}n-cXZnX#iN%@|s<=)%N2@m0zyZ7@Xq-v*6U zYUee@e684gt=YUNdVx%{M4C+&+cu|MQTY{ltloNYAG_?aSz&@r@zy1R3Xf#+s(k!O zdBRl%+OuMBYw7M&*(bEASVp0FLZf*`qkTc6enq2uL&LeJX)MO_lY3=7UyEw8?YFAW zwQ~2n0c=q-g2LuGL_8w(T;mioM;BUdW|FT*GxovuI9Y6|zU0Cr#mf-GDKsnwQOwX# 
zuL5F^{m}3EuSGlkw3mcm&-Pllelhf{M>0pn1gYfhM-j*lSrmlR>e@R4F3`9Q$R`3J=F=sPYL|OmloW6!;%tAuW-{KL3O=A+Ai-{8llMnuAH=z= zLWz;Y_v*Z5D-$V{AFy`D(deV`S2>>+;S!IZ;%JK#+TuYx*yyDbE-nIt`%esdm?9%A z=O8D?x3P}OdU3Gof*KsjbdSyXZgY2JyM1M6(@_;Es4sN)Og=Z`#HGPhYpZ`S!asS6gy6*2qC98s6#Q2v*m1Ei#ZX@-JK@Rkk&oO0L zcE?z9SKHLCa9VTOYL;11Uhp2$PIK8W0O(uFacGY@-GI>#gFQJ7w_}ju#&TzwEuF+8 z`o<{Z{@oHX!H#r0ht~3E!04~RgVw1gKnoGg^Gl z(Re)YjYJ8p?S!+(XDUQqdjr+dLO~D?I8T%)A9?PLo4QhN>hyC822kX8)6jYC+dzzC zQ#cDUgSS7jB8(%xN@S%GErGHjOJF`-U#e(dFQRJ4p6)7f1pOMgXTzY$Hq^uvz+SFg zGBX|BTvK6RE2dVY+IZ4y0@iAqR3R?L;0P+hB=Lo&Lg2#HU_A+^t^AXVrZUt7+Kec^K(ECS5wZJo@E}8C*`0 zk{uRX<=uhdfIBzhTKMsXue*&IJ3X4z3)Zw=9&FE+f%V$(qGOdZsYc}s?_h-PJa+n0 zPrpnVUj^SU5w8NFx?TxbLbE+oE9qaRyApXRB*`sPtrr-QI4*?$bkh*4T)06~J+7oWhaNmDp6Pcd5V$l8ie_T45~DbDV_V|<(R7n=wyJ5wFA zIq>t57uq9M56QkR-Ld3sCbSYdxanv_`{`4*5@=Cd{U>6kK(gmIYbme2jKO?c*q0TW z7ZPH{_{n{kup}v-l=eRU1t7M$c)icD?%CRlxJOH+D)_~2t{Q8mdQvF8uUkGx1{N`E z1-U002qlk85fG5UVfR8pe0?T+D1S>9V_0pM19urll{6L~rLKlQz#PlPwg?%SiLWSe z@VkrZ+TXqw%`-doBy2mR6h%OtFPm?1t3k<<^D64EqZJjRB1HD?LTB-=U zA-$|zimsnscv@ra>~yP{o8?=g#VP8S=h`ha&j*8Oquw9IF36eNAz?SPV@2}iNTEhO zUXi7J5E5X?iuxv9cHEyS<5^@3*-|=A8syARv14E-AZx1U5zHsEv-fn?Txc9->Szmy zO%y~W__G9kxI|sJBblgTO$(6~vvj<rqSx4*9P3an!{SZeQxL}2)PI!;q z^nR|t(!7-kVcrWYvef%)J=+PDmGXVIHm-@#P?sw!DOIFgPe|EUkyF#I+{$WLjLVXd zruQF^Qv*=TlA#L|jdX%qIST2nB|)MRv6+MOu0PA3Bah}E?2l^_$yWl;jf}TyWnz+2 zN!#3}QSHpbts@ZoiP$nigWQ8@(g3k(+@_5XUlpjI7w8|z-yaggkK3mfw}+$N&@ zk&QrF*?an&s)7&_XvU4NT(lOoyfw1>SuEtxhLv7AqbTr;y;FGkiOcO<67ckztS{ZHDE zizSPuF5R6R*c%BCEV1HU3FLk1c$m{S!mM5``s@xX1qgBxinX*s`!LciX^0TS48;Ct z1h;s#=q~QnVt$Z9sRoY(ef7nUkvN>|Wmbk0p!JD}V5yRm!=QOVd%;K_3C?1@hI$M6 zqUl_3aU<*4Cl|x((O@E&&NR^a)^|vs{L2EgkHygE$AKFC{S`5IcoJ;bGJ#Xor>++8 zhFoUjV>m&pX-nvkI#*58lQ?Jd$%~@xkH%+ww4_ZCfBjNaOgw+fhde-Li9QEw?b>`5c{OP_j%phTwI;O zczvNi(cVprN$R{(vb^y&zJ?Ju!3a57XeC?NHIquFWn~X3CCP{G0Iy?d_WrhZJAH!3 z)XxQMngnSg$+QR)iYa$_^xlxt#RzH`0XzUWf;m84EEfRbybB3B;Ot2w#VVwfP06}20yt4zj}cOe%zuo8J{^QCvDXMGO$s>iR9%k|^~@AF 
z$($SkmZoic`OKk$TrM4iDv{3^sZAZ7qQ#*Sk+VMA3lGd=3OKOos5+`SyWdFCtyFhNunV)rc-6e( zHv^VHd@~o81&uPU%i}TxkEVtwlZeTP#}%2pydj>2tSuy}z4WR8q$Y(vG^tfy5KU5T zFjBKzKbnMF89JGKKZ!(|5M&~SJ`$-^evo*Qn+xH;-t^%f@nrH5`70e=?Uhh$kN>xh zJ{*Zmex{$KT#Xl*OrFynZ>xBwz*Acvkra?0B%D+mhJ+?hH%?SO-=wT26i%v(Ap{vn zp^rfNz_44Y!(NUaa~2GK{y4_P3BPPMLXy^PSj_o!N#YfBKI2&#!n57WHGp<89qck( z^9Tjkcq*njV9Sb}z=C7iz~io>)6n9qJmuEx{uLIQi5%wkCirXoON6ITUH7`QR*c?7 zNrOaqW}EHlZMs~&q$-hk?}I~~=n2wwb*F;!0;*G1k$+>|-`T(m;H3tQzuX`C2!A%f z!JDeIciC=cp}w^142oAKx!&0Id`5by(dYj-aZ5{+E1?oV)u(Cd8711-`>;Ej&{1G- z%C#sZiD@z1%5vcx#I$=d~(_+!RUn?MhaXfM34e8h~%$EgdhoG z5Y7ic{2!eri2qh5phu|9S4_0Vulz6Sv3ouM<@^kY!I;ftYEye<^I5sw9)Lg+#3aFe$x~0ell-^BO1Z4Ilk;4_8_FQAG5rcF+MFw!xDMDby{S-D^A|zbVN@Fmh6runXY&U;|!O zGNylEnlWD@QZ+R7(?A0KtHjBui?}5@Wt+w(ZP~`DsTrmzk)>@#F-b+1+D55l?(9h; zVmoDHmR0+lQHO?A^ql#?r2{rgVX2wcwU*{J1x4w%fQH+`=_ctwodM;M8cXQ!<~$e{ zE`M$vQwTE&?x;okpp6jUj{Y{lN)ZR50+eK(7Xb8Z?k4p&kuL zWw2ipLM<8{+OmKdsQ@*iM)<5mkcvWXve&c6n8TD&n@xSbOMSXeeOpL5nmbrV0W~nv1{B))(V-bj3n3aa za-ktb6@nA0fVPSpHnXN<9R>njxd6ZTbHU|H}=* z4B{U`FpDr4_+{!JB3GId4*tnm7iD$+Sgo~Ue4DN7M+iX=0u(~fi=cysA&_iY@k0Jn*YSGqffwXF{Hkby1wqEp0h1I4I54a51>H+V!0*yy^`Zd_p#QzdOczj zuDXmq>Ldh+RErP2CG*_h29y|e@Dcs?Jk2zh^SjxSs^#agxQ-u7vzh7~@z7t()Y35R@8I&8z{m~DqipN=F*24ZXah@H?u;J}64;bPLT zEo;s&)OGt)aZG*9pMWM+Ef678z)_JZN6V4&x9yPoIU&Z3whI#FOONN*JIHlups3q- z{AOqgLTlnLRwmOdP;^J~#HFOdYuHnk2jZxb<7&1+1~jlynO5H* zNt{bRniO`41BW7Ghh64R+vnk#SHOs9pyKH>d`W6u`cYi5&*%8?JwDsb2cd%JqXUhN)HR{*GA70!q%K_i|sBemzZ4|UlgY@w;V-zY?2l1TZiJCeLb_Gcc;-~Z=YE@>>u#%tA7X~V@N!sS|H=h*y|6j%r-IMSr!yR zG8k-NJ#mTPn`~SQqn(B4Mz&hHJOX;ACI$SLwz+5h+Ye!!FLncyqOIUC9&iej6q0S( z7t}%Ejdf3yTEvVS+G1oZjScZ&Q*k-gH4h8413+St)fh?lF65YLT>kXXeyj|v6(pyN z8c9(b8l=n6JE09dFq*_Mgw7k>!+r>w;yD*t&}ItPk$^(W*Xf~7D4mk0lo_OBrIhLa zW9HEG4Pfsr(gX(vIf@q;e#e|TNtG-VpHnY6tZhabp#a45miiNv0OCc;y>Av>$xw6S z(DKR26ep79;TH<3=ZP-A(pl?kqAx5BJ2sS@YHzKr%&>pW4kvJohQQV51^(hlEBKcY zEP{1(q89~sbYc)i|L>-yLE({86aKfcVTtgFP$XOg8ZIgg2SCF~tMR1QY%YE-U$8(} 
zRFOH$OoughHOxRTBASx5bhXrAAR>l39S)$bZy0~1Y^FoyUSFR9GBE*$Wt}LnQ8>#= z*(eS5-ja|ZUXAy@<$@ve_mS7P6#g+W`m%VG#UfF= zr4{FbX%brlStgyMOh{>vOWi+zbBg)R-XMyRR%&;2bCO`9*75@G~PXrBI zn)1l7yLLzAM&*{2wt&5R=ZCo&a$Q(*`+a3{_WDhaXQI&Q@rP(sNO5a9MLC7pW87Of z$-l3^V~+ArNU2JiZob%x3kx0d-`UeQGpEzSGAgDgq4CNlfFYx6M(#d9;cE=2JgSLK z6v#**^4`RpBf^?qsj37h>*2!dU0Wfin^PJ?dyZn6$X)5l5l-vGp5yoS&(G3C>diLw z26No8le3IORI9^-NP#`F+W0gih}tK_%t#D8P<|8AO#S{f{kH?`F)T_~Fu-aL=`X}i zXZBK`)k&6oAWMRL>rrGjM@t@fE}Ed~>y<<|xgdV@>C+@ znV9sI*%A$5%0yF{k*UU7CS1lk(^NO>)JHDWTiEez9PXo;+Qa_MdpQ2@w}I&y?%g#Mv?MBKIThF0(u84MK7DMwso-wj*v1ufSaL50Y`p)itw273 zwxWZYz5(poOl}_3Q*^$ewY^5`rsYmtvk|T)(bt|qyXV2lUZdq;$^orP+A_7%{>m&_ zja<^CT$kLCQMV?M^0(8?>7b*Y=SJLPFET@F`_n_92 zcuB*f0+YShT~VjbkjlEe<1b;UB2L!BZJ!Qnv-b=}g=bOifmQs+b%_psWcUE}G2uN|cSLW_PlCexWUsFLz7uXm>MCf1`q(9J!2N_nH`}4~$%I?@Y%m z&#@uFvvFxoX>GumjO{hN-*l9_kCvz3kv7*Oljw5V`8CzIK-Z1 z30(~O2jInQ#7V;Ou6ck9*^u+4N0FAT)249RYu9jw^_BI(&FeAihuj6m``zpXh1r9nQjGvCGchQ(g9y}jxEL9s<>Sx^o9CzI!lTSW1C8zH~#M;Vinp-lG z-$#yflMI`e?y*YQ@cWu?YRkjr&9>^?2wsV5?_j47PtwYy?ehvr_8VSc0k$$xGzVXw z-|szFe;=OdZ<4q)TL}-Un8qtkaz{h-1tj^V|Q97v1c#veWc~8<%#l74NbbhTkOZth=L~#nByEGLEK0f z1RB&HHg_iE+qk?A{Mar>ifreTvBBaf7fI%Yb=MGbsFw3V0W0>hKQC~6PS-)8SX(y` zwN`Ic;WJG{53h?Hx^t8;CZ;te-WF3oP<&n_v3urH5N`tdGJTgZ^DhgymUY?E5Z zhGkHj$qZAu0@MU(G9KMLHk1pzzB>?qajA?-?YEUBX7;wZXYi5*JtBXqaH4Q!?x??S z<3d{=$-+4FJ^aW|lj4Qo&QUuZ&VIgwQAem2hT;ZyLOiPa6UR+Fa1$KVY#AC_t2)#Z zTAR{lu7RbTv@HaocDC~-H%4}B5W%=7V`UPQIAB#x(pp(N@a5~n!}IIS?ctX)bTz&Y zEOw8G?LN}ypMNq|aaR~>DI@|2L@7Zs?Aarf81yp%5SKJ7yiZR6#uT^au4aN%SItxk zKL_myr${#5Io7(-VcaSlZygUVe)0j8fk9OzlNS!EXO>x`aQW~-lt+sDOg8bnH4yQ! 
zaJl8Rcvy6#WVEGWJi5V;Br#%qN?aVV8no1GRbFE}T9IhOrC$<@m8!3#N6JbyA^Pg% z!-6*&Dm#8xk7rX7i}lp^z*XMXuo@rzoodw9mm0TFqXttCESAkl8J%!VADs}3P{}4# z4g7(;&QT<0c>`*vz906j>^F*9#&OGc};Z&J_02il4ePV5I{e zLYC5r9XnG{ryW#91?a>Mtt0(HgQ@oGyJN(p9pOi}(utjtXv2XfQze@qt=M+>N!NGy z*t$t+;sEqg_UFPQ>k`ee8D&%H#+Cc)DdJTVLWjz&<7iVezl^4#9i2-jaimHC4?VXR z>Z*E7yD6yyRF5s7!_q;O_Rt2gx>vOMQHw z%@?}uK-4B%N?YkX_g-PG>!Dw*cMb4*tv2|da^rpHG0(Q2SLlKeiGNi!>1u#9aL z{F(ya(hs19X}$A0{FKW$j`yLMRAs-AP)LF6tbZsHtN{!V2i5>I*KVjKd%S+29Z}&2 z1dpj=1x+|quYFR(4)$ja3D(h{_UmckczWkd{!}~v`NXH^Z^P=*rLUgh`Z^1kYVN<$ z%5Le5O*!%`OqIAtgIgjklMFqt=PD9(Q7;w^xvZ}=7Ian5J{)LaHho2)ovNRw_W?YE z8Pw0)(N1Y|6-p-3!JO|gNO>>h(C?fpA$H4b+LX9o6PfOc_XgPB3nuM<<;PMb8VQ(O= zE~FdA;>ToZ&a@1SC55~h!91pEwWYp{zx#Xc;QlMMveES!x4Jcqc#MZ4s*D!&(5)Yo z-*N&Ve5ASF;@KRO=`;sjBj(A$co_1Kr*R{ibbe_vOfv$`B-+c7c0}!@zannuOPQVo zlJ)CCi^7RK)_j~Zq)8)VW?wmhv}}9Id+u-wbcO+r*c8e+6NTFlDbnX#cG)iVi%n(?P%+l_;4D_#Q*z@#CCjQI6*u{tQ%Q3hRDk z*o7gR_)B}d&@=xmygoav$hX`H!93XY9UUNFVdXD{`-e0WGnS61WU)}cUsz!v-g(H> z(%wH4#8kUz!@W6idzeCk38+aYTm^`2mhAu7_`OS9J#FuHcQk*TV-NNhvN-pj1%(Hi z1U)(VLO&zD&V71M%_^Gz|Bt)&y%7C>!d+Po@XMACdB0u?e7qf9?4g2o{_Y`Mn}3z{ zHGiMneZEpkS|XlsiNWB~x-x4kdqmJadt{Ugm;gQ>JZ`#GR*bqpfksqQdwy@jOjpzL zu7J6^Ac(0FgG(zHLhogQMWI153KG-F@cOg{Y~t%#wHq-ko!5leV%t!3c`EKloH`rh zQhPf&f}qIQ@~lkzFYMwt;o1WF=I>7d$vV)w z60^I5G-gL*E&=f5Nd~@XV~fUGJUn`U5b>9u6o(i$6tj9Zg%t68LBGWv^*7({>UeuM zm9}+CZr?s>I@c(fOFnm~$a`eG51F6$zx^be4*P%!2du#D?9~MRAoJQu+U{54vcf$O zWe(csaE-%vp5LHDppvw`-c=<5pWgt7QOt+mV{iDQH&{H=lIQA*?bHg zTdIXCVNt5V$%Bx*ov#H3u6SA%8DhNuV~MU3pzE!MttY+2@k#bhjSUX8Y7;d%Q%iH;5~;clTjL{7c$6;unWTRaD-srNMyJ?5{TTHdL`Es8lUj zn13D#;N&2toQf3>=K}B)FgAUd(8qD%OqXYz!jI6@S7955z5;^4Xg{@FHUjL!8&Z73 z)}`*^i_PzA&5a@ z=>@|iz7C)ox9HTN5d;DRy-0pDpaFTWG|bu|wBmn{t=+$>Yj!(3A>}1QR0;h@0#m$A zQzl{;EO_M zF8-oL=xrXPHCMFEe1l+#uLrB`fw(NW%xF|=?o?X3ppeU~8cO1bzr|fi$a~TO4|n`} z4i>E$pU;KKNiJF+|E(Ty8`rJ1KvLYk(BI=yzNL>XXY($v950bp5z(~w9d32`svy_? 
z43Tq;j^+5L$q}cti(v8g3cK`MJQ)VU02{eI)D^X~!{vrQRm~5nY*nA_ccphPU zp>&S4FyGL(;17RZCUGadLGD^zIzjFj9*!Z=Hgx$NE})rB=~qiJ>NgSLOp>~9)ZTui z5TyDl2!sFW9*TK;HXUQ|4Y7`GrBa_VL~vGH08Ec56c(ixQQyv7{(=kt16!aOFWl=7 zicWUn=VHEK|0Jksdw^+ZuQF{G$NZ$Xhx3LEW@7*@IM6hw_|q3nR+(iMHP1ybCx$LP zB9fz5bK1(lE|%!7vY3ED{&(omR#Uobx^uhr@9K);_Z0U$^>9)vSc#Iq zS&i)d1PM9t6#3is1>^L;YsMpddjrpZ$6LkxG?1!oz_1=X4b_iXK?|vk_z@qi-FLBF-A3Rgw(O36!M9|#x?soGgq4p|C}`U;ZLRzaesTBMs6s*%|KAlJ!Z`M|;0#T)HTpFbeA(A^1 z>As&|LGI@Mi}X-L&;oXI(f==`N8=x)huhhm>c22O;s2fKQT@yGQ2(3hIcjWYZOd40 zs1j+*mQhbu`G@JTNRhs(QgDDv+@P6FOsKCC>YRERa>=@E#^f79+=w#$+YNS^m1)Kt zLLMF!#F*Z-xF0roTMH>ySC-fhxVmFxA#AHyG*+*gu+Dzo=Bkw9jxo2cB&2qi(4-#w z%PcX{E(|9$hlAd)0n&sc4LbGiL`yPh(-kEI0U0xHbDd%BW+NqzZe6;4{+qS)1F^8= zPeLZ-*|y(@ZF#%Bdi1-N>;>@MJ!=mChI)iGDqnerGj!*1(;s{Kb;|f^`1r}(a8N7I z)XE4!q&uZ!l93pnbPK7T2&Qs#=`lLXd#XY8sgs3+l{YIZ4$D{cfTyJ_XU%MSr@0tE5~6d-^v|2I`Im0 zxqnVrOn8JkC3!2#CwWV=aJ+BD%&)O=ig+45c@)%@e{xzr2CR{~SPyN=a}u#`9-9baLQzBDVC?q*qT@~$$pc-Nl7xvcmtU$Jl&o$GPe z}es1ZMpAj8t+D_>83~^ z!Sn+5<{(KiDWLBo5UhT+wDcFZ(rN6DYQYgu*C~Qdw$Z-w18`|-t0T8Nkha&8IT`0s z^O9@3n_?vcn5pvQx${JdES%eDR-9jjE0l^aGvN>=mZ+Q;&o91bluD4Wlu5CYiT)+5 zSs2-@L(QlpJ&r6R7kullo=q||2M3r6b8zSJ!waa>2xuJQDps>f$Df9&q16$U_(?Bh z9v{+elq~jRD37w$o3&k?u}!othkjHCv#Oa1rTE;~EFAH;kCbRYOsxLU znSvQq5Xfwrbj>o;D;P%Z6X(ua9i}-My!E#`JG#ySHkz}ys$+&hy9k7Z?UVgdW6wuI zmi|#L6L#LWWA;ndljN?MvVgcASdQUplVkon44WV4np>*onrF%);kvG-Vl+$oG&c_;!QM6<6ki4C_o4K$Vx`08S!r_> ziU43hBE8evSzVHdp!UN7p_VItj(qWTlpJiPE3L-xEwM})axATibWTpGSQMiRw8Y(T zRzk6`!!}sdGNp;PbR1aG9nT?!7f&Wza%>%tAenRFlO^XCb4C z!?SrDu@?P7ZOgd$?AoY{3be{*RrwUa&lQYCTUQo`&GTW(5uhf4M+=S|P?TGzm(H-jA_9wX?itUyE%0iVef8V6QA`9a_DP11J+@JF zj5LiXgw{7&FX*Cyp?n@_tv)z4fkdq7z>Qb`1FjuhjFk#ei`X>K2J6&7OFj*budmN z(>~0b0hruYL)N({6w=H%MVKksSTFa#*|rRI#1o?iF1C@)sc?D|Tlw&`*UZE)PD5y@ zN=Fq^zt-lp>OT8Wg6oq%VX9e)a*!?yzSFmQK+GGO9&}Jde}bmee7yHx}d>FN2LwQPL@7` zdK8jkdsAU{{axMWN>ge^e2rr*t+Y~EZgc0{D1y_viYw}#RNajS{l*Cr$I(I8I!(VC zJKy~{Y`0g`W3tDfNADfNg z8Gx9A*!A3wY8|w=xa^}vCL%D-SW1MM$k$s5IdE%f4(ErT)lmT$aX1U^DSA*?oqz<* 
z$55_TR%$4fF`4Nisc#f?ZLM-T+TSuh^8@3w)LO+bQcDe{ZaHD$qgw=YeQ?N6r@uW~NO3;I|BvvWF+0yNv2_0$PdD7BTjxWD^Kl8%qh-Bf zT&20f`8R%KjRN--6v+E`&Rk1gL8nUzbk)D@%GdvxhM+lEpqUulSN3P~XP2HKG)e(J z>`)jkZ-MhCar7^L5;LYcWTn*E97hP8zbLTK!Z{0rwi{8aS!;?v)8juV@R9GoQDEVz zmY>Y^c`9}yYM{dF`qT^5p*8wW7roR)k5#=&H_M`783;|dVOY-)Prv$)c_eGc8kPg-A<&EF2N_rw{E=pwN>eQ((RkQhSm}=y@>+$CY;}63en7>k>FiNjc!6FiyE+I(! zTW~RTurf94$d~}*GtO6egcqk6hKgtZb=y7tuND_Pkkj_$njTi+Gq%w;S_-~YKL=y?H%Y&kTV)_ppHRn+3 z+V9NKnJYbZ3h?D0e@1U@AYn$X@)*>jS!Z%=H{hW}`>?(XF<`4SFlU&CnzISzepVcD z{JN7y>4n^(w(HM_qKg9ssrTbUd$ErLRbly@nMOfXyRES?$f46%9sibVzN2@@`mg2T zDaQX+9&Wt%TrR$AB$TzKiYuZzIoA3HnbXLcqHQn{8>8uWet2%3webJilhM+QPQ+C2Hv(~F8BuZ#o z+WU2MRwtl6-Sz3{E)I25y1a@eDr(cvE4pjb(OK{NO>|0)c2cLf_91x^WvVHULIQ1H z(0A^@EwyFn(zH?O;<TDDNc+ATbl`E@mbI$`2GjPab7-hBsu-*=HhqtkfmIbuR`tI-Ewm{ z&4SGN#*@_3HxTboOx)wWH1P>3^zsDMkVZu~^+*q?u!(C3Gy>Lt%fOAW>PzceyJD9y zAsJ<^q1;_n@WzftRE5@2>(=M#fdFA^%=%{TU+W6l!N6|fKjd1p z>XrN-o&EKa|5iRQ`lmV-(NcS8UzPulQed}J^&fPo)MmhLf5~8&DxI#4iivW7T`I`` zQ`=hxMY67GyEu)zdjpNTySuwX<22g1L*wr5?(XjHQqZ`&yW2;vwdR|>CuYu>I1wj^ zs5dLCGT{fA1-D$!d2F{1a;LzT3a`2ua^{g7WESytvD1)*!5Ovz%vkK_yL;d4 zc6~JqVnghn0~|k%m##LbEEV>wRChf(X0*6rx`a3U7j5n$wxU=&FMI5i%*8^=a?{3N zSCa0>kONoS0oxKY?XzE%pq3SUuxwY5j5MgzxO%B}z+Cr`yU-jk>FiE+rS?oRv{NSa zs@Q+~SrA1QtPv6pb*c?B$nrK3$A39>GID`hZxmF{ZxY6lGScpjfNEv{X1n+6jYt@m z+`|er=`$|5QBncNG!$V{bG^=W%myj0*$bx{ul08%J(5apqUl^jhCnwVH`$W%!D@@7 zuEZaBhrZN^=ka2f6j(Nr7--zhF1w1LkiO}Vi=fd>Uidc z#qsZ#3t5#bWZju-zbFi497M`tL1mW4tD^h|T+_?k(zV0rshn0f(+ zZUJUYf8#-rIB0NH3@(I(Y_OU~d3d);#^B0&PvE@E_D)WFf9d*2Ja zTOibmA&yh-z24DhPa%2I#a*6@@w-Ie+A&Zml;AHp7~-~zR}#Mxub>uJdM4>qGz=&5 zBg{{1fsD=k(#Mfm)}TuSL9izm)Kb`h#rBSp;|{vh1u;j^MrwgerjUfWipCi|gZG%{e-m*-;@HX^tx)0*Gj z=zOW#!u83>uk^x8BGkdun6@QBL1TQ$F&VbCXR*gY8Iz+x>cw)z1#uyj68HPP!-daa zk>x4X@}4jqghSp!SJ10DqQN`I9B5#uEEFXJrN*TMQ@^)3)H)>Lm(t-<_)Bf_e&Dzf zZnu{^SOmbOWA|V{eT=Zc`#rK{Jx&aMzm2H};d^B5j~UbYRbqFSr%y=bkgG3#tAWa* zc@m@VJLh5#?PxPT%vH)Sf-A;U_Q*GCOFMGWEN(0uC%W#U(ySpK>Jkn6OPc>Rh8 
z`zDqeM#UuM%AutmqKu+JL^LbK8PG6TZP}I`gV_$!BmC>yN}Y&~#&2h3v+02_6Bd-j z3N7i^$$KL@MhcdUcBl95@?(^T?|`T&ho4Y^rM3we(PU$F`uRTk_tCSUuzO6ZGVU@; zo|Q756H4Cs65hShsnKJ(0XB&FtGBB(KS>(|)y19epD~Wf>T33y_smIORL-^4qJ>x3 z@M%#zU2rdKMRw*?&%mx5)$vvL>?)dX+K`@l zxxU0h>AOBBxew!FGhfK1=O#;qe6lAB-jr*Jb)bjFabrFFvTz9YmKf!F%MPnp=@JS_ zk!MJOj!hsg-~ug?Rv?k%dnuBY3Fm#`lK15#2^?VPS$R#p6oTikoxA#E5*u!+{f_t> zISTP>iJn~*`Xw6@$K$WtS@SE}Fb=4AT*ko4YmGAl@xTwe-YkpR44d&tvOu{tgbASw z=SN;SypBhgrn>06Xx%7n%m*{3e#MXNCou&lCkFyO1iTy?4o{Z-zR`0?BOmNGH6J(K z$s%Gm#i&Xy>N4^i5o!5&y!~{u{c*ovied@Sz8`_0S^QhqKZ?$;2FRB!OM%LhEwjj+qFp^Qv1$8mH zZrW)%j4Kc#Z_=*ee;Wcw+xSF6 zZ-lOQJy4%V0bwze>XA{F@0-g&43Qx-ts84t4*7LwN{9E`9(&Scc(l}Hu@Ac;0xcZY z@a+Rq7zgi6e2ZXW6S|E!R5eq_X$+GNa9^aX1FTHIqnaV?D4VS-7ECQ78|a~31b-Bf zLrnWU$gwKPwa3j>;rmtl`+4E}dApYqUzIm*ca~pTyi}JQchIg8d8^^z0p2r+6f!Hv z;Hlr1Cj8m11@4Ts%Rujqj8Pxvg|b3EEEVz|-$5XnR2@1(f%>mHzl62>oq68%Mcu{o z22VL$C#_;p5a{_%R*&CN%Ai3;GdkNm<79K5ETfpc)l&H5xq(N*3mnrn_DPZ;L77w6 z%hvvpaoRGB%MdrTj|&>xX3@GoiN7MMvE}o>YMCowk(3ZJB?bESiG3H;=CSqglSJ|! 
z{gI*JX-d9cUD>J)q1m7fv#s4FZ`qmtkhWKLzGHFDN_BpGVue;_HKW;PRB$FnCp?-N ztK5U13^p>2gJ88lFXLg^<3`hwljB#jTVWf>s3LUpQnKqhY}it>&rlC>u(>@zsb1#J zH30lL!)1q?uKpWcdJ+#>r)yzCPR?znmO$=N{z&{g`_F!Y3R-;E-r@@yT)y|e0_`mW zUdHfW>lormUT~7a$>X`wjfm{hd7Y)P21H{{yBTprSBBqB5{$eF3@XO%{V<2vQjCFn z_OEFN#XLerjmPx~!c=0WmKb!Zp_!RK~fbdOo{aVV`mNG#+K6^;+axM7}eU8t@isss20yF3Z<|i#Z(9h`R1i9_s8To4McpVx@M+3ndxz# ze4&N>ipX>K)!KpLqiGnYJib$i-9YXkH_Ywg{BRvU7N%g1Bq%i)s|E_Oa6(wd0EU~V z6E&o&iT1E=#cz2dB}P}~x*5lfs*^gR679ze)4f-g$q|dLhiRU}<){zVO~HcgA&UoR z*0%?O>EpTY6!#5j`UQVbB;};WblB>d!{Ctr=4H&smW>LyhWzm|4B|w1xYDm!$#@u& z_OZf-6fAW|S}Zw=a{_Xh2|IZzAC6z_bj1ARDvh`rW?_@@P~LseW46GS-MBQ`@ud2- z8UI)xYnszv-W4DcBHKx|H-JiTP&<;JAiTi*Nu5X&7tx;4iUX%Cp3#dZ+xU&&!JRhs zvlR(IRot-_3$lEQu9)Q;s(dK37^`f6?-t$HM_1v>{UEOwwbr3M;gvTXH%AD()b3B) zus1;G-3S&lK75KU`Q;lBe0AK2T(J4T#^r&Qk?Cw-TdvU5^VTCW65h=VCu@N;2s#F1&U3#l!9Fmx+e#C5HR#47 z)RZ9LE|{1!E-GU1yx`!QG*}1wNgCkO8==Kffgy0n68zp-g|!vicN$=SG%)vOqN)5F z;qxAH6QjA@&udv3L1<4eBeab|H1~LBBp5cS$L`Xgeec!povDl3uS9~yS2Ae1&#m2( zWum%Uk&u^F6od;S=pdI$K@6Ahz2V(HGBk`}DSxvi2BXz}XavIUZYy}!d0U>3)~Zkt ze9yk1=%*QJ!w&QSfcMzx-YaqMNYPHp1Bz_OJrHZ(9I@sqn1dlI@VrZNX3DFF?`o>c zVip60YO!X^i^W%Z;ZGHwxc}*skBI8x`q74ooI+C%$`-sZE~{X~5_y?>o~;oPae6%wi-EM)8*wAg#h-R0sK4eXkcdutE<^Z4On0 zTyddNLRN}Lx=Ivy%EC3VC+#r~z`Osrxw#sd8OodTj=TSO=@Occ2!`o6Zt>rn?`9oBM>*LN>&Y|!gAEX+Ak)zu=BjN zP<>^=d;}RAv7RCHclljNCzThrM)ptpeT+4`)zM^4IVsn@o{jJ94YIO!RseT47-Mb0 z=KdWtpZb;u(m>-L!fgNdzq$Zoy$(9=bq_-XL1Ej`Ei-wRsZ+%+gKTao^tvG21jk_* zl}+QhA?l}T2{HZG8g{aMOS5=lKOZ1pA5C{3(_!&$N8b)7(1QyUNC6TK;OQf9rI412 zQxSyuYK1Vubu$J2VI{({2S=SDZfJ5gDYNa)GibKRh(zBDZ?1^Y1F@ZX({GeMW-{>< zv#uZij%wo6dc9Jtd1BR!i5`EYoy8;*r6+Ix5cUi`bG=OL!RN!}Xlk}fl{QRwC?4M( z?YI>TZvPdE)pXiOB^iZ2FW(K>=rQ)Azr|D(eZ;KDt?vukGjdl*L!SW1AUrsZs)0pH z8MvJPQNDu(+ZC?*)MQ}`w61Vd%29yk&rzM)>Va>w>Nge7m`{RP-a6$2-nGhKudiwf1YBJ$9}hctwx-G^!6>~)2N%;jhmtP*D}JGmca zwZBD<5vP=b@f0d>}9S=9t2MAgbd zm@HGRkS&AA;pe4fs*8XNA=Na79HXZc}2vkFyUvS~ol8rlzJU zA=OODvMesX?-Tz^?g;kjsJ1gS2R`^m1Qx^`TvSnUH&QbJ&)9x z8E-H0nylKQmHml8~WV^qu< 
zE`mu%-?}L0K2T^Y;?!1~PBm+9la#RZ!^~XnqV#G?)R=OKG3vdx=;o&{X)ZK)tmqV+ z4{=4ra2dz7%OHh<$~A-*D0Fau=IJI(OWgWPHmIfMCs#F?mU1>Z69XQ!x3fTJN24V;yJ0bS)5&OZ7t&#Jbc|w&w*hd zf&`GDj@o-M!P^_G=);Lpv^5A#vEEube@S$MOZwmu9Z^RZOBf@00TwS@N6O_f4U%7$BFtZjzmp1x&yPC78s9R6*JUMQF zVgVg%*{JWRi~Z@{1}OghPTq;%>w~;=t~xFBQ=Km=-`_O=I64KDeP6YMz{~%Qs;YnW zjBL`w@@sE-OY7AUv0M8hZB}dOOHhU1YjEIC^)*e%Ck3*OH;|m#+k1R35j(;T&u)6s zXTnG(7o~Xc4 z6YlFr;PQNIGn~^ZOI)z;Z48VUkk89tzR_%aH6#k+gPXjba8AiO+?VRq5o0%TA=Uc5 zvG?>mM%u;+Q-enE)ObUg;F(qfG40MNfCDWQ|#C(bL-gzD7_8lHhH|LGBxp=Uo$=#(&rx>B#W=Z5s^Z zeQSyx=JrH1&-KNU>j(qmM-cqJVOI+PfRwqGLL^M;Z3n(=eJ`pnE&elDYiryV@l^Pb z2=csmhp&Z+p6>v|94_OZld$<%PT_<>LR3$I{FFRIQu}0s2y(sm8!wn~mw_E@Y zsAn38yn&O(Q24wfkpDpBGS`tOFRX@GUm@KHrc@}mSIIWE=#H*tGs*@v@+W8F+*>xr zJ&H!GFTusrzrBg0zy@!n#j%4wBz$YRKdTZIw`tqv1B5wtYQ9Bp>cCQy{{@jFYn@Cy zByK(3D?;P@77Rz{0-DVpi!~-~#r8FZ1b^kBpEE>JONTu<~cYDU)y+KNg7$hsyNL$H&LpH zqh=R)_f{sznAte?i9~VuZTCy1plVv|Y)|dpOQqsr@=QlJg!D7{m?AP}&~9{H^qG82 z)ndoT7~8i_oM1^0QFG;2+ZS$Ugxqe^*8Tj%?@R;7;h}J^Y$%luQ~7|@xGTGY>y(-> z(8=12s{df{-jq9pX-DSUoPP$wpj@g`7zv%D^_WD~iM5Y1z(O8*R|_aH=tT05Ibzm2 zJ#uom?JhB7vk&V3^6hHA!H-G?F|D2;>w|PGb^Z74Rp+Y3$xG@| z+ymT1?Y)E?>lQ-5VZ}^WUE!zuXizi?sjQQ}*n|R!BocD>9D%A9r0S2B8#!28sFCLh zVCH&Boj*Bs4q;qZ6Z^e5&1Ge^Dqwi~CF=T+ilJc6tXM<$@#txjnMO7;td29(07+$u zK!Xx>;pFI2&AEPKX2>v4M7+qjz0H$wpOehp;ef+Zh_#R=rE~ zt3BRwRL!DkPQCiyP|qeh){e(Z_PeGzZl!Q40Am&MY(Zc&=wf;P01!&l>G=zCCkIQK zFhdJlOBz;015l#Kimdr{0hBg~8Vkh#d>HyIc!H{V5XRNG|L(5ZS~M#^r&>nFET7{8 z4X13DEeMK+eaLBZVoz6%l)&K1ljV5hXO>qkGj}ElF-;Tv=AK(eZiDYdEh}dGao+a@ z9*J3m3+(Nt3N4=J5v^0D(&69iLVzQ$EKo+Rhs z0EfLfa|TyV1l0ZhZrRbc8`Lp1_Oz7)~&ev~^e%2Z#NqNqC)}B-b`@W3AIgE6MY|`=f zbc1uEHvDtw-}yb+SQLSm} zgBFQiiu8KhMf4HkJ0o>1J%;hBQ{^Ib!rAU#qkk{eelx=yhR)zhj*?%eF;xCz04!!h z(}JTIimofvr3LSNam4=+l8yh&>-xk2)9}6I1{6%${gcf+=#ERzg zKkVRUkB?!NOM)k`7Iafr-@ZIcYLjQaPYZzZ9HiPL{TIlbKg8i;M( zRf^D&tx!VZ4H{M_-xEZaAm}3+b{a&`ApNQFjoLqVY#!z7Y zYfjnNFN1nQQxJGt$LTJ3dxrQbfrNXZ(75^9tHQOHb2w&+-|l8Wwo3oj;fIGPVG7+j 
z{;v)%TxFG`H$5)LmBz4TsEgFpzfx@mM$JJB^|)pRKgh#Vug*P&sdS+bD>OK5&s(Mg z8}Xqh`)DuIdK0m9)qfeIbm)2ism7C?Th*Uj+In|=>habwbm8vps758;SFd^GTq~Ue zVqjt}LC*4k_{`rH&heR%zrgkMl|<2pYh_rKAMJSYW~^|YhqECF>ks364PqJ$Eay=(H4y>rIfnb=A`OrTc$N<4u#5-< zx?fjjyf6?{lMq&3%I36s7vZN?Eu4Jf$I)Q!wjQPsw5`J;MUJsR7NvazV?Cv2xgADf z9Z%3$j-U~=?`ssi>V5R{;zcv%H>3sp)2m1Q<5hPsov7*_yU7aiCb-o(es7;>1DEe9)Om-!2*B=XZ^i5T4et)l=eJy z^W2v9+;;OU^Va@u3*-%Ntw{UEdxaZ|l5^OMROI04L&{7O_%9Ou^gj}P9_G}7{HH`8 zyZc9?hp9u4%2)TYrzgCf!-#UZdOYuSsycJuu8t#SL6$GRZ`i84;#aW~N6(;I>vOP^ z&2SW=%QLhb@mrds&u~cNb1otIIXSMOyAsi!Kr*Bp-e^k@_k03-?39)1oEwK;=_c;~ z*mQd*9G&m`s*QbI!*zYptAZxRPL6X`l&+fQHEv42Ro(eVqvz^G(yr7iZexZWwR2u- zy*a#Yo7ir9p4A%Y)v7=5JWrmagB!UxSDo?1N7H9|F)o5#K(&wotDEI zy$$B>8j_@i!5@oYiPB?LU--e-uB<~P=eg@DE13F>x)Gebio-K|c6|4tg{f}7R-nkNQsY$qZGB z;$tk2H8zfdFWX()ChnIh0rWi#AjcX$${aOnIo^AVEMSZjkpXZJOt`CQh}u;KwBQ>2 zsd)@56&3Bs7SN!F0g**y1cGu*=FM!;Xo@;6x{u1*-lq{ppKAT9$@NIHXx7*BKpPPo z%B)EHBb+fx*QV16mPl2h=#HvDy29(aAk)*Dcdo0D?%)3o(CQZEf;iTq;8bUe*DZd zXF2;Uxv8+FAy>4{$D1kSg~{fD?Hu;}d_HIQGEDccrl^?~x&D;vRS?1pi48RR zpVu0#B`}#~Y%=-wM(_(gSv-yz@$_0D%1B<}q{e~2n=XNtQ~WUEDp1E@jtt=Db95_a#+iJibR34Z0F`=6|_5fLLh6jJ;4EBBJ23xO*M(#M5hBVmT%`) z41O#^eL$**^1SY51&Apd?cM2m>Q$aN@+j42@J=8sQmMXzX7vQ2n4O#%U13ur*^i8m zV0i9EqMCtyEVBD{OnufBa8P-Yo*>{?14Lv<(OJ;2f2#EzutYSM*`Mg9kUqd$woo~q z>o!n4&_=Xm4W_r5`<+t~F!1$ibxgi@)rvR=u&z-()@dTabSau%ps73XJ;hy&;(MqW z&gxVsJe-fWX7yILnFG#su$}oLTps{fE^0rv-{M8rrQm%0N0jHkDGhWBiTo;<$fc3& zN?EBZ5<`7mTE7O)4P`W26H&t|)f_$dnR*RX%K#HS} z=x%uK|Dp%gStb26EBAV>F%HcUk@IpVEz+zxE50~lUlY@=<7Qn83Zg>`0Pg?0t^lKl z^C`*erebcz8vYJI0sJM&qi(F#FI%wrg`P>71v^7pY=xPHTwu`&HJ*nDVa&i6_D|>r zI^+bt3#RrygD<>d0GQ|s8I((KI+N!ELa@n=8Ur9e#?l->MrS{j#>}o}PXxG7UYLdk z-nor0j7?q4B%RScpln=Aa>5G)6SZ^7nmf1GJ4Y#_NT%}mQ~YZ8>>bZ8Zrc1iyURNx zagLucLcUBgP55oxGZm>E6qWR{I!%rg?zN$tpZ4PEWliR9$Ro1Pbq$)N7)e zz6(*TMC~M^0`1hu@H=JSu>J<}rfS?i)7q~kPfaveyS|I7vXONgIp4f0C$ddtj^6Jx z8)$4aqDx0Ml?^pAKi1S(xhuwORLYzHj@WK?AQs{YZ{b!Z!V8#{Q`_#d?721Wh^bt0 
zvvdz9@yKw7Be){TGhA}Kw3jXW64B;0bx}_urS!xK_h6>a08h;jnU&n39E~QSoa2cIJ1Ssxt@m1K}iPwlpebcd~9Z*wc zFB8(|&@pG=If-?&6Run4)L5LWmxbr-Dvu+|Ao)Fl7Ik|kWsqmOR{MKu03rR(A_aQ8 zmBo{pjIJ8IcS#NnRx;u97J+|+Ws)3sAilt!i*$81`fo1B=L8up*3~M}=q1Sbzif_! z@0J%-;F$!OCJw}~A=8401%Al-INM*e0M{K$P~oAPfhCh3$NQVeX6mo%PI|Ii)INMH zKMb_cZCk9WGu-v7S$zgzsv0D!dphypIG>w{eE>a9sIgD_T#PgR3L-j(+SQ&`l!Sc~ z3}r~6qKcA|mHxyd>2*A6q2gQN30MiMtKBlduiF+1<%E4X7u}B`qkn;cy-Sl|3IRq8 zDPD+f!BX+EnLlW`9fh*#{_aX-ToQ3b{DU`{=#@vI3+QOEQ6%<@P0kHBTp?m2PX8Y) z65d1p#IyDH|7wz;j=>5X{EJDF{4`{rL<79l|AjPdYZgVSH4PD#n3>rB!j)+oxvZw? zfr1bbm;z9a50ckY3vLnrZ*2*YISNSxQnBm7a3|>=6XW?@$qa#)*x_aTv$MhV7#I=E zM>ovjul`DxC{`Pb4~4T?O-|FzUXLu6uh@9G5V1TNJ)vLUMmrzwzfZ&!A_$cO5Yuj! zYvGEmGjiZTrBV%H zkRDsoQ`Z}(Z&yQGx$H);%H(PLOq7{5K9b+UlYpjzo^unXX@s`F{zC{x$F+wE+O`FA z?$lzQppnbS$W1DuNvD*wH^SJvc6eH<=ici@*2=7R$)%Oc(sLbCLZ_36D$q-q*aNt` zW{$^B`3EOq>;ty{GFOTV6r#!#q6+9N^?rP^QK`n2$Rz_Pq}n9<+ z?uj|4gtvm(@Y)mFxm7Z}pULdm#eJ(8DuvQp1$6)I!!Xn}Nzs9Ghtu34r!j)jb`Tko zTf{oD+7c9mRg~vK`m*wpaM`5`uu}i-$W79&wuJNp3FB)YHyTbXAEtVkg*m(G181IZ zF0i?_WAG^OFi?m3P3EpGx`8Ik4cVw~3J4Z#fBiEeLt5%eG~WC1cT`N#BMql|Axi+5C!A>I1nKoDY&7PirT#09hi{+-Gz>iD5-K{ta zC9MXvYbuIVlDMxH5t{tki#hN|enR)7%tf=nI@HIwfkbTFhwIoz;cv2of&vC6%mG#t zm$Yuhl6@!vzYiRDf$D{)MY8k!f`9_?JK0|Yd>8QNygp*xg>C=0u7rWte{m)FCxeMc zbYv>$SImp@?m6MGVjkUj9xEJ>z}{bPuiu6cvS_Dxy()7~qcwHjccS=?7uZZOiufNp z^}sIhrbCJuM`m!x5dKFcpiqk&zq0rv>+QteA)3PQ2Wmx zsK051eFHL-1k}i_+&+s}{|tOfp8WQ21K+CM;{HDle1qiI{q{!>Svzfu?)L@%w^NE6^YS9TN=m;CPxVwwDC#NFZXpJ zUMw`wPqaC!d4H52MN}#-;B+|YoG`cA=~%PW>zb^~YjVyv@O zDa!sg1H+xx|3EC`_8!K6bauu45pBfh_8zz|{?LzolA0Z}=IF-iaP8AFJj-62!8R8w z8*>AtR11p5F8v*OCXwG{A4Ng;~63mg@UN#CwY8BKRrGGX{wfyfShsI8#ol4_H)Z=Blr%oDa{8WOY*K? 
zT?4%b>>hvy?98?haDImC5Gkbnn8ycDpM!-jxfb|7B-hm}&gDQ)@G{YHinaCTgC(eZ zO{SK8W@3dh=nod;0v*EH`?I+*wquA}@S3fk_PYl#Jy_Tl}P7z@q3e9(2l8 z6y?6qycd!uku^3({%ua^noeBkUx2$qm`nEkV@{k|T((gBHR1-qg#M2?F}+DH8|Re| z1<`*wA(8I;A{p2Hd!NdZ+!nLnyAVO*$^1^p*R&uT2CI-GYck`|HdeUlZApuh%1aby z$K%q4$(E$g#HYhn_^SkW<3)%gazW^(dATJpV9^vGJV%cEP@pwga{m(eaIKp;#gSbQ zCSYd3%M=}LAr~b%!P6+M$hT4&Q%doN!?2pB)e#+?qlZ4{VEiW#>C>Lbi(or9tu*fI zPR5Z1TS3rzjybCZcEIJ}`TpA#Bk&{OalH5P5Ab4IVWeS)R1QnfJCj}LjVd@2=jRLl z`1fiCn8x9z;PHZkrswg>nYI8KZt#uG3ts_~2V%dGH-a(R%fE)xJkTFNG3uyJIz=he zl~P{XhP@=dAe&st6`v0Ryz3g(r33ELluF{A9yDM1e~amHkp^Wj&bd6!l>WQux> zJuSV9RJrUbVvouJOtCYV}+Z(67spR%Su z&JEr>uCz6#97KELu*g{O%M0ZEr+afB2uX?vE|#0TVq;veGgcRgHGpl+*z&YS=akW@ zZcKd%;pINPqW|L=;&>@ARYQexM-qRb%UeWTb*~lvW8wF8O=qCpYjN0}m=BhkQ?+h| z{oNT}S=iq75_OV@ltH`p^+918ZSo&(L&}uP$Y0zBG5bgA0$)J0egYW*&g4nRuq^k+ z`-7prIEMX9yM!VZZgt_zEq5eo6p*WfcTyJ!3{-)(ya6x-}#w?I)qIAwO ziw`;Ya`D%at={`nq)EWl&19Q5Ys+TAm+_URV>ts&L>hgwB^Ht0RnKY>&x`` z)-z)}fvm1C7lk%h^8#)FVOxRnlaY@;U78Y71t{VRKLYR}5fD$O5P#IM05kNrL}+^A zOo*JE>Pis*O)(In$KxfV#j{41rQ^n+3|fYu)Fp<%yw7^5yH0LB&DgZe^=c0!ZuAbB z7#_a~R6Vc{E8#Q**zzxa97Hov5G(#{xG(?R&S0^Le_rw}}Wt3TgRS9T zyd!ISgxX~B|1|j$e~e%)nCxw4YUA==?o>-X{bTz({j-=J&kAQ>0TJ>s(8NLuKsp^J z3}rwuwrJU#uLrGH-xHn-3C3MVpNWy37fjOG%gXB$ubJXW-JUqSu$eaV&!&Eg2*?C8V^|4|Iicqyq z*fw?UpTTMbLQ!l%!l#KaZWES!5he3xZ&5DbpO1%a4?$zIi)){lpT;RblXhiM_Ud*v zfpu8Oi-FdYNl|N$kSSfldaHx&&SZ#V>lwz5nkS^Fc&7&r936s96$ihjeKGsMX65~X z7AXpRN-m^0_sUM26SIG+=MKDv^sUK^V!-uKV8FJjwmXu9Hi31Rmj#y+Tfn3dL}F(r zRuUCsm4LoE2Zs^==c2{}6rv)cPxP*_9g%>xJaEsu&5V0_{jYJQO3Nm~coQpN7yG|} zHV94_SRyQt!zuX)hyESh3|@Bg>czP3yp57)qvkooXS0l0cga>*=n8kS09BU`Xa3i! 
z{;o+2rUJEqwDus6SGZl)3;d@t!TU~I3Ba=+iu|hQ+??S=CxW{>TQ|=U!>PCaDw`n< z!X6`xZv*bOm7`0Oof#X%$>2^4ntn^dPO~xnDtR}Sv_R)U0PHSxN|dyE1jR{wV($6a zb!WM^rK5N3T^2JOQg3h!W8*KhH{yjm!PIZt+Z=0YVg%KFPjHQ&!LcV&c{%k(uBjJa z2v&tyvsJ@dU??OWrs&sTlC@n=Xz}>KUPrrc_@3y9Y}fu7;2)w7-_5d#*W@DH&6#Ir zxH3eW$__hptJY2$wd~p|dOM$ezJ1Oy{nkV%Af`~?xO{Tlk+?oxB=*)z;4oBp&AV@+YsHvSr8qt{8LXWH-I8!W?;m~RcHzATKNyW2xEa*n5kRy+%%$oK^^nc|lx zr+DW{-}HJJ;&*@13b6fbmTW|^FJh|8v=@Yr->%kT8xf@5r(pqyPOZkJP@)OQ_RfZk zOXsp7HoaD|aXNG4EGtJd2t3p4++n3vtkbE6r0gHd%}v^kw;#-t~$m_Vk?J-N8sJc!TCO9S}?PG z{|fA+QX+Yr|5CcrOd;#RdHKBh#onE4(Xv8QHq+b==<~UKoff;XV`&P_au8UqP;Z$9 z0#D(VwH{9l$TGpvWSEm$?Z|Ra#8c<0)fNPvm$96&@?Oju8Y_4*Ne8%$6EDxN%m7($ zOPf6NR>j)39^|BRYjxj(Hk@y|Wf$hTtd@-x3R)+Y7K>}6nr*4EOLR+AyUaU%9bPB| zzIf+vAM#S5V5mSKKwp6F1#HyvyBNM&fdT;m$pZr+elCiZf<_`5diD;kmd5t9Mh0g~ zGIA&anB7-8K0433>$ODU>f6BBAu@aYJ>Li51YD6U{h~2z>!o_?>+8cxuIkyk!b6-5 zx7Vr+!$UBnf~=f7)I_k^^@i=N6)6w!Qdm~A`CVms^5)bL?k9s`l7d(|~$I=S%j6%=t`l^=~MVu7# zvSqW?H$637G_8CR@%h8!2S})-WhG@WtZ>t6MTBZJN!=ZP@;6d7SD0!!gr@&Z7dJM9S!BY8=7ho*0`qiupy-mOa^A{2h&y+>lCdq6fsar6D3C`%fDRb*lE+y zVOZ5EGg!PY=kq0BtQ(xz?LxJ7!~vRa($b{PSGd`fE(TSstYw|>>xB|6;AX^%=(k|z zew<_FStgX<7);c|6h$c2nYbFF4JlhWsXCbmQ?t$8Ij$0v*bv?{&j%Qv`avW2Fg zDikg&oTAJ`x#Qi4*lM|p)>IVIbmqAUwaXMQkB7HmLgz-ENX=qoPZxa%HfUEYOh`_w zym@KQ7Zw?lk(dw;Zg|N&ihb~XSlbLu8HTbl5a~6)HF|Bcxu3RaD_xT5#7%Dw;%c9fGZKYLW! zy3g_eXv!vtuA;Tkp3e6zi|9|MT$@~h`X*qhQW+L3n%9cN3YBn6Mz@b@wk71Q+%6*L zaJw-H)|Kk#kG#nW(-MU$qGo5xrT~lcQn43kpT8GCKwuilZ+w4k`3yb%z0vdk+s5e! z@L%8R#rvvgwfY0{ul0X;S$tMcUJ4k55A^Sg^Q)|lS_kEWY2)WfKIXxJ{yz4v z69DD={8^2RooH=M?f>