Skip to content

Commit

Permalink
Signalements – Archiver automatiquement les signalements issus d'aler…
Browse files Browse the repository at this point in the history
…tes (UPDATE ENV VAR) (#3659)

## Linked issues

- Resolve #3189
- à rajouter en PROD : `MONITORFISH_SCHEDULING_ENABLED=true`

## Post MEP en PROD :
Exécuter la requête pour récupérer les signalements pour **1 mois**
(prend ~10s)
```
WITH recent_dep_messages AS (
            SELECT lr.cfr, lr.ircs, lr.external_identification, lr.operation_number, MAX(lr.operation_datetime_utc) as last_dep_date_time
            FROM logbook_reports lr
            WHERE
                lr.operation_datetime_utc > NOW() - INTERVAL '1 month' AND
                lr.log_type = 'DEP'
            GROUP BY lr.cfr, lr.ircs, lr.external_identification, lr.operation_number
        ),

        acknowledged_report_ids AS (
            SELECT DISTINCT referenced_report_id
            FROM logbook_reports lr
            WHERE
                lr.operation_datetime_utc > NOW() - INTERVAL '1 month' AND
                lr.operation_type = 'RET' AND
                lr.value->>'returnStatus' = '000'
        )

        SELECT
            r.id as id,
            r.value as value
        FROM
            reportings r
        INNER JOIN
            (select * from recent_dep_messages) rdp
            ON CASE
                WHEN r.vessel_identifier = 'INTERNAL_REFERENCE_NUMBER' THEN r.internal_reference_number = rdp.cfr
                WHEN r.vessel_identifier = 'IRCS' THEN r.ircs = rdp.ircs
                WHEN r.vessel_identifier = 'EXTERNAL_REFERENCE_NUMBER' THEN r.external_reference_number = rdp.external_identification
            END

        WHERE
            r.archived is false AND
            r.deleted is false AND
            rdp.last_dep_date_time >= r.validation_date AND
            rdp.operation_number IN (SELECT referenced_report_id FROM acknowledged_report_ids)
```

Puis les archiver :
```
UPDATE
            reportings
        SET
            archived = TRUE
        WHERE
            id IN (:ids)
```

----

- [ ] Tests E2E (Cypress)
  • Loading branch information
louptheron committed Sep 17, 2024
2 parents c396217 + 328de65 commit 54375ba
Show file tree
Hide file tree
Showing 21 changed files with 272 additions and 19 deletions.
4 changes: 4 additions & 0 deletions Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -52,6 +52,10 @@ clean: docker-env
docker compose --env-file ./infra/docker/.env -f ./infra/docker/docker-compose.cypress.yml down -v
docker compose -f ./infra/docker/docker-compose.puppeteer.yml down -v

.PHONY: generate-test-data ##LOCAL Generate test data (SQL files from .jsonc)
generate-test-data:
cd frontend && npm run generate:testdata

compile-back:
cd backend && ./gradlew assemble

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@ import org.springframework.context.annotation.Configuration
import org.springframework.scheduling.annotation.EnableScheduling

@ConditionalOnProperty(
value = ["monitorfish.scheduling.enable"],
value = ["monitorfish.scheduling.enabled"],
havingValue = "true",
matchIfMissing = true,
)
Expand Down
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
package fr.gouv.cnsp.monitorfish.domain.repositories

import fr.gouv.cnsp.monitorfish.domain.entities.alerts.PendingAlert
import fr.gouv.cnsp.monitorfish.domain.entities.alerts.type.AlertType
import fr.gouv.cnsp.monitorfish.domain.entities.reporting.InfractionSuspicion
import fr.gouv.cnsp.monitorfish.domain.entities.reporting.Observation
import fr.gouv.cnsp.monitorfish.domain.entities.reporting.Reporting
Expand Down Expand Up @@ -48,7 +49,11 @@ interface ReportingRepository {
fromDate: ZonedDateTime,
): List<Reporting>

fun findUnarchivedReportings(): List<Pair<Int, AlertType>>

fun archive(id: Int)

fun archiveReportings(ids: List<Int>): Int

fun delete(id: Int)
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,31 @@
package fr.gouv.cnsp.monitorfish.domain.use_cases.reporting

import fr.gouv.cnsp.monitorfish.config.UseCase
import fr.gouv.cnsp.monitorfish.domain.entities.alerts.type.AlertTypeMapping
import fr.gouv.cnsp.monitorfish.domain.repositories.ReportingRepository
import org.slf4j.LoggerFactory
import org.springframework.scheduling.annotation.Scheduled
import org.springframework.transaction.annotation.Transactional

@UseCase
class ArchiveOutdatedReportings(private val reportingRepository: ReportingRepository) {
    private val logger = LoggerFactory.getLogger(ArchiveOutdatedReportings::class.java)

    /**
     * Scheduled job that automatically archives reportings once the vessel has
     * started a new trip (see `ReportingRepository.findUnarchivedReportings`),
     * restricted to MISSING_FAR and THREE_MILES_TRAWLING alert types.
     */
    // Runs every 5 minutes (fixedDelay = 300000 ms) after a 6-second initial delay.
    // NOTE(review): the original comment said "1 minute of initial delay", but
    // initialDelay = 6000 ms is 6 seconds — confirm whether 60000 was intended.
    @Scheduled(fixedDelay = 300000, initialDelay = 6000)
    @Transactional
    fun execute() {
        // Candidates are (reporting id, alert type) pairs of unarchived reportings.
        val reportingCandidatesToArchive = reportingRepository.findUnarchivedReportings()

        // Only these two alert types are auto-archived; other candidates are left as-is.
        val filteredReportingIdsToArchive =
            reportingCandidatesToArchive.filter {
                it.second.type == AlertTypeMapping.MISSING_FAR_ALERT ||
                it.second.type == AlertTypeMapping.THREE_MILES_TRAWLING_ALERT
            }.map { it.first }

        logger.info("Found ${filteredReportingIdsToArchive.size} reportings to archive.")
        val numberOfArchivedReportings = reportingRepository.archiveReportings(filteredReportingIdsToArchive)

        logger.info("Archived $numberOfArchivedReportings reportings")
    }
}
Original file line number Diff line number Diff line change
Expand Up @@ -30,19 +30,20 @@ class GetVesselReportings(
vesselIdentifier: VesselIdentifier?,
fromDate: ZonedDateTime,
): VesselReportings {

val (controlUnits, controlUnitsTimeTaken) = measureTimedValue { getAllControlUnits.execute() }
logger.info("TIME_RECORD - 'getAllControlUnits' took $controlUnitsTimeTaken")

val (reportings, reportingsTimeTaken) =
measureTimedValue { findReportings(
vesselId,
vesselIdentifier,
internalReferenceNumber,
fromDate,
ircs,
externalReferenceNumber,
) }
measureTimedValue {
findReportings(
vesselId,
vesselIdentifier,
internalReferenceNumber,
fromDate,
ircs,
externalReferenceNumber,
)
}
logger.info("TIME_RECORD - 'findReportings' took $reportingsTimeTaken")

val (current, currentTimeTaken) =
Expand Down Expand Up @@ -73,7 +74,10 @@ class GetVesselReportings(
}
logger.info("TIME_RECORD - 'archivedYearsToReportings' took $archivedYearsToReportingsTimeTaken")

val (infractionSuspicionsSummary, infractionSuspicionsSummaryTimeTaken) = measureTimedValue { getInfractionSuspicionsSummary(reportings.filter { it.isArchived }) }
val (infractionSuspicionsSummary, infractionSuspicionsSummaryTimeTaken) =
measureTimedValue {
getInfractionSuspicionsSummary(reportings.filter { it.isArchived })
}
logger.info("TIME_RECORD - 'infractionSuspicionsSummary' took $infractionSuspicionsSummaryTimeTaken")
val numberOfInfractionSuspicions = infractionSuspicionsSummary.sumOf { it.numberOfOccurrences }
val numberOfObservation =
Expand Down Expand Up @@ -124,7 +128,7 @@ class GetVesselReportings(
}

return@map ReportingTitleAndNumberOfOccurrences(
title = infraction?.infraction?.let {"$it (NATINF $natinfCode)"} ?: "NATINF $natinfCode",
title = infraction?.infraction?.let { "$it (NATINF $natinfCode)" } ?: "NATINF $natinfCode",
numberOfOccurrences = reportings.size,
)
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -2,12 +2,14 @@ package fr.gouv.cnsp.monitorfish.infrastructure.database.repositories

import com.fasterxml.jackson.databind.ObjectMapper
import fr.gouv.cnsp.monitorfish.domain.entities.alerts.PendingAlert
import fr.gouv.cnsp.monitorfish.domain.entities.alerts.type.AlertType
import fr.gouv.cnsp.monitorfish.domain.entities.reporting.InfractionSuspicion
import fr.gouv.cnsp.monitorfish.domain.entities.reporting.Observation
import fr.gouv.cnsp.monitorfish.domain.entities.reporting.Reporting
import fr.gouv.cnsp.monitorfish.domain.entities.reporting.ReportingType
import fr.gouv.cnsp.monitorfish.domain.entities.reporting.filters.ReportingFilter
import fr.gouv.cnsp.monitorfish.domain.entities.vessel.VesselIdentifier
import fr.gouv.cnsp.monitorfish.domain.mappers.ReportingMapper
import fr.gouv.cnsp.monitorfish.domain.repositories.ReportingRepository
import fr.gouv.cnsp.monitorfish.infrastructure.database.entities.ReportingEntity
import fr.gouv.cnsp.monitorfish.infrastructure.database.repositories.interfaces.DBReportingRepository
Expand Down Expand Up @@ -166,6 +168,23 @@ class JpaReportingRepository(
dbReportingRepository.archiveReporting(id)
}

/**
 * Returns (reporting id, alert type) pairs for unarchived reportings whose vessel
 * sent an acknowledged DEP logbook message after the reporting's validation date
 * (delegates to the native query `findAllUnarchivedAfterDEPLogbookMessage`).
 */
override fun findUnarchivedReportings(): List<Pair<Int, AlertType>> {
    return dbReportingRepository.findAllUnarchivedAfterDEPLogbookMessage().map { result ->
        Pair(
            // Native query selects [id, value]: id as Int, value as a raw JSON string.
            result[0] as Int,
            // NOTE(review): the cast assumes every matched reporting's value is an
            // alert — confirm the query can only return ALERT-type reportings,
            // otherwise this throws ClassCastException at runtime.
            ReportingMapper.getReportingValueFromJSON(
                mapper,
                result[1] as String?,
                ReportingType.ALERT,
            ) as AlertType,
        )
    }
}

/** Bulk-archives the reportings with the given [ids]; returns the number of rows updated. */
override fun archiveReportings(ids: List<Int>): Int = dbReportingRepository.archiveReportings(ids)

@Transactional
override fun delete(id: Int) {
dbReportingRepository.deleteReporting(id)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -75,6 +75,67 @@ interface DBReportingRepository : CrudRepository<ReportingEntity, Int> {
)
fun archiveReporting(id: Int)

/**
 * Searches for unarchived, undeleted reportings whose vessel has started a new trip:
 * a DEP logbook message, received within the last hour and acknowledged by a RET
 * message with returnStatus '000', dated after the reporting's validation_date.
 *
 * Vessels are matched on the reporting's declared identifier (CFR, IRCS or external
 * reference number). Each result row is [id, value] (reporting id, raw JSON value).
 */
@Query(
    value = """
    WITH recent_dep_messages AS (
        SELECT lr.cfr, lr.ircs, lr.external_identification, lr.operation_number, MAX(lr.operation_datetime_utc) as last_dep_date_time
        FROM logbook_reports lr
        WHERE
            lr.operation_datetime_utc > NOW() - INTERVAL '1 hour' AND
            lr.log_type = 'DEP'
        GROUP BY lr.cfr, lr.ircs, lr.external_identification, lr.operation_number
    ),
    acknowledged_report_ids AS (
        SELECT DISTINCT referenced_report_id
        FROM logbook_reports lr
        WHERE
            lr.operation_datetime_utc > NOW() - INTERVAL '1 hour' AND
            lr.operation_type = 'RET' AND
            lr.value->>'returnStatus' = '000'
    )
    SELECT
        r.id as id,
        r.value as value
    FROM
        reportings r
    INNER JOIN
        (select * from recent_dep_messages) rdp
        ON CASE
            WHEN r.vessel_identifier = 'INTERNAL_REFERENCE_NUMBER' THEN r.internal_reference_number = rdp.cfr
            WHEN r.vessel_identifier = 'IRCS' THEN r.ircs = rdp.ircs
            WHEN r.vessel_identifier = 'EXTERNAL_REFERENCE_NUMBER' THEN r.external_reference_number = rdp.external_identification
        END
    WHERE
        r.archived is false AND
        r.deleted is false AND
        rdp.last_dep_date_time >= r.validation_date AND
        rdp.operation_number IN (SELECT referenced_report_id FROM acknowledged_report_ids)
    """,
    nativeQuery = true,
)
fun findAllUnarchivedAfterDEPLogbookMessage(): List<Array<Any>>

/**
 * Bulk-archives the reportings with the given [ids].
 *
 * NOTE(review): callers may pass an empty list — confirm the JPA provider expands
 * an empty `IN (:ids)` safely for native queries on this database.
 *
 * @return the number of rows updated.
 */
@Modifying(clearAutomatically = true, flushAutomatically = true)
@Query(
    value = """
    UPDATE
        reportings
    SET
        archived = TRUE
    WHERE
        id IN (:ids)
    """,
    nativeQuery = true,
)
fun archiveReportings(ids: List<Int>): Int

@Modifying(clearAutomatically = true)
@Query(
value = """
Expand Down
3 changes: 3 additions & 0 deletions backend/src/main/resources/application.properties
Original file line number Diff line number Diff line change
Expand Up @@ -31,6 +31,9 @@ monitorfish.oidc.enabled=${monitorfish.oidc.enabled}
monitorfish.oidc.issuer-uri=${monitorfish.oidc.issuer-uri}
monitorfish.oidc.userinfo-endpoint=${monitorfish.oidc.userinfo-endpoint}

# Scheduling
monitorfish.scheduling.enabled=${monitorfish.scheduling.enabled}

# Whether response compression is enabled.
server.compression.enabled=true

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@ TRUNCATE TABLE reportings RESTART IDENTITY CASCADE;
INSERT INTO reportings (type, vessel_name, internal_reference_number, external_reference_number, ircs,
vessel_identifier, flag_state, creation_date, validation_date, archived, deleted, value, latitude, longitude, vessel_id)
VALUES ('ALERT', 'MARIAGE ÎLE HASARD', 'ABC000180832', 'VP374069', 'CG1312', 'INTERNAL_REFERENCE_NUMBER', 'FR',
NOW() - ('1 DAY')::interval, NOW(), false, false, ('{' ||
NOW() - ('1 DAY')::interval, NOW() - ('30 MINUTES')::interval, false, false, ('{' ||
'"seaFront": "NAMO",' ||
'"riskFactor": 3.5647,' ||
'"type": "THREE_MILES_TRAWLING_ALERT",' ||
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -87,8 +87,12 @@ INSERT INTO logbook_raw_messages (operation_number, xml_message) VALUES ('FAKE_O

INSERT INTO logbook_raw_messages (operation_number, xml_message) VALUES ('FAKE_OPERATION_120', '<Flux>Message FLUX xml</Flux>');

INSERT INTO logbook_raw_messages (operation_number, xml_message) VALUES ('FAKE_OPERATION_121', '<Flux>Message FLUX xml</Flux>');

INSERT INTO logbook_raw_messages (operation_number, xml_message) VALUES ('FAKE_OPERATION_120_RET', '<Flux>Message FLUX xml</Flux>');

INSERT INTO logbook_raw_messages (operation_number, xml_message) VALUES ('FAKE_OPERATION_121_RET', '<Flux>Message FLUX xml</Flux>');

INSERT INTO logbook_reports (id, report_id, referenced_report_id, cfr, enriched, flag_state, integration_datetime_utc, log_type, operation_datetime_utc, operation_number, operation_type, report_datetime_utc, software, transmission_format, vessel_name, trip_gears, trip_segments, value) VALUES (101, 'FAKE_OPERATION_101', NULL, 'FAK000999999', true, 'FRA', NOW() AT TIME ZONE 'UTC' - INTERVAL '15 minutes', 'PNO', NOW() AT TIME ZONE 'UTC' - INTERVAL '15 minutes', 'FAKE_OPERATION_101', 'DAT', NOW() AT TIME ZONE 'UTC' - INTERVAL '15 minutes', 'JT/VISIOCaptures V1.4.7', 'ERS', 'PHENOMENE', '[{"gear":"TBN","mesh":100,"dimensions":"250;180"},{"gear":"OTT","mesh":120.5,"dimensions":"250;280"}]', '[{"segment":"SWW04","segmentName":"Chaluts pélagiques"},{"segment":"SWW06","segmentName":"Sennes"}]', '{"riskFactor":2.1,"catchOnboard":[{"weight":25,"nbFish":null,"species":"COD","faoZone":"27.8.a","effortZone":"C","economicZone":"FRA","statisticalRectangle":"23E6"}],"isBeingSent":false,"isInVerificationScope":false,"isSent":false,"isVerified":false,"pnoTypes":[{"pnoTypeName":"Préavis type A","minimumNotificationPeriod":4,"hasDesignatedPorts":false},{"pnoTypeName":"Préavis type B","minimumNotificationPeriod":8,"hasDesignatedPorts":true}],"port":"FRSML","predictedArrivalDatetimeUtc":null,"predictedLandingDatetimeUtc":null,"purpose":"LAN","tripStartDate":null}');
UPDATE logbook_reports SET value = JSONB_SET(value, '{predictedArrivalDatetimeUtc}', TO_JSONB(TO_CHAR(NOW() AT TIME ZONE 'UTC' + INTERVAL '3 hours', 'YYYY-MM-DD"T"HH24:MI:SS"Z"')), true) WHERE id = 101;
UPDATE logbook_reports SET value = JSONB_SET(value, '{predictedLandingDatetimeUtc}', TO_JSONB(TO_CHAR(NOW() AT TIME ZONE 'UTC' + INTERVAL '3.5 hours', 'YYYY-MM-DD"T"HH24:MI:SS"Z"')), true) WHERE id = 101;
Expand Down Expand Up @@ -233,3 +237,7 @@ UPDATE logbook_reports SET value = JSONB_SET(value, '{predictedLandingDatetimeUt
UPDATE logbook_reports SET value = JSONB_SET(value, '{tripStartDate}', TO_JSONB(TO_CHAR(NOW() AT TIME ZONE 'UTC' - INTERVAL '10 hours', 'YYYY-MM-DD"T"HH24:MI:SS"Z"')), true) WHERE id = 120;

INSERT INTO logbook_reports (id, report_id, referenced_report_id, integration_datetime_utc, operation_datetime_utc, operation_number, operation_type, transmission_format, value) VALUES (1120, NULL, 'FAKE_OPERATION_120', NOW() AT TIME ZONE 'UTC' - INTERVAL '14 minutes', NOW() AT TIME ZONE 'UTC' - INTERVAL '14 minutes', 'FAKE_OPERATION_120_RET', 'RET', 'ERS', '{"returnStatus":"000"}');

-- DEP message for MARIAGE ÎLE HASARD (CFR ABC000180832), 15 minutes ago.
-- NOTE(review): tripStartDate/departureDatetimeUtc hold the literal text
-- "NOW() - INTERVAL '15 minutes'" inside the JSON value (not evaluated by SQL) — confirm intended.
INSERT INTO logbook_reports (id, report_id, referenced_report_id, cfr, enriched, flag_state, integration_datetime_utc, log_type, operation_datetime_utc, operation_number, operation_type, report_datetime_utc, software, transmission_format, trip_gears, trip_segments, vessel_name, value) VALUES (121, 'FAKE_OPERATION_121', NULL, 'ABC000180832', true, 'FRA', NOW() - INTERVAL '15 minutes', 'DEP', NOW() - INTERVAL '15 minutes', 'FAKE_OPERATION_121', 'DAT', NOW() - INTERVAL '15 minutes', 'TurboCatch (3.7-1)', 'ERS', NULL, NULL, 'MARIAGE ÎLE HASARD', '{"gearOnboard":[{"gear":"GTR","mesh":100}],"departurePort":"AEJAZ","anticipatedActivity":"FSH","tripStartDate":"NOW() - INTERVAL ''15 minutes''","departureDatetimeUtc":"NOW() - INTERVAL ''15 minutes''"}');

-- Acknowledgement (RET returnStatus 000) for FAKE_OPERATION_121.
-- Fix: id was 1120, duplicating the FAKE_OPERATION_120_RET row above — use a unique id.
INSERT INTO logbook_reports (id, report_id, referenced_report_id, integration_datetime_utc, operation_datetime_utc, operation_number, operation_type, transmission_format, value) VALUES (1121, NULL, 'FAKE_OPERATION_121', NOW() - INTERVAL '14 minutes', NOW() - INTERVAL '14 minutes', 'FAKE_OPERATION_121_RET', 'RET', 'ERS', '{"returnStatus":"000"}');
Original file line number Diff line number Diff line change
Expand Up @@ -45,7 +45,9 @@
{ "operation_number": "FAKE_OPERATION_119", "xml_message": "<Flux>Message FLUX xml</Flux>" },
{ "operation_number": "FAKE_OPERATION_119_RET", "xml_message": "<Flux>Message FLUX xml</Flux>" },
{ "operation_number": "FAKE_OPERATION_120", "xml_message": "<Flux>Message FLUX xml</Flux>" },
{ "operation_number": "FAKE_OPERATION_120_RET", "xml_message": "<Flux>Message FLUX xml</Flux>" }
{ "operation_number": "FAKE_OPERATION_121", "xml_message": "<Flux>Message FLUX xml</Flux>" },
{ "operation_number": "FAKE_OPERATION_120_RET", "xml_message": "<Flux>Message FLUX xml</Flux>" },
{ "operation_number": "FAKE_OPERATION_121_RET", "xml_message": "<Flux>Message FLUX xml</Flux>" }
]
},
{
Expand Down Expand Up @@ -1503,6 +1505,48 @@
"value:jsonb": {
"returnStatus": "000"
}
},

// - Vessel: MARIAGE ÎLE HASARD
// - Flag state: FR
{
"id": 121,
"report_id": "FAKE_OPERATION_121",
"referenced_report_id": null,
"cfr": "ABC000180832",
"enriched": true,
"flag_state": "FRA",
"integration_datetime_utc:sql": "NOW() - INTERVAL '15 minutes'",
"log_type": "DEP",
"operation_datetime_utc:sql": "NOW() - INTERVAL '15 minutes'",
"operation_number": "FAKE_OPERATION_121",
"operation_type": "DAT",
"report_datetime_utc:sql": "NOW() - INTERVAL '15 minutes'",
"software": "TurboCatch (3.7-1)",
"transmission_format": "ERS",
"trip_gears": null,
"trip_segments": null,
"vessel_name": "MARIAGE ÎLE HASARD",
"value:jsonb": {
"gearOnboard": [{"gear": "GTR", "mesh": 100.0}],
"departurePort": "AEJAZ",
"anticipatedActivity": "FSH",
"tripStartDate": "NOW() - INTERVAL '15 minutes'",
"departureDatetimeUtc": "NOW() - INTERVAL '15 minutes'"
}
},
{
  // RET acknowledging FAKE_OPERATION_121.
  // Fix: id was 1120, duplicating the FAKE_OPERATION_120_RET row — ids must be unique.
  "id": 1121,
  "report_id": null,
  "referenced_report_id": "FAKE_OPERATION_121",
  "integration_datetime_utc:sql": "NOW() - INTERVAL '14 minutes'",
  "operation_datetime_utc:sql": "NOW() - INTERVAL '14 minutes'",
  "operation_number": "FAKE_OPERATION_121_RET",
  "operation_type": "RET",
  "transmission_format": "ERS",
  "value:jsonb": {
    "returnStatus": "000"
  }
}
]
}
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,37 @@
package fr.gouv.cnsp.monitorfish.domain.use_cases.reporting

import com.nhaarman.mockitokotlin2.eq
import com.nhaarman.mockitokotlin2.verify
import fr.gouv.cnsp.monitorfish.domain.entities.alerts.type.MissingFARAlert
import fr.gouv.cnsp.monitorfish.domain.entities.alerts.type.ThreeMilesTrawlingAlert
import fr.gouv.cnsp.monitorfish.domain.entities.alerts.type.TwelveMilesFishingAlert
import fr.gouv.cnsp.monitorfish.domain.repositories.ReportingRepository
import org.junit.jupiter.api.Test
import org.junit.jupiter.api.extension.ExtendWith
import org.mockito.BDDMockito.given
import org.springframework.boot.test.mock.mockito.MockBean
import org.springframework.test.context.junit.jupiter.SpringExtension

@ExtendWith(SpringExtension::class)
class ArchiveOutdatedReportingsUTests {
    @MockBean
    private lateinit var reportingRepository: ReportingRepository

    @Test
    fun `execute Should archive outdated reportings`() {
        // Given: three candidates, of which only THREE_MILES_TRAWLING (id 2)
        // and MISSING_FAR (id 3) are eligible for automatic archiving.
        val candidates = listOf(
            Pair(1, TwelveMilesFishingAlert("NAMO")),
            Pair(2, ThreeMilesTrawlingAlert("NAMO")),
            Pair(3, MissingFARAlert("NAMO")),
        )
        given(reportingRepository.findUnarchivedReportings()).willReturn(candidates)

        // When
        ArchiveOutdatedReportings(reportingRepository).execute()

        // Then: the ineligible TWELVE_MILES candidate (id 1) is filtered out.
        verify(reportingRepository).archiveReportings(eq(listOf(2, 3)))
    }
}
Loading

0 comments on commit 54375ba

Please sign in to comment.