diff --git a/Makefile b/Makefile index 6a68e43b6d..fc60361ee1 100644 --- a/Makefile +++ b/Makefile @@ -52,6 +52,10 @@ clean: docker-env docker compose --env-file ./infra/docker/.env -f ./infra/docker/docker-compose.cypress.yml down -v docker compose -f ./infra/docker/docker-compose.puppeteer.yml down -v +.PHONY: generate-test-data ##LOCAL Generate test data (SQL files from .jsonc) +generate-test-data: + cd frontend && npm run generate:testdata + compile-back: cd backend && ./gradlew assemble diff --git a/backend/src/main/kotlin/fr/gouv/cnsp/monitorfish/config/SchedulingConfig.kt b/backend/src/main/kotlin/fr/gouv/cnsp/monitorfish/config/SchedulingConfig.kt index 9f4f2e1101..6f6395142f 100644 --- a/backend/src/main/kotlin/fr/gouv/cnsp/monitorfish/config/SchedulingConfig.kt +++ b/backend/src/main/kotlin/fr/gouv/cnsp/monitorfish/config/SchedulingConfig.kt @@ -5,7 +5,7 @@ import org.springframework.context.annotation.Configuration import org.springframework.scheduling.annotation.EnableScheduling @ConditionalOnProperty( - value = ["monitorfish.scheduling.enable"], + value = ["monitorfish.scheduling.enabled"], havingValue = "true", matchIfMissing = true, ) diff --git a/backend/src/main/kotlin/fr/gouv/cnsp/monitorfish/domain/repositories/ReportingRepository.kt b/backend/src/main/kotlin/fr/gouv/cnsp/monitorfish/domain/repositories/ReportingRepository.kt index 74f13220dd..a617966aa5 100644 --- a/backend/src/main/kotlin/fr/gouv/cnsp/monitorfish/domain/repositories/ReportingRepository.kt +++ b/backend/src/main/kotlin/fr/gouv/cnsp/monitorfish/domain/repositories/ReportingRepository.kt @@ -1,6 +1,7 @@ package fr.gouv.cnsp.monitorfish.domain.repositories import fr.gouv.cnsp.monitorfish.domain.entities.alerts.PendingAlert +import fr.gouv.cnsp.monitorfish.domain.entities.alerts.type.AlertType import fr.gouv.cnsp.monitorfish.domain.entities.reporting.InfractionSuspicion import fr.gouv.cnsp.monitorfish.domain.entities.reporting.Observation import fr.gouv.cnsp.monitorfish.domain.entities.reporting.Reporting @@ -48,7 +49,11 @@ interface ReportingRepository { fromDate: ZonedDateTime, ): List + fun findUnarchivedReportings(): List> + fun archive(id: Int) + fun archiveReportings(ids: List): Int + fun delete(id: Int) } diff --git a/backend/src/main/kotlin/fr/gouv/cnsp/monitorfish/domain/use_cases/reporting/ArchiveOutdatedReportings.kt b/backend/src/main/kotlin/fr/gouv/cnsp/monitorfish/domain/use_cases/reporting/ArchiveOutdatedReportings.kt new file mode 100644 index 0000000000..752a7b1be1 --- /dev/null +++ b/backend/src/main/kotlin/fr/gouv/cnsp/monitorfish/domain/use_cases/reporting/ArchiveOutdatedReportings.kt @@ -0,0 +1,31 @@ +package fr.gouv.cnsp.monitorfish.domain.use_cases.reporting + +import fr.gouv.cnsp.monitorfish.config.UseCase +import fr.gouv.cnsp.monitorfish.domain.entities.alerts.type.AlertTypeMapping +import fr.gouv.cnsp.monitorfish.domain.repositories.ReportingRepository +import org.slf4j.LoggerFactory +import org.springframework.scheduling.annotation.Scheduled +import org.springframework.transaction.annotation.Transactional + +@UseCase +class ArchiveOutdatedReportings(private val reportingRepository: ReportingRepository) { + private val logger = LoggerFactory.getLogger(ArchiveOutdatedReportings::class.java) + + // At every 5 minutes, after 1 minute of initial delay + @Scheduled(fixedDelay = 300000, initialDelay = 6000) + @Transactional + fun execute() { + val reportingCandidatesToArchive = reportingRepository.findUnarchivedReportings() + + val filteredReportingIdsToArchive = + 
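+            // Keep only reportings generated by a MISSING_FAR_ALERT or a THREE_MILES_TRAWLING_ALERT:
+            // these alerts become obsolete once the vessel has started a new trip.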
reportingCandidatesToArchive.filter { + it.second.type == AlertTypeMapping.MISSING_FAR_ALERT || + it.second.type == AlertTypeMapping.THREE_MILES_TRAWLING_ALERT + }.map { it.first } + + logger.info("Found ${filteredReportingIdsToArchive.size} reportings to archive.") + val numberOfArchivedReportings = reportingRepository.archiveReportings(filteredReportingIdsToArchive) + + logger.info("Archived $numberOfArchivedReportings reportings") + } +} diff --git a/backend/src/main/kotlin/fr/gouv/cnsp/monitorfish/domain/use_cases/reporting/GetVesselReportings.kt b/backend/src/main/kotlin/fr/gouv/cnsp/monitorfish/domain/use_cases/reporting/GetVesselReportings.kt index 0c08b4c03c..306c07c728 100644 --- a/backend/src/main/kotlin/fr/gouv/cnsp/monitorfish/domain/use_cases/reporting/GetVesselReportings.kt +++ b/backend/src/main/kotlin/fr/gouv/cnsp/monitorfish/domain/use_cases/reporting/GetVesselReportings.kt @@ -30,19 +30,20 @@ class GetVesselReportings( vesselIdentifier: VesselIdentifier?, fromDate: ZonedDateTime, ): VesselReportings { - val (controlUnits, controlUnitsTimeTaken) = measureTimedValue { getAllControlUnits.execute() } logger.info("TIME_RECORD - 'getAllControlUnits' took $controlUnitsTimeTaken") val (reportings, reportingsTimeTaken) = - measureTimedValue { findReportings( - vesselId, - vesselIdentifier, - internalReferenceNumber, - fromDate, - ircs, - externalReferenceNumber, - ) } + measureTimedValue { + findReportings( + vesselId, + vesselIdentifier, + internalReferenceNumber, + fromDate, + ircs, + externalReferenceNumber, + ) + } logger.info("TIME_RECORD - 'findReportings' took $reportingsTimeTaken") val (current, currentTimeTaken) = @@ -73,7 +74,10 @@ class GetVesselReportings( } logger.info("TIME_RECORD - 'archivedYearsToReportings' took $archivedYearsToReportingsTimeTaken") - val (infractionSuspicionsSummary, infractionSuspicionsSummaryTimeTaken) = measureTimedValue { getInfractionSuspicionsSummary(reportings.filter { it.isArchived }) } + val (infractionSuspicionsSummary, infractionSuspicionsSummaryTimeTaken) = + measureTimedValue { + getInfractionSuspicionsSummary(reportings.filter { it.isArchived }) + } logger.info("TIME_RECORD - 'infractionSuspicionsSummary' took $infractionSuspicionsSummaryTimeTaken") val numberOfInfractionSuspicions = infractionSuspicionsSummary.sumOf { it.numberOfOccurrences } val numberOfObservation = @@ -124,7 +128,7 @@ class GetVesselReportings( } return@map ReportingTitleAndNumberOfOccurrences( - title = infraction?.infraction?.let {"$it (NATINF $natinfCode)"} ?: "NATINF $natinfCode", + title = infraction?.infraction?.let { "$it (NATINF $natinfCode)" } ?: "NATINF $natinfCode", numberOfOccurrences = reportings.size, ) } diff --git a/backend/src/main/kotlin/fr/gouv/cnsp/monitorfish/infrastructure/database/repositories/JpaReportingRepository.kt b/backend/src/main/kotlin/fr/gouv/cnsp/monitorfish/infrastructure/database/repositories/JpaReportingRepository.kt index 87ec143174..eda44fa9ac 100644 --- a/backend/src/main/kotlin/fr/gouv/cnsp/monitorfish/infrastructure/database/repositories/JpaReportingRepository.kt +++ b/backend/src/main/kotlin/fr/gouv/cnsp/monitorfish/infrastructure/database/repositories/JpaReportingRepository.kt @@ -2,12 +2,14 @@ package fr.gouv.cnsp.monitorfish.infrastructure.database.repositories import com.fasterxml.jackson.databind.ObjectMapper import fr.gouv.cnsp.monitorfish.domain.entities.alerts.PendingAlert +import fr.gouv.cnsp.monitorfish.domain.entities.alerts.type.AlertType import 
fr.gouv.cnsp.monitorfish.domain.entities.reporting.InfractionSuspicion import fr.gouv.cnsp.monitorfish.domain.entities.reporting.Observation import fr.gouv.cnsp.monitorfish.domain.entities.reporting.Reporting import fr.gouv.cnsp.monitorfish.domain.entities.reporting.ReportingType import fr.gouv.cnsp.monitorfish.domain.entities.reporting.filters.ReportingFilter import fr.gouv.cnsp.monitorfish.domain.entities.vessel.VesselIdentifier +import fr.gouv.cnsp.monitorfish.domain.mappers.ReportingMapper import fr.gouv.cnsp.monitorfish.domain.repositories.ReportingRepository import fr.gouv.cnsp.monitorfish.infrastructure.database.entities.ReportingEntity import fr.gouv.cnsp.monitorfish.infrastructure.database.repositories.interfaces.DBReportingRepository @@ -166,6 +168,23 @@ class JpaReportingRepository( dbReportingRepository.archiveReporting(id) } + override fun findUnarchivedReportings(): List> { + return dbReportingRepository.findAllUnarchivedAfterDEPLogbookMessage().map { result -> + Pair( + result[0] as Int, + ReportingMapper.getReportingValueFromJSON( + mapper, + result[1] as String?, + ReportingType.ALERT, + ) as AlertType, + ) + } + } + + override fun archiveReportings(ids: List): Int { + return dbReportingRepository.archiveReportings(ids) + } + @Transactional override fun delete(id: Int) { dbReportingRepository.deleteReporting(id) diff --git a/backend/src/main/kotlin/fr/gouv/cnsp/monitorfish/infrastructure/database/repositories/interfaces/DBReportingRepository.kt b/backend/src/main/kotlin/fr/gouv/cnsp/monitorfish/infrastructure/database/repositories/interfaces/DBReportingRepository.kt index 788d2850fe..b750435157 100644 --- a/backend/src/main/kotlin/fr/gouv/cnsp/monitorfish/infrastructure/database/repositories/interfaces/DBReportingRepository.kt +++ b/backend/src/main/kotlin/fr/gouv/cnsp/monitorfish/infrastructure/database/repositories/interfaces/DBReportingRepository.kt @@ -75,6 +75,67 @@ interface DBReportingRepository : CrudRepository { ) fun archiveReporting(id: Int) + /** + * Search for unarchived reportings (created for max 1 hour ago) after vessels' have started a new trip. 
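+     * Only DEP messages received within the last hour and acknowledged by a RET message
+     * with returnStatus '000' are taken into account.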
+ * (a DEP logbook message is received after the reporting validation_date) + */ + @Query( + value = """ + WITH recent_dep_messages AS ( + SELECT lr.cfr, lr.ircs, lr.external_identification, lr.operation_number, MAX(lr.operation_datetime_utc) as last_dep_date_time + FROM logbook_reports lr + WHERE + lr.operation_datetime_utc > NOW() - INTERVAL '1 hour' AND + lr.log_type = 'DEP' + GROUP BY lr.cfr, lr.ircs, lr.external_identification, lr.operation_number + ), + + acknowledged_report_ids AS ( + SELECT DISTINCT referenced_report_id + FROM logbook_reports lr + WHERE + lr.operation_datetime_utc > NOW() - INTERVAL '1 hour' AND + lr.operation_type = 'RET' AND + lr.value->>'returnStatus' = '000' + ) + + SELECT + r.id as id, + r.value as value + FROM + reportings r + INNER JOIN + (select * from recent_dep_messages) rdp + ON CASE + WHEN r.vessel_identifier = 'INTERNAL_REFERENCE_NUMBER' THEN r.internal_reference_number = rdp.cfr + WHEN r.vessel_identifier = 'IRCS' THEN r.ircs = rdp.ircs + WHEN r.vessel_identifier = 'EXTERNAL_REFERENCE_NUMBER' THEN r.external_reference_number = rdp.external_identification + END + + WHERE + r.archived is false AND + r.deleted is false AND + rdp.last_dep_date_time >= r.validation_date AND + rdp.operation_number IN (SELECT referenced_report_id FROM acknowledged_report_ids) + """, + nativeQuery = true, + ) + fun findAllUnarchivedAfterDEPLogbookMessage(): List> + + @Modifying(clearAutomatically = true, flushAutomatically = true) + @Query( + value = """ + UPDATE + reportings + SET + archived = TRUE + WHERE + id IN (:ids) + """, + nativeQuery = true, + ) + fun archiveReportings(ids: List): Int + @Modifying(clearAutomatically = true) @Query( value = """ diff --git a/backend/src/main/resources/application.properties b/backend/src/main/resources/application.properties index 44cad74f60..cb99273b70 100644 --- a/backend/src/main/resources/application.properties +++ b/backend/src/main/resources/application.properties @@ -31,6 +31,9 @@ monitorfish.oidc.enabled=${monitorfish.oidc.enabled} monitorfish.oidc.issuer-uri=${monitorfish.oidc.issuer-uri} monitorfish.oidc.userinfo-endpoint=${monitorfish.oidc.userinfo-endpoint} +# Scheduling +monitorfish.scheduling.enabled=${monitorfish.scheduling.enabled} + # Whether response compression is enabled. 
server.compression.enabled=true diff --git a/backend/src/main/resources/db/testdata/V666.19.0__Insert_dummy_reportings.sql b/backend/src/main/resources/db/testdata/V666.19.0__Insert_dummy_reportings.sql index 60a115abe8..8167719ec6 100644 --- a/backend/src/main/resources/db/testdata/V666.19.0__Insert_dummy_reportings.sql +++ b/backend/src/main/resources/db/testdata/V666.19.0__Insert_dummy_reportings.sql @@ -2,7 +2,7 @@ TRUNCATE TABLE reportings RESTART IDENTITY CASCADE; INSERT INTO reportings (type, vessel_name, internal_reference_number, external_reference_number, ircs, vessel_identifier, flag_state, creation_date, validation_date, archived, deleted, value, latitude, longitude, vessel_id) VALUES ('ALERT', 'MARIAGE ÎLE HASARD', 'ABC000180832', 'VP374069', 'CG1312', 'INTERNAL_REFERENCE_NUMBER', 'FR', - NOW() - ('1 DAY')::interval, NOW(), false, false, ('{' || + NOW() - ('1 DAY')::interval, NOW() - ('30 MINUTES')::interval, false, false, ('{' || '"seaFront": "NAMO",' || '"riskFactor": 3.5647,' || '"type": "THREE_MILES_TRAWLING_ALERT",' || diff --git a/backend/src/main/resources/db/testdata/V666.5.1__Insert_more_pno_logbook_reports.sql b/backend/src/main/resources/db/testdata/V666.5.1__Insert_more_pno_logbook_reports.sql index efb93c52c5..7645dfa529 100644 --- a/backend/src/main/resources/db/testdata/V666.5.1__Insert_more_pno_logbook_reports.sql +++ b/backend/src/main/resources/db/testdata/V666.5.1__Insert_more_pno_logbook_reports.sql @@ -87,8 +87,12 @@ INSERT INTO logbook_raw_messages (operation_number, xml_message) VALUES ('FAKE_O INSERT INTO logbook_raw_messages (operation_number, xml_message) VALUES ('FAKE_OPERATION_120', 'Message FLUX xml'); +INSERT INTO logbook_raw_messages (operation_number, xml_message) VALUES ('FAKE_OPERATION_121', 'Message FLUX xml'); + INSERT INTO logbook_raw_messages (operation_number, xml_message) VALUES ('FAKE_OPERATION_120_RET', 'Message FLUX xml'); +INSERT INTO logbook_raw_messages (operation_number, xml_message) VALUES ('FAKE_OPERATION_121_RET', 'Message FLUX xml'); + INSERT INTO logbook_reports (id, report_id, referenced_report_id, cfr, enriched, flag_state, integration_datetime_utc, log_type, operation_datetime_utc, operation_number, operation_type, report_datetime_utc, software, transmission_format, vessel_name, trip_gears, trip_segments, value) VALUES (101, 'FAKE_OPERATION_101', NULL, 'FAK000999999', true, 'FRA', NOW() AT TIME ZONE 'UTC' - INTERVAL '15 minutes', 'PNO', NOW() AT TIME ZONE 'UTC' - INTERVAL '15 minutes', 'FAKE_OPERATION_101', 'DAT', NOW() AT TIME ZONE 'UTC' - INTERVAL '15 minutes', 'JT/VISIOCaptures V1.4.7', 'ERS', 'PHENOMENE', '[{"gear":"TBN","mesh":100,"dimensions":"250;180"},{"gear":"OTT","mesh":120.5,"dimensions":"250;280"}]', '[{"segment":"SWW04","segmentName":"Chaluts pélagiques"},{"segment":"SWW06","segmentName":"Sennes"}]', '{"riskFactor":2.1,"catchOnboard":[{"weight":25,"nbFish":null,"species":"COD","faoZone":"27.8.a","effortZone":"C","economicZone":"FRA","statisticalRectangle":"23E6"}],"isBeingSent":false,"isInVerificationScope":false,"isSent":false,"isVerified":false,"pnoTypes":[{"pnoTypeName":"Préavis type A","minimumNotificationPeriod":4,"hasDesignatedPorts":false},{"pnoTypeName":"Préavis type B","minimumNotificationPeriod":8,"hasDesignatedPorts":true}],"port":"FRSML","predictedArrivalDatetimeUtc":null,"predictedLandingDatetimeUtc":null,"purpose":"LAN","tripStartDate":null}'); UPDATE logbook_reports SET value = JSONB_SET(value, '{predictedArrivalDatetimeUtc}', TO_JSONB(TO_CHAR(NOW() AT TIME ZONE 'UTC' + INTERVAL '3 hours', 
'YYYY-MM-DD"T"HH24:MI:SS"Z"')), true) WHERE id = 101; UPDATE logbook_reports SET value = JSONB_SET(value, '{predictedLandingDatetimeUtc}', TO_JSONB(TO_CHAR(NOW() AT TIME ZONE 'UTC' + INTERVAL '3.5 hours', 'YYYY-MM-DD"T"HH24:MI:SS"Z"')), true) WHERE id = 101; @@ -233,3 +237,7 @@ UPDATE logbook_reports SET value = JSONB_SET(value, '{predictedLandingDatetimeUt UPDATE logbook_reports SET value = JSONB_SET(value, '{tripStartDate}', TO_JSONB(TO_CHAR(NOW() AT TIME ZONE 'UTC' - INTERVAL '10 hours', 'YYYY-MM-DD"T"HH24:MI:SS"Z"')), true) WHERE id = 120; INSERT INTO logbook_reports (id, report_id, referenced_report_id, integration_datetime_utc, operation_datetime_utc, operation_number, operation_type, transmission_format, value) VALUES (1120, NULL, 'FAKE_OPERATION_120', NOW() AT TIME ZONE 'UTC' - INTERVAL '14 minutes', NOW() AT TIME ZONE 'UTC' - INTERVAL '14 minutes', 'FAKE_OPERATION_120_RET', 'RET', 'ERS', '{"returnStatus":"000"}'); + +INSERT INTO logbook_reports (id, report_id, referenced_report_id, cfr, enriched, flag_state, integration_datetime_utc, log_type, operation_datetime_utc, operation_number, operation_type, report_datetime_utc, software, transmission_format, trip_gears, trip_segments, vessel_name, value) VALUES (121, 'FAKE_OPERATION_121', NULL, 'ABC000180832', true, 'FRA', NOW() - INTERVAL '15 minutes', 'DEP', NOW() - INTERVAL '15 minutes', 'FAKE_OPERATION_121', 'DAT', NOW() - INTERVAL '15 minutes', 'TurboCatch (3.7-1)', 'ERS', NULL, NULL, 'MARIAGE ÎLE HASARD', '{"gearOnboard":[{"gear":"GTR","mesh":100}],"departurePort":"AEJAZ","anticipatedActivity":"FSH","tripStartDate":"NOW() - INTERVAL ''15 minutes''","departureDatetimeUtc":"NOW() - INTERVAL ''15 minutes''"}'); + +INSERT INTO logbook_reports (id, report_id, referenced_report_id, integration_datetime_utc, operation_datetime_utc, operation_number, operation_type, transmission_format, value) VALUES (1120, NULL, 'FAKE_OPERATION_121', NOW() - INTERVAL '14 minutes', NOW() - INTERVAL '14 minutes', 'FAKE_OPERATION_121_RET', 'RET', 'ERS', '{"returnStatus":"000"}'); diff --git a/backend/src/main/resources/db/testdata/json/V666.5.1__Insert_more_pno_logbook_reports.jsonc b/backend/src/main/resources/db/testdata/json/V666.5.1__Insert_more_pno_logbook_reports.jsonc index a069e7a8c8..8f7d2797fe 100644 --- a/backend/src/main/resources/db/testdata/json/V666.5.1__Insert_more_pno_logbook_reports.jsonc +++ b/backend/src/main/resources/db/testdata/json/V666.5.1__Insert_more_pno_logbook_reports.jsonc @@ -45,7 +45,9 @@ { "operation_number": "FAKE_OPERATION_119", "xml_message": "Message FLUX xml" }, { "operation_number": "FAKE_OPERATION_119_RET", "xml_message": "Message FLUX xml" }, { "operation_number": "FAKE_OPERATION_120", "xml_message": "Message FLUX xml" }, - { "operation_number": "FAKE_OPERATION_120_RET", "xml_message": "Message FLUX xml" } + { "operation_number": "FAKE_OPERATION_121", "xml_message": "Message FLUX xml" }, + { "operation_number": "FAKE_OPERATION_120_RET", "xml_message": "Message FLUX xml" }, + { "operation_number": "FAKE_OPERATION_121_RET", "xml_message": "Message FLUX xml" } ] }, { @@ -1503,6 +1505,48 @@ "value:jsonb": { "returnStatus": "000" } + }, + + // - Vessel: MARIAGE ÎLE HASARD + // - Flag state: FR + { + "id": 121, + "report_id": "FAKE_OPERATION_121", + "referenced_report_id": null, + "cfr": "ABC000180832", + "enriched": true, + "flag_state": "FRA", + "integration_datetime_utc:sql": "NOW() - INTERVAL '15 minutes'", + "log_type": "DEP", + "operation_datetime_utc:sql": "NOW() - INTERVAL '15 minutes'", + "operation_number": 
"FAKE_OPERATION_121", + "operation_type": "DAT", + "report_datetime_utc:sql": "NOW() - INTERVAL '15 minutes'", + "software": "TurboCatch (3.7-1)", + "transmission_format": "ERS", + "trip_gears": null, + "trip_segments": null, + "vessel_name": "MARIAGE ÎLE HASARD", + "value:jsonb": { + "gearOnboard": [{"gear": "GTR", "mesh": 100.0}], + "departurePort": "AEJAZ", + "anticipatedActivity": "FSH", + "tripStartDate": "NOW() - INTERVAL '15 minutes'", + "departureDatetimeUtc": "NOW() - INTERVAL '15 minutes'" + } + }, + { + "id": 1120, + "report_id": null, + "referenced_report_id": "FAKE_OPERATION_121", + "integration_datetime_utc:sql": "NOW() - INTERVAL '14 minutes'", + "operation_datetime_utc:sql": "NOW() - INTERVAL '14 minutes'", + "operation_number": "FAKE_OPERATION_121_RET", + "operation_type": "RET", + "transmission_format": "ERS", + "value:jsonb": { + "returnStatus": "000" + } } ] } diff --git a/backend/src/test/kotlin/fr/gouv/cnsp/monitorfish/domain/use_cases/reporting/ArchiveOutdatedReportingsUTests.kt b/backend/src/test/kotlin/fr/gouv/cnsp/monitorfish/domain/use_cases/reporting/ArchiveOutdatedReportingsUTests.kt new file mode 100644 index 0000000000..e871428a04 --- /dev/null +++ b/backend/src/test/kotlin/fr/gouv/cnsp/monitorfish/domain/use_cases/reporting/ArchiveOutdatedReportingsUTests.kt @@ -0,0 +1,37 @@ +package fr.gouv.cnsp.monitorfish.domain.use_cases.reporting + +import com.nhaarman.mockitokotlin2.eq +import com.nhaarman.mockitokotlin2.verify +import fr.gouv.cnsp.monitorfish.domain.entities.alerts.type.MissingFARAlert +import fr.gouv.cnsp.monitorfish.domain.entities.alerts.type.ThreeMilesTrawlingAlert +import fr.gouv.cnsp.monitorfish.domain.entities.alerts.type.TwelveMilesFishingAlert +import fr.gouv.cnsp.monitorfish.domain.repositories.ReportingRepository +import org.junit.jupiter.api.Test +import org.junit.jupiter.api.extension.ExtendWith +import org.mockito.BDDMockito.given +import org.springframework.boot.test.mock.mockito.MockBean +import org.springframework.test.context.junit.jupiter.SpringExtension + +@ExtendWith(SpringExtension::class) +class ArchiveOutdatedReportingsUTests { + @MockBean + private lateinit var reportingRepository: ReportingRepository + + @Test + fun `execute Should archive outdated reportings`() { + // Given + given(reportingRepository.findUnarchivedReportings()).willReturn( + listOf( + Pair(1, TwelveMilesFishingAlert("NAMO")), + Pair(2, ThreeMilesTrawlingAlert("NAMO")), + Pair(3, MissingFARAlert("NAMO")), + ), + ) + + // When + ArchiveOutdatedReportings(reportingRepository).execute() + + // Then + verify(reportingRepository).archiveReportings(eq(listOf(2, 3))) + } +} diff --git a/backend/src/test/kotlin/fr/gouv/cnsp/monitorfish/domain/use_cases/reporting/GetVesselReportingsUTests.kt b/backend/src/test/kotlin/fr/gouv/cnsp/monitorfish/domain/use_cases/reporting/GetVesselReportingsUTests.kt index 04d48ffc27..82caaccc3b 100644 --- a/backend/src/test/kotlin/fr/gouv/cnsp/monitorfish/domain/use_cases/reporting/GetVesselReportingsUTests.kt +++ b/backend/src/test/kotlin/fr/gouv/cnsp/monitorfish/domain/use_cases/reporting/GetVesselReportingsUTests.kt @@ -413,9 +413,13 @@ class GetVesselReportingsUTests { val infractionSuspicionsSummary = result.summary.infractionSuspicionsSummary assertThat(result.summary.infractionSuspicionsSummary).hasSize(4) assertThat(infractionSuspicionsSummary[0].numberOfOccurrences).isEqualTo(2) - assertThat(infractionSuspicionsSummary[0].title).isEqualTo("12 milles - Pêche sans droits historiques (NATINF 2610)") + assertThat( + 
infractionSuspicionsSummary[0].title, + ).isEqualTo("12 milles - Pêche sans droits historiques (NATINF 2610)") assertThat(infractionSuspicionsSummary[1].numberOfOccurrences).isEqualTo(1) - assertThat(infractionSuspicionsSummary[1].title).isEqualTo("Non-emission de message \"FAR\" en 48h (NATINF 27689)") + assertThat( + infractionSuspicionsSummary[1].title, + ).isEqualTo("Non-emission de message \"FAR\" en 48h (NATINF 27689)") assertThat(infractionSuspicionsSummary[2].numberOfOccurrences).isEqualTo(1) assertThat( infractionSuspicionsSummary[2].title, diff --git a/backend/src/test/kotlin/fr/gouv/cnsp/monitorfish/infrastructure/database/repositories/AbstractDBTests.kt b/backend/src/test/kotlin/fr/gouv/cnsp/monitorfish/infrastructure/database/repositories/AbstractDBTests.kt index 693f6399b9..2a337a522b 100644 --- a/backend/src/test/kotlin/fr/gouv/cnsp/monitorfish/infrastructure/database/repositories/AbstractDBTests.kt +++ b/backend/src/test/kotlin/fr/gouv/cnsp/monitorfish/infrastructure/database/repositories/AbstractDBTests.kt @@ -18,7 +18,9 @@ import java.time.temporal.ChronoUnit @Testcontainers @TestPropertySource("classpath:/application.properties") -@SpringBootTest +@SpringBootTest( + properties = ["monitorfish.scheduling.enabled=false"], +) abstract class AbstractDBTests { @MockBean private lateinit var jwtDecoder: JwtDecoder diff --git a/backend/src/test/kotlin/fr/gouv/cnsp/monitorfish/infrastructure/database/repositories/JpaLogbookReportRepositoryITests.kt b/backend/src/test/kotlin/fr/gouv/cnsp/monitorfish/infrastructure/database/repositories/JpaLogbookReportRepositoryITests.kt index cba717f6f7..eda93c2083 100644 --- a/backend/src/test/kotlin/fr/gouv/cnsp/monitorfish/infrastructure/database/repositories/JpaLogbookReportRepositoryITests.kt +++ b/backend/src/test/kotlin/fr/gouv/cnsp/monitorfish/infrastructure/database/repositories/JpaLogbookReportRepositoryITests.kt @@ -23,7 +23,7 @@ import java.time.ZoneOffset.UTC import java.time.ZonedDateTime @Import(MapperConfiguration::class) -@SpringBootTest(properties = ["monitorfish.scheduling.enable=false"]) +@SpringBootTest class JpaLogbookReportRepositoryITests : AbstractDBTests() { @Autowired private lateinit var jpaLogbookReportRepository: JpaLogbookReportRepository diff --git a/backend/src/test/kotlin/fr/gouv/cnsp/monitorfish/infrastructure/database/repositories/JpaReportingRepositoryITests.kt b/backend/src/test/kotlin/fr/gouv/cnsp/monitorfish/infrastructure/database/repositories/JpaReportingRepositoryITests.kt index fdfcb18852..2066fc9c8e 100644 --- a/backend/src/test/kotlin/fr/gouv/cnsp/monitorfish/infrastructure/database/repositories/JpaReportingRepositoryITests.kt +++ b/backend/src/test/kotlin/fr/gouv/cnsp/monitorfish/infrastructure/database/repositories/JpaReportingRepositoryITests.kt @@ -1,7 +1,9 @@ package fr.gouv.cnsp.monitorfish.infrastructure.database.repositories import com.neovisionaries.i18n.CountryCode +import fr.gouv.cnsp.monitorfish.config.MapperConfiguration import fr.gouv.cnsp.monitorfish.domain.entities.alerts.PendingAlert +import fr.gouv.cnsp.monitorfish.domain.entities.alerts.type.AlertTypeMapping import fr.gouv.cnsp.monitorfish.domain.entities.alerts.type.ThreeMilesTrawlingAlert import fr.gouv.cnsp.monitorfish.domain.entities.reporting.* import fr.gouv.cnsp.monitorfish.domain.entities.reporting.filters.ReportingFilter @@ -9,9 +11,11 @@ import fr.gouv.cnsp.monitorfish.domain.entities.vessel.VesselIdentifier import org.assertj.core.api.Assertions.assertThat import org.junit.jupiter.api.Test import 
org.springframework.beans.factory.annotation.Autowired +import org.springframework.context.annotation.Import import org.springframework.transaction.annotation.Transactional import java.time.ZonedDateTime +@Import(MapperConfiguration::class) class JpaReportingRepositoryITests : AbstractDBTests() { @Autowired private lateinit var jpaReportingRepository: JpaReportingRepository @@ -417,4 +421,26 @@ class JpaReportingRepositoryITests : AbstractDBTests() { assertThat((reporting.value as Observation).title).isEqualTo(updatedReporting.title) assertThat((reporting.value as Observation).description).isEqualTo(updatedReporting.description) } + + @Test + @Transactional + fun `findUnarchivedReportings Should return archive candidates`() { + // When + val reportings = jpaReportingRepository.findUnarchivedReportings() + + // Then + assertThat(reportings).hasSize(1) + assertThat(reportings.first().first).isEqualTo(1) + assertThat(reportings.first().second.type).isEqualTo(AlertTypeMapping.THREE_MILES_TRAWLING_ALERT) + } + + @Test + @Transactional + fun `archiveReportings Should archive reportings`() { + // When + val archivedReportings = jpaReportingRepository.archiveReportings(listOf(1)) + + // Then + assertThat(archivedReportings).isEqualTo(1) + } } diff --git a/backend/src/test/resources/application.properties b/backend/src/test/resources/application.properties index 17498d0085..6b3869d65c 100644 --- a/backend/src/test/resources/application.properties +++ b/backend/src/test/resources/application.properties @@ -2,7 +2,7 @@ spring.jpa.properties.hibernate.jdbc.lob.non_contextual_creation=true spring.jpa.hibernate.ddl-auto=validate server.port=8080 spring.flyway.enabled=true -monitorfish.scheduling.enable=false +monitorfish.scheduling.enabled=false spring.datasource.url=jdbc:tc:postgresql:9.6.8://localhost/testdb spring.datasource.username=user spring.datasource.password=pass diff --git a/infra/configurations/application-local.properties b/infra/configurations/application-local.properties index 72d74c0001..5318de59c6 100644 --- a/infra/configurations/application-local.properties +++ b/infra/configurations/application-local.properties @@ -24,6 +24,8 @@ monitorfish.api.protected.super-user-paths=/bff/v1/beacon_malfunctions,/bff/v1/m monitorfish.api.protected.public-paths=/api/v1/authorization/management/*,/api/v1/beacon_malfunctions/*,/api/v1/mission_actions/* monitorfish.api.protected.api-key=DUMMY-API-KEY +monitorfish.scheduling.enabled=true + ################### # Database settings diff --git a/infra/docker/docker-compose.cypress.yml b/infra/docker/docker-compose.cypress.yml index b0c84d27fb..e1f6bda3df 100644 --- a/infra/docker/docker-compose.cypress.yml +++ b/infra/docker/docker-compose.cypress.yml @@ -42,6 +42,7 @@ services: - FRONTEND_OIDC_CLIENT_ID=monitorfish - MONITORFISH_OIDC_ENABLED=false - FRONTEND_OIDC_ENABLED=false + - MONITORFISH_SCHEDULING_ENABLED=false - FRONTEND_OIDC_REDIRECT_URI=https://monitorfish.din.developpement-durable.gouv.fr - FRONTEND_OIDC_LOGOUT_REDIRECT_URI=https://monitorfish.din.developpement-durable.gouv.fr - FRONTEND_MONITORFISH_VERSION= diff --git a/infra/docker/docker-compose.puppeteer.yml b/infra/docker/docker-compose.puppeteer.yml index 13414f13ea..885b2138c9 100644 --- a/infra/docker/docker-compose.puppeteer.yml +++ b/infra/docker/docker-compose.puppeteer.yml @@ -57,6 +57,7 @@ services: - FRONTEND_OIDC_AUTHORITY=https://authentification.recette.din.developpement-durable.gouv.fr/authSAML/oidc/monitorfish - FRONTEND_OIDC_CLIENT_ID=monitorfish - 
MONITORFISH_OIDC_ENABLED=false + - MONITORFISH_SCHEDULING_ENABLED=false - FRONTEND_OIDC_ENABLED=false - FRONTEND_OIDC_REDIRECT_URI=https://monitorfish.din.developpement-durable.gouv.fr - FRONTEND_OIDC_LOGOUT_REDIRECT_URI=https://monitorfish.din.developpement-durable.gouv.fr diff --git a/infra/remote/docker-compose.yml b/infra/remote/docker-compose.yml index 7a43bb1dbb..c01292c85b 100644 --- a/infra/remote/docker-compose.yml +++ b/infra/remote/docker-compose.yml @@ -41,6 +41,7 @@ services: - MONITORFISH_API_PROTECTED_API_KEY=$MONITORFISH_API_PROTECTED_API_KEY - MONITORFISH_API_PROTECTED_PATHS=$MONITORFISH_API_PROTECTED_PATHS - MONITORFISH_API_PROTECTED_PUBLIC_PATHS=$MONITORFISH_API_PROTECTED_PUBLIC_PATHS + - MONITORFISH_SCHEDULING_ENABLED=$MONITORFISH_SCHEDULING_ENABLED - FRONTEND_SENTRY_ENV=$MONITORFISH_SENTRY_ENV - SENTRY_DSN=$MONITORFISH_SENTRY_DSN - FRONTEND_SENTRY_DSN=$MONITORFISH_SENTRY_DSN
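
Note on the archiving schedule declared in ArchiveOutdatedReportings: fixedDelay = 300000 ms means one run every 5 minutes, and initialDelay = 6000 ms means a 6-second delay after startup before the first run. A minimal equivalent sketch using Spring's duration-string attributes (assuming Spring 5.3+, which accepts ISO-8601 durations in fixedDelayString/initialDelayString):

    import org.springframework.scheduling.annotation.Scheduled
    import org.springframework.transaction.annotation.Transactional

    // Same schedule as @Scheduled(fixedDelay = 300000, initialDelay = 6000), written as durations
    @Scheduled(fixedDelayString = "PT5M", initialDelayString = "PT6S")
    @Transactional
    fun execute() { /* unchanged body */ }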
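Note on the monitorfish.scheduling.enabled flag: SchedulingConfig is guarded by @ConditionalOnProperty with matchIfMissing = true, so scheduling stays active whenever the property is absent, which is why the test properties, AbstractDBTests and the Cypress/Puppeteer compose files all pin it to false. A minimal sketch of an integration test asserting that the configuration is not loaded when the flag is off (hypothetical test class, assuming it sits next to the other repository ITests and inherits the @SpringBootTest setup from AbstractDBTests introduced above):

    import fr.gouv.cnsp.monitorfish.config.SchedulingConfig
    import org.assertj.core.api.Assertions.assertThat
    import org.junit.jupiter.api.Test
    import org.springframework.beans.factory.annotation.Autowired
    import org.springframework.context.ApplicationContext

    class SchedulingDisabledITests : AbstractDBTests() { // hypothetical test class
        @Autowired
        private lateinit var context: ApplicationContext

        @Test
        fun `Should not load SchedulingConfig When scheduling is disabled`() {
            // AbstractDBTests sets monitorfish.scheduling.enabled=false, so the
            // @ConditionalOnProperty(havingValue = "true") guard skips the configuration bean
            assertThat(context.getBeanNamesForType(SchedulingConfig::class.java)).isEmpty()
        }
    }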