From e09d07ab5d17164b8db70ef7b229321b38f543ca Mon Sep 17 00:00:00 2001 From: Loup Theron Date: Tue, 17 Sep 2024 15:20:24 +0200 Subject: [PATCH 1/3] wip: Add use-case and query --- Makefile | 4 ++ .../monitorfish/config/SchedulingConfig.kt | 2 +- .../repositories/ReportingRepository.kt | 6 ++ .../reporting/ArchiveOutdatedReportings.kt | 32 +++++++++ .../repositories/JpaReportingRepository.kt | 14 ++++ .../interfaces/DBReportingRepository.kt | 65 +++++++++++++++++++ .../src/main/resources/application.properties | 3 + .../V666.19.0__Insert_dummy_reportings.sql | 2 +- ...6.5.1__Insert_more_pno_logbook_reports.sql | 8 +++ ...5.1__Insert_more_pno_logbook_reports.jsonc | 46 ++++++++++++- .../database/repositories/AbstractDBTests.kt | 4 +- .../JpaLogbookReportRepositoryITests.kt | 2 +- .../JpaReportingRepositoryITests.kt | 13 ++++ .../src/test/resources/application.properties | 2 +- .../application-local.properties | 2 + infra/remote/docker-compose.yml | 1 + 16 files changed, 200 insertions(+), 6 deletions(-) create mode 100644 backend/src/main/kotlin/fr/gouv/cnsp/monitorfish/domain/use_cases/reporting/ArchiveOutdatedReportings.kt diff --git a/Makefile b/Makefile index 6a68e43b6d..fc60361ee1 100644 --- a/Makefile +++ b/Makefile @@ -52,6 +52,10 @@ clean: docker-env docker compose --env-file ./infra/docker/.env -f ./infra/docker/docker-compose.cypress.yml down -v docker compose -f ./infra/docker/docker-compose.puppeteer.yml down -v +.PHONY: generate-test-data ##LOCAL Generate test data (SQL files from .jsonc) +generate-test-data: + cd frontend && npm run generate:testdata + compile-back: cd backend && ./gradlew assemble diff --git a/backend/src/main/kotlin/fr/gouv/cnsp/monitorfish/config/SchedulingConfig.kt b/backend/src/main/kotlin/fr/gouv/cnsp/monitorfish/config/SchedulingConfig.kt index 9f4f2e1101..6f6395142f 100644 --- a/backend/src/main/kotlin/fr/gouv/cnsp/monitorfish/config/SchedulingConfig.kt +++ b/backend/src/main/kotlin/fr/gouv/cnsp/monitorfish/config/SchedulingConfig.kt @@ -5,7 +5,7 @@ import org.springframework.context.annotation.Configuration import org.springframework.scheduling.annotation.EnableScheduling @ConditionalOnProperty( - value = ["monitorfish.scheduling.enable"], + value = ["monitorfish.scheduling.enabled"], havingValue = "true", matchIfMissing = true, ) diff --git a/backend/src/main/kotlin/fr/gouv/cnsp/monitorfish/domain/repositories/ReportingRepository.kt b/backend/src/main/kotlin/fr/gouv/cnsp/monitorfish/domain/repositories/ReportingRepository.kt index 74f13220dd..bc7e24afd3 100644 --- a/backend/src/main/kotlin/fr/gouv/cnsp/monitorfish/domain/repositories/ReportingRepository.kt +++ b/backend/src/main/kotlin/fr/gouv/cnsp/monitorfish/domain/repositories/ReportingRepository.kt @@ -1,9 +1,11 @@ package fr.gouv.cnsp.monitorfish.domain.repositories import fr.gouv.cnsp.monitorfish.domain.entities.alerts.PendingAlert +import fr.gouv.cnsp.monitorfish.domain.entities.alerts.type.AlertType import fr.gouv.cnsp.monitorfish.domain.entities.reporting.InfractionSuspicion import fr.gouv.cnsp.monitorfish.domain.entities.reporting.Observation import fr.gouv.cnsp.monitorfish.domain.entities.reporting.Reporting +import fr.gouv.cnsp.monitorfish.domain.entities.reporting.ReportingValue import fr.gouv.cnsp.monitorfish.domain.entities.reporting.filters.ReportingFilter import fr.gouv.cnsp.monitorfish.domain.entities.vessel.VesselIdentifier import java.time.ZonedDateTime @@ -48,7 +50,11 @@ interface ReportingRepository { fromDate: ZonedDateTime, ): List + fun 
findUnarchivedReportingsAfterNewVesselTrip(): List<Pair<Int, AlertType>>
+
     fun archive(id: Int)
 
+    fun archiveReportings(ids: List<Int>): Int
+
     fun delete(id: Int)
 }
diff --git a/backend/src/main/kotlin/fr/gouv/cnsp/monitorfish/domain/use_cases/reporting/ArchiveOutdatedReportings.kt b/backend/src/main/kotlin/fr/gouv/cnsp/monitorfish/domain/use_cases/reporting/ArchiveOutdatedReportings.kt
new file mode 100644
index 0000000000..a4f2381dfb
--- /dev/null
+++ b/backend/src/main/kotlin/fr/gouv/cnsp/monitorfish/domain/use_cases/reporting/ArchiveOutdatedReportings.kt
@@ -0,0 +1,32 @@
+package fr.gouv.cnsp.monitorfish.domain.use_cases.reporting
+
+import fr.gouv.cnsp.monitorfish.config.UseCase
+import fr.gouv.cnsp.monitorfish.domain.entities.alerts.type.AlertTypeMapping
+import fr.gouv.cnsp.monitorfish.domain.repositories.ReportingRepository
+import org.slf4j.LoggerFactory
+import org.springframework.scheduling.annotation.Scheduled
+import org.springframework.transaction.annotation.Transactional
+
+// TODO TEST
+
+@UseCase
+class ArchiveOutdatedReportings(private val reportingRepository: ReportingRepository) {
+    private val logger = LoggerFactory.getLogger(ArchiveOutdatedReportings::class.java)
+
+    // Every 5 minutes, after an initial delay of 6 seconds
+    @Scheduled(fixedDelay = 300000, initialDelay = 6000)
+    @Transactional
+    fun execute() {
+        val reportingCandidatesToArchive = reportingRepository.findUnarchivedReportingsAfterNewVesselTrip()
+
+        val filteredReportingIdsToArchive = reportingCandidatesToArchive.filter {
+            it.second.type == AlertTypeMapping.MISSING_FAR_ALERT ||
+                it.second.type == AlertTypeMapping.THREE_MILES_TRAWLING_ALERT
+        }.map { it.first }
+
+        logger.info("Found ${filteredReportingIdsToArchive.size} reportings to archive.")
+        //val numberOfArchivedReportings = reportingRepository.archiveReportings(filteredReportingIdsToArchive)
+
+        //logger.info("Archived $numberOfArchivedReportings reportings")
+    }
+}
diff --git a/backend/src/main/kotlin/fr/gouv/cnsp/monitorfish/infrastructure/database/repositories/JpaReportingRepository.kt b/backend/src/main/kotlin/fr/gouv/cnsp/monitorfish/infrastructure/database/repositories/JpaReportingRepository.kt
index 87ec143174..b82b4777ef 100644
--- a/backend/src/main/kotlin/fr/gouv/cnsp/monitorfish/infrastructure/database/repositories/JpaReportingRepository.kt
+++ b/backend/src/main/kotlin/fr/gouv/cnsp/monitorfish/infrastructure/database/repositories/JpaReportingRepository.kt
@@ -2,12 +2,14 @@ package fr.gouv.cnsp.monitorfish.infrastructure.database.repositories
 
 import com.fasterxml.jackson.databind.ObjectMapper
 import fr.gouv.cnsp.monitorfish.domain.entities.alerts.PendingAlert
+import fr.gouv.cnsp.monitorfish.domain.entities.alerts.type.AlertType
 import fr.gouv.cnsp.monitorfish.domain.entities.reporting.InfractionSuspicion
 import fr.gouv.cnsp.monitorfish.domain.entities.reporting.Observation
 import fr.gouv.cnsp.monitorfish.domain.entities.reporting.Reporting
 import fr.gouv.cnsp.monitorfish.domain.entities.reporting.ReportingType
 import fr.gouv.cnsp.monitorfish.domain.entities.reporting.filters.ReportingFilter
 import fr.gouv.cnsp.monitorfish.domain.entities.vessel.VesselIdentifier
+import fr.gouv.cnsp.monitorfish.domain.mappers.ReportingMapper
 import fr.gouv.cnsp.monitorfish.domain.repositories.ReportingRepository
 import fr.gouv.cnsp.monitorfish.infrastructure.database.entities.ReportingEntity
 import fr.gouv.cnsp.monitorfish.infrastructure.database.repositories.interfaces.DBReportingRepository
@@ -166,6 +168,18 @@ class JpaReportingRepository(
         dbReportingRepository.archiveReporting(id)
     }
 
+    override fun findUnarchivedReportingsAfterNewVesselTrip(): List<Pair<Int, AlertType>> {
+        return dbReportingRepository.findAllUnarchivedAfterDEPLogbookMessage().map { result ->
+            Pair(
+                result[0] as Int,
+                ReportingMapper.getReportingValueFromJSON(mapper, result[1] as String?, ReportingType.ALERT) as AlertType)
+        }
+    }
+
+    override fun archiveReportings(ids: List<Int>): Int {
+        return dbReportingRepository.archiveReportings(ids)
+    }
+
     @Transactional
     override fun delete(id: Int) {
         dbReportingRepository.deleteReporting(id)
diff --git a/backend/src/main/kotlin/fr/gouv/cnsp/monitorfish/infrastructure/database/repositories/interfaces/DBReportingRepository.kt b/backend/src/main/kotlin/fr/gouv/cnsp/monitorfish/infrastructure/database/repositories/interfaces/DBReportingRepository.kt
index 788d2850fe..065561a372 100644
--- a/backend/src/main/kotlin/fr/gouv/cnsp/monitorfish/infrastructure/database/repositories/interfaces/DBReportingRepository.kt
+++ b/backend/src/main/kotlin/fr/gouv/cnsp/monitorfish/infrastructure/database/repositories/interfaces/DBReportingRepository.kt
@@ -75,6 +75,71 @@ interface DBReportingRepository : CrudRepository<ReportingEntity, Int> {
     )
     fun archiveReporting(id: Int)
 
+    /**
+     * Search for unarchived reportings (created at most 1 hour ago) after vessels have started a new trip.
+     * (a DEP logbook message is received after the reporting validation_date)
+     */
+    @Query(
+        value = """
+        WITH recent_dep_messages AS (
+            SELECT lr.cfr, lr.ircs, lr.external_identification, lr.operation_number, MAX(lr.operation_datetime_utc) as last_dep_date_time
+            FROM logbook_reports lr
+            WHERE
+                lr.operation_datetime_utc > NOW() - INTERVAL '1 day' AND
+                lr.log_type = 'DEP'
+            GROUP BY lr.cfr, lr.ircs, lr.external_identification, lr.operation_number
+        ),
+
+        acknowledged_report_ids AS (
+            SELECT DISTINCT referenced_report_id
+            FROM logbook_reports lr
+            WHERE
+                lr.operation_datetime_utc > NOW() - INTERVAL '1 day' AND
+                lr.operation_type = 'RET' AND
+                lr.value->>'returnStatus' = '000'
+        )
+
+        SELECT
+            r.id as id,
+            r.value as value
+        FROM
+            reportings r
+        INNER JOIN
+            (select * from recent_dep_messages) rdp
+        ON CASE
+            WHEN r.vessel_identifier = 'INTERNAL_REFERENCE_NUMBER' THEN r.internal_reference_number = rdp.cfr
+            WHEN r.vessel_identifier = 'IRCS' THEN r.ircs = rdp.ircs
+            WHEN r.vessel_identifier = 'EXTERNAL_REFERENCE_NUMBER' THEN r.external_reference_number = rdp.external_identification
+        END
+
+        WHERE
+            r.archived is false AND
+            r.deleted is false AND
+            rdp.last_dep_date_time >= r.validation_date
+
+        """,
+        nativeQuery = true,
+    )
+    fun findAllUnarchivedAfterDEPLogbookMessage(): List<Array<Any>>
+    /*
+
+    rdp.operation_number IN (SELECT referenced_report_id FROM acknowledged_report_ids)
+    */
+
+    @Modifying(clearAutomatically = true, flushAutomatically = true)
+    @Query(
+        value = """
+        UPDATE
+            reportings
+        SET
+            archived = TRUE
+        WHERE
+            id IN (:ids)
+        """,
+        nativeQuery = true,
+    )
+    fun archiveReportings(ids: List<Int>): Int
+
     @Modifying(clearAutomatically = true)
     @Query(
         value = """
diff --git a/backend/src/main/resources/application.properties b/backend/src/main/resources/application.properties
index 44cad74f60..cb99273b70 100644
--- a/backend/src/main/resources/application.properties
+++ b/backend/src/main/resources/application.properties
@@ -31,6 +31,9 @@ monitorfish.oidc.enabled=${monitorfish.oidc.enabled}
 monitorfish.oidc.issuer-uri=${monitorfish.oidc.issuer-uri}
 monitorfish.oidc.userinfo-endpoint=${monitorfish.oidc.userinfo-endpoint}
 
+# Scheduling
+monitorfish.scheduling.enabled=${monitorfish.scheduling.enabled} + # Whether response compression is enabled. server.compression.enabled=true diff --git a/backend/src/main/resources/db/testdata/V666.19.0__Insert_dummy_reportings.sql b/backend/src/main/resources/db/testdata/V666.19.0__Insert_dummy_reportings.sql index 60a115abe8..8167719ec6 100644 --- a/backend/src/main/resources/db/testdata/V666.19.0__Insert_dummy_reportings.sql +++ b/backend/src/main/resources/db/testdata/V666.19.0__Insert_dummy_reportings.sql @@ -2,7 +2,7 @@ TRUNCATE TABLE reportings RESTART IDENTITY CASCADE; INSERT INTO reportings (type, vessel_name, internal_reference_number, external_reference_number, ircs, vessel_identifier, flag_state, creation_date, validation_date, archived, deleted, value, latitude, longitude, vessel_id) VALUES ('ALERT', 'MARIAGE ÎLE HASARD', 'ABC000180832', 'VP374069', 'CG1312', 'INTERNAL_REFERENCE_NUMBER', 'FR', - NOW() - ('1 DAY')::interval, NOW(), false, false, ('{' || + NOW() - ('1 DAY')::interval, NOW() - ('30 MINUTES')::interval, false, false, ('{' || '"seaFront": "NAMO",' || '"riskFactor": 3.5647,' || '"type": "THREE_MILES_TRAWLING_ALERT",' || diff --git a/backend/src/main/resources/db/testdata/V666.5.1__Insert_more_pno_logbook_reports.sql b/backend/src/main/resources/db/testdata/V666.5.1__Insert_more_pno_logbook_reports.sql index efb93c52c5..759d9e1d70 100644 --- a/backend/src/main/resources/db/testdata/V666.5.1__Insert_more_pno_logbook_reports.sql +++ b/backend/src/main/resources/db/testdata/V666.5.1__Insert_more_pno_logbook_reports.sql @@ -87,8 +87,12 @@ INSERT INTO logbook_raw_messages (operation_number, xml_message) VALUES ('FAKE_O INSERT INTO logbook_raw_messages (operation_number, xml_message) VALUES ('FAKE_OPERATION_120', 'Message FLUX xml'); +INSERT INTO logbook_raw_messages (operation_number, xml_message) VALUES ('FAKE_OPERATION_121', 'Message FLUX xml'); + INSERT INTO logbook_raw_messages (operation_number, xml_message) VALUES ('FAKE_OPERATION_120_RET', 'Message FLUX xml'); +INSERT INTO logbook_raw_messages (operation_number, xml_message) VALUES ('FAKE_OPERATION_121_RET', 'Message FLUX xml'); + INSERT INTO logbook_reports (id, report_id, referenced_report_id, cfr, enriched, flag_state, integration_datetime_utc, log_type, operation_datetime_utc, operation_number, operation_type, report_datetime_utc, software, transmission_format, vessel_name, trip_gears, trip_segments, value) VALUES (101, 'FAKE_OPERATION_101', NULL, 'FAK000999999', true, 'FRA', NOW() AT TIME ZONE 'UTC' - INTERVAL '15 minutes', 'PNO', NOW() AT TIME ZONE 'UTC' - INTERVAL '15 minutes', 'FAKE_OPERATION_101', 'DAT', NOW() AT TIME ZONE 'UTC' - INTERVAL '15 minutes', 'JT/VISIOCaptures V1.4.7', 'ERS', 'PHENOMENE', '[{"gear":"TBN","mesh":100,"dimensions":"250;180"},{"gear":"OTT","mesh":120.5,"dimensions":"250;280"}]', '[{"segment":"SWW04","segmentName":"Chaluts pélagiques"},{"segment":"SWW06","segmentName":"Sennes"}]', '{"riskFactor":2.1,"catchOnboard":[{"weight":25,"nbFish":null,"species":"COD","faoZone":"27.8.a","effortZone":"C","economicZone":"FRA","statisticalRectangle":"23E6"}],"isBeingSent":false,"isInVerificationScope":false,"isSent":false,"isVerified":false,"pnoTypes":[{"pnoTypeName":"Préavis type A","minimumNotificationPeriod":4,"hasDesignatedPorts":false},{"pnoTypeName":"Préavis type B","minimumNotificationPeriod":8,"hasDesignatedPorts":true}],"port":"FRSML","predictedArrivalDatetimeUtc":null,"predictedLandingDatetimeUtc":null,"purpose":"LAN","tripStartDate":null}'); UPDATE logbook_reports SET value = 
JSONB_SET(value, '{predictedArrivalDatetimeUtc}', TO_JSONB(TO_CHAR(NOW() AT TIME ZONE 'UTC' + INTERVAL '3 hours', 'YYYY-MM-DD"T"HH24:MI:SS"Z"')), true) WHERE id = 101; UPDATE logbook_reports SET value = JSONB_SET(value, '{predictedLandingDatetimeUtc}', TO_JSONB(TO_CHAR(NOW() AT TIME ZONE 'UTC' + INTERVAL '3.5 hours', 'YYYY-MM-DD"T"HH24:MI:SS"Z"')), true) WHERE id = 101; @@ -233,3 +237,7 @@ UPDATE logbook_reports SET value = JSONB_SET(value, '{predictedLandingDatetimeUt UPDATE logbook_reports SET value = JSONB_SET(value, '{tripStartDate}', TO_JSONB(TO_CHAR(NOW() AT TIME ZONE 'UTC' - INTERVAL '10 hours', 'YYYY-MM-DD"T"HH24:MI:SS"Z"')), true) WHERE id = 120; INSERT INTO logbook_reports (id, report_id, referenced_report_id, integration_datetime_utc, operation_datetime_utc, operation_number, operation_type, transmission_format, value) VALUES (1120, NULL, 'FAKE_OPERATION_120', NOW() AT TIME ZONE 'UTC' - INTERVAL '14 minutes', NOW() AT TIME ZONE 'UTC' - INTERVAL '14 minutes', 'FAKE_OPERATION_120_RET', 'RET', 'ERS', '{"returnStatus":"000"}'); + +INSERT INTO logbook_reports (id, report_id, referenced_report_id, cfr, enriched, flag_state, integration_datetime_utc, log_type, operation_datetime_utc, operation_number, operation_type, report_datetime_utc, software, transmission_format, trip_gears, trip_segments, vessel_name, value) VALUES (121, 'FAKE_OPERATION_121', NULL, 'ABC000180832', true, 'FRA', NOW() AT TIME ZONE 'UTC' - INTERVAL '15 minutes', 'DEP', NOW() AT TIME ZONE 'UTC' - INTERVAL '15 minutes', 'FAKE_OPERATION_121', 'DAT', NOW() AT TIME ZONE 'UTC' - INTERVAL '15 minutes', 'TurboCatch (3.7-1)', 'ERS', NULL, NULL, 'MARIAGE ÎLE HASARD', '{"gearOnboard":[{"gear":"GTR","mesh":100}],"departurePort":"AEJAZ","anticipatedActivity":"FSH","tripStartDate":"NOW() AT TIME ZONE ''UTC'' - INTERVAL ''15 minutes''","departureDatetimeUtc":"NOW() AT TIME ZONE ''UTC'' - INTERVAL ''15 minutes''"}'); + +INSERT INTO logbook_reports (id, report_id, referenced_report_id, integration_datetime_utc, operation_datetime_utc, operation_number, operation_type, transmission_format, value) VALUES (1120, NULL, 'FAKE_OPERATION_121', NOW() AT TIME ZONE 'UTC' - INTERVAL '14 minutes', NOW() AT TIME ZONE 'UTC' - INTERVAL '14 minutes', 'FAKE_OPERATION_121_RET', 'RET', 'ERS', '{"returnStatus":"000"}'); diff --git a/backend/src/main/resources/db/testdata/json/V666.5.1__Insert_more_pno_logbook_reports.jsonc b/backend/src/main/resources/db/testdata/json/V666.5.1__Insert_more_pno_logbook_reports.jsonc index a069e7a8c8..aefed329ac 100644 --- a/backend/src/main/resources/db/testdata/json/V666.5.1__Insert_more_pno_logbook_reports.jsonc +++ b/backend/src/main/resources/db/testdata/json/V666.5.1__Insert_more_pno_logbook_reports.jsonc @@ -45,7 +45,9 @@ { "operation_number": "FAKE_OPERATION_119", "xml_message": "Message FLUX xml" }, { "operation_number": "FAKE_OPERATION_119_RET", "xml_message": "Message FLUX xml" }, { "operation_number": "FAKE_OPERATION_120", "xml_message": "Message FLUX xml" }, - { "operation_number": "FAKE_OPERATION_120_RET", "xml_message": "Message FLUX xml" } + { "operation_number": "FAKE_OPERATION_121", "xml_message": "Message FLUX xml" }, + { "operation_number": "FAKE_OPERATION_120_RET", "xml_message": "Message FLUX xml" }, + { "operation_number": "FAKE_OPERATION_121_RET", "xml_message": "Message FLUX xml" } ] }, { @@ -1503,6 +1505,48 @@ "value:jsonb": { "returnStatus": "000" } + }, + + // - Vessel: MARIAGE ÎLE HASARD + // - Flag state: FR + { + "id": 121, + "report_id": "FAKE_OPERATION_121", + "referenced_report_id": 
null, + "cfr": "ABC000180832", + "enriched": true, + "flag_state": "FRA", + "integration_datetime_utc:sql": "NOW() AT TIME ZONE 'UTC' - INTERVAL '15 minutes'", + "log_type": "DEP", + "operation_datetime_utc:sql": "NOW() AT TIME ZONE 'UTC' - INTERVAL '15 minutes'", + "operation_number": "FAKE_OPERATION_121", + "operation_type": "DAT", + "report_datetime_utc:sql": "NOW() AT TIME ZONE 'UTC' - INTERVAL '15 minutes'", + "software": "TurboCatch (3.7-1)", + "transmission_format": "ERS", + "trip_gears": null, + "trip_segments": null, + "vessel_name": "MARIAGE ÎLE HASARD", + "value:jsonb": { + "gearOnboard": [{"gear": "GTR", "mesh": 100.0}], + "departurePort": "AEJAZ", + "anticipatedActivity": "FSH", + "tripStartDate": "NOW() AT TIME ZONE 'UTC' - INTERVAL '15 minutes'", + "departureDatetimeUtc": "NOW() AT TIME ZONE 'UTC' - INTERVAL '15 minutes'" + } + }, + { + "id": 1120, + "report_id": null, + "referenced_report_id": "FAKE_OPERATION_121", + "integration_datetime_utc:sql": "NOW() AT TIME ZONE 'UTC' - INTERVAL '14 minutes'", + "operation_datetime_utc:sql": "NOW() AT TIME ZONE 'UTC' - INTERVAL '14 minutes'", + "operation_number": "FAKE_OPERATION_121_RET", + "operation_type": "RET", + "transmission_format": "ERS", + "value:jsonb": { + "returnStatus": "000" + } } ] } diff --git a/backend/src/test/kotlin/fr/gouv/cnsp/monitorfish/infrastructure/database/repositories/AbstractDBTests.kt b/backend/src/test/kotlin/fr/gouv/cnsp/monitorfish/infrastructure/database/repositories/AbstractDBTests.kt index 693f6399b9..2a337a522b 100644 --- a/backend/src/test/kotlin/fr/gouv/cnsp/monitorfish/infrastructure/database/repositories/AbstractDBTests.kt +++ b/backend/src/test/kotlin/fr/gouv/cnsp/monitorfish/infrastructure/database/repositories/AbstractDBTests.kt @@ -18,7 +18,9 @@ import java.time.temporal.ChronoUnit @Testcontainers @TestPropertySource("classpath:/application.properties") -@SpringBootTest +@SpringBootTest( + properties = ["monitorfish.scheduling.enabled=false"], +) abstract class AbstractDBTests { @MockBean private lateinit var jwtDecoder: JwtDecoder diff --git a/backend/src/test/kotlin/fr/gouv/cnsp/monitorfish/infrastructure/database/repositories/JpaLogbookReportRepositoryITests.kt b/backend/src/test/kotlin/fr/gouv/cnsp/monitorfish/infrastructure/database/repositories/JpaLogbookReportRepositoryITests.kt index cba717f6f7..eda93c2083 100644 --- a/backend/src/test/kotlin/fr/gouv/cnsp/monitorfish/infrastructure/database/repositories/JpaLogbookReportRepositoryITests.kt +++ b/backend/src/test/kotlin/fr/gouv/cnsp/monitorfish/infrastructure/database/repositories/JpaLogbookReportRepositoryITests.kt @@ -23,7 +23,7 @@ import java.time.ZoneOffset.UTC import java.time.ZonedDateTime @Import(MapperConfiguration::class) -@SpringBootTest(properties = ["monitorfish.scheduling.enable=false"]) +@SpringBootTest class JpaLogbookReportRepositoryITests : AbstractDBTests() { @Autowired private lateinit var jpaLogbookReportRepository: JpaLogbookReportRepository diff --git a/backend/src/test/kotlin/fr/gouv/cnsp/monitorfish/infrastructure/database/repositories/JpaReportingRepositoryITests.kt b/backend/src/test/kotlin/fr/gouv/cnsp/monitorfish/infrastructure/database/repositories/JpaReportingRepositoryITests.kt index fdfcb18852..f54dd0aac0 100644 --- a/backend/src/test/kotlin/fr/gouv/cnsp/monitorfish/infrastructure/database/repositories/JpaReportingRepositoryITests.kt +++ b/backend/src/test/kotlin/fr/gouv/cnsp/monitorfish/infrastructure/database/repositories/JpaReportingRepositoryITests.kt @@ -1,6 +1,7 @@ package 
fr.gouv.cnsp.monitorfish.infrastructure.database.repositories import com.neovisionaries.i18n.CountryCode +import fr.gouv.cnsp.monitorfish.config.MapperConfiguration import fr.gouv.cnsp.monitorfish.domain.entities.alerts.PendingAlert import fr.gouv.cnsp.monitorfish.domain.entities.alerts.type.ThreeMilesTrawlingAlert import fr.gouv.cnsp.monitorfish.domain.entities.reporting.* @@ -9,9 +10,11 @@ import fr.gouv.cnsp.monitorfish.domain.entities.vessel.VesselIdentifier import org.assertj.core.api.Assertions.assertThat import org.junit.jupiter.api.Test import org.springframework.beans.factory.annotation.Autowired +import org.springframework.context.annotation.Import import org.springframework.transaction.annotation.Transactional import java.time.ZonedDateTime +@Import(MapperConfiguration::class) class JpaReportingRepositoryITests : AbstractDBTests() { @Autowired private lateinit var jpaReportingRepository: JpaReportingRepository @@ -417,4 +420,14 @@ class JpaReportingRepositoryITests : AbstractDBTests() { assertThat((reporting.value as Observation).title).isEqualTo(updatedReporting.title) assertThat((reporting.value as Observation).description).isEqualTo(updatedReporting.description) } + + @Test + @Transactional + fun `findReportingUnarchivedAfterDEPLogbookMessage Should return archive candidates`() { + // When + val reportings = jpaReportingRepository.findUnarchivedReportingsAfterNewVesselTrip() + + // Then + assertThat(reportings).hasSize(1) + } } diff --git a/backend/src/test/resources/application.properties b/backend/src/test/resources/application.properties index 17498d0085..6b3869d65c 100644 --- a/backend/src/test/resources/application.properties +++ b/backend/src/test/resources/application.properties @@ -2,7 +2,7 @@ spring.jpa.properties.hibernate.jdbc.lob.non_contextual_creation=true spring.jpa.hibernate.ddl-auto=validate server.port=8080 spring.flyway.enabled=true -monitorfish.scheduling.enable=false +monitorfish.scheduling.enabled=false spring.datasource.url=jdbc:tc:postgresql:9.6.8://localhost/testdb spring.datasource.username=user spring.datasource.password=pass diff --git a/infra/configurations/application-local.properties b/infra/configurations/application-local.properties index 72d74c0001..5318de59c6 100644 --- a/infra/configurations/application-local.properties +++ b/infra/configurations/application-local.properties @@ -24,6 +24,8 @@ monitorfish.api.protected.super-user-paths=/bff/v1/beacon_malfunctions,/bff/v1/m monitorfish.api.protected.public-paths=/api/v1/authorization/management/*,/api/v1/beacon_malfunctions/*,/api/v1/mission_actions/* monitorfish.api.protected.api-key=DUMMY-API-KEY +monitorfish.scheduling.enabled=true + ################### # Database settings diff --git a/infra/remote/docker-compose.yml b/infra/remote/docker-compose.yml index 7a43bb1dbb..c01292c85b 100644 --- a/infra/remote/docker-compose.yml +++ b/infra/remote/docker-compose.yml @@ -41,6 +41,7 @@ services: - MONITORFISH_API_PROTECTED_API_KEY=$MONITORFISH_API_PROTECTED_API_KEY - MONITORFISH_API_PROTECTED_PATHS=$MONITORFISH_API_PROTECTED_PATHS - MONITORFISH_API_PROTECTED_PUBLIC_PATHS=$MONITORFISH_API_PROTECTED_PUBLIC_PATHS + - MONITORFISH_SCHEDULING_ENABLED=$MONITORFISH_SCHEDULING_ENABLED - FRONTEND_SENTRY_ENV=$MONITORFISH_SENTRY_ENV - SENTRY_DSN=$MONITORFISH_SENTRY_DSN - FRONTEND_SENTRY_DSN=$MONITORFISH_SENTRY_DSN From 5cdb5ff7ef9dc2fe2ea2e86540b7bb75716a267f Mon Sep 17 00:00:00 2001 From: Loup Theron Date: Tue, 17 Sep 2024 16:33:58 +0200 Subject: [PATCH 2/3] Finalize scheduled archive reportings --- 
.../repositories/ReportingRepository.kt | 3 +-
 .../reporting/ArchiveOutdatedReportings.kt | 17 ++++-----
 .../reporting/GetVesselReportings.kt | 26 +++++++------
 .../repositories/JpaReportingRepository.kt | 9 ++++-
 .../interfaces/DBReportingRepository.kt | 12 ++----
 ...6.5.1__Insert_more_pno_logbook_reports.sql | 4 +-
 ...5.1__Insert_more_pno_logbook_reports.jsonc | 14 +++----
 .../ArchiveOutdatedReportingsUTests.kt | 37 +++++++++++++++++++
 .../reporting/GetVesselReportingsUTests.kt | 8 +++-
 .../JpaReportingRepositoryITests.kt | 17 ++++++++-
 10 files changed, 102 insertions(+), 45 deletions(-)
 create mode 100644 backend/src/test/kotlin/fr/gouv/cnsp/monitorfish/domain/use_cases/reporting/ArchiveOutdatedReportingsUTests.kt

diff --git a/backend/src/main/kotlin/fr/gouv/cnsp/monitorfish/domain/repositories/ReportingRepository.kt b/backend/src/main/kotlin/fr/gouv/cnsp/monitorfish/domain/repositories/ReportingRepository.kt
index bc7e24afd3..a617966aa5 100644
--- a/backend/src/main/kotlin/fr/gouv/cnsp/monitorfish/domain/repositories/ReportingRepository.kt
+++ b/backend/src/main/kotlin/fr/gouv/cnsp/monitorfish/domain/repositories/ReportingRepository.kt
@@ -5,7 +5,6 @@ import fr.gouv.cnsp.monitorfish.domain.entities.alerts.type.AlertType
 import fr.gouv.cnsp.monitorfish.domain.entities.reporting.InfractionSuspicion
 import fr.gouv.cnsp.monitorfish.domain.entities.reporting.Observation
 import fr.gouv.cnsp.monitorfish.domain.entities.reporting.Reporting
-import fr.gouv.cnsp.monitorfish.domain.entities.reporting.ReportingValue
 import fr.gouv.cnsp.monitorfish.domain.entities.reporting.filters.ReportingFilter
 import fr.gouv.cnsp.monitorfish.domain.entities.vessel.VesselIdentifier
 import java.time.ZonedDateTime
@@ -50,7 +49,7 @@ interface ReportingRepository {
         fromDate: ZonedDateTime,
     ): List<Reporting>
 
-    fun findUnarchivedReportingsAfterNewVesselTrip(): List<Pair<Int, AlertType>>
+    fun findUnarchivedReportings(): List<Pair<Int, AlertType>>
 
     fun archive(id: Int)
 
diff --git a/backend/src/main/kotlin/fr/gouv/cnsp/monitorfish/domain/use_cases/reporting/ArchiveOutdatedReportings.kt b/backend/src/main/kotlin/fr/gouv/cnsp/monitorfish/domain/use_cases/reporting/ArchiveOutdatedReportings.kt
index a4f2381dfb..752a7b1be1 100644
--- a/backend/src/main/kotlin/fr/gouv/cnsp/monitorfish/domain/use_cases/reporting/ArchiveOutdatedReportings.kt
+++ b/backend/src/main/kotlin/fr/gouv/cnsp/monitorfish/domain/use_cases/reporting/ArchiveOutdatedReportings.kt
@@ -7,8 +7,6 @@ import org.slf4j.LoggerFactory
 import org.springframework.scheduling.annotation.Scheduled
 import org.springframework.transaction.annotation.Transactional
 
-// TODO TEST
-
 @UseCase
 class ArchiveOutdatedReportings(private val reportingRepository: ReportingRepository) {
     private val logger = LoggerFactory.getLogger(ArchiveOutdatedReportings::class.java)
@@ -17,16 +15,17 @@ class ArchiveOutdatedReportings(private val reportingRepository: ReportingReposi
     @Scheduled(fixedDelay = 300000, initialDelay = 6000)
     @Transactional
     fun execute() {
-        val reportingCandidatesToArchive = reportingRepository.findUnarchivedReportingsAfterNewVesselTrip()
+        val reportingCandidatesToArchive = reportingRepository.findUnarchivedReportings()
 
-        val filteredReportingIdsToArchive = reportingCandidatesToArchive.filter {
-            it.second.type == AlertTypeMapping.MISSING_FAR_ALERT ||
-                it.second.type == AlertTypeMapping.THREE_MILES_TRAWLING_ALERT
-        }.map { it.first }
+        val filteredReportingIdsToArchive =
+            reportingCandidatesToArchive.filter {
+                it.second.type == AlertTypeMapping.MISSING_FAR_ALERT ||
+                    it.second.type == AlertTypeMapping.THREE_MILES_TRAWLING_ALERT
+            }.map { it.first }
 
         logger.info("Found ${filteredReportingIdsToArchive.size} reportings to archive.")
-        //val numberOfArchivedReportings = reportingRepository.archiveReportings(filteredReportingIdsToArchive)
+        val numberOfArchivedReportings = reportingRepository.archiveReportings(filteredReportingIdsToArchive)
 
-        //logger.info("Archived $numberOfArchivedReportings reportings")
+        logger.info("Archived $numberOfArchivedReportings reportings")
     }
 }
diff --git a/backend/src/main/kotlin/fr/gouv/cnsp/monitorfish/domain/use_cases/reporting/GetVesselReportings.kt b/backend/src/main/kotlin/fr/gouv/cnsp/monitorfish/domain/use_cases/reporting/GetVesselReportings.kt
index 0c08b4c03c..306c07c728 100644
--- a/backend/src/main/kotlin/fr/gouv/cnsp/monitorfish/domain/use_cases/reporting/GetVesselReportings.kt
+++ b/backend/src/main/kotlin/fr/gouv/cnsp/monitorfish/domain/use_cases/reporting/GetVesselReportings.kt
@@ -30,19 +30,20 @@ class GetVesselReportings(
         vesselIdentifier: VesselIdentifier?,
         fromDate: ZonedDateTime,
     ): VesselReportings {
-
         val (controlUnits, controlUnitsTimeTaken) = measureTimedValue { getAllControlUnits.execute() }
         logger.info("TIME_RECORD - 'getAllControlUnits' took $controlUnitsTimeTaken")
 
         val (reportings, reportingsTimeTaken) =
-            measureTimedValue { findReportings(
-                vesselId,
-                vesselIdentifier,
-                internalReferenceNumber,
-                fromDate,
-                ircs,
-                externalReferenceNumber,
-            ) }
+            measureTimedValue {
+                findReportings(
+                    vesselId,
+                    vesselIdentifier,
+                    internalReferenceNumber,
+                    fromDate,
+                    ircs,
+                    externalReferenceNumber,
+                )
+            }
         logger.info("TIME_RECORD - 'findReportings' took $reportingsTimeTaken")
 
         val (current, currentTimeTaken) =
@@ -73,7 +74,10 @@ class GetVesselReportings(
         }
         logger.info("TIME_RECORD - 'archivedYearsToReportings' took $archivedYearsToReportingsTimeTaken")
 
-        val (infractionSuspicionsSummary, infractionSuspicionsSummaryTimeTaken) = measureTimedValue { getInfractionSuspicionsSummary(reportings.filter { it.isArchived }) }
+        val (infractionSuspicionsSummary, infractionSuspicionsSummaryTimeTaken) =
+            measureTimedValue {
+                getInfractionSuspicionsSummary(reportings.filter { it.isArchived })
+            }
         logger.info("TIME_RECORD - 'infractionSuspicionsSummary' took $infractionSuspicionsSummaryTimeTaken")
         val numberOfInfractionSuspicions = infractionSuspicionsSummary.sumOf { it.numberOfOccurrences }
         val numberOfObservation =
@@ -124,7 +128,7 @@ class GetVesselReportings(
                 }
 
                 return@map ReportingTitleAndNumberOfOccurrences(
-                    title = infraction?.infraction?.let {"$it (NATINF $natinfCode)"} ?: "NATINF $natinfCode",
+                    title = infraction?.infraction?.let { "$it (NATINF $natinfCode)" } ?: "NATINF $natinfCode",
                     numberOfOccurrences = reportings.size,
                 )
             }
diff --git a/backend/src/main/kotlin/fr/gouv/cnsp/monitorfish/infrastructure/database/repositories/JpaReportingRepository.kt b/backend/src/main/kotlin/fr/gouv/cnsp/monitorfish/infrastructure/database/repositories/JpaReportingRepository.kt
index b82b4777ef..eda44fa9ac 100644
--- a/backend/src/main/kotlin/fr/gouv/cnsp/monitorfish/infrastructure/database/repositories/JpaReportingRepository.kt
+++ b/backend/src/main/kotlin/fr/gouv/cnsp/monitorfish/infrastructure/database/repositories/JpaReportingRepository.kt
@@ -168,11 +168,16 @@ class JpaReportingRepository(
         dbReportingRepository.archiveReporting(id)
     }
 
-    override fun findUnarchivedReportingsAfterNewVesselTrip(): List<Pair<Int, AlertType>> {
+    override fun findUnarchivedReportings(): List<Pair<Int, AlertType>> {
         return dbReportingRepository.findAllUnarchivedAfterDEPLogbookMessage().map { result
-> Pair( result[0] as Int, - ReportingMapper.getReportingValueFromJSON(mapper, result[1] as String?, ReportingType.ALERT) as AlertType) + ReportingMapper.getReportingValueFromJSON( + mapper, + result[1] as String?, + ReportingType.ALERT, + ) as AlertType, + ) } } diff --git a/backend/src/main/kotlin/fr/gouv/cnsp/monitorfish/infrastructure/database/repositories/interfaces/DBReportingRepository.kt b/backend/src/main/kotlin/fr/gouv/cnsp/monitorfish/infrastructure/database/repositories/interfaces/DBReportingRepository.kt index 065561a372..b750435157 100644 --- a/backend/src/main/kotlin/fr/gouv/cnsp/monitorfish/infrastructure/database/repositories/interfaces/DBReportingRepository.kt +++ b/backend/src/main/kotlin/fr/gouv/cnsp/monitorfish/infrastructure/database/repositories/interfaces/DBReportingRepository.kt @@ -85,7 +85,7 @@ interface DBReportingRepository : CrudRepository { SELECT lr.cfr, lr.ircs, lr.external_identification, lr.operation_number, MAX(lr.operation_datetime_utc) as last_dep_date_time FROM logbook_reports lr WHERE - lr.operation_datetime_utc > NOW() - INTERVAL '1 day' AND + lr.operation_datetime_utc > NOW() - INTERVAL '1 hour' AND lr.log_type = 'DEP' GROUP BY lr.cfr, lr.ircs, lr.external_identification, lr.operation_number ), @@ -94,7 +94,7 @@ interface DBReportingRepository : CrudRepository { SELECT DISTINCT referenced_report_id FROM logbook_reports lr WHERE - lr.operation_datetime_utc > NOW() - INTERVAL '1 day' AND + lr.operation_datetime_utc > NOW() - INTERVAL '1 hour' AND lr.operation_type = 'RET' AND lr.value->>'returnStatus' = '000' ) @@ -115,16 +115,12 @@ interface DBReportingRepository : CrudRepository { WHERE r.archived is false AND r.deleted is false AND - rdp.last_dep_date_time >= r.validation_date - + rdp.last_dep_date_time >= r.validation_date AND + rdp.operation_number IN (SELECT referenced_report_id FROM acknowledged_report_ids) """, nativeQuery = true, ) fun findAllUnarchivedAfterDEPLogbookMessage(): List> - /* - - rdp.operation_number IN (SELECT referenced_report_id FROM acknowledged_report_ids) - */ @Modifying(clearAutomatically = true, flushAutomatically = true) @Query( diff --git a/backend/src/main/resources/db/testdata/V666.5.1__Insert_more_pno_logbook_reports.sql b/backend/src/main/resources/db/testdata/V666.5.1__Insert_more_pno_logbook_reports.sql index 759d9e1d70..7645dfa529 100644 --- a/backend/src/main/resources/db/testdata/V666.5.1__Insert_more_pno_logbook_reports.sql +++ b/backend/src/main/resources/db/testdata/V666.5.1__Insert_more_pno_logbook_reports.sql @@ -238,6 +238,6 @@ UPDATE logbook_reports SET value = JSONB_SET(value, '{tripStartDate}', TO_JSONB( INSERT INTO logbook_reports (id, report_id, referenced_report_id, integration_datetime_utc, operation_datetime_utc, operation_number, operation_type, transmission_format, value) VALUES (1120, NULL, 'FAKE_OPERATION_120', NOW() AT TIME ZONE 'UTC' - INTERVAL '14 minutes', NOW() AT TIME ZONE 'UTC' - INTERVAL '14 minutes', 'FAKE_OPERATION_120_RET', 'RET', 'ERS', '{"returnStatus":"000"}'); -INSERT INTO logbook_reports (id, report_id, referenced_report_id, cfr, enriched, flag_state, integration_datetime_utc, log_type, operation_datetime_utc, operation_number, operation_type, report_datetime_utc, software, transmission_format, trip_gears, trip_segments, vessel_name, value) VALUES (121, 'FAKE_OPERATION_121', NULL, 'ABC000180832', true, 'FRA', NOW() AT TIME ZONE 'UTC' - INTERVAL '15 minutes', 'DEP', NOW() AT TIME ZONE 'UTC' - INTERVAL '15 minutes', 'FAKE_OPERATION_121', 'DAT', NOW() AT TIME ZONE 'UTC' - 
INTERVAL '15 minutes', 'TurboCatch (3.7-1)', 'ERS', NULL, NULL, 'MARIAGE ÎLE HASARD', '{"gearOnboard":[{"gear":"GTR","mesh":100}],"departurePort":"AEJAZ","anticipatedActivity":"FSH","tripStartDate":"NOW() AT TIME ZONE ''UTC'' - INTERVAL ''15 minutes''","departureDatetimeUtc":"NOW() AT TIME ZONE ''UTC'' - INTERVAL ''15 minutes''"}'); +INSERT INTO logbook_reports (id, report_id, referenced_report_id, cfr, enriched, flag_state, integration_datetime_utc, log_type, operation_datetime_utc, operation_number, operation_type, report_datetime_utc, software, transmission_format, trip_gears, trip_segments, vessel_name, value) VALUES (121, 'FAKE_OPERATION_121', NULL, 'ABC000180832', true, 'FRA', NOW() - INTERVAL '15 minutes', 'DEP', NOW() - INTERVAL '15 minutes', 'FAKE_OPERATION_121', 'DAT', NOW() - INTERVAL '15 minutes', 'TurboCatch (3.7-1)', 'ERS', NULL, NULL, 'MARIAGE ÎLE HASARD', '{"gearOnboard":[{"gear":"GTR","mesh":100}],"departurePort":"AEJAZ","anticipatedActivity":"FSH","tripStartDate":"NOW() - INTERVAL ''15 minutes''","departureDatetimeUtc":"NOW() - INTERVAL ''15 minutes''"}'); -INSERT INTO logbook_reports (id, report_id, referenced_report_id, integration_datetime_utc, operation_datetime_utc, operation_number, operation_type, transmission_format, value) VALUES (1120, NULL, 'FAKE_OPERATION_121', NOW() AT TIME ZONE 'UTC' - INTERVAL '14 minutes', NOW() AT TIME ZONE 'UTC' - INTERVAL '14 minutes', 'FAKE_OPERATION_121_RET', 'RET', 'ERS', '{"returnStatus":"000"}'); +INSERT INTO logbook_reports (id, report_id, referenced_report_id, integration_datetime_utc, operation_datetime_utc, operation_number, operation_type, transmission_format, value) VALUES (1120, NULL, 'FAKE_OPERATION_121', NOW() - INTERVAL '14 minutes', NOW() - INTERVAL '14 minutes', 'FAKE_OPERATION_121_RET', 'RET', 'ERS', '{"returnStatus":"000"}'); diff --git a/backend/src/main/resources/db/testdata/json/V666.5.1__Insert_more_pno_logbook_reports.jsonc b/backend/src/main/resources/db/testdata/json/V666.5.1__Insert_more_pno_logbook_reports.jsonc index aefed329ac..8f7d2797fe 100644 --- a/backend/src/main/resources/db/testdata/json/V666.5.1__Insert_more_pno_logbook_reports.jsonc +++ b/backend/src/main/resources/db/testdata/json/V666.5.1__Insert_more_pno_logbook_reports.jsonc @@ -1516,12 +1516,12 @@ "cfr": "ABC000180832", "enriched": true, "flag_state": "FRA", - "integration_datetime_utc:sql": "NOW() AT TIME ZONE 'UTC' - INTERVAL '15 minutes'", + "integration_datetime_utc:sql": "NOW() - INTERVAL '15 minutes'", "log_type": "DEP", - "operation_datetime_utc:sql": "NOW() AT TIME ZONE 'UTC' - INTERVAL '15 minutes'", + "operation_datetime_utc:sql": "NOW() - INTERVAL '15 minutes'", "operation_number": "FAKE_OPERATION_121", "operation_type": "DAT", - "report_datetime_utc:sql": "NOW() AT TIME ZONE 'UTC' - INTERVAL '15 minutes'", + "report_datetime_utc:sql": "NOW() - INTERVAL '15 minutes'", "software": "TurboCatch (3.7-1)", "transmission_format": "ERS", "trip_gears": null, @@ -1531,16 +1531,16 @@ "gearOnboard": [{"gear": "GTR", "mesh": 100.0}], "departurePort": "AEJAZ", "anticipatedActivity": "FSH", - "tripStartDate": "NOW() AT TIME ZONE 'UTC' - INTERVAL '15 minutes'", - "departureDatetimeUtc": "NOW() AT TIME ZONE 'UTC' - INTERVAL '15 minutes'" + "tripStartDate": "NOW() - INTERVAL '15 minutes'", + "departureDatetimeUtc": "NOW() - INTERVAL '15 minutes'" } }, { "id": 1120, "report_id": null, "referenced_report_id": "FAKE_OPERATION_121", - "integration_datetime_utc:sql": "NOW() AT TIME ZONE 'UTC' - INTERVAL '14 minutes'", - "operation_datetime_utc:sql": 
"NOW() AT TIME ZONE 'UTC' - INTERVAL '14 minutes'", + "integration_datetime_utc:sql": "NOW() - INTERVAL '14 minutes'", + "operation_datetime_utc:sql": "NOW() - INTERVAL '14 minutes'", "operation_number": "FAKE_OPERATION_121_RET", "operation_type": "RET", "transmission_format": "ERS", diff --git a/backend/src/test/kotlin/fr/gouv/cnsp/monitorfish/domain/use_cases/reporting/ArchiveOutdatedReportingsUTests.kt b/backend/src/test/kotlin/fr/gouv/cnsp/monitorfish/domain/use_cases/reporting/ArchiveOutdatedReportingsUTests.kt new file mode 100644 index 0000000000..e871428a04 --- /dev/null +++ b/backend/src/test/kotlin/fr/gouv/cnsp/monitorfish/domain/use_cases/reporting/ArchiveOutdatedReportingsUTests.kt @@ -0,0 +1,37 @@ +package fr.gouv.cnsp.monitorfish.domain.use_cases.reporting + +import com.nhaarman.mockitokotlin2.eq +import com.nhaarman.mockitokotlin2.verify +import fr.gouv.cnsp.monitorfish.domain.entities.alerts.type.MissingFARAlert +import fr.gouv.cnsp.monitorfish.domain.entities.alerts.type.ThreeMilesTrawlingAlert +import fr.gouv.cnsp.monitorfish.domain.entities.alerts.type.TwelveMilesFishingAlert +import fr.gouv.cnsp.monitorfish.domain.repositories.ReportingRepository +import org.junit.jupiter.api.Test +import org.junit.jupiter.api.extension.ExtendWith +import org.mockito.BDDMockito.given +import org.springframework.boot.test.mock.mockito.MockBean +import org.springframework.test.context.junit.jupiter.SpringExtension + +@ExtendWith(SpringExtension::class) +class ArchiveOutdatedReportingsUTests { + @MockBean + private lateinit var reportingRepository: ReportingRepository + + @Test + fun `execute Should archive outdated reportings`() { + // Given + given(reportingRepository.findUnarchivedReportings()).willReturn( + listOf( + Pair(1, TwelveMilesFishingAlert("NAMO")), + Pair(2, ThreeMilesTrawlingAlert("NAMO")), + Pair(3, MissingFARAlert("NAMO")), + ), + ) + + // When + ArchiveOutdatedReportings(reportingRepository).execute() + + // Then + verify(reportingRepository).archiveReportings(eq(listOf(2, 3))) + } +} diff --git a/backend/src/test/kotlin/fr/gouv/cnsp/monitorfish/domain/use_cases/reporting/GetVesselReportingsUTests.kt b/backend/src/test/kotlin/fr/gouv/cnsp/monitorfish/domain/use_cases/reporting/GetVesselReportingsUTests.kt index 04d48ffc27..82caaccc3b 100644 --- a/backend/src/test/kotlin/fr/gouv/cnsp/monitorfish/domain/use_cases/reporting/GetVesselReportingsUTests.kt +++ b/backend/src/test/kotlin/fr/gouv/cnsp/monitorfish/domain/use_cases/reporting/GetVesselReportingsUTests.kt @@ -413,9 +413,13 @@ class GetVesselReportingsUTests { val infractionSuspicionsSummary = result.summary.infractionSuspicionsSummary assertThat(result.summary.infractionSuspicionsSummary).hasSize(4) assertThat(infractionSuspicionsSummary[0].numberOfOccurrences).isEqualTo(2) - assertThat(infractionSuspicionsSummary[0].title).isEqualTo("12 milles - Pêche sans droits historiques (NATINF 2610)") + assertThat( + infractionSuspicionsSummary[0].title, + ).isEqualTo("12 milles - Pêche sans droits historiques (NATINF 2610)") assertThat(infractionSuspicionsSummary[1].numberOfOccurrences).isEqualTo(1) - assertThat(infractionSuspicionsSummary[1].title).isEqualTo("Non-emission de message \"FAR\" en 48h (NATINF 27689)") + assertThat( + infractionSuspicionsSummary[1].title, + ).isEqualTo("Non-emission de message \"FAR\" en 48h (NATINF 27689)") assertThat(infractionSuspicionsSummary[2].numberOfOccurrences).isEqualTo(1) assertThat( infractionSuspicionsSummary[2].title, diff --git 
a/backend/src/test/kotlin/fr/gouv/cnsp/monitorfish/infrastructure/database/repositories/JpaReportingRepositoryITests.kt b/backend/src/test/kotlin/fr/gouv/cnsp/monitorfish/infrastructure/database/repositories/JpaReportingRepositoryITests.kt index f54dd0aac0..2066fc9c8e 100644 --- a/backend/src/test/kotlin/fr/gouv/cnsp/monitorfish/infrastructure/database/repositories/JpaReportingRepositoryITests.kt +++ b/backend/src/test/kotlin/fr/gouv/cnsp/monitorfish/infrastructure/database/repositories/JpaReportingRepositoryITests.kt @@ -3,6 +3,7 @@ package fr.gouv.cnsp.monitorfish.infrastructure.database.repositories import com.neovisionaries.i18n.CountryCode import fr.gouv.cnsp.monitorfish.config.MapperConfiguration import fr.gouv.cnsp.monitorfish.domain.entities.alerts.PendingAlert +import fr.gouv.cnsp.monitorfish.domain.entities.alerts.type.AlertTypeMapping import fr.gouv.cnsp.monitorfish.domain.entities.alerts.type.ThreeMilesTrawlingAlert import fr.gouv.cnsp.monitorfish.domain.entities.reporting.* import fr.gouv.cnsp.monitorfish.domain.entities.reporting.filters.ReportingFilter @@ -423,11 +424,23 @@ class JpaReportingRepositoryITests : AbstractDBTests() { @Test @Transactional - fun `findReportingUnarchivedAfterDEPLogbookMessage Should return archive candidates`() { + fun `findUnarchivedReportings Should return archive candidates`() { // When - val reportings = jpaReportingRepository.findUnarchivedReportingsAfterNewVesselTrip() + val reportings = jpaReportingRepository.findUnarchivedReportings() // Then assertThat(reportings).hasSize(1) + assertThat(reportings.first().first).isEqualTo(1) + assertThat(reportings.first().second.type).isEqualTo(AlertTypeMapping.THREE_MILES_TRAWLING_ALERT) + } + + @Test + @Transactional + fun `archiveReportings Should archive reportings`() { + // When + val archivedReportings = jpaReportingRepository.archiveReportings(listOf(1)) + + // Then + assertThat(archivedReportings).isEqualTo(1) } } From 328de65a62107215984de758bd8c3dd6a05881d6 Mon Sep 17 00:00:00 2001 From: Loup Theron Date: Tue, 17 Sep 2024 16:56:03 +0200 Subject: [PATCH 3/3] Remove scheduled use-cases from cypress tests --- infra/docker/docker-compose.cypress.yml | 1 + infra/docker/docker-compose.puppeteer.yml | 1 + 2 files changed, 2 insertions(+) diff --git a/infra/docker/docker-compose.cypress.yml b/infra/docker/docker-compose.cypress.yml index b0c84d27fb..e1f6bda3df 100644 --- a/infra/docker/docker-compose.cypress.yml +++ b/infra/docker/docker-compose.cypress.yml @@ -42,6 +42,7 @@ services: - FRONTEND_OIDC_CLIENT_ID=monitorfish - MONITORFISH_OIDC_ENABLED=false - FRONTEND_OIDC_ENABLED=false + - MONITORFISH_SCHEDULING_ENABLED=false - FRONTEND_OIDC_REDIRECT_URI=https://monitorfish.din.developpement-durable.gouv.fr - FRONTEND_OIDC_LOGOUT_REDIRECT_URI=https://monitorfish.din.developpement-durable.gouv.fr - FRONTEND_MONITORFISH_VERSION= diff --git a/infra/docker/docker-compose.puppeteer.yml b/infra/docker/docker-compose.puppeteer.yml index 13414f13ea..885b2138c9 100644 --- a/infra/docker/docker-compose.puppeteer.yml +++ b/infra/docker/docker-compose.puppeteer.yml @@ -57,6 +57,7 @@ services: - FRONTEND_OIDC_AUTHORITY=https://authentification.recette.din.developpement-durable.gouv.fr/authSAML/oidc/monitorfish - FRONTEND_OIDC_CLIENT_ID=monitorfish - MONITORFISH_OIDC_ENABLED=false + - MONITORFISH_SCHEDULING_ENABLED=false - FRONTEND_OIDC_ENABLED=false - FRONTEND_OIDC_REDIRECT_URI=https://monitorfish.din.developpement-durable.gouv.fr - 
FRONTEND_OIDC_LOGOUT_REDIRECT_URI=https://monitorfish.din.developpement-durable.gouv.fr