From eeaac302a6d1a605c0df5d9b7e4cde320e0ff7d0 Mon Sep 17 00:00:00 2001
From: Dmytro Svyrydenko
Date: Sat, 14 Sep 2024 18:20:50 +0200
Subject: [PATCH 1/2] feat: Improve local setup
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

– use Docker for local setup
– update README
– update CI scripts
---
 .dockerignore | 3 +
 .github/actions/docker-build/action.yml | 8 +-
 README.md | 89 ++++++++++++-------
 docker/dev/Dockerfile | 17 ++++
 docker/dev/docker-compose.yml | 31 +++++++
 docker/dev/docker-destroy.sh | 6 ++
 docker/dev/docker-entrypoint.sh | 17 ++++
 Dockerfile => docker/prod/Dockerfile | 9 +-
 package.json | 19 ++--
 post-install.sh | 13 ++-
 .../1664386509637-exchange-rates.js | 6 +-
 11 files changed, 166 insertions(+), 52 deletions(-)
 create mode 100644 docker/dev/Dockerfile
 create mode 100644 docker/dev/docker-compose.yml
 create mode 100755 docker/dev/docker-destroy.sh
 create mode 100755 docker/dev/docker-entrypoint.sh
 rename Dockerfile => docker/prod/Dockerfile (54%)

diff --git a/.dockerignore b/.dockerignore
index cf709889..1590e2c3 100644
--- a/.dockerignore
+++ b/.dockerignore
@@ -1 +1,4 @@
 **/node_modules
+node_modules
+npm-debug.log
+.env*
diff --git a/.github/actions/docker-build/action.yml b/.github/actions/docker-build/action.yml
index 8391e29e..45f356b7 100644
--- a/.github/actions/docker-build/action.yml
+++ b/.github/actions/docker-build/action.yml
@@ -19,19 +19,19 @@ runs:
   using: composite
   steps:
     - name: Login to Docker Hub
-      uses: docker/login-action@v1
+      uses: docker/login-action@v3
      with:
        username: ${{ inputs.docker-hub-username }}
        password: ${{ inputs.docker-hub-access-token }}
    - name: Set up Docker Buildx
-      uses: docker/setup-buildx-action@v1
+      uses: docker/setup-buildx-action@v3
    - name: Docker build
      id: docker-build
-      uses: docker/build-push-action@v2
+      uses: docker/build-push-action@v6
      with:
        context: ./
-        file: ./Dockerfile
+        file: ./docker/prod/Dockerfile
        push: ${{ inputs.push == 'true' }}
        tags: ${{ inputs.docker-hub-username }}/budget-tracker-be:latest
diff --git a/README.md b/README.md
index b3a715c7..0104d8c8 100644
--- a/README.md
+++ b/README.md
@@ -1,18 +1,15 @@
 # budget-tracker-be
-Budget tracker back-end
+## First run instructions
-### First run instructions
+Make sure you have Docker installed. These instructions were written
+for Docker v4.34.
-Required stack (WIP): Postgres v11, Node version specified in `.nvmrc`
+### 1. Install dependencies
-1. Install dependencies
+Use `npm ci` to install all the dependencies with the correct versions.
-```sh
-npm i
-```
-
-2. Create corresponding env variables.
+### 2. Create the corresponding env variables
 Project uses different `.env` files for each environment: `.env.development`,
 `.env.production`, `.env.test`.
@@ -25,38 +22,36 @@ variables.
 since when tests are running, they're automatically filling and cleaning the DB,
 so all your data from the DB used for development might be lost.
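+
+If you haven't created the env files yet, here is a minimal sketch of how to start (the
+variable names come from `.env.template`; treat the list below as an assumption about
+which values the Docker setup needs, and adjust everything to your own machine):
+
+```sh
+# Copy the template once per environment and fill in the blanks
+cp .env.template .env.development
+cp .env.template .env.test
+
+# The dev docker-compose file reads at least these values from .env.development:
+#   APPLICATION_PORT        - port published by the app container
+#   APPLICATION_DB_USERNAME - Postgres user created inside the db container
+#   APPLICATION_DB_PASSWORD - Postgres password
+#   APPLICATION_DB_DATABASE - Postgres database name
+#   APPLICATION_DB_PORT     - host port mapped to Postgres 5432
+```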
-3. Run migrations
+### 3. Start dev server
-```sh
-npm run migrate:dev
-```
+Run `npm run docker:dev`, or `npm run docker:dev -- -d` to run it in the background.
-If you have an error running this command, you probably need to install Postgres. Read [the guide below](#setup-postgres).
+It runs in HMR mode, so any changes to the codebase are picked up automatically.
-If you encountered any errors during `npm run migrate:dev`, you can run
-`npm run migrate-undo:dev` to undo migrations. If you still facing issues, you
-can clear the DB using these two commands:
+### 4. That's it! 🎉🎉🎉
-```sh
-drop schema public cascade;
-create schema public;
-```
+Now the app should be accessible on the port that you defined in the `.env.development` file.
-They will completely clean the DB and you will be able to run migrations again.
+### Troubleshooting:
-4. Start dev server
+1. Sometimes when running `npm run docker:dev` it might get stuck running migrations
+   due to DB connection issues. It's a very rare case, but if this happens,
+   _**simply run the command again**_.
-```sh
-npm run dev
-```
+### Useful commands for local development:
-### If you didn't work on it for long time
+1. `npm run docker:dev:down` to stop the containers. All the data will still be stored in the DB.
+2. `npm run docker:dev:destroy` stops the containers and _**completely destroys all the images, containers, and volumes**_. It means all the data will be erased from the DB. Useful when you want to test new migrations or when the DB structure was damaged.
+3. Use `docker:dev:run-in-container -- <command>` to run any command inside the running docker container. For example, `docker:dev:run-in-container -- npm run migrate:dev` to run migrations and `docker:dev:run-in-container -- npm run migrate:dev:undo` to undo them. See the example below for opening a database shell directly.
-1. Make sure Postres v11 is running
-2. Run `nvm use`
-3. That's it!
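+
+A sketch for quick manual inspection (the container names come from
+`docker/dev/docker-compose.yml`; replace the user with whatever you set in `.env.development`):
+
+```sh
+# Open an interactive psql session inside the running dev Postgres container
+docker exec -it dev-budget-tracker-db psql -U <your-db-username> -d budget-tracker
+
+# Same idea for Redis
+docker exec -it dev-budget-tracker-redis redis-cli
+```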
-### Setup Postgres +### If you don't want to use Docker + +For whatever reason if you don't want not to use Docker, you still need to complete +first 2 steps described above, and then follow these instructions: + +### 3. Setup Postgres If you can access your user and you know how to create a DB, **you can ignore that section**. @@ -92,8 +87,36 @@ CREATE DATABASE "budget-tracker"; 7. That's it. -To install Redis (if you don't have one): +### 4. Install Redis (if you don't have one): 1. Install Redis via `brew install redis` 2. Then `brew services start redis` -3. You're done :) + +### 5. Run migrations + +```sh +npm run migrate:dev +``` + +If you have an error running this command, you probably need to install Postgres. Read [the guide below](#setup-postgres). + +If you encountered any errors during `npm run migrate:dev`, you can run +`npm run migrate-undo:dev` to undo migrations. If you still facing issues, you +can clear the DB using these two commands: + +```sh +drop schema public cascade; +create schema public; +``` + +They will completely clean the DB and you will be able to run migrations again. + +### 6. Start dev server + +```sh +npm run dev +``` + +### 7. That's it! πŸŽ‰πŸŽ‰πŸŽ‰ + +But better use Docker πŸ™ˆ diff --git a/docker/dev/Dockerfile b/docker/dev/Dockerfile new file mode 100644 index 00000000..96159919 --- /dev/null +++ b/docker/dev/Dockerfile @@ -0,0 +1,17 @@ +FROM node:21.7.3 +WORKDIR /app + +# Copy the rest of the application +COPY . . + +# Install dependencies +RUN chmod +x ./post-install.sh +RUN npm ci + +ENV NODE_ENV=development + +# Prepare and execute entrypoint script +RUN chmod +x /app/docker/dev/docker-entrypoint.sh +ENTRYPOINT ["/app/docker/dev/docker-entrypoint.sh"] + +CMD ["/bin/sh", "-c", "npm run dev"] diff --git a/docker/dev/docker-compose.yml b/docker/dev/docker-compose.yml new file mode 100644 index 00000000..3e106a19 --- /dev/null +++ b/docker/dev/docker-compose.yml @@ -0,0 +1,31 @@ +services: + app: + build: + context: ../.. + dockerfile: docker/dev/Dockerfile + container_name: dev-budget-tracker-app + volumes: ['../../:/app', '/app/node_modules'] + ports: ['${APPLICATION_PORT}:${APPLICATION_PORT}'] + depends_on: ['db', 'redis'] + env_file: ../../.env.development + + db: + image: postgres:16 + restart: always + container_name: dev-budget-tracker-db + volumes: ['db_data:/var/lib/postgresql/data'] + environment: + - POSTGRES_USER=${APPLICATION_DB_USERNAME} + - POSTGRES_PASSWORD=${APPLICATION_DB_PASSWORD} + - POSTGRES_DB=${APPLICATION_DB_DATABASE} + ports: ['${APPLICATION_DB_PORT}:5432'] + + redis: + image: redis:6 + container_name: dev-budget-tracker-redis + volumes: ['redis_data:/data'] + ports: ['6379:6379'] + +volumes: + db_data: + redis_data: diff --git a/docker/dev/docker-destroy.sh b/docker/dev/docker-destroy.sh new file mode 100755 index 00000000..d98b5837 --- /dev/null +++ b/docker/dev/docker-destroy.sh @@ -0,0 +1,6 @@ +#!/bin/sh + +echo "Starting removing all dev container completely..." + +npm run docker:dev -- -d +npm run docker:dev:down -- --rmi all --volumes --remove-orphans diff --git a/docker/dev/docker-entrypoint.sh b/docker/dev/docker-entrypoint.sh new file mode 100755 index 00000000..0f1294d3 --- /dev/null +++ b/docker/dev/docker-entrypoint.sh @@ -0,0 +1,17 @@ +#!/bin/sh +set -e + +echo "Starting entrypoint script" + +# Run migrations +echo "Running migrations..." +if npm run migrate:dev; then + echo "Migrations completed successfully" +else + echo "Migration failed. Exiting..." 
+  exit 1
+fi
+
+# If we get here, migrations were successful
+echo "Starting the application..."
+exec "$@"
diff --git a/Dockerfile b/docker/prod/Dockerfile
similarity index 54%
rename from Dockerfile
rename to docker/prod/Dockerfile
index 2e39f3a3..635288fc 100644
--- a/Dockerfile
+++ b/docker/prod/Dockerfile
@@ -1,6 +1,13 @@
 FROM node:21.7.3
 WORKDIR /app
+
+# Copy the application code
 COPY . .
-RUN npm ci
+
 ENV NODE_ENV=production
+
+# Install dependencies
+RUN chmod +x ./post-install.sh
+RUN npm ci
+
 CMD ["/bin/sh", "-c", "npm run prod"]
diff --git a/package.json b/package.json
index 144bf75d..7fe36d27 100644
--- a/package.json
+++ b/package.json
@@ -7,22 +7,25 @@
     "dev": "cross-env NODE_ENV=development nodemon",
     "prod": "cross-env NODE_ENV=production nodemon",
     "migrate:dev": "cross-env NODE_ENV=development npx sequelize-cli db:migrate",
+    "migrate:dev:undo": "cross-env NODE_ENV=development npx sequelize-cli db:migrate:undo",
     "migrate": "cross-env NODE_ENV=production npx sequelize-cli db:migrate",
-    "migrate-undo:dev": "cross-env NODE_ENV=development npx sequelize-cli db:migrate:undo",
-    "migrate-undo": "cross-env NODE_ENV=production npx sequelize-cli db:migrate:undo",
-    "seed": "npx sequelize-cli db:seed:all",
-    "seed-undo": "npx sequelize-cli db:seed:undo:all",
+    "migrate:undo": "cross-env NODE_ENV=production npx sequelize-cli db:migrate:undo",
     "db:reset": "cross-env NODE_ENV=test npx sequelize-cli db:drop && npx sequelize-cli db:create && npx sequelize-cli db:migrate",
     "pretest": "cross-env NODE_ENV=test npm run db:reset",
     "test": "cross-env NODE_ENV=test npm run test:unit && npm run test:e2e",
     "test:unit": "cross-env NODE_ENV=test jest -c jest.config.unit.ts --passWithNoTests --forceExit --detectOpenHandles",
     "test:e2e": "cross-env NODE_ENV=test jest -c jest.config.e2e.ts --runInBand --passWithNoTests --forceExit --detectOpenHandles",
     "lint": "eslint .",
-    "docker-build": "docker build . -t letehaha/budget-tracker-be",
-    "docker-push": "docker push letehaha/budget-tracker-be",
-    "docker-compose": "npm run docker-build && npm run docker-push",
+    "docker:dev": "docker compose --env-file .env.development -f ./docker/dev/docker-compose.yml up --build",
+    "docker:dev:ps": "docker compose --env-file .env.development -f ./docker/dev/docker-compose.yml ps",
+    "docker:dev:down": "docker compose --env-file .env.development -f ./docker/dev/docker-compose.yml down",
+    "docker:dev:destroy": "./docker/dev/docker-destroy.sh",
+    "docker:dev:run-in-container": "docker compose --env-file .env.development -f ./docker/dev/docker-compose.yml exec app",
+    "docker:prod:build": "docker build . -t letehaha/budget-tracker-be -f ./docker/prod/Dockerfile",
+    "docker:prod:push": "docker push letehaha/budget-tracker-be",
+    "docker:prod:build-push": "npm run docker:prod:build && npm run docker:prod:push",
     "postinstall": "chmod +x ./post-install.sh && ./post-install.sh",
-    "prepare": "husky install"
+    "prepare": "node -e \"if (process.env.NODE_ENV !== 'production'){process.exit(1)} \" || husky install"
   },
   "repository": {
     "type": "git",
diff --git a/post-install.sh b/post-install.sh
index 569096eb..807e83da 100755
--- a/post-install.sh
+++ b/post-install.sh
@@ -1 +1,12 @@
-git config blame.ignoreRevsFile .git-blame-ignore-revs
+#!/bin/sh
+set -e
+
+# Check if we're in a git repository
+if git rev-parse --is-inside-work-tree > /dev/null 2>&1; then
+  echo "Running git commands..."
+  git config blame.ignoreRevsFile .git-blame-ignore-revs
+else
+  echo "Not in a git repository, skipping git commands."
+fi
+
+chmod +x ./docker/dev/docker-destroy.sh
diff --git a/src/migrations/1664386509637-exchange-rates.js b/src/migrations/1664386509637-exchange-rates.js
index 3e7b07d5..c65515a3 100644
--- a/src/migrations/1664386509637-exchange-rates.js
+++ b/src/migrations/1664386509637-exchange-rates.js
@@ -9,11 +9,7 @@ module.exports = {
     const transaction = await queryInterface.sequelize.transaction();
     try {
-      let data = {};
-
-      if (isTest) {
-        data.data = JSON.parse(fs.readFileSync('./src/tests/test-exchange-rates.json'));
-      }
+      let data = { data: JSON.parse(fs.readFileSync('./src/tests/test-exchange-rates.json')) };
       const currencies = await queryInterface.sequelize.query('SELECT * FROM "Currencies"', {
         type: QueryTypes.SELECT,

From f3ec58a1fc9a18e001d2ba4d77967d297ed4d1ee Mon Sep 17 00:00:00 2001
From: Dmytro Svyrydenko
Date: Sun, 15 Sep 2024 20:46:05 +0200
Subject: [PATCH 2/2] feat: Setup e2e tests to run in parallel using Docker
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Additionally:
– add README for e2e tests
– update env variables template
– cleanup npm scripts
---
 .env.template | 7 +
 .github/workflows/check-source-code.yml | 33 ++---
 .github/workflows/image-to-docker-hub.yml | 5 +-
 docker/dev/docker-destroy.sh | 2 +-
 docker/test/Dockerfile | 19 +++
 docker/test/docker-compose.yml | 33 +++++
 jest.config.e2e.ts | 1 +
 package-lock.json | 134 ++++++++++++-------
 package.json | 6 +-
 src/app.ts | 22 +--
 src/common/helpers.ts | 25 ++++
 src/common/lib/redis/index.ts | 1 +
 src/common/lib/redis/key-formatter.ts | 8 ++
 src/controllers/banks/monobank.controller.ts | 32 ++---
 src/models/index.ts | 4 +
 src/redis.ts | 30 +++++
 src/services/accounts.service.ts | 14 +-
 src/tests/README.md | 37 +++++
 src/tests/setup-e2e-tests.sh | 52 +++++++
 src/tests/setupIntegrationTests.ts | 17 ++-
 20 files changed, 361 insertions(+), 121 deletions(-)
 create mode 100644 docker/test/Dockerfile
 create mode 100644 docker/test/docker-compose.yml
 create mode 100644 src/common/lib/redis/index.ts
 create mode 100644 src/common/lib/redis/key-formatter.ts
 create mode 100644 src/redis.ts
 create mode 100644 src/tests/README.md
 create mode 100755 src/tests/setup-e2e-tests.sh

diff --git a/.env.template b/.env.template
index 994a1206..0d36d9b9 100644
--- a/.env.template
+++ b/.env.template
@@ -4,10 +4,17 @@ API_LAYER_API_KEY=iVH7l3yBziOKwGSO7jYWYt1RDtb05oKf
 APPLICATION_HOST=127.0.0.1
 APPLICATION_PORT=8081
 APPLICATION_JWT_SECRET=development
+# for .env.test, use the db service name from docker/test/docker-compose.yml
 APPLICATION_DB_HOST=127.0.0.1
 APPLICATION_DB_USERNAME=
 APPLICATION_DB_PASSWORD=
 APPLICATION_DB_DATABASE=budget-tracker
 APPLICATION_DB_PORT=5432
 APPLICATION_DB_DIALECT=postgres
+# for .env.test, use the redis service name from docker/test/docker-compose.yml
 APPLICATION_REDIS_HOST=127.0.0.1
+
+# Tests configuration
+# e2e tests run in parallel, so we need a fixed number of workers,
+# so that we can dynamically create the same number of DBs
+JEST_WORKERS_AMOUNT=4
diff --git a/.github/workflows/check-source-code.yml b/.github/workflows/check-source-code.yml
index 577bf2b8..e425532f 100644
--- a/.github/workflows/check-source-code.yml
+++ b/.github/workflows/check-source-code.yml
@@ -11,7 +11,7 @@ jobs:
     name: Prepare local deps
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v4
      - id: prepare-env
        uses: ./.github/actions/prepare-local-env
      - name: Install dependencies
@@ -23,7 +23,7 @@ jobs:
     needs: prepare-dependencies
     runs-on: ubuntu-latest
     steps:
-      -
uses: actions/checkout@v2 + - uses: actions/checkout@v4 - uses: ./.github/actions/prepare-local-env - name: Lint source code run: npm run lint @@ -33,30 +33,11 @@ jobs: needs: prepare-dependencies runs-on: ubuntu-latest environment: test - services: - postgres: - image: postgres:11.12-stretch - env: - POSTGRES_DB: budget-tracker_test - POSTGRES_USER: ${{ secrets.CI_POSTGRES_USER }} - POSTGRES_PASSWORD: ${{ secrets.CI_POSTGRES_PASSWORD }} - options: >- - --health-cmd pg_isready - --health-interval 10s - --health-timeout 5s - --health-retries 5 - ports: - - 5432:5432 - redis: - image: redis:latest - ports: - - 6379:6379 - steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - uses: ./.github/actions/prepare-local-env - name: Make envfile - uses: SpicyPizza/create-envfile@v1 + uses: SpicyPizza/create-envfile@v2 with: envkey_APPLICATION_HOST: ${{ secrets.APPLICATION_HOST }} envkey_APPLICATION_PORT: ${{ secrets.APPLICATION_PORT }} @@ -68,6 +49,7 @@ jobs: envkey_APPLICATION_DB_PORT: ${{ secrets.APPLICATION_DB_PORT }} envkey_APPLICATION_DB_DIALECT: ${{ secrets.APPLICATION_DB_DIALECT }} envkey_APPLICATION_REDIS_HOST: ${{ secrets.APPLICATION_REDIS_HOST }} + envkey_JEST_WORKERS_AMOUNT: ${{ secrets.JEST_WORKERS_AMOUNT }} directory: ./ file_name: .env.test - name: Unit and e2e testing @@ -78,9 +60,9 @@ jobs: runs-on: ubuntu-latest environment: production steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - name: Make envfile - uses: SpicyPizza/create-envfile@v1 + uses: SpicyPizza/create-envfile@v2 with: envkey_APPLICATION_HOST: ${{ secrets.APPLICATION_HOST }} envkey_APPLICATION_PORT: ${{ secrets.APPLICATION_PORT }} @@ -92,6 +74,7 @@ jobs: envkey_APPLICATION_DB_PORT: ${{ secrets.APPLICATION_DB_PORT }} envkey_APPLICATION_DB_DIALECT: ${{ secrets.APPLICATION_DB_DIALECT }} envkey_APPLICATION_REDIS_HOST: ${{ secrets.APPLICATION_REDIS_HOST }} + envkey_JEST_WORKERS_AMOUNT: ${{ secrets.JEST_WORKERS_AMOUNT }} directory: ./ file_name: .env.production - uses: ./.github/actions/docker-build diff --git a/.github/workflows/image-to-docker-hub.yml b/.github/workflows/image-to-docker-hub.yml index 3a3ee0db..a5913c82 100644 --- a/.github/workflows/image-to-docker-hub.yml +++ b/.github/workflows/image-to-docker-hub.yml @@ -15,10 +15,10 @@ jobs: steps: - name: Check Out Repo - uses: actions/checkout@v2 + uses: actions/checkout@v4 - name: Make envfile - uses: SpicyPizza/create-envfile@v1 + uses: SpicyPizza/create-envfile@v2 with: envkey_APPLICATION_HOST: ${{ secrets.APPLICATION_HOST }} envkey_APPLICATION_PORT: ${{ secrets.APPLICATION_PORT }} @@ -30,6 +30,7 @@ jobs: envkey_APPLICATION_DB_PORT: ${{ secrets.APPLICATION_DB_PORT }} envkey_APPLICATION_DB_DIALECT: ${{ secrets.APPLICATION_DB_DIALECT }} envkey_APPLICATION_REDIS_HOST: ${{ secrets.APPLICATION_REDIS_HOST }} + envkey_JEST_WORKERS_AMOUNT: ${{ secrets.JEST_WORKERS_AMOUNT }} directory: ./ file_name: .env.production diff --git a/docker/dev/docker-destroy.sh b/docker/dev/docker-destroy.sh index d98b5837..bd3bcbad 100755 --- a/docker/dev/docker-destroy.sh +++ b/docker/dev/docker-destroy.sh @@ -3,4 +3,4 @@ echo "Starting removing all dev container completely..." npm run docker:dev -- -d -npm run docker:dev:down -- --rmi all --volumes --remove-orphans +npm run docker:dev:down -- --rmi all --volumes diff --git a/docker/test/Dockerfile b/docker/test/Dockerfile new file mode 100644 index 00000000..ee279760 --- /dev/null +++ b/docker/test/Dockerfile @@ -0,0 +1,19 @@ +FROM node:21.7.3 + +WORKDIR /app + +# Copy package.json and package-lock.json files. 
This allows Docker to cache the +# npm dependencies as long as these files don't change. +COPY package*.json ./ + +# Install dependencies +COPY post-install.sh ./ +COPY docker ./docker +RUN chmod +x ./post-install.sh +RUN npm ci + +# Copy the rest of the application +COPY . . + +# Run this command to keep container alive. Without it will be demounted right after deps installation +CMD ["tail", "-f", "/dev/null"] diff --git a/docker/test/docker-compose.yml b/docker/test/docker-compose.yml new file mode 100644 index 00000000..066c5c69 --- /dev/null +++ b/docker/test/docker-compose.yml @@ -0,0 +1,33 @@ +services: + test-db: + image: postgres:16 + restart: always + container_name: test-budget-tracker-db + volumes: ['test_db_data:/var/lib/postgresql/data'] + environment: + - POSTGRES_USER=${APPLICATION_DB_USERNAME} + - POSTGRES_PASSWORD=${APPLICATION_DB_PASSWORD} + - POSTGRES_DB=${APPLICATION_DB_DATABASE} + ports: ['${APPLICATION_DB_PORT}:5432'] + env_file: ../../.env.test + + test-redis: + image: redis:6 + container_name: test-budget-tracker-redis + volumes: ['test_redis_data:/data'] + ports: ['6379:6379'] + + test-runner: + build: + context: ../.. + dockerfile: docker/test/Dockerfile + depends_on: + - test-db + - test-redis + environment: + - NODE_ENV=test + env_file: ../../.env.test + +volumes: + test_db_data: + test_redis_data: diff --git a/jest.config.e2e.ts b/jest.config.e2e.ts index e5ba97c7..38548eb1 100644 --- a/jest.config.e2e.ts +++ b/jest.config.e2e.ts @@ -5,6 +5,7 @@ console.log('❗ RUNNING INTEGRATION TESTS'); /** @type {import('ts-jest/dist/types').InitialOptionsTsJest} */ export default { ...baseConfig, + maxWorkers: Number(process.env.JEST_WORKERS_AMOUNT), testMatch: ['/src/**/?(*.)+(e2e).[jt]s?(x)'], setupFilesAfterEnv: ['/src/tests/setupIntegrationTests.ts'], }; diff --git a/package-lock.json b/package-lock.json index 864ac23c..e36d1351 100644 --- a/package-lock.json +++ b/package-lock.json @@ -31,7 +31,7 @@ "passport-jwt": "4.0.1", "pg": "8.11.5", "pg-hstore": "2.3.4", - "redis": "3.1.1", + "redis": "4.7.0", "sequelize": "6.37.3", "sequelize-cli": "6.6.2", "sequelize-typescript": "2.1.6", @@ -1540,6 +1540,64 @@ "node": ">=14" } }, + "node_modules/@redis/bloom": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/@redis/bloom/-/bloom-1.2.0.tgz", + "integrity": "sha512-HG2DFjYKbpNmVXsa0keLHp/3leGJz1mjh09f2RLGGLQZzSHpkmZWuwJbAvo3QcRY8p80m5+ZdXZdYOSBLlp7Cg==", + "peerDependencies": { + "@redis/client": "^1.0.0" + } + }, + "node_modules/@redis/client": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/@redis/client/-/client-1.6.0.tgz", + "integrity": "sha512-aR0uffYI700OEEH4gYnitAnv3vzVGXCFvYfdpu/CJKvk4pHfLPEy/JSZyrpQ+15WhXe1yJRXLtfQ84s4mEXnPg==", + "dependencies": { + "cluster-key-slot": "1.1.2", + "generic-pool": "3.9.0", + "yallist": "4.0.0" + }, + "engines": { + "node": ">=14" + } + }, + "node_modules/@redis/client/node_modules/yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" + }, + "node_modules/@redis/graph": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/@redis/graph/-/graph-1.1.1.tgz", + "integrity": "sha512-FEMTcTHZozZciLRl6GiiIB4zGm5z5F3F6a6FZCyrfxdKOhFlGkiAqlexWMBzCi4DcRoyiOsuLfW+cjlGWyExOw==", + "peerDependencies": { + "@redis/client": "^1.0.0" + } + }, + "node_modules/@redis/json": { + "version": "1.0.7", + "resolved": 
"https://registry.npmjs.org/@redis/json/-/json-1.0.7.tgz", + "integrity": "sha512-6UyXfjVaTBTJtKNG4/9Z8PSpKE6XgSyEb8iwaqDcy+uKrd/DGYHTWkUdnQDyzm727V7p21WUMhsqz5oy65kPcQ==", + "peerDependencies": { + "@redis/client": "^1.0.0" + } + }, + "node_modules/@redis/search": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/@redis/search/-/search-1.2.0.tgz", + "integrity": "sha512-tYoDBbtqOVigEDMAcTGsRlMycIIjwMCgD8eR2t0NANeQmgK/lvxNAvYyb6bZDD4frHRhIHkJu2TBRvB0ERkOmw==", + "peerDependencies": { + "@redis/client": "^1.0.0" + } + }, + "node_modules/@redis/time-series": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@redis/time-series/-/time-series-1.1.0.tgz", + "integrity": "sha512-c1Q99M5ljsIuc4YdaCwfUEXsofakb9c8+Zse2qxTadu8TalLXuAESzLvFAvNVbkmSlvlzIQOLpBCmWI9wTOt+g==", + "peerDependencies": { + "@redis/client": "^1.0.0" + } + }, "node_modules/@sinclair/typebox": { "version": "0.27.8", "resolved": "https://registry.npmjs.org/@sinclair/typebox/-/typebox-0.27.8.tgz", @@ -2865,6 +2923,14 @@ "semver": "bin/semver" } }, + "node_modules/cluster-key-slot": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/cluster-key-slot/-/cluster-key-slot-1.1.2.tgz", + "integrity": "sha512-RMr0FhtfXemyinomL4hrWcYJxmX6deFdCxpJzhDttxgO1+bcCnkk+9drydLVDmAMG7NE6aN/fl4F7ucU/90gAA==", + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/co": { "version": "4.6.0", "resolved": "https://registry.npmjs.org/co/-/co-4.6.0.tgz", @@ -3232,14 +3298,6 @@ "node": ">=0.4.0" } }, - "node_modules/denque": { - "version": "1.5.1", - "resolved": "https://registry.npmjs.org/denque/-/denque-1.5.1.tgz", - "integrity": "sha512-XwE+iZ4D6ZUB7mfYRMb5wByE8L74HCn30FBN7sWnXksWc1LO1bPDl67pBR9o/kC4z/xSNAwkMYcGgqDV3BE3Hw==", - "engines": { - "node": ">=0.10" - } - }, "node_modules/depd": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz", @@ -4574,6 +4632,14 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/generic-pool": { + "version": "3.9.0", + "resolved": "https://registry.npmjs.org/generic-pool/-/generic-pool-3.9.0.tgz", + "integrity": "sha512-hymDOu5B53XvN4QT9dBmZxPX4CWhBPPLguTZ9MMFeFa/Kg0xWVfylOVNlJji/E7yTZWFd/q9GO5TxDLq156D7g==", + "engines": { + "node": ">= 4" + } + }, "node_modules/gensync": { "version": "1.0.0-beta.2", "resolved": "https://registry.npmjs.org/gensync/-/gensync-1.0.0-beta.2.tgz", @@ -7495,45 +7561,19 @@ } }, "node_modules/redis": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/redis/-/redis-3.1.1.tgz", - "integrity": "sha512-QhkKhOuzhogR1NDJfBD34TQJz2ZJwDhhIC6ZmvpftlmfYShHHQXjjNspAJ+Z2HH5NwSBVYBVganbiZ8bgFMHjg==", - "dependencies": { - "denque": "^1.5.0", - "redis-commands": "^1.7.0", - "redis-errors": "^1.2.0", - "redis-parser": "^3.0.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/node-redis" - } - }, - "node_modules/redis-commands": { - "version": "1.7.0", - "resolved": "https://registry.npmjs.org/redis-commands/-/redis-commands-1.7.0.tgz", - "integrity": "sha512-nJWqw3bTFy21hX/CPKHth6sfhZbdiHP6bTawSgQBlKOVRG7EZkfHbbHwQJnrE4vsQf0CMNE+3gJ4Fmm16vdVlQ==" - }, - "node_modules/redis-errors": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/redis-errors/-/redis-errors-1.2.0.tgz", - "integrity": "sha512-1qny3OExCf0UvUV/5wpYKf2YwPcOqXzkwKKSmKHiE6ZMQs5heeE/c8eXK+PNllPvmjgAbfnsbpkGZWy8cBpn9w==", - "engines": { - "node": ">=4" - } - }, - "node_modules/redis-parser": { - "version": "3.0.0", - "resolved": 
"https://registry.npmjs.org/redis-parser/-/redis-parser-3.0.0.tgz", - "integrity": "sha512-DJnGAeenTdpMEH6uAJRK/uiyEIH9WVsUmoLwzudwGJUwZPp80PDBWPHXSAGNPwNvIXAbe7MSUB1zQFugFml66A==", + "version": "4.7.0", + "resolved": "https://registry.npmjs.org/redis/-/redis-4.7.0.tgz", + "integrity": "sha512-zvmkHEAdGMn+hMRXuMBtu4Vo5P6rHQjLoHftu+lBqq8ZTA3RCVC/WzD790bkKKiNFp7d5/9PcSD19fJyyRvOdQ==", + "workspaces": [ + "./packages/*" + ], "dependencies": { - "redis-errors": "^1.0.0" - }, - "engines": { - "node": ">=4" + "@redis/bloom": "1.2.0", + "@redis/client": "1.6.0", + "@redis/graph": "1.1.1", + "@redis/json": "1.0.7", + "@redis/search": "1.2.0", + "@redis/time-series": "1.1.0" } }, "node_modules/reflect-metadata": { diff --git a/package.json b/package.json index 7fe36d27..84477e41 100644 --- a/package.json +++ b/package.json @@ -10,11 +10,9 @@ "migrate:dev:undo": "cross-env NODE_ENV=development npx sequelize-cli db:migrate:undo", "migrate": "cross-env NODE_ENV=production npx sequelize-cli db:migrate", "migrate:undo": "cross-env NODE_ENV=production npx sequelize-cli db:migrate:undo", - "db:reset": "cross-env NODE_ENV=test npx sequelize-cli db:drop && npx sequelize-cli db:create && npx sequelize-cli db:migrate", - "pretest": "cross-env NODE_ENV=test npm run db:reset", "test": "cross-env NODE_ENV=test npm run test:unit && npm run test:e2e", "test:unit": "cross-env NODE_ENV=test jest -c jest.config.unit.ts --passWithNoTests --forceExit --detectOpenHandles", - "test:e2e": "cross-env NODE_ENV=test jest -c jest.config.e2e.ts --runInBand --passWithNoTests --forceExit --detectOpenHandles", + "test:e2e": "chmod +x ./src/tests/setup-e2e-tests.sh && ./src/tests/setup-e2e-tests.sh", "lint": "eslint .", "docker:dev": "docker compose --env-file .env.development -f ./docker/dev/docker-compose.yml up --build", "docker:dev:ps": "docker compose --env-file .env.development -f ./docker/dev/docker-compose.yml ps", @@ -75,7 +73,7 @@ "passport-jwt": "4.0.1", "pg": "8.11.5", "pg-hstore": "2.3.4", - "redis": "3.1.1", + "redis": "4.7.0", "sequelize": "6.37.3", "sequelize-cli": "6.6.2", "sequelize-typescript": "2.1.6", diff --git a/src/app.ts b/src/app.ts index af47697c..45abadcb 100644 --- a/src/app.ts +++ b/src/app.ts @@ -3,13 +3,11 @@ import 'module-alias/register'; dotenv.config({ path: `.env.${process.env.NODE_ENV}` }); import config from 'config'; -import express, { Request } from 'express'; +import express from 'express'; import cors from 'cors'; import morgan from 'morgan'; -import { createClient } from 'redis'; import locale from 'locale'; import passport from 'passport'; -import { promisify } from 'util'; import { logger } from '@js/utils/logger'; /** @@ -30,24 +28,10 @@ import { supportedLocales } from './translations'; import middlewarePassword from './middlewares/passport'; +import './redis'; + export const app = express(); const apiPrefix = config.get('apiPrefix'); -export const redisClient = createClient({ - host: config.get('redis.host'), -}); - -redisClient.on('error', (error: Error) => { - logger.error({ message: 'Redis Client Error', error }); -}); - -['get', 'set', 'del', 'expire'].forEach((item) => { - redisClient[item] = promisify(redisClient[item]); -}); - -app.use((req, res, next) => { - (req as Request & typeof redisClient).redisClient = redisClient; - next(); -}); app.use(passport.initialize()); middlewarePassword(passport); diff --git a/src/common/helpers.ts b/src/common/helpers.ts index 3152d550..e051f342 100644 --- a/src/common/helpers.ts +++ b/src/common/helpers.ts @@ -3,3 +3,28 @@ export const 
getQueryBooleanValue = (value: string): boolean => { if (value === 'false') return false; return Boolean(value); }; + +// To wait until `fn` returns true +export const until = async ( + fn: () => Promise | T, + timeout: number = 30_000, + interval: number = 500, +): Promise => { + const startTime = Date.now(); + + const poll = async (resolve: () => void, reject: (reason: Error) => void): Promise => { + try { + if (await fn()) { + resolve(); + } else if (Date.now() - startTime > timeout) { + reject(new Error('Timeout exceeded')); + } else { + setTimeout(() => poll(resolve, reject), interval); + } + } catch (error) { + reject(error as Error); + } + }; + + return new Promise(poll); +}; diff --git a/src/common/lib/redis/index.ts b/src/common/lib/redis/index.ts new file mode 100644 index 00000000..3e16e843 --- /dev/null +++ b/src/common/lib/redis/index.ts @@ -0,0 +1 @@ +export * from './key-formatter'; diff --git a/src/common/lib/redis/key-formatter.ts b/src/common/lib/redis/key-formatter.ts new file mode 100644 index 00000000..cb9b9041 --- /dev/null +++ b/src/common/lib/redis/key-formatter.ts @@ -0,0 +1,8 @@ +// This helper became required with parallel Jest tests. Without this change different tests +// will override Redis in their processes that will lead to broken tests +export const redisKeyFormatter = (key) => { + if (process.env.JEST_WORKER_ID) { + return `${process.env.JEST_WORKER_ID}-${key}`; + } + return key; +}; diff --git a/src/controllers/banks/monobank.controller.ts b/src/controllers/banks/monobank.controller.ts index 37e1e031..8cced39a 100644 --- a/src/controllers/banks/monobank.controller.ts +++ b/src/controllers/banks/monobank.controller.ts @@ -15,6 +15,7 @@ import { TRANSACTION_TRANSFER_NATURE, } from 'shared-types'; import { CustomResponse } from '@common/types'; +import { redisClient } from '@root/redis'; import * as accountsService from '@services/accounts.service'; import * as transactionsService from '@services/transactions'; @@ -29,6 +30,7 @@ import * as Users from '@models/Users.model'; import { logger } from '@js/utils/logger'; import { errorHandler } from '@controllers/helpers'; import { ERROR_CODES, ValidationError } from '@js/errors'; +import { redisKeyFormatter } from '@common/lib/redis'; const usersQuery = new Map(); @@ -281,10 +283,10 @@ export const updateWebhook = async (req, res: CustomResponse) => { const { clientId }: endpointsTypes.UpdateWebhookBody = req.body; const { id } = req.user; - const token = `monobank-${id}-update-webhook`; - const tempToken = await req.redisClient.get(token); + const token = redisKeyFormatter(`monobank-${id}-update-webhook`); + const tempToken = await redisClient.get(token); - if (!tempToken) { + if (tempToken !== 'true') { await monobankUsersService.updateUser({ systemUserId: id, clientId, @@ -294,8 +296,8 @@ export const updateWebhook = async (req, res: CustomResponse) => { // TODO: why here we don't pass userToken? 
await updateWebhookAxios(); - await req.redisClient.set(token, true); - await req.redisClient.expire(token, 60); + await redisClient.set(token, 'true'); + await redisClient.expire(token, 60); return res.status(200).json({ status: API_RESPONSE_STATUS.success }); } @@ -326,10 +328,10 @@ export const loadTransactions = async (req, res: CustomResponse) => { if (!to || !Number(to)) throw new ValidationError({ message: '"to" field is invalid' }); if (!accountId) throw new ValidationError({ message: '"accountId" field is required' }); - const redisToken = `monobank-${systemUserId}-load-transactions`; - const tempRedisToken = await req.redisClient.get(redisToken); + const redisToken = redisKeyFormatter(`monobank-${systemUserId}-load-transactions`); + const tempRedisToken = await redisClient.get(redisToken); - if (tempRedisToken) { + if (tempRedisToken === 'true') { return res.status(ERROR_CODES.TooManyRequests).json({ status: API_RESPONSE_STATUS.error, response: { @@ -421,8 +423,8 @@ export const loadTransactions = async (req, res: CustomResponse) => { } } catch (err) { if (err?.response?.status === ERROR_CODES.TooManyRequests) { - await req.redisClient.set(redisToken, true); - await req.redisClient.expire(redisToken, 60); + await redisClient.set(redisToken, 'true'); + await redisClient.expire(redisToken, 60); } else { logger.error(err); } @@ -476,10 +478,10 @@ export const refreshAccounts = async (req, res) => { }); } - const token = `monobank-${systemUserId}-client-info`; - const tempToken = await req.redisClient.get(token); + const token = redisKeyFormatter(`monobank-${systemUserId}-client-info`); + const tempToken = await redisClient.get(token); - if (!tempToken) { + if (tempToken !== 'true') { let clientInfo: ExternalMonobankClientInfoResponse; try { clientInfo = ( @@ -517,8 +519,8 @@ export const refreshAccounts = async (req, res) => { }); } - await req.redisClient.set(token, true); - await req.redisClient.expire(token, 60); + await redisClient.set(token, 'true'); + await redisClient.expire(token, 60); const existingAccounts = await accountsService.getAccountsByExternalIds({ userId: monoUser.systemUserId, diff --git a/src/models/index.ts b/src/models/index.ts index 638f51aa..d0315132 100644 --- a/src/models/index.ts +++ b/src/models/index.ts @@ -16,6 +16,10 @@ const DBConfig: Record = config.get('db'); const sequelize = new Sequelize({ ...DBConfig, + database: + process.env.NODE_ENV === 'test' + ? `${DBConfig.database}-${process.env.JEST_WORKER_ID}` + : (DBConfig.database as string), models: [__dirname + '/**/*.model.ts'], pool: { max: 50, diff --git a/src/redis.ts b/src/redis.ts new file mode 100644 index 00000000..7b7c2947 --- /dev/null +++ b/src/redis.ts @@ -0,0 +1,30 @@ +import dotenv from 'dotenv'; +import 'module-alias/register'; +import { createClient } from '@redis/client'; + +dotenv.config({ path: `.env.${process.env.NODE_ENV}` }); +import config from 'config'; + +import { logger } from '@js/utils/logger'; + +export const redisClient = createClient({ + socket: { + host: config.get('redis.host'), + connectTimeout: 5000, + }, +}); + +console.time('connect-to-redis'); +redisClient + .connect() + .then(() => { + console.log('App connected to Redis! 
Took: ');
+    console.timeEnd('connect-to-redis');
+  })
+  .catch((err) => {
+    console.error('Cannot connect to Redis!', err);
+  });
+
+redisClient.on('error', (error: Error) => {
+  logger.error({ message: 'Redis Client Error', error });
+});
diff --git a/src/services/accounts.service.ts b/src/services/accounts.service.ts
index dd19626d..e33116e2 100644
--- a/src/services/accounts.service.ts
+++ b/src/services/accounts.service.ts
@@ -12,11 +12,12 @@ import * as Accounts from '@models/Accounts.model';
 import * as monobankUsersService from '@services/banks/monobank/users';
 import * as Currencies from '@models/Currencies.model';
 import * as userService from '@services/user.service';
-import { redisClient } from '@root/app';
+import { redisClient } from '@root/redis';
 import { NotFoundError } from '@js/errors';
 import Balances from '@models/Balances.model';
 import { calculateRefAmount } from '@services/calculate-ref-amount.service';
 import { withTransaction } from './common';
+import { redisKeyFormatter } from '@common/lib/redis';
 export const getAccounts = withTransaction(
   async (payload: Accounts.GetAccountsPayload): Promise =>
@@ -96,8 +97,10 @@ export const pairMonobankAccount = withTransaction(
       return { connected: true };
     }
+    const redisToken = redisKeyFormatter(token);
+
     // Otherwise begin user connection
-    const response: string = await redisClient.get(token);
+    const response: string = await redisClient.get(redisToken);
     let clientInfo: ExternalMonobankClientInfoResponse;
     if (!response) {
@@ -122,8 +125,11 @@ export const pairMonobankAccount = withTransaction(
       clientInfo = result.data;
-      await redisClient.set(token, JSON.stringify(response));
-      await redisClient.expire(token, 60);
+      await redisClient
+        .multi()
+        .set(redisToken, JSON.stringify(response))
+        .expire(redisToken, 60)
+        .exec();
     } else {
       clientInfo = JSON.parse(response);
     }
diff --git a/src/tests/README.md b/src/tests/README.md
new file mode 100644
index 00000000..3929c8d2
--- /dev/null
+++ b/src/tests/README.md
@@ -0,0 +1,37 @@
+## End-to-End (E2E) Tests Setup
+
+The e2e test setup is designed to run tests efficiently in parallel using multiple databases. Below is a detailed description of the setup process and the necessary configuration.
+
+### Overview
+
+The current implementation of E2E tests uses multiple databases to facilitate parallel test execution. Since each test expects to work with a fresh, empty DB, we need to empty it before each test suite. With a single DB, that means tests can only run sequentially.
+
+### Jest Configuration
+
+We use Jest as our testing framework and run `JEST_WORKERS_AMOUNT` workers in parallel. Each worker requires a separate database instance.
+
+### Database Setup
+
+For each Jest worker, a corresponding database is created with the naming convention `{APPLICATION_DB_DATABASE}-{n}`, where `n` is the worker ID. Worker IDs range from `1` to `JEST_WORKERS_AMOUNT`.
+
+### Database Connection
+
+The database connection is specified in `src/models/index.ts`. Here, we dynamically assign the database name based on the Jest worker ID.
+
+```ts
+database: process.env.NODE_ENV === 'test'
+  ? `${DBConfig.database}-${process.env.JEST_WORKER_ID}`
+  : (DBConfig.database as string),
+```
+
+### Redis Connection
+
+To run tests correctly, we also need to set Redis keys per worker and empty the worker-related keys before each test. The per-worker prefix is managed by `redisKeyFormatter`, and the cleanup logic lives in `src/tests/setupIntegrationTests.ts`. This is just FYI; no additional actions are required.
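+
+If you ever need to inspect a worker's state while debugging, a small sketch (the container
+names come from `docker/test/docker-compose.yml`; the DB user and database name are whatever
+you set in `.env.test`):
+
+```sh
+# List the per-worker databases ({APPLICATION_DB_DATABASE}-1 ... -N)
+docker exec -it test-budget-tracker-db psql -U <your-db-username> -l
+
+# List the Redis keys that belong to worker 2 (keys are prefixed with the worker id)
+docker exec -it test-budget-tracker-redis redis-cli KEYS '2-*'
+```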
+
+### Docker Integration
+
+To simplify the setup and avoid conflicts with the local environment, we use Docker to manage our databases and the application.
+
+The application is also containerized. We run our tests from within this Docker container to ensure it can communicate with the database containers.
+
+All Docker configs for tests are stored under the `./docker/test/` directory.
diff --git a/src/tests/setup-e2e-tests.sh b/src/tests/setup-e2e-tests.sh
new file mode 100755
index 00000000..a00e6b2f
--- /dev/null
+++ b/src/tests/setup-e2e-tests.sh
@@ -0,0 +1,52 @@
+#!/bin/bash
+
+# Source environment variables from .env.test file
+if [ -f .env.test ]; then
+  export $(cat .env.test | grep -v '#' | awk '/=/ {print $1}')
+else
+  echo ".env.test file not found"
+  exit 1
+fi
+
+# Start the containers and run tests
+docker compose -f ./docker/test/docker-compose.yml up --build -d
+
+echo "Waiting a bit..."
+sleep 3
+
+# Additional checks for better debugging
+echo "Waiting for Postgres to respond to the health check..."
+docker compose -f ./docker/test/docker-compose.yml exec -T test-db pg_isready -U "${APPLICATION_DB_USERNAME}" -d "${APPLICATION_DB_DATABASE}"
+echo "Waiting for Redis to respond to the health check..."
+docker compose -f ./docker/test/docker-compose.yml exec -T test-redis redis-cli ping
+
+echo "Creating databases..."
+
+# Drop and re-create the databases. Their number is based on the JEST_WORKERS_AMOUNT env variable
+docker compose -f ./docker/test/docker-compose.yml exec -T test-db bash -c "
+for i in \$(seq 1 \$JEST_WORKERS_AMOUNT); do
+  psql -U \"${APPLICATION_DB_USERNAME}\" -d postgres -c \"DROP DATABASE IF EXISTS \\\"${APPLICATION_DB_DATABASE}-\$i\\\";\"
+  psql -U \"${APPLICATION_DB_USERNAME}\" -d postgres -c \"CREATE DATABASE \\\"${APPLICATION_DB_DATABASE}-\$i\\\";\"
+done
+"
+
+echo "Running tests..."
+# Run tests
+docker compose -f ./docker/test/docker-compose.yml exec -T test-runner \
+  npx jest -c jest.config.e2e.ts --passWithNoTests --forceExit --colors "$@"
+
+# Capture the exit code
+TEST_EXIT_CODE=$?
+ +# Clean up +docker compose -f ./docker/test/docker-compose.yml down -v --remove-orphans --volumes + +# Check the exit code and display an error message if it's 1 +if [ $TEST_EXIT_CODE -eq 1 ]; then + echo -e "\n\n$(tput setaf 1)ERROR: Tests failed!$(tput sgr0)" +else + echo -e "\n\n$(tput setaf 2)Tests passed successfully.$(tput sgr0)" +fi + +# Exit with the test exit code +exit $TEST_EXIT_CODE diff --git a/src/tests/setupIntegrationTests.ts b/src/tests/setupIntegrationTests.ts index 29c0e193..542ec5ee 100644 --- a/src/tests/setupIntegrationTests.ts +++ b/src/tests/setupIntegrationTests.ts @@ -1,8 +1,10 @@ import path from 'path'; import Umzug from 'umzug'; -import { serverInstance, redisClient } from '@root/app'; +import { serverInstance } from '@root/app'; +import { redisClient } from '@root/redis'; import { connection } from '@models/index'; import { makeRequest, extractResponse } from '@tests/helpers'; +import { until } from '@common/helpers'; jest.mock('axios'); @@ -58,9 +60,17 @@ expect.extend({ beforeEach(async () => { try { + await until(async () => { + // Wait until connection is established + const result = await redisClient.hello(); + return !!result; + }); await connection.sequelize.drop({ cascade: true }); await dropAllEnums(connection.sequelize); - redisClient.FLUSHALL('SYNC'); + const workerKeys = await redisClient.keys(`${process.env.JEST_WORKER_ID}*`); + if (workerKeys.length) { + await redisClient.del(workerKeys); + } await umzug.up(); await makeRequest({ @@ -104,12 +114,11 @@ beforeEach(async () => { } catch (err) { console.log(err); } -}); +}, 10_000); afterAll(async () => { try { await redisClient.quit(); - // await connection.sequelize.close(); await serverInstance.close(); } catch (err) { console.log('afterAll', err);