mirror of https://github.com/standardnotes/server, synced 2026-01-17 05:04:27 -05:00
Compare commits
20 Commits
| Author | SHA1 | Date |
|---|---|---|
| | a0e4926135 | |
| | b1b244a2cf | |
| | b4a38d9dcc | |
| | 71e2a4187e | |
| | c94f9c1a60 | |
| | b9907924be | |
| | d4c188c096 | |
| | 2ccaf490df | |
| | cd893b41d7 | |
| | 3838358081 | |
| | 1a388f00c3 | |
| | bd71422fab | |
| | 3e1697f47f | |
| | 032cde7723 | |
| | 34b956b482 | |
| | 681e0378ae | |
| | 609e85f926 | |
| | e4ca310707 | |
| | d606493356 | |
| | 5ef6c5c14a | |
168 .github/workflows/common-e2e.yml vendored
@@ -7,6 +7,10 @@ on:
type: string
default: latest
description: The Docker image tag used for SNJS container
suite:
type: string
default: all
description: The test suite to run
secrets:
DOCKER_USERNAME:
required: true
@@ -18,158 +22,16 @@ on:
required: true

jobs:
e2e:
name: (Self Hosting) E2E Test Suite
strategy:
fail-fast: false
matrix:
suite: ['base', 'vaults']
secondary_db_enabled: [true, false]
runs-on: ubuntu-latest

services:
snjs:
image: standardnotes/snjs:${{ inputs.snjs_image_tag }}
ports:
- 9001:9001

steps:
- uses: actions/checkout@v3

- name: Set up Node
uses: actions/setup-node@v3
with:
registry-url: 'https://registry.npmjs.org'
node-version-file: '.nvmrc'

- name: Install
run: yarn install --immutable

- name: Run Server
run: docker compose -f docker-compose.ci.yml up -d
env:
DB_TYPE: mysql
CACHE_TYPE: redis
SECONDARY_DB_ENABLED: ${{ matrix.secondary_db_enabled }}

- name: Wait for server to start
run: docker/is-available.sh http://localhost:3123 $(pwd)/logs

- name: Run E2E Test Suite
run: yarn dlx mocha-headless-chrome --timeout 3600000 -f http://localhost:9001/mocha/test.html?suite=${{ matrix.suite }}

- name: Archive failed run logs
if: ${{ failure() }}
uses: actions/upload-artifact@v3
with:
name: self-hosted-failure-logs-${{ matrix.suite }}-${{ matrix.secondary_db_enabled }}
retention-days: 5
path: |
logs/*.err
logs/*.log
e2e-self-hosted:
uses: standardnotes/server/.github/workflows/e2e-self-hosted.yml@main
with:
snjs_image_tag: ${{ inputs.snjs_image_tag }}
suite: ${{ inputs.suite }}
secrets: inherit

e2e-home-server:
name: (Home Server) E2E Test Suite
strategy:
fail-fast: false
matrix:
suite: ['base', 'vaults']
db_type: [mysql, sqlite]
cache_type: [redis, memory]
secondary_db_enabled: [true, false]

runs-on: ubuntu-latest

services:
snjs:
image: standardnotes/snjs:${{ inputs.snjs_image_tag }}
ports:
- 9001:9001
cache:
image: redis
ports:
- 6379:6379
db:
image: mysql
ports:
- 3306:3306
env:
MYSQL_ROOT_PASSWORD: root
MYSQL_DATABASE: standardnotes
MYSQL_USER: standardnotes
MYSQL_PASSWORD: standardnotes
secondary_db:
image: mongo:5.0
ports:
- 27017:27017
env:
MONGO_INITDB_ROOT_USERNAME: standardnotes
MONGO_INITDB_ROOT_PASSWORD: standardnotes
MONGO_INITDB_DATABASE: standardnotes

steps:
- uses: actions/checkout@v3

- name: Set up Node
uses: actions/setup-node@v3
with:
registry-url: 'https://registry.npmjs.org'
node-version-file: '.nvmrc'

- name: Install Dependencies
run: yarn install --immutable

- name: Build
run: yarn build

- name: Copy dotenv file
run: cp packages/home-server/.env.sample packages/home-server/.env

- name: Fill in env variables
run: |
sed -i "s/JWT_SECRET=/JWT_SECRET=$(openssl rand -hex 32)/g" packages/home-server/.env
sed -i "s/AUTH_JWT_SECRET=/AUTH_JWT_SECRET=$(openssl rand -hex 32)/g" packages/home-server/.env
sed -i "s/ENCRYPTION_SERVER_KEY=/ENCRYPTION_SERVER_KEY=$(openssl rand -hex 32)/g" packages/home-server/.env
sed -i "s/PSEUDO_KEY_PARAMS_KEY=/PSEUDO_KEY_PARAMS_KEY=$(openssl rand -hex 32)/g" packages/home-server/.env
sed -i "s/VALET_TOKEN_SECRET=/VALET_TOKEN_SECRET=$(openssl rand -hex 32)/g" packages/home-server/.env
echo "ACCESS_TOKEN_AGE=4" >> packages/home-server/.env
echo "REFRESH_TOKEN_AGE=10" >> packages/home-server/.env
echo "REVISIONS_FREQUENCY=2" >> packages/home-server/.env
echo "DB_HOST=localhost" >> packages/home-server/.env
echo "DB_PORT=3306" >> packages/home-server/.env
echo "DB_DATABASE=standardnotes" >> packages/home-server/.env
echo "DB_SQLITE_DATABASE_PATH=homeserver.db" >> packages/home-server/.env
echo "DB_USERNAME=standardnotes" >> packages/home-server/.env
echo "DB_PASSWORD=standardnotes" >> packages/home-server/.env
echo "DB_TYPE=${{ matrix.db_type }}" >> packages/home-server/.env
echo "DB_DEBUG_LEVEL=all" >> packages/home-server/.env
echo "REDIS_URL=redis://localhost:6379" >> packages/home-server/.env
echo "CACHE_TYPE=${{ matrix.cache_type }}" >> packages/home-server/.env
echo "SECONDARY_DB_ENABLED=${{ matrix.secondary_db_enabled }}" >> packages/home-server/.env
echo "MONGO_HOST=localhost" >> packages/home-server/.env
echo "MONGO_PORT=27017" >> packages/home-server/.env
echo "MONGO_DATABASE=standardnotes" >> packages/home-server/.env
echo "MONGO_USERNAME=standardnotes" >> packages/home-server/.env
echo "MONGO_PASSWORD=standardnotes" >> packages/home-server/.env
echo "FILES_SERVER_URL=http://localhost:3123" >> packages/home-server/.env
echo "E2E_TESTING=true" >> packages/home-server/.env

- name: Run Server
run: nohup yarn workspace @standardnotes/home-server start > logs/output.log 2>&1 &
env:
PORT: 3123

- name: Wait for server to start
run: for i in {1..30}; do curl -s http://localhost:3123/healthcheck && break || sleep 1; done

- name: Run E2E Test Suite
run: yarn dlx mocha-headless-chrome --timeout 3600000 -f http://localhost:9001/mocha/test.html?suite=${{ matrix.suite }}

- name: Archive failed run logs
if: ${{ failure() }}
uses: actions/upload-artifact@v3
with:
name: home-server-failure-logs-${{ matrix.suite }}-${{ matrix.db_type }}-${{ matrix.cache_type }}-${{ matrix.secondary_db_enabled }}
retention-days: 5
path: |
logs/output.log
uses: standardnotes/server/.github/workflows/e2e-home-server.yml@main
with:
snjs_image_tag: ${{ inputs.snjs_image_tag }}
suite: ${{ inputs.suite }}
secrets: inherit
128 .github/workflows/e2e-home-server.yml vendored Normal file
@@ -0,0 +1,128 @@
name: E2E Home Server Test Suite

on:
workflow_call:
inputs:
snjs_image_tag:
type: string
default: latest
description: The Docker image tag used for SNJS container
suite:
type: string
default: all
description: The test suite to run
secrets:
DOCKER_USERNAME:
required: true
DOCKER_PASSWORD:
required: true
AWS_ACCESS_KEY_ID:
required: true
AWS_SECRET_ACCESS_KEY:
required: true

jobs:
e2e-home-server:
name: (Home Server) E2E Test Suite
strategy:
fail-fast: false
matrix:
db_type: [mysql, sqlite]
cache_type: [redis, memory]
secondary_db_enabled: [true, false]

runs-on: ubuntu-latest

services:
snjs:
image: standardnotes/snjs:${{ inputs.snjs_image_tag }}
ports:
- 9001:9001
cache:
image: redis
ports:
- 6379:6379
db:
image: mysql
ports:
- 3306:3306
env:
MYSQL_ROOT_PASSWORD: root
MYSQL_DATABASE: standardnotes
MYSQL_USER: standardnotes
MYSQL_PASSWORD: standardnotes
secondary_db:
image: mongo:5.0
ports:
- 27017:27017
env:
MONGO_INITDB_ROOT_USERNAME: standardnotes
MONGO_INITDB_ROOT_PASSWORD: standardnotes
MONGO_INITDB_DATABASE: standardnotes

steps:
- uses: actions/checkout@v3

- name: Set up Node
uses: actions/setup-node@v3
with:
registry-url: 'https://registry.npmjs.org'
node-version-file: '.nvmrc'

- name: Install Dependencies
run: yarn install --immutable

- name: Build
run: yarn build

- name: Copy dotenv file
run: cp packages/home-server/.env.sample packages/home-server/.env

- name: Fill in env variables
run: |
sed -i "s/JWT_SECRET=/JWT_SECRET=$(openssl rand -hex 32)/g" packages/home-server/.env
sed -i "s/AUTH_JWT_SECRET=/AUTH_JWT_SECRET=$(openssl rand -hex 32)/g" packages/home-server/.env
sed -i "s/ENCRYPTION_SERVER_KEY=/ENCRYPTION_SERVER_KEY=$(openssl rand -hex 32)/g" packages/home-server/.env
sed -i "s/PSEUDO_KEY_PARAMS_KEY=/PSEUDO_KEY_PARAMS_KEY=$(openssl rand -hex 32)/g" packages/home-server/.env
sed -i "s/VALET_TOKEN_SECRET=/VALET_TOKEN_SECRET=$(openssl rand -hex 32)/g" packages/home-server/.env
echo "ACCESS_TOKEN_AGE=4" >> packages/home-server/.env
echo "REFRESH_TOKEN_AGE=10" >> packages/home-server/.env
echo "REVISIONS_FREQUENCY=2" >> packages/home-server/.env
echo "DB_HOST=localhost" >> packages/home-server/.env
echo "DB_PORT=3306" >> packages/home-server/.env
echo "DB_DATABASE=standardnotes" >> packages/home-server/.env
echo "DB_SQLITE_DATABASE_PATH=homeserver.db" >> packages/home-server/.env
echo "DB_USERNAME=standardnotes" >> packages/home-server/.env
echo "DB_PASSWORD=standardnotes" >> packages/home-server/.env
echo "DB_TYPE=${{ matrix.db_type }}" >> packages/home-server/.env
echo "DB_DEBUG_LEVEL=all" >> packages/home-server/.env
echo "REDIS_URL=redis://localhost:6379" >> packages/home-server/.env
echo "CACHE_TYPE=${{ matrix.cache_type }}" >> packages/home-server/.env
echo "SECONDARY_DB_ENABLED=${{ matrix.secondary_db_enabled }}" >> packages/home-server/.env
echo "MONGO_HOST=localhost" >> packages/home-server/.env
echo "MONGO_PORT=27017" >> packages/home-server/.env
echo "MONGO_DATABASE=standardnotes" >> packages/home-server/.env
echo "MONGO_USERNAME=standardnotes" >> packages/home-server/.env
echo "MONGO_PASSWORD=standardnotes" >> packages/home-server/.env
echo "FILES_SERVER_URL=http://localhost:3123" >> packages/home-server/.env
echo "E2E_TESTING=true" >> packages/home-server/.env

- name: Run Server
run: nohup yarn workspace @standardnotes/home-server start > logs/output.log 2>&1 &
env:
PORT: 3123

- name: Wait for server to start
run: for i in {1..30}; do curl -s http://localhost:3123/healthcheck && break || sleep 1; done

- name: Run E2E Test Suite
run: yarn dlx mocha-headless-chrome --timeout 3600000 -f http://localhost:9001/mocha/test.html?suite=${{ inputs.suite }}

- name: Archive failed run logs
if: ${{ failure() }}
uses: actions/upload-artifact@v3
with:
name: home-server-failure-logs-${{ inputs.suite }}-${{ matrix.db_type }}-${{ matrix.cache_type }}-${{ matrix.secondary_db_enabled }}
retention-days: 5
path: |
logs/output.log
72 .github/workflows/e2e-self-hosted.yml vendored Normal file
@@ -0,0 +1,72 @@
name: E2E Self Hosted Test Suite

on:
workflow_call:
inputs:
snjs_image_tag:
type: string
default: latest
description: The Docker image tag used for SNJS container
suite:
type: string
default: all
description: The test suite to run
secrets:
DOCKER_USERNAME:
required: true
DOCKER_PASSWORD:
required: true
AWS_ACCESS_KEY_ID:
required: true
AWS_SECRET_ACCESS_KEY:
required: true

jobs:
e2e:
name: (Self Hosting) E2E Test Suite
strategy:
fail-fast: false
matrix:
secondary_db_enabled: [true, false]
runs-on: ubuntu-latest

services:
snjs:
image: standardnotes/snjs:${{ inputs.snjs_image_tag }}
ports:
- 9001:9001

steps:
- uses: actions/checkout@v3

- name: Set up Node
uses: actions/setup-node@v3
with:
registry-url: 'https://registry.npmjs.org'
node-version-file: '.nvmrc'

- name: Install
run: yarn install --immutable

- name: Run Server
run: docker compose -f docker-compose.ci.yml up -d
env:
DB_TYPE: mysql
CACHE_TYPE: redis
SECONDARY_DB_ENABLED: ${{ matrix.secondary_db_enabled }}

- name: Wait for server to start
run: docker/is-available.sh http://localhost:3123 $(pwd)/logs

- name: Run E2E Test Suite
run: yarn dlx mocha-headless-chrome --timeout 3600000 -f http://localhost:9001/mocha/test.html?suite=${{ inputs.suite }}

- name: Archive failed run logs
if: ${{ failure() }}
uses: actions/upload-artifact@v3
with:
name: self-hosted-failure-logs-${{ inputs.suite }}-${{ matrix.secondary_db_enabled }}
retention-days: 5
path: |
logs/*.err
logs/*.log
9 .github/workflows/e2e-test-suite.yml vendored
@@ -1,6 +1,6 @@
name: E2E Test Suite On Self Hosted Server
name: E2E Test Suite

run-name: E2E Test Suite against ${{ inputs.ref_name }} by ${{ inputs.author }}
run-name: E2E ${{ inputs.suite }} Test Suite against ${{ inputs.ref_name }} by ${{ inputs.author }}

on:
schedule:
@@ -11,6 +11,10 @@ on:
type: string
default: latest
description: The Docker image tag used for SNJS container
suite:
type: string
default: all
description: The test suite to run
author:
type: string
default: unknown
@@ -26,4 +30,5 @@ jobs:
uses: standardnotes/server/.github/workflows/common-e2e.yml@main
with:
snjs_image_tag: ${{ inputs.snjs_image_tag || 'latest' }}
suite: ${{ inputs.suite || 'all' }}
secrets: inherit
17 .github/workflows/pr.yml vendored
@@ -95,9 +95,20 @@ jobs:
- name: Test
run: yarn test

e2e:
e2e-base:
needs: build

name: E2E
name: E2E Base Suite
uses: standardnotes/server/.github/workflows/common-e2e.yml@main
with:
snjs_image_tag: 'latest'
suite: 'base'
secrets: inherit

e2e-vaults:
needs: build
name: E2E Vaults Suite
uses: standardnotes/server/.github/workflows/common-e2e.yml@main
with:
snjs_image_tag: 'latest'
suite: 'vaults'
secrets: inherit
20 .github/workflows/publish.yml vendored
@@ -95,20 +95,32 @@ jobs:
- name: Test
run: yarn test

e2e:
e2e-base:
needs: build
name: E2E
name: E2E Base Suite
uses: standardnotes/server/.github/workflows/common-e2e.yml@main
with:
snjs_image_tag: 'latest'
suite: 'base'
secrets: inherit

e2e-vaults:
needs: build
name: E2E Vaults Suite
uses: standardnotes/server/.github/workflows/common-e2e.yml@main
with:
snjs_image_tag: 'latest'
suite: 'vaults'
secrets: inherit

publish-self-hosting:
needs: [ test, lint, e2e ]
needs: [ test, lint, e2e-base, e2e-vaults ]
name: Publish Self Hosting Docker Image
uses: standardnotes/server/.github/workflows/common-self-hosting.yml@main
secrets: inherit

publish-services:
needs: [ test, lint, e2e ]
needs: [ test, lint, e2e-base, e2e-vaults ]

runs-on: ubuntu-latest
319 .pnp.cjs generated
@@ -1605,6 +1605,13 @@ const RAW_RUNTIME_STATE =
|
||||
["@aws-sdk/service-error-classification", "npm:3.342.0"]\
|
||||
],\
|
||||
"linkType": "HARD"\
|
||||
}],\
|
||||
["npm:3.370.0", {\
|
||||
"packageLocation": "./.yarn/cache/@aws-sdk-service-error-classification-npm-3.370.0-0d5b615252-500f067ba1.zip/node_modules/@aws-sdk/service-error-classification/",\
|
||||
"packageDependencies": [\
|
||||
["@aws-sdk/service-error-classification", "npm:3.370.0"]\
|
||||
],\
|
||||
"linkType": "HARD"\
|
||||
}]\
|
||||
]],\
|
||||
["@aws-sdk/shared-ini-file-loader", [\
|
||||
@@ -1788,6 +1795,15 @@ const RAW_RUNTIME_STATE =
|
||||
["tslib", "npm:2.5.2"]\
|
||||
],\
|
||||
"linkType": "HARD"\
|
||||
}],\
|
||||
["npm:3.418.0", {\
|
||||
"packageLocation": "./.yarn/cache/@aws-sdk-types-npm-3.418.0-451c0cadd0-627955c2c9.zip/node_modules/@aws-sdk/types/",\
|
||||
"packageDependencies": [\
|
||||
["@aws-sdk/types", "npm:3.418.0"],\
|
||||
["@smithy/types", "npm:2.3.4"],\
|
||||
["tslib", "npm:2.5.2"]\
|
||||
],\
|
||||
"linkType": "HARD"\
|
||||
}]\
|
||||
]],\
|
||||
["@aws-sdk/url-parser", [\
|
||||
@@ -5488,6 +5504,14 @@ const RAW_RUNTIME_STATE =
|
||||
["tslib", "npm:2.5.2"]\
|
||||
],\
|
||||
"linkType": "HARD"\
|
||||
}],\
|
||||
["npm:2.3.4", {\
|
||||
"packageLocation": "./.yarn/cache/@smithy-types-npm-2.3.4-7d0b3a2a2f-8a5ad3b47e.zip/node_modules/@smithy/types/",\
|
||||
"packageDependencies": [\
|
||||
["@smithy/types", "npm:2.3.4"],\
|
||||
["tslib", "npm:2.5.2"]\
|
||||
],\
|
||||
"linkType": "HARD"\
|
||||
}]\
|
||||
]],\
|
||||
["@smithy/url-parser", [\
|
||||
@@ -5835,6 +5859,7 @@ const RAW_RUNTIME_STATE =
|
||||
["@types/uuid", "npm:9.0.3"],\
|
||||
["@typescript-eslint/eslint-plugin", "virtual:fd909b174d079e30b336c4ce72c38a88c1e447767b1a8dd7655e07719a1e31b97807f0931368724fc78897ff15e6a6d00b83316c0f76d11f85111f342e08bb79#npm:6.5.0"],\
|
||||
["@typescript-eslint/parser", "virtual:fd909b174d079e30b336c4ce72c38a88c1e447767b1a8dd7655e07719a1e31b97807f0931368724fc78897ff15e6a6d00b83316c0f76d11f85111f342e08bb79#npm:6.5.0"],\
|
||||
["aws-xray-sdk", "npm:3.5.2"],\
|
||||
["axios", "npm:1.4.0"],\
|
||||
["bcryptjs", "npm:2.4.3"],\
|
||||
["cors", "npm:2.8.5"],\
|
||||
@@ -5938,6 +5963,7 @@ const RAW_RUNTIME_STATE =
|
||||
["@types/newrelic", "npm:9.14.0"],\
|
||||
["@typescript-eslint/eslint-plugin", "virtual:fd909b174d079e30b336c4ce72c38a88c1e447767b1a8dd7655e07719a1e31b97807f0931368724fc78897ff15e6a6d00b83316c0f76d11f85111f342e08bb79#npm:6.5.0"],\
|
||||
["@typescript-eslint/parser", "virtual:fd909b174d079e30b336c4ce72c38a88c1e447767b1a8dd7655e07719a1e31b97807f0931368724fc78897ff15e6a6d00b83316c0f76d11f85111f342e08bb79#npm:6.5.0"],\
|
||||
["aws-xray-sdk", "npm:3.5.2"],\
|
||||
["eslint", "npm:8.41.0"],\
|
||||
["eslint-plugin-prettier", "virtual:fd909b174d079e30b336c4ce72c38a88c1e447767b1a8dd7655e07719a1e31b97807f0931368724fc78897ff15e6a6d00b83316c0f76d11f85111f342e08bb79#npm:5.0.0"],\
|
||||
["ioredis", "npm:5.3.2"],\
|
||||
@@ -6609,6 +6635,16 @@ const RAW_RUNTIME_STATE =
|
||||
"linkType": "HARD"\
|
||||
}]\
|
||||
]],\
|
||||
["@types/cls-hooked", [\
|
||||
["npm:4.3.6", {\
|
||||
"packageLocation": "./.yarn/cache/@types-cls-hooked-npm-4.3.6-8787b64e86-f5b9864348.zip/node_modules/@types/cls-hooked/",\
|
||||
"packageDependencies": [\
|
||||
["@types/cls-hooked", "npm:4.3.6"],\
|
||||
["@types/node", "npm:20.2.5"]\
|
||||
],\
|
||||
"linkType": "HARD"\
|
||||
}]\
|
||||
]],\
|
||||
["@types/concat-stream", [\
|
||||
["npm:1.6.1", {\
|
||||
"packageLocation": "./.yarn/cache/@types-concat-stream-npm-1.6.1-42cd06b019-7d211e7433.zip/node_modules/@types/concat-stream/",\
|
||||
@@ -6868,6 +6904,16 @@ const RAW_RUNTIME_STATE =
|
||||
"linkType": "HARD"\
|
||||
}]\
|
||||
]],\
|
||||
["@types/mysql", [\
|
||||
["npm:2.15.22", {\
|
||||
"packageLocation": "./.yarn/cache/@types-mysql-npm-2.15.22-d643eb999e-6be0aac58f.zip/node_modules/@types/mysql/",\
|
||||
"packageDependencies": [\
|
||||
["@types/mysql", "npm:2.15.22"],\
|
||||
["@types/node", "npm:20.2.5"]\
|
||||
],\
|
||||
"linkType": "HARD"\
|
||||
}]\
|
||||
]],\
|
||||
["@types/newrelic", [\
|
||||
["npm:9.14.0", {\
|
||||
"packageLocation": "./.yarn/cache/@types-newrelic-npm-9.14.0-4668da51a1-3a54ea75a4.zip/node_modules/@types/newrelic/",\
|
||||
@@ -6936,6 +6982,18 @@ const RAW_RUNTIME_STATE =
|
||||
"linkType": "HARD"\
|
||||
}]\
|
||||
]],\
|
||||
["@types/pg", [\
|
||||
["npm:8.10.3", {\
|
||||
"packageLocation": "./.yarn/cache/@types-pg-npm-8.10.3-3fc3365c7b-22d4836bd9.zip/node_modules/@types/pg/",\
|
||||
"packageDependencies": [\
|
||||
["@types/pg", "npm:8.10.3"],\
|
||||
["@types/node", "npm:20.2.5"],\
|
||||
["pg-protocol", "npm:1.6.0"],\
|
||||
["pg-types", "npm:4.0.1"]\
|
||||
],\
|
||||
"linkType": "HARD"\
|
||||
}]\
|
||||
]],\
|
||||
["@types/prettier", [\
|
||||
["npm:2.7.3", {\
|
||||
"packageLocation": "./.yarn/cache/@types-prettier-npm-2.7.3-497316f37c-cda84c19ac.zip/node_modules/@types/prettier/",\
|
||||
@@ -7946,6 +8004,16 @@ const RAW_RUNTIME_STATE =
|
||||
"linkType": "HARD"\
|
||||
}]\
|
||||
]],\
|
||||
["async-hook-jl", [\
|
||||
["npm:1.7.6", {\
|
||||
"packageLocation": "./.yarn/cache/async-hook-jl-npm-1.7.6-9999815029-f61a3bd4c3.zip/node_modules/async-hook-jl/",\
|
||||
"packageDependencies": [\
|
||||
["async-hook-jl", "npm:1.7.6"],\
|
||||
["stack-chain", "npm:1.3.7"]\
|
||||
],\
|
||||
"linkType": "HARD"\
|
||||
}]\
|
||||
]],\
|
||||
["asynckit", [\
|
||||
["npm:0.4.0", {\
|
||||
"packageLocation": "./.yarn/cache/asynckit-npm-0.4.0-c718858525-3ce727cbc7.zip/node_modules/asynckit/",\
|
||||
@@ -7955,6 +8023,112 @@ const RAW_RUNTIME_STATE =
|
||||
"linkType": "HARD"\
|
||||
}]\
|
||||
]],\
|
||||
["atomic-batcher", [\
|
||||
["npm:1.0.2", {\
|
||||
"packageLocation": "./.yarn/cache/atomic-batcher-npm-1.0.2-6fcd3a3097-025e334f1f.zip/node_modules/atomic-batcher/",\
|
||||
"packageDependencies": [\
|
||||
["atomic-batcher", "npm:1.0.2"]\
|
||||
],\
|
||||
"linkType": "HARD"\
|
||||
}]\
|
||||
]],\
|
||||
["aws-xray-sdk", [\
|
||||
["npm:3.5.2", {\
|
||||
"packageLocation": "./.yarn/cache/aws-xray-sdk-npm-3.5.2-15fc4e54ee-576d0d5ccb.zip/node_modules/aws-xray-sdk/",\
|
||||
"packageDependencies": [\
|
||||
["aws-xray-sdk", "npm:3.5.2"],\
|
||||
["aws-xray-sdk-core", "npm:3.5.2"],\
|
||||
["aws-xray-sdk-express", "virtual:15fc4e54eec18d85ce3f22aa2405619072c35fbd500ad809cce2c9e4ead8a191fcc189cef6a5d76df3bea5576f09735fc4d32f086db561623afc56dd36794c8f#npm:3.5.2"],\
|
||||
["aws-xray-sdk-mysql", "virtual:15fc4e54eec18d85ce3f22aa2405619072c35fbd500ad809cce2c9e4ead8a191fcc189cef6a5d76df3bea5576f09735fc4d32f086db561623afc56dd36794c8f#npm:3.5.2"],\
|
||||
["aws-xray-sdk-postgres", "virtual:15fc4e54eec18d85ce3f22aa2405619072c35fbd500ad809cce2c9e4ead8a191fcc189cef6a5d76df3bea5576f09735fc4d32f086db561623afc56dd36794c8f#npm:3.5.2"]\
|
||||
],\
|
||||
"linkType": "HARD"\
|
||||
}]\
|
||||
]],\
|
||||
["aws-xray-sdk-core", [\
|
||||
["npm:3.5.2", {\
|
||||
"packageLocation": "./.yarn/cache/aws-xray-sdk-core-npm-3.5.2-9083a0c00f-a643998187.zip/node_modules/aws-xray-sdk-core/",\
|
||||
"packageDependencies": [\
|
||||
["aws-xray-sdk-core", "npm:3.5.2"],\
|
||||
["@aws-sdk/service-error-classification", "npm:3.370.0"],\
|
||||
["@aws-sdk/types", "npm:3.418.0"],\
|
||||
["@types/cls-hooked", "npm:4.3.6"],\
|
||||
["atomic-batcher", "npm:1.0.2"],\
|
||||
["cls-hooked", "npm:4.2.2"],\
|
||||
["semver", "npm:7.5.4"]\
|
||||
],\
|
||||
"linkType": "HARD"\
|
||||
}]\
|
||||
]],\
|
||||
["aws-xray-sdk-express", [\
|
||||
["npm:3.5.2", {\
|
||||
"packageLocation": "./.yarn/cache/aws-xray-sdk-express-npm-3.5.2-c4574a664b-62a07d0f3b.zip/node_modules/aws-xray-sdk-express/",\
|
||||
"packageDependencies": [\
|
||||
["aws-xray-sdk-express", "npm:3.5.2"]\
|
||||
],\
|
||||
"linkType": "SOFT"\
|
||||
}],\
|
||||
["virtual:15fc4e54eec18d85ce3f22aa2405619072c35fbd500ad809cce2c9e4ead8a191fcc189cef6a5d76df3bea5576f09735fc4d32f086db561623afc56dd36794c8f#npm:3.5.2", {\
|
||||
"packageLocation": "./.yarn/__virtual__/aws-xray-sdk-express-virtual-36027c3d91/0/cache/aws-xray-sdk-express-npm-3.5.2-c4574a664b-62a07d0f3b.zip/node_modules/aws-xray-sdk-express/",\
|
||||
"packageDependencies": [\
|
||||
["aws-xray-sdk-express", "virtual:15fc4e54eec18d85ce3f22aa2405619072c35fbd500ad809cce2c9e4ead8a191fcc189cef6a5d76df3bea5576f09735fc4d32f086db561623afc56dd36794c8f#npm:3.5.2"],\
|
||||
["@types/aws-xray-sdk-core", null],\
|
||||
["@types/express", "npm:4.17.17"],\
|
||||
["aws-xray-sdk-core", "npm:3.5.2"]\
|
||||
],\
|
||||
"packagePeers": [\
|
||||
"@types/aws-xray-sdk-core",\
|
||||
"aws-xray-sdk-core"\
|
||||
],\
|
||||
"linkType": "HARD"\
|
||||
}]\
|
||||
]],\
|
||||
["aws-xray-sdk-mysql", [\
|
||||
["npm:3.5.2", {\
|
||||
"packageLocation": "./.yarn/cache/aws-xray-sdk-mysql-npm-3.5.2-095483ab95-f910a96630.zip/node_modules/aws-xray-sdk-mysql/",\
|
||||
"packageDependencies": [\
|
||||
["aws-xray-sdk-mysql", "npm:3.5.2"]\
|
||||
],\
|
||||
"linkType": "SOFT"\
|
||||
}],\
|
||||
["virtual:15fc4e54eec18d85ce3f22aa2405619072c35fbd500ad809cce2c9e4ead8a191fcc189cef6a5d76df3bea5576f09735fc4d32f086db561623afc56dd36794c8f#npm:3.5.2", {\
|
||||
"packageLocation": "./.yarn/__virtual__/aws-xray-sdk-mysql-virtual-d8a5f29b75/0/cache/aws-xray-sdk-mysql-npm-3.5.2-095483ab95-f910a96630.zip/node_modules/aws-xray-sdk-mysql/",\
|
||||
"packageDependencies": [\
|
||||
["aws-xray-sdk-mysql", "virtual:15fc4e54eec18d85ce3f22aa2405619072c35fbd500ad809cce2c9e4ead8a191fcc189cef6a5d76df3bea5576f09735fc4d32f086db561623afc56dd36794c8f#npm:3.5.2"],\
|
||||
["@types/aws-xray-sdk-core", null],\
|
||||
["@types/mysql", "npm:2.15.22"],\
|
||||
["aws-xray-sdk-core", "npm:3.5.2"]\
|
||||
],\
|
||||
"packagePeers": [\
|
||||
"@types/aws-xray-sdk-core",\
|
||||
"aws-xray-sdk-core"\
|
||||
],\
|
||||
"linkType": "HARD"\
|
||||
}]\
|
||||
]],\
|
||||
["aws-xray-sdk-postgres", [\
|
||||
["npm:3.5.2", {\
|
||||
"packageLocation": "./.yarn/cache/aws-xray-sdk-postgres-npm-3.5.2-3a7e7bcc42-f2d6da22c7.zip/node_modules/aws-xray-sdk-postgres/",\
|
||||
"packageDependencies": [\
|
||||
["aws-xray-sdk-postgres", "npm:3.5.2"]\
|
||||
],\
|
||||
"linkType": "SOFT"\
|
||||
}],\
|
||||
["virtual:15fc4e54eec18d85ce3f22aa2405619072c35fbd500ad809cce2c9e4ead8a191fcc189cef6a5d76df3bea5576f09735fc4d32f086db561623afc56dd36794c8f#npm:3.5.2", {\
|
||||
"packageLocation": "./.yarn/__virtual__/aws-xray-sdk-postgres-virtual-c6edb063fc/0/cache/aws-xray-sdk-postgres-npm-3.5.2-3a7e7bcc42-f2d6da22c7.zip/node_modules/aws-xray-sdk-postgres/",\
|
||||
"packageDependencies": [\
|
||||
["aws-xray-sdk-postgres", "virtual:15fc4e54eec18d85ce3f22aa2405619072c35fbd500ad809cce2c9e4ead8a191fcc189cef6a5d76df3bea5576f09735fc4d32f086db561623afc56dd36794c8f#npm:3.5.2"],\
|
||||
["@types/aws-xray-sdk-core", null],\
|
||||
["@types/pg", "npm:8.10.3"],\
|
||||
["aws-xray-sdk-core", "npm:3.5.2"]\
|
||||
],\
|
||||
"packagePeers": [\
|
||||
"@types/aws-xray-sdk-core",\
|
||||
"aws-xray-sdk-core"\
|
||||
],\
|
||||
"linkType": "HARD"\
|
||||
}]\
|
||||
]],\
|
||||
["axios", [\
|
||||
["npm:0.21.4", {\
|
||||
"packageLocation": "./.yarn/cache/axios-npm-0.21.4-e278873748-da644592cb.zip/node_modules/axios/",\
|
||||
@@ -8745,6 +8919,18 @@ const RAW_RUNTIME_STATE =
|
||||
"linkType": "HARD"\
|
||||
}]\
|
||||
]],\
|
||||
["cls-hooked", [\
|
||||
["npm:4.2.2", {\
|
||||
"packageLocation": "./.yarn/cache/cls-hooked-npm-4.2.2-db33b9b95f-59081fcc0f.zip/node_modules/cls-hooked/",\
|
||||
"packageDependencies": [\
|
||||
["cls-hooked", "npm:4.2.2"],\
|
||||
["async-hook-jl", "npm:1.7.6"],\
|
||||
["emitter-listener", "npm:1.1.2"],\
|
||||
["semver", "npm:5.7.2"]\
|
||||
],\
|
||||
"linkType": "HARD"\
|
||||
}]\
|
||||
]],\
|
||||
["cluster-key-slot", [\
|
||||
["npm:1.1.2", {\
|
||||
"packageLocation": "./.yarn/cache/cluster-key-slot-npm-1.1.2-0571a28825-516ed8b5e1.zip/node_modules/cluster-key-slot/",\
|
||||
@@ -9635,6 +9821,16 @@ const RAW_RUNTIME_STATE =
|
||||
"linkType": "HARD"\
|
||||
}]\
|
||||
]],\
|
||||
["emitter-listener", [\
|
||||
["npm:1.1.2", {\
|
||||
"packageLocation": "./.yarn/cache/emitter-listener-npm-1.1.2-0fe118d0b3-697f53c308.zip/node_modules/emitter-listener/",\
|
||||
"packageDependencies": [\
|
||||
["emitter-listener", "npm:1.1.2"],\
|
||||
["shimmer", "npm:1.2.1"]\
|
||||
],\
|
||||
"linkType": "HARD"\
|
||||
}]\
|
||||
]],\
|
||||
["emittery", [\
|
||||
["npm:0.13.1", {\
|
||||
"packageLocation": "./.yarn/cache/emittery-npm-0.13.1-cb6cd1bb03-fbe214171d.zip/node_modules/emittery/",\
|
||||
@@ -14007,6 +14203,15 @@ const RAW_RUNTIME_STATE =
|
||||
"linkType": "HARD"\
|
||||
}]\
|
||||
]],\
|
||||
["obuf", [\
|
||||
["npm:1.1.2", {\
|
||||
"packageLocation": "./.yarn/cache/obuf-npm-1.1.2-8db5fae8dd-53ff4ab3a1.zip/node_modules/obuf/",\
|
||||
"packageDependencies": [\
|
||||
["obuf", "npm:1.1.2"]\
|
||||
],\
|
||||
"linkType": "HARD"\
|
||||
}]\
|
||||
]],\
|
||||
["on-finished", [\
|
||||
["npm:2.4.1", {\
|
||||
"packageLocation": "./.yarn/cache/on-finished-npm-2.4.1-907af70f88-8e81472c50.zip/node_modules/on-finished/",\
|
||||
@@ -14467,6 +14672,49 @@ const RAW_RUNTIME_STATE =
|
||||
"linkType": "HARD"\
|
||||
}]\
|
||||
]],\
|
||||
["pg-int8", [\
|
||||
["npm:1.0.1", {\
|
||||
"packageLocation": "./.yarn/cache/pg-int8-npm-1.0.1-5cd67f3e22-a1e3a05a69.zip/node_modules/pg-int8/",\
|
||||
"packageDependencies": [\
|
||||
["pg-int8", "npm:1.0.1"]\
|
||||
],\
|
||||
"linkType": "HARD"\
|
||||
}]\
|
||||
]],\
|
||||
["pg-numeric", [\
|
||||
["npm:1.0.2", {\
|
||||
"packageLocation": "./.yarn/cache/pg-numeric-npm-1.0.2-9026ec3427-8899f8200c.zip/node_modules/pg-numeric/",\
|
||||
"packageDependencies": [\
|
||||
["pg-numeric", "npm:1.0.2"]\
|
||||
],\
|
||||
"linkType": "HARD"\
|
||||
}]\
|
||||
]],\
|
||||
["pg-protocol", [\
|
||||
["npm:1.6.0", {\
|
||||
"packageLocation": "./.yarn/cache/pg-protocol-npm-1.6.0-089a4b1d3c-995864cc2a.zip/node_modules/pg-protocol/",\
|
||||
"packageDependencies": [\
|
||||
["pg-protocol", "npm:1.6.0"]\
|
||||
],\
|
||||
"linkType": "HARD"\
|
||||
}]\
|
||||
]],\
|
||||
["pg-types", [\
|
||||
["npm:4.0.1", {\
|
||||
"packageLocation": "./.yarn/cache/pg-types-npm-4.0.1-8f922557d3-2c686ef361.zip/node_modules/pg-types/",\
|
||||
"packageDependencies": [\
|
||||
["pg-types", "npm:4.0.1"],\
|
||||
["pg-int8", "npm:1.0.1"],\
|
||||
["pg-numeric", "npm:1.0.2"],\
|
||||
["postgres-array", "npm:3.0.2"],\
|
||||
["postgres-bytea", "npm:3.0.0"],\
|
||||
["postgres-date", "npm:2.0.1"],\
|
||||
["postgres-interval", "npm:3.0.0"],\
|
||||
["postgres-range", "npm:1.1.3"]\
|
||||
],\
|
||||
"linkType": "HARD"\
|
||||
}]\
|
||||
]],\
|
||||
["picocolors", [\
|
||||
["npm:1.0.0", {\
|
||||
"packageLocation": "./.yarn/cache/picocolors-npm-1.0.0-d81e0b1927-a2e8092dd8.zip/node_modules/picocolors/",\
|
||||
@@ -14538,6 +14786,52 @@ const RAW_RUNTIME_STATE =
|
||||
"linkType": "HARD"\
|
||||
}]\
|
||||
]],\
|
||||
["postgres-array", [\
|
||||
["npm:3.0.2", {\
|
||||
"packageLocation": "./.yarn/cache/postgres-array-npm-3.0.2-da6a3f1fed-0159517e4e.zip/node_modules/postgres-array/",\
|
||||
"packageDependencies": [\
|
||||
["postgres-array", "npm:3.0.2"]\
|
||||
],\
|
||||
"linkType": "HARD"\
|
||||
}]\
|
||||
]],\
|
||||
["postgres-bytea", [\
|
||||
["npm:3.0.0", {\
|
||||
"packageLocation": "./.yarn/cache/postgres-bytea-npm-3.0.0-5de4c664f6-f5c01758fd.zip/node_modules/postgres-bytea/",\
|
||||
"packageDependencies": [\
|
||||
["postgres-bytea", "npm:3.0.0"],\
|
||||
["obuf", "npm:1.1.2"]\
|
||||
],\
|
||||
"linkType": "HARD"\
|
||||
}]\
|
||||
]],\
|
||||
["postgres-date", [\
|
||||
["npm:2.0.1", {\
|
||||
"packageLocation": "./.yarn/cache/postgres-date-npm-2.0.1-00e0e0bc9e-908eacec35.zip/node_modules/postgres-date/",\
|
||||
"packageDependencies": [\
|
||||
["postgres-date", "npm:2.0.1"]\
|
||||
],\
|
||||
"linkType": "HARD"\
|
||||
}]\
|
||||
]],\
|
||||
["postgres-interval", [\
|
||||
["npm:3.0.0", {\
|
||||
"packageLocation": "./.yarn/cache/postgres-interval-npm-3.0.0-936c769b98-c7a1cf006d.zip/node_modules/postgres-interval/",\
|
||||
"packageDependencies": [\
|
||||
["postgres-interval", "npm:3.0.0"]\
|
||||
],\
|
||||
"linkType": "HARD"\
|
||||
}]\
|
||||
]],\
|
||||
["postgres-range", [\
|
||||
["npm:1.1.3", {\
|
||||
"packageLocation": "./.yarn/cache/postgres-range-npm-1.1.3-46f68e1a9e-356a46d97e.zip/node_modules/postgres-range/",\
|
||||
"packageDependencies": [\
|
||||
["postgres-range", "npm:1.1.3"]\
|
||||
],\
|
||||
"linkType": "HARD"\
|
||||
}]\
|
||||
]],\
|
||||
["prelude-ls", [\
|
||||
["npm:1.2.1", {\
|
||||
"packageLocation": "./.yarn/cache/prelude-ls-npm-1.2.1-3e4d272a55-0b9d2c7680.zip/node_modules/prelude-ls/",\
|
||||
@@ -15297,6 +15591,13 @@ const RAW_RUNTIME_STATE =
|
||||
],\
|
||||
"linkType": "HARD"\
|
||||
}],\
|
||||
["npm:5.7.2", {\
|
||||
"packageLocation": "./.yarn/cache/semver-npm-5.7.2-938ee91eaa-fca14418a1.zip/node_modules/semver/",\
|
||||
"packageDependencies": [\
|
||||
["semver", "npm:5.7.2"]\
|
||||
],\
|
||||
"linkType": "HARD"\
|
||||
}],\
|
||||
["npm:6.3.0", {\
|
||||
"packageLocation": "./.yarn/cache/semver-npm-6.3.0-b3eace8bfd-8dd72e7c7c.zip/node_modules/semver/",\
|
||||
"packageDependencies": [\
|
||||
@@ -15441,6 +15742,15 @@ const RAW_RUNTIME_STATE =
|
||||
"linkType": "HARD"\
|
||||
}]\
|
||||
]],\
|
||||
["shimmer", [\
|
||||
["npm:1.2.1", {\
|
||||
"packageLocation": "./.yarn/cache/shimmer-npm-1.2.1-8b50bf3206-aa0d6252ad.zip/node_modules/shimmer/",\
|
||||
"packageDependencies": [\
|
||||
["shimmer", "npm:1.2.1"]\
|
||||
],\
|
||||
"linkType": "HARD"\
|
||||
}]\
|
||||
]],\
|
||||
["side-channel", [\
|
||||
["npm:1.0.4", {\
|
||||
"packageLocation": "./.yarn/cache/side-channel-npm-1.0.4-e1f38b9e06-c4998d9fc5.zip/node_modules/side-channel/",\
|
||||
@@ -15765,6 +16075,15 @@ const RAW_RUNTIME_STATE =
|
||||
"linkType": "HARD"\
|
||||
}]\
|
||||
]],\
|
||||
["stack-chain", [\
|
||||
["npm:1.3.7", {\
|
||||
"packageLocation": "./.yarn/cache/stack-chain-npm-1.3.7-c803ef2abb-6420637b76.zip/node_modules/stack-chain/",\
|
||||
"packageDependencies": [\
|
||||
["stack-chain", "npm:1.3.7"]\
|
||||
],\
|
||||
"linkType": "HARD"\
|
||||
}]\
|
||||
]],\
|
||||
["stack-trace", [\
|
||||
["npm:0.0.10", {\
|
||||
"packageLocation": "./.yarn/cache/stack-trace-npm-0.0.10-9460b173e1-7bd633f0e9.zip/node_modules/stack-trace/",\
|
||||
|
||||
BIN .yarn/cache/@aws-sdk-service-error-classification-npm-3.370.0-0d5b615252-500f067ba1.zip vendored Normal file (binary file not shown)
BIN .yarn/cache/@aws-sdk-types-npm-3.418.0-451c0cadd0-627955c2c9.zip vendored Normal file (binary file not shown)
BIN .yarn/cache/@smithy-types-npm-2.3.4-7d0b3a2a2f-8a5ad3b47e.zip vendored Normal file (binary file not shown)
BIN .yarn/cache/@types-cls-hooked-npm-4.3.6-8787b64e86-f5b9864348.zip vendored Normal file (binary file not shown)
BIN .yarn/cache/@types-mysql-npm-2.15.22-d643eb999e-6be0aac58f.zip vendored Normal file (binary file not shown)
BIN .yarn/cache/@types-pg-npm-8.10.3-3fc3365c7b-22d4836bd9.zip vendored Normal file (binary file not shown)
BIN .yarn/cache/async-hook-jl-npm-1.7.6-9999815029-f61a3bd4c3.zip vendored Normal file (binary file not shown)
BIN .yarn/cache/atomic-batcher-npm-1.0.2-6fcd3a3097-025e334f1f.zip vendored Normal file (binary file not shown)
BIN .yarn/cache/aws-xray-sdk-core-npm-3.5.2-9083a0c00f-a643998187.zip vendored Normal file (binary file not shown)
BIN .yarn/cache/aws-xray-sdk-express-npm-3.5.2-c4574a664b-62a07d0f3b.zip vendored Normal file (binary file not shown)
BIN .yarn/cache/aws-xray-sdk-mysql-npm-3.5.2-095483ab95-f910a96630.zip vendored Normal file (binary file not shown)
BIN .yarn/cache/aws-xray-sdk-npm-3.5.2-15fc4e54ee-576d0d5ccb.zip vendored Normal file (binary file not shown)
BIN .yarn/cache/aws-xray-sdk-postgres-npm-3.5.2-3a7e7bcc42-f2d6da22c7.zip vendored Normal file (binary file not shown)
BIN .yarn/cache/cls-hooked-npm-4.2.2-db33b9b95f-59081fcc0f.zip vendored Normal file (binary file not shown)
BIN .yarn/cache/emitter-listener-npm-1.1.2-0fe118d0b3-697f53c308.zip vendored Normal file (binary file not shown)
BIN .yarn/cache/obuf-npm-1.1.2-8db5fae8dd-53ff4ab3a1.zip vendored Normal file (binary file not shown)
BIN .yarn/cache/pg-int8-npm-1.0.1-5cd67f3e22-a1e3a05a69.zip vendored Normal file (binary file not shown)
BIN .yarn/cache/pg-numeric-npm-1.0.2-9026ec3427-8899f8200c.zip vendored Normal file (binary file not shown)
BIN .yarn/cache/pg-protocol-npm-1.6.0-089a4b1d3c-995864cc2a.zip vendored Normal file (binary file not shown)
BIN .yarn/cache/pg-types-npm-4.0.1-8f922557d3-2c686ef361.zip vendored Normal file (binary file not shown)
BIN .yarn/cache/postgres-array-npm-3.0.2-da6a3f1fed-0159517e4e.zip vendored Normal file (binary file not shown)
BIN .yarn/cache/postgres-bytea-npm-3.0.0-5de4c664f6-f5c01758fd.zip vendored Normal file (binary file not shown)
BIN .yarn/cache/postgres-date-npm-2.0.1-00e0e0bc9e-908eacec35.zip vendored Normal file (binary file not shown)
BIN .yarn/cache/postgres-interval-npm-3.0.0-936c769b98-c7a1cf006d.zip vendored Normal file (binary file not shown)
BIN .yarn/cache/postgres-range-npm-1.1.3-46f68e1a9e-356a46d97e.zip vendored Normal file (binary file not shown)
BIN .yarn/cache/semver-npm-5.7.2-938ee91eaa-fca14418a1.zip vendored Normal file (binary file not shown)
BIN .yarn/cache/shimmer-npm-1.2.1-8b50bf3206-aa0d6252ad.zip vendored Normal file (binary file not shown)
BIN .yarn/cache/stack-chain-npm-1.3.7-c803ef2abb-6420637b76.zip vendored Normal file (binary file not shown)
@@ -3,6 +3,10 @@
All notable changes to this project will be documented in this file.
See [Conventional Commits](https://conventionalcommits.org) for commit guidelines.

## [2.27.3](https://github.com/standardnotes/server/compare/@standardnotes/analytics@2.27.2...@standardnotes/analytics@2.27.3) (2023-10-03)

**Note:** Version bump only for package @standardnotes/analytics

## [2.27.2](https://github.com/standardnotes/server/compare/@standardnotes/analytics@2.27.1...@standardnotes/analytics@2.27.2) (2023-09-28)

**Note:** Version bump only for package @standardnotes/analytics

@@ -1,6 +1,6 @@
{
"name": "@standardnotes/analytics",
"version": "2.27.2",
"version": "2.27.3",
"engines": {
"node": ">=18.0.0 <21.0.0"
},

@@ -3,6 +3,10 @@
All notable changes to this project will be documented in this file.
See [Conventional Commits](https://conventionalcommits.org) for commit guidelines.

## [1.75.8](https://github.com/standardnotes/api-gateway/compare/@standardnotes/api-gateway@1.75.7...@standardnotes/api-gateway@1.75.8) (2023-10-03)

**Note:** Version bump only for package @standardnotes/api-gateway

## [1.75.7](https://github.com/standardnotes/api-gateway/compare/@standardnotes/api-gateway@1.75.6...@standardnotes/api-gateway@1.75.7) (2023-09-28)

**Note:** Version bump only for package @standardnotes/api-gateway

@@ -1,6 +1,6 @@
{
"name": "@standardnotes/api-gateway",
"version": "1.75.7",
"version": "1.75.8",
"engines": {
"node": ">=18.0.0 <21.0.0"
},

@@ -3,6 +3,41 @@
All notable changes to this project will be documented in this file.
See [Conventional Commits](https://conventionalcommits.org) for commit guidelines.

# [1.150.0](https://github.com/standardnotes/server/compare/@standardnotes/auth-server@1.149.1...@standardnotes/auth-server@1.150.0) (2023-10-03)

### Features

* add xray segment tracing on auth-worker ([b1b244a](https://github.com/standardnotes/server/commit/b1b244a2cf1e17ddf67fc9b238b4b25a1bc5a190))

## [1.149.1](https://github.com/standardnotes/server/compare/@standardnotes/auth-server@1.149.0...@standardnotes/auth-server@1.149.1) (2023-10-02)

### Bug Fixes

* **auth:** enable xray only for server ([71e2a41](https://github.com/standardnotes/server/commit/71e2a4187e8f79b2a3055f502310ed0b6a6d82ea))

# [1.149.0](https://github.com/standardnotes/server/compare/@standardnotes/auth-server@1.148.2...@standardnotes/auth-server@1.149.0) (2023-10-02)

### Bug Fixes

* **auth:** add aws xray ecs and ec2 plugins ([3838358](https://github.com/standardnotes/server/commit/383835808165aba58d15e79f03b5ba4e1c899f4c))
* **auth:** prevent from loop disabling of email settings ([#858](https://github.com/standardnotes/server/issues/858)) ([bd71422](https://github.com/standardnotes/server/commit/bd71422fabc4b5b47cca6dd071e3332943adc81d))

### Features

* **auth:** introduce AWS X-Ray SDK ([#859](https://github.com/standardnotes/server/issues/859)) ([1a388f0](https://github.com/standardnotes/server/commit/1a388f00c3897d1ebb1411793cfb23c3d305ac2e))

## [1.148.2](https://github.com/standardnotes/server/compare/@standardnotes/auth-server@1.148.1...@standardnotes/auth-server@1.148.2) (2023-09-29)

### Bug Fixes

* **auth:** increase ttl for in progress duration of transitions ([681e037](https://github.com/standardnotes/server/commit/681e0378ae6b97c838b0b34ccc630194b304b81a))

## [1.148.1](https://github.com/standardnotes/server/compare/@standardnotes/auth-server@1.148.0...@standardnotes/auth-server@1.148.1) (2023-09-29)

### Bug Fixes

* **auth:** disable transitions retriggering if they are in progress ([5ef6c5c](https://github.com/standardnotes/server/commit/5ef6c5c14a9f7a558de7ac9ff0ab99a5f831c127))

# [1.148.0](https://github.com/standardnotes/server/compare/@standardnotes/auth-server@1.147.1...@standardnotes/auth-server@1.148.0) (2023-09-28)

### Features

@@ -24,11 +24,13 @@ import { urlencoded, json, Request, Response, NextFunction } from 'express'
import * as winston from 'winston'
import * as dayjs from 'dayjs'
import * as utc from 'dayjs/plugin/utc'
import * as AWSXRay from 'aws-xray-sdk'

import { InversifyExpressServer } from 'inversify-express-utils'
import { ContainerConfigLoader } from '../src/Bootstrap/Container'
import TYPES from '../src/Bootstrap/Types'
import { Env } from '../src/Bootstrap/Env'
import { ServiceIdentifier } from '@standardnotes/domain-core'

const container = new ContainerConfigLoader()
void container.load().then((container) => {
@@ -37,9 +39,20 @@ void container.load().then((container) => {
const env: Env = new Env()
env.load()

const isConfiguredForAWSProduction =
env.get('MODE', true) !== 'home-server' && env.get('MODE', true) !== 'self-hosted'

if (isConfiguredForAWSProduction) {
AWSXRay.config([AWSXRay.plugins.EC2Plugin, AWSXRay.plugins.ECSPlugin])
}

const server = new InversifyExpressServer(container)

server.setConfig((app) => {
if (isConfiguredForAWSProduction) {
app.use(AWSXRay.express.openSegment(ServiceIdentifier.NAMES.Auth))
}

app.use((_request: Request, response: Response, next: NextFunction) => {
response.setHeader('X-Auth-Version', container.get(TYPES.Auth_VERSION))
next()
@@ -66,6 +79,10 @@ void container.load().then((container) => {

const serverInstance = server.build()

if (isConfiguredForAWSProduction) {
serverInstance.use(AWSXRay.express.closeSegment())
}

serverInstance.listen(env.get('PORT'))

logger.info(`Server started on port ${process.env.PORT}`)
@@ -55,7 +55,7 @@ const requestTransition = async (
|
||||
|
||||
let wasTransitionRequested = false
|
||||
|
||||
if (itemsTransitionStatus?.value !== TransitionStatus.STATUSES.Verified) {
|
||||
if (itemsTransitionStatus === null || itemsTransitionStatus.value === TransitionStatus.STATUSES.Failed) {
|
||||
wasTransitionRequested = true
|
||||
await transitionStatusRepository.remove(user.uuid, 'items')
|
||||
|
||||
@@ -68,7 +68,7 @@ const requestTransition = async (
|
||||
)
|
||||
}
|
||||
|
||||
if (revisionsTransitionStatus?.value !== TransitionStatus.STATUSES.Verified) {
|
||||
if (revisionsTransitionStatus === null || revisionsTransitionStatus.value === TransitionStatus.STATUSES.Failed) {
|
||||
wasTransitionRequested = true
|
||||
await transitionStatusRepository.remove(user.uuid, 'revisions')
|
||||
|
||||
|
||||
@@ -8,6 +8,7 @@ import { Env } from '../src/Bootstrap/Env'
|
||||
import { DomainEventSubscriberFactoryInterface } from '@standardnotes/domain-events'
|
||||
import * as dayjs from 'dayjs'
|
||||
import * as utc from 'dayjs/plugin/utc'
|
||||
import * as AWSXRay from 'aws-xray-sdk'
|
||||
|
||||
const container = new ContainerConfigLoader('worker')
|
||||
void container.load().then((container) => {
|
||||
@@ -16,6 +17,14 @@ void container.load().then((container) => {
|
||||
const env: Env = new Env()
|
||||
env.load()
|
||||
|
||||
const isConfiguredForAWSProduction =
|
||||
env.get('MODE', true) !== 'home-server' && env.get('MODE', true) !== 'self-hosted'
|
||||
|
||||
if (isConfiguredForAWSProduction) {
|
||||
AWSXRay.enableManualMode()
|
||||
AWSXRay.config([AWSXRay.plugins.EC2Plugin, AWSXRay.plugins.ECSPlugin])
|
||||
}
|
||||
|
||||
const logger: Logger = container.get(TYPES.Auth_Logger)
|
||||
|
||||
logger.info('Starting worker...')
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@standardnotes/auth-server",
|
||||
"version": "1.148.0",
|
||||
"version": "1.150.0",
|
||||
"engines": {
|
||||
"node": ">=18.0.0 <21.0.0"
|
||||
},
|
||||
@@ -54,6 +54,7 @@
|
||||
"@standardnotes/sncrypto-common": "^1.13.4",
|
||||
"@standardnotes/sncrypto-node": "workspace:*",
|
||||
"@standardnotes/time": "workspace:*",
|
||||
"aws-xray-sdk": "^3.5.2",
|
||||
"axios": "^1.1.3",
|
||||
"bcryptjs": "2.4.3",
|
||||
"cors": "2.8.5",
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
import * as winston from 'winston'
|
||||
import Redis from 'ioredis'
|
||||
import { captureAWSv3Client } from 'aws-xray-sdk'
|
||||
import { SNSClient, SNSClientConfig } from '@aws-sdk/client-sns'
|
||||
import { SQSClient, SQSClientConfig } from '@aws-sdk/client-sqs'
|
||||
import { Container } from 'inversify'
|
||||
@@ -92,7 +93,7 @@ import {
|
||||
SNSDomainEventPublisher,
|
||||
SQSDomainEventSubscriberFactory,
|
||||
SQSEventMessageHandler,
|
||||
SQSNewRelicEventMessageHandler,
|
||||
SQSXRayEventMessageHandler,
|
||||
} from '@standardnotes/domain-events-infra'
|
||||
import { GetUserSubscription } from '../Domain/UseCase/GetUserSubscription/GetUserSubscription'
|
||||
import { ChangeCredentials } from '../Domain/UseCase/ChangeCredentials/ChangeCredentials'
|
||||
@@ -273,6 +274,7 @@ import { UserAddedToSharedVaultEventHandler } from '../Domain/Handler/UserAddedT
|
||||
import { UserRemovedFromSharedVaultEventHandler } from '../Domain/Handler/UserRemovedFromSharedVaultEventHandler'
|
||||
import { DesignateSurvivor } from '../Domain/UseCase/DesignateSurvivor/DesignateSurvivor'
|
||||
import { UserDesignatedAsSurvivorInSharedVaultEventHandler } from '../Domain/Handler/UserDesignatedAsSurvivorInSharedVaultEventHandler'
|
||||
import { DisableEmailSettingBasedOnEmailSubscription } from '../Domain/UseCase/DisableEmailSettingBasedOnEmailSubscription/DisableEmailSettingBasedOnEmailSubscription'
|
||||
|
||||
export class ContainerConfigLoader {
|
||||
constructor(private mode: 'server' | 'worker' = 'server') {}
|
||||
@@ -319,6 +321,8 @@ export class ContainerConfigLoader {
|
||||
logger.debug('Database initialized')
|
||||
|
||||
const isConfiguredForHomeServer = env.get('MODE', true) === 'home-server'
|
||||
const isConfiguredForSelfHosting = env.get('MODE', true) === 'self-hosted'
|
||||
const isConfiguredForAWSProduction = !isConfiguredForHomeServer && !isConfiguredForSelfHosting
|
||||
const isConfiguredForInMemoryCache = env.get('CACHE_TYPE', true) === 'memory'
|
||||
|
||||
if (!isConfiguredForInMemoryCache) {
|
||||
@@ -349,7 +353,11 @@ export class ContainerConfigLoader {
|
||||
secretAccessKey: env.get('SNS_SECRET_ACCESS_KEY', true),
|
||||
}
|
||||
}
|
||||
container.bind<SNSClient>(TYPES.Auth_SNS).toConstantValue(new SNSClient(snsConfig))
|
||||
let snsClient = new SNSClient(snsConfig)
|
||||
if (isConfiguredForAWSProduction && this.mode === 'server') {
|
||||
snsClient = captureAWSv3Client(snsClient)
|
||||
}
|
||||
container.bind<SNSClient>(TYPES.Auth_SNS).toConstantValue(snsClient)
|
||||
|
||||
const sqsConfig: SQSClientConfig = {
|
||||
region: env.get('SQS_AWS_REGION', true),
|
||||
@@ -363,7 +371,11 @@ export class ContainerConfigLoader {
|
||||
secretAccessKey: env.get('SQS_SECRET_ACCESS_KEY', true),
|
||||
}
|
||||
}
|
||||
container.bind<SQSClient>(TYPES.Auth_SQS).toConstantValue(new SQSClient(sqsConfig))
|
||||
let sqsClient = new SQSClient(sqsConfig)
|
||||
if (isConfiguredForAWSProduction && this.mode === 'server') {
|
||||
sqsClient = captureAWSv3Client(sqsClient)
|
||||
}
|
||||
container.bind<SQSClient>(TYPES.Auth_SQS).toConstantValue(sqsClient)
|
||||
}
|
||||
|
||||
// Mapping
|
||||
@@ -498,20 +510,6 @@ export class ContainerConfigLoader {
|
||||
),
|
||||
)
|
||||
|
||||
// Middleware
|
||||
container.bind<SessionMiddleware>(TYPES.Auth_SessionMiddleware).to(SessionMiddleware)
|
||||
container.bind<LockMiddleware>(TYPES.Auth_LockMiddleware).to(LockMiddleware)
|
||||
container
|
||||
.bind<RequiredCrossServiceTokenMiddleware>(TYPES.Auth_RequiredCrossServiceTokenMiddleware)
|
||||
.to(RequiredCrossServiceTokenMiddleware)
|
||||
container
|
||||
.bind<OptionalCrossServiceTokenMiddleware>(TYPES.Auth_OptionalCrossServiceTokenMiddleware)
|
||||
.to(OptionalCrossServiceTokenMiddleware)
|
||||
container
|
||||
.bind<ApiGatewayOfflineAuthMiddleware>(TYPES.Auth_ApiGatewayOfflineAuthMiddleware)
|
||||
.to(ApiGatewayOfflineAuthMiddleware)
|
||||
container.bind<OfflineUserAuthMiddleware>(TYPES.Auth_OfflineUserAuthMiddleware).to(OfflineUserAuthMiddleware)
|
||||
|
||||
// Projectors
|
||||
container.bind<SessionProjector>(TYPES.Auth_SessionProjector).to(SessionProjector)
|
||||
container.bind<UserProjector>(TYPES.Auth_UserProjector).to(UserProjector)
|
||||
@@ -740,6 +738,32 @@ export class ContainerConfigLoader {
|
||||
: new SNSDomainEventPublisher(container.get(TYPES.Auth_SNS), container.get(TYPES.Auth_SNS_TOPIC_ARN)),
|
||||
)
|
||||
|
||||
// Middleware
|
||||
container.bind<SessionMiddleware>(TYPES.Auth_SessionMiddleware).to(SessionMiddleware)
|
||||
container.bind<LockMiddleware>(TYPES.Auth_LockMiddleware).to(LockMiddleware)
|
||||
container
|
||||
.bind<RequiredCrossServiceTokenMiddleware>(TYPES.Auth_RequiredCrossServiceTokenMiddleware)
|
||||
.toConstantValue(
|
||||
new RequiredCrossServiceTokenMiddleware(
|
||||
container.get<TokenDecoderInterface<CrossServiceTokenData>>(TYPES.Auth_CrossServiceTokenDecoder),
|
||||
isConfiguredForAWSProduction && this.mode === 'server',
|
||||
container.get<winston.Logger>(TYPES.Auth_Logger),
|
||||
),
|
||||
)
|
||||
container
|
||||
.bind<OptionalCrossServiceTokenMiddleware>(TYPES.Auth_OptionalCrossServiceTokenMiddleware)
|
||||
.toConstantValue(
|
||||
new OptionalCrossServiceTokenMiddleware(
|
||||
container.get<TokenDecoderInterface<CrossServiceTokenData>>(TYPES.Auth_CrossServiceTokenDecoder),
|
||||
isConfiguredForAWSProduction && this.mode === 'server',
|
||||
container.get<winston.Logger>(TYPES.Auth_Logger),
|
||||
),
|
||||
)
|
||||
container
|
||||
.bind<ApiGatewayOfflineAuthMiddleware>(TYPES.Auth_ApiGatewayOfflineAuthMiddleware)
|
||||
.to(ApiGatewayOfflineAuthMiddleware)
|
||||
container.bind<OfflineUserAuthMiddleware>(TYPES.Auth_OfflineUserAuthMiddleware).to(OfflineUserAuthMiddleware)
|
||||
|
||||
// use cases
|
||||
container
|
||||
.bind<TraceSession>(TYPES.Auth_TraceSession)
|
||||
@@ -965,6 +989,15 @@ export class ContainerConfigLoader {
|
||||
container.get<TimerInterface>(TYPES.Auth_Timer),
|
||||
),
|
||||
)
|
||||
container
|
||||
.bind<DisableEmailSettingBasedOnEmailSubscription>(TYPES.Auth_DisableEmailSettingBasedOnEmailSubscription)
|
||||
.toConstantValue(
|
||||
new DisableEmailSettingBasedOnEmailSubscription(
|
||||
container.get<UserRepositoryInterface>(TYPES.Auth_UserRepository),
|
||||
container.get<SettingRepositoryInterface>(TYPES.Auth_SettingRepository),
|
||||
container.get<SettingFactoryInterface>(TYPES.Auth_SettingFactory),
|
||||
),
|
||||
)
|
||||
|
||||
// Controller
|
||||
container
|
||||
@@ -1102,8 +1135,10 @@ export class ContainerConfigLoader {
|
||||
.bind<EmailSubscriptionUnsubscribedEventHandler>(TYPES.Auth_EmailSubscriptionUnsubscribedEventHandler)
|
||||
.toConstantValue(
|
||||
new EmailSubscriptionUnsubscribedEventHandler(
|
||||
container.get(TYPES.Auth_UserRepository),
|
||||
container.get(TYPES.Auth_SettingService),
|
||||
container.get<DisableEmailSettingBasedOnEmailSubscription>(
|
||||
TYPES.Auth_DisableEmailSettingBasedOnEmailSubscription,
|
||||
),
|
||||
container.get<winston.Logger>(TYPES.Auth_Logger),
|
||||
),
|
||||
)
|
||||
container
|
||||
@@ -1201,8 +1236,8 @@ export class ContainerConfigLoader {
container
.bind<DomainEventMessageHandlerInterface>(TYPES.Auth_DomainEventMessageHandler)
.toConstantValue(
env.get('NEW_RELIC_ENABLED', true) === 'true'
? new SQSNewRelicEventMessageHandler(eventHandlers, container.get(TYPES.Auth_Logger))
isConfiguredForAWSProduction
? new SQSXRayEventMessageHandler(eventHandlers, container.get(TYPES.Auth_Logger))
: new SQSEventMessageHandler(eventHandlers, container.get(TYPES.Auth_Logger)),
)

@@ -161,6 +161,7 @@ const TYPES = {
Auth_AddSharedVaultUser: Symbol.for('Auth_AddSharedVaultUser'),
Auth_RemoveSharedVaultUser: Symbol.for('Auth_RemoveSharedVaultUser'),
Auth_DesignateSurvivor: Symbol.for('Auth_DesignateSurvivor'),
Auth_DisableEmailSettingBasedOnEmailSubscription: Symbol.for('Auth_DisableEmailSettingBasedOnEmailSubscription'),
// Handlers
Auth_UserRegisteredEventHandler: Symbol.for('Auth_UserRegisteredEventHandler'),
Auth_AccountDeletionRequestedEventHandler: Symbol.for('Auth_AccountDeletionRequestedEventHandler'),

@@ -1,117 +0,0 @@
|
||||
import { EmailLevel } from '@standardnotes/domain-core'
|
||||
import { EmailSubscriptionUnsubscribedEvent } from '@standardnotes/domain-events'
|
||||
|
||||
import { SettingServiceInterface } from '../Setting/SettingServiceInterface'
|
||||
import { User } from '../User/User'
|
||||
import { UserRepositoryInterface } from '../User/UserRepositoryInterface'
|
||||
import { EmailSubscriptionUnsubscribedEventHandler } from './EmailSubscriptionUnsubscribedEventHandler'
|
||||
|
||||
describe('EmailSubscriptionUnsubscribedEventHandler', () => {
|
||||
let userRepository: UserRepositoryInterface
|
||||
let settingsService: SettingServiceInterface
|
||||
let event: EmailSubscriptionUnsubscribedEvent
|
||||
|
||||
const createHandler = () => new EmailSubscriptionUnsubscribedEventHandler(userRepository, settingsService)
|
||||
|
||||
beforeEach(() => {
|
||||
userRepository = {} as jest.Mocked<UserRepositoryInterface>
|
||||
userRepository.findOneByUsernameOrEmail = jest.fn().mockReturnValue({} as jest.Mocked<User>)
|
||||
|
||||
settingsService = {} as jest.Mocked<SettingServiceInterface>
|
||||
settingsService.createOrReplace = jest.fn()
|
||||
|
||||
event = {
|
||||
payload: {
|
||||
userEmail: 'test@test.te',
|
||||
level: EmailLevel.LEVELS.Marketing,
|
||||
},
|
||||
} as jest.Mocked<EmailSubscriptionUnsubscribedEvent>
|
||||
})
|
||||
|
||||
it('should not do anything if username is invalid', async () => {
|
||||
event.payload.userEmail = ''
|
||||
|
||||
await createHandler().handle(event)
|
||||
|
||||
expect(settingsService.createOrReplace).not.toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('should not do anything if user is not found', async () => {
|
||||
userRepository.findOneByUsernameOrEmail = jest.fn().mockReturnValue(null)
|
||||
|
||||
await createHandler().handle(event)
|
||||
|
||||
expect(settingsService.createOrReplace).not.toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('should update user marketing email settings', async () => {
|
||||
await createHandler().handle(event)
|
||||
|
||||
expect(settingsService.createOrReplace).toHaveBeenCalledWith({
|
||||
user: {},
|
||||
props: {
|
||||
name: 'MUTE_MARKETING_EMAILS',
|
||||
unencryptedValue: 'muted',
|
||||
sensitive: false,
|
||||
},
|
||||
})
|
||||
})
|
||||
|
||||
it('should update user sign in email settings', async () => {
|
||||
event.payload.level = EmailLevel.LEVELS.SignIn
|
||||
|
||||
await createHandler().handle(event)
|
||||
|
||||
expect(settingsService.createOrReplace).toHaveBeenCalledWith({
|
||||
user: {},
|
||||
props: {
|
||||
name: 'MUTE_SIGN_IN_EMAILS',
|
||||
unencryptedValue: 'muted',
|
||||
sensitive: false,
|
||||
},
|
||||
})
|
||||
})
|
||||
|
||||
it('should update user email backup email settings', async () => {
|
||||
event.payload.level = EmailLevel.LEVELS.FailedEmailBackup
|
||||
|
||||
await createHandler().handle(event)
|
||||
|
||||
expect(settingsService.createOrReplace).toHaveBeenCalledWith({
|
||||
user: {},
|
||||
props: {
|
||||
name: 'MUTE_FAILED_BACKUPS_EMAILS',
|
||||
unencryptedValue: 'muted',
|
||||
sensitive: false,
|
||||
},
|
||||
})
|
||||
})
|
||||
|
||||
it('should update user email backup email settings', async () => {
|
||||
event.payload.level = EmailLevel.LEVELS.FailedCloudBackup
|
||||
|
||||
await createHandler().handle(event)
|
||||
|
||||
expect(settingsService.createOrReplace).toHaveBeenCalledWith({
|
||||
user: {},
|
||||
props: {
|
||||
name: 'MUTE_FAILED_CLOUD_BACKUPS_EMAILS',
|
||||
unencryptedValue: 'muted',
|
||||
sensitive: false,
|
||||
},
|
||||
})
|
||||
})
|
||||
|
||||
it('should throw error for unrecognized level', async () => {
|
||||
event.payload.level = 'foobar'
|
||||
|
||||
let caughtError = null
|
||||
try {
|
||||
await createHandler().handle(event)
|
||||
} catch (error) {
|
||||
caughtError = error
|
||||
}
|
||||
|
||||
expect(caughtError).not.toBeNull()
|
||||
})
|
||||
})
|
||||
@@ -1,50 +1,21 @@
import { EmailLevel, Username } from '@standardnotes/domain-core'
import { DomainEventHandlerInterface, EmailSubscriptionUnsubscribedEvent } from '@standardnotes/domain-events'
import { SettingName } from '@standardnotes/settings'

import { SettingServiceInterface } from '../Setting/SettingServiceInterface'
import { UserRepositoryInterface } from '../User/UserRepositoryInterface'
import { Logger } from 'winston'
import { DisableEmailSettingBasedOnEmailSubscription } from '../UseCase/DisableEmailSettingBasedOnEmailSubscription/DisableEmailSettingBasedOnEmailSubscription'

export class EmailSubscriptionUnsubscribedEventHandler implements DomainEventHandlerInterface {
constructor(
private userRepository: UserRepositoryInterface,
private settingsService: SettingServiceInterface,
private disableEmailSettingBasedOnEmailSubscription: DisableEmailSettingBasedOnEmailSubscription,
private logger: Logger,
) {}

async handle(event: EmailSubscriptionUnsubscribedEvent): Promise<void> {
const usernameOrError = Username.create(event.payload.userEmail)
if (usernameOrError.isFailed()) {
return
}
const username = usernameOrError.getValue()

const user = await this.userRepository.findOneByUsernameOrEmail(username)
if (user === null) {
return
}

await this.settingsService.createOrReplace({
user,
props: {
name: this.getSettingNameFromLevel(event.payload.level),
unencryptedValue: 'muted',
sensitive: false,
},
const result = await this.disableEmailSettingBasedOnEmailSubscription.execute({
userEmail: event.payload.userEmail,
level: event.payload.level,
})
}

private getSettingNameFromLevel(level: string): string {
switch (level) {
case EmailLevel.LEVELS.FailedCloudBackup:
return SettingName.NAMES.MuteFailedCloudBackupsEmails
case EmailLevel.LEVELS.FailedEmailBackup:
return SettingName.NAMES.MuteFailedBackupsEmails
case EmailLevel.LEVELS.Marketing:
return SettingName.NAMES.MuteMarketingEmails
case EmailLevel.LEVELS.SignIn:
return SettingName.NAMES.MuteSignInEmails
default:
throw new Error(`Unknown level: ${level}`)
if (result.isFailed()) {
this.logger.error(`Failed to disable email setting for user ${event.payload.userEmail}: ${result.getError()}`)
}
}
}

@@ -0,0 +1,96 @@
|
||||
import { EmailLevel } from '@standardnotes/domain-core'
|
||||
import { Setting } from '../../Setting/Setting'
|
||||
import { SettingFactoryInterface } from '../../Setting/SettingFactoryInterface'
|
||||
import { SettingRepositoryInterface } from '../../Setting/SettingRepositoryInterface'
|
||||
import { User } from '../../User/User'
|
||||
import { UserRepositoryInterface } from '../../User/UserRepositoryInterface'
|
||||
import { DisableEmailSettingBasedOnEmailSubscription } from './DisableEmailSettingBasedOnEmailSubscription'
|
||||
|
||||
describe('DisableEmailSettingBasedOnEmailSubscription', () => {
|
||||
let userRepository: UserRepositoryInterface
|
||||
let settingRepository: SettingRepositoryInterface
|
||||
let factory: SettingFactoryInterface
|
||||
let user: User
|
||||
|
||||
const createUseCase = () =>
|
||||
new DisableEmailSettingBasedOnEmailSubscription(userRepository, settingRepository, factory)
|
||||
|
||||
beforeEach(() => {
|
||||
user = {} as jest.Mocked<User>
|
||||
user.uuid = 'userUuid'
|
||||
|
||||
userRepository = {} as jest.Mocked<UserRepositoryInterface>
|
||||
userRepository.findOneByUsernameOrEmail = jest.fn().mockResolvedValue(user)
|
||||
|
||||
settingRepository = {} as jest.Mocked<SettingRepositoryInterface>
|
||||
settingRepository.findLastByNameAndUserUuid = jest.fn().mockResolvedValue({} as jest.Mocked<Setting>)
|
||||
settingRepository.save = jest.fn()
|
||||
|
||||
factory = {} as jest.Mocked<SettingFactoryInterface>
|
||||
factory.create = jest.fn().mockResolvedValue({} as jest.Mocked<Setting>)
|
||||
factory.createReplacement = jest.fn().mockResolvedValue({} as jest.Mocked<Setting>)
|
||||
})
|
||||
|
||||
it('should fail if the username is empty', async () => {
|
||||
const useCase = createUseCase()
|
||||
|
||||
const result = await useCase.execute({
|
||||
userEmail: '',
|
||||
level: EmailLevel.LEVELS.Marketing,
|
||||
})
|
||||
|
||||
expect(result.isFailed()).toBeTruthy()
|
||||
})
|
||||
|
||||
it('should fail if the user is not found', async () => {
|
||||
userRepository.findOneByUsernameOrEmail = jest.fn().mockResolvedValue(null)
|
||||
|
||||
const useCase = createUseCase()
|
||||
|
||||
const result = await useCase.execute({
|
||||
userEmail: 'test@test.te',
|
||||
level: EmailLevel.LEVELS.Marketing,
|
||||
})
|
||||
|
||||
expect(result.isFailed()).toBeTruthy()
|
||||
})
|
||||
|
||||
it('should fail if the setting name cannot be determined', async () => {
|
||||
const useCase = createUseCase()
|
||||
|
||||
const result = await useCase.execute({
|
||||
userEmail: 'test@test.te',
|
||||
level: 'invalid',
|
||||
})
|
||||
|
||||
expect(result.isFailed()).toBeTruthy()
|
||||
})
|
||||
|
||||
it('should create a new setting if it does not exist', async () => {
|
||||
settingRepository.findLastByNameAndUserUuid = jest.fn().mockResolvedValue(null)
|
||||
|
||||
const useCase = createUseCase()
|
||||
|
||||
const result = await useCase.execute({
|
||||
userEmail: 'test@test.te',
|
||||
level: EmailLevel.LEVELS.Marketing,
|
||||
})
|
||||
|
||||
expect(result.isFailed()).toBeFalsy()
|
||||
expect(factory.create).toHaveBeenCalled()
|
||||
expect(factory.createReplacement).not.toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('should replace the setting if it exists', async () => {
|
||||
const useCase = createUseCase()
|
||||
|
||||
const result = await useCase.execute({
|
||||
userEmail: 'test@test.te',
|
||||
level: EmailLevel.LEVELS.Marketing,
|
||||
})
|
||||
|
||||
expect(result.isFailed()).toBeFalsy()
|
||||
expect(factory.create).not.toHaveBeenCalled()
|
||||
expect(factory.createReplacement).toHaveBeenCalled()
|
||||
})
|
||||
})
|
||||
@@ -0,0 +1,72 @@
import { EmailLevel, Result, UseCaseInterface, Username } from '@standardnotes/domain-core'
import { SettingName } from '@standardnotes/settings'

import { DisableEmailSettingBasedOnEmailSubscriptionDTO } from './DisableEmailSettingBasedOnEmailSubscriptionDTO'
import { UserRepositoryInterface } from '../../User/UserRepositoryInterface'
import { SettingRepositoryInterface } from '../../Setting/SettingRepositoryInterface'
import { SettingFactoryInterface } from '../../Setting/SettingFactoryInterface'

export class DisableEmailSettingBasedOnEmailSubscription implements UseCaseInterface<void> {
constructor(
private userRepository: UserRepositoryInterface,
private settingRepository: SettingRepositoryInterface,
private factory: SettingFactoryInterface,
) {}

async execute(dto: DisableEmailSettingBasedOnEmailSubscriptionDTO): Promise<Result<void>> {
const usernameOrError = Username.create(dto.userEmail)
if (usernameOrError.isFailed()) {
return Result.fail(usernameOrError.getError())
}
const username = usernameOrError.getValue()

const user = await this.userRepository.findOneByUsernameOrEmail(username)
if (user === null) {
return Result.fail(`User not found for email ${dto.userEmail}`)
}

const settingNameOrError = this.getSettingNameFromLevel(dto.level)
if (settingNameOrError.isFailed()) {
return Result.fail(settingNameOrError.getError())
}
const settingName = settingNameOrError.getValue()

let setting = await this.settingRepository.findLastByNameAndUserUuid(settingName, user.uuid)
if (!setting) {
setting = await this.factory.create(
{
name: settingName,
unencryptedValue: 'muted',
sensitive: false,
},
user,
)
} else {
setting = await this.factory.createReplacement(setting, {
name: settingName,
unencryptedValue: 'muted',
sensitive: false,
})
}

await this.settingRepository.save(setting)

return Result.ok()
}

private getSettingNameFromLevel(level: string): Result<string> {
/* istanbul ignore next */
switch (level) {
case EmailLevel.LEVELS.FailedCloudBackup:
return Result.ok(SettingName.NAMES.MuteFailedCloudBackupsEmails)
case EmailLevel.LEVELS.FailedEmailBackup:
return Result.ok(SettingName.NAMES.MuteFailedBackupsEmails)
case EmailLevel.LEVELS.Marketing:
return Result.ok(SettingName.NAMES.MuteMarketingEmails)
case EmailLevel.LEVELS.SignIn:
return Result.ok(SettingName.NAMES.MuteSignInEmails)
default:
return Result.fail(`Unknown level: ${level}`)
}
}
}
@@ -0,0 +1,4 @@
export interface DisableEmailSettingBasedOnEmailSubscriptionDTO {
userEmail: string
level: string
}
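Taken together, the new use case and DTO above give a single entry point for muting an email setting when a subscription is cancelled. A minimal usage sketch, assuming a container that already has the use case bound as shown earlier; the email address and logger variable are illustrative only:

const disableEmailSetting = container.get<DisableEmailSettingBasedOnEmailSubscription>(
  TYPES.Auth_DisableEmailSettingBasedOnEmailSubscription,
)

const result = await disableEmailSetting.execute({
  userEmail: 'user@example.com', // hypothetical address
  level: EmailLevel.LEVELS.Marketing,
})

if (result.isFailed()) {
  logger.error(`Could not mute marketing emails: ${result.getError()}`)
}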
@@ -2,10 +2,12 @@ import { CrossServiceTokenData, TokenDecoderInterface } from '@standardnotes/sec
import { NextFunction, Request, Response } from 'express'
import { BaseMiddleware } from 'inversify-express-utils'
import { Logger } from 'winston'
import { Segment, getSegment } from 'aws-xray-sdk'

export abstract class ApiGatewayAuthMiddleware extends BaseMiddleware {
constructor(
private tokenDecoder: TokenDecoderInterface<CrossServiceTokenData>,
private isConfiguredForAWSProduction: boolean,
private logger: Logger,
) {
super()
@@ -39,6 +41,13 @@ export abstract class ApiGatewayAuthMiddleware extends BaseMiddleware {
response.locals.session = token.session
response.locals.readOnlyAccess = token.session?.readonly_access ?? false

if (this.isConfiguredForAWSProduction) {
const segment = getSegment()
if (segment instanceof Segment) {
segment.setUser(token.user.uuid)
}
}

return next()
} catch (error) {
return next(error)

@@ -1,18 +1,16 @@
import { CrossServiceTokenData, TokenDecoderInterface } from '@standardnotes/security'
import { NextFunction, Request, Response } from 'express'
import { inject, injectable } from 'inversify'
import { Logger } from 'winston'

import TYPES from '../../../Bootstrap/Types'
import { ApiGatewayAuthMiddleware } from './ApiGatewayAuthMiddleware'

@injectable()
export class OptionalCrossServiceTokenMiddleware extends ApiGatewayAuthMiddleware {
constructor(
@inject(TYPES.Auth_CrossServiceTokenDecoder) tokenDecoder: TokenDecoderInterface<CrossServiceTokenData>,
@inject(TYPES.Auth_Logger) logger: Logger,
tokenDecoder: TokenDecoderInterface<CrossServiceTokenData>,
isConfiguredForAWSProduction: boolean,
logger: Logger,
) {
super(tokenDecoder, logger)
super(tokenDecoder, isConfiguredForAWSProduction, logger)
}

protected override handleMissingToken(request: Request, _response: Response, next: NextFunction): boolean {

@@ -1,18 +1,16 @@
import { CrossServiceTokenData, TokenDecoderInterface } from '@standardnotes/security'
import { NextFunction, Request, Response } from 'express'
import { inject, injectable } from 'inversify'
import { Logger } from 'winston'

import TYPES from '../../../Bootstrap/Types'
import { ApiGatewayAuthMiddleware } from './ApiGatewayAuthMiddleware'

@injectable()
export class RequiredCrossServiceTokenMiddleware extends ApiGatewayAuthMiddleware {
constructor(
@inject(TYPES.Auth_CrossServiceTokenDecoder) tokenDecoder: TokenDecoderInterface<CrossServiceTokenData>,
@inject(TYPES.Auth_Logger) logger: Logger,
tokenDecoder: TokenDecoderInterface<CrossServiceTokenData>,
isConfiguredForAWSProduction: boolean,
logger: Logger,
) {
super(tokenDecoder, logger)
super(tokenDecoder, isConfiguredForAWSProduction, logger)
}

protected override handleMissingToken(request: Request, response: Response, _next: NextFunction): boolean {

@@ -19,8 +19,8 @@ export class RedisTransitionStatusRepository implements TransitionStatusReposito
await this.redisClient.set(`${this.PREFIX}:${transitionType}:${userUuid}`, status.value)
break
case TransitionStatus.STATUSES.InProgress: {
const ttl2Hourse = 7_200
await this.redisClient.setex(`${this.PREFIX}:${transitionType}:${userUuid}`, ttl2Hourse, status.value)
const ttl24Hours = 86_400
await this.redisClient.setex(`${this.PREFIX}:${transitionType}:${userUuid}`, ttl24Hours, status.value)
break
}
}

@@ -3,6 +3,12 @@
|
||||
All notable changes to this project will be documented in this file.
|
||||
See [Conventional Commits](https://conventionalcommits.org) for commit guidelines.
|
||||
|
||||
# [1.14.0](https://github.com/standardnotes/server/compare/@standardnotes/domain-events-infra@1.13.1...@standardnotes/domain-events-infra@1.14.0) (2023-10-03)
|
||||
|
||||
### Features
|
||||
|
||||
* add xray segment tracing on auth-worker ([b1b244a](https://github.com/standardnotes/server/commit/b1b244a2cf1e17ddf67fc9b238b4b25a1bc5a190))
|
||||
|
||||
## [1.13.1](https://github.com/standardnotes/server/compare/@standardnotes/domain-events-infra@1.13.0...@standardnotes/domain-events-infra@1.13.1) (2023-09-28)
|
||||
|
||||
**Note:** Version bump only for package @standardnotes/domain-events-infra
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@standardnotes/domain-events-infra",
|
||||
"version": "1.13.1",
|
||||
"version": "1.14.0",
|
||||
"engines": {
|
||||
"node": ">=18.0.0 <21.0.0"
|
||||
},
|
||||
@@ -27,6 +27,7 @@
|
||||
"@aws-sdk/client-sns": "^3.332.0",
|
||||
"@aws-sdk/client-sqs": "^3.332.0",
|
||||
"@standardnotes/domain-events": "workspace:*",
|
||||
"aws-xray-sdk": "^3.5.2",
|
||||
"ioredis": "^5.2.4",
|
||||
"reflect-metadata": "^0.1.13",
|
||||
"sqs-consumer": "^6.2.1",
|
||||
|
||||
@@ -0,0 +1,60 @@
import { Logger } from 'winston'
import * as zlib from 'zlib'
import { Segment, Subsegment, captureAsyncFunc } from 'aws-xray-sdk'

import {
DomainEventHandlerInterface,
DomainEventInterface,
DomainEventMessageHandlerInterface,
} from '@standardnotes/domain-events'

export class SQSXRayEventMessageHandler implements DomainEventMessageHandlerInterface {
constructor(
private handlers: Map<string, DomainEventHandlerInterface>,
private logger: Logger,
) {}

async handleMessage(message: string): Promise<void> {
const messageParsed = JSON.parse(message)

const domainEventJson = zlib.unzipSync(Buffer.from(messageParsed.Message, 'base64')).toString()

const domainEvent: DomainEventInterface = JSON.parse(domainEventJson)

domainEvent.createdAt = new Date(domainEvent.createdAt)

const handler = this.handlers.get(domainEvent.type)
if (!handler) {
this.logger.debug(`Event handler for event type ${domainEvent.type} does not exist`)

return
}

this.logger.debug(`Received event: ${domainEvent.type}`)

const xRaySegment = new Segment(domainEvent.type)

if (domainEvent.meta.correlation.userIdentifierType === 'uuid') {
xRaySegment.setUser(domainEvent.meta.correlation.userIdentifier)
}

await captureAsyncFunc(
`${handler.constructor.name}.handle}`,
async (subsegment?: Subsegment) => {
await handler.handle(domainEvent)

if (subsegment) {
subsegment.close()
}
},
xRaySegment,
)

xRaySegment.close()
xRaySegment.flush()
}

async handleError(error: Error): Promise<void> {
this.logger.error('Error occured while handling SQS message: %O', error)
}
}
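The X-Ray variant above is a drop-in alternative to the existing SQS message handlers: it opens a segment per event, tags the user when the correlation identifier is a uuid, and wraps the handler call in a subsegment. A minimal wiring sketch, assuming an event-handler map like the ones built in the container config hunks earlier; the event type key and variable names are illustrative only:

const eventHandlers: Map<string, DomainEventHandlerInterface> = new Map([
  ['EMAIL_SUBSCRIPTION_UNSUBSCRIBED', emailSubscriptionUnsubscribedEventHandler], // hypothetical event type key
])

// select X-Ray tracing only when running against AWS production, mirroring the binding above
const messageHandler: DomainEventMessageHandlerInterface = isConfiguredForAWSProduction
  ? new SQSXRayEventMessageHandler(eventHandlers, logger)
  : new SQSEventMessageHandler(eventHandlers, logger)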
@@ -12,3 +12,4 @@ export * from './SQS/SQSNewRelicBounceNotificiationHandler'
export * from './SQS/SQSDomainEventSubscriberFactory'
export * from './SQS/SQSEventMessageHandler'
export * from './SQS/SQSNewRelicEventMessageHandler'
export * from './SQS/SQSXRayEventMessageHandler'

@@ -3,6 +3,10 @@
|
||||
All notable changes to this project will be documented in this file.
|
||||
See [Conventional Commits](https://conventionalcommits.org) for commit guidelines.
|
||||
|
||||
## [1.12.3](https://github.com/standardnotes/server/compare/@standardnotes/event-store@1.12.2...@standardnotes/event-store@1.12.3) (2023-10-03)
|
||||
|
||||
**Note:** Version bump only for package @standardnotes/event-store
|
||||
|
||||
## [1.12.2](https://github.com/standardnotes/server/compare/@standardnotes/event-store@1.12.1...@standardnotes/event-store@1.12.2) (2023-09-28)
|
||||
|
||||
**Note:** Version bump only for package @standardnotes/event-store
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@standardnotes/event-store",
|
||||
"version": "1.12.2",
|
||||
"version": "1.12.3",
|
||||
"description": "Event Store Service",
|
||||
"private": true,
|
||||
"main": "dist/src/index.js",
|
||||
|
||||
@@ -3,6 +3,10 @@
|
||||
All notable changes to this project will be documented in this file.
|
||||
See [Conventional Commits](https://conventionalcommits.org) for commit guidelines.
|
||||
|
||||
## [1.25.1](https://github.com/standardnotes/files/compare/@standardnotes/files-server@1.25.0...@standardnotes/files-server@1.25.1) (2023-10-03)
|
||||
|
||||
**Note:** Version bump only for package @standardnotes/files-server
|
||||
|
||||
# [1.25.0](https://github.com/standardnotes/files/compare/@standardnotes/files-server@1.24.1...@standardnotes/files-server@1.25.0) (2023-09-28)
|
||||
|
||||
### Features
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@standardnotes/files-server",
|
||||
"version": "1.25.0",
|
||||
"version": "1.25.1",
|
||||
"engines": {
|
||||
"node": ">=18.0.0 <21.0.0"
|
||||
},
|
||||
|
||||
@@ -3,6 +3,30 @@
|
||||
All notable changes to this project will be documented in this file.
|
||||
See [Conventional Commits](https://conventionalcommits.org) for commit guidelines.
|
||||
|
||||
## [1.16.15](https://github.com/standardnotes/server/compare/@standardnotes/home-server@1.16.14...@standardnotes/home-server@1.16.15) (2023-10-03)
|
||||
|
||||
**Note:** Version bump only for package @standardnotes/home-server
|
||||
|
||||
## [1.16.14](https://github.com/standardnotes/server/compare/@standardnotes/home-server@1.16.13...@standardnotes/home-server@1.16.14) (2023-10-02)
|
||||
|
||||
**Note:** Version bump only for package @standardnotes/home-server
|
||||
|
||||
## [1.16.13](https://github.com/standardnotes/server/compare/@standardnotes/home-server@1.16.12...@standardnotes/home-server@1.16.13) (2023-10-02)
|
||||
|
||||
**Note:** Version bump only for package @standardnotes/home-server
|
||||
|
||||
## [1.16.12](https://github.com/standardnotes/server/compare/@standardnotes/home-server@1.16.11...@standardnotes/home-server@1.16.12) (2023-09-29)
|
||||
|
||||
**Note:** Version bump only for package @standardnotes/home-server
|
||||
|
||||
## [1.16.11](https://github.com/standardnotes/server/compare/@standardnotes/home-server@1.16.10...@standardnotes/home-server@1.16.11) (2023-09-29)
|
||||
|
||||
**Note:** Version bump only for package @standardnotes/home-server
|
||||
|
||||
## [1.16.10](https://github.com/standardnotes/server/compare/@standardnotes/home-server@1.16.9...@standardnotes/home-server@1.16.10) (2023-09-29)
|
||||
|
||||
**Note:** Version bump only for package @standardnotes/home-server
|
||||
|
||||
## [1.16.9](https://github.com/standardnotes/server/compare/@standardnotes/home-server@1.16.8...@standardnotes/home-server@1.16.9) (2023-09-28)
|
||||
|
||||
**Note:** Version bump only for package @standardnotes/home-server
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@standardnotes/home-server",
|
||||
"version": "1.16.9",
|
||||
"version": "1.16.15",
|
||||
"engines": {
|
||||
"node": ">=18.0.0 <21.0.0"
|
||||
},
|
||||
|
||||
@@ -3,6 +3,28 @@
|
||||
All notable changes to this project will be documented in this file.
|
||||
See [Conventional Commits](https://conventionalcommits.org) for commit guidelines.
|
||||
|
||||
## [1.38.6](https://github.com/standardnotes/server/compare/@standardnotes/revisions-server@1.38.5...@standardnotes/revisions-server@1.38.6) (2023-10-03)
|
||||
|
||||
**Note:** Version bump only for package @standardnotes/revisions-server
|
||||
|
||||
## [1.38.5](https://github.com/standardnotes/server/compare/@standardnotes/revisions-server@1.38.4...@standardnotes/revisions-server@1.38.5) (2023-10-02)
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* temproarily disable transitions to empty overpopulated queues ([cd893b4](https://github.com/standardnotes/server/commit/cd893b41d7371bdc32acc111f7cea797ec33cad5))
|
||||
|
||||
## [1.38.4](https://github.com/standardnotes/server/compare/@standardnotes/revisions-server@1.38.3...@standardnotes/revisions-server@1.38.4) (2023-09-29)
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* keep transition in-progress status alive ([032cde7](https://github.com/standardnotes/server/commit/032cde77233076814b1de5791850b4ee4c8dc1f4))
|
||||
|
||||
## [1.38.3](https://github.com/standardnotes/server/compare/@standardnotes/revisions-server@1.38.2...@standardnotes/revisions-server@1.38.3) (2023-09-29)
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* add paging memory to integrity check ([e4ca310](https://github.com/standardnotes/server/commit/e4ca310707b12b1c08073a391e8857ee52acd92b))
|
||||
|
||||
## [1.38.2](https://github.com/standardnotes/server/compare/@standardnotes/revisions-server@1.38.1...@standardnotes/revisions-server@1.38.2) (2023-09-28)
|
||||
|
||||
**Note:** Version bump only for package @standardnotes/revisions-server
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@standardnotes/revisions-server",
|
||||
"version": "1.38.2",
|
||||
"version": "1.38.6",
|
||||
"engines": {
|
||||
"node": ">=18.0.0 <21.0.0"
|
||||
},
|
||||
|
||||
@@ -390,6 +390,8 @@ export class ContainerConfigLoader {
|
||||
container.get<TimerInterface>(TYPES.Revisions_Timer),
|
||||
container.get<winston.Logger>(TYPES.Revisions_Logger),
|
||||
env.get('MIGRATION_BATCH_SIZE', true) ? +env.get('MIGRATION_BATCH_SIZE', true) : 100,
|
||||
container.get<DomainEventPublisherInterface>(TYPES.Revisions_DomainEventPublisher),
|
||||
container.get<DomainEventFactoryInterface>(TYPES.Revisions_DomainEventFactory),
|
||||
),
|
||||
)
|
||||
container
|
||||
@@ -470,12 +472,10 @@ export class ContainerConfigLoader {
|
||||
.bind<TransitionRequestedEventHandler>(TYPES.Revisions_TransitionRequestedEventHandler)
|
||||
.toConstantValue(
|
||||
new TransitionRequestedEventHandler(
|
||||
true,
|
||||
container.get<TransitionRevisionsFromPrimaryToSecondaryDatabaseForUser>(
|
||||
TYPES.Revisions_TransitionRevisionsFromPrimaryToSecondaryDatabaseForUser,
|
||||
),
|
||||
container.get<RevisionRepositoryInterface>(TYPES.Revisions_SQLRevisionRepository),
|
||||
container.get<DomainEventPublisherInterface>(TYPES.Revisions_DomainEventPublisher),
|
||||
container.get<DomainEventFactoryInterface>(TYPES.Revisions_DomainEventFactory),
|
||||
container.get<winston.Logger>(TYPES.Revisions_Logger),
|
||||
),
|
||||
)
|
||||
|
||||
@@ -1,116 +1,30 @@
|
||||
import {
|
||||
DomainEventHandlerInterface,
|
||||
DomainEventPublisherInterface,
|
||||
TransitionRequestedEvent,
|
||||
} from '@standardnotes/domain-events'
|
||||
import { DomainEventHandlerInterface, TransitionRequestedEvent } from '@standardnotes/domain-events'
|
||||
import { Logger } from 'winston'
|
||||
import { TransitionRevisionsFromPrimaryToSecondaryDatabaseForUser } from '../UseCase/Transition/TransitionRevisionsFromPrimaryToSecondaryDatabaseForUser/TransitionRevisionsFromPrimaryToSecondaryDatabaseForUser'
|
||||
import { DomainEventFactoryInterface } from '../Event/DomainEventFactoryInterface'
|
||||
import { RevisionRepositoryInterface } from '../Revision/RevisionRepositoryInterface'
|
||||
import { TransitionStatus, Uuid } from '@standardnotes/domain-core'
|
||||
|
||||
export class TransitionRequestedEventHandler implements DomainEventHandlerInterface {
|
||||
constructor(
|
||||
private disabled: boolean,
|
||||
private transitionRevisionsFromPrimaryToSecondaryDatabaseForUser: TransitionRevisionsFromPrimaryToSecondaryDatabaseForUser,
|
||||
private primaryRevisionsRepository: RevisionRepositoryInterface,
|
||||
private domainEventPublisher: DomainEventPublisherInterface,
|
||||
private domainEventFactory: DomainEventFactoryInterface,
|
||||
private logger: Logger,
|
||||
) {}
|
||||
|
||||
async handle(event: TransitionRequestedEvent): Promise<void> {
|
||||
if (this.disabled) {
|
||||
return
|
||||
}
|
||||
|
||||
if (event.payload.type !== 'revisions') {
|
||||
return
|
||||
}
|
||||
|
||||
const userUuid = await this.getUserUuidFromEvent(event)
|
||||
if (!userUuid) {
|
||||
return
|
||||
}
|
||||
|
||||
if (await this.isAlreadyMigrated(userUuid)) {
|
||||
this.logger.info(`[${event.payload.userUuid}] User already migrated.`)
|
||||
|
||||
await this.domainEventPublisher.publish(
|
||||
this.domainEventFactory.createTransitionStatusUpdatedEvent({
|
||||
userUuid: event.payload.userUuid,
|
||||
status: TransitionStatus.STATUSES.Verified,
|
||||
transitionType: 'revisions',
|
||||
transitionTimestamp: event.payload.timestamp,
|
||||
}),
|
||||
)
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
this.logger.info(`[${event.payload.userUuid}] Handling transition requested event`)
|
||||
|
||||
await this.domainEventPublisher.publish(
|
||||
this.domainEventFactory.createTransitionStatusUpdatedEvent({
|
||||
userUuid: event.payload.userUuid,
|
||||
status: TransitionStatus.STATUSES.InProgress,
|
||||
transitionType: 'revisions',
|
||||
transitionTimestamp: event.payload.timestamp,
|
||||
}),
|
||||
)
|
||||
|
||||
const result = await this.transitionRevisionsFromPrimaryToSecondaryDatabaseForUser.execute({
|
||||
userUuid: event.payload.userUuid,
|
||||
timestamp: event.payload.timestamp,
|
||||
})
|
||||
|
||||
if (result.isFailed()) {
|
||||
this.logger.error(`[${event.payload.userUuid}] Failed to transition: ${result.getError()}`)
|
||||
|
||||
await this.domainEventPublisher.publish(
|
||||
this.domainEventFactory.createTransitionStatusUpdatedEvent({
|
||||
userUuid: event.payload.userUuid,
|
||||
status: TransitionStatus.STATUSES.Failed,
|
||||
transitionType: 'revisions',
|
||||
transitionTimestamp: event.payload.timestamp,
|
||||
}),
|
||||
)
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
await this.domainEventPublisher.publish(
|
||||
this.domainEventFactory.createTransitionStatusUpdatedEvent({
|
||||
userUuid: event.payload.userUuid,
|
||||
status: TransitionStatus.STATUSES.Verified,
|
||||
transitionType: 'revisions',
|
||||
transitionTimestamp: event.payload.timestamp,
|
||||
}),
|
||||
)
|
||||
}
|
||||
|
||||
private async isAlreadyMigrated(userUuid: Uuid): Promise<boolean> {
|
||||
const totalRevisionsCountForUserInPrimary = await this.primaryRevisionsRepository.countByUserUuid(userUuid)
|
||||
|
||||
if (totalRevisionsCountForUserInPrimary > 0) {
|
||||
this.logger.info(
|
||||
`[${userUuid.value}] User has ${totalRevisionsCountForUserInPrimary} revisions in primary database.`,
|
||||
)
|
||||
}
|
||||
|
||||
return totalRevisionsCountForUserInPrimary === 0
|
||||
}
|
||||
|
||||
private async getUserUuidFromEvent(event: TransitionRequestedEvent): Promise<Uuid | null> {
|
||||
const userUuidOrError = Uuid.create(event.payload.userUuid)
|
||||
if (userUuidOrError.isFailed()) {
|
||||
this.logger.error(`[${event.payload.userUuid}] Failed to transition revisions: ${userUuidOrError.getError()}`)
|
||||
await this.domainEventPublisher.publish(
|
||||
this.domainEventFactory.createTransitionStatusUpdatedEvent({
|
||||
userUuid: event.payload.userUuid,
|
||||
status: TransitionStatus.STATUSES.Failed,
|
||||
transitionType: 'revisions',
|
||||
transitionTimestamp: event.payload.timestamp,
|
||||
}),
|
||||
)
|
||||
|
||||
return null
|
||||
}
|
||||
|
||||
return userUuidOrError.getValue()
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,4 +1,6 @@
export interface TransitionRepositoryInterface {
getPagingProgress(userUuid: string): Promise<number>
setPagingProgress(userUuid: string, progress: number): Promise<void>
getIntegrityProgress(userUuid: string): Promise<number>
setIntegrityProgress(userUuid: string, progress: number): Promise<void>
}

@@ -1,11 +1,13 @@
|
||||
/* istanbul ignore file */
|
||||
import { Result, UseCaseInterface, Uuid } from '@standardnotes/domain-core'
|
||||
import { Result, TransitionStatus, UseCaseInterface, Uuid } from '@standardnotes/domain-core'
|
||||
import { TimerInterface } from '@standardnotes/time'
|
||||
import { Logger } from 'winston'
|
||||
|
||||
import { TransitionRevisionsFromPrimaryToSecondaryDatabaseForUserDTO } from './TransitionRevisionsFromPrimaryToSecondaryDatabaseForUserDTO'
|
||||
import { RevisionRepositoryInterface } from '../../../Revision/RevisionRepositoryInterface'
|
||||
import { TransitionRepositoryInterface } from '../../../Transition/TransitionRepositoryInterface'
|
||||
import { DomainEventPublisherInterface } from '@standardnotes/domain-events'
|
||||
import { DomainEventFactoryInterface } from '../../../Event/DomainEventFactoryInterface'
|
||||
|
||||
export class TransitionRevisionsFromPrimaryToSecondaryDatabaseForUser implements UseCaseInterface<void> {
|
||||
constructor(
|
||||
@@ -15,6 +17,8 @@ export class TransitionRevisionsFromPrimaryToSecondaryDatabaseForUser implements
|
||||
private timer: TimerInterface,
|
||||
private logger: Logger,
|
||||
private pageSize: number,
|
||||
private domainEventPublisher: DomainEventPublisherInterface,
|
||||
private domainEventFactory: DomainEventFactoryInterface,
|
||||
) {}
|
||||
|
||||
async execute(dto: TransitionRevisionsFromPrimaryToSecondaryDatabaseForUserDTO): Promise<Result<void>> {
|
||||
@@ -34,15 +38,26 @@ export class TransitionRevisionsFromPrimaryToSecondaryDatabaseForUser implements
|
||||
}
|
||||
const userUuid = userUuidOrError.getValue()
|
||||
|
||||
if (await this.isAlreadyMigrated(userUuid)) {
|
||||
this.logger.info(`[${userUuid.value}] User already migrated.`)
|
||||
|
||||
await this.updateTransitionStatus(userUuid, TransitionStatus.STATUSES.Verified, dto.timestamp)
|
||||
|
||||
return Result.ok()
|
||||
}
|
||||
|
||||
await this.updateTransitionStatus(userUuid, TransitionStatus.STATUSES.InProgress, dto.timestamp)
|
||||
|
||||
const migrationTimeStart = this.timer.getTimestampInMicroseconds()
|
||||
|
||||
this.logger.info(`[${dto.userUuid}] Migrating revisions`)
|
||||
|
||||
const migrationResult = await this.migrateRevisionsForUser(userUuid)
|
||||
const migrationResult = await this.migrateRevisionsForUser(userUuid, dto.timestamp)
|
||||
if (migrationResult.isFailed()) {
|
||||
await this.updateTransitionStatus(userUuid, TransitionStatus.STATUSES.Failed, dto.timestamp)
|
||||
|
||||
return Result.fail(migrationResult.getError())
|
||||
}
|
||||
const revisionsToSkipInIntegrityCheck = migrationResult.getValue()
|
||||
|
||||
this.logger.info(`[${dto.userUuid}] Revisions migrated`)
|
||||
|
||||
@@ -50,16 +65,20 @@ export class TransitionRevisionsFromPrimaryToSecondaryDatabaseForUser implements
|
||||
|
||||
this.logger.info(`[${dto.userUuid}] Checking integrity between primary and secondary database`)
|
||||
|
||||
const integrityCheckResult = await this.checkIntegrityBetweenPrimaryAndSecondaryDatabase(
|
||||
userUuid,
|
||||
revisionsToSkipInIntegrityCheck,
|
||||
)
|
||||
const integrityCheckResult = await this.checkIntegrityBetweenPrimaryAndSecondaryDatabase(userUuid)
|
||||
if (integrityCheckResult.isFailed()) {
|
||||
await (this.transitionStatusRepository as TransitionRepositoryInterface).setPagingProgress(userUuid.value, 1)
|
||||
await (this.transitionStatusRepository as TransitionRepositoryInterface).setIntegrityProgress(userUuid.value, 1)
|
||||
|
||||
await this.updateTransitionStatus(userUuid, TransitionStatus.STATUSES.Failed, dto.timestamp)
|
||||
|
||||
return Result.fail(integrityCheckResult.getError())
|
||||
}
|
||||
|
||||
const cleanupResult = await this.deleteRevisionsForUser(userUuid, this.primaryRevisionsRepository)
|
||||
if (cleanupResult.isFailed()) {
|
||||
await this.updateTransitionStatus(userUuid, TransitionStatus.STATUSES.Failed, dto.timestamp)
|
||||
|
||||
this.logger.error(`[${dto.userUuid}] Failed to clean up primary database revisions: ${cleanupResult.getError()}`)
|
||||
}
|
||||
|
||||
@@ -72,10 +91,12 @@ export class TransitionRevisionsFromPrimaryToSecondaryDatabaseForUser implements
|
||||
`[${dto.userUuid}] Transitioned revisions in ${migrationDurationTimeStructure.hours}h ${migrationDurationTimeStructure.minutes}m ${migrationDurationTimeStructure.seconds}s ${migrationDurationTimeStructure.milliseconds}ms`,
|
||||
)
|
||||
|
||||
await this.updateTransitionStatus(userUuid, TransitionStatus.STATUSES.Verified, dto.timestamp)
|
||||
|
||||
return Result.ok()
|
||||
}
|
||||
|
||||
private async migrateRevisionsForUser(userUuid: Uuid): Promise<Result<string[]>> {
|
||||
private async migrateRevisionsForUser(userUuid: Uuid, timestamp: number): Promise<Result<void>> {
|
||||
try {
|
||||
const initialPage = await (this.transitionStatusRepository as TransitionRepositoryInterface).getPagingProgress(
|
||||
userUuid.value,
|
||||
@@ -85,8 +106,17 @@ export class TransitionRevisionsFromPrimaryToSecondaryDatabaseForUser implements
|
||||
|
||||
const totalRevisionsCountForUser = await this.primaryRevisionsRepository.countByUserUuid(userUuid)
|
||||
const totalPages = Math.ceil(totalRevisionsCountForUser / this.pageSize)
|
||||
const revisionsToSkipInIntegrityCheck = []
|
||||
for (let currentPage = initialPage; currentPage <= totalPages; currentPage++) {
|
||||
const isPageInEvery10Percent = currentPage % Math.ceil(totalPages / 10) === 0
|
||||
if (isPageInEvery10Percent) {
|
||||
this.logger.info(
|
||||
`[${userUuid.value}] Migrating revisions for user: ${Math.round(
|
||||
(currentPage / totalPages) * 100,
|
||||
)}% completed`,
|
||||
)
|
||||
await this.updateTransitionStatus(userUuid, TransitionStatus.STATUSES.InProgress, timestamp)
|
||||
}
|
||||
|
||||
await (this.transitionStatusRepository as TransitionRepositoryInterface).setPagingProgress(
|
||||
userUuid.value,
|
||||
currentPage,
|
||||
@@ -113,7 +143,6 @@ export class TransitionRevisionsFromPrimaryToSecondaryDatabaseForUser implements
|
||||
this.logger.info(
|
||||
`[${userUuid.value}] Revision ${revision.id.toString()} is older than revision in secondary database`,
|
||||
)
|
||||
revisionsToSkipInIntegrityCheck.push(revision.id.toString())
|
||||
|
||||
continue
|
||||
}
|
||||
@@ -145,7 +174,7 @@ export class TransitionRevisionsFromPrimaryToSecondaryDatabaseForUser implements
|
||||
}
|
||||
}
|
||||
|
||||
return Result.ok(revisionsToSkipInIntegrityCheck)
|
||||
return Result.ok()
|
||||
} catch (error) {
|
||||
return Result.fail(`Errored when migrating revisions for user ${userUuid.value}: ${(error as Error).message}`)
|
||||
}
|
||||
@@ -171,11 +200,14 @@ export class TransitionRevisionsFromPrimaryToSecondaryDatabaseForUser implements
|
||||
await this.timer.sleep(twoSecondsInMilliseconds)
|
||||
}
|
||||
|
||||
private async checkIntegrityBetweenPrimaryAndSecondaryDatabase(
|
||||
userUuid: Uuid,
|
||||
revisionsToSkipInIntegrityCheck: string[],
|
||||
): Promise<Result<boolean>> {
|
||||
private async checkIntegrityBetweenPrimaryAndSecondaryDatabase(userUuid: Uuid): Promise<Result<boolean>> {
|
||||
try {
|
||||
const initialPage = await (this.transitionStatusRepository as TransitionRepositoryInterface).getIntegrityProgress(
|
||||
userUuid.value,
|
||||
)
|
||||
|
||||
this.logger.info(`[${userUuid.value}] Checking integrity from page ${initialPage}`)
|
||||
|
||||
const totalRevisionsCountForUserInPrimary = await this.primaryRevisionsRepository.countByUserUuid(userUuid)
|
||||
const totalRevisionsCountForUserInSecondary = await (
|
||||
this.secondRevisionsRepository as RevisionRepositoryInterface
|
||||
@@ -188,7 +220,12 @@ export class TransitionRevisionsFromPrimaryToSecondaryDatabaseForUser implements
|
||||
}
|
||||
|
||||
const totalPages = Math.ceil(totalRevisionsCountForUserInPrimary / this.pageSize)
|
||||
for (let currentPage = 1; currentPage <= totalPages; currentPage++) {
|
||||
for (let currentPage = initialPage; currentPage <= totalPages; currentPage++) {
|
||||
await (this.transitionStatusRepository as TransitionRepositoryInterface).setIntegrityProgress(
|
||||
userUuid.value,
|
||||
currentPage,
|
||||
)
|
||||
|
||||
const query = {
|
||||
userUuid: userUuid,
|
||||
offset: (currentPage - 1) * this.pageSize,
|
||||
@@ -212,17 +249,25 @@ export class TransitionRevisionsFromPrimaryToSecondaryDatabaseForUser implements
|
||||
return Result.fail(`Revision ${revision.id.toString()} not found in secondary database`)
|
||||
}
|
||||
|
||||
if (revisionsToSkipInIntegrityCheck.includes(revision.id.toString())) {
|
||||
if (revision.isIdenticalTo(revisionInSecondary)) {
|
||||
continue
|
||||
}
|
||||
|
||||
if (!revision.isIdenticalTo(revisionInSecondary)) {
|
||||
return Result.fail(
|
||||
`Revision ${revision.id.toString()} is not identical in primary and secondary database. Revision in primary database: ${JSON.stringify(
|
||||
revision,
|
||||
)}, revision in secondary database: ${JSON.stringify(revisionInSecondary)}`,
|
||||
if (revisionInSecondary.props.dates.updatedAt > revision.props.dates.updatedAt) {
|
||||
this.logger.info(
|
||||
`[${
|
||||
userUuid.value
|
||||
}] Integrity check of revision ${revision.id.toString()} - is older than revision in secondary database`,
|
||||
)
|
||||
|
||||
continue
|
||||
}
|
||||
|
||||
return Result.fail(
|
||||
`Revision ${revision.id.toString()} is not identical in primary and secondary database. Revision in primary database: ${JSON.stringify(
|
||||
revision,
|
||||
)}, revision in secondary database: ${JSON.stringify(revisionInSecondary)}`,
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -233,4 +278,27 @@ export class TransitionRevisionsFromPrimaryToSecondaryDatabaseForUser implements
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
private async updateTransitionStatus(userUuid: Uuid, status: string, timestamp: number): Promise<void> {
|
||||
await this.domainEventPublisher.publish(
|
||||
this.domainEventFactory.createTransitionStatusUpdatedEvent({
|
||||
userUuid: userUuid.value,
|
||||
status,
|
||||
transitionType: 'revisions',
|
||||
transitionTimestamp: timestamp,
|
||||
}),
|
||||
)
|
||||
}
|
||||
|
||||
private async isAlreadyMigrated(userUuid: Uuid): Promise<boolean> {
|
||||
const totalRevisionsCountForUserInPrimary = await this.primaryRevisionsRepository.countByUserUuid(userUuid)
|
||||
|
||||
if (totalRevisionsCountForUserInPrimary > 0) {
|
||||
this.logger.info(
|
||||
`[${userUuid.value}] User has ${totalRevisionsCountForUserInPrimary} revisions in primary database.`,
|
||||
)
|
||||
}
|
||||
|
||||
return totalRevisionsCountForUserInPrimary === 0
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,3 +1,4 @@
export interface TransitionRevisionsFromPrimaryToSecondaryDatabaseForUserDTO {
userUuid: string
timestamp: number
}

@@ -3,10 +3,25 @@ import * as IORedis from 'ioredis'
import { TransitionRepositoryInterface } from '../../Domain/Transition/TransitionRepositoryInterface'

export class RedisTransitionRepository implements TransitionRepositoryInterface {
private readonly PREFIX = 'transition-revisions-paging-progress'
private readonly PREFIX = 'transition-revisions-migration-progress'
private readonly INTEGRITY_PREFIX = 'transition-revisions-integrity-progress'

constructor(private redisClient: IORedis.Redis) {}

async getIntegrityProgress(userUuid: string): Promise<number> {
const progress = await this.redisClient.get(`${this.INTEGRITY_PREFIX}:${userUuid}`)

if (progress === null) {
return 1
}

return parseInt(progress)
}

async setIntegrityProgress(userUuid: string, progress: number): Promise<void> {
await this.redisClient.setex(`${this.INTEGRITY_PREFIX}:${userUuid}`, 172_800, progress.toString())
}

async getPagingProgress(userUuid: string): Promise<number> {
const progress = await this.redisClient.get(`${this.PREFIX}:${userUuid}`)

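The repository above now tracks two progress counters per user under separate key prefixes, with the integrity progress written with a 172_800-second (48-hour) TTL so stale entries expire on their own. A small sketch of the resulting behaviour; the uuid is a placeholder:

// transition-revisions-migration-progress:<userUuid>  -> last migrated page
// transition-revisions-integrity-progress:<userUuid>  -> last integrity-checked page, expires after 48h
const repository = new RedisTransitionRepository(redisClient)
await repository.setIntegrityProgress('00000000-0000-0000-0000-000000000000', 12)
const resumeFrom = await repository.getIntegrityProgress('00000000-0000-0000-0000-000000000000') // 12, or 1 once the key has expired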
@@ -30,7 +30,7 @@ export class SQLLegacyRevisionRepository implements RevisionRepositoryInterface
const queryBuilder = this.ormRepository
.createQueryBuilder('revision')
.where('revision.user_uuid = :userUuid', { userUuid: dto.userUuid.value })
.orderBy('revision.uuid', 'ASC')
.orderBy('revision.created_at', 'ASC')

if (dto.offset !== undefined) {
queryBuilder.skip(dto.offset)

@@ -3,6 +3,10 @@
|
||||
All notable changes to this project will be documented in this file.
|
||||
See [Conventional Commits](https://conventionalcommits.org) for commit guidelines.
|
||||
|
||||
## [1.21.3](https://github.com/standardnotes/server/compare/@standardnotes/scheduler-server@1.21.2...@standardnotes/scheduler-server@1.21.3) (2023-10-03)
|
||||
|
||||
**Note:** Version bump only for package @standardnotes/scheduler-server
|
||||
|
||||
## [1.21.2](https://github.com/standardnotes/server/compare/@standardnotes/scheduler-server@1.21.1...@standardnotes/scheduler-server@1.21.2) (2023-09-28)
|
||||
|
||||
**Note:** Version bump only for package @standardnotes/scheduler-server
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@standardnotes/scheduler-server",
|
||||
"version": "1.21.2",
|
||||
"version": "1.21.3",
|
||||
"engines": {
|
||||
"node": ">=18.0.0 <21.0.0"
|
||||
},
|
||||
|
||||
@@ -3,6 +3,32 @@
|
||||
All notable changes to this project will be documented in this file.
|
||||
See [Conventional Commits](https://conventionalcommits.org) for commit guidelines.
|
||||
|
||||
## [1.110.3](https://github.com/standardnotes/syncing-server-js/compare/@standardnotes/syncing-server@1.110.2...@standardnotes/syncing-server@1.110.3) (2023-10-03)
|
||||
|
||||
**Note:** Version bump only for package @standardnotes/syncing-server
|
||||
|
||||
## [1.110.2](https://github.com/standardnotes/syncing-server-js/compare/@standardnotes/syncing-server@1.110.1...@standardnotes/syncing-server@1.110.2) (2023-10-02)
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* temproarily disable transitions to empty overpopulated queues ([cd893b4](https://github.com/standardnotes/syncing-server-js/commit/cd893b41d7371bdc32acc111f7cea797ec33cad5))
|
||||
|
||||
## [1.110.1](https://github.com/standardnotes/syncing-server-js/compare/@standardnotes/syncing-server@1.110.0...@standardnotes/syncing-server@1.110.1) (2023-09-29)
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* keep transition in-progress status alive ([032cde7](https://github.com/standardnotes/syncing-server-js/commit/032cde77233076814b1de5791850b4ee4c8dc1f4))
|
||||
|
||||
# [1.110.0](https://github.com/standardnotes/syncing-server-js/compare/@standardnotes/syncing-server@1.109.2...@standardnotes/syncing-server@1.110.0) (2023-09-29)
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* add paging memory to integrity check ([e4ca310](https://github.com/standardnotes/syncing-server-js/commit/e4ca310707b12b1c08073a391e8857ee52acd92b))
|
||||
|
||||
### Features
|
||||
|
||||
* **syncing-server:** allow surviving only upon account deletion ([#857](https://github.com/standardnotes/syncing-server-js/issues/857)) ([609e85f](https://github.com/standardnotes/syncing-server-js/commit/609e85f926ebbc2887656c46df18471c68d70185))
|
||||
|
||||
## [1.109.2](https://github.com/standardnotes/syncing-server-js/compare/@standardnotes/syncing-server@1.109.1...@standardnotes/syncing-server@1.109.2) (2023-09-28)
|
||||
|
||||
**Note:** Version bump only for package @standardnotes/syncing-server
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@standardnotes/syncing-server",
|
||||
"version": "1.109.2",
|
||||
"version": "1.110.3",
|
||||
"engines": {
|
||||
"node": ">=18.0.0 <21.0.0"
|
||||
},
|
||||
|
||||
@@ -879,6 +879,8 @@ export class ContainerConfigLoader {
|
||||
container.get<TimerInterface>(TYPES.Sync_Timer),
|
||||
container.get<Logger>(TYPES.Sync_Logger),
|
||||
env.get('MIGRATION_BATCH_SIZE', true) ? +env.get('MIGRATION_BATCH_SIZE', true) : 100,
|
||||
container.get<DomainEventPublisherInterface>(TYPES.Sync_DomainEventPublisher),
|
||||
container.get<DomainEventFactoryInterface>(TYPES.Sync_DomainEventFactory),
|
||||
),
|
||||
)
|
||||
container
|
||||
@@ -1049,12 +1051,10 @@ export class ContainerConfigLoader {
|
||||
.bind<TransitionRequestedEventHandler>(TYPES.Sync_TransitionRequestedEventHandler)
|
||||
.toConstantValue(
|
||||
new TransitionRequestedEventHandler(
|
||||
container.get<ItemRepositoryInterface>(TYPES.Sync_SQLItemRepository),
|
||||
true,
|
||||
container.get<TransitionItemsFromPrimaryToSecondaryDatabaseForUser>(
|
||||
TYPES.Sync_TransitionItemsFromPrimaryToSecondaryDatabaseForUser,
|
||||
),
|
||||
container.get<DomainEventPublisherInterface>(TYPES.Sync_DomainEventPublisher),
|
||||
container.get<DomainEventFactoryInterface>(TYPES.Sync_DomainEventFactory),
|
||||
container.get<Logger>(TYPES.Sync_Logger),
|
||||
),
|
||||
)
|
||||
|
||||
@@ -37,6 +37,7 @@ export class AccountDeletionRequestedEventHandler implements DomainEventHandlerI
|
||||
|
||||
const deletingVaultsResult = await this.deleteSharedVaults.execute({
|
||||
ownerUuid: event.payload.userUuid,
|
||||
allowSurviving: true,
|
||||
})
|
||||
if (deletingVaultsResult.isFailed()) {
|
||||
this.logger.error(
|
||||
|
||||
@@ -1,118 +1,31 @@
|
||||
import {
|
||||
DomainEventHandlerInterface,
|
||||
DomainEventPublisherInterface,
|
||||
TransitionRequestedEvent,
|
||||
} from '@standardnotes/domain-events'
|
||||
import { DomainEventHandlerInterface, TransitionRequestedEvent } from '@standardnotes/domain-events'
|
||||
import { Logger } from 'winston'
|
||||
import { TransitionStatus, Uuid } from '@standardnotes/domain-core'
|
||||
|
||||
import { TransitionItemsFromPrimaryToSecondaryDatabaseForUser } from '../UseCase/Transition/TransitionItemsFromPrimaryToSecondaryDatabaseForUser/TransitionItemsFromPrimaryToSecondaryDatabaseForUser'
|
||||
import { ItemRepositoryInterface } from '../Item/ItemRepositoryInterface'
|
||||
import { DomainEventFactoryInterface } from '../Event/DomainEventFactoryInterface'
|
||||
|
||||
export class TransitionRequestedEventHandler implements DomainEventHandlerInterface {
|
||||
constructor(
|
||||
private primaryItemRepository: ItemRepositoryInterface,
|
||||
private disabled: boolean,
|
||||
private transitionItemsFromPrimaryToSecondaryDatabaseForUser: TransitionItemsFromPrimaryToSecondaryDatabaseForUser,
|
||||
private domainEventPublisher: DomainEventPublisherInterface,
|
||||
private domainEventFactory: DomainEventFactoryInterface,
|
||||
private logger: Logger,
|
||||
) {}
|
||||
|
||||
async handle(event: TransitionRequestedEvent): Promise<void> {
|
||||
if (this.disabled) {
|
||||
return
|
||||
}
|
||||
|
||||
if (event.payload.type !== 'items') {
|
||||
return
|
||||
}
|
||||
|
||||
const userUuid = await this.getUserUuidFromEvent(event)
|
||||
if (!userUuid) {
|
||||
return
|
||||
}
|
||||
|
||||
if (await this.isAlreadyMigrated(userUuid)) {
|
||||
this.logger.info(`[${event.payload.userUuid}] User already migrated.`)
|
||||
|
||||
await this.domainEventPublisher.publish(
|
||||
this.domainEventFactory.createTransitionStatusUpdatedEvent({
|
||||
userUuid: event.payload.userUuid,
|
||||
status: TransitionStatus.STATUSES.Verified,
|
||||
transitionType: 'items',
|
||||
transitionTimestamp: event.payload.timestamp,
|
||||
}),
|
||||
)
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
this.logger.info(`[${event.payload.userUuid}] Handling transition requested event`)
|
||||
|
||||
await this.domainEventPublisher.publish(
|
||||
this.domainEventFactory.createTransitionStatusUpdatedEvent({
|
||||
userUuid: event.payload.userUuid,
|
||||
status: TransitionStatus.STATUSES.InProgress,
|
||||
transitionType: 'items',
|
||||
transitionTimestamp: event.payload.timestamp,
|
||||
}),
|
||||
)
|
||||
|
||||
const result = await this.transitionItemsFromPrimaryToSecondaryDatabaseForUser.execute({
|
||||
userUuid: event.payload.userUuid,
|
||||
timestamp: event.payload.timestamp,
|
||||
})
|
||||
|
||||
if (result.isFailed()) {
|
||||
this.logger.error(`[${event.payload.userUuid}] Failed to trigger transition: ${result.getError()}`)
|
||||
|
||||
await this.domainEventPublisher.publish(
|
||||
this.domainEventFactory.createTransitionStatusUpdatedEvent({
|
||||
userUuid: event.payload.userUuid,
|
||||
status: TransitionStatus.STATUSES.Failed,
|
||||
transitionType: 'items',
|
||||
transitionTimestamp: event.payload.timestamp,
|
||||
}),
|
||||
)
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
await this.domainEventPublisher.publish(
|
||||
this.domainEventFactory.createTransitionStatusUpdatedEvent({
|
||||
userUuid: event.payload.userUuid,
|
||||
status: TransitionStatus.STATUSES.Verified,
|
||||
transitionType: 'items',
|
||||
transitionTimestamp: event.payload.timestamp,
|
||||
}),
|
||||
)
|
||||
}
|
||||
|
||||
private async isAlreadyMigrated(userUuid: Uuid): Promise<boolean> {
|
||||
const totalItemsCountForUserInPrimary = await this.primaryItemRepository.countAll({
|
||||
userUuid: userUuid.value,
|
||||
})
|
||||
|
||||
if (totalItemsCountForUserInPrimary > 0) {
|
||||
this.logger.info(`[${userUuid.value}] User has ${totalItemsCountForUserInPrimary} items in primary database.`)
|
||||
}
|
||||
|
||||
return totalItemsCountForUserInPrimary === 0
|
||||
}
|
||||
|
||||
private async getUserUuidFromEvent(event: TransitionRequestedEvent): Promise<Uuid | null> {
|
||||
const userUuidOrError = Uuid.create(event.payload.userUuid)
|
||||
if (userUuidOrError.isFailed()) {
|
||||
this.logger.error(`[${event.payload.userUuid}] Failed to transition items: ${userUuidOrError.getError()}`)
|
||||
|
||||
await this.domainEventPublisher.publish(
|
||||
this.domainEventFactory.createTransitionStatusUpdatedEvent({
|
||||
userUuid: event.payload.userUuid,
|
||||
status: TransitionStatus.STATUSES.Failed,
|
||||
transitionType: 'items',
|
||||
transitionTimestamp: event.payload.timestamp,
|
||||
}),
|
||||
)
|
||||
|
||||
return null
|
||||
}
|
||||
|
||||
return userUuidOrError.getValue()
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,4 +1,6 @@
|
||||
export interface TransitionRepositoryInterface {
|
||||
getPagingProgress(userUuid: string): Promise<number>
|
||||
setPagingProgress(userUuid: string, progress: number): Promise<void>
|
||||
getIntegrityProgress(userUuid: string): Promise<number>
|
||||
setIntegrityProgress(userUuid: string, progress: number): Promise<void>
|
||||
}
|
||||
|
||||
@@ -96,6 +96,7 @@ describe('DeleteSharedVault', () => {
const result = await useCase.execute({
sharedVaultUuid: '00000000-0000-0000-0000-000000000000',
originatorUuid: '00000000-0000-0000-0000-000000000000',
allowSurviving: false,
})

expect(result.isFailed()).toBeFalsy()
@@ -111,6 +112,7 @@ describe('DeleteSharedVault', () => {
const result = await useCase.execute({
sharedVaultUuid: '00000000-0000-0000-0000-000000000000',
originatorUuid: '00000000-0000-0000-0000-000000000000',
allowSurviving: false,
})

expect(result.isFailed()).toBeTruthy()
@@ -125,6 +127,7 @@ describe('DeleteSharedVault', () => {
const result = await useCase.execute({
sharedVaultUuid: 'invalid',
originatorUuid: '00000000-0000-0000-0000-000000000000',
allowSurviving: false,
})

expect(result.isFailed()).toBeTruthy()
@@ -139,6 +142,7 @@ describe('DeleteSharedVault', () => {
const result = await useCase.execute({
sharedVaultUuid: '00000000-0000-0000-0000-000000000000',
originatorUuid: 'invalid',
allowSurviving: false,
})

expect(result.isFailed()).toBeTruthy()
@@ -159,6 +163,7 @@ describe('DeleteSharedVault', () => {
const result = await useCase.execute({
sharedVaultUuid: '00000000-0000-0000-0000-000000000000',
originatorUuid: '00000000-0000-0000-0000-000000000000',
allowSurviving: false,
})

expect(result.isFailed()).toBeTruthy()
@@ -174,6 +179,7 @@ describe('DeleteSharedVault', () => {
const result = await useCase.execute({
sharedVaultUuid: '00000000-0000-0000-0000-000000000000',
originatorUuid: '00000000-0000-0000-0000-000000000000',
allowSurviving: false,
})

expect(result.isFailed()).toBeTruthy()
@@ -188,6 +194,7 @@ describe('DeleteSharedVault', () => {
const result = await useCase.execute({
sharedVaultUuid: '00000000-0000-0000-0000-000000000000',
originatorUuid: '00000000-0000-0000-0000-000000000000',
allowSurviving: false,
})

expect(result.isFailed()).toBeTruthy()
@@ -207,6 +214,7 @@ describe('DeleteSharedVault', () => {
const result = await useCase.execute({
sharedVaultUuid: '00000000-0000-0000-0000-000000000000',
originatorUuid: '00000000-0000-0000-0000-000000000000',
allowSurviving: true,
})

expect(result.isFailed()).toBeFalsy()
@@ -223,6 +231,7 @@ describe('DeleteSharedVault', () => {
const result = await useCase.execute({
sharedVaultUuid: '00000000-0000-0000-0000-000000000000',
originatorUuid: '00000000-0000-0000-0000-000000000000',
allowSurviving: true,
})

expect(result.isFailed()).toBeTruthy()
@@ -239,6 +248,7 @@ describe('DeleteSharedVault', () => {
const result = await useCase.execute({
sharedVaultUuid: '00000000-0000-0000-0000-000000000000',
originatorUuid: '00000000-0000-0000-0000-000000000000',
allowSurviving: true,
})

expect(result.isFailed()).toBeTruthy()
@@ -56,30 +56,32 @@ export class DeleteSharedVault implements UseCaseInterface<{ status: 'deleted' |
}
}

const sharedVaultDesignatedSurvivor =
await this.sharedVaultUserRepository.findDesignatedSurvivorBySharedVaultUuid(sharedVaultUuid)
if (sharedVaultDesignatedSurvivor) {
const result = await this.transferSharedVault.execute({
sharedVaultUid: sharedVaultUuid.value,
fromUserUuid: originatorUuid.value,
toUserUuid: sharedVaultDesignatedSurvivor.props.userUuid.value,
})
if (dto.allowSurviving) {
const sharedVaultDesignatedSurvivor =
await this.sharedVaultUserRepository.findDesignatedSurvivorBySharedVaultUuid(sharedVaultUuid)
if (sharedVaultDesignatedSurvivor) {
const result = await this.transferSharedVault.execute({
sharedVaultUid: sharedVaultUuid.value,
fromUserUuid: originatorUuid.value,
toUserUuid: sharedVaultDesignatedSurvivor.props.userUuid.value,
})

if (result.isFailed()) {
return Result.fail(result.getError())
if (result.isFailed()) {
return Result.fail(result.getError())
}

const removingOwnerFromSharedVaultResult = await this.removeUserFromSharedVault.execute({
originatorUuid: originatorUuid.value,
sharedVaultUuid: sharedVaultUuid.value,
userUuid: originatorUuid.value,
forceRemoveOwner: true,
})
if (removingOwnerFromSharedVaultResult.isFailed()) {
return Result.fail(removingOwnerFromSharedVaultResult.getError())
}

return Result.ok({ status: 'transitioned' })
}

const removingOwnerFromSharedVaultResult = await this.removeUserFromSharedVault.execute({
originatorUuid: originatorUuid.value,
sharedVaultUuid: sharedVaultUuid.value,
userUuid: originatorUuid.value,
forceRemoveOwner: true,
})
if (removingOwnerFromSharedVaultResult.isFailed()) {
return Result.fail(removingOwnerFromSharedVaultResult.getError())
}

return Result.ok({ status: 'transitioned' })
}

const sharedVaultUsers = await this.sharedVaultUserRepository.findBySharedVaultUuid(sharedVaultUuid)
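In the reworked block above, the designated-survivor lookup and ownership transfer only run when dto.allowSurviving is set; otherwise the method falls through to the regular delete path. An illustrative summary of the outcomes, assuming the fall-through path reports 'deleted' (hypothetical helper, not part of the use case):

type DeleteSharedVaultOutcome = 'deleted' | 'transitioned'

// Sketch only: 'transitioned' requires both allowSurviving and a designated survivor.
function expectedOutcome(allowSurviving: boolean, hasDesignatedSurvivor: boolean): DeleteSharedVaultOutcome {
  return allowSurviving && hasDesignatedSurvivor ? 'transitioned' : 'deleted'
}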
@@ -1,4 +1,5 @@
export interface DeleteSharedVaultDTO {
originatorUuid: string
sharedVaultUuid: string
allowSurviving: boolean
}
@@ -32,6 +32,7 @@ describe('DeleteSharedVaults', () => {

const result = await useCase.execute({
ownerUuid: '00000000-0000-0000-0000-000000000000',
allowSurviving: true,
})

expect(result.isFailed()).toBe(false)
@@ -39,6 +40,7 @@ describe('DeleteSharedVaults', () => {
expect(deleteSharedVaultUseCase.execute).toHaveBeenCalledWith({
originatorUuid: '00000000-0000-0000-0000-000000000000',
sharedVaultUuid: '00000000-0000-0000-0000-000000000000',
allowSurviving: true,
})
})

@@ -48,6 +50,7 @@ describe('DeleteSharedVaults', () => {

const result = await useCase.execute({
ownerUuid: '00000000-0000-0000-0000-000000000000',
allowSurviving: true,
})

expect(result.isFailed()).toBe(true)
@@ -55,6 +58,7 @@ describe('DeleteSharedVaults', () => {
expect(deleteSharedVaultUseCase.execute).toHaveBeenCalledWith({
originatorUuid: '00000000-0000-0000-0000-000000000000',
sharedVaultUuid: '00000000-0000-0000-0000-000000000000',
allowSurviving: true,
})
})

@@ -63,6 +67,7 @@ describe('DeleteSharedVaults', () => {

const result = await useCase.execute({
ownerUuid: 'invalid',
allowSurviving: true,
})

expect(result.isFailed()).toBeTruthy()
@@ -24,6 +24,7 @@ export class DeleteSharedVaults implements UseCaseInterface<Map<Uuid, 'deleted'
const result = await this.deleteSharedVaultUseCase.execute({
originatorUuid: ownerUuid.value,
sharedVaultUuid: sharedVault.id.toString(),
allowSurviving: dto.allowSurviving,
})
if (result.isFailed()) {
return Result.fail(result.getError())
@@ -1,3 +1,4 @@
export interface DeleteSharedVaultsDTO {
ownerUuid: string
allowSurviving: boolean
}
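With the extended DTO, callers of the batch use case must now state explicitly whether ownership may survive the delete. An illustrative call shape inside an async context (the variable name and UUID are placeholders):

// Sketch only: mirrors how the spec above drives the use case.
const result = await deleteSharedVaults.execute({
  ownerUuid: '00000000-0000-0000-0000-000000000000',
  allowSurviving: true,
})

if (result.isFailed()) {
  // The batch short-circuits on the first failing vault, as in the loop above.
  console.error(result.getError())
}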
@@ -1,12 +1,14 @@
/* istanbul ignore file */
import { TimerInterface } from '@standardnotes/time'
import { Result, UseCaseInterface, Uuid } from '@standardnotes/domain-core'
import { Result, TransitionStatus, UseCaseInterface, Uuid } from '@standardnotes/domain-core'
import { Logger } from 'winston'

import { TransitionItemsFromPrimaryToSecondaryDatabaseForUserDTO } from './TransitionItemsFromPrimaryToSecondaryDatabaseForUserDTO'
import { ItemRepositoryInterface } from '../../../Item/ItemRepositoryInterface'
import { ItemQuery } from '../../../Item/ItemQuery'
import { TransitionRepositoryInterface } from '../../../Transition/TransitionRepositoryInterface'
import { DomainEventPublisherInterface } from '@standardnotes/domain-events'
import { DomainEventFactoryInterface } from '../../../Event/DomainEventFactoryInterface'

export class TransitionItemsFromPrimaryToSecondaryDatabaseForUser implements UseCaseInterface<void> {
constructor(
@@ -16,6 +18,8 @@ export class TransitionItemsFromPrimaryToSecondaryDatabaseForUser implements Use
private timer: TimerInterface,
private logger: Logger,
private pageSize: number,
private domainEventPublisher: DomainEventPublisherInterface,
private domainEventFactory: DomainEventFactoryInterface,
) {}

async execute(dto: TransitionItemsFromPrimaryToSecondaryDatabaseForUserDTO): Promise<Result<void>> {
@@ -35,15 +39,24 @@ export class TransitionItemsFromPrimaryToSecondaryDatabaseForUser implements Use
}
const userUuid = userUuidOrError.getValue()

if (await this.isAlreadyMigrated(userUuid)) {
this.logger.info(`[${userUuid.value}] User already migrated.`)

await this.updateTransitionStatus(userUuid, TransitionStatus.STATUSES.Verified, dto.timestamp)

return Result.ok()
}

const migrationTimeStart = this.timer.getTimestampInMicroseconds()

this.logger.info(`[${dto.userUuid}] Migrating items`)

const migrationResult = await this.migrateItemsForUser(userUuid)
const migrationResult = await this.migrateItemsForUser(userUuid, dto.timestamp)
if (migrationResult.isFailed()) {
await this.updateTransitionStatus(userUuid, TransitionStatus.STATUSES.Failed, dto.timestamp)

return Result.fail(migrationResult.getError())
}
const itemsToSkipInIntegrityCheck = migrationResult.getValue()

this.logger.info(`[${dto.userUuid}] Items migrated`)

@@ -51,16 +64,20 @@ export class TransitionItemsFromPrimaryToSecondaryDatabaseForUser implements Use

this.logger.info(`[${dto.userUuid}] Checking integrity between primary and secondary database`)

const integrityCheckResult = await this.checkIntegrityBetweenPrimaryAndSecondaryDatabase(
userUuid,
itemsToSkipInIntegrityCheck,
)
const integrityCheckResult = await this.checkIntegrityBetweenPrimaryAndSecondaryDatabase(userUuid)
if (integrityCheckResult.isFailed()) {
await (this.transitionStatusRepository as TransitionRepositoryInterface).setPagingProgress(userUuid.value, 1)
await (this.transitionStatusRepository as TransitionRepositoryInterface).setIntegrityProgress(userUuid.value, 1)

await this.updateTransitionStatus(userUuid, TransitionStatus.STATUSES.Failed, dto.timestamp)

return Result.fail(integrityCheckResult.getError())
}

const cleanupResult = await this.deleteItemsForUser(userUuid, this.primaryItemRepository)
if (cleanupResult.isFailed()) {
await this.updateTransitionStatus(userUuid, TransitionStatus.STATUSES.Failed, dto.timestamp)

this.logger.error(`[${dto.userUuid}] Failed to clean up primary database items: ${cleanupResult.getError()}`)
}

@@ -73,6 +90,8 @@ export class TransitionItemsFromPrimaryToSecondaryDatabaseForUser implements Use
`[${dto.userUuid}] Transitioned items in ${migrationDurationTimeStructure.hours}h ${migrationDurationTimeStructure.minutes}m ${migrationDurationTimeStructure.seconds}s ${migrationDurationTimeStructure.milliseconds}ms`,
)

await this.updateTransitionStatus(userUuid, TransitionStatus.STATUSES.Verified, dto.timestamp)

return Result.ok()
}

@@ -81,7 +100,7 @@ export class TransitionItemsFromPrimaryToSecondaryDatabaseForUser implements Use
await this.timer.sleep(twoSecondsInMilliseconds)
}

private async migrateItemsForUser(userUuid: Uuid): Promise<Result<string[]>> {
private async migrateItemsForUser(userUuid: Uuid, timestamp: number): Promise<Result<void>> {
try {
const initialPage = await (this.transitionStatusRepository as TransitionRepositoryInterface).getPagingProgress(
userUuid.value,
@@ -91,8 +110,15 @@ export class TransitionItemsFromPrimaryToSecondaryDatabaseForUser implements Use

const totalItemsCountForUser = await this.primaryItemRepository.countAll({ userUuid: userUuid.value })
const totalPages = Math.ceil(totalItemsCountForUser / this.pageSize)
const itemsToSkipInIntegrityCheck = []
for (let currentPage = initialPage; currentPage <= totalPages; currentPage++) {
const isPageInEvery10Percent = currentPage % Math.ceil(totalPages / 10) === 0
if (isPageInEvery10Percent) {
this.logger.info(
`[${userUuid.value}] Migrating items for user: ${Math.round((currentPage / totalPages) * 100)}% completed`,
)
await this.updateTransitionStatus(userUuid, TransitionStatus.STATUSES.InProgress, timestamp)
}

await (this.transitionStatusRepository as TransitionRepositoryInterface).setPagingProgress(
userUuid.value,
currentPage,
@@ -102,7 +128,7 @@ export class TransitionItemsFromPrimaryToSecondaryDatabaseForUser implements Use
userUuid: userUuid.value,
offset: (currentPage - 1) * this.pageSize,
limit: this.pageSize,
sortBy: 'uuid',
sortBy: 'created_at_timestamp',
sortOrder: 'ASC',
}

@@ -120,7 +146,6 @@ export class TransitionItemsFromPrimaryToSecondaryDatabaseForUser implements Use
}
if (itemInSecondary.props.timestamps.updatedAt > item.props.timestamps.updatedAt) {
this.logger.info(`[${userUuid.value}] Item ${item.uuid.value} is older than item in secondary database`)
itemsToSkipInIntegrityCheck.push(item.uuid.value)

continue
}
@@ -143,7 +168,7 @@ export class TransitionItemsFromPrimaryToSecondaryDatabaseForUser implements Use
}
}

return Result.ok(itemsToSkipInIntegrityCheck)
return Result.ok()
} catch (error) {
return Result.fail((error as Error).message)
}
@@ -161,11 +186,14 @@ export class TransitionItemsFromPrimaryToSecondaryDatabaseForUser implements Use
}
}

private async checkIntegrityBetweenPrimaryAndSecondaryDatabase(
userUuid: Uuid,
itemsToSkipInIntegrityCheck: string[],
): Promise<Result<boolean>> {
private async checkIntegrityBetweenPrimaryAndSecondaryDatabase(userUuid: Uuid): Promise<Result<boolean>> {
try {
const initialPage = await (this.transitionStatusRepository as TransitionRepositoryInterface).getIntegrityProgress(
userUuid.value,
)

this.logger.info(`[${userUuid.value}] Checking integrity from page ${initialPage}`)

const totalItemsCountForUserInPrimary = await this.primaryItemRepository.countAll({ userUuid: userUuid.value })
const totalItemsCountForUserInSecondary = await (
this.secondaryItemRepository as ItemRepositoryInterface
@@ -180,12 +208,17 @@ export class TransitionItemsFromPrimaryToSecondaryDatabaseForUser implements Use
}

const totalPages = Math.ceil(totalItemsCountForUserInPrimary / this.pageSize)
for (let currentPage = 1; currentPage <= totalPages; currentPage++) {
for (let currentPage = initialPage; currentPage <= totalPages; currentPage++) {
await (this.transitionStatusRepository as TransitionRepositoryInterface).setIntegrityProgress(
userUuid.value,
currentPage,
)

const query: ItemQuery = {
userUuid: userUuid.value,
offset: (currentPage - 1) * this.pageSize,
limit: this.pageSize,
sortBy: 'uuid',
sortBy: 'created_at_timestamp',
sortOrder: 'ASC',
}

@@ -197,19 +230,25 @@ export class TransitionItemsFromPrimaryToSecondaryDatabaseForUser implements Use
return Result.fail(`Item ${item.uuid.value} not found in secondary database`)
}

if (itemsToSkipInIntegrityCheck.includes(item.id.toString())) {
if (item.isIdenticalTo(itemInSecondary)) {
continue
}

if (!item.isIdenticalTo(itemInSecondary)) {
return Result.fail(
`Item ${
item.uuid.value
} is not identical in primary and secondary database. Item in primary database: ${JSON.stringify(
item,
)}, item in secondary database: ${JSON.stringify(itemInSecondary)}`,
if (itemInSecondary.props.timestamps.updatedAt > item.props.timestamps.updatedAt) {
this.logger.info(
`[${userUuid.value}] Integrity check of Item ${item.uuid.value} - is older than item in secondary database`,
)

continue
}

return Result.fail(
`Item ${
item.uuid.value
} is not identical in primary and secondary database. Item in primary database: ${JSON.stringify(
item,
)}, item in secondary database: ${JSON.stringify(itemInSecondary)}`,
)
}
}

@@ -218,4 +257,27 @@ export class TransitionItemsFromPrimaryToSecondaryDatabaseForUser implements Use
return Result.fail((error as Error).message)
}
}

private async updateTransitionStatus(userUuid: Uuid, status: string, timestamp: number): Promise<void> {
await this.domainEventPublisher.publish(
this.domainEventFactory.createTransitionStatusUpdatedEvent({
userUuid: userUuid.value,
status,
transitionType: 'items',
transitionTimestamp: timestamp,
}),
)
}

private async isAlreadyMigrated(userUuid: Uuid): Promise<boolean> {
const totalItemsCountForUserInPrimary = await this.primaryItemRepository.countAll({
userUuid: userUuid.value,
})

if (totalItemsCountForUserInPrimary > 0) {
this.logger.info(`[${userUuid.value}] User has ${totalItemsCountForUserInPrimary} items in primary database.`)
}

return totalItemsCountForUserInPrimary === 0
}
}
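Both the migration loop and the integrity check above walk the user's items page by page, resuming from whatever page the transition repository last recorded. As a self-contained restatement of the paging arithmetic (hypothetical helper, not part of the class):

// Sketch only: reproduces the offset/limit math used by the loops above.
function pageWindows(totalItems: number, pageSize: number): Array<{ page: number; offset: number; limit: number }> {
  const totalPages = Math.ceil(totalItems / pageSize)
  const windows: Array<{ page: number; offset: number; limit: number }> = []
  for (let currentPage = 1; currentPage <= totalPages; currentPage++) {
    windows.push({ page: currentPage, offset: (currentPage - 1) * pageSize, limit: pageSize })
  }
  return windows
}

// Example: 2,500 items with a page size of 1,000 give pages 1-3 with offsets 0, 1000 and 2000.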
@@ -1,3 +1,4 @@
export interface TransitionItemsFromPrimaryToSecondaryDatabaseForUserDTO {
userUuid: string
timestamp: number
}
@@ -92,6 +92,7 @@ export class BaseSharedVaultsController extends BaseHttpController {
const result = await this.deleteSharedVaultUseCase.execute({
sharedVaultUuid: request.params.sharedVaultUuid,
originatorUuid: response.locals.user.uuid,
allowSurviving: false,
})

if (result.isFailed()) {
@@ -3,10 +3,25 @@ import * as IORedis from 'ioredis'
import { TransitionRepositoryInterface } from '../../Domain/Transition/TransitionRepositoryInterface'

export class RedisTransitionRepository implements TransitionRepositoryInterface {
private readonly PREFIX = 'transition-items-paging-progress'
private readonly PREFIX = 'transition-items-migration-progress'
private readonly INTEGRITY_PREFIX = 'transition-items-integrity-progress'

constructor(private redisClient: IORedis.Redis) {}

async getIntegrityProgress(userUuid: string): Promise<number> {
const progress = await this.redisClient.get(`${this.INTEGRITY_PREFIX}:${userUuid}`)

if (progress === null) {
return 1
}

return parseInt(progress)
}

async setIntegrityProgress(userUuid: string, progress: number): Promise<void> {
await this.redisClient.setex(`${this.INTEGRITY_PREFIX}:${userUuid}`, 172_800, progress.toString())
}

async getPagingProgress(userUuid: string): Promise<number> {
const progress = await this.redisClient.get(`${this.PREFIX}:${userUuid}`)
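The Redis-backed repository keeps paging and integrity progress under separate key prefixes and expires the integrity progress after 172,800 seconds (48 hours), so an abandoned transition does not leave stale markers behind. An illustrative wiring sketch (the connection URL and the import location of RedisTransitionRepository are assumptions):

import * as IORedis from 'ioredis'

// Sketch only: values and wiring are illustrative.
async function transitionProgressExample(): Promise<void> {
  const redisClient = new IORedis.Redis(process.env.REDIS_URL ?? 'redis://localhost:6379')
  const transitionRepository = new RedisTransitionRepository(redisClient)

  await transitionRepository.setIntegrityProgress('00000000-0000-0000-0000-000000000000', 5)
  // Logs 5 here; falls back to 1 once the key is missing or expired.
  console.log(await transitionRepository.getIntegrityProgress('00000000-0000-0000-0000-000000000000'))
}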
@@ -3,6 +3,10 @@
All notable changes to this project will be documented in this file.
See [Conventional Commits](https://conventionalcommits.org) for commit guidelines.

## [1.11.3](https://github.com/standardnotes/server/compare/@standardnotes/websockets-server@1.11.2...@standardnotes/websockets-server@1.11.3) (2023-10-03)

**Note:** Version bump only for package @standardnotes/websockets-server

## [1.11.2](https://github.com/standardnotes/server/compare/@standardnotes/websockets-server@1.11.1...@standardnotes/websockets-server@1.11.2) (2023-09-28)

**Note:** Version bump only for package @standardnotes/websockets-server
@@ -1,6 +1,6 @@
{
"name": "@standardnotes/websockets-server",
"version": "1.11.2",
"version": "1.11.3",
"engines": {
"node": ">=18.0.0 <21.0.0"
},
257 yarn.lock
@@ -1426,6 +1426,13 @@ __metadata:
languageName: node
linkType: hard

"@aws-sdk/service-error-classification@npm:^3.4.1":
version: 3.370.0
resolution: "@aws-sdk/service-error-classification@npm:3.370.0"
checksum: 500f067ba170c8801437cb317078e6cbb3bc523e476cf773b9cdb21af76bc78d2daf37d63aa9ea3af720c24fac3fb387e073dd4865ee02ac356ae0fb9efd39cd
languageName: node
linkType: hard

"@aws-sdk/shared-ini-file-loader@npm:3.342.0":
version: 3.342.0
resolution: "@aws-sdk/shared-ini-file-loader@npm:3.342.0"
@@ -1598,6 +1605,16 @@ __metadata:
languageName: node
linkType: hard

"@aws-sdk/types@npm:^3.4.1":
version: 3.418.0
resolution: "@aws-sdk/types@npm:3.418.0"
dependencies:
"@smithy/types": "npm:^2.3.3"
tslib: "npm:^2.5.0"
checksum: 627955c2c92f7dd80ab5ac0fd23b6f5d5ff7a8cbc3dcc6f8b86b702f73b844219c3192990dc7048bbca9b36e2e46cdb48d21a8dc3eaf36861623348c1c1427a1
languageName: node
linkType: hard

"@aws-sdk/url-parser@npm:3.342.0":
version: 3.342.0
resolution: "@aws-sdk/url-parser@npm:3.342.0"
@@ -4385,6 +4402,15 @@ __metadata:
languageName: node
linkType: hard

"@smithy/types@npm:^2.3.3":
version: 2.3.4
resolution: "@smithy/types@npm:2.3.4"
dependencies:
tslib: "npm:^2.5.0"
checksum: 8a5ad3b47e6318215786bc61787e1ff7a11b002c9d27b4af2d307edbfa522d21097b2a6bd7f83657736f6c646a61e03cd2d1be3c3f8f7353860e976e64323584
languageName: node
linkType: hard

"@smithy/url-parser@npm:^2.0.5":
version: 2.0.5
resolution: "@smithy/url-parser@npm:2.0.5"
@@ -4723,6 +4749,7 @@ __metadata:
"@types/uuid": "npm:^9.0.3"
"@typescript-eslint/eslint-plugin": "npm:^6.5.0"
"@typescript-eslint/parser": "npm:^6.5.0"
aws-xray-sdk: "npm:^3.5.2"
axios: "npm:^1.1.3"
bcryptjs: "npm:2.4.3"
cors: "npm:2.8.5"
@@ -4807,6 +4834,7 @@ __metadata:
"@types/newrelic": "npm:^9.14.0"
"@typescript-eslint/eslint-plugin": "npm:^6.5.0"
"@typescript-eslint/parser": "npm:^6.5.0"
aws-xray-sdk: "npm:^3.5.2"
eslint: "npm:^8.39.0"
eslint-plugin-prettier: "npm:^5.0.0"
ioredis: "npm:^5.2.4"
@@ -5491,6 +5519,15 @@ __metadata:
languageName: node
linkType: hard

"@types/cls-hooked@npm:^4.3.3":
version: 4.3.6
resolution: "@types/cls-hooked@npm:4.3.6"
dependencies:
"@types/node": "npm:*"
checksum: f5b9864348c132fef6983b6b3aab233599ed3d93849c28e73d7498d664991e5675cf052899bb7a81991f7d9450b6c6c895b7464c4797a31ab3b6f52502f44e8c
languageName: node
linkType: hard

"@types/concat-stream@npm:^1.6.0":
version: 1.6.1
resolution: "@types/concat-stream@npm:1.6.1"
@@ -5716,6 +5753,15 @@ __metadata:
languageName: node
linkType: hard

"@types/mysql@npm:*":
version: 2.15.22
resolution: "@types/mysql@npm:2.15.22"
dependencies:
"@types/node": "npm:*"
checksum: 6be0aac58fe5c0f20ebf149d2ab228c620f751569a24fda33df457e0520b3c2f071bda06973ad54815ef54b0e0fa2176e56aba96b65b5990054930f4e2b7bb4e
languageName: node
linkType: hard

"@types/newrelic@npm:^9.14.0":
version: 9.14.0
resolution: "@types/newrelic@npm:9.14.0"
@@ -5776,6 +5822,17 @@ __metadata:
languageName: node
linkType: hard

"@types/pg@npm:*":
version: 8.10.3
resolution: "@types/pg@npm:8.10.3"
dependencies:
"@types/node": "npm:*"
pg-protocol: "npm:*"
pg-types: "npm:^4.0.1"
checksum: 22d4836bd9a272e047e068b2bd9278287137ad191651952474fc13e3dd8e70df6b3fc1211bf1cd353cd7591fb6b9754a0755929e8a2a584a5f76921a91cff488
languageName: node
linkType: hard

"@types/prettier@npm:^2.1.5":
version: 2.7.3
resolution: "@types/prettier@npm:2.7.3"
@@ -6552,6 +6609,15 @@ __metadata:
languageName: node
linkType: hard

"async-hook-jl@npm:^1.7.6":
version: 1.7.6
resolution: "async-hook-jl@npm:1.7.6"
dependencies:
stack-chain: "npm:^1.3.7"
checksum: f61a3bd4c34c01dfdf7f571a22b5308b5c4cfc1574879bf57d86384e1944f50d4dc4873dbb31e718801dd1121604b22c316f88e5abd0f44b8ba15c97b4b73388
languageName: node
linkType: hard

"async@npm:^3.2.3":
version: 3.2.4
resolution: "async@npm:3.2.4"
@@ -6566,6 +6632,72 @@ __metadata:
languageName: node
linkType: hard

"atomic-batcher@npm:^1.0.2":
version: 1.0.2
resolution: "atomic-batcher@npm:1.0.2"
checksum: 025e334f1f2d0e47cb0703c6c4ae626457fc0d542f55cdf7ccb7dc9068198c743f05a96e0d6d5d2c08a58496acf536fb37751ed84d94aba1b2e76493b160d2b7
languageName: node
linkType: hard

"aws-xray-sdk-core@npm:3.5.2":
version: 3.5.2
resolution: "aws-xray-sdk-core@npm:3.5.2"
dependencies:
"@aws-sdk/service-error-classification": "npm:^3.4.1"
"@aws-sdk/types": "npm:^3.4.1"
"@types/cls-hooked": "npm:^4.3.3"
atomic-batcher: "npm:^1.0.2"
cls-hooked: "npm:^4.2.2"
semver: "npm:^7.5.3"
checksum: a643998187ac38d49b2a79c3633223a261a8a6290e64ba450359c965bd4aed070ae48634f9d4c9a835f0b7cdc2da1498399b891a6c2aa5cbd2a5a8308a83940f
languageName: node
linkType: hard

"aws-xray-sdk-express@npm:3.5.2":
version: 3.5.2
resolution: "aws-xray-sdk-express@npm:3.5.2"
dependencies:
"@types/express": "npm:*"
peerDependencies:
aws-xray-sdk-core: ^3.5.2
checksum: 62a07d0f3ba647d02d597ee749a56669dcd62b16c3eccd2ff661e2cfad3135b4af78fa4cf66fc6a32d636914713f89c399387c64575b9022b6d9e86002e6c61a
languageName: node
linkType: hard

"aws-xray-sdk-mysql@npm:3.5.2":
version: 3.5.2
resolution: "aws-xray-sdk-mysql@npm:3.5.2"
dependencies:
"@types/mysql": "npm:*"
peerDependencies:
aws-xray-sdk-core: ^3.5.2
checksum: f910a96630169fa1591b68aaac591d24d116ea749dc49e66340bb70915d474d45ddeead3c46b49b1fec4516d5a586e10960b5ff1e5553e6fe2d287caff98e03e
languageName: node
linkType: hard

"aws-xray-sdk-postgres@npm:3.5.2":
version: 3.5.2
resolution: "aws-xray-sdk-postgres@npm:3.5.2"
dependencies:
"@types/pg": "npm:*"
peerDependencies:
aws-xray-sdk-core: ^3.5.2
checksum: f2d6da22c74b65ba30d700776924ab7a5037e11f428545ba5b3bbdbacac095e958ceb829db2aa8445f1c718c8e375ab98d1dd9c7a2f5eb486c53b82ea414c768
languageName: node
linkType: hard

"aws-xray-sdk@npm:^3.5.2":
version: 3.5.2
resolution: "aws-xray-sdk@npm:3.5.2"
dependencies:
aws-xray-sdk-core: "npm:3.5.2"
aws-xray-sdk-express: "npm:3.5.2"
aws-xray-sdk-mysql: "npm:3.5.2"
aws-xray-sdk-postgres: "npm:3.5.2"
checksum: 576d0d5ccb0f069e8da0afd041a47f1516ab5f8e637fa9595f711da136a49db57aff37b662b3c123234b4934a0c582c8deee677856b3c82884541e12b63df377
languageName: node
linkType: hard

"axios@npm:0.21.4":
version: 0.21.4
resolution: "axios@npm:0.21.4"
@@ -7246,6 +7378,17 @@ __metadata:
languageName: node
linkType: hard

"cls-hooked@npm:^4.2.2":
version: 4.2.2
resolution: "cls-hooked@npm:4.2.2"
dependencies:
async-hook-jl: "npm:^1.7.6"
emitter-listener: "npm:^1.0.1"
semver: "npm:^5.4.1"
checksum: 59081fcc0f8b7ed929ac8eb0d16bd96946c82b3dd6a89213013e70874e5e7e202c09b07fc0ef0e2dd91b375c3f86d8d57b695e6a3e3bb9e6e25b20f144d712e8
languageName: node
linkType: hard

"cluster-key-slot@npm:^1.1.0":
version: 1.1.2
resolution: "cluster-key-slot@npm:1.1.2"
@@ -7982,6 +8125,15 @@ __metadata:
languageName: node
linkType: hard

"emitter-listener@npm:^1.0.1":
version: 1.1.2
resolution: "emitter-listener@npm:1.1.2"
dependencies:
shimmer: "npm:^1.2.0"
checksum: 697f53c30841eb380240b27b385f55596d66ff2d8c479ca3af2ad448cbbeb930d87f7c70105be5467a1424bdd0dfb161173238df413a2c79d8263b9140f917be
languageName: node
linkType: hard

"emittery@npm:^0.13.1":
version: 0.13.1
resolution: "emittery@npm:0.13.1"
@@ -11888,6 +12040,13 @@ __metadata:
languageName: node
linkType: hard

"obuf@npm:~1.1.2":
version: 1.1.2
resolution: "obuf@npm:1.1.2"
checksum: 53ff4ab3a13cc33ba6c856cf281f2965c0aec9720967af450e8fd06cfd50aceeefc791986a16bcefa14e7898b3ca9acdfcf15b9d9a1b9c7e1366581a8ad6e65e
languageName: node
linkType: hard

"on-finished@npm:2.4.1":
version: 2.4.1
resolution: "on-finished@npm:2.4.1"
@@ -12309,6 +12468,42 @@ __metadata:
languageName: node
linkType: hard

"pg-int8@npm:1.0.1":
version: 1.0.1
resolution: "pg-int8@npm:1.0.1"
checksum: a1e3a05a69005ddb73e5f324b6b4e689868a447c5fa280b44cd4d04e6916a344ac289e0b8d2695d66e8e89a7fba023affb9e0e94778770ada5df43f003d664c9
languageName: node
linkType: hard

"pg-numeric@npm:1.0.2":
version: 1.0.2
resolution: "pg-numeric@npm:1.0.2"
checksum: 8899f8200caa1744439a8778a9eb3ceefb599d893e40a09eef84ee0d4c151319fd416634a6c0fc7b7db4ac268710042da5be700b80ef0de716fe089b8652c84f
languageName: node
linkType: hard

"pg-protocol@npm:*":
version: 1.6.0
resolution: "pg-protocol@npm:1.6.0"
checksum: 995864cc2a8517368b84697c753caff769a4db292eda66f96d9eec46e3aa84737cd0b0fe171aca9d7d4b4a4c46bb25bd399713cb1027a5bf8f38adea0b4284f4
languageName: node
linkType: hard

"pg-types@npm:^4.0.1":
version: 4.0.1
resolution: "pg-types@npm:4.0.1"
dependencies:
pg-int8: "npm:1.0.1"
pg-numeric: "npm:1.0.2"
postgres-array: "npm:~3.0.1"
postgres-bytea: "npm:~3.0.0"
postgres-date: "npm:~2.0.1"
postgres-interval: "npm:^3.0.0"
postgres-range: "npm:^1.1.1"
checksum: 2c686ef361856ff9695a40cf34d7bb59bcf2218bd1f121d7fc8b77a75cc4106c09519e9520a82aac8fce4540dec81f695458b8e43bfc420ba74eb9b5005b5619
languageName: node
linkType: hard

"picocolors@npm:^1.0.0":
version: 1.0.0
resolution: "picocolors@npm:1.0.0"
@@ -12370,6 +12565,43 @@ __metadata:
languageName: node
linkType: hard

"postgres-array@npm:~3.0.1":
version: 3.0.2
resolution: "postgres-array@npm:3.0.2"
checksum: 0159517e4e5f263bf9e324f0c4d3c10244a294021f2b5980abc8c23afdb965370a7fc0c82012fce4d28e83186ad089b6476b05fcef6c88f8e43e37a3a2fa0ad5
languageName: node
linkType: hard

"postgres-bytea@npm:~3.0.0":
version: 3.0.0
resolution: "postgres-bytea@npm:3.0.0"
dependencies:
obuf: "npm:~1.1.2"
checksum: f5c01758fd2fa807afbd34e1ba2146f683818ebc2d23f4a62f0fd627c0b1126fc543cab1b63925f97ce6c7d8f5f316043218619c447445210ea82f10411efb1b
languageName: node
linkType: hard

"postgres-date@npm:~2.0.1":
version: 2.0.1
resolution: "postgres-date@npm:2.0.1"
checksum: 908eacec35faf9b6fa4248074e7c8db0fc4ac6a2e0a00eacef9fb9cf44a25fc6f100af7cda09f5d89d076c87923b59e940a309560d86aded4753985c97255be1
languageName: node
linkType: hard

"postgres-interval@npm:^3.0.0":
version: 3.0.0
resolution: "postgres-interval@npm:3.0.0"
checksum: c7a1cf006de97de663b6b8c4d2b167aa9909a238c4866a94b15d303762f5ac884ff4796cd6e2111b7f0a91302b83c570453aa8506fd005b5a5d5dfa87441bebc
languageName: node
linkType: hard

"postgres-range@npm:^1.1.1":
version: 1.1.3
resolution: "postgres-range@npm:1.1.3"
checksum: 356a46d97eab301cca0dbf7a65be6f68c4a7e33d304909e5ca840038b1210390e1d33fcc3b566c42020f7b97e9de189bc5286d1ec8d7ba2445ed3377c576ce9e
languageName: node
linkType: hard

"prelude-ls@npm:^1.2.1":
version: 1.2.1
resolution: "prelude-ls@npm:1.2.1"
@@ -13078,6 +13310,15 @@ __metadata:
languageName: node
linkType: hard

"semver@npm:^5.4.1":
version: 5.7.2
resolution: "semver@npm:5.7.2"
bin:
semver: bin/semver
checksum: fca14418a174d4b4ef1fecb32c5941e3412d52a4d3d85165924ce3a47fbc7073372c26faf7484ceb4bbc2bde25880c6b97e492473dc7e9708fdfb1c6a02d546e
languageName: node
linkType: hard

"semver@npm:^6.0.0, semver@npm:^6.3.0":
version: 6.3.0
resolution: "semver@npm:6.3.0"
@@ -13087,7 +13328,7 @@ __metadata:
languageName: node
linkType: hard

"semver@npm:^7.5.2, semver@npm:^7.5.4":
"semver@npm:^7.5.2, semver@npm:^7.5.3, semver@npm:^7.5.4":
version: 7.5.4
resolution: "semver@npm:7.5.4"
dependencies:
@@ -13198,6 +13439,13 @@ __metadata:
languageName: node
linkType: hard

"shimmer@npm:^1.2.0":
version: 1.2.1
resolution: "shimmer@npm:1.2.1"
checksum: aa0d6252ad1c682a4fdfda69e541be987f7a265ac7b00b1208e5e48cc68dc55f293955346ea4c71a169b7324b82c70f8400b3d3d2d60b2a7519f0a3522423250
languageName: node
linkType: hard

"side-channel@npm:^1.0.4":
version: 1.0.4
resolution: "side-channel@npm:1.0.4"
@@ -13482,6 +13730,13 @@ __metadata:
languageName: node
linkType: hard

"stack-chain@npm:^1.3.7":
version: 1.3.7
resolution: "stack-chain@npm:1.3.7"
checksum: 6420637b7607566763f2452aa058af06ad31773333c4bb55ceb2a71338016fd82f55425bf2ea950bf148576b28d72a235ec46b8f01d117a194a2ec123e577d18
languageName: node
linkType: hard

"stack-trace@npm:0.0.x":
version: 0.0.10
resolution: "stack-trace@npm:0.0.10"