Mirror of https://github.com/standardnotes/server
Synced 2026-01-17 23:04:34 -05:00
Compare commits
36 Commits
| SHA1 |
|---|
| ed14d2dfb7 |
| 64f1fe59c2 |
| 207ef9f3e5 |
| 3ea0435459 |
| 7668713dd6 |
| f6bc1c3084 |
| 4203ff9a0c |
| 0281724196 |
| 94bd319279 |
| 373cfad1f7 |
| 59107f8553 |
| e066b6a126 |
| a0e4926135 |
| b1b244a2cf |
| b4a38d9dcc |
| 71e2a4187e |
| c94f9c1a60 |
| b9907924be |
| d4c188c096 |
| 2ccaf490df |
| cd893b41d7 |
| 3838358081 |
| 1a388f00c3 |
| bd71422fab |
| 3e1697f47f |
| 032cde7723 |
| 34b956b482 |
| 681e0378ae |
| 609e85f926 |
| e4ca310707 |
| d606493356 |
| 5ef6c5c14a |
| 0188f290f9 |
| 676cf36f8d |
| f8aef6c8ef |
| 5bf8cf49c1 |
159 .github/workflows/common-e2e.yml (vendored)
@@ -7,6 +7,10 @@ on:
        type: string
        default: latest
        description: The Docker image tag used for SNJS container
      suite:
        type: string
        default: all
        description: The test suite to run
    secrets:
      DOCKER_USERNAME:
        required: true
@@ -18,149 +22,16 @@ on:
        required: true

jobs:
  e2e:
    name: (Self Hosting) E2E Test Suite
    strategy:
      fail-fast: false
      matrix:
        secondary_db_enabled: [true, false]
    runs-on: ubuntu-latest

    services:
      snjs:
        image: standardnotes/snjs:${{ inputs.snjs_image_tag }}
        ports:
          - 9001:9001

    steps:
      - uses: actions/checkout@v3

      - name: Set up Node
        uses: actions/setup-node@v3
        with:
          registry-url: 'https://registry.npmjs.org'
          node-version-file: '.nvmrc'

      - name: Install
        run: yarn install --immutable

      - name: Run Server
        run: docker compose -f docker-compose.ci.yml up -d
        env:
          DB_TYPE: mysql
          CACHE_TYPE: redis
          SECONDARY_DB_ENABLED: ${{ matrix.secondary_db_enabled }}

      - name: Wait for server to start
        run: docker/is-available.sh http://localhost:3123 $(pwd)/logs

      - name: Run E2E Test Suite
        run: yarn dlx mocha-headless-chrome --timeout 3600000 -f http://localhost:9001/mocha/test.html

      - name: Show logs on failure
        if: ${{ failure() }}
        run: |
          echo "# Errors:"
          tail -n 100 logs/*.err
          echo "# Logs:"
          tail -n 100 logs/*.log
  e2e-self-hosted:
    uses: standardnotes/server/.github/workflows/e2e-self-hosted.yml@main
    with:
      snjs_image_tag: ${{ inputs.snjs_image_tag }}
      suite: ${{ inputs.suite }}
    secrets: inherit

  e2e-home-server:
    name: (Home Server) E2E Test Suite
    strategy:
      fail-fast: false
      matrix:
        db_type: [mysql, sqlite]
        cache_type: [redis, memory]
        secondary_db_enabled: [true, false]

    runs-on: ubuntu-latest

    services:
      snjs:
        image: standardnotes/snjs:${{ inputs.snjs_image_tag }}
        ports:
          - 9001:9001
      cache:
        image: redis
        ports:
          - 6379:6379
      db:
        image: mysql
        ports:
          - 3306:3306
        env:
          MYSQL_ROOT_PASSWORD: root
          MYSQL_DATABASE: standardnotes
          MYSQL_USER: standardnotes
          MYSQL_PASSWORD: standardnotes
      secondary_db:
        image: mongo:5.0
        ports:
          - 27017:27017
        env:
          MONGO_INITDB_ROOT_USERNAME: standardnotes
          MONGO_INITDB_ROOT_PASSWORD: standardnotes
          MONGO_INITDB_DATABASE: standardnotes

    steps:
      - uses: actions/checkout@v3

      - name: Set up Node
        uses: actions/setup-node@v3
        with:
          registry-url: 'https://registry.npmjs.org'
          node-version-file: '.nvmrc'

      - name: Install Dependencies
        run: yarn install --immutable

      - name: Build
        run: yarn build

      - name: Copy dotenv file
        run: cp packages/home-server/.env.sample packages/home-server/.env

      - name: Fill in env variables
        run: |
          sed -i "s/JWT_SECRET=/JWT_SECRET=$(openssl rand -hex 32)/g" packages/home-server/.env
          sed -i "s/AUTH_JWT_SECRET=/AUTH_JWT_SECRET=$(openssl rand -hex 32)/g" packages/home-server/.env
          sed -i "s/ENCRYPTION_SERVER_KEY=/ENCRYPTION_SERVER_KEY=$(openssl rand -hex 32)/g" packages/home-server/.env
          sed -i "s/PSEUDO_KEY_PARAMS_KEY=/PSEUDO_KEY_PARAMS_KEY=$(openssl rand -hex 32)/g" packages/home-server/.env
          sed -i "s/VALET_TOKEN_SECRET=/VALET_TOKEN_SECRET=$(openssl rand -hex 32)/g" packages/home-server/.env
          echo "ACCESS_TOKEN_AGE=4" >> packages/home-server/.env
          echo "REFRESH_TOKEN_AGE=10" >> packages/home-server/.env
          echo "REVISIONS_FREQUENCY=2" >> packages/home-server/.env
          echo "DB_HOST=localhost" >> packages/home-server/.env
          echo "DB_PORT=3306" >> packages/home-server/.env
          echo "DB_DATABASE=standardnotes" >> packages/home-server/.env
          echo "DB_SQLITE_DATABASE_PATH=homeserver.db" >> packages/home-server/.env
          echo "DB_USERNAME=standardnotes" >> packages/home-server/.env
          echo "DB_PASSWORD=standardnotes" >> packages/home-server/.env
          echo "DB_TYPE=${{ matrix.db_type }}" >> packages/home-server/.env
          echo "DB_DEBUG_LEVEL=all" >> packages/home-server/.env
          echo "REDIS_URL=redis://localhost:6379" >> packages/home-server/.env
          echo "CACHE_TYPE=${{ matrix.cache_type }}" >> packages/home-server/.env
          echo "SECONDARY_DB_ENABLED=${{ matrix.secondary_db_enabled }}" >> packages/home-server/.env
          echo "MONGO_HOST=localhost" >> packages/home-server/.env
          echo "MONGO_PORT=27017" >> packages/home-server/.env
          echo "MONGO_DATABASE=standardnotes" >> packages/home-server/.env
          echo "MONGO_USERNAME=standardnotes" >> packages/home-server/.env
          echo "MONGO_PASSWORD=standardnotes" >> packages/home-server/.env
          echo "FILES_SERVER_URL=http://localhost:3123" >> packages/home-server/.env
          echo "E2E_TESTING=true" >> packages/home-server/.env

      - name: Run Server
        run: nohup yarn workspace @standardnotes/home-server start > logs/output.log 2>&1 &
        env:
          PORT: 3123

      - name: Wait for server to start
        run: for i in {1..30}; do curl -s http://localhost:3123/healthcheck && break || sleep 1; done

      - name: Run E2E Test Suite
        run: yarn dlx mocha-headless-chrome --timeout 3600000 -f http://localhost:9001/mocha/test.html

      - name: Show logs on failure
        if: ${{ failure() }}
        run: tail -n 500 logs/output.log
    uses: standardnotes/server/.github/workflows/e2e-home-server.yml@main
    with:
      snjs_image_tag: ${{ inputs.snjs_image_tag }}
      suite: ${{ inputs.suite }}
    secrets: inherit
128 .github/workflows/e2e-home-server.yml (vendored, new file)
@@ -0,0 +1,128 @@
name: E2E Home Server Test Suite

on:
  workflow_call:
    inputs:
      snjs_image_tag:
        type: string
        default: latest
        description: The Docker image tag used for SNJS container
      suite:
        type: string
        default: all
        description: The test suite to run
    secrets:
      DOCKER_USERNAME:
        required: true
      DOCKER_PASSWORD:
        required: true
      AWS_ACCESS_KEY_ID:
        required: true
      AWS_SECRET_ACCESS_KEY:
        required: true

jobs:
  e2e-home-server:
    name: (Home Server) E2E Test Suite
    strategy:
      fail-fast: false
      matrix:
        db_type: [mysql, sqlite]
        cache_type: [redis, memory]
        secondary_db_enabled: [true, false]

    runs-on: ubuntu-latest

    services:
      snjs:
        image: standardnotes/snjs:${{ inputs.snjs_image_tag }}
        ports:
          - 9001:9001
      cache:
        image: redis
        ports:
          - 6379:6379
      db:
        image: mysql
        ports:
          - 3306:3306
        env:
          MYSQL_ROOT_PASSWORD: root
          MYSQL_DATABASE: standardnotes
          MYSQL_USER: standardnotes
          MYSQL_PASSWORD: standardnotes
      secondary_db:
        image: mongo:5.0
        ports:
          - 27017:27017
        env:
          MONGO_INITDB_ROOT_USERNAME: standardnotes
          MONGO_INITDB_ROOT_PASSWORD: standardnotes
          MONGO_INITDB_DATABASE: standardnotes

    steps:
      - uses: actions/checkout@v3

      - name: Set up Node
        uses: actions/setup-node@v3
        with:
          registry-url: 'https://registry.npmjs.org'
          node-version-file: '.nvmrc'

      - name: Install Dependencies
        run: yarn install --immutable

      - name: Build
        run: yarn build

      - name: Copy dotenv file
        run: cp packages/home-server/.env.sample packages/home-server/.env

      - name: Fill in env variables
        run: |
          sed -i "s/JWT_SECRET=/JWT_SECRET=$(openssl rand -hex 32)/g" packages/home-server/.env
          sed -i "s/AUTH_JWT_SECRET=/AUTH_JWT_SECRET=$(openssl rand -hex 32)/g" packages/home-server/.env
          sed -i "s/ENCRYPTION_SERVER_KEY=/ENCRYPTION_SERVER_KEY=$(openssl rand -hex 32)/g" packages/home-server/.env
          sed -i "s/PSEUDO_KEY_PARAMS_KEY=/PSEUDO_KEY_PARAMS_KEY=$(openssl rand -hex 32)/g" packages/home-server/.env
          sed -i "s/VALET_TOKEN_SECRET=/VALET_TOKEN_SECRET=$(openssl rand -hex 32)/g" packages/home-server/.env
          echo "ACCESS_TOKEN_AGE=4" >> packages/home-server/.env
          echo "REFRESH_TOKEN_AGE=10" >> packages/home-server/.env
          echo "REVISIONS_FREQUENCY=2" >> packages/home-server/.env
          echo "DB_HOST=localhost" >> packages/home-server/.env
          echo "DB_PORT=3306" >> packages/home-server/.env
          echo "DB_DATABASE=standardnotes" >> packages/home-server/.env
          echo "DB_SQLITE_DATABASE_PATH=homeserver.db" >> packages/home-server/.env
          echo "DB_USERNAME=standardnotes" >> packages/home-server/.env
          echo "DB_PASSWORD=standardnotes" >> packages/home-server/.env
          echo "DB_TYPE=${{ matrix.db_type }}" >> packages/home-server/.env
          echo "DB_DEBUG_LEVEL=all" >> packages/home-server/.env
          echo "REDIS_URL=redis://localhost:6379" >> packages/home-server/.env
          echo "CACHE_TYPE=${{ matrix.cache_type }}" >> packages/home-server/.env
          echo "SECONDARY_DB_ENABLED=${{ matrix.secondary_db_enabled }}" >> packages/home-server/.env
          echo "MONGO_HOST=localhost" >> packages/home-server/.env
          echo "MONGO_PORT=27017" >> packages/home-server/.env
          echo "MONGO_DATABASE=standardnotes" >> packages/home-server/.env
          echo "MONGO_USERNAME=standardnotes" >> packages/home-server/.env
          echo "MONGO_PASSWORD=standardnotes" >> packages/home-server/.env
          echo "FILES_SERVER_URL=http://localhost:3123" >> packages/home-server/.env
          echo "E2E_TESTING=true" >> packages/home-server/.env

      - name: Run Server
        run: nohup yarn workspace @standardnotes/home-server start > logs/output.log 2>&1 &
        env:
          PORT: 3123

      - name: Wait for server to start
        run: for i in {1..30}; do curl -s http://localhost:3123/healthcheck && break || sleep 1; done

      - name: Run E2E Test Suite
        run: yarn dlx mocha-headless-chrome --timeout 3600000 -f http://localhost:9001/mocha/test.html?suite=${{ inputs.suite }}

      - name: Archive failed run logs
        if: ${{ failure() }}
        uses: actions/upload-artifact@v3
        with:
          name: home-server-failure-logs-${{ inputs.suite }}-${{ matrix.db_type }}-${{ matrix.cache_type }}-${{ matrix.secondary_db_enabled }}
          retention-days: 5
          path: |
            logs/output.log
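The home-server job waits for the healthcheck with a plain shell retry loop (thirty one-second attempts against http://localhost:3123/healthcheck). Below is a minimal TypeScript sketch of the same wait using Node 18's built-in fetch; the helper name is hypothetical, and unlike the shell loop this version exits non-zero if the server never becomes healthy.

```ts
// Sketch only: equivalent of the workflow's curl retry loop in Node 18+ TypeScript.
async function waitForHealthcheck(url: string, attempts = 30): Promise<void> {
  for (let attempt = 0; attempt < attempts; attempt++) {
    try {
      const response = await fetch(url)
      if (response.ok) {
        return
      }
    } catch {
      // Server is not accepting connections yet; fall through to the sleep below.
    }
    await new Promise((resolve) => setTimeout(resolve, 1000))
  }
  throw new Error(`${url} did not become healthy after ${attempts} attempts`)
}

waitForHealthcheck('http://localhost:3123/healthcheck').catch((error) => {
  console.error(error)
  process.exit(1)
})
```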
72 .github/workflows/e2e-self-hosted.yml (vendored, new file)
@@ -0,0 +1,72 @@
name: E2E Self Hosted Test Suite

on:
  workflow_call:
    inputs:
      snjs_image_tag:
        type: string
        default: latest
        description: The Docker image tag used for SNJS container
      suite:
        type: string
        default: all
        description: The test suite to run
    secrets:
      DOCKER_USERNAME:
        required: true
      DOCKER_PASSWORD:
        required: true
      AWS_ACCESS_KEY_ID:
        required: true
      AWS_SECRET_ACCESS_KEY:
        required: true

jobs:
  e2e:
    name: (Self Hosting) E2E Test Suite
    strategy:
      fail-fast: false
      matrix:
        secondary_db_enabled: [true, false]
    runs-on: ubuntu-latest

    services:
      snjs:
        image: standardnotes/snjs:${{ inputs.snjs_image_tag }}
        ports:
          - 9001:9001

    steps:
      - uses: actions/checkout@v3

      - name: Set up Node
        uses: actions/setup-node@v3
        with:
          registry-url: 'https://registry.npmjs.org'
          node-version-file: '.nvmrc'

      - name: Install
        run: yarn install --immutable

      - name: Run Server
        run: docker compose -f docker-compose.ci.yml up -d
        env:
          DB_TYPE: mysql
          CACHE_TYPE: redis
          SECONDARY_DB_ENABLED: ${{ matrix.secondary_db_enabled }}

      - name: Wait for server to start
        run: docker/is-available.sh http://localhost:3123 $(pwd)/logs

      - name: Run E2E Test Suite
        run: yarn dlx mocha-headless-chrome --timeout 3600000 -f http://localhost:9001/mocha/test.html?suite=${{ inputs.suite }}

      - name: Archive failed run logs
        if: ${{ failure() }}
        uses: actions/upload-artifact@v3
        with:
          name: self-hosted-failure-logs-${{ inputs.suite }}-${{ matrix.secondary_db_enabled }}
          retention-days: 5
          path: |
            logs/*.err
            logs/*.log
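Both new workflows forward the suite selection to the SNJS container as a query parameter on the mocha page (test.html?suite=...). How the test page consumes that parameter is not part of this diff; a browser-side TypeScript sketch of the assumed mechanism, for illustration only:

```ts
// Illustrative assumption: the mocha test page filters registered suites by a
// `suite` query parameter; the actual SNJS test harness may be wired differently.
const suite = new URLSearchParams(window.location.search).get('suite') ?? 'all'

const shouldRun = (name: string): boolean => suite === 'all' || suite === name

if (shouldRun('base')) {
  // load and register the base test files here
}
if (shouldRun('vaults')) {
  // load and register the vaults test files here
}
```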
9 .github/workflows/e2e-test-suite.yml (vendored)
@@ -1,6 +1,6 @@
name: E2E Test Suite On Self Hosted Server
name: E2E Test Suite

run-name: E2E Test Suite against ${{ inputs.ref_name }} by ${{ inputs.author }}
run-name: E2E ${{ inputs.suite }} Test Suite against ${{ inputs.ref_name }} by ${{ inputs.author }}

on:
  schedule:
@@ -11,6 +11,10 @@ on:
        type: string
        default: latest
        description: The Docker image tag used for SNJS container
      suite:
        type: string
        default: all
        description: The test suite to run
      author:
        type: string
        default: unknown
@@ -26,4 +30,5 @@ jobs:
    uses: standardnotes/server/.github/workflows/common-e2e.yml@main
    with:
      snjs_image_tag: ${{ inputs.snjs_image_tag || 'latest' }}
      suite: ${{ inputs.suite || 'all' }}
    secrets: inherit
17 .github/workflows/pr.yml (vendored)
@@ -95,9 +95,20 @@ jobs:
      - name: Test
        run: yarn test

  e2e:
  e2e-base:
    needs: build

    name: E2E
    name: E2E Base Suite
    uses: standardnotes/server/.github/workflows/common-e2e.yml@main
    with:
      snjs_image_tag: 'latest'
      suite: 'base'
    secrets: inherit

  e2e-vaults:
    needs: build
    name: E2E Vaults Suite
    uses: standardnotes/server/.github/workflows/common-e2e.yml@main
    with:
      snjs_image_tag: 'latest'
      suite: 'vaults'
    secrets: inherit
20 .github/workflows/publish.yml (vendored)
@@ -95,20 +95,32 @@ jobs:
      - name: Test
        run: yarn test

  e2e:
  e2e-base:
    needs: build
    name: E2E
    name: E2E Base Suite
    uses: standardnotes/server/.github/workflows/common-e2e.yml@main
    with:
      snjs_image_tag: 'latest'
      suite: 'base'
    secrets: inherit

  e2e-vaults:
    needs: build
    name: E2E Vaults Suite
    uses: standardnotes/server/.github/workflows/common-e2e.yml@main
    with:
      snjs_image_tag: 'latest'
      suite: 'vaults'
    secrets: inherit

  publish-self-hosting:
    needs: [ test, lint, e2e ]
    needs: [ test, lint, e2e-base, e2e-vaults ]
    name: Publish Self Hosting Docker Image
    uses: standardnotes/server/.github/workflows/common-self-hosting.yml@main
    secrets: inherit

  publish-services:
    needs: [ test, lint, e2e ]
    needs: [ test, lint, e2e-base, e2e-vaults ]

    runs-on: ubuntu-latest
319 .pnp.cjs (generated)
The generated Yarn PnP state (RAW_RUNTIME_STATE) is updated to register the new aws-xray-sdk dependency tree. The added package entries, all resolved from ./.yarn/cache with linkType HARD unless noted otherwise, are:

* @aws-sdk/service-error-classification npm:3.370.0
* @aws-sdk/types npm:3.418.0 (depends on @smithy/types npm:2.3.4 and tslib npm:2.5.2)
* @smithy/types npm:2.3.4 (depends on tslib npm:2.5.2)
* @types/cls-hooked npm:4.3.6 and @types/mysql npm:2.15.22 (both depend on @types/node npm:20.2.5)
* @types/pg npm:8.10.3 (depends on @types/node npm:20.2.5, pg-protocol npm:1.6.0, pg-types npm:4.0.1)
* async-hook-jl npm:1.7.6 (depends on stack-chain npm:1.3.7)
* atomic-batcher npm:1.0.2
* aws-xray-sdk npm:3.5.2 (depends on aws-xray-sdk-core npm:3.5.2 and the virtual instances of aws-xray-sdk-express, aws-xray-sdk-mysql and aws-xray-sdk-postgres npm:3.5.2)
* aws-xray-sdk-core npm:3.5.2 (depends on @aws-sdk/service-error-classification npm:3.370.0, @aws-sdk/types npm:3.418.0, @types/cls-hooked npm:4.3.6, atomic-batcher npm:1.0.2, cls-hooked npm:4.2.2, semver npm:7.5.4)
* aws-xray-sdk-express, aws-xray-sdk-mysql and aws-xray-sdk-postgres npm:3.5.2 (SOFT-linked base entries plus HARD-linked virtual instances that peer on aws-xray-sdk-core and the corresponding @types packages: @types/express npm:4.17.17, @types/mysql npm:2.15.22, @types/pg npm:8.10.3)
* cls-hooked npm:4.2.2 (depends on async-hook-jl npm:1.7.6, emitter-listener npm:1.1.2, semver npm:5.7.2)
* emitter-listener npm:1.1.2 (depends on shimmer npm:1.2.1)
* obuf npm:1.1.2
* pg-int8 npm:1.0.1, pg-numeric npm:1.0.2, pg-protocol npm:1.6.0
* pg-types npm:4.0.1 (depends on pg-int8, pg-numeric, postgres-array npm:3.0.2, postgres-bytea npm:3.0.0, postgres-date npm:2.0.1, postgres-interval npm:3.0.0, postgres-range npm:1.1.3)
* postgres-array npm:3.0.2, postgres-bytea npm:3.0.0 (depends on obuf npm:1.1.2), postgres-date npm:2.0.1, postgres-interval npm:3.0.0, postgres-range npm:1.1.3
* semver npm:5.7.2
* shimmer npm:1.2.1
* stack-chain npm:1.3.7

Two existing workspace package entries additionally gain ["aws-xray-sdk", "npm:3.5.2"] in their dependency lists.
BIN .yarn/cache/@aws-sdk-service-error-classification-npm-3.370.0-0d5b615252-500f067ba1.zip (vendored, new file; binary not shown)
BIN .yarn/cache/@aws-sdk-types-npm-3.418.0-451c0cadd0-627955c2c9.zip (vendored, new file; binary not shown)
BIN .yarn/cache/@smithy-types-npm-2.3.4-7d0b3a2a2f-8a5ad3b47e.zip (vendored, new file; binary not shown)
BIN .yarn/cache/@types-cls-hooked-npm-4.3.6-8787b64e86-f5b9864348.zip (vendored, new file; binary not shown)
BIN .yarn/cache/@types-mysql-npm-2.15.22-d643eb999e-6be0aac58f.zip (vendored, new file; binary not shown)
BIN .yarn/cache/@types-pg-npm-8.10.3-3fc3365c7b-22d4836bd9.zip (vendored, new file; binary not shown)
BIN .yarn/cache/async-hook-jl-npm-1.7.6-9999815029-f61a3bd4c3.zip (vendored, new file; binary not shown)
BIN .yarn/cache/atomic-batcher-npm-1.0.2-6fcd3a3097-025e334f1f.zip (vendored, new file; binary not shown)
BIN .yarn/cache/aws-xray-sdk-core-npm-3.5.2-9083a0c00f-a643998187.zip (vendored, new file; binary not shown)
BIN .yarn/cache/aws-xray-sdk-express-npm-3.5.2-c4574a664b-62a07d0f3b.zip (vendored, new file; binary not shown)
BIN .yarn/cache/aws-xray-sdk-mysql-npm-3.5.2-095483ab95-f910a96630.zip (vendored, new file; binary not shown)
BIN .yarn/cache/aws-xray-sdk-npm-3.5.2-15fc4e54ee-576d0d5ccb.zip (vendored, new file; binary not shown)
BIN .yarn/cache/aws-xray-sdk-postgres-npm-3.5.2-3a7e7bcc42-f2d6da22c7.zip (vendored, new file; binary not shown)
BIN .yarn/cache/cls-hooked-npm-4.2.2-db33b9b95f-59081fcc0f.zip (vendored, new file; binary not shown)
BIN .yarn/cache/emitter-listener-npm-1.1.2-0fe118d0b3-697f53c308.zip (vendored, new file; binary not shown)
BIN .yarn/cache/obuf-npm-1.1.2-8db5fae8dd-53ff4ab3a1.zip (vendored, new file; binary not shown)
BIN .yarn/cache/pg-int8-npm-1.0.1-5cd67f3e22-a1e3a05a69.zip (vendored, new file; binary not shown)
BIN .yarn/cache/pg-numeric-npm-1.0.2-9026ec3427-8899f8200c.zip (vendored, new file; binary not shown)
BIN .yarn/cache/pg-protocol-npm-1.6.0-089a4b1d3c-995864cc2a.zip (vendored, new file; binary not shown)
BIN .yarn/cache/pg-types-npm-4.0.1-8f922557d3-2c686ef361.zip (vendored, new file; binary not shown)
BIN .yarn/cache/postgres-array-npm-3.0.2-da6a3f1fed-0159517e4e.zip (vendored, new file; binary not shown)
BIN .yarn/cache/postgres-bytea-npm-3.0.0-5de4c664f6-f5c01758fd.zip (vendored, new file; binary not shown)
BIN .yarn/cache/postgres-date-npm-2.0.1-00e0e0bc9e-908eacec35.zip (vendored, new file; binary not shown)
BIN .yarn/cache/postgres-interval-npm-3.0.0-936c769b98-c7a1cf006d.zip (vendored, new file; binary not shown)
BIN .yarn/cache/postgres-range-npm-1.1.3-46f68e1a9e-356a46d97e.zip (vendored, new file; binary not shown)
BIN .yarn/cache/semver-npm-5.7.2-938ee91eaa-fca14418a1.zip (vendored, new file; binary not shown)
BIN .yarn/cache/shimmer-npm-1.2.1-8b50bf3206-aa0d6252ad.zip (vendored, new file; binary not shown)
BIN .yarn/cache/stack-chain-npm-1.3.7-c803ef2abb-6420637b76.zip (vendored, new file; binary not shown)
@@ -3,6 +3,26 @@
All notable changes to this project will be documented in this file.
See [Conventional Commits](https://conventionalcommits.org) for commit guidelines.

## [2.27.6](https://github.com/standardnotes/server/compare/@standardnotes/analytics@2.27.5...@standardnotes/analytics@2.27.6) (2023-10-04)

**Note:** Version bump only for package @standardnotes/analytics

## [2.27.5](https://github.com/standardnotes/server/compare/@standardnotes/analytics@2.27.4...@standardnotes/analytics@2.27.5) (2023-10-04)

**Note:** Version bump only for package @standardnotes/analytics

## [2.27.4](https://github.com/standardnotes/server/compare/@standardnotes/analytics@2.27.3...@standardnotes/analytics@2.27.4) (2023-10-03)

**Note:** Version bump only for package @standardnotes/analytics

## [2.27.3](https://github.com/standardnotes/server/compare/@standardnotes/analytics@2.27.2...@standardnotes/analytics@2.27.3) (2023-10-03)

**Note:** Version bump only for package @standardnotes/analytics

## [2.27.2](https://github.com/standardnotes/server/compare/@standardnotes/analytics@2.27.1...@standardnotes/analytics@2.27.2) (2023-09-28)

**Note:** Version bump only for package @standardnotes/analytics

## [2.27.1](https://github.com/standardnotes/server/compare/@standardnotes/analytics@2.27.0...@standardnotes/analytics@2.27.1) (2023-09-27)

**Note:** Version bump only for package @standardnotes/analytics
@@ -1,6 +1,6 @@
{
  "name": "@standardnotes/analytics",
  "version": "2.27.1",
  "version": "2.27.6",
  "engines": {
    "node": ">=18.0.0 <21.0.0"
  },
@@ -3,6 +3,26 @@
All notable changes to this project will be documented in this file.
See [Conventional Commits](https://conventionalcommits.org) for commit guidelines.

## [1.75.11](https://github.com/standardnotes/api-gateway/compare/@standardnotes/api-gateway@1.75.10...@standardnotes/api-gateway@1.75.11) (2023-10-04)

**Note:** Version bump only for package @standardnotes/api-gateway

## [1.75.10](https://github.com/standardnotes/api-gateway/compare/@standardnotes/api-gateway@1.75.9...@standardnotes/api-gateway@1.75.10) (2023-10-04)

**Note:** Version bump only for package @standardnotes/api-gateway

## [1.75.9](https://github.com/standardnotes/api-gateway/compare/@standardnotes/api-gateway@1.75.8...@standardnotes/api-gateway@1.75.9) (2023-10-03)

**Note:** Version bump only for package @standardnotes/api-gateway

## [1.75.8](https://github.com/standardnotes/api-gateway/compare/@standardnotes/api-gateway@1.75.7...@standardnotes/api-gateway@1.75.8) (2023-10-03)

**Note:** Version bump only for package @standardnotes/api-gateway

## [1.75.7](https://github.com/standardnotes/api-gateway/compare/@standardnotes/api-gateway@1.75.6...@standardnotes/api-gateway@1.75.7) (2023-09-28)

**Note:** Version bump only for package @standardnotes/api-gateway

## [1.75.6](https://github.com/standardnotes/api-gateway/compare/@standardnotes/api-gateway@1.75.5...@standardnotes/api-gateway@1.75.6) (2023-09-27)

**Note:** Version bump only for package @standardnotes/api-gateway
@@ -1,6 +1,6 @@
{
  "name": "@standardnotes/api-gateway",
  "version": "1.75.6",
  "version": "1.75.11",
  "engines": {
    "node": ">=18.0.0 <21.0.0"
  },
@@ -3,6 +3,71 @@
All notable changes to this project will be documented in this file.
See [Conventional Commits](https://conventionalcommits.org) for commit guidelines.

## [1.150.5](https://github.com/standardnotes/server/compare/@standardnotes/auth-server@1.150.4...@standardnotes/auth-server@1.150.5) (2023-10-04)

**Note:** Version bump only for package @standardnotes/auth-server

## [1.150.4](https://github.com/standardnotes/server/compare/@standardnotes/auth-server@1.150.3...@standardnotes/auth-server@1.150.4) (2023-10-04)

**Note:** Version bump only for package @standardnotes/auth-server

## [1.150.3](https://github.com/standardnotes/server/compare/@standardnotes/auth-server@1.150.2...@standardnotes/auth-server@1.150.3) (2023-10-04)

### Bug Fixes

* **auth:** remove ec2 plugin from worker ([0281724](https://github.com/standardnotes/server/commit/02817241965db06cc02e9cca1d068446db0dee4e))

## [1.150.2](https://github.com/standardnotes/server/compare/@standardnotes/auth-server@1.150.1...@standardnotes/auth-server@1.150.2) (2023-10-04)

### Bug Fixes

* **auth:** remove ec2 plugin ([373cfad](https://github.com/standardnotes/server/commit/373cfad1f7c26d3a0ec2a69e720bbce535170359))

## [1.150.1](https://github.com/standardnotes/server/compare/@standardnotes/auth-server@1.150.0...@standardnotes/auth-server@1.150.1) (2023-10-03)

**Note:** Version bump only for package @standardnotes/auth-server

# [1.150.0](https://github.com/standardnotes/server/compare/@standardnotes/auth-server@1.149.1...@standardnotes/auth-server@1.150.0) (2023-10-03)

### Features

* add xray segment tracing on auth-worker ([b1b244a](https://github.com/standardnotes/server/commit/b1b244a2cf1e17ddf67fc9b238b4b25a1bc5a190))

## [1.149.1](https://github.com/standardnotes/server/compare/@standardnotes/auth-server@1.149.0...@standardnotes/auth-server@1.149.1) (2023-10-02)

### Bug Fixes

* **auth:** enable xray only for server ([71e2a41](https://github.com/standardnotes/server/commit/71e2a4187e8f79b2a3055f502310ed0b6a6d82ea))

# [1.149.0](https://github.com/standardnotes/server/compare/@standardnotes/auth-server@1.148.2...@standardnotes/auth-server@1.149.0) (2023-10-02)

### Bug Fixes

* **auth:** add aws xray ecs and ec2 plugins ([3838358](https://github.com/standardnotes/server/commit/383835808165aba58d15e79f03b5ba4e1c899f4c))
* **auth:** prevent from loop disabling of email settings ([#858](https://github.com/standardnotes/server/issues/858)) ([bd71422](https://github.com/standardnotes/server/commit/bd71422fabc4b5b47cca6dd071e3332943adc81d))

### Features

* **auth:** introduce AWS X-Ray SDK ([#859](https://github.com/standardnotes/server/issues/859)) ([1a388f0](https://github.com/standardnotes/server/commit/1a388f00c3897d1ebb1411793cfb23c3d305ac2e))

## [1.148.2](https://github.com/standardnotes/server/compare/@standardnotes/auth-server@1.148.1...@standardnotes/auth-server@1.148.2) (2023-09-29)

### Bug Fixes

* **auth:** increase ttl for in progress duration of transitions ([681e037](https://github.com/standardnotes/server/commit/681e0378ae6b97c838b0b34ccc630194b304b81a))

## [1.148.1](https://github.com/standardnotes/server/compare/@standardnotes/auth-server@1.148.0...@standardnotes/auth-server@1.148.1) (2023-09-29)

### Bug Fixes

* **auth:** disable transitions retriggering if they are in progress ([5ef6c5c](https://github.com/standardnotes/server/commit/5ef6c5c14a9f7a558de7ac9ff0ab99a5f831c127))

# [1.148.0](https://github.com/standardnotes/server/compare/@standardnotes/auth-server@1.147.1...@standardnotes/auth-server@1.148.0) (2023-09-28)

### Features

* block file operations during transition ([#856](https://github.com/standardnotes/server/issues/856)) ([676cf36](https://github.com/standardnotes/server/commit/676cf36f8d769aa433880b2800f0457d06fbbf14))

## [1.147.1](https://github.com/standardnotes/server/compare/@standardnotes/auth-server@1.147.0...@standardnotes/auth-server@1.147.1) (2023-09-27)

**Note:** Version bump only for package @standardnotes/auth-server
@@ -24,11 +24,13 @@ import { urlencoded, json, Request, Response, NextFunction } from 'express'
import * as winston from 'winston'
import * as dayjs from 'dayjs'
import * as utc from 'dayjs/plugin/utc'
import * as AWSXRay from 'aws-xray-sdk'

import { InversifyExpressServer } from 'inversify-express-utils'
import { ContainerConfigLoader } from '../src/Bootstrap/Container'
import TYPES from '../src/Bootstrap/Types'
import { Env } from '../src/Bootstrap/Env'
import { ServiceIdentifier } from '@standardnotes/domain-core'

const container = new ContainerConfigLoader()
void container.load().then((container) => {
@@ -37,9 +39,20 @@ void container.load().then((container) => {
  const env: Env = new Env()
  env.load()

  const isConfiguredForAWSProduction =
    env.get('MODE', true) !== 'home-server' && env.get('MODE', true) !== 'self-hosted'

  if (isConfiguredForAWSProduction) {
    AWSXRay.config([AWSXRay.plugins.ECSPlugin])
  }

  const server = new InversifyExpressServer(container)

  server.setConfig((app) => {
    if (isConfiguredForAWSProduction) {
      app.use(AWSXRay.express.openSegment(ServiceIdentifier.NAMES.Auth))
    }

    app.use((_request: Request, response: Response, next: NextFunction) => {
      response.setHeader('X-Auth-Version', container.get(TYPES.Auth_VERSION))
      next()
@@ -66,6 +79,10 @@ void container.load().then((container) => {

  const serverInstance = server.build()

  if (isConfiguredForAWSProduction) {
    serverInstance.use(AWSXRay.express.closeSegment())
  }

  serverInstance.listen(env.get('PORT'))

  logger.info(`Server started on port ${process.env.PORT}`)
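The server entry point only instruments itself when MODE is neither home-server nor self-hosted: the X-Ray Express middleware has to open a segment before any route handlers and close it after all of them, which is why openSegment goes into setConfig and closeSegment onto the built app. A stripped-down TypeScript sketch of that ordering on a plain Express app (no Inversify), with a placeholder segment name and port:

```ts
import express from 'express'
import * as AWSXRay from 'aws-xray-sdk'

AWSXRay.config([AWSXRay.plugins.ECSPlugin])

const app = express()

// Opens an X-Ray segment for every incoming request; must be registered before the routes.
app.use(AWSXRay.express.openSegment('Auth'))

app.get('/healthcheck', (_request, response) => {
  response.send('OK')
})

// Closes the segment once the response is finished; must come after the routes.
app.use(AWSXRay.express.closeSegment())

app.listen(3000)
```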
@@ -55,7 +55,7 @@ const requestTransition = async (

  let wasTransitionRequested = false

  if (itemsTransitionStatus?.value !== TransitionStatus.STATUSES.Verified) {
  if (itemsTransitionStatus === null || itemsTransitionStatus.value === TransitionStatus.STATUSES.Failed) {
    wasTransitionRequested = true
    await transitionStatusRepository.remove(user.uuid, 'items')

@@ -68,7 +68,7 @@ const requestTransition = async (
    )
  }

  if (revisionsTransitionStatus?.value !== TransitionStatus.STATUSES.Verified) {
  if (revisionsTransitionStatus === null || revisionsTransitionStatus.value === TransitionStatus.STATUSES.Failed) {
    wasTransitionRequested = true
    await transitionStatusRepository.remove(user.uuid, 'revisions')
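The guard change means a transition is only re-requested when there is no recorded status at all or the previous attempt ended in Failed; statuses that are merely not yet Verified, such as ones still in progress, no longer trigger a retry. A hypothetical standalone predicate expressing the new rule in TypeScript (the failed value is passed in rather than imported, since the TransitionStatus import is not shown in this diff):

```ts
interface TransitionStatusLike {
  value: string
}

// Re-request only when nothing was recorded yet or the last attempt failed.
const shouldRequestTransition = (status: TransitionStatusLike | null, failedValue: string): boolean =>
  status === null || status.value === failedValue
```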
@@ -8,6 +8,7 @@ import { Env } from '../src/Bootstrap/Env'
import { DomainEventSubscriberFactoryInterface } from '@standardnotes/domain-events'
import * as dayjs from 'dayjs'
import * as utc from 'dayjs/plugin/utc'
import * as AWSXRay from 'aws-xray-sdk'

const container = new ContainerConfigLoader('worker')
void container.load().then((container) => {
@@ -16,6 +17,14 @@ void container.load().then((container) => {
  const env: Env = new Env()
  env.load()

  const isConfiguredForAWSProduction =
    env.get('MODE', true) !== 'home-server' && env.get('MODE', true) !== 'self-hosted'

  if (isConfiguredForAWSProduction) {
    AWSXRay.enableManualMode()
    AWSXRay.config([AWSXRay.plugins.ECSPlugin])
  }

  const logger: Logger = container.get(TYPES.Auth_Logger)

  logger.info('Starting worker...')
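Unlike the HTTP server, the worker has no Express request cycle to attach segments to, so it switches the SDK to manual mode and keeps the ECS plugin. In manual mode, segments have to be created and closed explicitly around each unit of work, presumably via the SQSXRayEventMessageHandler imported in the container changes below. A hedged sketch of that pattern; the segment name and handler are illustrative, not the actual worker wiring:

```ts
import * as AWSXRay from 'aws-xray-sdk'

AWSXRay.enableManualMode()
AWSXRay.config([AWSXRay.plugins.ECSPlugin])

async function handleMessage(messageBody: string): Promise<void> {
  // One segment per processed message, closed whether handling succeeds or fails.
  const segment = new AWSXRay.Segment('auth-worker')
  try {
    // ... actual domain event handling would happen here ...
    console.log('handled', messageBody)
  } catch (error) {
    segment.addError(error as Error)
    throw error
  } finally {
    segment.close()
  }
}

void handleMessage('{"eventType":"EXAMPLE"}')
```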
@@ -1,6 +1,6 @@
{
  "name": "@standardnotes/auth-server",
  "version": "1.147.1",
  "version": "1.150.5",
  "engines": {
    "node": ">=18.0.0 <21.0.0"
  },
@@ -54,6 +54,7 @@
    "@standardnotes/sncrypto-common": "^1.13.4",
    "@standardnotes/sncrypto-node": "workspace:*",
    "@standardnotes/time": "workspace:*",
    "aws-xray-sdk": "^3.5.2",
    "axios": "^1.1.3",
    "bcryptjs": "2.4.3",
    "cors": "2.8.5",
@@ -1,5 +1,6 @@
import * as winston from 'winston'
import Redis from 'ioredis'
import { captureAWSv3Client } from 'aws-xray-sdk'
import { SNSClient, SNSClientConfig } from '@aws-sdk/client-sns'
import { SQSClient, SQSClientConfig } from '@aws-sdk/client-sqs'
import { Container } from 'inversify'
@@ -92,7 +93,7 @@ import {
  SNSDomainEventPublisher,
  SQSDomainEventSubscriberFactory,
  SQSEventMessageHandler,
  SQSNewRelicEventMessageHandler,
  SQSXRayEventMessageHandler,
} from '@standardnotes/domain-events-infra'
import { GetUserSubscription } from '../Domain/UseCase/GetUserSubscription/GetUserSubscription'
import { ChangeCredentials } from '../Domain/UseCase/ChangeCredentials/ChangeCredentials'
@@ -273,6 +274,7 @@ import { UserAddedToSharedVaultEventHandler } from '../Domain/Handler/UserAddedT
import { UserRemovedFromSharedVaultEventHandler } from '../Domain/Handler/UserRemovedFromSharedVaultEventHandler'
import { DesignateSurvivor } from '../Domain/UseCase/DesignateSurvivor/DesignateSurvivor'
import { UserDesignatedAsSurvivorInSharedVaultEventHandler } from '../Domain/Handler/UserDesignatedAsSurvivorInSharedVaultEventHandler'
import { DisableEmailSettingBasedOnEmailSubscription } from '../Domain/UseCase/DisableEmailSettingBasedOnEmailSubscription/DisableEmailSettingBasedOnEmailSubscription'

export class ContainerConfigLoader {
  constructor(private mode: 'server' | 'worker' = 'server') {}
@@ -319,6 +321,8 @@ export class ContainerConfigLoader {
    logger.debug('Database initialized')

    const isConfiguredForHomeServer = env.get('MODE', true) === 'home-server'
    const isConfiguredForSelfHosting = env.get('MODE', true) === 'self-hosted'
    const isConfiguredForAWSProduction = !isConfiguredForHomeServer && !isConfiguredForSelfHosting
    const isConfiguredForInMemoryCache = env.get('CACHE_TYPE', true) === 'memory'

    if (!isConfiguredForInMemoryCache) {
@@ -349,7 +353,11 @@ export class ContainerConfigLoader {
          secretAccessKey: env.get('SNS_SECRET_ACCESS_KEY', true),
        }
      }
      container.bind<SNSClient>(TYPES.Auth_SNS).toConstantValue(new SNSClient(snsConfig))
      let snsClient = new SNSClient(snsConfig)
      if (isConfiguredForAWSProduction) {
        snsClient = captureAWSv3Client(snsClient)
      }
      container.bind<SNSClient>(TYPES.Auth_SNS).toConstantValue(snsClient)

      const sqsConfig: SQSClientConfig = {
        region: env.get('SQS_AWS_REGION', true),
@@ -363,13 +371,17 @@ export class ContainerConfigLoader {
          secretAccessKey: env.get('SQS_SECRET_ACCESS_KEY', true),
        }
      }
      container.bind<SQSClient>(TYPES.Auth_SQS).toConstantValue(new SQSClient(sqsConfig))
      let sqsClient = new SQSClient(sqsConfig)
      if (isConfiguredForAWSProduction) {
        sqsClient = captureAWSv3Client(sqsClient)
      }
      container.bind<SQSClient>(TYPES.Auth_SQS).toConstantValue(sqsClient)
    }

    // Mapping
    container
      .bind<MapperInterface<SessionTrace, TypeORMSessionTrace>>(TYPES.Auth_SessionTracePersistenceMapper)
      .toConstantValue(new SessionTracePersistenceMapper())
      .toConstantValue(new SessionTracePersistenceMapper(container.get<TimerInterface>(TYPES.Auth_Timer)))
    container
      .bind<MapperInterface<Authenticator, TypeORMAuthenticator>>(TYPES.Auth_AuthenticatorPersistenceMapper)
      .toConstantValue(new AuthenticatorPersistenceMapper())
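With the clients above, instrumentation is opt-in per environment: in AWS production mode the SNS and SQS clients are passed through captureAWSv3Client before being bound, so every publish and receive is recorded as an X-Ray subsegment, while home-server and self-hosted installs keep the plain clients. A self-contained sketch of the same wrapping outside the Inversify container; the region, topic ARN and MODE handling are placeholders:

```ts
import * as AWSXRay from 'aws-xray-sdk'
import { SNSClient, PublishCommand } from '@aws-sdk/client-sns'

// Avoid hard failures when no segment is open (for example, when run as a local script).
AWSXRay.setContextMissingStrategy('LOG_ERROR')

const isConfiguredForAWSProduction =
  process.env.MODE !== 'home-server' && process.env.MODE !== 'self-hosted'

let snsClient = new SNSClient({ region: 'us-east-1' }) // placeholder region
if (isConfiguredForAWSProduction) {
  // Wraps the AWS SDK v3 client so each call shows up as an X-Ray subsegment.
  snsClient = AWSXRay.captureAWSv3Client(snsClient)
}

async function publishExample(): Promise<void> {
  await snsClient.send(
    new PublishCommand({
      TopicArn: 'arn:aws:sns:us-east-1:000000000000:example-topic', // placeholder
      Message: JSON.stringify({ eventType: 'EXAMPLE' }),
    }),
  )
}

void publishExample()
```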
@@ -458,8 +470,9 @@ export class ContainerConfigLoader {
      .bind<SessionTraceRepositoryInterface>(TYPES.Auth_SessionTraceRepository)
      .toConstantValue(
        new TypeORMSessionTraceRepository(
          container.get(TYPES.Auth_ORMSessionTraceRepository),
          container.get(TYPES.Auth_SessionTracePersistenceMapper),
          container.get<Repository<TypeORMSessionTrace>>(TYPES.Auth_ORMSessionTraceRepository),
          container.get<MapperInterface<SessionTrace, TypeORMSessionTrace>>(TYPES.Auth_SessionTracePersistenceMapper),
          container.get<TimerInterface>(TYPES.Auth_Timer),
        ),
      )
    container
@@ -497,20 +510,6 @@ export class ContainerConfigLoader {
        ),
      )

    // Middleware
    container.bind<SessionMiddleware>(TYPES.Auth_SessionMiddleware).to(SessionMiddleware)
    container.bind<LockMiddleware>(TYPES.Auth_LockMiddleware).to(LockMiddleware)
    container
      .bind<RequiredCrossServiceTokenMiddleware>(TYPES.Auth_RequiredCrossServiceTokenMiddleware)
      .to(RequiredCrossServiceTokenMiddleware)
    container
      .bind<OptionalCrossServiceTokenMiddleware>(TYPES.Auth_OptionalCrossServiceTokenMiddleware)
      .to(OptionalCrossServiceTokenMiddleware)
    container
      .bind<ApiGatewayOfflineAuthMiddleware>(TYPES.Auth_ApiGatewayOfflineAuthMiddleware)
      .to(ApiGatewayOfflineAuthMiddleware)
    container.bind<OfflineUserAuthMiddleware>(TYPES.Auth_OfflineUserAuthMiddleware).to(OfflineUserAuthMiddleware)

    // Projectors
    container.bind<SessionProjector>(TYPES.Auth_SessionProjector).to(SessionProjector)
    container.bind<UserProjector>(TYPES.Auth_UserProjector).to(UserProjector)
@@ -739,6 +738,32 @@ export class ContainerConfigLoader {
          : new SNSDomainEventPublisher(container.get(TYPES.Auth_SNS), container.get(TYPES.Auth_SNS_TOPIC_ARN)),
      )

    // Middleware
    container.bind<SessionMiddleware>(TYPES.Auth_SessionMiddleware).to(SessionMiddleware)
    container.bind<LockMiddleware>(TYPES.Auth_LockMiddleware).to(LockMiddleware)
    container
      .bind<RequiredCrossServiceTokenMiddleware>(TYPES.Auth_RequiredCrossServiceTokenMiddleware)
      .toConstantValue(
        new RequiredCrossServiceTokenMiddleware(
          container.get<TokenDecoderInterface<CrossServiceTokenData>>(TYPES.Auth_CrossServiceTokenDecoder),
          isConfiguredForAWSProduction && this.mode === 'server',
          container.get<winston.Logger>(TYPES.Auth_Logger),
        ),
      )
    container
      .bind<OptionalCrossServiceTokenMiddleware>(TYPES.Auth_OptionalCrossServiceTokenMiddleware)
      .toConstantValue(
        new OptionalCrossServiceTokenMiddleware(
          container.get<TokenDecoderInterface<CrossServiceTokenData>>(TYPES.Auth_CrossServiceTokenDecoder),
          isConfiguredForAWSProduction && this.mode === 'server',
          container.get<winston.Logger>(TYPES.Auth_Logger),
        ),
      )
    container
      .bind<ApiGatewayOfflineAuthMiddleware>(TYPES.Auth_ApiGatewayOfflineAuthMiddleware)
      .to(ApiGatewayOfflineAuthMiddleware)
    container.bind<OfflineUserAuthMiddleware>(TYPES.Auth_OfflineUserAuthMiddleware).to(OfflineUserAuthMiddleware)

    // use cases
    container
      .bind<TraceSession>(TYPES.Auth_TraceSession)
@@ -964,6 +989,15 @@ export class ContainerConfigLoader {
          container.get<TimerInterface>(TYPES.Auth_Timer),
        ),
      )
    container
      .bind<DisableEmailSettingBasedOnEmailSubscription>(TYPES.Auth_DisableEmailSettingBasedOnEmailSubscription)
      .toConstantValue(
        new DisableEmailSettingBasedOnEmailSubscription(
          container.get<UserRepositoryInterface>(TYPES.Auth_UserRepository),
container.get<SettingRepositoryInterface>(TYPES.Auth_SettingRepository),
|
||||
container.get<SettingFactoryInterface>(TYPES.Auth_SettingFactory),
|
||||
),
|
||||
)
|
||||
|
||||
// Controller
|
||||
container
|
||||
@@ -1011,7 +1045,6 @@ export class ContainerConfigLoader {
|
||||
container.get<SessionRepositoryInterface>(TYPES.Auth_SessionRepository),
|
||||
container.get<EphemeralSessionRepositoryInterface>(TYPES.Auth_EphemeralSessionRepository),
|
||||
container.get<RevokedSessionRepositoryInterface>(TYPES.Auth_RevokedSessionRepository),
|
||||
container.get<RemoveSharedVaultUser>(TYPES.Auth_RemoveSharedVaultUser),
|
||||
container.get<winston.Logger>(TYPES.Auth_Logger),
|
||||
),
|
||||
)
|
||||
@@ -1102,8 +1135,10 @@ export class ContainerConfigLoader {
|
||||
.bind<EmailSubscriptionUnsubscribedEventHandler>(TYPES.Auth_EmailSubscriptionUnsubscribedEventHandler)
|
||||
.toConstantValue(
|
||||
new EmailSubscriptionUnsubscribedEventHandler(
|
||||
container.get(TYPES.Auth_UserRepository),
|
||||
container.get(TYPES.Auth_SettingService),
|
||||
container.get<DisableEmailSettingBasedOnEmailSubscription>(
|
||||
TYPES.Auth_DisableEmailSettingBasedOnEmailSubscription,
|
||||
),
|
||||
container.get<winston.Logger>(TYPES.Auth_Logger),
|
||||
),
|
||||
)
|
||||
container
|
||||
@@ -1201,8 +1236,8 @@ export class ContainerConfigLoader {
|
||||
container
|
||||
.bind<DomainEventMessageHandlerInterface>(TYPES.Auth_DomainEventMessageHandler)
|
||||
.toConstantValue(
|
||||
env.get('NEW_RELIC_ENABLED', true) === 'true'
|
||||
? new SQSNewRelicEventMessageHandler(eventHandlers, container.get(TYPES.Auth_Logger))
|
||||
isConfiguredForAWSProduction
|
||||
? new SQSXRayEventMessageHandler(eventHandlers, container.get(TYPES.Auth_Logger))
|
||||
: new SQSEventMessageHandler(eventHandlers, container.get(TYPES.Auth_Logger)),
|
||||
)
|
||||
|
||||
|
||||
@@ -161,6 +161,7 @@ const TYPES = {
|
||||
Auth_AddSharedVaultUser: Symbol.for('Auth_AddSharedVaultUser'),
|
||||
Auth_RemoveSharedVaultUser: Symbol.for('Auth_RemoveSharedVaultUser'),
|
||||
Auth_DesignateSurvivor: Symbol.for('Auth_DesignateSurvivor'),
|
||||
Auth_DisableEmailSettingBasedOnEmailSubscription: Symbol.for('Auth_DisableEmailSettingBasedOnEmailSubscription'),
|
||||
// Handlers
|
||||
Auth_UserRegisteredEventHandler: Symbol.for('Auth_UserRegisteredEventHandler'),
|
||||
Auth_AccountDeletionRequestedEventHandler: Symbol.for('Auth_AccountDeletionRequestedEventHandler'),
|
||||
|
||||
@@ -6,7 +6,6 @@ import { EphemeralSessionRepositoryInterface } from '../Session/EphemeralSession
|
||||
import { RevokedSessionRepositoryInterface } from '../Session/RevokedSessionRepositoryInterface'
|
||||
import { SessionRepositoryInterface } from '../Session/SessionRepositoryInterface'
|
||||
import { UserRepositoryInterface } from '../User/UserRepositoryInterface'
|
||||
import { RemoveSharedVaultUser } from '../UseCase/RemoveSharedVaultUser/RemoveSharedVaultUser'
|
||||
|
||||
export class AccountDeletionRequestedEventHandler implements DomainEventHandlerInterface {
|
||||
constructor(
|
||||
@@ -14,7 +13,6 @@ export class AccountDeletionRequestedEventHandler implements DomainEventHandlerI
|
||||
private sessionRepository: SessionRepositoryInterface,
|
||||
private ephemeralSessionRepository: EphemeralSessionRepositoryInterface,
|
||||
private revokedSessionRepository: RevokedSessionRepositoryInterface,
|
||||
private removeSharedVaultUser: RemoveSharedVaultUser,
|
||||
private logger: Logger,
|
||||
) {}
|
||||
|
||||
@@ -37,13 +35,6 @@ export class AccountDeletionRequestedEventHandler implements DomainEventHandlerI
|
||||
|
||||
await this.removeSessions(userUuid.value)
|
||||
|
||||
const result = await this.removeSharedVaultUser.execute({
|
||||
userUuid: userUuid.value,
|
||||
})
|
||||
if (result.isFailed()) {
|
||||
this.logger.error(`Could not remove shared vault user: ${result.getError()}`)
|
||||
}
|
||||
|
||||
await this.userRepository.remove(user)
|
||||
|
||||
this.logger.info(`Finished account cleanup for user: ${userUuid.value}`)
|
||||
|
||||
@@ -1,117 +0,0 @@
|
||||
import { EmailLevel } from '@standardnotes/domain-core'
|
||||
import { EmailSubscriptionUnsubscribedEvent } from '@standardnotes/domain-events'
|
||||
|
||||
import { SettingServiceInterface } from '../Setting/SettingServiceInterface'
|
||||
import { User } from '../User/User'
|
||||
import { UserRepositoryInterface } from '../User/UserRepositoryInterface'
|
||||
import { EmailSubscriptionUnsubscribedEventHandler } from './EmailSubscriptionUnsubscribedEventHandler'
|
||||
|
||||
describe('EmailSubscriptionUnsubscribedEventHandler', () => {
|
||||
let userRepository: UserRepositoryInterface
|
||||
let settingsService: SettingServiceInterface
|
||||
let event: EmailSubscriptionUnsubscribedEvent
|
||||
|
||||
const createHandler = () => new EmailSubscriptionUnsubscribedEventHandler(userRepository, settingsService)
|
||||
|
||||
beforeEach(() => {
|
||||
userRepository = {} as jest.Mocked<UserRepositoryInterface>
|
||||
userRepository.findOneByUsernameOrEmail = jest.fn().mockReturnValue({} as jest.Mocked<User>)
|
||||
|
||||
settingsService = {} as jest.Mocked<SettingServiceInterface>
|
||||
settingsService.createOrReplace = jest.fn()
|
||||
|
||||
event = {
|
||||
payload: {
|
||||
userEmail: 'test@test.te',
|
||||
level: EmailLevel.LEVELS.Marketing,
|
||||
},
|
||||
} as jest.Mocked<EmailSubscriptionUnsubscribedEvent>
|
||||
})
|
||||
|
||||
it('should not do anything if username is invalid', async () => {
|
||||
event.payload.userEmail = ''
|
||||
|
||||
await createHandler().handle(event)
|
||||
|
||||
expect(settingsService.createOrReplace).not.toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('should not do anything if user is not found', async () => {
|
||||
userRepository.findOneByUsernameOrEmail = jest.fn().mockReturnValue(null)
|
||||
|
||||
await createHandler().handle(event)
|
||||
|
||||
expect(settingsService.createOrReplace).not.toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('should update user marketing email settings', async () => {
|
||||
await createHandler().handle(event)
|
||||
|
||||
expect(settingsService.createOrReplace).toHaveBeenCalledWith({
|
||||
user: {},
|
||||
props: {
|
||||
name: 'MUTE_MARKETING_EMAILS',
|
||||
unencryptedValue: 'muted',
|
||||
sensitive: false,
|
||||
},
|
||||
})
|
||||
})
|
||||
|
||||
it('should update user sign in email settings', async () => {
|
||||
event.payload.level = EmailLevel.LEVELS.SignIn
|
||||
|
||||
await createHandler().handle(event)
|
||||
|
||||
expect(settingsService.createOrReplace).toHaveBeenCalledWith({
|
||||
user: {},
|
||||
props: {
|
||||
name: 'MUTE_SIGN_IN_EMAILS',
|
||||
unencryptedValue: 'muted',
|
||||
sensitive: false,
|
||||
},
|
||||
})
|
||||
})
|
||||
|
||||
it('should update user email backup email settings', async () => {
|
||||
event.payload.level = EmailLevel.LEVELS.FailedEmailBackup
|
||||
|
||||
await createHandler().handle(event)
|
||||
|
||||
expect(settingsService.createOrReplace).toHaveBeenCalledWith({
|
||||
user: {},
|
||||
props: {
|
||||
name: 'MUTE_FAILED_BACKUPS_EMAILS',
|
||||
unencryptedValue: 'muted',
|
||||
sensitive: false,
|
||||
},
|
||||
})
|
||||
})
|
||||
|
||||
it('should update user email backup email settings', async () => {
|
||||
event.payload.level = EmailLevel.LEVELS.FailedCloudBackup
|
||||
|
||||
await createHandler().handle(event)
|
||||
|
||||
expect(settingsService.createOrReplace).toHaveBeenCalledWith({
|
||||
user: {},
|
||||
props: {
|
||||
name: 'MUTE_FAILED_CLOUD_BACKUPS_EMAILS',
|
||||
unencryptedValue: 'muted',
|
||||
sensitive: false,
|
||||
},
|
||||
})
|
||||
})
|
||||
|
||||
it('should throw error for unrecognized level', async () => {
|
||||
event.payload.level = 'foobar'
|
||||
|
||||
let caughtError = null
|
||||
try {
|
||||
await createHandler().handle(event)
|
||||
} catch (error) {
|
||||
caughtError = error
|
||||
}
|
||||
|
||||
expect(caughtError).not.toBeNull()
|
||||
})
|
||||
})
|
||||
@@ -1,50 +1,21 @@
|
||||
import { EmailLevel, Username } from '@standardnotes/domain-core'
|
||||
import { DomainEventHandlerInterface, EmailSubscriptionUnsubscribedEvent } from '@standardnotes/domain-events'
|
||||
import { SettingName } from '@standardnotes/settings'
|
||||
|
||||
import { SettingServiceInterface } from '../Setting/SettingServiceInterface'
|
||||
import { UserRepositoryInterface } from '../User/UserRepositoryInterface'
|
||||
import { Logger } from 'winston'
|
||||
import { DisableEmailSettingBasedOnEmailSubscription } from '../UseCase/DisableEmailSettingBasedOnEmailSubscription/DisableEmailSettingBasedOnEmailSubscription'
|
||||
|
||||
export class EmailSubscriptionUnsubscribedEventHandler implements DomainEventHandlerInterface {
|
||||
constructor(
|
||||
private userRepository: UserRepositoryInterface,
|
||||
private settingsService: SettingServiceInterface,
|
||||
private disableEmailSettingBasedOnEmailSubscription: DisableEmailSettingBasedOnEmailSubscription,
|
||||
private logger: Logger,
|
||||
) {}
|
||||
|
||||
async handle(event: EmailSubscriptionUnsubscribedEvent): Promise<void> {
|
||||
const usernameOrError = Username.create(event.payload.userEmail)
|
||||
if (usernameOrError.isFailed()) {
|
||||
return
|
||||
}
|
||||
const username = usernameOrError.getValue()
|
||||
|
||||
const user = await this.userRepository.findOneByUsernameOrEmail(username)
|
||||
if (user === null) {
|
||||
return
|
||||
}
|
||||
|
||||
await this.settingsService.createOrReplace({
|
||||
user,
|
||||
props: {
|
||||
name: this.getSettingNameFromLevel(event.payload.level),
|
||||
unencryptedValue: 'muted',
|
||||
sensitive: false,
|
||||
},
|
||||
const result = await this.disableEmailSettingBasedOnEmailSubscription.execute({
|
||||
userEmail: event.payload.userEmail,
|
||||
level: event.payload.level,
|
||||
})
|
||||
}
|
||||
|
||||
private getSettingNameFromLevel(level: string): string {
|
||||
switch (level) {
|
||||
case EmailLevel.LEVELS.FailedCloudBackup:
|
||||
return SettingName.NAMES.MuteFailedCloudBackupsEmails
|
||||
case EmailLevel.LEVELS.FailedEmailBackup:
|
||||
return SettingName.NAMES.MuteFailedBackupsEmails
|
||||
case EmailLevel.LEVELS.Marketing:
|
||||
return SettingName.NAMES.MuteMarketingEmails
|
||||
case EmailLevel.LEVELS.SignIn:
|
||||
return SettingName.NAMES.MuteSignInEmails
|
||||
default:
|
||||
throw new Error(`Unknown level: ${level}`)
|
||||
if (result.isFailed()) {
|
||||
this.logger.error(`Failed to disable email setting for user ${event.payload.userEmail}: ${result.getError()}`)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -3,7 +3,7 @@ import { SubscriptionPlanName, Uuid } from '@standardnotes/domain-core'
|
||||
import { SessionTrace } from './SessionTrace'
|
||||
|
||||
export interface SessionTraceRepositoryInterface {
|
||||
save(sessionTrace: SessionTrace): Promise<void>
|
||||
insert(sessionTrace: SessionTrace): Promise<void>
|
||||
removeExpiredBefore(date: Date): Promise<void>
|
||||
findOneByUserUuidAndDate(userUuid: Uuid, date: Date): Promise<SessionTrace | null>
|
||||
countByDate(date: Date): Promise<number>
|
||||
|
||||
@@ -1,14 +1,17 @@
|
||||
import 'reflect-metadata'
|
||||
|
||||
import { TokenEncoderInterface, ValetTokenData, ValetTokenOperation } from '@standardnotes/security'
|
||||
import { CreateValetToken } from './CreateValetToken'
|
||||
import { TransitionStatus } from '@standardnotes/domain-core'
|
||||
import { TimerInterface } from '@standardnotes/time'
|
||||
import { TokenEncoderInterface, ValetTokenData, ValetTokenOperation } from '@standardnotes/security'
|
||||
|
||||
import { CreateValetToken } from './CreateValetToken'
|
||||
import { UserSubscription } from '../../Subscription/UserSubscription'
|
||||
import { SubscriptionSettingServiceInterface } from '../../Setting/SubscriptionSettingServiceInterface'
|
||||
import { User } from '../../User/User'
|
||||
import { UserSubscriptionType } from '../../Subscription/UserSubscriptionType'
|
||||
import { SubscriptionSettingsAssociationServiceInterface } from '../../Setting/SubscriptionSettingsAssociationServiceInterface'
|
||||
import { UserSubscriptionServiceInterface } from '../../Subscription/UserSubscriptionServiceInterface'
|
||||
import { TransitionStatusRepositoryInterface } from '../../Transition/TransitionStatusRepositoryInterface'
|
||||
|
||||
describe('CreateValetToken', () => {
|
||||
let tokenEncoder: TokenEncoderInterface<ValetTokenData>
|
||||
@@ -20,6 +23,7 @@ describe('CreateValetToken', () => {
|
||||
let regularSubscription: UserSubscription
|
||||
let sharedSubscription: UserSubscription
|
||||
let user: User
|
||||
let transitionStatusRepository: TransitionStatusRepositoryInterface
|
||||
|
||||
const createUseCase = () =>
|
||||
new CreateValetToken(
|
||||
@@ -29,6 +33,7 @@ describe('CreateValetToken', () => {
|
||||
userSubscriptionService,
|
||||
timer,
|
||||
valetTokenTTL,
|
||||
transitionStatusRepository,
|
||||
)
|
||||
|
||||
beforeEach(() => {
|
||||
@@ -66,6 +71,11 @@ describe('CreateValetToken', () => {
|
||||
|
||||
timer = {} as jest.Mocked<TimerInterface>
|
||||
timer.getTimestampInMicroseconds = jest.fn().mockReturnValue(100)
|
||||
|
||||
transitionStatusRepository = {} as jest.Mocked<TransitionStatusRepositoryInterface>
|
||||
transitionStatusRepository.getStatus = jest
|
||||
.fn()
|
||||
.mockReturnValue(TransitionStatus.create(TransitionStatus.STATUSES.Verified).getValue())
|
||||
})
|
||||
|
||||
it('should create a read valet token', async () => {
|
||||
@@ -166,6 +176,7 @@ describe('CreateValetToken', () => {
|
||||
{
|
||||
sharedSubscriptionUuid: undefined,
|
||||
regularSubscriptionUuid: '1-2-3',
|
||||
ongoingTransition: false,
|
||||
permittedOperation: 'write',
|
||||
permittedResources: [
|
||||
{
|
||||
@@ -206,6 +217,7 @@ describe('CreateValetToken', () => {
|
||||
{
|
||||
sharedSubscriptionUuid: '2-3-4',
|
||||
regularSubscriptionUuid: '1-2-3',
|
||||
ongoingTransition: false,
|
||||
permittedOperation: 'write',
|
||||
permittedResources: [
|
||||
{
|
||||
@@ -266,6 +278,7 @@ describe('CreateValetToken', () => {
|
||||
{
|
||||
sharedSubscriptionUuid: undefined,
|
||||
regularSubscriptionUuid: '1-2-3',
|
||||
ongoingTransition: false,
|
||||
permittedOperation: 'write',
|
||||
permittedResources: [
|
||||
{
|
||||
|
||||
@@ -13,6 +13,8 @@ import { CreateValetTokenDTO } from './CreateValetTokenDTO'
|
||||
import { SubscriptionSettingsAssociationServiceInterface } from '../../Setting/SubscriptionSettingsAssociationServiceInterface'
|
||||
import { UserSubscriptionServiceInterface } from '../../Subscription/UserSubscriptionServiceInterface'
|
||||
import { CreateValetTokenPayload } from '../../ValetToken/CreateValetTokenPayload'
|
||||
import { TransitionStatusRepositoryInterface } from '../../Transition/TransitionStatusRepositoryInterface'
|
||||
import { TransitionStatus } from '@standardnotes/domain-core'
|
||||
|
||||
@injectable()
|
||||
export class CreateValetToken implements UseCaseInterface {
|
||||
@@ -25,6 +27,8 @@ export class CreateValetToken implements UseCaseInterface {
|
||||
@inject(TYPES.Auth_UserSubscriptionService) private userSubscriptionService: UserSubscriptionServiceInterface,
|
||||
@inject(TYPES.Auth_Timer) private timer: TimerInterface,
|
||||
@inject(TYPES.Auth_VALET_TOKEN_TTL) private valetTokenTTL: number,
|
||||
@inject(TYPES.Auth_TransitionStatusRepository)
|
||||
private transitionStatusRepository: TransitionStatusRepositoryInterface,
|
||||
) {}
|
||||
|
||||
async execute(dto: CreateValetTokenDTO): Promise<CreateValetTokenResponseData> {
|
||||
@@ -83,6 +87,8 @@ export class CreateValetToken implements UseCaseInterface {
|
||||
sharedSubscriptionUuid = sharedSubscription.uuid
|
||||
}
|
||||
|
||||
const transitionStatus = await this.transitionStatusRepository.getStatus(userUuid, 'items')
|
||||
|
||||
const tokenData: ValetTokenData = {
|
||||
userUuid: dto.userUuid,
|
||||
permittedOperation: dto.operation,
|
||||
@@ -91,6 +97,7 @@ export class CreateValetToken implements UseCaseInterface {
|
||||
uploadBytesLimit,
|
||||
sharedSubscriptionUuid,
|
||||
regularSubscriptionUuid: regularSubscription.uuid,
|
||||
ongoingTransition: transitionStatus?.value === TransitionStatus.STATUSES.InProgress,
|
||||
}
|
||||
|
||||
const valetToken = this.tokenEncoder.encodeExpirableToken(tokenData, this.valetTokenTTL)
|
||||
|
||||
@@ -0,0 +1,96 @@
|
||||
import { EmailLevel } from '@standardnotes/domain-core'
|
||||
import { Setting } from '../../Setting/Setting'
|
||||
import { SettingFactoryInterface } from '../../Setting/SettingFactoryInterface'
|
||||
import { SettingRepositoryInterface } from '../../Setting/SettingRepositoryInterface'
|
||||
import { User } from '../../User/User'
|
||||
import { UserRepositoryInterface } from '../../User/UserRepositoryInterface'
|
||||
import { DisableEmailSettingBasedOnEmailSubscription } from './DisableEmailSettingBasedOnEmailSubscription'
|
||||
|
||||
describe('DisableEmailSettingBasedOnEmailSubscription', () => {
|
||||
let userRepository: UserRepositoryInterface
|
||||
let settingRepository: SettingRepositoryInterface
|
||||
let factory: SettingFactoryInterface
|
||||
let user: User
|
||||
|
||||
const createUseCase = () =>
|
||||
new DisableEmailSettingBasedOnEmailSubscription(userRepository, settingRepository, factory)
|
||||
|
||||
beforeEach(() => {
|
||||
user = {} as jest.Mocked<User>
|
||||
user.uuid = 'userUuid'
|
||||
|
||||
userRepository = {} as jest.Mocked<UserRepositoryInterface>
|
||||
userRepository.findOneByUsernameOrEmail = jest.fn().mockResolvedValue(user)
|
||||
|
||||
settingRepository = {} as jest.Mocked<SettingRepositoryInterface>
|
||||
settingRepository.findLastByNameAndUserUuid = jest.fn().mockResolvedValue({} as jest.Mocked<Setting>)
|
||||
settingRepository.save = jest.fn()
|
||||
|
||||
factory = {} as jest.Mocked<SettingFactoryInterface>
|
||||
factory.create = jest.fn().mockResolvedValue({} as jest.Mocked<Setting>)
|
||||
factory.createReplacement = jest.fn().mockResolvedValue({} as jest.Mocked<Setting>)
|
||||
})
|
||||
|
||||
it('should fail if the username is empty', async () => {
|
||||
const useCase = createUseCase()
|
||||
|
||||
const result = await useCase.execute({
|
||||
userEmail: '',
|
||||
level: EmailLevel.LEVELS.Marketing,
|
||||
})
|
||||
|
||||
expect(result.isFailed()).toBeTruthy()
|
||||
})
|
||||
|
||||
it('should fail if the user is not found', async () => {
|
||||
userRepository.findOneByUsernameOrEmail = jest.fn().mockResolvedValue(null)
|
||||
|
||||
const useCase = createUseCase()
|
||||
|
||||
const result = await useCase.execute({
|
||||
userEmail: 'test@test.te',
|
||||
level: EmailLevel.LEVELS.Marketing,
|
||||
})
|
||||
|
||||
expect(result.isFailed()).toBeTruthy()
|
||||
})
|
||||
|
||||
it('should fail if the setting name cannot be determined', async () => {
|
||||
const useCase = createUseCase()
|
||||
|
||||
const result = await useCase.execute({
|
||||
userEmail: 'test@test.te',
|
||||
level: 'invalid',
|
||||
})
|
||||
|
||||
expect(result.isFailed()).toBeTruthy()
|
||||
})
|
||||
|
||||
it('should create a new setting if it does not exist', async () => {
|
||||
settingRepository.findLastByNameAndUserUuid = jest.fn().mockResolvedValue(null)
|
||||
|
||||
const useCase = createUseCase()
|
||||
|
||||
const result = await useCase.execute({
|
||||
userEmail: 'test@test.te',
|
||||
level: EmailLevel.LEVELS.Marketing,
|
||||
})
|
||||
|
||||
expect(result.isFailed()).toBeFalsy()
|
||||
expect(factory.create).toHaveBeenCalled()
|
||||
expect(factory.createReplacement).not.toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('should replace the setting if it exists', async () => {
|
||||
const useCase = createUseCase()
|
||||
|
||||
const result = await useCase.execute({
|
||||
userEmail: 'test@test.te',
|
||||
level: EmailLevel.LEVELS.Marketing,
|
||||
})
|
||||
|
||||
expect(result.isFailed()).toBeFalsy()
|
||||
expect(factory.create).not.toHaveBeenCalled()
|
||||
expect(factory.createReplacement).toHaveBeenCalled()
|
||||
})
|
||||
})
|
||||
@@ -0,0 +1,72 @@
|
||||
import { EmailLevel, Result, UseCaseInterface, Username } from '@standardnotes/domain-core'
|
||||
import { SettingName } from '@standardnotes/settings'
|
||||
|
||||
import { DisableEmailSettingBasedOnEmailSubscriptionDTO } from './DisableEmailSettingBasedOnEmailSubscriptionDTO'
|
||||
import { UserRepositoryInterface } from '../../User/UserRepositoryInterface'
|
||||
import { SettingRepositoryInterface } from '../../Setting/SettingRepositoryInterface'
|
||||
import { SettingFactoryInterface } from '../../Setting/SettingFactoryInterface'
|
||||
|
||||
export class DisableEmailSettingBasedOnEmailSubscription implements UseCaseInterface<void> {
|
||||
constructor(
|
||||
private userRepository: UserRepositoryInterface,
|
||||
private settingRepository: SettingRepositoryInterface,
|
||||
private factory: SettingFactoryInterface,
|
||||
) {}
|
||||
|
||||
async execute(dto: DisableEmailSettingBasedOnEmailSubscriptionDTO): Promise<Result<void>> {
|
||||
const usernameOrError = Username.create(dto.userEmail)
|
||||
if (usernameOrError.isFailed()) {
|
||||
return Result.fail(usernameOrError.getError())
|
||||
}
|
||||
const username = usernameOrError.getValue()
|
||||
|
||||
const user = await this.userRepository.findOneByUsernameOrEmail(username)
|
||||
if (user === null) {
|
||||
return Result.fail(`User not found for email ${dto.userEmail}`)
|
||||
}
|
||||
|
||||
const settingNameOrError = this.getSettingNameFromLevel(dto.level)
|
||||
if (settingNameOrError.isFailed()) {
|
||||
return Result.fail(settingNameOrError.getError())
|
||||
}
|
||||
const settingName = settingNameOrError.getValue()
|
||||
|
||||
let setting = await this.settingRepository.findLastByNameAndUserUuid(settingName, user.uuid)
|
||||
if (!setting) {
|
||||
setting = await this.factory.create(
|
||||
{
|
||||
name: settingName,
|
||||
unencryptedValue: 'muted',
|
||||
sensitive: false,
|
||||
},
|
||||
user,
|
||||
)
|
||||
} else {
|
||||
setting = await this.factory.createReplacement(setting, {
|
||||
name: settingName,
|
||||
unencryptedValue: 'muted',
|
||||
sensitive: false,
|
||||
})
|
||||
}
|
||||
|
||||
await this.settingRepository.save(setting)
|
||||
|
||||
return Result.ok()
|
||||
}
|
||||
|
||||
private getSettingNameFromLevel(level: string): Result<string> {
|
||||
/* istanbul ignore next */
|
||||
switch (level) {
|
||||
case EmailLevel.LEVELS.FailedCloudBackup:
|
||||
return Result.ok(SettingName.NAMES.MuteFailedCloudBackupsEmails)
|
||||
case EmailLevel.LEVELS.FailedEmailBackup:
|
||||
return Result.ok(SettingName.NAMES.MuteFailedBackupsEmails)
|
||||
case EmailLevel.LEVELS.Marketing:
|
||||
return Result.ok(SettingName.NAMES.MuteMarketingEmails)
|
||||
case EmailLevel.LEVELS.SignIn:
|
||||
return Result.ok(SettingName.NAMES.MuteSignInEmails)
|
||||
default:
|
||||
return Result.fail(`Unknown level: ${level}`)
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,4 @@
export interface DisableEmailSettingBasedOnEmailSubscriptionDTO {
userEmail: string
level: string
}
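For orientation, a hypothetical invocation of the use case added above; the repository, setting repository, factory, and logger instances are assumed to come from the inversify container wired earlier in this changeset:

import { EmailLevel } from '@standardnotes/domain-core'

// Sketch only: concrete dependencies are assumed to exist in scope.
const useCase = new DisableEmailSettingBasedOnEmailSubscription(userRepository, settingRepository, settingFactory)

const result = await useCase.execute({
  userEmail: 'user@example.com',
  level: EmailLevel.LEVELS.Marketing,
})

if (result.isFailed()) {
  // Mirrors the event handler above: failures are logged, not thrown.
  logger.error(`Failed to disable email setting: ${result.getError()}`)
}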
@@ -15,7 +15,7 @@ describe('TraceSession', () => {
|
||||
beforeEach(() => {
|
||||
sessionTraceRepository = {} as jest.Mocked<SessionTraceRepositoryInterface>
|
||||
sessionTraceRepository.findOneByUserUuidAndDate = jest.fn().mockReturnValue(null)
|
||||
sessionTraceRepository.save = jest.fn()
|
||||
sessionTraceRepository.insert = jest.fn()
|
||||
|
||||
timer = {} as jest.Mocked<TimerInterface>
|
||||
timer.getUTCDateNDaysAhead = jest.fn().mockReturnValue(new Date())
|
||||
@@ -30,7 +30,7 @@ describe('TraceSession', () => {
|
||||
|
||||
expect(result.isFailed()).toBe(false)
|
||||
expect(result.getValue().props.userUuid.value).toEqual('0702b137-4f5c-438a-915e-8f8b46572ce5')
|
||||
expect(sessionTraceRepository.save).toHaveBeenCalledTimes(1)
|
||||
expect(sessionTraceRepository.insert).toHaveBeenCalledTimes(1)
|
||||
})
|
||||
|
||||
it('should not save a session trace if one already exists for the same user and date', async () => {
|
||||
@@ -43,7 +43,7 @@ describe('TraceSession', () => {
|
||||
})
|
||||
|
||||
expect(result.isFailed()).toBe(false)
|
||||
expect(sessionTraceRepository.save).not.toHaveBeenCalled()
|
||||
expect(sessionTraceRepository.insert).not.toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('should return an error if userUuid is invalid', async () => {
|
||||
@@ -54,7 +54,7 @@ describe('TraceSession', () => {
|
||||
})
|
||||
|
||||
expect(result.isFailed()).toBe(true)
|
||||
expect(sessionTraceRepository.save).not.toHaveBeenCalled()
|
||||
expect(sessionTraceRepository.insert).not.toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('should return an error if username is invalid', async () => {
|
||||
@@ -65,7 +65,7 @@ describe('TraceSession', () => {
|
||||
})
|
||||
|
||||
expect(result.isFailed()).toBe(true)
|
||||
expect(sessionTraceRepository.save).not.toHaveBeenCalled()
|
||||
expect(sessionTraceRepository.insert).not.toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('should return an error if subscriptionPlanName is invalid', async () => {
|
||||
@@ -76,7 +76,7 @@ describe('TraceSession', () => {
|
||||
})
|
||||
|
||||
expect(result.isFailed()).toBe(true)
|
||||
expect(sessionTraceRepository.save).not.toHaveBeenCalled()
|
||||
expect(sessionTraceRepository.insert).not.toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('should not save a session trace if creating of the session trace fails', async () => {
|
||||
@@ -90,7 +90,7 @@ describe('TraceSession', () => {
|
||||
})
|
||||
|
||||
expect(result.isFailed()).toBe(true)
|
||||
expect(sessionTraceRepository.save).not.toHaveBeenCalled()
|
||||
expect(sessionTraceRepository.insert).not.toHaveBeenCalled()
|
||||
|
||||
mock.mockRestore()
|
||||
})
|
||||
|
||||
@@ -53,7 +53,7 @@ export class TraceSession implements UseCaseInterface<SessionTrace> {
|
||||
}
|
||||
const sessionTrace = sessionTraceOrError.getValue()
|
||||
|
||||
await this.sessionTraceRepository.save(sessionTrace)
|
||||
await this.sessionTraceRepository.insert(sessionTrace)
|
||||
|
||||
return Result.ok<SessionTrace>(sessionTrace)
|
||||
}
|
||||
|
||||
@@ -2,10 +2,12 @@ import { CrossServiceTokenData, TokenDecoderInterface } from '@standardnotes/sec
|
||||
import { NextFunction, Request, Response } from 'express'
|
||||
import { BaseMiddleware } from 'inversify-express-utils'
|
||||
import { Logger } from 'winston'
|
||||
import { Segment, getSegment } from 'aws-xray-sdk'
|
||||
|
||||
export abstract class ApiGatewayAuthMiddleware extends BaseMiddleware {
|
||||
constructor(
|
||||
private tokenDecoder: TokenDecoderInterface<CrossServiceTokenData>,
|
||||
private isConfiguredForAWSProduction: boolean,
|
||||
private logger: Logger,
|
||||
) {
|
||||
super()
|
||||
@@ -39,6 +41,13 @@ export abstract class ApiGatewayAuthMiddleware extends BaseMiddleware {
|
||||
response.locals.session = token.session
|
||||
response.locals.readOnlyAccess = token.session?.readonly_access ?? false
|
||||
|
||||
if (this.isConfiguredForAWSProduction) {
const segment = getSegment()
if (segment instanceof Segment) {
segment.setUser(token.user.uuid)
}
}

return next()
} catch (error) {
return next(error)
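The instanceof guard above is deliberate: getSegment() may hand back a Subsegment rather than a root Segment, and setUser only exists on the latter, so user tagging is skipped unless a real segment is active. An illustrative extraction of that guard:

import { Segment, getSegment } from 'aws-xray-sdk'

// Purely a sketch: only annotate the trace when a root Segment is available.
function tagTraceWithUser(userUuid: string): void {
  const segment = getSegment()
  if (segment instanceof Segment) {
    segment.setUser(userUuid)
  }
}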
|
||||
@@ -1,18 +1,16 @@
|
||||
import { CrossServiceTokenData, TokenDecoderInterface } from '@standardnotes/security'
|
||||
import { NextFunction, Request, Response } from 'express'
|
||||
import { inject, injectable } from 'inversify'
|
||||
import { Logger } from 'winston'
|
||||
|
||||
import TYPES from '../../../Bootstrap/Types'
|
||||
import { ApiGatewayAuthMiddleware } from './ApiGatewayAuthMiddleware'
|
||||
|
||||
@injectable()
|
||||
export class OptionalCrossServiceTokenMiddleware extends ApiGatewayAuthMiddleware {
|
||||
constructor(
|
||||
@inject(TYPES.Auth_CrossServiceTokenDecoder) tokenDecoder: TokenDecoderInterface<CrossServiceTokenData>,
|
||||
@inject(TYPES.Auth_Logger) logger: Logger,
|
||||
tokenDecoder: TokenDecoderInterface<CrossServiceTokenData>,
|
||||
isConfiguredForAWSProduction: boolean,
|
||||
logger: Logger,
|
||||
) {
|
||||
super(tokenDecoder, logger)
|
||||
super(tokenDecoder, isConfiguredForAWSProduction, logger)
|
||||
}
|
||||
|
||||
protected override handleMissingToken(request: Request, _response: Response, next: NextFunction): boolean {
|
||||
|
||||
@@ -1,18 +1,16 @@
|
||||
import { CrossServiceTokenData, TokenDecoderInterface } from '@standardnotes/security'
|
||||
import { NextFunction, Request, Response } from 'express'
|
||||
import { inject, injectable } from 'inversify'
|
||||
import { Logger } from 'winston'
|
||||
|
||||
import TYPES from '../../../Bootstrap/Types'
|
||||
import { ApiGatewayAuthMiddleware } from './ApiGatewayAuthMiddleware'
|
||||
|
||||
@injectable()
|
||||
export class RequiredCrossServiceTokenMiddleware extends ApiGatewayAuthMiddleware {
|
||||
constructor(
|
||||
@inject(TYPES.Auth_CrossServiceTokenDecoder) tokenDecoder: TokenDecoderInterface<CrossServiceTokenData>,
|
||||
@inject(TYPES.Auth_Logger) logger: Logger,
|
||||
tokenDecoder: TokenDecoderInterface<CrossServiceTokenData>,
|
||||
isConfiguredForAWSProduction: boolean,
|
||||
logger: Logger,
|
||||
) {
|
||||
super(tokenDecoder, logger)
|
||||
super(tokenDecoder, isConfiguredForAWSProduction, logger)
|
||||
}
|
||||
|
||||
protected override handleMissingToken(request: Request, response: Response, _next: NextFunction): boolean {
|
||||
|
||||
@@ -19,8 +19,8 @@ export class RedisTransitionStatusRepository implements TransitionStatusReposito
|
||||
await this.redisClient.set(`${this.PREFIX}:${transitionType}:${userUuid}`, status.value)
|
||||
break
|
||||
case TransitionStatus.STATUSES.InProgress: {
|
||||
const ttl2Hourse = 7_200
|
||||
await this.redisClient.setex(`${this.PREFIX}:${transitionType}:${userUuid}`, ttl2Hourse, status.value)
|
||||
const ttl24Hours = 86_400
|
||||
await this.redisClient.setex(`${this.PREFIX}:${transitionType}:${userUuid}`, ttl24Hours, status.value)
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,7 @@
|
||||
import { MapperInterface, SubscriptionPlanName, Uuid } from '@standardnotes/domain-core'
|
||||
import { TimerInterface } from '@standardnotes/time'
|
||||
import { Repository } from 'typeorm'
|
||||
|
||||
import { SessionTrace } from '../../Domain/Session/SessionTrace'
|
||||
import { SessionTraceRepositoryInterface } from '../../Domain/Session/SessionTraceRepositoryInterface'
|
||||
import { TypeORMSessionTrace } from './TypeORMSessionTrace'
|
||||
@@ -8,13 +10,14 @@ export class TypeORMSessionTraceRepository implements SessionTraceRepositoryInte
|
||||
constructor(
|
||||
private ormRepository: Repository<TypeORMSessionTrace>,
|
||||
private mapper: MapperInterface<SessionTrace, TypeORMSessionTrace>,
|
||||
private timer: TimerInterface,
|
||||
) {}
|
||||
|
||||
async countByDateAndSubscriptionPlanName(date: Date, subscriptionPlanName: SubscriptionPlanName): Promise<number> {
|
||||
return this.ormRepository
|
||||
.createQueryBuilder('trace')
|
||||
.where('trace.creation_date = :creationDate', {
|
||||
creationDate: `${date.getFullYear()}-${date.getMonth() + 1}-${date.getDate()}`,
|
||||
creationDate: this.timer.convertDateToFormattedString(date, 'YYYY-MM-DD'),
|
||||
})
|
||||
.andWhere('trace.subscription_plan_name = :subscriptionPlanName', {
|
||||
subscriptionPlanName: subscriptionPlanName.value,
|
||||
@@ -26,7 +29,7 @@ export class TypeORMSessionTraceRepository implements SessionTraceRepositoryInte
|
||||
return this.ormRepository
|
||||
.createQueryBuilder('trace')
|
||||
.where('trace.creation_date = :creationDate', {
|
||||
creationDate: `${date.getFullYear()}-${date.getMonth() + 1}-${date.getDate()}`,
|
||||
creationDate: this.timer.convertDateToFormattedString(date, 'YYYY-MM-DD'),
|
||||
})
|
||||
.getCount()
|
||||
}
|
||||
@@ -44,7 +47,7 @@ export class TypeORMSessionTraceRepository implements SessionTraceRepositoryInte
|
||||
.createQueryBuilder('trace')
|
||||
.where('trace.user_uuid = :userUuid AND trace.creation_date = :creationDate', {
|
||||
userUuid: userUuid.value,
|
||||
creationDate: `${date.getFullYear()}-${date.getMonth() + 1}-${date.getDate()}`,
|
||||
creationDate: this.timer.convertDateToFormattedString(date, 'YYYY-MM-DD'),
|
||||
})
|
||||
.getOne()
|
||||
|
||||
@@ -55,9 +58,9 @@ export class TypeORMSessionTraceRepository implements SessionTraceRepositoryInte
|
||||
return this.mapper.toDomain(typeOrm)
|
||||
}
|
||||
|
||||
async save(sessionTrace: SessionTrace): Promise<void> {
|
||||
async insert(sessionTrace: SessionTrace): Promise<void> {
|
||||
const persistence = this.mapper.toProjection(sessionTrace)
|
||||
|
||||
await this.ormRepository.save(persistence)
|
||||
await this.ormRepository.insert(persistence)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,8 +1,11 @@
|
||||
import { MapperInterface, SubscriptionPlanName, UniqueEntityId, Username, Uuid } from '@standardnotes/domain-core'
|
||||
import { SessionTrace } from '../Domain/Session/SessionTrace'
|
||||
import { TypeORMSessionTrace } from '../Infra/TypeORM/TypeORMSessionTrace'
|
||||
import { TimerInterface } from '@standardnotes/time'
|
||||
|
||||
export class SessionTracePersistenceMapper implements MapperInterface<SessionTrace, TypeORMSessionTrace> {
|
||||
constructor(private timer: TimerInterface) {}
|
||||
|
||||
toDomain(projection: TypeORMSessionTrace): SessionTrace {
|
||||
const userUuidOrError = Uuid.create(projection.userUuid)
|
||||
if (userUuidOrError.isFailed()) {
|
||||
@@ -50,11 +53,7 @@ export class SessionTracePersistenceMapper implements MapperInterface<SessionTra
|
||||
typeOrm.username = domain.props.username.value
|
||||
typeOrm.subscriptionPlanName = domain.props.subscriptionPlanName ? domain.props.subscriptionPlanName.value : null
|
||||
typeOrm.createdAt = domain.props.createdAt
typeOrm.creationDate = new Date(
domain.props.createdAt.getFullYear(),
domain.props.createdAt.getMonth(),
domain.props.createdAt.getDate(),
)
typeOrm.creationDate = new Date(this.timer.convertDateToFormattedString(domain.props.createdAt, 'YYYY-MM-DD'))
typeOrm.expiresAt = domain.props.expiresAt

return typeOrm
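The mapper now renders creation_date with the same timer-driven 'YYYY-MM-DD' format the repository queries switched to above, instead of rebuilding the date from local-time components. A rough illustration, assuming timer is the injected @standardnotes/time implementation:

const createdAt = new Date('2023-10-04T23:30:00Z')

// Old approach: derive the day from the host's local time zone.
const legacyCreationDate = new Date(createdAt.getFullYear(), createdAt.getMonth(), createdAt.getDate())

// New approach: one canonical day string, shared by persistence and lookups.
const dayString = timer.convertDateToFormattedString(createdAt, 'YYYY-MM-DD')
const creationDate = new Date(dayString)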
|
||||
@@ -3,6 +3,36 @@
|
||||
All notable changes to this project will be documented in this file.
|
||||
See [Conventional Commits](https://conventionalcommits.org) for commit guidelines.
|
||||
|
||||
## [1.14.3](https://github.com/standardnotes/server/compare/@standardnotes/domain-events-infra@1.14.2...@standardnotes/domain-events-infra@1.14.3) (2023-10-04)
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* **domain-events-infra:** handling segments ([207ef9f](https://github.com/standardnotes/server/commit/207ef9f3e531b730f3f4869fb128354dbd659f46))
|
||||
* **domain-events-infra:** remove redundant flush ([64f1fe5](https://github.com/standardnotes/server/commit/64f1fe59c23894bda9ad40c6bdeaf2997a8b48ce))
|
||||
|
||||
## [1.14.2](https://github.com/standardnotes/server/compare/@standardnotes/domain-events-infra@1.14.1...@standardnotes/domain-events-infra@1.14.2) (2023-10-04)
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* **domain-events-infra:** handling async functions ([f6bc1c3](https://github.com/standardnotes/server/commit/f6bc1c30846ecfb159f7fb1cdd8bfc3ab15b2dde))
|
||||
* **domain-events-infra:** imports ([7668713](https://github.com/standardnotes/server/commit/7668713dd6f65b65e546152828f5f52ea34f8bf5))
|
||||
|
||||
## [1.14.1](https://github.com/standardnotes/server/compare/@standardnotes/domain-events-infra@1.14.0...@standardnotes/domain-events-infra@1.14.1) (2023-10-03)
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* **domain-events-infra:** change segment closing ([e066b6a](https://github.com/standardnotes/server/commit/e066b6a126754e5b1ea73b8ef5ce9068dc424d77))
|
||||
|
||||
# [1.14.0](https://github.com/standardnotes/server/compare/@standardnotes/domain-events-infra@1.13.1...@standardnotes/domain-events-infra@1.14.0) (2023-10-03)
|
||||
|
||||
### Features
|
||||
|
||||
* add xray segment tracing on auth-worker ([b1b244a](https://github.com/standardnotes/server/commit/b1b244a2cf1e17ddf67fc9b238b4b25a1bc5a190))
|
||||
|
||||
## [1.13.1](https://github.com/standardnotes/server/compare/@standardnotes/domain-events-infra@1.13.0...@standardnotes/domain-events-infra@1.13.1) (2023-09-28)
|
||||
|
||||
**Note:** Version bump only for package @standardnotes/domain-events-infra
|
||||
|
||||
# [1.13.0](https://github.com/standardnotes/server/compare/@standardnotes/domain-events-infra@1.12.34...@standardnotes/domain-events-infra@1.13.0) (2023-09-26)
|
||||
|
||||
### Features
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@standardnotes/domain-events-infra",
|
||||
"version": "1.13.0",
|
||||
"version": "1.14.3",
|
||||
"engines": {
|
||||
"node": ">=18.0.0 <21.0.0"
|
||||
},
|
||||
@@ -27,6 +27,7 @@
|
||||
"@aws-sdk/client-sns": "^3.332.0",
|
||||
"@aws-sdk/client-sqs": "^3.332.0",
|
||||
"@standardnotes/domain-events": "workspace:*",
|
||||
"aws-xray-sdk": "^3.5.2",
|
||||
"ioredis": "^5.2.4",
|
||||
"reflect-metadata": "^0.1.13",
|
||||
"sqs-consumer": "^6.2.1",
|
||||
|
||||
@@ -0,0 +1,59 @@
|
||||
import { Logger } from 'winston'
|
||||
import * as zlib from 'zlib'
|
||||
import { Segment, Subsegment, captureAsyncFunc } from 'aws-xray-sdk'
|
||||
|
||||
import {
|
||||
DomainEventHandlerInterface,
|
||||
DomainEventInterface,
|
||||
DomainEventMessageHandlerInterface,
|
||||
} from '@standardnotes/domain-events'
|
||||
|
||||
export class SQSXRayEventMessageHandler implements DomainEventMessageHandlerInterface {
|
||||
constructor(
|
||||
private handlers: Map<string, DomainEventHandlerInterface>,
|
||||
private logger: Logger,
|
||||
) {}
|
||||
|
||||
async handleMessage(message: string): Promise<void> {
|
||||
const messageParsed = JSON.parse(message)
|
||||
|
||||
const domainEventJson = zlib.unzipSync(Buffer.from(messageParsed.Message, 'base64')).toString()
|
||||
|
||||
const domainEvent: DomainEventInterface = JSON.parse(domainEventJson)
|
||||
|
||||
domainEvent.createdAt = new Date(domainEvent.createdAt)
|
||||
|
||||
const handler = this.handlers.get(domainEvent.type)
|
||||
if (!handler) {
|
||||
this.logger.debug(`Event handler for event type ${domainEvent.type} does not exist`)
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
this.logger.debug(`Received event: ${domainEvent.type}`)
|
||||
|
||||
const xRaySegment = new Segment(domainEvent.type)
|
||||
|
||||
if (domainEvent.meta.correlation.userIdentifierType === 'uuid') {
|
||||
xRaySegment.setUser(domainEvent.meta.correlation.userIdentifier)
|
||||
}
|
||||
|
||||
await captureAsyncFunc(
|
||||
`${handler.constructor.name}.handle`,
|
||||
async (subsegment?: Subsegment) => {
|
||||
await handler.handle(domainEvent)
|
||||
|
||||
if (subsegment) {
|
||||
subsegment.close()
|
||||
}
|
||||
},
|
||||
xRaySegment,
|
||||
)
|
||||
|
||||
xRaySegment.close()
|
||||
}
|
||||
|
||||
async handleError(error: Error): Promise<void> {
|
||||
this.logger.error('Error occurred while handling SQS message: %O', error)
|
||||
}
|
||||
}
|
||||
@@ -12,3 +12,4 @@ export * from './SQS/SQSNewRelicBounceNotificiationHandler'
export * from './SQS/SQSDomainEventSubscriberFactory'
export * from './SQS/SQSEventMessageHandler'
export * from './SQS/SQSNewRelicEventMessageHandler'
export * from './SQS/SQSXRayEventMessageHandler'
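A rough usage sketch for the new X-Ray message handler; the event type, handler instance, and logger below are made up, but the base64 + gzip envelope matches what handleMessage unwraps:

import * as zlib from 'zlib'

// Hypothetical wiring: one domain event handler registered per event type.
const handlers = new Map([['USER_REGISTERED', userRegisteredEventHandler]])
const messageHandler = new SQSXRayEventMessageHandler(handlers, logger)

const event = {
  type: 'USER_REGISTERED',
  createdAt: new Date(),
  payload: {},
  meta: { correlation: { userIdentifier: '1-2-3', userIdentifierType: 'uuid' } },
}
const sqsBody = JSON.stringify({
  Message: zlib.gzipSync(Buffer.from(JSON.stringify(event))).toString('base64'),
})

await messageHandler.handleMessage(sqsBody)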
|
||||
@@ -3,6 +3,10 @@
|
||||
All notable changes to this project will be documented in this file.
|
||||
See [Conventional Commits](https://conventionalcommits.org) for commit guidelines.
|
||||
|
||||
## [2.131.1](https://github.com/standardnotes/server/compare/@standardnotes/domain-events@2.131.0...@standardnotes/domain-events@2.131.1) (2023-09-28)
|
||||
|
||||
**Note:** Version bump only for package @standardnotes/domain-events
|
||||
|
||||
# [2.131.0](https://github.com/standardnotes/server/compare/@standardnotes/domain-events@2.130.0...@standardnotes/domain-events@2.131.0) (2023-09-26)
|
||||
|
||||
### Features
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@standardnotes/domain-events",
|
||||
"version": "2.131.0",
|
||||
"version": "2.131.1",
|
||||
"engines": {
|
||||
"node": ">=18.0.0 <21.0.0"
|
||||
},
|
||||
|
||||
@@ -3,6 +3,26 @@
|
||||
All notable changes to this project will be documented in this file.
|
||||
See [Conventional Commits](https://conventionalcommits.org) for commit guidelines.
|
||||
|
||||
## [1.12.6](https://github.com/standardnotes/server/compare/@standardnotes/event-store@1.12.5...@standardnotes/event-store@1.12.6) (2023-10-04)
|
||||
|
||||
**Note:** Version bump only for package @standardnotes/event-store
|
||||
|
||||
## [1.12.5](https://github.com/standardnotes/server/compare/@standardnotes/event-store@1.12.4...@standardnotes/event-store@1.12.5) (2023-10-04)
|
||||
|
||||
**Note:** Version bump only for package @standardnotes/event-store
|
||||
|
||||
## [1.12.4](https://github.com/standardnotes/server/compare/@standardnotes/event-store@1.12.3...@standardnotes/event-store@1.12.4) (2023-10-03)
|
||||
|
||||
**Note:** Version bump only for package @standardnotes/event-store
|
||||
|
||||
## [1.12.3](https://github.com/standardnotes/server/compare/@standardnotes/event-store@1.12.2...@standardnotes/event-store@1.12.3) (2023-10-03)
|
||||
|
||||
**Note:** Version bump only for package @standardnotes/event-store
|
||||
|
||||
## [1.12.2](https://github.com/standardnotes/server/compare/@standardnotes/event-store@1.12.1...@standardnotes/event-store@1.12.2) (2023-09-28)
|
||||
|
||||
**Note:** Version bump only for package @standardnotes/event-store
|
||||
|
||||
## [1.12.1](https://github.com/standardnotes/server/compare/@standardnotes/event-store@1.12.0...@standardnotes/event-store@1.12.1) (2023-09-27)
|
||||
|
||||
**Note:** Version bump only for package @standardnotes/event-store
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@standardnotes/event-store",
|
||||
"version": "1.12.1",
|
||||
"version": "1.12.6",
|
||||
"description": "Event Store Service",
|
||||
"private": true,
|
||||
"main": "dist/src/index.js",
|
||||
|
||||
@@ -3,6 +3,28 @@
|
||||
All notable changes to this project will be documented in this file.
|
||||
See [Conventional Commits](https://conventionalcommits.org) for commit guidelines.
|
||||
|
||||
## [1.25.4](https://github.com/standardnotes/files/compare/@standardnotes/files-server@1.25.3...@standardnotes/files-server@1.25.4) (2023-10-04)
|
||||
|
||||
**Note:** Version bump only for package @standardnotes/files-server
|
||||
|
||||
## [1.25.3](https://github.com/standardnotes/files/compare/@standardnotes/files-server@1.25.2...@standardnotes/files-server@1.25.3) (2023-10-04)
|
||||
|
||||
**Note:** Version bump only for package @standardnotes/files-server
|
||||
|
||||
## [1.25.2](https://github.com/standardnotes/files/compare/@standardnotes/files-server@1.25.1...@standardnotes/files-server@1.25.2) (2023-10-03)
|
||||
|
||||
**Note:** Version bump only for package @standardnotes/files-server
|
||||
|
||||
## [1.25.1](https://github.com/standardnotes/files/compare/@standardnotes/files-server@1.25.0...@standardnotes/files-server@1.25.1) (2023-10-03)
|
||||
|
||||
**Note:** Version bump only for package @standardnotes/files-server
|
||||
|
||||
# [1.25.0](https://github.com/standardnotes/files/compare/@standardnotes/files-server@1.24.1...@standardnotes/files-server@1.25.0) (2023-09-28)
|
||||
|
||||
### Features
|
||||
|
||||
* block file operations during transition ([#856](https://github.com/standardnotes/files/issues/856)) ([676cf36](https://github.com/standardnotes/files/commit/676cf36f8d769aa433880b2800f0457d06fbbf14))
|
||||
|
||||
## [1.24.1](https://github.com/standardnotes/files/compare/@standardnotes/files-server@1.24.0...@standardnotes/files-server@1.24.1) (2023-09-27)
|
||||
|
||||
**Note:** Version bump only for package @standardnotes/files-server
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@standardnotes/files-server",
|
||||
"version": "1.24.1",
|
||||
"version": "1.25.4",
|
||||
"engines": {
|
||||
"node": ">=18.0.0 <21.0.0"
|
||||
},
|
||||
|
||||
@@ -198,7 +198,9 @@ export class ContainerConfigLoader {
|
||||
.toConstantValue(
|
||||
new FSFileUploader(container.get(TYPES.Files_FILE_UPLOAD_PATH), container.get(TYPES.Files_Logger)),
|
||||
)
|
||||
container.bind<FileRemoverInterface>(TYPES.Files_FileRemover).to(FSFileRemover)
|
||||
container
|
||||
.bind<FileRemoverInterface>(TYPES.Files_FileRemover)
|
||||
.toConstantValue(new FSFileRemover(container.get<string>(TYPES.Files_FILE_UPLOAD_PATH)))
|
||||
container.bind<FileMoverInterface>(TYPES.Files_FileMover).to(FSFileMover)
|
||||
}
|
||||
|
||||
@@ -247,12 +249,26 @@ export class ContainerConfigLoader {
|
||||
// Handlers
|
||||
container
|
||||
.bind<AccountDeletionRequestedEventHandler>(TYPES.Files_AccountDeletionRequestedEventHandler)
|
||||
.to(AccountDeletionRequestedEventHandler)
|
||||
.toConstantValue(
|
||||
new AccountDeletionRequestedEventHandler(
|
||||
container.get<MarkFilesToBeRemoved>(TYPES.Files_MarkFilesToBeRemoved),
|
||||
container.get<DomainEventPublisherInterface>(TYPES.Files_DomainEventPublisher),
|
||||
container.get<DomainEventFactoryInterface>(TYPES.Files_DomainEventFactory),
|
||||
container.get<winston.Logger>(TYPES.Files_Logger),
|
||||
),
|
||||
)
|
||||
container
|
||||
.bind<SharedSubscriptionInvitationCanceledEventHandler>(
|
||||
TYPES.Files_SharedSubscriptionInvitationCanceledEventHandler,
|
||||
)
|
||||
.to(SharedSubscriptionInvitationCanceledEventHandler)
|
||||
.toConstantValue(
|
||||
new SharedSubscriptionInvitationCanceledEventHandler(
|
||||
container.get<MarkFilesToBeRemoved>(TYPES.Files_MarkFilesToBeRemoved),
|
||||
container.get<DomainEventPublisherInterface>(TYPES.Files_DomainEventPublisher),
|
||||
container.get<DomainEventFactoryInterface>(TYPES.Files_DomainEventFactory),
|
||||
container.get<winston.Logger>(TYPES.Files_Logger),
|
||||
),
|
||||
)
|
||||
|
||||
const eventHandlers: Map<string, DomainEventHandlerInterface> = new Map([
|
||||
['ACCOUNT_DELETION_REQUESTED', container.get(TYPES.Files_AccountDeletionRequestedEventHandler)],
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
export type RemovedFileDescription = {
|
||||
userUuid: string
|
||||
userOrSharedVaultUuid: string
|
||||
filePath: string
|
||||
fileName: string
|
||||
fileByteSize: number
|
||||
|
||||
@@ -3,18 +3,17 @@ import {
|
||||
DomainEventHandlerInterface,
|
||||
DomainEventPublisherInterface,
|
||||
} from '@standardnotes/domain-events'
|
||||
import { inject, injectable } from 'inversify'
|
||||
import { Logger } from 'winston'
|
||||
|
||||
import TYPES from '../../Bootstrap/Types'
|
||||
import { DomainEventFactoryInterface } from '../Event/DomainEventFactoryInterface'
|
||||
import { MarkFilesToBeRemoved } from '../UseCase/MarkFilesToBeRemoved/MarkFilesToBeRemoved'
|
||||
|
||||
@injectable()
|
||||
export class AccountDeletionRequestedEventHandler implements DomainEventHandlerInterface {
|
||||
constructor(
|
||||
@inject(TYPES.Files_MarkFilesToBeRemoved) private markFilesToBeRemoved: MarkFilesToBeRemoved,
|
||||
@inject(TYPES.Files_DomainEventPublisher) private domainEventPublisher: DomainEventPublisherInterface,
|
||||
@inject(TYPES.Files_DomainEventFactory) private domainEventFactory: DomainEventFactoryInterface,
|
||||
private markFilesToBeRemoved: MarkFilesToBeRemoved,
|
||||
private domainEventPublisher: DomainEventPublisherInterface,
|
||||
private domainEventFactory: DomainEventFactoryInterface,
|
||||
private logger: Logger,
|
||||
) {}
|
||||
|
||||
async handle(event: AccountDeletionRequestedEvent): Promise<void> {
|
||||
@@ -27,16 +26,23 @@ export class AccountDeletionRequestedEventHandler implements DomainEventHandlerI
|
||||
})
|
||||
|
||||
if (result.isFailed()) {
|
||||
this.logger.error(`Could not mark files for removal for user ${event.payload.userUuid}: ${result.getError()}`)
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
const filesRemoved = result.getValue()
|
||||
|
||||
this.logger.debug(`Marked ${filesRemoved.length} files for removal for user ${event.payload.userUuid}`)
|
||||
|
||||
for (const fileRemoved of filesRemoved) {
|
||||
await this.domainEventPublisher.publish(
|
||||
this.domainEventFactory.createFileRemovedEvent({
|
||||
regularSubscriptionUuid: event.payload.regularSubscriptionUuid,
|
||||
...fileRemoved,
|
||||
userUuid: fileRemoved.userOrSharedVaultUuid,
|
||||
filePath: fileRemoved.filePath,
|
||||
fileName: fileRemoved.fileName,
|
||||
fileByteSize: fileRemoved.fileByteSize,
|
||||
}),
|
||||
)
|
||||
}
|
||||
|
||||
@@ -3,18 +3,17 @@ import {
|
||||
DomainEventHandlerInterface,
|
||||
DomainEventPublisherInterface,
|
||||
} from '@standardnotes/domain-events'
|
||||
import { inject, injectable } from 'inversify'
|
||||
import { Logger } from 'winston'
|
||||
|
||||
import TYPES from '../../Bootstrap/Types'
|
||||
import { DomainEventFactoryInterface } from '../Event/DomainEventFactoryInterface'
|
||||
import { MarkFilesToBeRemoved } from '../UseCase/MarkFilesToBeRemoved/MarkFilesToBeRemoved'
|
||||
|
||||
@injectable()
|
||||
export class SharedSubscriptionInvitationCanceledEventHandler implements DomainEventHandlerInterface {
|
||||
constructor(
|
||||
@inject(TYPES.Files_MarkFilesToBeRemoved) private markFilesToBeRemoved: MarkFilesToBeRemoved,
|
||||
@inject(TYPES.Files_DomainEventPublisher) private domainEventPublisher: DomainEventPublisherInterface,
|
||||
@inject(TYPES.Files_DomainEventFactory) private domainEventFactory: DomainEventFactoryInterface,
|
||||
private markFilesToBeRemoved: MarkFilesToBeRemoved,
|
||||
private domainEventPublisher: DomainEventPublisherInterface,
|
||||
private domainEventFactory: DomainEventFactoryInterface,
|
||||
private logger: Logger,
|
||||
) {}
|
||||
|
||||
async handle(event: SharedSubscriptionInvitationCanceledEvent): Promise<void> {
|
||||
@@ -27,16 +26,25 @@ export class SharedSubscriptionInvitationCanceledEventHandler implements DomainE
|
||||
})
|
||||
|
||||
if (result.isFailed()) {
|
||||
this.logger.error(
|
||||
`Could not mark files to be removed for invitee: ${event.payload.inviteeIdentifier}: ${result.getError()}`,
|
||||
)
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
const filesRemoved = result.getValue()
|
||||
|
||||
this.logger.debug(`Marked ${filesRemoved.length} files for removal for invitee ${event.payload.inviteeIdentifier}`)
|
||||
|
||||
for (const fileRemoved of filesRemoved) {
|
||||
await this.domainEventPublisher.publish(
|
||||
this.domainEventFactory.createFileRemovedEvent({
|
||||
regularSubscriptionUuid: event.payload.inviterSubscriptionUuid,
|
||||
...fileRemoved,
|
||||
userUuid: fileRemoved.userOrSharedVaultUuid,
|
||||
filePath: fileRemoved.filePath,
|
||||
fileName: fileRemoved.fileName,
|
||||
fileByteSize: fileRemoved.fileByteSize,
|
||||
}),
|
||||
)
|
||||
}
|
||||
|
||||
@@ -29,6 +29,10 @@ export class SharedVaultRemovedEventHandler implements DomainEventHandlerInterfa
|
||||
|
||||
const filesRemoved = result.getValue()
|
||||
|
||||
this.logger.debug(
|
||||
`Marked ${filesRemoved.length} files for removal for shared vault ${event.payload.sharedVaultUuid}`,
|
||||
)
|
||||
|
||||
for (const fileRemoved of filesRemoved) {
|
||||
await this.domainEventPublisher.publish(
|
||||
this.domainEventFactory.createSharedVaultFileRemovedEvent({
|
||||
|
||||
@@ -2,5 +2,5 @@ import { RemovedFileDescription } from '../File/RemovedFileDescription'
|
||||
|
||||
export interface FileRemoverInterface {
|
||||
remove(filePath: string): Promise<number>
|
||||
markFilesToBeRemoved(userUuid: string): Promise<Array<RemovedFileDescription>>
|
||||
markFilesToBeRemoved(userOrSharedVaultUuid: string): Promise<Array<RemovedFileDescription>>
|
||||
}
|
||||
|
||||
@@ -1,18 +1,42 @@
import { inject, injectable } from 'inversify'
import { promises } from 'fs'

import { FileRemoverInterface } from '../../Domain/Services/FileRemoverInterface'
import { RemovedFileDescription } from '../../Domain/File/RemovedFileDescription'
import TYPES from '../../Bootstrap/Types'

@injectable()
export class FSFileRemover implements FileRemoverInterface {
constructor(@inject(TYPES.Files_FILE_UPLOAD_PATH) private fileUploadPath: string) {}
constructor(private fileUploadPath: string) {}

async markFilesToBeRemoved(userUuid: string): Promise<Array<RemovedFileDescription>> {
await promises.rmdir(`${this.fileUploadPath}/${userUuid}`)
async markFilesToBeRemoved(userOrSharedVaultUuid: string): Promise<Array<RemovedFileDescription>> {
const removedFileDescriptions: RemovedFileDescription[] = []

return []
let directoryExists: boolean
try {
await promises.access(`${this.fileUploadPath}/${userOrSharedVaultUuid}`)
directoryExists = true
} catch (error) {
directoryExists = false
}

if (!directoryExists) {
return []
}

const files = await promises.readdir(`${this.fileUploadPath}/${userOrSharedVaultUuid}`, { withFileTypes: true })

for (const file of files) {
const filePath = `${this.fileUploadPath}/${userOrSharedVaultUuid}/${file.name}`

const fileByteSize = await this.remove(`${userOrSharedVaultUuid}/${file.name}`)

removedFileDescriptions.push({
filePath,
fileByteSize,
userOrSharedVaultUuid,
fileName: file.name,
})
}

return removedFileDescriptions
}

async remove(filePath: string): Promise<number> {
@@ -13,6 +13,7 @@ describe('ValetTokenAuthMiddleware', () => {

const logger = {
debug: jest.fn(),
error: jest.fn(),
} as unknown as jest.Mocked<Logger>

const createMiddleware = () => new ValetTokenAuthMiddleware(tokenDecoder, logger)
@@ -222,4 +223,27 @@ describe('ValetTokenAuthMiddleware', () => {

expect(next).toHaveBeenCalledWith(error)
})

it('should throw an error if the valet token indicates an ongoing transition', async () => {
request.headers['x-valet-token'] = 'valet-token'

tokenDecoder.decodeToken = jest.fn().mockReturnValue({
userUuid: '1-2-3',
permittedResources: [
{
remoteIdentifier: '00000000-0000-0000-0000-000000000000',
unencryptedFileSize: 30,
},
],
permittedOperation: 'write',
uploadBytesLimit: -1,
uploadBytesUsed: 80,
ongoingTransition: true,
})

await createMiddleware().handler(request, response, next)

expect(response.status).toHaveBeenCalledWith(500)
expect(next).not.toHaveBeenCalled()
})
})

@@ -46,6 +46,19 @@ export class ValetTokenAuthMiddleware extends BaseMiddleware {
return
}

if (valetTokenData.ongoingTransition === true) {
this.logger.error(`Cannot perform file operations for user ${valetTokenData.userUuid} during transition`)

response.status(500).send({
error: {
tag: 'ongoing-transition',
message: 'Cannot perform file operations during transition',
},
})

return
}

for (const resource of valetTokenData.permittedResources) {
const resourceUuidOrError = Uuid.create(resource.remoteIdentifier)
if (resourceUuidOrError.isFailed()) {
@@ -59,7 +59,7 @@ export class S3FileRemover implements FileRemoverInterface {
fileByteSize: file.Size as number,
fileName: file.Key.replace(`${userUuid}/`, ''),
filePath: file.Key,
userUuid,
userOrSharedVaultUuid: userUuid,
})
}
@@ -3,6 +3,54 @@
All notable changes to this project will be documented in this file.
See [Conventional Commits](https://conventionalcommits.org) for commit guidelines.

## [1.16.20](https://github.com/standardnotes/server/compare/@standardnotes/home-server@1.16.19...@standardnotes/home-server@1.16.20) (2023-10-04)

**Note:** Version bump only for package @standardnotes/home-server

## [1.16.19](https://github.com/standardnotes/server/compare/@standardnotes/home-server@1.16.18...@standardnotes/home-server@1.16.19) (2023-10-04)

**Note:** Version bump only for package @standardnotes/home-server

## [1.16.18](https://github.com/standardnotes/server/compare/@standardnotes/home-server@1.16.17...@standardnotes/home-server@1.16.18) (2023-10-04)

**Note:** Version bump only for package @standardnotes/home-server

## [1.16.17](https://github.com/standardnotes/server/compare/@standardnotes/home-server@1.16.16...@standardnotes/home-server@1.16.17) (2023-10-04)

**Note:** Version bump only for package @standardnotes/home-server

## [1.16.16](https://github.com/standardnotes/server/compare/@standardnotes/home-server@1.16.15...@standardnotes/home-server@1.16.16) (2023-10-03)

**Note:** Version bump only for package @standardnotes/home-server

## [1.16.15](https://github.com/standardnotes/server/compare/@standardnotes/home-server@1.16.14...@standardnotes/home-server@1.16.15) (2023-10-03)

**Note:** Version bump only for package @standardnotes/home-server

## [1.16.14](https://github.com/standardnotes/server/compare/@standardnotes/home-server@1.16.13...@standardnotes/home-server@1.16.14) (2023-10-02)

**Note:** Version bump only for package @standardnotes/home-server

## [1.16.13](https://github.com/standardnotes/server/compare/@standardnotes/home-server@1.16.12...@standardnotes/home-server@1.16.13) (2023-10-02)

**Note:** Version bump only for package @standardnotes/home-server

## [1.16.12](https://github.com/standardnotes/server/compare/@standardnotes/home-server@1.16.11...@standardnotes/home-server@1.16.12) (2023-09-29)

**Note:** Version bump only for package @standardnotes/home-server

## [1.16.11](https://github.com/standardnotes/server/compare/@standardnotes/home-server@1.16.10...@standardnotes/home-server@1.16.11) (2023-09-29)

**Note:** Version bump only for package @standardnotes/home-server

## [1.16.10](https://github.com/standardnotes/server/compare/@standardnotes/home-server@1.16.9...@standardnotes/home-server@1.16.10) (2023-09-29)

**Note:** Version bump only for package @standardnotes/home-server

## [1.16.9](https://github.com/standardnotes/server/compare/@standardnotes/home-server@1.16.8...@standardnotes/home-server@1.16.9) (2023-09-28)

**Note:** Version bump only for package @standardnotes/home-server

## [1.16.8](https://github.com/standardnotes/server/compare/@standardnotes/home-server@1.16.7...@standardnotes/home-server@1.16.8) (2023-09-27)

**Note:** Version bump only for package @standardnotes/home-server
@@ -1,6 +1,6 @@
{
"name": "@standardnotes/home-server",
"version": "1.16.8",
"version": "1.16.20",
"engines": {
"node": ">=18.0.0 <21.0.0"
},
@@ -3,6 +3,44 @@
All notable changes to this project will be documented in this file.
See [Conventional Commits](https://conventionalcommits.org) for commit guidelines.

## [1.38.9](https://github.com/standardnotes/server/compare/@standardnotes/revisions-server@1.38.8...@standardnotes/revisions-server@1.38.9) (2023-10-04)

**Note:** Version bump only for package @standardnotes/revisions-server

## [1.38.8](https://github.com/standardnotes/server/compare/@standardnotes/revisions-server@1.38.7...@standardnotes/revisions-server@1.38.8) (2023-10-04)

**Note:** Version bump only for package @standardnotes/revisions-server

## [1.38.7](https://github.com/standardnotes/server/compare/@standardnotes/revisions-server@1.38.6...@standardnotes/revisions-server@1.38.7) (2023-10-03)

**Note:** Version bump only for package @standardnotes/revisions-server

## [1.38.6](https://github.com/standardnotes/server/compare/@standardnotes/revisions-server@1.38.5...@standardnotes/revisions-server@1.38.6) (2023-10-03)

**Note:** Version bump only for package @standardnotes/revisions-server

## [1.38.5](https://github.com/standardnotes/server/compare/@standardnotes/revisions-server@1.38.4...@standardnotes/revisions-server@1.38.5) (2023-10-02)

### Bug Fixes

* temporarily disable transitions to empty overpopulated queues ([cd893b4](https://github.com/standardnotes/server/commit/cd893b41d7371bdc32acc111f7cea797ec33cad5))

## [1.38.4](https://github.com/standardnotes/server/compare/@standardnotes/revisions-server@1.38.3...@standardnotes/revisions-server@1.38.4) (2023-09-29)

### Bug Fixes

* keep transition in-progress status alive ([032cde7](https://github.com/standardnotes/server/commit/032cde77233076814b1de5791850b4ee4c8dc1f4))

## [1.38.3](https://github.com/standardnotes/server/compare/@standardnotes/revisions-server@1.38.2...@standardnotes/revisions-server@1.38.3) (2023-09-29)

### Bug Fixes

* add paging memory to integrity check ([e4ca310](https://github.com/standardnotes/server/commit/e4ca310707b12b1c08073a391e8857ee52acd92b))

## [1.38.2](https://github.com/standardnotes/server/compare/@standardnotes/revisions-server@1.38.1...@standardnotes/revisions-server@1.38.2) (2023-09-28)

**Note:** Version bump only for package @standardnotes/revisions-server

## [1.38.1](https://github.com/standardnotes/server/compare/@standardnotes/revisions-server@1.38.0...@standardnotes/revisions-server@1.38.1) (2023-09-27)

**Note:** Version bump only for package @standardnotes/revisions-server
@@ -1,6 +1,6 @@
{
"name": "@standardnotes/revisions-server",
"version": "1.38.1",
"version": "1.38.9",
"engines": {
"node": ">=18.0.0 <21.0.0"
},
@@ -390,6 +390,8 @@ export class ContainerConfigLoader {
container.get<TimerInterface>(TYPES.Revisions_Timer),
container.get<winston.Logger>(TYPES.Revisions_Logger),
env.get('MIGRATION_BATCH_SIZE', true) ? +env.get('MIGRATION_BATCH_SIZE', true) : 100,
container.get<DomainEventPublisherInterface>(TYPES.Revisions_DomainEventPublisher),
container.get<DomainEventFactoryInterface>(TYPES.Revisions_DomainEventFactory),
),
)
container
@@ -470,12 +472,10 @@ export class ContainerConfigLoader {
.bind<TransitionRequestedEventHandler>(TYPES.Revisions_TransitionRequestedEventHandler)
.toConstantValue(
new TransitionRequestedEventHandler(
true,
container.get<TransitionRevisionsFromPrimaryToSecondaryDatabaseForUser>(
TYPES.Revisions_TransitionRevisionsFromPrimaryToSecondaryDatabaseForUser,
),
container.get<RevisionRepositoryInterface>(TYPES.Revisions_SQLRevisionRepository),
container.get<DomainEventPublisherInterface>(TYPES.Revisions_DomainEventPublisher),
container.get<DomainEventFactoryInterface>(TYPES.Revisions_DomainEventFactory),
container.get<winston.Logger>(TYPES.Revisions_Logger),
),
)
@@ -1,116 +1,30 @@
import {
DomainEventHandlerInterface,
DomainEventPublisherInterface,
TransitionRequestedEvent,
} from '@standardnotes/domain-events'
import { DomainEventHandlerInterface, TransitionRequestedEvent } from '@standardnotes/domain-events'
import { Logger } from 'winston'
import { TransitionRevisionsFromPrimaryToSecondaryDatabaseForUser } from '../UseCase/Transition/TransitionRevisionsFromPrimaryToSecondaryDatabaseForUser/TransitionRevisionsFromPrimaryToSecondaryDatabaseForUser'
import { DomainEventFactoryInterface } from '../Event/DomainEventFactoryInterface'
import { RevisionRepositoryInterface } from '../Revision/RevisionRepositoryInterface'
import { TransitionStatus, Uuid } from '@standardnotes/domain-core'

export class TransitionRequestedEventHandler implements DomainEventHandlerInterface {
constructor(
private disabled: boolean,
private transitionRevisionsFromPrimaryToSecondaryDatabaseForUser: TransitionRevisionsFromPrimaryToSecondaryDatabaseForUser,
private primaryRevisionsRepository: RevisionRepositoryInterface,
private domainEventPublisher: DomainEventPublisherInterface,
private domainEventFactory: DomainEventFactoryInterface,
private logger: Logger,
) {}

async handle(event: TransitionRequestedEvent): Promise<void> {
if (this.disabled) {
return
}

if (event.payload.type !== 'revisions') {
return
}

const userUuid = await this.getUserUuidFromEvent(event)
if (!userUuid) {
return
}

if (await this.isAlreadyMigrated(userUuid)) {
this.logger.info(`[${event.payload.userUuid}] User already migrated.`)

await this.domainEventPublisher.publish(
this.domainEventFactory.createTransitionStatusUpdatedEvent({
userUuid: event.payload.userUuid,
status: TransitionStatus.STATUSES.Verified,
transitionType: 'revisions',
transitionTimestamp: event.payload.timestamp,
}),
)

return
}

this.logger.info(`[${event.payload.userUuid}] Handling transition requested event`)

await this.domainEventPublisher.publish(
this.domainEventFactory.createTransitionStatusUpdatedEvent({
userUuid: event.payload.userUuid,
status: TransitionStatus.STATUSES.InProgress,
transitionType: 'revisions',
transitionTimestamp: event.payload.timestamp,
}),
)

const result = await this.transitionRevisionsFromPrimaryToSecondaryDatabaseForUser.execute({
userUuid: event.payload.userUuid,
timestamp: event.payload.timestamp,
})

if (result.isFailed()) {
this.logger.error(`[${event.payload.userUuid}] Failed to transition: ${result.getError()}`)

await this.domainEventPublisher.publish(
this.domainEventFactory.createTransitionStatusUpdatedEvent({
userUuid: event.payload.userUuid,
status: TransitionStatus.STATUSES.Failed,
transitionType: 'revisions',
transitionTimestamp: event.payload.timestamp,
}),
)

return
}

await this.domainEventPublisher.publish(
this.domainEventFactory.createTransitionStatusUpdatedEvent({
userUuid: event.payload.userUuid,
status: TransitionStatus.STATUSES.Verified,
transitionType: 'revisions',
transitionTimestamp: event.payload.timestamp,
}),
)
}

private async isAlreadyMigrated(userUuid: Uuid): Promise<boolean> {
const totalRevisionsCountForUserInPrimary = await this.primaryRevisionsRepository.countByUserUuid(userUuid)

if (totalRevisionsCountForUserInPrimary > 0) {
this.logger.info(
`[${userUuid.value}] User has ${totalRevisionsCountForUserInPrimary} revisions in primary database.`,
)
}

return totalRevisionsCountForUserInPrimary === 0
}

private async getUserUuidFromEvent(event: TransitionRequestedEvent): Promise<Uuid | null> {
const userUuidOrError = Uuid.create(event.payload.userUuid)
if (userUuidOrError.isFailed()) {
this.logger.error(`[${event.payload.userUuid}] Failed to transition revisions: ${userUuidOrError.getError()}`)
await this.domainEventPublisher.publish(
this.domainEventFactory.createTransitionStatusUpdatedEvent({
userUuid: event.payload.userUuid,
status: TransitionStatus.STATUSES.Failed,
transitionType: 'revisions',
transitionTimestamp: event.payload.timestamp,
}),
)

return null
}

return userUuidOrError.getValue()
}
}
@@ -1,4 +1,6 @@
export interface TransitionRepositoryInterface {
getPagingProgress(userUuid: string): Promise<number>
setPagingProgress(userUuid: string, progress: number): Promise<void>
getIntegrityProgress(userUuid: string): Promise<number>
setIntegrityProgress(userUuid: string, progress: number): Promise<void>
}
@@ -1,11 +1,13 @@
/* istanbul ignore file */
import { Result, UseCaseInterface, Uuid } from '@standardnotes/domain-core'
import { Result, TransitionStatus, UseCaseInterface, Uuid } from '@standardnotes/domain-core'
import { TimerInterface } from '@standardnotes/time'
import { Logger } from 'winston'

import { TransitionRevisionsFromPrimaryToSecondaryDatabaseForUserDTO } from './TransitionRevisionsFromPrimaryToSecondaryDatabaseForUserDTO'
import { RevisionRepositoryInterface } from '../../../Revision/RevisionRepositoryInterface'
import { TransitionRepositoryInterface } from '../../../Transition/TransitionRepositoryInterface'
import { DomainEventPublisherInterface } from '@standardnotes/domain-events'
import { DomainEventFactoryInterface } from '../../../Event/DomainEventFactoryInterface'

export class TransitionRevisionsFromPrimaryToSecondaryDatabaseForUser implements UseCaseInterface<void> {
constructor(
@@ -15,6 +17,8 @@ export class TransitionRevisionsFromPrimaryToSecondaryDatabaseForUser implements
private timer: TimerInterface,
private logger: Logger,
private pageSize: number,
private domainEventPublisher: DomainEventPublisherInterface,
private domainEventFactory: DomainEventFactoryInterface,
) {}

async execute(dto: TransitionRevisionsFromPrimaryToSecondaryDatabaseForUserDTO): Promise<Result<void>> {
@@ -34,15 +38,26 @@ export class TransitionRevisionsFromPrimaryToSecondaryDatabaseForUser implements
}
const userUuid = userUuidOrError.getValue()

if (await this.isAlreadyMigrated(userUuid)) {
this.logger.info(`[${userUuid.value}] User already migrated.`)

await this.updateTransitionStatus(userUuid, TransitionStatus.STATUSES.Verified, dto.timestamp)

return Result.ok()
}

await this.updateTransitionStatus(userUuid, TransitionStatus.STATUSES.InProgress, dto.timestamp)

const migrationTimeStart = this.timer.getTimestampInMicroseconds()

this.logger.info(`[${dto.userUuid}] Migrating revisions`)

const migrationResult = await this.migrateRevisionsForUser(userUuid)
const migrationResult = await this.migrateRevisionsForUser(userUuid, dto.timestamp)
if (migrationResult.isFailed()) {
await this.updateTransitionStatus(userUuid, TransitionStatus.STATUSES.Failed, dto.timestamp)

return Result.fail(migrationResult.getError())
}
const revisionsToSkipInIntegrityCheck = migrationResult.getValue()

this.logger.info(`[${dto.userUuid}] Revisions migrated`)

@@ -50,16 +65,20 @@ export class TransitionRevisionsFromPrimaryToSecondaryDatabaseForUser implements

this.logger.info(`[${dto.userUuid}] Checking integrity between primary and secondary database`)

const integrityCheckResult = await this.checkIntegrityBetweenPrimaryAndSecondaryDatabase(
userUuid,
revisionsToSkipInIntegrityCheck,
)
const integrityCheckResult = await this.checkIntegrityBetweenPrimaryAndSecondaryDatabase(userUuid)
if (integrityCheckResult.isFailed()) {
await (this.transitionStatusRepository as TransitionRepositoryInterface).setPagingProgress(userUuid.value, 1)
await (this.transitionStatusRepository as TransitionRepositoryInterface).setIntegrityProgress(userUuid.value, 1)

await this.updateTransitionStatus(userUuid, TransitionStatus.STATUSES.Failed, dto.timestamp)

return Result.fail(integrityCheckResult.getError())
}

const cleanupResult = await this.deleteRevisionsForUser(userUuid, this.primaryRevisionsRepository)
if (cleanupResult.isFailed()) {
await this.updateTransitionStatus(userUuid, TransitionStatus.STATUSES.Failed, dto.timestamp)

this.logger.error(`[${dto.userUuid}] Failed to clean up primary database revisions: ${cleanupResult.getError()}`)
}

@@ -72,10 +91,12 @@ export class TransitionRevisionsFromPrimaryToSecondaryDatabaseForUser implements
`[${dto.userUuid}] Transitioned revisions in ${migrationDurationTimeStructure.hours}h ${migrationDurationTimeStructure.minutes}m ${migrationDurationTimeStructure.seconds}s ${migrationDurationTimeStructure.milliseconds}ms`,
)

await this.updateTransitionStatus(userUuid, TransitionStatus.STATUSES.Verified, dto.timestamp)

return Result.ok()
}

private async migrateRevisionsForUser(userUuid: Uuid): Promise<Result<string[]>> {
private async migrateRevisionsForUser(userUuid: Uuid, timestamp: number): Promise<Result<void>> {
try {
const initialPage = await (this.transitionStatusRepository as TransitionRepositoryInterface).getPagingProgress(
userUuid.value,
@@ -85,8 +106,17 @@ export class TransitionRevisionsFromPrimaryToSecondaryDatabaseForUser implements

const totalRevisionsCountForUser = await this.primaryRevisionsRepository.countByUserUuid(userUuid)
const totalPages = Math.ceil(totalRevisionsCountForUser / this.pageSize)
const revisionsToSkipInIntegrityCheck = []
for (let currentPage = initialPage; currentPage <= totalPages; currentPage++) {
const isPageInEvery10Percent = currentPage % Math.ceil(totalPages / 10) === 0
if (isPageInEvery10Percent) {
this.logger.info(
`[${userUuid.value}] Migrating revisions for user: ${Math.round(
(currentPage / totalPages) * 100,
)}% completed`,
)
await this.updateTransitionStatus(userUuid, TransitionStatus.STATUSES.InProgress, timestamp)
}

await (this.transitionStatusRepository as TransitionRepositoryInterface).setPagingProgress(
userUuid.value,
currentPage,
@@ -113,7 +143,6 @@ export class TransitionRevisionsFromPrimaryToSecondaryDatabaseForUser implements
this.logger.info(
`[${userUuid.value}] Revision ${revision.id.toString()} is older than revision in secondary database`,
)
revisionsToSkipInIntegrityCheck.push(revision.id.toString())

continue
}
@@ -145,7 +174,7 @@ export class TransitionRevisionsFromPrimaryToSecondaryDatabaseForUser implements
}
}

return Result.ok(revisionsToSkipInIntegrityCheck)
return Result.ok()
} catch (error) {
return Result.fail(`Errored when migrating revisions for user ${userUuid.value}: ${(error as Error).message}`)
}
@@ -171,11 +200,14 @@ export class TransitionRevisionsFromPrimaryToSecondaryDatabaseForUser implements
await this.timer.sleep(twoSecondsInMilliseconds)
}

private async checkIntegrityBetweenPrimaryAndSecondaryDatabase(
userUuid: Uuid,
revisionsToSkipInIntegrityCheck: string[],
): Promise<Result<boolean>> {
private async checkIntegrityBetweenPrimaryAndSecondaryDatabase(userUuid: Uuid): Promise<Result<boolean>> {
try {
const initialPage = await (this.transitionStatusRepository as TransitionRepositoryInterface).getIntegrityProgress(
userUuid.value,
)

this.logger.info(`[${userUuid.value}] Checking integrity from page ${initialPage}`)

const totalRevisionsCountForUserInPrimary = await this.primaryRevisionsRepository.countByUserUuid(userUuid)
const totalRevisionsCountForUserInSecondary = await (
this.secondRevisionsRepository as RevisionRepositoryInterface
@@ -188,7 +220,12 @@ export class TransitionRevisionsFromPrimaryToSecondaryDatabaseForUser implements
}

const totalPages = Math.ceil(totalRevisionsCountForUserInPrimary / this.pageSize)
for (let currentPage = 1; currentPage <= totalPages; currentPage++) {
for (let currentPage = initialPage; currentPage <= totalPages; currentPage++) {
await (this.transitionStatusRepository as TransitionRepositoryInterface).setIntegrityProgress(
userUuid.value,
currentPage,
)

const query = {
userUuid: userUuid,
offset: (currentPage - 1) * this.pageSize,
@@ -212,17 +249,25 @@ export class TransitionRevisionsFromPrimaryToSecondaryDatabaseForUser implements
return Result.fail(`Revision ${revision.id.toString()} not found in secondary database`)
}

if (revisionsToSkipInIntegrityCheck.includes(revision.id.toString())) {
if (revision.isIdenticalTo(revisionInSecondary)) {
continue
}

if (!revision.isIdenticalTo(revisionInSecondary)) {
return Result.fail(
`Revision ${revision.id.toString()} is not identical in primary and secondary database. Revision in primary database: ${JSON.stringify(
revision,
)}, revision in secondary database: ${JSON.stringify(revisionInSecondary)}`,
if (revisionInSecondary.props.dates.updatedAt > revision.props.dates.updatedAt) {
this.logger.info(
`[${
userUuid.value
}] Integrity check of revision ${revision.id.toString()} - is older than revision in secondary database`,
)

continue
}

return Result.fail(
`Revision ${revision.id.toString()} is not identical in primary and secondary database. Revision in primary database: ${JSON.stringify(
revision,
)}, revision in secondary database: ${JSON.stringify(revisionInSecondary)}`,
)
}
}

@@ -233,4 +278,27 @@ export class TransitionRevisionsFromPrimaryToSecondaryDatabaseForUser implements
)
}
}

private async updateTransitionStatus(userUuid: Uuid, status: string, timestamp: number): Promise<void> {
await this.domainEventPublisher.publish(
this.domainEventFactory.createTransitionStatusUpdatedEvent({
userUuid: userUuid.value,
status,
transitionType: 'revisions',
transitionTimestamp: timestamp,
}),
)
}

private async isAlreadyMigrated(userUuid: Uuid): Promise<boolean> {
const totalRevisionsCountForUserInPrimary = await this.primaryRevisionsRepository.countByUserUuid(userUuid)

if (totalRevisionsCountForUserInPrimary > 0) {
this.logger.info(
`[${userUuid.value}] User has ${totalRevisionsCountForUserInPrimary} revisions in primary database.`,
)
}

return totalRevisionsCountForUserInPrimary === 0
}
}
@@ -1,3 +1,4 @@
export interface TransitionRevisionsFromPrimaryToSecondaryDatabaseForUserDTO {
userUuid: string
timestamp: number
}
@@ -3,10 +3,25 @@ import * as IORedis from 'ioredis'
import { TransitionRepositoryInterface } from '../../Domain/Transition/TransitionRepositoryInterface'

export class RedisTransitionRepository implements TransitionRepositoryInterface {
private readonly PREFIX = 'transition-revisions-paging-progress'
private readonly PREFIX = 'transition-revisions-migration-progress'
private readonly INTEGRITY_PREFIX = 'transition-revisions-integrity-progress'

constructor(private redisClient: IORedis.Redis) {}

async getIntegrityProgress(userUuid: string): Promise<number> {
const progress = await this.redisClient.get(`${this.INTEGRITY_PREFIX}:${userUuid}`)

if (progress === null) {
return 1
}

return parseInt(progress)
}

async setIntegrityProgress(userUuid: string, progress: number): Promise<void> {
await this.redisClient.setex(`${this.INTEGRITY_PREFIX}:${userUuid}`, 172_800, progress.toString())
}

async getPagingProgress(userUuid: string): Promise<number> {
const progress = await this.redisClient.get(`${this.PREFIX}:${userUuid}`)
@@ -30,7 +30,7 @@ export class SQLLegacyRevisionRepository implements RevisionRepositoryInterface
const queryBuilder = this.ormRepository
.createQueryBuilder('revision')
.where('revision.user_uuid = :userUuid', { userUuid: dto.userUuid.value })
.orderBy('revision.uuid', 'ASC')
.orderBy('revision.created_at', 'ASC')

if (dto.offset !== undefined) {
queryBuilder.skip(dto.offset)
@@ -3,6 +3,26 @@
All notable changes to this project will be documented in this file.
See [Conventional Commits](https://conventionalcommits.org) for commit guidelines.

## [1.21.6](https://github.com/standardnotes/server/compare/@standardnotes/scheduler-server@1.21.5...@standardnotes/scheduler-server@1.21.6) (2023-10-04)

**Note:** Version bump only for package @standardnotes/scheduler-server

## [1.21.5](https://github.com/standardnotes/server/compare/@standardnotes/scheduler-server@1.21.4...@standardnotes/scheduler-server@1.21.5) (2023-10-04)

**Note:** Version bump only for package @standardnotes/scheduler-server

## [1.21.4](https://github.com/standardnotes/server/compare/@standardnotes/scheduler-server@1.21.3...@standardnotes/scheduler-server@1.21.4) (2023-10-03)

**Note:** Version bump only for package @standardnotes/scheduler-server

## [1.21.3](https://github.com/standardnotes/server/compare/@standardnotes/scheduler-server@1.21.2...@standardnotes/scheduler-server@1.21.3) (2023-10-03)

**Note:** Version bump only for package @standardnotes/scheduler-server

## [1.21.2](https://github.com/standardnotes/server/compare/@standardnotes/scheduler-server@1.21.1...@standardnotes/scheduler-server@1.21.2) (2023-09-28)

**Note:** Version bump only for package @standardnotes/scheduler-server

## [1.21.1](https://github.com/standardnotes/server/compare/@standardnotes/scheduler-server@1.21.0...@standardnotes/scheduler-server@1.21.1) (2023-09-27)

**Note:** Version bump only for package @standardnotes/scheduler-server
@@ -1,6 +1,6 @@
{
"name": "@standardnotes/scheduler-server",
"version": "1.21.1",
"version": "1.21.6",
"engines": {
"node": ">=18.0.0 <21.0.0"
},
@@ -3,6 +3,12 @@
All notable changes to this project will be documented in this file.
See [Conventional Commits](https://conventionalcommits.org) for commit guidelines.

# [1.15.0](https://github.com/standardnotes/server/compare/@standardnotes/security@1.14.0...@standardnotes/security@1.15.0) (2023-09-28)

### Features

* block file operations during transition ([#856](https://github.com/standardnotes/server/issues/856)) ([676cf36](https://github.com/standardnotes/server/commit/676cf36f8d769aa433880b2800f0457d06fbbf14))

# [1.14.0](https://github.com/standardnotes/server/compare/@standardnotes/security@1.13.1...@standardnotes/security@1.14.0) (2023-09-26)

### Features
@@ -1,6 +1,6 @@
{
"name": "@standardnotes/security",
"version": "1.14.0",
"version": "1.15.0",
"engines": {
"node": ">=18.0.0 <21.0.0"
},
@@ -11,4 +11,5 @@ export type ValetTokenData = {
}>
uploadBytesUsed: number
uploadBytesLimit: number
ongoingTransition?: boolean
}
@@ -3,6 +3,48 @@
All notable changes to this project will be documented in this file.
See [Conventional Commits](https://conventionalcommits.org) for commit guidelines.

## [1.110.6](https://github.com/standardnotes/syncing-server-js/compare/@standardnotes/syncing-server@1.110.5...@standardnotes/syncing-server@1.110.6) (2023-10-04)

**Note:** Version bump only for package @standardnotes/syncing-server

## [1.110.5](https://github.com/standardnotes/syncing-server-js/compare/@standardnotes/syncing-server@1.110.4...@standardnotes/syncing-server@1.110.5) (2023-10-04)

**Note:** Version bump only for package @standardnotes/syncing-server

## [1.110.4](https://github.com/standardnotes/syncing-server-js/compare/@standardnotes/syncing-server@1.110.3...@standardnotes/syncing-server@1.110.4) (2023-10-03)

**Note:** Version bump only for package @standardnotes/syncing-server

## [1.110.3](https://github.com/standardnotes/syncing-server-js/compare/@standardnotes/syncing-server@1.110.2...@standardnotes/syncing-server@1.110.3) (2023-10-03)

**Note:** Version bump only for package @standardnotes/syncing-server

## [1.110.2](https://github.com/standardnotes/syncing-server-js/compare/@standardnotes/syncing-server@1.110.1...@standardnotes/syncing-server@1.110.2) (2023-10-02)

### Bug Fixes

* temporarily disable transitions to empty overpopulated queues ([cd893b4](https://github.com/standardnotes/syncing-server-js/commit/cd893b41d7371bdc32acc111f7cea797ec33cad5))

## [1.110.1](https://github.com/standardnotes/syncing-server-js/compare/@standardnotes/syncing-server@1.110.0...@standardnotes/syncing-server@1.110.1) (2023-09-29)

### Bug Fixes

* keep transition in-progress status alive ([032cde7](https://github.com/standardnotes/syncing-server-js/commit/032cde77233076814b1de5791850b4ee4c8dc1f4))

# [1.110.0](https://github.com/standardnotes/syncing-server-js/compare/@standardnotes/syncing-server@1.109.2...@standardnotes/syncing-server@1.110.0) (2023-09-29)

### Bug Fixes

* add paging memory to integrity check ([e4ca310](https://github.com/standardnotes/syncing-server-js/commit/e4ca310707b12b1c08073a391e8857ee52acd92b))

### Features

* **syncing-server:** allow surviving only upon account deletion ([#857](https://github.com/standardnotes/syncing-server-js/issues/857)) ([609e85f](https://github.com/standardnotes/syncing-server-js/commit/609e85f926ebbc2887656c46df18471c68d70185))

## [1.109.2](https://github.com/standardnotes/syncing-server-js/compare/@standardnotes/syncing-server@1.109.1...@standardnotes/syncing-server@1.109.2) (2023-09-28)

**Note:** Version bump only for package @standardnotes/syncing-server

## [1.109.1](https://github.com/standardnotes/syncing-server-js/compare/@standardnotes/syncing-server@1.109.0...@standardnotes/syncing-server@1.109.1) (2023-09-27)

### Bug Fixes
Some files were not shown because too many files have changed in this diff.