diff --git a/.github/workflows/files.release.dev.yml b/.github/workflows/files.release.dev.yml new file mode 100644 index 000000000..b718405b8 --- /dev/null +++ b/.github/workflows/files.release.dev.yml @@ -0,0 +1,170 @@ +name: Files Server Dev + +concurrency: + group: files_dev_environment + cancel-in-progress: true + +on: + push: + tags: + - '@standardnotes/files-server@[0-9]*.[0-9]*.[0-9]*-alpha.[0-9]*' + - '@standardnotes/files-server@[0-9]*.[0-9]*.[0-9]*-beta.[0-9]*' + workflow_dispatch: + +jobs: + test: + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v3 + - uses: actions/setup-node@v1 + with: + node-version: '16.x' + - run: yarn lint:files + - run: yarn test:files + + publish-aws-ecr: + needs: test + + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v2 + - run: cp .env.sample .env + - name: Configure AWS credentials + uses: aws-actions/configure-aws-credentials@v1 + with: + aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} + aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + aws-region: us-east-1 + - name: Login to Amazon ECR + id: login-ecr + uses: aws-actions/amazon-ecr-login@v1 + - name: Build, tag, and push image to Amazon ECR + id: build-image + env: + ECR_REGISTRY: ${{ steps.login-ecr.outputs.registry }} + ECR_REPOSITORY: files + IMAGE_TAG: ${{ github.sha }} + run: | + yarn docker build @standardnotes/files -t $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG + docker push $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG + docker tag $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG $ECR_REGISTRY/$ECR_REPOSITORY:dev + docker push $ECR_REGISTRY/$ECR_REPOSITORY:dev + + publish-docker-hub: + needs: test + + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v2 + - run: cp .env.sample .env + - name: Publish to Registry + uses: elgohr/Publish-Docker-Github-Action@master + with: + name: standardnotes/files + username: ${{ secrets.DOCKER_USERNAME }} + password: ${{ secrets.DOCKER_PASSWORD }} + tags: "dev,${{ github.sha }}" + + deploy-web: + needs: publish-aws-ecr + + runs-on: ubuntu-latest + + steps: + - name: Configure AWS credentials + uses: aws-actions/configure-aws-credentials@v1 + with: + aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} + aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + aws-region: us-east-1 + - name: Download task definition + run: | + aws ecs describe-task-definition --task-definition files-dev --query taskDefinition > task-definition.json + - name: Fill in the new version in the Amazon ECS task definition + run: | + jq '(.containerDefinitions[] | select(.name=="files-dev") | .environment[] | select(.name=="VERSION")).value = "${{ github.sha }}"' task-definition.json > tmp.json && mv tmp.json task-definition.json + - name: Fill in the new image ID in the Amazon ECS task definition + id: task-def + uses: aws-actions/amazon-ecs-render-task-definition@v1 + with: + task-definition: task-definition.json + container-name: files-dev + image: ${{ secrets.AWS_ECR_REGISTRY }}/files:${{ github.sha }} + - name: Deploy Amazon ECS task definition + uses: aws-actions/amazon-ecs-deploy-task-definition@v1 + with: + task-definition: ${{ steps.task-def.outputs.task-definition }} + service: files-dev + cluster: dev + wait-for-service-stability: true + + deploy-worker: + needs: publish-aws-ecr + + runs-on: ubuntu-latest + + steps: + - name: Configure AWS credentials + uses: aws-actions/configure-aws-credentials@v1 + with: + aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} + aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + 
aws-region: us-east-1 + - name: Download task definition + run: | + aws ecs describe-task-definition --task-definition files-worker-dev --query taskDefinition > task-definition.json + - name: Fill in the new version in the Amazon ECS task definition + run: | + jq '(.containerDefinitions[] | select(.name=="files-worker-dev") | .environment[] | select(.name=="VERSION")).value = "${{ github.sha }}"' task-definition.json > tmp.json && mv tmp.json task-definition.json + - name: Fill in the new image ID in the Amazon ECS task definition + id: task-def + uses: aws-actions/amazon-ecs-render-task-definition@v1 + with: + task-definition: task-definition.json + container-name: files-worker-dev + image: ${{ secrets.AWS_ECR_REGISTRY }}/files:${{ github.sha }} + - name: Deploy Amazon ECS task definition + uses: aws-actions/amazon-ecs-deploy-task-definition@v1 + with: + task-definition: ${{ steps.task-def.outputs.task-definition }} + service: files-worker-dev + cluster: dev + wait-for-service-stability: true + + newrelic: + needs: [ deploy-web, deploy-worker ] + + runs-on: ubuntu-latest + steps: + - name: Create New Relic deployment marker for Web + uses: newrelic/deployment-marker-action@v1 + with: + accountId: ${{ secrets.NEW_RELIC_ACCOUNT_ID }} + apiKey: ${{ secrets.NEW_RELIC_API_KEY }} + applicationId: ${{ secrets.NEW_RELIC_APPLICATION_ID_FILES_WEB_DEV }} + revision: "${{ github.sha }}" + description: "Automated Deployment via Github Actions" + user: "${{ github.actor }}" + - name: Create New Relic deployment marker for Worker + uses: newrelic/deployment-marker-action@v1 + with: + accountId: ${{ secrets.NEW_RELIC_ACCOUNT_ID }} + apiKey: ${{ secrets.NEW_RELIC_API_KEY }} + applicationId: ${{ secrets.NEW_RELIC_APPLICATION_ID_FILES_WORKER_DEV }} + revision: "${{ github.sha }}" + description: "Automated Deployment via Github Actions" + user: "${{ github.actor }}" + + notify_discord: + needs: [ deploy-web, deploy-worker ] + + runs-on: ubuntu-latest + + steps: + - name: Run Discord Webhook + uses: johnnyhuy/actions-discord-git-webhook@main + with: + webhook_url: ${{ secrets.DISCORD_WEBHOOK_URL }} diff --git a/.gitignore b/.gitignore index 7153ae075..e5f0f4b1e 100644 --- a/.gitignore +++ b/.gitignore @@ -15,3 +15,6 @@ newrelic_agent.log !.yarn/unplugged !.yarn/sdks !.yarn/versions + +packages/files/uploads/* +!packages/files/uploads/.gitkeep diff --git a/.pnp.cjs b/.pnp.cjs index 9c30d076b..88749e604 100755 --- a/.pnp.cjs +++ b/.pnp.cjs @@ -24,6 +24,10 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { "name": "@standardnotes/auth-server",\ "reference": "workspace:packages/auth"\ },\ + {\ + "name": "@standardnotes/files-server",\ + "reference": "workspace:packages/files"\ + },\ {\ "name": "@standardnotes/scheduler-server",\ "reference": "workspace:packages/scheduler"\ @@ -37,6 +41,7 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { "ignorePatternData": "(^(?:\\\\.yarn\\\\/sdks(?:\\\\/(?!\\\\.{1,2}(?:\\\\/|$))(?:(?:(?!(?:^|\\\\/)\\\\.{1,2}(?:\\\\/|$)).)*?)|$))$)",\ "fallbackExclusionList": [\ ["@standardnotes/auth-server", ["workspace:packages/auth"]],\ + ["@standardnotes/files-server", ["workspace:packages/files"]],\ ["@standardnotes/scheduler-server", ["workspace:packages/scheduler"]],\ ["@standardnotes/server-monorepo", ["workspace:."]],\ ["@standardnotes/syncing-server", ["workspace:packages/syncing-server"]]\ @@ -1719,6 +1724,13 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { }]\ ]],\ ["@newrelic/aws-sdk", [\ + ["npm:3.1.0", {\ + "packageLocation": 
"./.yarn/cache/@newrelic-aws-sdk-npm-3.1.0-7c3485a153-5601d90c78.zip/node_modules/@newrelic/aws-sdk/",\ + "packageDependencies": [\ + ["@newrelic/aws-sdk", "npm:3.1.0"]\ + ],\ + "linkType": "SOFT"\ + }],\ ["npm:4.1.2", {\ "packageLocation": "./.yarn/cache/@newrelic-aws-sdk-npm-4.1.2-9930120a02-610f6353a7.zip/node_modules/@newrelic/aws-sdk/",\ "packageDependencies": [\ @@ -1753,9 +1765,29 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { "newrelic"\ ],\ "linkType": "HARD"\ + }],\ + ["virtual:b949bcba7c7d71e38bb586c0239fa765beb131ea5f64fec66d133f6e38e6e87926b1ba9f91c8df6bcbc8243f33e33f909381ca14cfb301b09e6526205ac71883#npm:3.1.0", {\ + "packageLocation": "./.yarn/__virtual__/@newrelic-aws-sdk-virtual-9720173dde/0/cache/@newrelic-aws-sdk-npm-3.1.0-7c3485a153-5601d90c78.zip/node_modules/@newrelic/aws-sdk/",\ + "packageDependencies": [\ + ["@newrelic/aws-sdk", "virtual:b949bcba7c7d71e38bb586c0239fa765beb131ea5f64fec66d133f6e38e6e87926b1ba9f91c8df6bcbc8243f33e33f909381ca14cfb301b09e6526205ac71883#npm:3.1.0"],\ + ["@types/newrelic", null],\ + ["newrelic", "npm:7.5.2"]\ + ],\ + "packagePeers": [\ + "@types/newrelic",\ + "newrelic"\ + ],\ + "linkType": "HARD"\ }]\ ]],\ ["@newrelic/koa", [\ + ["npm:5.0.0", {\ + "packageLocation": "./.yarn/cache/@newrelic-koa-npm-5.0.0-c9c6a0e1dc-e98d921b96.zip/node_modules/@newrelic/koa/",\ + "packageDependencies": [\ + ["@newrelic/koa", "npm:5.0.0"]\ + ],\ + "linkType": "SOFT"\ + }],\ ["npm:6.1.2", {\ "packageLocation": "./.yarn/cache/@newrelic-koa-npm-6.1.2-df0f7c71b5-e269d37b13.zip/node_modules/@newrelic/koa/",\ "packageDependencies": [\ @@ -1788,9 +1820,33 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { "newrelic"\ ],\ "linkType": "HARD"\ + }],\ + ["virtual:b949bcba7c7d71e38bb586c0239fa765beb131ea5f64fec66d133f6e38e6e87926b1ba9f91c8df6bcbc8243f33e33f909381ca14cfb301b09e6526205ac71883#npm:5.0.0", {\ + "packageLocation": "./.yarn/__virtual__/@newrelic-koa-virtual-2873d18af2/0/cache/@newrelic-koa-npm-5.0.0-c9c6a0e1dc-e98d921b96.zip/node_modules/@newrelic/koa/",\ + "packageDependencies": [\ + ["@newrelic/koa", "virtual:b949bcba7c7d71e38bb586c0239fa765beb131ea5f64fec66d133f6e38e6e87926b1ba9f91c8df6bcbc8243f33e33f909381ca14cfb301b09e6526205ac71883#npm:5.0.0"],\ + ["@types/newrelic", null],\ + ["methods", "npm:1.1.2"],\ + ["newrelic", "npm:7.5.2"]\ + ],\ + "packagePeers": [\ + "@types/newrelic",\ + "newrelic"\ + ],\ + "linkType": "HARD"\ }]\ ]],\ ["@newrelic/native-metrics", [\ + ["npm:6.0.2", {\ + "packageLocation": "./.yarn/unplugged/@newrelic-native-metrics-npm-6.0.2-805c5534f5/node_modules/@newrelic/native-metrics/",\ + "packageDependencies": [\ + ["@newrelic/native-metrics", "npm:6.0.2"],\ + ["nan", "npm:2.16.0"],\ + ["node-gyp", "npm:9.0.0"],\ + ["semver", "npm:5.7.1"]\ + ],\ + "linkType": "HARD"\ + }],\ ["npm:7.0.2", {\ "packageLocation": "./.yarn/unplugged/@newrelic-native-metrics-npm-7.0.2-b4dcec08eb/node_modules/@newrelic/native-metrics/",\ "packageDependencies": [\ @@ -1825,6 +1881,13 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { }]\ ]],\ ["@newrelic/superagent", [\ + ["npm:4.0.0", {\ + "packageLocation": "./.yarn/cache/@newrelic-superagent-npm-4.0.0-6cc7e8ec57-5fb257ac05.zip/node_modules/@newrelic/superagent/",\ + "packageDependencies": [\ + ["@newrelic/superagent", "npm:4.0.0"]\ + ],\ + "linkType": "SOFT"\ + }],\ ["npm:5.1.1", {\ "packageLocation": "./.yarn/cache/@newrelic-superagent-npm-5.1.1-0d3c1fccf8-b43f7b9bb6.zip/node_modules/@newrelic/superagent/",\ "packageDependencies": [\ @@ -1857,6 +1920,20 @@ function 
$$SETUP_STATE(hydrateRuntimeState, basePath) { "newrelic"\ ],\ "linkType": "HARD"\ + }],\ + ["virtual:b949bcba7c7d71e38bb586c0239fa765beb131ea5f64fec66d133f6e38e6e87926b1ba9f91c8df6bcbc8243f33e33f909381ca14cfb301b09e6526205ac71883#npm:4.0.0", {\ + "packageLocation": "./.yarn/__virtual__/@newrelic-superagent-virtual-05297db2ec/0/cache/@newrelic-superagent-npm-4.0.0-6cc7e8ec57-5fb257ac05.zip/node_modules/@newrelic/superagent/",\ + "packageDependencies": [\ + ["@newrelic/superagent", "virtual:b949bcba7c7d71e38bb586c0239fa765beb131ea5f64fec66d133f6e38e6e87926b1ba9f91c8df6bcbc8243f33e33f909381ca14cfb301b09e6526205ac71883#npm:4.0.0"],\ + ["@types/newrelic", null],\ + ["methods", "npm:1.1.2"],\ + ["newrelic", "npm:7.5.2"]\ + ],\ + "packagePeers": [\ + "@types/newrelic",\ + "newrelic"\ + ],\ + "linkType": "HARD"\ }]\ ]],\ ["@newrelic/winston-enricher", [\ @@ -2588,6 +2665,17 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { "linkType": "HARD"\ }]\ ]],\ + ["@standardnotes/config", [\ + ["npm:2.0.1", {\ + "packageLocation": "./.yarn/cache/@standardnotes-config-npm-2.0.1-5f34962133-5284e034f2.zip/node_modules/@standardnotes/config/",\ + "packageDependencies": [\ + ["@standardnotes/config", "npm:2.0.1"],\ + ["@typescript-eslint/eslint-plugin", "virtual:5f34962133311be1f14bb34c0982459488dcec82f934766f3a869be491e21f1b6d2b4254097b1e3671b0c43a4da48e7dafc6c0e314719fdafff3dd267b51be35#npm:4.33.0"],\ + ["@typescript-eslint/parser", "virtual:5f34962133311be1f14bb34c0982459488dcec82f934766f3a869be491e21f1b6d2b4254097b1e3671b0c43a4da48e7dafc6c0e314719fdafff3dd267b51be35#npm:4.33.0"]\ + ],\ + "linkType": "HARD"\ + }]\ + ]],\ ["@standardnotes/domain-events", [\ ["npm:2.32.2", {\ "packageLocation": "./.yarn/cache/@standardnotes-domain-events-npm-2.32.2-73adf7a999-54da5fc885.zip/node_modules/@standardnotes/domain-events/",\ @@ -2639,6 +2727,58 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { "linkType": "HARD"\ }]\ ]],\ + ["@standardnotes/files-server", [\ + ["workspace:packages/files", {\ + "packageLocation": "./packages/files/",\ + "packageDependencies": [\ + ["@standardnotes/files-server", "workspace:packages/files"],\ + ["@newrelic/native-metrics", "npm:7.0.2"],\ + ["@sentry/node", "npm:6.19.7"],\ + ["@standardnotes/auth", "npm:3.19.3"],\ + ["@standardnotes/common", "npm:1.23.0"],\ + ["@standardnotes/config", "npm:2.0.1"],\ + ["@standardnotes/domain-events", "npm:2.32.2"],\ + ["@standardnotes/domain-events-infra", "npm:1.5.2"],\ + ["@standardnotes/sncrypto-common", "npm:1.9.0"],\ + ["@standardnotes/sncrypto-node", "npm:1.8.3"],\ + ["@standardnotes/time", "npm:1.7.0"],\ + ["@types/connect-busboy", "npm:1.0.0"],\ + ["@types/cors", "npm:2.8.12"],\ + ["@types/express", "npm:4.17.13"],\ + ["@types/ioredis", "npm:4.28.10"],\ + ["@types/jest", "npm:28.1.3"],\ + ["@types/jsonwebtoken", "npm:8.5.8"],\ + ["@types/newrelic", "npm:7.0.3"],\ + ["@types/prettyjson", "npm:0.0.29"],\ + ["@types/uuid", "npm:8.3.4"],\ + ["@typescript-eslint/eslint-plugin", "virtual:31b5a94a105c89c9294c3d524a7f8929fe63ee5a2efadf21951ca4c0cfd2ecf02e8f4ef5a066bbda091f1e3a56e57c6749069a080618c96b22e51131a330fc4a#npm:5.29.0"],\ + ["aws-sdk", "npm:2.1158.0"],\ + ["connect-busboy", "npm:1.0.0"],\ + ["cors", "npm:2.8.5"],\ + ["dayjs", "npm:1.11.3"],\ + ["dotenv", "npm:8.6.0"],\ + ["eslint", "npm:8.18.0"],\ + ["eslint-plugin-prettier", "virtual:31b5a94a105c89c9294c3d524a7f8929fe63ee5a2efadf21951ca4c0cfd2ecf02e8f4ef5a066bbda091f1e3a56e57c6749069a080618c96b22e51131a330fc4a#npm:4.0.0"],\ + ["express", "npm:4.18.1"],\ + ["express-winston", 
"virtual:b442cf0427cc365d1c137f7340f9b81f9b204561afe791a8564ae9590c3a7fc4b5f793aaf8817b946f75a3cb64d03ef8790eb847f8b576b41e700da7b00c240c#npm:4.2.0"],\ + ["helmet", "npm:4.6.0"],\ + ["inversify", "npm:6.0.1"],\ + ["inversify-express-utils", "npm:6.4.3"],\ + ["ioredis", "npm:5.0.6"],\ + ["jest", "virtual:31b5a94a105c89c9294c3d524a7f8929fe63ee5a2efadf21951ca4c0cfd2ecf02e8f4ef5a066bbda091f1e3a56e57c6749069a080618c96b22e51131a330fc4a#npm:28.1.1"],\ + ["jsonwebtoken", "npm:8.5.1"],\ + ["newrelic", "npm:7.5.2"],\ + ["nodemon", "npm:2.0.16"],\ + ["prettyjson", "npm:1.2.5"],\ + ["reflect-metadata", "npm:0.1.13"],\ + ["ts-jest", "virtual:31b5a94a105c89c9294c3d524a7f8929fe63ee5a2efadf21951ca4c0cfd2ecf02e8f4ef5a066bbda091f1e3a56e57c6749069a080618c96b22e51131a330fc4a#npm:28.0.5"],\ + ["ts-node", "virtual:b442cf0427cc365d1c137f7340f9b81f9b204561afe791a8564ae9590c3a7fc4b5f793aaf8817b946f75a3cb64d03ef8790eb847f8b576b41e700da7b00c240c#npm:10.8.1"],\ + ["uuid", "npm:8.3.2"],\ + ["winston", "npm:3.7.2"]\ + ],\ + "linkType": "SOFT"\ + }]\ + ]],\ ["@standardnotes/models", [\ ["npm:1.11.10", {\ "packageLocation": "./.yarn/cache/@standardnotes-models-npm-1.11.10-e4b5e4717d-d69fd3940e.zip/node_modules/@standardnotes/models/",\ @@ -2989,6 +3129,16 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { "linkType": "HARD"\ }]\ ]],\ + ["@types/busboy", [\ + ["npm:1.5.0", {\ + "packageLocation": "./.yarn/cache/@types-busboy-npm-1.5.0-0e24e7f08d-ffa7bf25c0.zip/node_modules/@types/busboy/",\ + "packageDependencies": [\ + ["@types/busboy", "npm:1.5.0"],\ + ["@types/node", "npm:18.0.0"]\ + ],\ + "linkType": "HARD"\ + }]\ + ]],\ ["@types/connect", [\ ["npm:3.4.35", {\ "packageLocation": "./.yarn/cache/@types-connect-npm-3.4.35-7337eee0a3-fe81351470.zip/node_modules/@types/connect/",\ @@ -2999,6 +3149,18 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { "linkType": "HARD"\ }]\ ]],\ + ["@types/connect-busboy", [\ + ["npm:1.0.0", {\ + "packageLocation": "./.yarn/cache/@types-connect-busboy-npm-1.0.0-fca702448d-ccbf7bc42d.zip/node_modules/@types/connect-busboy/",\ + "packageDependencies": [\ + ["@types/connect-busboy", "npm:1.0.0"],\ + ["@types/busboy", "npm:1.5.0"],\ + ["@types/express", "npm:4.17.13"],\ + ["@types/node", "npm:18.0.0"]\ + ],\ + "linkType": "HARD"\ + }]\ + ]],\ ["@types/cors", [\ ["npm:2.8.12", {\ "packageLocation": "./.yarn/cache/@types-cors-npm-2.8.12-ff52e8e514-8c45f112c7.zip/node_modules/@types/cors/",\ @@ -3337,6 +3499,13 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { }]\ ]],\ ["@typescript-eslint/eslint-plugin", [\ + ["npm:4.33.0", {\ + "packageLocation": "./.yarn/cache/@typescript-eslint-eslint-plugin-npm-4.33.0-b5d1be4879-d74855d0a5.zip/node_modules/@typescript-eslint/eslint-plugin/",\ + "packageDependencies": [\ + ["@typescript-eslint/eslint-plugin", "npm:4.33.0"]\ + ],\ + "linkType": "SOFT"\ + }],\ ["npm:5.29.0", {\ "packageLocation": "./.yarn/cache/@typescript-eslint-eslint-plugin-npm-5.29.0-d7e482bb3e-b1022a640f.zip/node_modules/@typescript-eslint/eslint-plugin/",\ "packageDependencies": [\ @@ -3403,9 +3572,73 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { "typescript"\ ],\ "linkType": "HARD"\ + }],\ + ["virtual:5f34962133311be1f14bb34c0982459488dcec82f934766f3a869be491e21f1b6d2b4254097b1e3671b0c43a4da48e7dafc6c0e314719fdafff3dd267b51be35#npm:4.33.0", {\ + "packageLocation": 
"./.yarn/__virtual__/@typescript-eslint-eslint-plugin-virtual-56ea46a0fe/0/cache/@typescript-eslint-eslint-plugin-npm-4.33.0-b5d1be4879-d74855d0a5.zip/node_modules/@typescript-eslint/eslint-plugin/",\ + "packageDependencies": [\ + ["@typescript-eslint/eslint-plugin", "virtual:5f34962133311be1f14bb34c0982459488dcec82f934766f3a869be491e21f1b6d2b4254097b1e3671b0c43a4da48e7dafc6c0e314719fdafff3dd267b51be35#npm:4.33.0"],\ + ["@types/eslint", null],\ + ["@types/typescript", null],\ + ["@types/typescript-eslint__parser", null],\ + ["@typescript-eslint/experimental-utils", "virtual:56ea46a0fe17b3df61f8f63a15a082b4da2a385d6e774395a132d9a90b9ce8a1ea4c8896dbc69243dfdd2325db5c22821bfa276cdbaddb1ec4f5f1efddea3e20#npm:4.33.0"],\ + ["@typescript-eslint/parser", "virtual:5f34962133311be1f14bb34c0982459488dcec82f934766f3a869be491e21f1b6d2b4254097b1e3671b0c43a4da48e7dafc6c0e314719fdafff3dd267b51be35#npm:4.33.0"],\ + ["@typescript-eslint/scope-manager", "npm:4.33.0"],\ + ["debug", "virtual:b86a9fb34323a98c6519528ed55faa0d9b44ca8879307c0b29aa384bde47ff59a7d0c9051b31246f14521dfb71ba3c5d6d0b35c29fffc17bf875aa6ad977d9e8#npm:4.3.4"],\ + ["eslint", null],\ + ["functional-red-black-tree", "npm:1.0.1"],\ + ["ignore", "npm:5.2.0"],\ + ["regexpp", "npm:3.2.0"],\ + ["semver", "npm:7.3.7"],\ + ["tsutils", "virtual:e64d2841693653abb2dee666d19406912f5e913a8081a709c081d9877d2f39987ff853b7cd736901a2df59af98328f7249f3db0da01abf060cf1d858d4d4e43b#npm:3.21.0"],\ + ["typescript", null]\ + ],\ + "packagePeers": [\ + "@types/eslint",\ + "@types/typescript-eslint__parser",\ + "@types/typescript",\ + "@typescript-eslint/parser",\ + "eslint",\ + "typescript"\ + ],\ + "linkType": "HARD"\ + }]\ + ]],\ + ["@typescript-eslint/experimental-utils", [\ + ["npm:4.33.0", {\ + "packageLocation": "./.yarn/cache/@typescript-eslint-experimental-utils-npm-4.33.0-f10f287886-f859800ada.zip/node_modules/@typescript-eslint/experimental-utils/",\ + "packageDependencies": [\ + ["@typescript-eslint/experimental-utils", "npm:4.33.0"]\ + ],\ + "linkType": "SOFT"\ + }],\ + ["virtual:56ea46a0fe17b3df61f8f63a15a082b4da2a385d6e774395a132d9a90b9ce8a1ea4c8896dbc69243dfdd2325db5c22821bfa276cdbaddb1ec4f5f1efddea3e20#npm:4.33.0", {\ + "packageLocation": "./.yarn/__virtual__/@typescript-eslint-experimental-utils-virtual-3b1d487b65/0/cache/@typescript-eslint-experimental-utils-npm-4.33.0-f10f287886-f859800ada.zip/node_modules/@typescript-eslint/experimental-utils/",\ + "packageDependencies": [\ + ["@typescript-eslint/experimental-utils", "virtual:56ea46a0fe17b3df61f8f63a15a082b4da2a385d6e774395a132d9a90b9ce8a1ea4c8896dbc69243dfdd2325db5c22821bfa276cdbaddb1ec4f5f1efddea3e20#npm:4.33.0"],\ + ["@types/eslint", null],\ + ["@types/json-schema", "npm:7.0.11"],\ + ["@typescript-eslint/scope-manager", "npm:4.33.0"],\ + ["@typescript-eslint/types", "npm:4.33.0"],\ + ["@typescript-eslint/typescript-estree", "virtual:3b1d487b65ac14c3c2f5d6292c3e4b93bf25216a88a2d253428f98942e01532ac4933ee30564874cec0a0bb5aea3ee613d7494705e42eed4a2106f8ac0a03f97#npm:4.33.0"],\ + ["eslint", null],\ + ["eslint-scope", "npm:5.1.1"],\ + ["eslint-utils", "virtual:3b1d487b65ac14c3c2f5d6292c3e4b93bf25216a88a2d253428f98942e01532ac4933ee30564874cec0a0bb5aea3ee613d7494705e42eed4a2106f8ac0a03f97#npm:3.0.0"]\ + ],\ + "packagePeers": [\ + "@types/eslint",\ + "eslint"\ + ],\ + "linkType": "HARD"\ }]\ ]],\ ["@typescript-eslint/parser", [\ + ["npm:4.33.0", {\ + "packageLocation": "./.yarn/cache/@typescript-eslint-parser-npm-4.33.0-799c6ce8d5-102457eae1.zip/node_modules/@typescript-eslint/parser/",\ + 
"packageDependencies": [\ + ["@typescript-eslint/parser", "npm:4.33.0"]\ + ],\ + "linkType": "SOFT"\ + }],\ ["npm:5.29.0", {\ "packageLocation": "./.yarn/cache/@typescript-eslint-parser-npm-5.29.0-491a7f9690-7805796638.zip/node_modules/@typescript-eslint/parser/",\ "packageDependencies": [\ @@ -3413,6 +3646,27 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { ],\ "linkType": "SOFT"\ }],\ + ["virtual:5f34962133311be1f14bb34c0982459488dcec82f934766f3a869be491e21f1b6d2b4254097b1e3671b0c43a4da48e7dafc6c0e314719fdafff3dd267b51be35#npm:4.33.0", {\ + "packageLocation": "./.yarn/__virtual__/@typescript-eslint-parser-virtual-636bc1eaeb/0/cache/@typescript-eslint-parser-npm-4.33.0-799c6ce8d5-102457eae1.zip/node_modules/@typescript-eslint/parser/",\ + "packageDependencies": [\ + ["@typescript-eslint/parser", "virtual:5f34962133311be1f14bb34c0982459488dcec82f934766f3a869be491e21f1b6d2b4254097b1e3671b0c43a4da48e7dafc6c0e314719fdafff3dd267b51be35#npm:4.33.0"],\ + ["@types/eslint", null],\ + ["@types/typescript", null],\ + ["@typescript-eslint/scope-manager", "npm:4.33.0"],\ + ["@typescript-eslint/types", "npm:4.33.0"],\ + ["@typescript-eslint/typescript-estree", "virtual:3b1d487b65ac14c3c2f5d6292c3e4b93bf25216a88a2d253428f98942e01532ac4933ee30564874cec0a0bb5aea3ee613d7494705e42eed4a2106f8ac0a03f97#npm:4.33.0"],\ + ["debug", "virtual:b86a9fb34323a98c6519528ed55faa0d9b44ca8879307c0b29aa384bde47ff59a7d0c9051b31246f14521dfb71ba3c5d6d0b35c29fffc17bf875aa6ad977d9e8#npm:4.3.4"],\ + ["eslint", null],\ + ["typescript", null]\ + ],\ + "packagePeers": [\ + "@types/eslint",\ + "@types/typescript",\ + "eslint",\ + "typescript"\ + ],\ + "linkType": "HARD"\ + }],\ ["virtual:8859b278716fedf3e7458b5628625f7e35678c418626878559a0b816445001b7e24c55546f4677ba4c20b521aa0cf52cc33ac07deff171e383ada6eeab69933f#npm:5.29.0", {\ "packageLocation": "./.yarn/__virtual__/@typescript-eslint-parser-virtual-451c3112c8/0/cache/@typescript-eslint-parser-npm-5.29.0-491a7f9690-7805796638.zip/node_modules/@typescript-eslint/parser/",\ "packageDependencies": [\ @@ -3436,6 +3690,15 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { }]\ ]],\ ["@typescript-eslint/scope-manager", [\ + ["npm:4.33.0", {\ + "packageLocation": "./.yarn/cache/@typescript-eslint-scope-manager-npm-4.33.0-28014c179d-9a25fb7ba7.zip/node_modules/@typescript-eslint/scope-manager/",\ + "packageDependencies": [\ + ["@typescript-eslint/scope-manager", "npm:4.33.0"],\ + ["@typescript-eslint/types", "npm:4.33.0"],\ + ["@typescript-eslint/visitor-keys", "npm:4.33.0"]\ + ],\ + "linkType": "HARD"\ + }],\ ["npm:5.29.0", {\ "packageLocation": "./.yarn/cache/@typescript-eslint-scope-manager-npm-5.29.0-03a6d28ed2-540642bef9.zip/node_modules/@typescript-eslint/scope-manager/",\ "packageDependencies": [\ @@ -3496,6 +3759,13 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { }]\ ]],\ ["@typescript-eslint/types", [\ + ["npm:4.33.0", {\ + "packageLocation": "./.yarn/cache/@typescript-eslint-types-npm-4.33.0-9e9b956afa-3baae1ca35.zip/node_modules/@typescript-eslint/types/",\ + "packageDependencies": [\ + ["@typescript-eslint/types", "npm:4.33.0"]\ + ],\ + "linkType": "HARD"\ + }],\ ["npm:5.29.0", {\ "packageLocation": "./.yarn/cache/@typescript-eslint-types-npm-5.29.0-2bea7f0c9b-982ecdd691.zip/node_modules/@typescript-eslint/types/",\ "packageDependencies": [\ @@ -3505,6 +3775,13 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { }]\ ]],\ ["@typescript-eslint/typescript-estree", [\ + ["npm:4.33.0", {\ + "packageLocation": 
"./.yarn/cache/@typescript-eslint-typescript-estree-npm-4.33.0-b6b79c10d0-2566984390.zip/node_modules/@typescript-eslint/typescript-estree/",\ + "packageDependencies": [\ + ["@typescript-eslint/typescript-estree", "npm:4.33.0"]\ + ],\ + "linkType": "SOFT"\ + }],\ ["npm:5.29.0", {\ "packageLocation": "./.yarn/cache/@typescript-eslint-typescript-estree-npm-5.29.0-f23de2ab5c-b91107a9fc.zip/node_modules/@typescript-eslint/typescript-estree/",\ "packageDependencies": [\ @@ -3512,6 +3789,26 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { ],\ "linkType": "SOFT"\ }],\ + ["virtual:3b1d487b65ac14c3c2f5d6292c3e4b93bf25216a88a2d253428f98942e01532ac4933ee30564874cec0a0bb5aea3ee613d7494705e42eed4a2106f8ac0a03f97#npm:4.33.0", {\ + "packageLocation": "./.yarn/__virtual__/@typescript-eslint-typescript-estree-virtual-ec62c0bda6/0/cache/@typescript-eslint-typescript-estree-npm-4.33.0-b6b79c10d0-2566984390.zip/node_modules/@typescript-eslint/typescript-estree/",\ + "packageDependencies": [\ + ["@typescript-eslint/typescript-estree", "virtual:3b1d487b65ac14c3c2f5d6292c3e4b93bf25216a88a2d253428f98942e01532ac4933ee30564874cec0a0bb5aea3ee613d7494705e42eed4a2106f8ac0a03f97#npm:4.33.0"],\ + ["@types/typescript", null],\ + ["@typescript-eslint/types", "npm:4.33.0"],\ + ["@typescript-eslint/visitor-keys", "npm:4.33.0"],\ + ["debug", "virtual:b86a9fb34323a98c6519528ed55faa0d9b44ca8879307c0b29aa384bde47ff59a7d0c9051b31246f14521dfb71ba3c5d6d0b35c29fffc17bf875aa6ad977d9e8#npm:4.3.4"],\ + ["globby", "npm:11.1.0"],\ + ["is-glob", "npm:4.0.3"],\ + ["semver", "npm:7.3.7"],\ + ["tsutils", "virtual:e64d2841693653abb2dee666d19406912f5e913a8081a709c081d9877d2f39987ff853b7cd736901a2df59af98328f7249f3db0da01abf060cf1d858d4d4e43b#npm:3.21.0"],\ + ["typescript", null]\ + ],\ + "packagePeers": [\ + "@types/typescript",\ + "typescript"\ + ],\ + "linkType": "HARD"\ + }],\ ["virtual:451c3112c8ebc24954be5135e65b7a370326adef1f6bb7aaca3ef2abc346ee165ef171b721a7207548ab6a19505983a443fc07f41cc553d4c9c5cddd04862b50#npm:5.29.0", {\ "packageLocation": "./.yarn/__virtual__/@typescript-eslint-typescript-estree-virtual-7e6283c452/0/cache/@typescript-eslint-typescript-estree-npm-5.29.0-f23de2ab5c-b91107a9fc.zip/node_modules/@typescript-eslint/typescript-estree/",\ "packageDependencies": [\ @@ -3591,7 +3888,7 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { ["@typescript-eslint/typescript-estree", "virtual:4ec458b53cfcb38d153394fe4d0300908a12ce721ae6026f1e2d7bbe8409ed98079b29d9688a9eb93463ace5dbaac7d454b12c4582b1cd0b1d8210588cf0cb1c#npm:5.29.0"],\ ["eslint", null],\ ["eslint-scope", "npm:5.1.1"],\ - ["eslint-utils", "virtual:9b3cc2e468ebc82b101c5313a9afa58bf6c93ab196f710844b44e247fc606cd503de5b07cdee6c592a841949dbe5daecc3f46a7ae43ee5bbf7fe046d76ec335e#npm:3.0.0"]\ + ["eslint-utils", "virtual:3b1d487b65ac14c3c2f5d6292c3e4b93bf25216a88a2d253428f98942e01532ac4933ee30564874cec0a0bb5aea3ee613d7494705e42eed4a2106f8ac0a03f97#npm:3.0.0"]\ ],\ "packagePeers": [\ "@types/eslint",\ @@ -3601,6 +3898,15 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { }]\ ]],\ ["@typescript-eslint/visitor-keys", [\ + ["npm:4.33.0", {\ + "packageLocation": "./.yarn/cache/@typescript-eslint-visitor-keys-npm-4.33.0-8b7e72a3c9-59953e474a.zip/node_modules/@typescript-eslint/visitor-keys/",\ + "packageDependencies": [\ + ["@typescript-eslint/visitor-keys", "npm:4.33.0"],\ + ["@typescript-eslint/types", "npm:4.33.0"],\ + ["eslint-visitor-keys", "npm:2.1.0"]\ + ],\ + "linkType": "HARD"\ + }],\ ["npm:5.29.0", {\ "packageLocation": 
"./.yarn/cache/@typescript-eslint-visitor-keys-npm-5.29.0-fe23f55f18-15f228ad9f.zip/node_modules/@typescript-eslint/visitor-keys/",\ "packageDependencies": [\ @@ -3701,6 +4007,13 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { }]\ ]],\ ["agent-base", [\ + ["npm:5.1.1", {\ + "packageLocation": "./.yarn/cache/agent-base-npm-5.1.1-d451a4ad62-61ae789f30.zip/node_modules/agent-base/",\ + "packageDependencies": [\ + ["agent-base", "npm:5.1.1"]\ + ],\ + "linkType": "HARD"\ + }],\ ["npm:6.0.2", {\ "packageLocation": "./.yarn/cache/agent-base-npm-6.0.2-428f325a93-f52b6872cc.zip/node_modules/agent-base/",\ "packageDependencies": [\ @@ -4381,6 +4694,16 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { "linkType": "HARD"\ }]\ ]],\ + ["busboy", [\ + ["npm:1.6.0", {\ + "packageLocation": "./.yarn/cache/busboy-npm-1.6.0-ebb5cbb04b-32801e2c01.zip/node_modules/busboy/",\ + "packageDependencies": [\ + ["busboy", "npm:1.6.0"],\ + ["streamsearch", "npm:1.1.0"]\ + ],\ + "linkType": "HARD"\ + }]\ + ]],\ ["byte-size", [\ ["npm:7.0.1", {\ "packageLocation": "./.yarn/cache/byte-size-npm-7.0.1-cda9f76d28-6791663a6d.zip/node_modules/byte-size/",\ @@ -4916,6 +5239,16 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { "linkType": "HARD"\ }]\ ]],\ + ["connect-busboy", [\ + ["npm:1.0.0", {\ + "packageLocation": "./.yarn/cache/connect-busboy-npm-1.0.0-9908d1785d-e4a8cece06.zip/node_modules/connect-busboy/",\ + "packageDependencies": [\ + ["connect-busboy", "npm:1.0.0"],\ + ["busboy", "npm:1.6.0"]\ + ],\ + "linkType": "HARD"\ + }]\ + ]],\ ["console-control-strings", [\ ["npm:1.1.0", {\ "packageLocation": "./.yarn/cache/console-control-strings-npm-1.1.0-e3160e5275-8755d76787.zip/node_modules/console-control-strings/",\ @@ -5568,6 +5901,13 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { ["dotenv", "npm:8.2.0"]\ ],\ "linkType": "HARD"\ + }],\ + ["npm:8.6.0", {\ + "packageLocation": "./.yarn/cache/dotenv-npm-8.6.0-2ce3e9f7bb-38e902c80b.zip/node_modules/dotenv/",\ + "packageDependencies": [\ + ["dotenv", "npm:8.6.0"]\ + ],\ + "linkType": "HARD"\ }]\ ]],\ ["duplexer", [\ @@ -5906,12 +6246,12 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { ],\ "linkType": "SOFT"\ }],\ - ["virtual:3b3bfb190f25ed01591b1d51c8e6a15e818ab97d9cabea5c63912afc819a8f6e3ad395aaf338cd170314411b04e35eec5c8cff33dfa644476d292dcf2c5354d1#npm:3.0.0", {\ - "packageLocation": "./.yarn/__virtual__/eslint-utils-virtual-c2e00a0f83/0/cache/eslint-utils-npm-3.0.0-630b3a4013-0668fe02f5.zip/node_modules/eslint-utils/",\ + ["virtual:3b1d487b65ac14c3c2f5d6292c3e4b93bf25216a88a2d253428f98942e01532ac4933ee30564874cec0a0bb5aea3ee613d7494705e42eed4a2106f8ac0a03f97#npm:3.0.0", {\ + "packageLocation": "./.yarn/__virtual__/eslint-utils-virtual-19087eaf4f/0/cache/eslint-utils-npm-3.0.0-630b3a4013-0668fe02f5.zip/node_modules/eslint-utils/",\ "packageDependencies": [\ - ["eslint-utils", "virtual:3b3bfb190f25ed01591b1d51c8e6a15e818ab97d9cabea5c63912afc819a8f6e3ad395aaf338cd170314411b04e35eec5c8cff33dfa644476d292dcf2c5354d1#npm:3.0.0"],\ + ["eslint-utils", "virtual:3b1d487b65ac14c3c2f5d6292c3e4b93bf25216a88a2d253428f98942e01532ac4933ee30564874cec0a0bb5aea3ee613d7494705e42eed4a2106f8ac0a03f97#npm:3.0.0"],\ ["@types/eslint", null],\ - ["eslint", "npm:8.18.0"],\ + ["eslint", null],\ ["eslint-visitor-keys", "npm:2.1.0"]\ ],\ "packagePeers": [\ @@ -5920,12 +6260,12 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { ],\ "linkType": "HARD"\ }],\ - 
["virtual:9b3cc2e468ebc82b101c5313a9afa58bf6c93ab196f710844b44e247fc606cd503de5b07cdee6c592a841949dbe5daecc3f46a7ae43ee5bbf7fe046d76ec335e#npm:3.0.0", {\ - "packageLocation": "./.yarn/__virtual__/eslint-utils-virtual-d9a84e87ea/0/cache/eslint-utils-npm-3.0.0-630b3a4013-0668fe02f5.zip/node_modules/eslint-utils/",\ + ["virtual:3b3bfb190f25ed01591b1d51c8e6a15e818ab97d9cabea5c63912afc819a8f6e3ad395aaf338cd170314411b04e35eec5c8cff33dfa644476d292dcf2c5354d1#npm:3.0.0", {\ + "packageLocation": "./.yarn/__virtual__/eslint-utils-virtual-c2e00a0f83/0/cache/eslint-utils-npm-3.0.0-630b3a4013-0668fe02f5.zip/node_modules/eslint-utils/",\ "packageDependencies": [\ - ["eslint-utils", "virtual:9b3cc2e468ebc82b101c5313a9afa58bf6c93ab196f710844b44e247fc606cd503de5b07cdee6c592a841949dbe5daecc3f46a7ae43ee5bbf7fe046d76ec335e#npm:3.0.0"],\ + ["eslint-utils", "virtual:3b3bfb190f25ed01591b1d51c8e6a15e818ab97d9cabea5c63912afc819a8f6e3ad395aaf338cd170314411b04e35eec5c8cff33dfa644476d292dcf2c5354d1#npm:3.0.0"],\ ["@types/eslint", null],\ - ["eslint", null],\ + ["eslint", "npm:8.18.0"],\ ["eslint-visitor-keys", "npm:2.1.0"]\ ],\ "packagePeers": [\ @@ -6162,6 +6502,30 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { "linkType": "HARD"\ }]\ ]],\ + ["express-winston", [\ + ["npm:4.2.0", {\ + "packageLocation": "./.yarn/cache/express-winston-npm-4.2.0-e4cfb26486-029529107f.zip/node_modules/express-winston/",\ + "packageDependencies": [\ + ["express-winston", "npm:4.2.0"]\ + ],\ + "linkType": "SOFT"\ + }],\ + ["virtual:b442cf0427cc365d1c137f7340f9b81f9b204561afe791a8564ae9590c3a7fc4b5f793aaf8817b946f75a3cb64d03ef8790eb847f8b576b41e700da7b00c240c#npm:4.2.0", {\ + "packageLocation": "./.yarn/__virtual__/express-winston-virtual-7edb98a399/0/cache/express-winston-npm-4.2.0-e4cfb26486-029529107f.zip/node_modules/express-winston/",\ + "packageDependencies": [\ + ["express-winston", "virtual:b442cf0427cc365d1c137f7340f9b81f9b204561afe791a8564ae9590c3a7fc4b5f793aaf8817b946f75a3cb64d03ef8790eb847f8b576b41e700da7b00c240c#npm:4.2.0"],\ + ["@types/winston", null],\ + ["chalk", "npm:2.4.2"],\ + ["lodash", "npm:4.17.21"],\ + ["winston", "npm:3.7.2"]\ + ],\ + "packagePeers": [\ + "@types/winston",\ + "winston"\ + ],\ + "linkType": "HARD"\ + }]\ + ]],\ ["external-editor", [\ ["npm:3.1.0", {\ "packageLocation": "./.yarn/cache/external-editor-npm-3.1.0-878e7807af-1c2a616a73.zip/node_modules/external-editor/",\ @@ -6866,6 +7230,13 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { ["helmet", "npm:4.3.1"]\ ],\ "linkType": "HARD"\ + }],\ + ["npm:4.6.0", {\ + "packageLocation": "./.yarn/cache/helmet-npm-4.6.0-f244fd965c-139ad678d1.zip/node_modules/helmet/",\ + "packageDependencies": [\ + ["helmet", "npm:4.6.0"]\ + ],\ + "linkType": "HARD"\ }]\ ]],\ ["highlight.js", [\ @@ -6990,6 +7361,15 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { }]\ ]],\ ["https-proxy-agent", [\ + ["npm:4.0.0", {\ + "packageLocation": "./.yarn/cache/https-proxy-agent-npm-4.0.0-9021ec873f-19471d5aae.zip/node_modules/https-proxy-agent/",\ + "packageDependencies": [\ + ["https-proxy-agent", "npm:4.0.0"],\ + ["agent-base", "npm:5.1.1"],\ + ["debug", "virtual:b86a9fb34323a98c6519528ed55faa0d9b44ca8879307c0b29aa384bde47ff59a7d0c9051b31246f14521dfb71ba3c5d6d0b35c29fffc17bf875aa6ad977d9e8#npm:4.3.4"]\ + ],\ + "linkType": "HARD"\ + }],\ ["npm:5.0.1", {\ "packageLocation": "./.yarn/cache/https-proxy-agent-npm-5.0.1-42d65f358e-571fccdf38.zip/node_modules/https-proxy-agent/",\ "packageDependencies": [\ @@ -9252,6 +9632,26 @@ function 
$$SETUP_STATE(hydrateRuntimeState, basePath) { }]\ ]],\ ["newrelic", [\ + ["npm:7.5.2", {\ + "packageLocation": "./.yarn/cache/newrelic-npm-7.5.2-b949bcba7c-f6c67dbb7d.zip/node_modules/newrelic/",\ + "packageDependencies": [\ + ["newrelic", "npm:7.5.2"],\ + ["@grpc/grpc-js", "npm:1.6.7"],\ + ["@grpc/proto-loader", "npm:0.5.6"],\ + ["@newrelic/aws-sdk", "virtual:b949bcba7c7d71e38bb586c0239fa765beb131ea5f64fec66d133f6e38e6e87926b1ba9f91c8df6bcbc8243f33e33f909381ca14cfb301b09e6526205ac71883#npm:3.1.0"],\ + ["@newrelic/koa", "virtual:b949bcba7c7d71e38bb586c0239fa765beb131ea5f64fec66d133f6e38e6e87926b1ba9f91c8df6bcbc8243f33e33f909381ca14cfb301b09e6526205ac71883#npm:5.0.0"],\ + ["@newrelic/native-metrics", "npm:6.0.2"],\ + ["@newrelic/superagent", "virtual:b949bcba7c7d71e38bb586c0239fa765beb131ea5f64fec66d133f6e38e6e87926b1ba9f91c8df6bcbc8243f33e33f909381ca14cfb301b09e6526205ac71883#npm:4.0.0"],\ + ["@tyriar/fibonacci-heap", "npm:2.0.9"],\ + ["async", "npm:3.2.4"],\ + ["concat-stream", "npm:2.0.0"],\ + ["https-proxy-agent", "npm:4.0.0"],\ + ["json-stringify-safe", "npm:5.0.1"],\ + ["readable-stream", "npm:3.6.0"],\ + ["semver", "npm:5.7.1"]\ + ],\ + "linkType": "HARD"\ + }],\ ["npm:8.14.1", {\ "packageLocation": "./.yarn/cache/newrelic-npm-8.14.1-b659d4d19c-cd12bb2ac9.zip/node_modules/newrelic/",\ "packageDependencies": [\ @@ -10223,6 +10623,15 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { ["minimist", "npm:1.2.6"]\ ],\ "linkType": "HARD"\ + }],\ + ["npm:1.2.5", {\ + "packageLocation": "./.yarn/cache/prettyjson-npm-1.2.5-a72b7bf823-e36e8ae4f7.zip/node_modules/prettyjson/",\ + "packageDependencies": [\ + ["prettyjson", "npm:1.2.5"],\ + ["colors", "npm:1.4.0"],\ + ["minimist", "npm:1.2.6"]\ + ],\ + "linkType": "HARD"\ }]\ ]],\ ["proc-log", [\ @@ -11381,6 +11790,15 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { "linkType": "HARD"\ }]\ ]],\ + ["streamsearch", [\ + ["npm:1.1.0", {\ + "packageLocation": "./.yarn/cache/streamsearch-npm-1.1.0-fc3ad6536d-1cce16cea8.zip/node_modules/streamsearch/",\ + "packageDependencies": [\ + ["streamsearch", "npm:1.1.0"]\ + ],\ + "linkType": "HARD"\ + }]\ + ]],\ ["strict-uri-encode", [\ ["npm:2.0.0", {\ "packageLocation": "./.yarn/cache/strict-uri-encode-npm-2.0.0-1ec3189376-eaac4cf978.zip/node_modules/strict-uri-encode/",\ @@ -11969,6 +12387,42 @@ function $$SETUP_STATE(hydrateRuntimeState, basePath) { "typescript"\ ],\ "linkType": "HARD"\ + }],\ + ["virtual:b442cf0427cc365d1c137f7340f9b81f9b204561afe791a8564ae9590c3a7fc4b5f793aaf8817b946f75a3cb64d03ef8790eb847f8b576b41e700da7b00c240c#npm:10.8.1", {\ + "packageLocation": "./.yarn/__virtual__/ts-node-virtual-28037b75c9/0/cache/ts-node-npm-10.8.1-24280b0982-7d1aa7aa3a.zip/node_modules/ts-node/",\ + "packageDependencies": [\ + ["ts-node", "virtual:b442cf0427cc365d1c137f7340f9b81f9b204561afe791a8564ae9590c3a7fc4b5f793aaf8817b946f75a3cb64d03ef8790eb847f8b576b41e700da7b00c240c#npm:10.8.1"],\ + ["@cspotcode/source-map-support", "npm:0.8.1"],\ + ["@swc/core", null],\ + ["@swc/wasm", null],\ + ["@tsconfig/node10", "npm:1.0.9"],\ + ["@tsconfig/node12", "npm:1.0.11"],\ + ["@tsconfig/node14", "npm:1.0.3"],\ + ["@tsconfig/node16", "npm:1.0.3"],\ + ["@types/node", null],\ + ["@types/swc__core", null],\ + ["@types/swc__wasm", null],\ + ["@types/typescript", null],\ + ["acorn", "npm:8.7.1"],\ + ["acorn-walk", "npm:8.2.0"],\ + ["arg", "npm:4.1.3"],\ + ["create-require", "npm:1.1.1"],\ + ["diff", "npm:4.0.2"],\ + ["make-error", "npm:1.3.6"],\ + ["typescript", null],\ + ["v8-compile-cache-lib", "npm:3.0.1"],\ + 
["yn", "npm:3.1.1"]\ + ],\ + "packagePeers": [\ + "@swc/core",\ + "@swc/wasm",\ + "@types/node",\ + "@types/swc__core",\ + "@types/swc__wasm",\ + "@types/typescript",\ + "typescript"\ + ],\ + "linkType": "HARD"\ }]\ ]],\ ["tslib", [\ diff --git a/.yarn/cache/@newrelic-aws-sdk-npm-3.1.0-7c3485a153-5601d90c78.zip b/.yarn/cache/@newrelic-aws-sdk-npm-3.1.0-7c3485a153-5601d90c78.zip new file mode 100644 index 000000000..c9ae4393b Binary files /dev/null and b/.yarn/cache/@newrelic-aws-sdk-npm-3.1.0-7c3485a153-5601d90c78.zip differ diff --git a/.yarn/cache/@newrelic-koa-npm-5.0.0-c9c6a0e1dc-e98d921b96.zip b/.yarn/cache/@newrelic-koa-npm-5.0.0-c9c6a0e1dc-e98d921b96.zip new file mode 100644 index 000000000..b70f8329e Binary files /dev/null and b/.yarn/cache/@newrelic-koa-npm-5.0.0-c9c6a0e1dc-e98d921b96.zip differ diff --git a/.yarn/cache/@newrelic-native-metrics-npm-6.0.2-805c5534f5-78f92bbe7f.zip b/.yarn/cache/@newrelic-native-metrics-npm-6.0.2-805c5534f5-78f92bbe7f.zip new file mode 100644 index 000000000..311b28eee Binary files /dev/null and b/.yarn/cache/@newrelic-native-metrics-npm-6.0.2-805c5534f5-78f92bbe7f.zip differ diff --git a/.yarn/cache/@newrelic-superagent-npm-4.0.0-6cc7e8ec57-5fb257ac05.zip b/.yarn/cache/@newrelic-superagent-npm-4.0.0-6cc7e8ec57-5fb257ac05.zip new file mode 100644 index 000000000..0dd61214c Binary files /dev/null and b/.yarn/cache/@newrelic-superagent-npm-4.0.0-6cc7e8ec57-5fb257ac05.zip differ diff --git a/.yarn/cache/@standardnotes-config-npm-2.0.1-5f34962133-5284e034f2.zip b/.yarn/cache/@standardnotes-config-npm-2.0.1-5f34962133-5284e034f2.zip new file mode 100644 index 000000000..93e3f878a Binary files /dev/null and b/.yarn/cache/@standardnotes-config-npm-2.0.1-5f34962133-5284e034f2.zip differ diff --git a/.yarn/cache/@types-busboy-npm-1.5.0-0e24e7f08d-ffa7bf25c0.zip b/.yarn/cache/@types-busboy-npm-1.5.0-0e24e7f08d-ffa7bf25c0.zip new file mode 100644 index 000000000..11fdf92f8 Binary files /dev/null and b/.yarn/cache/@types-busboy-npm-1.5.0-0e24e7f08d-ffa7bf25c0.zip differ diff --git a/.yarn/cache/@types-connect-busboy-npm-1.0.0-fca702448d-ccbf7bc42d.zip b/.yarn/cache/@types-connect-busboy-npm-1.0.0-fca702448d-ccbf7bc42d.zip new file mode 100644 index 000000000..6d524b698 Binary files /dev/null and b/.yarn/cache/@types-connect-busboy-npm-1.0.0-fca702448d-ccbf7bc42d.zip differ diff --git a/.yarn/cache/@typescript-eslint-eslint-plugin-npm-4.33.0-b5d1be4879-d74855d0a5.zip b/.yarn/cache/@typescript-eslint-eslint-plugin-npm-4.33.0-b5d1be4879-d74855d0a5.zip new file mode 100644 index 000000000..ae0ba92a9 Binary files /dev/null and b/.yarn/cache/@typescript-eslint-eslint-plugin-npm-4.33.0-b5d1be4879-d74855d0a5.zip differ diff --git a/.yarn/cache/@typescript-eslint-experimental-utils-npm-4.33.0-f10f287886-f859800ada.zip b/.yarn/cache/@typescript-eslint-experimental-utils-npm-4.33.0-f10f287886-f859800ada.zip new file mode 100644 index 000000000..ce83053cb Binary files /dev/null and b/.yarn/cache/@typescript-eslint-experimental-utils-npm-4.33.0-f10f287886-f859800ada.zip differ diff --git a/.yarn/cache/@typescript-eslint-parser-npm-4.33.0-799c6ce8d5-102457eae1.zip b/.yarn/cache/@typescript-eslint-parser-npm-4.33.0-799c6ce8d5-102457eae1.zip new file mode 100644 index 000000000..2e52119d4 Binary files /dev/null and b/.yarn/cache/@typescript-eslint-parser-npm-4.33.0-799c6ce8d5-102457eae1.zip differ diff --git a/.yarn/cache/@typescript-eslint-scope-manager-npm-4.33.0-28014c179d-9a25fb7ba7.zip b/.yarn/cache/@typescript-eslint-scope-manager-npm-4.33.0-28014c179d-9a25fb7ba7.zip new 
file mode 100644 index 000000000..ada83acdc Binary files /dev/null and b/.yarn/cache/@typescript-eslint-scope-manager-npm-4.33.0-28014c179d-9a25fb7ba7.zip differ diff --git a/.yarn/cache/@typescript-eslint-types-npm-4.33.0-9e9b956afa-3baae1ca35.zip b/.yarn/cache/@typescript-eslint-types-npm-4.33.0-9e9b956afa-3baae1ca35.zip new file mode 100644 index 000000000..a790352ff Binary files /dev/null and b/.yarn/cache/@typescript-eslint-types-npm-4.33.0-9e9b956afa-3baae1ca35.zip differ diff --git a/.yarn/cache/@typescript-eslint-typescript-estree-npm-4.33.0-b6b79c10d0-2566984390.zip b/.yarn/cache/@typescript-eslint-typescript-estree-npm-4.33.0-b6b79c10d0-2566984390.zip new file mode 100644 index 000000000..7cc21eeab Binary files /dev/null and b/.yarn/cache/@typescript-eslint-typescript-estree-npm-4.33.0-b6b79c10d0-2566984390.zip differ diff --git a/.yarn/cache/@typescript-eslint-visitor-keys-npm-4.33.0-8b7e72a3c9-59953e474a.zip b/.yarn/cache/@typescript-eslint-visitor-keys-npm-4.33.0-8b7e72a3c9-59953e474a.zip new file mode 100644 index 000000000..80d6a78e7 Binary files /dev/null and b/.yarn/cache/@typescript-eslint-visitor-keys-npm-4.33.0-8b7e72a3c9-59953e474a.zip differ diff --git a/.yarn/cache/agent-base-npm-5.1.1-d451a4ad62-61ae789f30.zip b/.yarn/cache/agent-base-npm-5.1.1-d451a4ad62-61ae789f30.zip new file mode 100644 index 000000000..ec90f9cf2 Binary files /dev/null and b/.yarn/cache/agent-base-npm-5.1.1-d451a4ad62-61ae789f30.zip differ diff --git a/.yarn/cache/busboy-npm-1.6.0-ebb5cbb04b-32801e2c01.zip b/.yarn/cache/busboy-npm-1.6.0-ebb5cbb04b-32801e2c01.zip new file mode 100644 index 000000000..ef174b22d Binary files /dev/null and b/.yarn/cache/busboy-npm-1.6.0-ebb5cbb04b-32801e2c01.zip differ diff --git a/.yarn/cache/connect-busboy-npm-1.0.0-9908d1785d-e4a8cece06.zip b/.yarn/cache/connect-busboy-npm-1.0.0-9908d1785d-e4a8cece06.zip new file mode 100644 index 000000000..73550eecf Binary files /dev/null and b/.yarn/cache/connect-busboy-npm-1.0.0-9908d1785d-e4a8cece06.zip differ diff --git a/.yarn/cache/dotenv-npm-8.6.0-2ce3e9f7bb-38e902c80b.zip b/.yarn/cache/dotenv-npm-8.6.0-2ce3e9f7bb-38e902c80b.zip new file mode 100644 index 000000000..21f3698c0 Binary files /dev/null and b/.yarn/cache/dotenv-npm-8.6.0-2ce3e9f7bb-38e902c80b.zip differ diff --git a/.yarn/cache/express-winston-npm-4.2.0-e4cfb26486-029529107f.zip b/.yarn/cache/express-winston-npm-4.2.0-e4cfb26486-029529107f.zip new file mode 100644 index 000000000..c591797e7 Binary files /dev/null and b/.yarn/cache/express-winston-npm-4.2.0-e4cfb26486-029529107f.zip differ diff --git a/.yarn/cache/helmet-npm-4.6.0-f244fd965c-139ad678d1.zip b/.yarn/cache/helmet-npm-4.6.0-f244fd965c-139ad678d1.zip new file mode 100644 index 000000000..39115ecbd Binary files /dev/null and b/.yarn/cache/helmet-npm-4.6.0-f244fd965c-139ad678d1.zip differ diff --git a/.yarn/cache/https-proxy-agent-npm-4.0.0-9021ec873f-19471d5aae.zip b/.yarn/cache/https-proxy-agent-npm-4.0.0-9021ec873f-19471d5aae.zip new file mode 100644 index 000000000..5f9e0b89d Binary files /dev/null and b/.yarn/cache/https-proxy-agent-npm-4.0.0-9021ec873f-19471d5aae.zip differ diff --git a/.yarn/cache/newrelic-npm-7.5.2-b949bcba7c-f6c67dbb7d.zip b/.yarn/cache/newrelic-npm-7.5.2-b949bcba7c-f6c67dbb7d.zip new file mode 100644 index 000000000..45bc5ff57 Binary files /dev/null and b/.yarn/cache/newrelic-npm-7.5.2-b949bcba7c-f6c67dbb7d.zip differ diff --git a/.yarn/cache/prettyjson-npm-1.2.5-a72b7bf823-e36e8ae4f7.zip b/.yarn/cache/prettyjson-npm-1.2.5-a72b7bf823-e36e8ae4f7.zip new file mode 100644 
index 000000000..fbc4cf55b Binary files /dev/null and b/.yarn/cache/prettyjson-npm-1.2.5-a72b7bf823-e36e8ae4f7.zip differ diff --git a/.yarn/cache/streamsearch-npm-1.1.0-fc3ad6536d-1cce16cea8.zip b/.yarn/cache/streamsearch-npm-1.1.0-fc3ad6536d-1cce16cea8.zip new file mode 100644 index 000000000..68383b403 Binary files /dev/null and b/.yarn/cache/streamsearch-npm-1.1.0-fc3ad6536d-1cce16cea8.zip differ diff --git a/package.json b/package.json index fa602a278..0cf3bf6fc 100644 --- a/package.json +++ b/package.json @@ -15,21 +15,26 @@ "lint:auth": "yarn workspace @standardnotes/auth-server lint", "lint:scheduler": "yarn workspace @standardnotes/scheduler-server lint", "lint:syncing-server": "yarn workspace @standardnotes/syncing-server lint", + "lint:files": "yarn workspace @standardnotes/files-server lint", "test": "yarn workspaces foreach -p -j 10 --verbose run test", "test:auth": "yarn workspace @standardnotes/auth-server test", "test:scheduler": "yarn workspace @standardnotes/scheduler-server test", "test:syncing-server": "yarn workspace @standardnotes/syncing-server test", + "test:files": "yarn workspace @standardnotes/files-server test", "clean": "yarn workspaces foreach -p --verbose run clean", "setup:env": "yarn workspaces foreach -p --verbose run setup:env", "build": "yarn workspaces foreach -pt -j 10 --verbose run build", "build:auth": "yarn workspace @standardnotes/auth-server build", "build:scheduler": "yarn workspace @standardnotes/scheduler-server build", "build:syncing-server": "yarn workspace @standardnotes/syncing-server build", + "build:files": "yarn workspace @standardnotes/files-server build", "start:auth": "yarn workspace @standardnotes/auth-server start", "start:auth-worker": "yarn workspace @standardnotes/auth-server worker", "start:scheduler": "yarn workspace @standardnotes/scheduler-server worker", "start:syncing-server": "yarn workspace @standardnotes/syncing-server start", "start:syncing-server-worker": "yarn workspace @standardnotes/syncing-server worker", + "start:files": "yarn workspace @standardnotes/files-server start", + "start:files-worker": "yarn workspace @standardnotes/files-server worker", "release:beta": "lerna version --conventional-prerelease --conventional-commits --yes -m \"chore(release): publish\"" }, "devDependencies": { diff --git a/packages/files/.env.sample b/packages/files/.env.sample new file mode 100644 index 000000000..372470b70 --- /dev/null +++ b/packages/files/.env.sample @@ -0,0 +1,35 @@ +LOG_LEVEL=debug +NODE_ENV=development +VERSION=development + +PORT=3000 + +REDIS_URL=redis://cache +REDIS_EVENTS_CHANNEL=events + +VALET_TOKEN_SECRET=change-me-! 
+
+MAX_CHUNK_BYTES=1000000
+
+# (Optional) New Relic Setup
+NEW_RELIC_ENABLED=false
+NEW_RELIC_APP_NAME=Syncing Server JS
+NEW_RELIC_LICENSE_KEY=
+NEW_RELIC_NO_CONFIG_FILE=true
+NEW_RELIC_DISTRIBUTED_TRACING_ENABLED=false
+NEW_RELIC_LOG_ENABLED=false
+NEW_RELIC_LOG_LEVEL=info
+
+# (Optional) AWS Setup
+AWS_ACCESS_KEY_ID=
+AWS_SECRET_ACCESS_KEY=
+S3_BUCKET_NAME=
+S3_AWS_REGION=
+S3_ENDPOINT=
+SNS_TOPIC_ARN=
+SNS_AWS_REGION=
+SQS_QUEUE_URL=
+SQS_AWS_REGION=
+
+# (Optional) File upload path (relative to root directory)
+FILE_UPLOAD_PATH=
diff --git a/packages/files/.eslintignore b/packages/files/.eslintignore
new file mode 100644
index 000000000..4186e3d19
--- /dev/null
+++ b/packages/files/.eslintignore
@@ -0,0 +1,3 @@
+dist
+test-setup.ts
+data
diff --git a/packages/files/.eslintrc b/packages/files/.eslintrc
new file mode 100644
index 000000000..cb7136174
--- /dev/null
+++ b/packages/files/.eslintrc
@@ -0,0 +1,6 @@
+{
+  "extends": "../../.eslintrc",
+  "parserOptions": {
+    "project": "./linter.tsconfig.json"
+  }
+}
diff --git a/packages/files/Dockerfile b/packages/files/Dockerfile
new file mode 100644
index 000000000..b878705d6
--- /dev/null
+++ b/packages/files/Dockerfile
@@ -0,0 +1,25 @@
+FROM node:16.15.1-alpine AS builder
+
+# Install dependencies for building native libraries
+RUN apk add --update git openssh-client python3 alpine-sdk
+
+WORKDIR /workspace
+
+# docker-build plugin copies everything needed for `yarn install` to `manifests` folder.
+COPY manifests ./
+
+RUN yarn install --immutable
+
+FROM node:16.15.1-alpine
+
+WORKDIR /workspace
+
+# Copy the installed dependencies from the previous stage.
+COPY --from=builder /workspace ./
+
+# docker-build plugin runs `yarn pack` in all workspace dependencies and copies them to `packs` folder.
+COPY packs ./
+
+ENTRYPOINT [ "/workspace/packages/files/docker/entrypoint.sh" ]
+
+CMD [ "start-web" ]
diff --git a/packages/files/bin/server.ts b/packages/files/bin/server.ts
new file mode 100644
index 000000000..2e76e7868
--- /dev/null
+++ b/packages/files/bin/server.ts
@@ -0,0 +1,104 @@
+import 'reflect-metadata'
+
+import 'newrelic'
+
+import * as Sentry from '@sentry/node'
+import * as busboy from 'connect-busboy'
+
+import '../src/Controller/HealthCheckController'
+import '../src/Controller/FilesController'
+
+import * as helmet from 'helmet'
+import * as cors from 'cors'
+import { urlencoded, json, raw, Request, Response, NextFunction, RequestHandler, ErrorRequestHandler } from 'express'
+import * as winston from 'winston'
+
+import { InversifyExpressServer } from 'inversify-express-utils'
+import { ContainerConfigLoader } from '../src/Bootstrap/Container'
+import TYPES from '../src/Bootstrap/Types'
+import { Env } from '../src/Bootstrap/Env'
+
+const container = new ContainerConfigLoader()
+void container.load().then((container) => {
+  const env: Env = new Env()
+  env.load()
+
+  const server = new InversifyExpressServer(container)
+
+  server.setConfig((app) => {
+    app.use((_request: Request, response: Response, next: NextFunction) => {
+      response.setHeader('X-Files-Version', container.get(TYPES.VERSION))
+      next()
+    })
+    app.use(
+      busboy({
+        highWaterMark: 2 * 1024 * 1024,
+      }),
+    )
+    /* eslint-disable */
+    app.use(helmet({
+      contentSecurityPolicy: {
+        directives: {
+          defaultSrc: ["https: 'self'"],
+          baseUri: ["'self'"],
+          childSrc: ["*", "blob:"],
+          connectSrc: ["*"],
+          fontSrc: ["*", "'self'"],
+          formAction: ["'self'"],
+          frameAncestors: ["*", "*.standardnotes.org", "*.standardnotes.com"],
+          frameSrc: ["*", "blob:"],
+          imgSrc: ["'self'", "*", "data:"],
+          manifestSrc: ["'self'"],
+          mediaSrc: ["'self'"],
+          objectSrc: ["'self'"],
+          scriptSrc: ["'self'"],
+          styleSrc: ["'self'"]
+        }
+      }
+    }))
+    /* eslint-enable */
+    app.use(json({ limit: '50mb' }))
+    app.use(raw({ limit: '50mb', type: 'application/octet-stream' }))
+    app.use(urlencoded({ extended: true, limit: '50mb' }))
+    app.use(
+      cors({
+        exposedHeaders: ['Content-Range', 'Accept-Ranges'],
+      }),
+    )
+
+    if (env.get('SENTRY_DSN', true)) {
+      Sentry.init({
+        dsn: env.get('SENTRY_DSN'),
+        integrations: [new Sentry.Integrations.Http({ tracing: false, breadcrumbs: true })],
+        tracesSampleRate: 0,
+      })
+
+      app.use(Sentry.Handlers.requestHandler() as RequestHandler)
+    }
+  })
+
+  const logger: winston.Logger = container.get(TYPES.Logger)
+
+  server.setErrorConfig((app) => {
+    if (env.get('SENTRY_DSN', true)) {
+      app.use(Sentry.Handlers.errorHandler() as ErrorRequestHandler)
+    }
+
+    app.use((error: Record<string, unknown>, _request: Request, response: Response, _next: NextFunction) => {
+      logger.error(error.stack)
+
+      response.status(500).send({
+        error: {
+          message:
+            "Unfortunately, we couldn't handle your request. 
Please try again or contact our support if the error persists.", + }, + }) + }) + }) + + const serverInstance = server.build() + + serverInstance.listen(env.get('PORT')) + + logger.info(`Server started on port ${process.env.PORT}`) +}) diff --git a/packages/files/bin/worker.ts b/packages/files/bin/worker.ts new file mode 100644 index 000000000..67004db20 --- /dev/null +++ b/packages/files/bin/worker.ts @@ -0,0 +1,29 @@ +import 'reflect-metadata' + +import 'newrelic' + +import { Logger } from 'winston' + +import { ContainerConfigLoader } from '../src/Bootstrap/Container' +import TYPES from '../src/Bootstrap/Types' +import { Env } from '../src/Bootstrap/Env' +import { DomainEventSubscriberFactoryInterface } from '@standardnotes/domain-events' +import * as dayjs from 'dayjs' +import * as utc from 'dayjs/plugin/utc' + +const container = new ContainerConfigLoader() +void container.load().then((container) => { + dayjs.extend(utc) + + const env: Env = new Env() + env.load() + + const logger: Logger = container.get(TYPES.Logger) + + logger.info('Starting worker...') + + const subscriberFactory: DomainEventSubscriberFactoryInterface = container.get(TYPES.DomainEventSubscriberFactory) + subscriberFactory.create().start() + + setInterval(() => logger.info('Alive and kicking!'), 20 * 60 * 1000) +}) diff --git a/packages/files/docker/entrypoint.sh b/packages/files/docker/entrypoint.sh new file mode 100755 index 000000000..6b9568bc0 --- /dev/null +++ b/packages/files/docker/entrypoint.sh @@ -0,0 +1,27 @@ +#!/bin/sh +set -e + +COMMAND=$1 && shift 1 + +case "$COMMAND" in + 'start-local') + echo "Starting Web in Local Mode..." + yarn workspace @standardnotes/files-server start:local + ;; + + 'start-web' ) + echo "Starting Web..." + yarn workspace @standardnotes/files-server start + ;; + + 'start-worker' ) + echo "Starting Worker..." + yarn workspace @standardnotes/files-server worker + ;; + + * ) + echo "Unknown command" + ;; +esac + +exec "$@" diff --git a/packages/files/jest.config.js b/packages/files/jest.config.js new file mode 100644 index 000000000..723c799b4 --- /dev/null +++ b/packages/files/jest.config.js @@ -0,0 +1,19 @@ +// eslint-disable-next-line @typescript-eslint/no-var-requires +const base = require('../../jest.config'); + +module.exports = { + ...base, + globals: { + 'ts-jest': { + tsconfig: 'tsconfig.json', + }, + }, + coveragePathIgnorePatterns: [ + '/Bootstrap/', + 'HealthCheckController', + "/Infra/FS" + ], + setupFilesAfterEnv: [ + './test-setup.ts' + ] +}; diff --git a/packages/files/linter.tsconfig.json b/packages/files/linter.tsconfig.json new file mode 100644 index 000000000..67d92b038 --- /dev/null +++ b/packages/files/linter.tsconfig.json @@ -0,0 +1,4 @@ +{ + "extends": "./tsconfig.json", + "exclude": ["dist", "test-setup.ts"] +} diff --git a/packages/files/package.json b/packages/files/package.json new file mode 100644 index 000000000..556b00c60 --- /dev/null +++ b/packages/files/package.json @@ -0,0 +1,73 @@ +{ + "name": "@standardnotes/files-server", + "version": "1.0.0", + "engines": { + "node": ">=16.0.0 <17.0.0" + }, + "description": "Standard Notes Files Server", + "main": "dist/src/index.js", + "typings": "dist/src/index.d.ts", + "repository": "git@github.com:standardnotes/files.git", + "authors": [ + "Karol Sójko " + ], + "license": "AGPL-3.0-or-later", + "scripts": { + "clean": "rm -fr dist", + "prebuild": "yarn clean", + "build": "tsc --rootDir ./", + "lint": "eslint . 
--ext .ts", + "pretest": "yarn lint && yarn build", + "test": "jest --coverage --config=./jest.config.js --maxWorkers=50%", + "start": "yarn node dist/bin/server.js", + "worker": "yarn node dist/bin/worker.js" + }, + "dependencies": { + "@newrelic/native-metrics": "7.0.2", + "@sentry/node": "^6.16.1", + "@standardnotes/auth": "^3.18.9", + "@standardnotes/common": "^1.19.4", + "@standardnotes/domain-events": "^2.27.6", + "@standardnotes/domain-events-infra": "^1.4.93", + "@standardnotes/sncrypto-common": "^1.3.0", + "@standardnotes/sncrypto-node": "^1.3.0", + "@standardnotes/time": "^1.4.5", + "aws-sdk": "^2.1158.0", + "connect-busboy": "^1.0.0", + "cors": "^2.8.5", + "dayjs": "^1.11.3", + "dotenv": "^8.2.0", + "express": "^4.17.1", + "express-winston": "^4.0.5", + "helmet": "^4.3.1", + "inversify": "^6.0.1", + "inversify-express-utils": "^6.4.3", + "ioredis": "^5.0.6", + "jsonwebtoken": "^8.5.1", + "newrelic": "^7.3.1", + "nodemon": "^2.0.15", + "prettyjson": "^1.2.1", + "reflect-metadata": "^0.1.13", + "ts-node": "^10.4.0", + "winston": "^3.3.3" + }, + "devDependencies": { + "@standardnotes/config": "2.0.1", + "@types/connect-busboy": "^1.0.0", + "@types/cors": "^2.8.9", + "@types/express": "^4.17.11", + "@types/ioredis": "^4.28.10", + "@types/jest": "^28.1.3", + "@types/jsonwebtoken": "^8.5.0", + "@types/newrelic": "^7.0.1", + "@types/prettyjson": "^0.0.29", + "@types/uuid": "^8.3.0", + "@typescript-eslint/eslint-plugin": "^5.29.0", + "eslint": "^8.14.0", + "eslint-plugin-prettier": "^4.0.0", + "jest": "^28.1.1", + "nodemon": "^2.0.16", + "ts-jest": "^28.0.1", + "uuid": "^8.3.2" + } +} diff --git a/packages/files/src/Bootstrap/Container.ts b/packages/files/src/Bootstrap/Container.ts new file mode 100644 index 000000000..e4592db9e --- /dev/null +++ b/packages/files/src/Bootstrap/Container.ts @@ -0,0 +1,226 @@ +import * as winston from 'winston' +import Redis from 'ioredis' +import * as AWS from 'aws-sdk' +import { Container } from 'inversify' + +import { Env } from './Env' +import TYPES from './Types' +import { UploadFileChunk } from '../Domain/UseCase/UploadFileChunk/UploadFileChunk' +import { ValetTokenAuthMiddleware } from '../Controller/ValetTokenAuthMiddleware' +import { TokenDecoder, TokenDecoderInterface, ValetTokenData } from '@standardnotes/auth' +import { Timer, TimerInterface } from '@standardnotes/time' +import { DomainEventFactoryInterface } from '../Domain/Event/DomainEventFactoryInterface' +import { DomainEventFactory } from '../Domain/Event/DomainEventFactory' +import { + RedisDomainEventPublisher, + RedisDomainEventSubscriberFactory, + RedisEventMessageHandler, + SNSDomainEventPublisher, + SQSDomainEventSubscriberFactory, + SQSEventMessageHandler, + SQSNewRelicEventMessageHandler, +} from '@standardnotes/domain-events-infra' +import { StreamDownloadFile } from '../Domain/UseCase/StreamDownloadFile/StreamDownloadFile' +import { FileDownloaderInterface } from '../Domain/Services/FileDownloaderInterface' +import { S3FileDownloader } from '../Infra/S3/S3FileDownloader' +import { FileUploaderInterface } from '../Domain/Services/FileUploaderInterface' +import { S3FileUploader } from '../Infra/S3/S3FileUploader' +import { FSFileDownloader } from '../Infra/FS/FSFileDownloader' +import { FSFileUploader } from '../Infra/FS/FSFileUploader' +import { CreateUploadSession } from '../Domain/UseCase/CreateUploadSession/CreateUploadSession' +import { FinishUploadSession } from '../Domain/UseCase/FinishUploadSession/FinishUploadSession' +import { UploadRepositoryInterface } from 
'../Domain/Upload/UploadRepositoryInterface' +import { RedisUploadRepository } from '../Infra/Redis/RedisUploadRepository' +import { GetFileMetadata } from '../Domain/UseCase/GetFileMetadata/GetFileMetadata' +import { FileRemoverInterface } from '../Domain/Services/FileRemoverInterface' +import { S3FileRemover } from '../Infra/S3/S3FileRemover' +import { FSFileRemover } from '../Infra/FS/FSFileRemover' +import { RemoveFile } from '../Domain/UseCase/RemoveFile/RemoveFile' +import { + DomainEventHandlerInterface, + DomainEventMessageHandlerInterface, + DomainEventSubscriberFactoryInterface, +} from '@standardnotes/domain-events' +import { MarkFilesToBeRemoved } from '../Domain/UseCase/MarkFilesToBeRemoved/MarkFilesToBeRemoved' +import { AccountDeletionRequestedEventHandler } from '../Domain/Handler/AccountDeletionRequestedEventHandler' +import { SharedSubscriptionInvitationCanceledEventHandler } from '../Domain/Handler/SharedSubscriptionInvitationCanceledEventHandler' + +export class ContainerConfigLoader { + async load(): Promise { + const env: Env = new Env() + env.load() + + const container = new Container() + + const logger = this.createLogger({ env }) + container.bind(TYPES.Logger).toConstantValue(logger) + + // env vars + container.bind(TYPES.S3_BUCKET_NAME).toConstantValue(env.get('S3_BUCKET_NAME', true)) + container.bind(TYPES.S3_AWS_REGION).toConstantValue(env.get('S3_AWS_REGION', true)) + container.bind(TYPES.VALET_TOKEN_SECRET).toConstantValue(env.get('VALET_TOKEN_SECRET')) + container.bind(TYPES.SNS_TOPIC_ARN).toConstantValue(env.get('SNS_TOPIC_ARN', true)) + container.bind(TYPES.SNS_AWS_REGION).toConstantValue(env.get('SNS_AWS_REGION', true)) + container.bind(TYPES.REDIS_URL).toConstantValue(env.get('REDIS_URL')) + container.bind(TYPES.REDIS_EVENTS_CHANNEL).toConstantValue(env.get('REDIS_EVENTS_CHANNEL')) + container.bind(TYPES.MAX_CHUNK_BYTES).toConstantValue(+env.get('MAX_CHUNK_BYTES')) + container.bind(TYPES.VERSION).toConstantValue(env.get('VERSION')) + container.bind(TYPES.SQS_QUEUE_URL).toConstantValue(env.get('SQS_QUEUE_URL', true)) + container + .bind(TYPES.FILE_UPLOAD_PATH) + .toConstantValue(env.get('FILE_UPLOAD_PATH', true) ?? 
`${__dirname}/../../uploads`) + + const redisUrl = container.get(TYPES.REDIS_URL) as string + const isRedisInClusterMode = redisUrl.indexOf(',') > 0 + let redis + if (isRedisInClusterMode) { + redis = new Redis.Cluster(redisUrl.split(',')) + } else { + redis = new Redis(redisUrl) + } + + container.bind(TYPES.Redis).toConstantValue(redis) + + if (env.get('AWS_ACCESS_KEY_ID', true)) { + AWS.config.credentials = new AWS.EnvironmentCredentials('AWS') + } + + if (env.get('S3_AWS_REGION', true) || env.get('S3_ENDPOINT', true)) { + const s3Opts: AWS.S3.Types.ClientConfiguration = { + apiVersion: 'latest', + } + if (env.get('S3_AWS_REGION', true)) { + s3Opts.region = env.get('S3_AWS_REGION', true) + } + if (env.get('S3_ENDPOINT', true)) { + s3Opts.endpoint = new AWS.Endpoint(env.get('S3_ENDPOINT', true)) + } + const s3Client = new AWS.S3(s3Opts) + container.bind(TYPES.S3).toConstantValue(s3Client) + container.bind(TYPES.FileDownloader).to(S3FileDownloader) + container.bind(TYPES.FileUploader).to(S3FileUploader) + container.bind(TYPES.FileRemover).to(S3FileRemover) + } else { + container.bind(TYPES.FileDownloader).to(FSFileDownloader) + container + .bind(TYPES.FileUploader) + .toConstantValue(new FSFileUploader(container.get(TYPES.FILE_UPLOAD_PATH), container.get(TYPES.Logger))) + container.bind(TYPES.FileRemover).to(FSFileRemover) + } + + if (env.get('SNS_AWS_REGION', true)) { + container.bind(TYPES.SNS).toConstantValue( + new AWS.SNS({ + apiVersion: 'latest', + region: env.get('SNS_AWS_REGION', true), + }), + ) + } + + if (env.get('SQS_QUEUE_URL', true)) { + const sqsConfig: AWS.SQS.Types.ClientConfiguration = { + apiVersion: 'latest', + region: env.get('SQS_AWS_REGION', true), + } + if (env.get('SQS_ACCESS_KEY_ID', true) && env.get('SQS_SECRET_ACCESS_KEY', true)) { + sqsConfig.credentials = { + accessKeyId: env.get('SQS_ACCESS_KEY_ID', true), + secretAccessKey: env.get('SQS_SECRET_ACCESS_KEY', true), + } + } + container.bind(TYPES.SQS).toConstantValue(new AWS.SQS(sqsConfig)) + } + + // use cases + container.bind(TYPES.UploadFileChunk).to(UploadFileChunk) + container.bind(TYPES.StreamDownloadFile).to(StreamDownloadFile) + container.bind(TYPES.CreateUploadSession).to(CreateUploadSession) + container.bind(TYPES.FinishUploadSession).to(FinishUploadSession) + container.bind(TYPES.GetFileMetadata).to(GetFileMetadata) + container.bind(TYPES.RemoveFile).to(RemoveFile) + container.bind(TYPES.MarkFilesToBeRemoved).to(MarkFilesToBeRemoved) + + // middleware + container.bind(TYPES.ValetTokenAuthMiddleware).to(ValetTokenAuthMiddleware) + + // services + container + .bind>(TYPES.ValetTokenDecoder) + .toConstantValue(new TokenDecoder(container.get(TYPES.VALET_TOKEN_SECRET))) + container.bind(TYPES.Timer).toConstantValue(new Timer()) + container.bind(TYPES.DomainEventFactory).to(DomainEventFactory) + + // repositories + container.bind(TYPES.UploadRepository).to(RedisUploadRepository) + + if (env.get('SNS_TOPIC_ARN', true)) { + container + .bind(TYPES.DomainEventPublisher) + .toConstantValue(new SNSDomainEventPublisher(container.get(TYPES.SNS), container.get(TYPES.SNS_TOPIC_ARN))) + } else { + container + .bind(TYPES.DomainEventPublisher) + .toConstantValue( + new RedisDomainEventPublisher(container.get(TYPES.Redis), container.get(TYPES.REDIS_EVENTS_CHANNEL)), + ) + } + + // Handlers + container + .bind(TYPES.AccountDeletionRequestedEventHandler) + .to(AccountDeletionRequestedEventHandler) + container + .bind(TYPES.SharedSubscriptionInvitationCanceledEventHandler) + 
.to(SharedSubscriptionInvitationCanceledEventHandler) + + const eventHandlers: Map = new Map([ + ['ACCOUNT_DELETION_REQUESTED', container.get(TYPES.AccountDeletionRequestedEventHandler)], + [ + 'SHARED_SUBSCRIPTION_INVITATION_CANCELED', + container.get(TYPES.SharedSubscriptionInvitationCanceledEventHandler), + ], + ]) + + if (env.get('SQS_QUEUE_URL', true)) { + container + .bind(TYPES.DomainEventMessageHandler) + .toConstantValue( + env.get('NEW_RELIC_ENABLED', true) === 'true' + ? new SQSNewRelicEventMessageHandler(eventHandlers, container.get(TYPES.Logger)) + : new SQSEventMessageHandler(eventHandlers, container.get(TYPES.Logger)), + ) + container + .bind(TYPES.DomainEventSubscriberFactory) + .toConstantValue( + new SQSDomainEventSubscriberFactory( + container.get(TYPES.SQS), + container.get(TYPES.SQS_QUEUE_URL), + container.get(TYPES.DomainEventMessageHandler), + ), + ) + } else { + container + .bind(TYPES.DomainEventMessageHandler) + .toConstantValue(new RedisEventMessageHandler(eventHandlers, container.get(TYPES.Logger))) + container + .bind(TYPES.DomainEventSubscriberFactory) + .toConstantValue( + new RedisDomainEventSubscriberFactory( + container.get(TYPES.Redis), + container.get(TYPES.DomainEventMessageHandler), + container.get(TYPES.REDIS_EVENTS_CHANNEL), + ), + ) + } + + return container + } + + createLogger({ env }: { env: Env }): winston.Logger { + return winston.createLogger({ + level: env.get('LOG_LEVEL') || 'info', + format: winston.format.combine(winston.format.splat(), winston.format.json()), + transports: [new winston.transports.Console({ level: env.get('LOG_LEVEL') || 'info' })], + }) + } +} diff --git a/packages/files/src/Bootstrap/Env.ts b/packages/files/src/Bootstrap/Env.ts new file mode 100644 index 000000000..b26b07aca --- /dev/null +++ b/packages/files/src/Bootstrap/Env.ts @@ -0,0 +1,24 @@ +import { config, DotenvParseOutput } from 'dotenv' +import { injectable } from 'inversify' + +@injectable() +export class Env { + private env?: DotenvParseOutput + + public load(): void { + const output = config() + this.env = output.parsed + } + + public get(key: string, optional = false): string { + if (!this.env) { + this.load() + } + + if (!process.env[key] && !optional) { + throw new Error(`Environment variable ${key} not set`) + } + + return process.env[key] + } +} diff --git a/packages/files/src/Bootstrap/Types.ts b/packages/files/src/Bootstrap/Types.ts new file mode 100644 index 000000000..ab087e4b1 --- /dev/null +++ b/packages/files/src/Bootstrap/Types.ts @@ -0,0 +1,58 @@ +const TYPES = { + Logger: Symbol.for('Logger'), + HTTPClient: Symbol.for('HTTPClient'), + Redis: Symbol.for('Redis'), + S3: Symbol.for('S3'), + SNS: Symbol.for('SNS'), + SQS: Symbol.for('SQS'), + + // use cases + UploadFileChunk: Symbol.for('UploadFileChunk'), + StreamDownloadFile: Symbol.for('StreamDownloadFile'), + CreateUploadSession: Symbol.for('CreateUploadSession'), + FinishUploadSession: Symbol.for('FinishUploadSession'), + GetFileMetadata: Symbol.for('GetFileMetadata'), + RemoveFile: Symbol.for('RemoveFile'), + MarkFilesToBeRemoved: Symbol.for('MarkFilesToBeRemoved'), + + // services + ValetTokenDecoder: Symbol.for('ValetTokenDecoder'), + Timer: Symbol.for('Timer'), + DomainEventFactory: Symbol.for('DomainEventFactory'), + DomainEventPublisher: Symbol.for('DomainEventPublisher'), + FileUploader: Symbol.for('FileUploader'), + FileDownloader: Symbol.for('FileDownloader'), + FileRemover: Symbol.for('FileRemover'), + + // repositories + UploadRepository: Symbol.for('UploadRepository'), + + // 
middleware + ValetTokenAuthMiddleware: Symbol.for('ValetTokenAuthMiddleware'), + + // env vars + AWS_ACCESS_KEY_ID: Symbol.for('AWS_ACCESS_KEY_ID'), + AWS_SECRET_ACCESS_KEY: Symbol.for('AWS_SECRET_ACCESS_KEY'), + S3_ENDPOINT: Symbol.for('S3_ENDPOINT'), + S3_BUCKET_NAME: Symbol.for('S3_BUCKET_NAME'), + S3_AWS_REGION: Symbol.for('S3_AWS_REGION'), + SNS_TOPIC_ARN: Symbol.for('SNS_TOPIC_ARN'), + SNS_AWS_REGION: Symbol.for('SNS_AWS_REGION'), + SQS_QUEUE_URL: Symbol.for('SQS_QUEUE_URL'), + SQS_AWS_REGION: Symbol.for('SQS_AWS_REGION'), + VALET_TOKEN_SECRET: Symbol.for('VALET_TOKEN_SECRET'), + REDIS_URL: Symbol.for('REDIS_URL'), + REDIS_EVENTS_CHANNEL: Symbol.for('REDIS_EVENTS_CHANNEL'), + MAX_CHUNK_BYTES: Symbol.for('MAX_CHUNK_BYTES'), + VERSION: Symbol.for('VERSION'), + NEW_RELIC_ENABLED: Symbol.for('NEW_RELIC_ENABLED'), + FILE_UPLOAD_PATH: Symbol.for('FILE_UPLOAD_PATH'), + + // Handlers + DomainEventMessageHandler: Symbol.for('DomainEventMessageHandler'), + DomainEventSubscriberFactory: Symbol.for('DomainEventSubscriberFactory'), + AccountDeletionRequestedEventHandler: Symbol.for('AccountDeletionRequestedEventHandler'), + SharedSubscriptionInvitationCanceledEventHandler: Symbol.for('SharedSubscriptionInvitationCanceledEventHandler'), +} + +export default TYPES diff --git a/packages/files/src/Controller/FilesController.spec.ts b/packages/files/src/Controller/FilesController.spec.ts new file mode 100644 index 000000000..e60d81e50 --- /dev/null +++ b/packages/files/src/Controller/FilesController.spec.ts @@ -0,0 +1,259 @@ +import 'reflect-metadata' + +import { CreateUploadSession } from '../Domain/UseCase/CreateUploadSession/CreateUploadSession' +import { FinishUploadSession } from '../Domain/UseCase/FinishUploadSession/FinishUploadSession' +import { StreamDownloadFile } from '../Domain/UseCase/StreamDownloadFile/StreamDownloadFile' +import { UploadFileChunk } from '../Domain/UseCase/UploadFileChunk/UploadFileChunk' + +import { Request, Response } from 'express' +import { Writable, Readable } from 'stream' +import { FilesController } from './FilesController' +import { GetFileMetadata } from '../Domain/UseCase/GetFileMetadata/GetFileMetadata' +import { results } from 'inversify-express-utils' +import { RemoveFile } from '../Domain/UseCase/RemoveFile/RemoveFile' + +describe('FilesController', () => { + let uploadFileChunk: UploadFileChunk + let createUploadSession: CreateUploadSession + let finishUploadSession: FinishUploadSession + let streamDownloadFile: StreamDownloadFile + let getFileMetadata: GetFileMetadata + let removeFile: RemoveFile + let request: Request + let response: Response + let readStream: Readable + const maxChunkBytes = 100_000 + + const createController = () => + new FilesController( + uploadFileChunk, + createUploadSession, + finishUploadSession, + streamDownloadFile, + getFileMetadata, + removeFile, + maxChunkBytes, + ) + + beforeEach(() => { + readStream = {} as jest.Mocked + readStream.pipe = jest.fn().mockReturnValue(new Writable()) + + streamDownloadFile = {} as jest.Mocked + streamDownloadFile.execute = jest.fn().mockReturnValue({ success: true, readStream }) + + uploadFileChunk = {} as jest.Mocked + uploadFileChunk.execute = jest.fn().mockReturnValue({ success: true }) + + createUploadSession = {} as jest.Mocked + createUploadSession.execute = jest.fn().mockReturnValue({ success: true, uploadId: '123' }) + + finishUploadSession = {} as jest.Mocked + finishUploadSession.execute = jest.fn().mockReturnValue({ success: true }) + + getFileMetadata = {} as jest.Mocked + 
getFileMetadata.execute = jest.fn().mockReturnValue({ success: true, size: 555_555 }) + + removeFile = {} as jest.Mocked + removeFile.execute = jest.fn().mockReturnValue({ success: true }) + + request = { + body: {}, + headers: {}, + } as jest.Mocked + response = { + locals: {}, + } as jest.Mocked + response.locals.userUuid = '1-2-3' + response.locals.permittedResources = [ + { + remoteIdentifier: '2-3-4', + unencryptedFileSize: 123, + }, + ] + response.writeHead = jest.fn() + }) + + it('should return a writable stream upon file download', async () => { + request.headers['range'] = 'bytes=0-' + + const result = (await createController().download(request, response)) as () => Writable + + expect(response.writeHead).toHaveBeenCalledWith(206, { + 'Accept-Ranges': 'bytes', + 'Content-Length': 100000, + 'Content-Range': 'bytes 0-99999/555555', + 'Content-Type': 'application/octet-stream', + }) + + expect(result()).toBeInstanceOf(Writable) + }) + + it('should return proper byte range on consecutive calls', async () => { + request.headers['range'] = 'bytes=0-' + ;(await createController().download(request, response)) as () => Writable + + request.headers['range'] = 'bytes=100000-' + ;(await createController().download(request, response)) as () => Writable + + expect(response.writeHead).toHaveBeenNthCalledWith(1, 206, { + 'Accept-Ranges': 'bytes', + 'Content-Length': 100000, + 'Content-Range': 'bytes 0-99999/555555', + 'Content-Type': 'application/octet-stream', + }) + + expect(response.writeHead).toHaveBeenNthCalledWith(2, 206, { + 'Accept-Ranges': 'bytes', + 'Content-Length': 100000, + 'Content-Range': 'bytes 100000-199999/555555', + 'Content-Type': 'application/octet-stream', + }) + }) + + it('should return a writable stream with custom chunk size', async () => { + request.headers['x-chunk-size'] = '50000' + request.headers['range'] = 'bytes=0-' + + const result = (await createController().download(request, response)) as () => Writable + + expect(response.writeHead).toHaveBeenCalledWith(206, { + 'Accept-Ranges': 'bytes', + 'Content-Length': 50000, + 'Content-Range': 'bytes 0-49999/555555', + 'Content-Type': 'application/octet-stream', + }) + + expect(result()).toBeInstanceOf(Writable) + }) + + it('should default to maximum chunk size if custom chunk size is too large', async () => { + request.headers['x-chunk-size'] = '200000' + request.headers['range'] = 'bytes=0-' + + const result = (await createController().download(request, response)) as () => Writable + + expect(response.writeHead).toHaveBeenCalledWith(206, { + 'Accept-Ranges': 'bytes', + 'Content-Length': 100000, + 'Content-Range': 'bytes 0-99999/555555', + 'Content-Type': 'application/octet-stream', + }) + + expect(result()).toBeInstanceOf(Writable) + }) + + it('should not return a writable stream if bytes range is not provided', async () => { + const httpResponse = await createController().download(request, response) + + expect(httpResponse).toBeInstanceOf(results.BadRequestErrorMessageResult) + }) + + it('should not return a writable stream if getting file metadata fails', async () => { + request.headers['range'] = 'bytes=0-' + + getFileMetadata.execute = jest.fn().mockReturnValue({ success: false, message: 'error' }) + + const httpResponse = await createController().download(request, response) + + expect(httpResponse).toBeInstanceOf(results.BadRequestErrorMessageResult) + }) + + it('should not return a writable stream if creating download stream fails', async () => { + request.headers['range'] = 'bytes=0-' + + 
streamDownloadFile.execute = jest.fn().mockReturnValue({ success: false, message: 'error' }) + + const httpResponse = await createController().download(request, response) + + expect(httpResponse).toBeInstanceOf(results.BadRequestErrorMessageResult) + }) + + it('should create an upload session', async () => { + await createController().startUpload(request, response) + + expect(createUploadSession.execute).toHaveBeenCalledWith({ + resourceRemoteIdentifier: '2-3-4', + userUuid: '1-2-3', + }) + }) + + it('should return bad request if upload session could not be created', async () => { + createUploadSession.execute = jest.fn().mockReturnValue({ success: false }) + + const httpResponse = await createController().startUpload(request, response) + const result = await httpResponse.executeAsync() + + expect(result.statusCode).toEqual(400) + }) + + it('should finish an upload session', async () => { + await createController().finishUpload(request, response) + + expect(finishUploadSession.execute).toHaveBeenCalledWith({ + resourceRemoteIdentifier: '2-3-4', + userUuid: '1-2-3', + }) + }) + + it('should return bad request if upload session could not be finished', async () => { + finishUploadSession.execute = jest.fn().mockReturnValue({ success: false }) + + const httpResponse = await createController().finishUpload(request, response) + const result = await httpResponse.executeAsync() + + expect(result.statusCode).toEqual(400) + }) + + it('should remove a file', async () => { + await createController().remove(request, response) + + expect(removeFile.execute).toHaveBeenCalledWith({ + resourceRemoteIdentifier: '2-3-4', + userUuid: '1-2-3', + }) + }) + + it('should return bad request if file removal could not be completed', async () => { + removeFile.execute = jest.fn().mockReturnValue({ success: false }) + + const httpResponse = await createController().remove(request, response) + const result = await httpResponse.executeAsync() + + expect(result.statusCode).toEqual(400) + }) + + it('should upload a chunk to an upload session', async () => { + request.headers['x-chunk-id'] = '2' + request.body = Buffer.from([123]) + + await createController().uploadChunk(request, response) + + expect(uploadFileChunk.execute).toHaveBeenCalledWith({ + chunkId: 2, + data: Buffer.from([123]), + resourceRemoteIdentifier: '2-3-4', + userUuid: '1-2-3', + }) + }) + + it('should return bad request if chunk could not be uploaded', async () => { + request.headers['x-chunk-id'] = '2' + request.body = Buffer.from([123]) + uploadFileChunk.execute = jest.fn().mockReturnValue({ success: false }) + + const httpResponse = await createController().uploadChunk(request, response) + const result = await httpResponse.executeAsync() + + expect(result.statusCode).toEqual(400) + }) + + it('should return bad request if chunk id is missing', async () => { + request.body = Buffer.from([123]) + + const httpResponse = await createController().uploadChunk(request, response) + const result = await httpResponse.executeAsync() + + expect(result.statusCode).toEqual(400) + }) +}) diff --git a/packages/files/src/Controller/FilesController.ts b/packages/files/src/Controller/FilesController.ts new file mode 100644 index 000000000..7da7bb92b --- /dev/null +++ b/packages/files/src/Controller/FilesController.ts @@ -0,0 +1,154 @@ +import { BaseHttpController, controller, httpDelete, httpGet, httpPost, results } from 'inversify-express-utils' +import { Request, Response } from 'express' +import { inject } from 'inversify' +import { Writable } from 'stream' +import 
TYPES from '../Bootstrap/Types' +import { UploadFileChunk } from '../Domain/UseCase/UploadFileChunk/UploadFileChunk' +import { StreamDownloadFile } from '../Domain/UseCase/StreamDownloadFile/StreamDownloadFile' +import { CreateUploadSession } from '../Domain/UseCase/CreateUploadSession/CreateUploadSession' +import { FinishUploadSession } from '../Domain/UseCase/FinishUploadSession/FinishUploadSession' +import { GetFileMetadata } from '../Domain/UseCase/GetFileMetadata/GetFileMetadata' +import { RemoveFile } from '../Domain/UseCase/RemoveFile/RemoveFile' + +@controller('/v1/files', TYPES.ValetTokenAuthMiddleware) +export class FilesController extends BaseHttpController { + constructor( + @inject(TYPES.UploadFileChunk) private uploadFileChunk: UploadFileChunk, + @inject(TYPES.CreateUploadSession) private createUploadSession: CreateUploadSession, + @inject(TYPES.FinishUploadSession) private finishUploadSession: FinishUploadSession, + @inject(TYPES.StreamDownloadFile) private streamDownloadFile: StreamDownloadFile, + @inject(TYPES.GetFileMetadata) private getFileMetadata: GetFileMetadata, + @inject(TYPES.RemoveFile) private removeFile: RemoveFile, + @inject(TYPES.MAX_CHUNK_BYTES) private maxChunkBytes: number, + ) { + super() + } + + @httpPost('/upload/create-session') + async startUpload( + _request: Request, + response: Response, + ): Promise<results.JsonResult | results.BadRequestErrorMessageResult> { + const result = await this.createUploadSession.execute({ + userUuid: response.locals.userUuid, + resourceRemoteIdentifier: response.locals.permittedResources[0].remoteIdentifier, + }) + + if (!result.success) { + return this.badRequest(result.message) + } + + return this.json({ success: true, uploadId: result.uploadId }) + } + + @httpPost('/upload/chunk') + async uploadChunk( + request: Request, + response: Response, + ): Promise<results.JsonResult | results.BadRequestErrorMessageResult> { + const chunkId = +(request.headers['x-chunk-id'] as string) + if (!chunkId) { + return this.badRequest('Missing x-chunk-id header in request.') + } + + const result = await this.uploadFileChunk.execute({ + userUuid: response.locals.userUuid, + resourceRemoteIdentifier: response.locals.permittedResources[0].remoteIdentifier, + chunkId, + data: request.body, + }) + + if (!result.success) { + return this.badRequest(result.message) + } + + return this.json({ success: true, message: 'Chunk uploaded successfully' }) + } + + @httpPost('/upload/close-session') + public async finishUpload( + _request: Request, + response: Response, + ): Promise<results.JsonResult | results.BadRequestErrorMessageResult> { + const result = await this.finishUploadSession.execute({ + userUuid: response.locals.userUuid, + resourceRemoteIdentifier: response.locals.permittedResources[0].remoteIdentifier, + uploadBytesLimit: response.locals.uploadBytesLimit, + uploadBytesUsed: response.locals.uploadBytesUsed, + }) + + if (!result.success) { + return this.badRequest(result.message) + } + + return this.json({ success: true, message: 'File uploaded successfully' }) + } + + @httpDelete('/') + async remove( + _request: Request, + response: Response, + ): Promise<results.JsonResult | results.BadRequestErrorMessageResult> { + const result = await this.removeFile.execute({ + userUuid: response.locals.userUuid, + resourceRemoteIdentifier: response.locals.permittedResources[0].remoteIdentifier, + regularSubscriptionUuid: response.locals.regularSubscriptionUuid, + }) + + if (!result.success) { + return this.badRequest(result.message) + } + + return this.json({ success: true, message: 'File removed successfully' }) + } + + @httpGet('/') + async download( + request: Request, + response: Response, + ): Promise<results.BadRequestErrorMessageResult | (() => Writable)> { + const range = request.headers['range'] + if (!range) {
return this.badRequest('File download requires range header to be set.') + } + + let chunkSize = +(request.headers['x-chunk-size'] as string) + if (!chunkSize || chunkSize > this.maxChunkBytes) { + chunkSize = this.maxChunkBytes + } + + const fileMetadata = await this.getFileMetadata.execute({ + userUuid: response.locals.userUuid, + resourceRemoteIdentifier: response.locals.permittedResources[0].remoteIdentifier, + }) + + if (!fileMetadata.success) { + return this.badRequest(fileMetadata.message) + } + + const startRange = Number(range.replace(/\D/g, '')) + const endRange = Math.min(startRange + chunkSize - 1, fileMetadata.size - 1) + + const headers = { + 'Content-Range': `bytes ${startRange}-${endRange}/${fileMetadata.size}`, + 'Accept-Ranges': 'bytes', + 'Content-Length': endRange - startRange + 1, + 'Content-Type': 'application/octet-stream', + } + + response.writeHead(206, headers) + + const result = await this.streamDownloadFile.execute({ + userUuid: response.locals.userUuid, + resourceRemoteIdentifier: response.locals.permittedResources[0].remoteIdentifier, + startRange, + endRange, + }) + + if (!result.success) { + return this.badRequest(result.message) + } + + return () => result.readStream.pipe(response) + } +} diff --git a/packages/files/src/Controller/HealthCheckController.spec.ts b/packages/files/src/Controller/HealthCheckController.spec.ts new file mode 100644 index 000000000..9ead2fccb --- /dev/null +++ b/packages/files/src/Controller/HealthCheckController.spec.ts @@ -0,0 +1,12 @@ +import 'reflect-metadata' + +import { HealthCheckController } from './HealthCheckController' + +describe('HealthCheckController', () => { + const createController = () => new HealthCheckController() + + it('should return OK', async () => { + const response = (await createController().get()) as string + expect(response).toEqual('OK') + }) +}) diff --git a/packages/files/src/Controller/HealthCheckController.ts b/packages/files/src/Controller/HealthCheckController.ts new file mode 100644 index 000000000..431e74056 --- /dev/null +++ b/packages/files/src/Controller/HealthCheckController.ts @@ -0,0 +1,9 @@ +import { controller, httpGet } from 'inversify-express-utils' + +@controller('/healthcheck') +export class HealthCheckController { + @httpGet('/') + public async get(): Promise { + return 'OK' + } +} diff --git a/packages/files/src/Controller/ValetTokenAuthMiddleware.spec.ts b/packages/files/src/Controller/ValetTokenAuthMiddleware.spec.ts new file mode 100644 index 000000000..8ca052493 --- /dev/null +++ b/packages/files/src/Controller/ValetTokenAuthMiddleware.spec.ts @@ -0,0 +1,203 @@ +import 'reflect-metadata' + +import { ValetTokenAuthMiddleware } from './ValetTokenAuthMiddleware' +import { NextFunction, Request, Response } from 'express' +import { Logger } from 'winston' +import { TokenDecoderInterface, ValetTokenData } from '@standardnotes/auth' + +describe('ValetTokenAuthMiddleware', () => { + let tokenDecoder: TokenDecoderInterface + let request: Request + let response: Response + let next: NextFunction + + const logger = { + debug: jest.fn(), + } as unknown as jest.Mocked + + const createMiddleware = () => new ValetTokenAuthMiddleware(tokenDecoder, logger) + + beforeEach(() => { + tokenDecoder = {} as jest.Mocked> + tokenDecoder.decodeToken = jest.fn().mockReturnValue({ + userUuid: '1-2-3', + permittedResources: [ + { + remoteIdentifier: '1-2-3/2-3-4', + unencryptedFileSize: 30, + }, + ], + permittedOperation: 'write', + uploadBytesLimit: 100, + uploadBytesUsed: 80, + }) + + request = { + 
headers: {}, + query: {}, + body: {}, + } as jest.Mocked + response = { + locals: {}, + } as jest.Mocked + response.status = jest.fn().mockReturnThis() + response.send = jest.fn() + next = jest.fn() + }) + + it('should authorize user with a valet token', async () => { + tokenDecoder.decodeToken = jest.fn().mockReturnValue({ + userUuid: '1-2-3', + permittedResources: [ + { + remoteIdentifier: '1-2-3/2-3-4', + unencryptedFileSize: 30, + }, + ], + permittedOperation: 'write', + uploadBytesLimit: -1, + uploadBytesUsed: 80, + }) + + request.headers['x-valet-token'] = 'valet-token' + + await createMiddleware().handler(request, response, next) + + expect(response.locals).toEqual({ + userUuid: '1-2-3', + permittedOperation: 'write', + permittedResources: [ + { + remoteIdentifier: '1-2-3/2-3-4', + unencryptedFileSize: 30, + }, + ], + uploadBytesLimit: -1, + uploadBytesUsed: 80, + }) + + expect(next).toHaveBeenCalled() + }) + + it('should authorize user with unlimited upload with a valet token', async () => { + request.headers['x-valet-token'] = 'valet-token' + tokenDecoder.decodeToken = jest.fn().mockReturnValue({ + userUuid: '1-2-3', + permittedResources: [ + { + remoteIdentifier: '1-2-3/2-3-4', + unencryptedFileSize: 10, + }, + ], + permittedOperation: 'write', + uploadBytesLimit: -1, + uploadBytesUsed: 80, + }) + + await createMiddleware().handler(request, response, next) + + expect(response.locals).toEqual({ + userUuid: '1-2-3', + permittedOperation: 'write', + permittedResources: [ + { + remoteIdentifier: '1-2-3/2-3-4', + unencryptedFileSize: 10, + }, + ], + uploadBytesLimit: -1, + uploadBytesUsed: 80, + }) + + expect(next).toHaveBeenCalled() + }) + + it('should not authorize user with no space left for upload', async () => { + request.headers['x-valet-token'] = 'valet-token' + tokenDecoder.decodeToken = jest.fn().mockReturnValue({ + userUuid: '1-2-3', + permittedResources: [ + { + remoteIdentifier: '1-2-3/2-3-4', + unencryptedFileSize: 21, + }, + ], + permittedOperation: 'write', + uploadBytesLimit: 100, + uploadBytesUsed: 80, + }) + + await createMiddleware().handler(request, response, next) + + expect(response.status).toHaveBeenCalledWith(403) + expect(next).not.toHaveBeenCalled() + }) + + it('should authorize user with no space left for upload for download operations', async () => { + request.headers['x-valet-token'] = 'valet-token' + + tokenDecoder.decodeToken = jest.fn().mockReturnValue({ + userUuid: '1-2-3', + permittedResources: [ + { + remoteIdentifier: '1-2-3/2-3-4', + unencryptedFileSize: 21, + }, + ], + permittedOperation: 'read', + uploadBytesLimit: 100, + uploadBytesUsed: 80, + }) + + await createMiddleware().handler(request, response, next) + + expect(response.locals).toEqual({ + userUuid: '1-2-3', + permittedOperation: 'read', + permittedResources: [ + { + remoteIdentifier: '1-2-3/2-3-4', + unencryptedFileSize: 21, + }, + ], + uploadBytesLimit: 100, + uploadBytesUsed: 80, + }) + + expect(next).toHaveBeenCalled() + }) + + it('should not authorize if request is missing valet token in headers', async () => { + await createMiddleware().handler(request, response, next) + + expect(response.status).toHaveBeenCalledWith(401) + expect(next).not.toHaveBeenCalled() + }) + + it('should not authorize if auth valet token is malformed', async () => { + request.headers['x-valet-token'] = 'valet-token' + + tokenDecoder.decodeToken = jest.fn().mockReturnValue(undefined) + + await createMiddleware().handler(request, response, next) + + expect(response.status).toHaveBeenCalledWith(401) + 
expect(next).not.toHaveBeenCalled() + }) + + it('should pass the error to next middleware if one occurs', async () => { + request.headers['x-valet-token'] = 'valet-token' + + const error = new Error('Ooops') + + tokenDecoder.decodeToken = jest.fn().mockImplementation(() => { + throw error + }) + + await createMiddleware().handler(request, response, next) + + expect(response.status).not.toHaveBeenCalled() + + expect(next).toHaveBeenCalledWith(error) + }) +}) diff --git a/packages/files/src/Controller/ValetTokenAuthMiddleware.ts b/packages/files/src/Controller/ValetTokenAuthMiddleware.ts new file mode 100644 index 000000000..529b712d5 --- /dev/null +++ b/packages/files/src/Controller/ValetTokenAuthMiddleware.ts @@ -0,0 +1,90 @@ +import { TokenDecoderInterface, ValetTokenData } from '@standardnotes/auth' +import { NextFunction, Request, Response } from 'express' +import { inject, injectable } from 'inversify' +import { BaseMiddleware } from 'inversify-express-utils' +import { Logger } from 'winston' +import TYPES from '../Bootstrap/Types' + +@injectable() +export class ValetTokenAuthMiddleware extends BaseMiddleware { + constructor( + @inject(TYPES.ValetTokenDecoder) private tokenDecoder: TokenDecoderInterface<ValetTokenData>, + @inject(TYPES.Logger) private logger: Logger, + ) { + super() + } + + async handler(request: Request, response: Response, next: NextFunction): Promise<void> { + try { + const valetToken = request.headers['x-valet-token'] || request.body.valetToken || request.query.valetToken + if (!valetToken) { + this.logger.debug('ValetTokenAuthMiddleware missing valet token.') + + response.status(401).send({ + error: { + tag: 'invalid-auth', + message: 'Invalid valet token.', + }, + }) + + return + } + + const valetTokenData = this.tokenDecoder.decodeToken(valetToken) + + if (valetTokenData === undefined) { + this.logger.debug('ValetTokenAuthMiddleware authentication failure.') + + response.status(401).send({ + error: { + tag: 'invalid-auth', + message: 'Invalid valet token.', + }, + }) + + return + } + + if (this.userHasNoSpaceToUpload(valetTokenData)) { + response.status(403).send({ + error: { + tag: 'no-space', + message: 'The file you are trying to upload is too big. 
Please upgrade your subscription', + }, + }) + + return + } + + response.locals.userUuid = valetTokenData.userUuid + response.locals.permittedResources = valetTokenData.permittedResources + response.locals.permittedOperation = valetTokenData.permittedOperation + response.locals.uploadBytesUsed = valetTokenData.uploadBytesUsed + response.locals.uploadBytesLimit = valetTokenData.uploadBytesLimit + response.locals.regularSubscriptionUuid = valetTokenData.regularSubscriptionUuid + + return next() + } catch (error) { + return next(error) + } + } + + private userHasNoSpaceToUpload(valetTokenData: ValetTokenData) { + if (valetTokenData.permittedOperation !== 'write') { + return false + } + + if (valetTokenData.uploadBytesLimit === -1) { + return false + } + + const remainingUploadSpace = valetTokenData.uploadBytesLimit - valetTokenData.uploadBytesUsed + + let consideredUploadSize = 0 + for (const resource of valetTokenData.permittedResources) { + consideredUploadSize += resource.unencryptedFileSize as number + } + + return remainingUploadSpace - consideredUploadSize <= 0 + } +} diff --git a/packages/files/src/Domain/Event/DomainEventFactory.spec.ts b/packages/files/src/Domain/Event/DomainEventFactory.spec.ts new file mode 100644 index 000000000..260778b4d --- /dev/null +++ b/packages/files/src/Domain/Event/DomainEventFactory.spec.ts @@ -0,0 +1,72 @@ +import 'reflect-metadata' + +import { TimerInterface } from '@standardnotes/time' + +import { DomainEventFactory } from './DomainEventFactory' + +describe('DomainEventFactory', () => { + let timer: TimerInterface + + const createFactory = () => new DomainEventFactory(timer) + + beforeEach(() => { + timer = {} as jest.Mocked + timer.getUTCDate = jest.fn().mockReturnValue(new Date(1)) + }) + + it('should create a FILE_UPLOADED event', () => { + expect( + createFactory().createFileUploadedEvent({ + fileByteSize: 123, + fileName: '2-3-4', + filePath: '1-2-3/2-3-4', + userUuid: '1-2-3', + }), + ).toEqual({ + createdAt: new Date(1), + meta: { + correlation: { + userIdentifier: '1-2-3', + userIdentifierType: 'uuid', + }, + origin: 'files', + }, + payload: { + fileByteSize: 123, + fileName: '2-3-4', + filePath: '1-2-3/2-3-4', + userUuid: '1-2-3', + }, + type: 'FILE_UPLOADED', + }) + }) + + it('should create a FILE_REMOVED event', () => { + expect( + createFactory().createFileRemovedEvent({ + fileByteSize: 123, + fileName: '2-3-4', + filePath: '1-2-3/2-3-4', + userUuid: '1-2-3', + regularSubscriptionUuid: '1-2-3', + }), + ).toEqual({ + createdAt: new Date(1), + meta: { + correlation: { + userIdentifier: '1-2-3', + userIdentifierType: 'uuid', + }, + origin: 'files', + }, + payload: { + fileByteSize: 123, + fileName: '2-3-4', + filePath: '1-2-3/2-3-4', + userUuid: '1-2-3', + regularSubscriptionUuid: '1-2-3', + }, + type: 'FILE_REMOVED', + }) + }) +}) diff --git a/packages/files/src/Domain/Event/DomainEventFactory.ts b/packages/files/src/Domain/Event/DomainEventFactory.ts new file mode 100644 index 000000000..09bb26c3e --- /dev/null +++ b/packages/files/src/Domain/Event/DomainEventFactory.ts @@ -0,0 +1,53 @@ +import { Uuid } from '@standardnotes/common' +import { FileUploadedEvent, FileRemovedEvent, DomainEventService } from '@standardnotes/domain-events' +import { TimerInterface } from '@standardnotes/time' +import { inject, injectable } from 'inversify' + +import TYPES from '../../Bootstrap/Types' +import { DomainEventFactoryInterface } from './DomainEventFactoryInterface' + +@injectable() +export class DomainEventFactory implements DomainEventFactoryInterface 
{ + constructor(@inject(TYPES.Timer) private timer: TimerInterface) {} + + createFileRemovedEvent(payload: { + userUuid: string + filePath: string + fileName: string + fileByteSize: number + regularSubscriptionUuid: Uuid + }): FileRemovedEvent { + return { + type: 'FILE_REMOVED', + createdAt: this.timer.getUTCDate(), + meta: { + correlation: { + userIdentifier: payload.userUuid, + userIdentifierType: 'uuid', + }, + origin: DomainEventService.Files, + }, + payload, + } + } + + createFileUploadedEvent(payload: { + userUuid: string + filePath: string + fileName: string + fileByteSize: number + }): FileUploadedEvent { + return { + type: 'FILE_UPLOADED', + createdAt: this.timer.getUTCDate(), + meta: { + correlation: { + userIdentifier: payload.userUuid, + userIdentifierType: 'uuid', + }, + origin: DomainEventService.Files, + }, + payload, + } + } +} diff --git a/packages/files/src/Domain/Event/DomainEventFactoryInterface.ts b/packages/files/src/Domain/Event/DomainEventFactoryInterface.ts new file mode 100644 index 000000000..8c9fdbac5 --- /dev/null +++ b/packages/files/src/Domain/Event/DomainEventFactoryInterface.ts @@ -0,0 +1,18 @@ +import { Uuid } from '@standardnotes/common' +import { FileUploadedEvent, FileRemovedEvent } from '@standardnotes/domain-events' + +export interface DomainEventFactoryInterface { + createFileUploadedEvent(payload: { + userUuid: string + filePath: string + fileName: string + fileByteSize: number + }): FileUploadedEvent + createFileRemovedEvent(payload: { + userUuid: string + filePath: string + fileName: string + fileByteSize: number + regularSubscriptionUuid: Uuid + }): FileRemovedEvent +} diff --git a/packages/files/src/Domain/File/RemovedFileDescription.ts b/packages/files/src/Domain/File/RemovedFileDescription.ts new file mode 100644 index 000000000..6925326fe --- /dev/null +++ b/packages/files/src/Domain/File/RemovedFileDescription.ts @@ -0,0 +1,8 @@ +import { Uuid } from '@standardnotes/common' + +export type RemovedFileDescription = { + userUuid: Uuid + filePath: string + fileName: string + fileByteSize: number +} diff --git a/packages/files/src/Domain/Handler/AccountDeletionRequestedEventHandler.spec.ts b/packages/files/src/Domain/Handler/AccountDeletionRequestedEventHandler.spec.ts new file mode 100644 index 000000000..4fb6380ed --- /dev/null +++ b/packages/files/src/Domain/Handler/AccountDeletionRequestedEventHandler.spec.ts @@ -0,0 +1,73 @@ +import 'reflect-metadata' + +import { + AccountDeletionRequestedEvent, + AccountDeletionRequestedEventPayload, + DomainEventPublisherInterface, + FileRemovedEvent, +} from '@standardnotes/domain-events' +import { MarkFilesToBeRemoved } from '../UseCase/MarkFilesToBeRemoved/MarkFilesToBeRemoved' + +import { AccountDeletionRequestedEventHandler } from './AccountDeletionRequestedEventHandler' +import { DomainEventFactoryInterface } from '../Event/DomainEventFactoryInterface' +import { RemovedFileDescription } from '../File/RemovedFileDescription' + +describe('AccountDeletionRequestedEventHandler', () => { + let markFilesToBeRemoved: MarkFilesToBeRemoved + let event: AccountDeletionRequestedEvent + let domainEventPublisher: DomainEventPublisherInterface + let domainEventFactory: DomainEventFactoryInterface + + const createHandler = () => + new AccountDeletionRequestedEventHandler(markFilesToBeRemoved, domainEventPublisher, domainEventFactory) + + beforeEach(() => { + markFilesToBeRemoved = {} as jest.Mocked + markFilesToBeRemoved.execute = jest.fn().mockReturnValue({ + success: true, + filesRemoved: [{} as jest.Mocked], + 
}) + + event = {} as jest.Mocked + event.payload = { + userUuid: '1-2-3', + regularSubscriptionUuid: '1-2-3', + } as jest.Mocked + + domainEventPublisher = {} as jest.Mocked + domainEventPublisher.publish = jest.fn() + + domainEventFactory = {} as jest.Mocked + domainEventFactory.createFileRemovedEvent = jest.fn().mockReturnValue({} as jest.Mocked) + }) + + it('should mark files to be remove for user', async () => { + await createHandler().handle(event) + + expect(markFilesToBeRemoved.execute).toHaveBeenCalledWith({ userUuid: '1-2-3' }) + + expect(domainEventPublisher.publish).toHaveBeenCalled() + }) + + it('should not mark files to be remove for user if user has no regular subscription', async () => { + event.payload.regularSubscriptionUuid = undefined + + await createHandler().handle(event) + + expect(markFilesToBeRemoved.execute).not.toHaveBeenCalled() + + expect(domainEventPublisher.publish).not.toHaveBeenCalled() + }) + + it('should not publish events if failed to mark files to be removed', async () => { + markFilesToBeRemoved.execute = jest.fn().mockReturnValue({ + success: false, + }) + + await createHandler().handle(event) + + expect(markFilesToBeRemoved.execute).toHaveBeenCalledWith({ userUuid: '1-2-3' }) + + expect(domainEventPublisher.publish).not.toHaveBeenCalled() + }) +}) diff --git a/packages/files/src/Domain/Handler/AccountDeletionRequestedEventHandler.ts b/packages/files/src/Domain/Handler/AccountDeletionRequestedEventHandler.ts new file mode 100644 index 000000000..15bfd2146 --- /dev/null +++ b/packages/files/src/Domain/Handler/AccountDeletionRequestedEventHandler.ts @@ -0,0 +1,42 @@ +import { + AccountDeletionRequestedEvent, + DomainEventHandlerInterface, + DomainEventPublisherInterface, +} from '@standardnotes/domain-events' +import { inject, injectable } from 'inversify' + +import TYPES from '../../Bootstrap/Types' +import { DomainEventFactoryInterface } from '../Event/DomainEventFactoryInterface' +import { MarkFilesToBeRemoved } from '../UseCase/MarkFilesToBeRemoved/MarkFilesToBeRemoved' + +@injectable() +export class AccountDeletionRequestedEventHandler implements DomainEventHandlerInterface { + constructor( + @inject(TYPES.MarkFilesToBeRemoved) private markFilesToBeRemoved: MarkFilesToBeRemoved, + @inject(TYPES.DomainEventPublisher) private domainEventPublisher: DomainEventPublisherInterface, + @inject(TYPES.DomainEventFactory) private domainEventFactory: DomainEventFactoryInterface, + ) {} + + async handle(event: AccountDeletionRequestedEvent): Promise { + if (event.payload.regularSubscriptionUuid === undefined) { + return + } + + const response = await this.markFilesToBeRemoved.execute({ + userUuid: event.payload.userUuid, + }) + + if (!response.success) { + return + } + + for (const fileRemoved of response.filesRemoved) { + await this.domainEventPublisher.publish( + this.domainEventFactory.createFileRemovedEvent({ + regularSubscriptionUuid: event.payload.regularSubscriptionUuid, + ...fileRemoved, + }), + ) + } + } +} diff --git a/packages/files/src/Domain/Handler/SharedSubscriptionInvitationCanceledEventHandler.spec.ts b/packages/files/src/Domain/Handler/SharedSubscriptionInvitationCanceledEventHandler.spec.ts new file mode 100644 index 000000000..5bbcc036f --- /dev/null +++ b/packages/files/src/Domain/Handler/SharedSubscriptionInvitationCanceledEventHandler.spec.ts @@ -0,0 +1,73 @@ +import 'reflect-metadata' + +import { + SharedSubscriptionInvitationCanceledEvent, + SharedSubscriptionInvitationCanceledEventPayload, + DomainEventPublisherInterface, + 
FileRemovedEvent, +} from '@standardnotes/domain-events' +import { MarkFilesToBeRemoved } from '../UseCase/MarkFilesToBeRemoved/MarkFilesToBeRemoved' + +import { SharedSubscriptionInvitationCanceledEventHandler } from './SharedSubscriptionInvitationCanceledEventHandler' +import { DomainEventFactoryInterface } from '../Event/DomainEventFactoryInterface' +import { RemovedFileDescription } from '../File/RemovedFileDescription' + +describe('SharedSubscriptionInvitationCanceledEventHandler', () => { + let markFilesToBeRemoved: MarkFilesToBeRemoved + let event: SharedSubscriptionInvitationCanceledEvent + let domainEventPublisher: DomainEventPublisherInterface + let domainEventFactory: DomainEventFactoryInterface + + const createHandler = () => + new SharedSubscriptionInvitationCanceledEventHandler(markFilesToBeRemoved, domainEventPublisher, domainEventFactory) + + beforeEach(() => { + markFilesToBeRemoved = {} as jest.Mocked + markFilesToBeRemoved.execute = jest.fn().mockReturnValue({ + success: true, + filesRemoved: [{} as jest.Mocked], + }) + + event = {} as jest.Mocked + event.payload = { + inviteeIdentifier: '1-2-3', + inviteeIdentifierType: 'uuid', + } as jest.Mocked + + domainEventPublisher = {} as jest.Mocked + domainEventPublisher.publish = jest.fn() + + domainEventFactory = {} as jest.Mocked + domainEventFactory.createFileRemovedEvent = jest.fn().mockReturnValue({} as jest.Mocked) + }) + + it('should mark files to be remove for user', async () => { + await createHandler().handle(event) + + expect(markFilesToBeRemoved.execute).toHaveBeenCalledWith({ userUuid: '1-2-3' }) + + expect(domainEventPublisher.publish).toHaveBeenCalled() + }) + + it('should not mark files to be remove for user if identifier is not of uuid type', async () => { + event.payload.inviteeIdentifierType = 'email' + + await createHandler().handle(event) + + expect(markFilesToBeRemoved.execute).not.toHaveBeenCalled() + + expect(domainEventPublisher.publish).not.toHaveBeenCalled() + }) + + it('should not publish events if failed to mark files to be removed', async () => { + markFilesToBeRemoved.execute = jest.fn().mockReturnValue({ + success: false, + }) + + await createHandler().handle(event) + + expect(markFilesToBeRemoved.execute).toHaveBeenCalledWith({ userUuid: '1-2-3' }) + + expect(domainEventPublisher.publish).not.toHaveBeenCalled() + }) +}) diff --git a/packages/files/src/Domain/Handler/SharedSubscriptionInvitationCanceledEventHandler.ts b/packages/files/src/Domain/Handler/SharedSubscriptionInvitationCanceledEventHandler.ts new file mode 100644 index 000000000..bc7d2ea09 --- /dev/null +++ b/packages/files/src/Domain/Handler/SharedSubscriptionInvitationCanceledEventHandler.ts @@ -0,0 +1,42 @@ +import { + SharedSubscriptionInvitationCanceledEvent, + DomainEventHandlerInterface, + DomainEventPublisherInterface, +} from '@standardnotes/domain-events' +import { inject, injectable } from 'inversify' + +import TYPES from '../../Bootstrap/Types' +import { DomainEventFactoryInterface } from '../Event/DomainEventFactoryInterface' +import { MarkFilesToBeRemoved } from '../UseCase/MarkFilesToBeRemoved/MarkFilesToBeRemoved' + +@injectable() +export class SharedSubscriptionInvitationCanceledEventHandler implements DomainEventHandlerInterface { + constructor( + @inject(TYPES.MarkFilesToBeRemoved) private markFilesToBeRemoved: MarkFilesToBeRemoved, + @inject(TYPES.DomainEventPublisher) private domainEventPublisher: DomainEventPublisherInterface, + @inject(TYPES.DomainEventFactory) private domainEventFactory: 
DomainEventFactoryInterface, + ) {} + + async handle(event: SharedSubscriptionInvitationCanceledEvent): Promise { + if (event.payload.inviteeIdentifierType !== 'uuid') { + return + } + + const response = await this.markFilesToBeRemoved.execute({ + userUuid: event.payload.inviteeIdentifier, + }) + + if (!response.success) { + return + } + + for (const fileRemoved of response.filesRemoved) { + await this.domainEventPublisher.publish( + this.domainEventFactory.createFileRemovedEvent({ + regularSubscriptionUuid: event.payload.inviterSubscriptionUuid, + ...fileRemoved, + }), + ) + } + } +} diff --git a/packages/files/src/Domain/Services/FileDownloaderInterface.ts b/packages/files/src/Domain/Services/FileDownloaderInterface.ts new file mode 100644 index 000000000..1a1e6095b --- /dev/null +++ b/packages/files/src/Domain/Services/FileDownloaderInterface.ts @@ -0,0 +1,6 @@ +import { Readable } from 'stream' + +export interface FileDownloaderInterface { + createDownloadStream(filePath: string, startRange: number, endRange: number): Readable + getFileSize(filePath: string): Promise +} diff --git a/packages/files/src/Domain/Services/FileRemoverInterface.ts b/packages/files/src/Domain/Services/FileRemoverInterface.ts new file mode 100644 index 000000000..f5211110f --- /dev/null +++ b/packages/files/src/Domain/Services/FileRemoverInterface.ts @@ -0,0 +1,6 @@ +import { RemovedFileDescription } from '../File/RemovedFileDescription' + +export interface FileRemoverInterface { + remove(filePath: string): Promise + markFilesToBeRemoved(userUuid: string): Promise> +} diff --git a/packages/files/src/Domain/Services/FileUploaderInterface.ts b/packages/files/src/Domain/Services/FileUploaderInterface.ts new file mode 100644 index 000000000..e3626b5bc --- /dev/null +++ b/packages/files/src/Domain/Services/FileUploaderInterface.ts @@ -0,0 +1,9 @@ +import { ChunkId } from '../Upload/ChunkId' +import { UploadChunkResult } from '../Upload/UploadChunkResult' +import { UploadId } from '../Upload/UploadId' + +export interface FileUploaderInterface { + createUploadSession(filePath: string): Promise + uploadFileChunk(dto: { uploadId: string; data: Uint8Array; filePath: string; chunkId: ChunkId }): Promise + finishUploadSession(uploadId: string, filePath: string, uploadChunkResults: Array): Promise +} diff --git a/packages/files/src/Domain/Upload/ChunkId.ts b/packages/files/src/Domain/Upload/ChunkId.ts new file mode 100644 index 000000000..43203543d --- /dev/null +++ b/packages/files/src/Domain/Upload/ChunkId.ts @@ -0,0 +1 @@ +export type ChunkId = number diff --git a/packages/files/src/Domain/Upload/UploadChunkResult.ts b/packages/files/src/Domain/Upload/UploadChunkResult.ts new file mode 100644 index 000000000..bd1461ddc --- /dev/null +++ b/packages/files/src/Domain/Upload/UploadChunkResult.ts @@ -0,0 +1,7 @@ +import { ChunkId } from './ChunkId' + +export type UploadChunkResult = { + chunkId: ChunkId + tag: string + chunkSize: number +} diff --git a/packages/files/src/Domain/Upload/UploadId.ts b/packages/files/src/Domain/Upload/UploadId.ts new file mode 100644 index 000000000..579a79280 --- /dev/null +++ b/packages/files/src/Domain/Upload/UploadId.ts @@ -0,0 +1 @@ +export type UploadId = string diff --git a/packages/files/src/Domain/Upload/UploadRepositoryInterface.ts b/packages/files/src/Domain/Upload/UploadRepositoryInterface.ts new file mode 100644 index 000000000..616a10646 --- /dev/null +++ b/packages/files/src/Domain/Upload/UploadRepositoryInterface.ts @@ -0,0 +1,9 @@ +import { UploadChunkResult } from 
'./UploadChunkResult' +import { UploadId } from './UploadId' + +export interface UploadRepositoryInterface { + storeUploadSession(filePath: string, uploadId: UploadId): Promise<void> + retrieveUploadSessionId(filePath: string): Promise<UploadId | undefined> + storeUploadChunkResult(uploadId: UploadId, uploadChunkResult: UploadChunkResult): Promise<void> + retrieveUploadChunkResults(uploadId: UploadId): Promise<Array<UploadChunkResult>> +} diff --git a/packages/files/src/Domain/UseCase/CreateUploadSession/CreateUploadSession.spec.ts b/packages/files/src/Domain/UseCase/CreateUploadSession/CreateUploadSession.spec.ts new file mode 100644 index 000000000..fe3d74890 --- /dev/null +++ b/packages/files/src/Domain/UseCase/CreateUploadSession/CreateUploadSession.spec.ts @@ -0,0 +1,53 @@ +import 'reflect-metadata' +import { Logger } from 'winston' +import { FileUploaderInterface } from '../../Services/FileUploaderInterface' +import { UploadRepositoryInterface } from '../../Upload/UploadRepositoryInterface' + +import { CreateUploadSession } from './CreateUploadSession' + +describe('CreateUploadSession', () => { + let fileUploader: FileUploaderInterface + let uploadRepository: UploadRepositoryInterface + let logger: Logger + + const createUseCase = () => new CreateUploadSession(fileUploader, uploadRepository, logger) + + beforeEach(() => { + fileUploader = {} as jest.Mocked<FileUploaderInterface> + fileUploader.createUploadSession = jest.fn().mockReturnValue('123') + + uploadRepository = {} as jest.Mocked<UploadRepositoryInterface> + uploadRepository.storeUploadSession = jest.fn() + + logger = {} as jest.Mocked<Logger> + logger.debug = jest.fn() + logger.error = jest.fn() + logger.warn = jest.fn() + }) + + it('should indicate an error in creating the upload session', async () => { + uploadRepository.storeUploadSession = jest.fn().mockImplementation(() => { + throw new Error('oops') + }) + + expect( + await createUseCase().execute({ + resourceRemoteIdentifier: '2-3-4', + userUuid: '1-2-3', + }), + ).toEqual({ + success: false, + message: 'Could not create upload session', + }) + }) + + it('should create an upload session', async () => { + await createUseCase().execute({ + resourceRemoteIdentifier: '2-3-4', + userUuid: '1-2-3', + }) + + expect(fileUploader.createUploadSession).toHaveBeenCalledWith('1-2-3/2-3-4') + expect(uploadRepository.storeUploadSession).toHaveBeenCalledWith('1-2-3/2-3-4', '123') + }) +}) diff --git a/packages/files/src/Domain/UseCase/CreateUploadSession/CreateUploadSession.ts b/packages/files/src/Domain/UseCase/CreateUploadSession/CreateUploadSession.ts new file mode 100644 index 000000000..d6865fe3c --- /dev/null +++ b/packages/files/src/Domain/UseCase/CreateUploadSession/CreateUploadSession.ts @@ -0,0 +1,46 @@ +import { inject, injectable } from 'inversify' +import { Logger } from 'winston' + +import TYPES from '../../../Bootstrap/Types' +import { UseCaseInterface } from '../UseCaseInterface' +import { CreateUploadSessionDTO } from './CreateUploadSessionDTO' +import { CreateUploadSessionResponse } from './CreateUploadSessionResponse' +import { FileUploaderInterface } from '../../Services/FileUploaderInterface' +import { UploadRepositoryInterface } from '../../Upload/UploadRepositoryInterface' + +@injectable() +export class CreateUploadSession implements UseCaseInterface { + constructor( + @inject(TYPES.FileUploader) private fileUploader: FileUploaderInterface, + @inject(TYPES.UploadRepository) private uploadRepository: UploadRepositoryInterface, + @inject(TYPES.Logger) private logger: Logger, + ) {} + + async execute(dto: CreateUploadSessionDTO): Promise<CreateUploadSessionResponse> { + try {
this.logger.debug(`Creating upload session for resource: ${dto.resourceRemoteIdentifier}`) + + const filePath = `${dto.userUuid}/${dto.resourceRemoteIdentifier}` + + const uploadId = await this.fileUploader.createUploadSession(filePath) + + this.logger.debug(`Created upload session with id: ${uploadId}`) + + await this.uploadRepository.storeUploadSession(filePath, uploadId) + + return { + success: true, + uploadId, + } + } catch (error) { + this.logger.error( + `Could not create upload session for resource: ${dto.resourceRemoteIdentifier} - ${(error as Error).message}`, + ) + + return { + success: false, + message: 'Could not create upload session', + } + } + } +} diff --git a/packages/files/src/Domain/UseCase/CreateUploadSession/CreateUploadSessionDTO.ts b/packages/files/src/Domain/UseCase/CreateUploadSession/CreateUploadSessionDTO.ts new file mode 100644 index 000000000..486f7691d --- /dev/null +++ b/packages/files/src/Domain/UseCase/CreateUploadSession/CreateUploadSessionDTO.ts @@ -0,0 +1,6 @@ +import { Uuid } from '@standardnotes/common' + +export type CreateUploadSessionDTO = { + userUuid: Uuid + resourceRemoteIdentifier: string +} diff --git a/packages/files/src/Domain/UseCase/CreateUploadSession/CreateUploadSessionResponse.ts b/packages/files/src/Domain/UseCase/CreateUploadSession/CreateUploadSessionResponse.ts new file mode 100644 index 000000000..af4b6ddcc --- /dev/null +++ b/packages/files/src/Domain/UseCase/CreateUploadSession/CreateUploadSessionResponse.ts @@ -0,0 +1,11 @@ +import { UploadId } from '../../Upload/UploadId' + +export type CreateUploadSessionResponse = + | { + success: true + uploadId: UploadId + } + | { + success: false + message: string + } diff --git a/packages/files/src/Domain/UseCase/FinishUploadSession/FinishUploadSession.spec.ts b/packages/files/src/Domain/UseCase/FinishUploadSession/FinishUploadSession.spec.ts new file mode 100644 index 000000000..9c9900c88 --- /dev/null +++ b/packages/files/src/Domain/UseCase/FinishUploadSession/FinishUploadSession.spec.ts @@ -0,0 +1,112 @@ +import 'reflect-metadata' + +import { DomainEventPublisherInterface, FileUploadedEvent } from '@standardnotes/domain-events' +import { Logger } from 'winston' +import { DomainEventFactoryInterface } from '../../Event/DomainEventFactoryInterface' +import { FileUploaderInterface } from '../../Services/FileUploaderInterface' +import { UploadRepositoryInterface } from '../../Upload/UploadRepositoryInterface' + +import { FinishUploadSession } from './FinishUploadSession' + +describe('FinishUploadSession', () => { + let fileUploader: FileUploaderInterface + let uploadRepository: UploadRepositoryInterface + let domainEventPublisher: DomainEventPublisherInterface + let domainEventFactory: DomainEventFactoryInterface + let logger: Logger + + const createUseCase = () => + new FinishUploadSession(fileUploader, uploadRepository, domainEventPublisher, domainEventFactory, logger) + + beforeEach(() => { + fileUploader = {} as jest.Mocked + fileUploader.finishUploadSession = jest.fn().mockReturnValue('ETag123') + + uploadRepository = {} as jest.Mocked + uploadRepository.retrieveUploadSessionId = jest.fn().mockReturnValue('123') + uploadRepository.retrieveUploadChunkResults = jest.fn().mockReturnValue([{ tag: '123', chunkId: 1, chunkSize: 1 }]) + + domainEventPublisher = {} as jest.Mocked + domainEventPublisher.publish = jest.fn() + + domainEventFactory = {} as jest.Mocked + domainEventFactory.createFileUploadedEvent = jest.fn().mockReturnValue({} as jest.Mocked) + + logger = {} as jest.Mocked + 
logger.debug = jest.fn() + logger.error = jest.fn() + logger.warn = jest.fn() + }) + + it('should not finish an upload session if non existing', async () => { + uploadRepository.retrieveUploadSessionId = jest.fn().mockReturnValue(undefined) + + await createUseCase().execute({ + resourceRemoteIdentifier: '2-3-4', + userUuid: '1-2-3', + uploadBytesLimit: 100, + uploadBytesUsed: 0, + }) + + expect(fileUploader.finishUploadSession).not.toHaveBeenCalled() + expect(domainEventPublisher.publish).not.toHaveBeenCalled() + }) + + it('should indicate of an error in finishing session fails', async () => { + uploadRepository.retrieveUploadSessionId = jest.fn().mockImplementation(() => { + throw new Error('oops') + }) + + expect( + await createUseCase().execute({ + resourceRemoteIdentifier: '2-3-4', + userUuid: '1-2-3', + uploadBytesLimit: 100, + uploadBytesUsed: 0, + }), + ).toEqual({ + success: false, + message: 'Could not finish upload session', + }) + + expect(fileUploader.finishUploadSession).not.toHaveBeenCalled() + expect(domainEventPublisher.publish).not.toHaveBeenCalled() + }) + + it('should finish an upload session', async () => { + await createUseCase().execute({ + resourceRemoteIdentifier: '2-3-4', + userUuid: '1-2-3', + uploadBytesLimit: 100, + uploadBytesUsed: 0, + }) + + expect(fileUploader.finishUploadSession).toHaveBeenCalledWith('123', '1-2-3/2-3-4', [ + { tag: '123', chunkId: 1, chunkSize: 1 }, + ]) + expect(domainEventPublisher.publish).toHaveBeenCalled() + }) + + it('should not finish an upload session if the file size exceeds storage quota', async () => { + uploadRepository.retrieveUploadChunkResults = jest.fn().mockReturnValue([ + { tag: '123', chunkId: 1, chunkSize: 60 }, + { tag: '234', chunkId: 2, chunkSize: 10 }, + { tag: '345', chunkId: 3, chunkSize: 20 }, + ]) + + expect( + await createUseCase().execute({ + resourceRemoteIdentifier: '2-3-4', + userUuid: '1-2-3', + uploadBytesLimit: 100, + uploadBytesUsed: 20, + }), + ).toEqual({ + success: false, + message: 'Could not finish upload session. 
You are out of space.', + }) + + expect(fileUploader.finishUploadSession).not.toHaveBeenCalled() + expect(domainEventPublisher.publish).not.toHaveBeenCalled() + }) +}) diff --git a/packages/files/src/Domain/UseCase/FinishUploadSession/FinishUploadSession.ts b/packages/files/src/Domain/UseCase/FinishUploadSession/FinishUploadSession.ts new file mode 100644 index 000000000..2fbe5724d --- /dev/null +++ b/packages/files/src/Domain/UseCase/FinishUploadSession/FinishUploadSession.ts @@ -0,0 +1,79 @@ +import { inject, injectable } from 'inversify' +import { Logger } from 'winston' + +import TYPES from '../../../Bootstrap/Types' +import { UseCaseInterface } from '../UseCaseInterface' +import { FinishUploadSessionDTO } from './FinishUploadSessionDTO' +import { FinishUploadSessionResponse } from './FinishUploadSessionResponse' +import { FileUploaderInterface } from '../../Services/FileUploaderInterface' +import { UploadRepositoryInterface } from '../../Upload/UploadRepositoryInterface' +import { DomainEventPublisherInterface } from '@standardnotes/domain-events' +import { DomainEventFactoryInterface } from '../../Event/DomainEventFactoryInterface' + +@injectable() +export class FinishUploadSession implements UseCaseInterface { + constructor( + @inject(TYPES.FileUploader) private fileUploader: FileUploaderInterface, + @inject(TYPES.UploadRepository) private uploadRepository: UploadRepositoryInterface, + @inject(TYPES.DomainEventPublisher) private domainEventPublisher: DomainEventPublisherInterface, + @inject(TYPES.DomainEventFactory) private domainEventFactory: DomainEventFactoryInterface, + @inject(TYPES.Logger) private logger: Logger, + ) {} + + async execute(dto: FinishUploadSessionDTO): Promise { + try { + this.logger.debug(`Finishing upload session for resource: ${dto.resourceRemoteIdentifier}`) + + const filePath = `${dto.userUuid}/${dto.resourceRemoteIdentifier}` + + const uploadId = await this.uploadRepository.retrieveUploadSessionId(filePath) + if (uploadId === undefined) { + this.logger.warn(`Could not find upload session for file path: ${filePath}`) + + return { + success: false, + message: 'Could not finish upload session', + } + } + + const uploadChunkResults = await this.uploadRepository.retrieveUploadChunkResults(uploadId) + + let totalFileSize = 0 + for (const uploadChunkResult of uploadChunkResults) { + totalFileSize += uploadChunkResult.chunkSize + } + + const remainingSpaceLeft = dto.uploadBytesLimit - dto.uploadBytesUsed + if (remainingSpaceLeft < totalFileSize) { + return { + success: false, + message: 'Could not finish upload session. 
You are out of space.', + } + } + + await this.fileUploader.finishUploadSession(uploadId, filePath, uploadChunkResults) + + await this.domainEventPublisher.publish( + this.domainEventFactory.createFileUploadedEvent({ + userUuid: dto.userUuid, + filePath: `${dto.userUuid}/${dto.resourceRemoteIdentifier}`, + fileName: dto.resourceRemoteIdentifier, + fileByteSize: totalFileSize, + }), + ) + + return { + success: true, + } + } catch (error) { + this.logger.error( + `Could not finish upload session for resource: ${dto.resourceRemoteIdentifier} - ${(error as Error).message}`, + ) + + return { + success: false, + message: 'Could not finish upload session', + } + } + } +} diff --git a/packages/files/src/Domain/UseCase/FinishUploadSession/FinishUploadSessionDTO.ts b/packages/files/src/Domain/UseCase/FinishUploadSession/FinishUploadSessionDTO.ts new file mode 100644 index 000000000..d57245a22 --- /dev/null +++ b/packages/files/src/Domain/UseCase/FinishUploadSession/FinishUploadSessionDTO.ts @@ -0,0 +1,8 @@ +import { Uuid } from '@standardnotes/common' + +export type FinishUploadSessionDTO = { + userUuid: Uuid + resourceRemoteIdentifier: string + uploadBytesUsed: number + uploadBytesLimit: number +} diff --git a/packages/files/src/Domain/UseCase/FinishUploadSession/FinishUploadSessionResponse.ts b/packages/files/src/Domain/UseCase/FinishUploadSession/FinishUploadSessionResponse.ts new file mode 100644 index 000000000..21aa83aa9 --- /dev/null +++ b/packages/files/src/Domain/UseCase/FinishUploadSession/FinishUploadSessionResponse.ts @@ -0,0 +1,8 @@ +export type FinishUploadSessionResponse = + | { + success: true + } + | { + success: false + message: string + } diff --git a/packages/files/src/Domain/UseCase/GetFileMetadata/GetFileMetadata.spec.ts b/packages/files/src/Domain/UseCase/GetFileMetadata/GetFileMetadata.spec.ts new file mode 100644 index 000000000..0735ae12e --- /dev/null +++ b/packages/files/src/Domain/UseCase/GetFileMetadata/GetFileMetadata.spec.ts @@ -0,0 +1,38 @@ +import 'reflect-metadata' +import { Logger } from 'winston' +import { FileDownloaderInterface } from '../../Services/FileDownloaderInterface' + +import { GetFileMetadata } from './GetFileMetadata' + +describe('GetFileMetadata', () => { + let fileDownloader: FileDownloaderInterface + let logger: Logger + + const createUseCase = () => new GetFileMetadata(fileDownloader, logger) + + beforeEach(() => { + fileDownloader = {} as jest.Mocked + fileDownloader.getFileSize = jest.fn().mockReturnValue(123) + + logger = {} as jest.Mocked + logger.error = jest.fn() + }) + + it('should return the file metadata', async () => { + expect(await createUseCase().execute({ resourceRemoteIdentifier: '1-2-3', userUuid: '2-3-4' })).toEqual({ + success: true, + size: 123, + }) + }) + + it('should not return the file metadata if it fails', async () => { + fileDownloader.getFileSize = jest.fn().mockImplementation(() => { + throw new Error('ooops') + }) + + expect(await createUseCase().execute({ resourceRemoteIdentifier: '1-2-3', userUuid: '2-3-4' })).toEqual({ + success: false, + message: 'Could not get file metadata.', + }) + }) +}) diff --git a/packages/files/src/Domain/UseCase/GetFileMetadata/GetFileMetadata.ts b/packages/files/src/Domain/UseCase/GetFileMetadata/GetFileMetadata.ts new file mode 100644 index 000000000..bca0d05db --- /dev/null +++ b/packages/files/src/Domain/UseCase/GetFileMetadata/GetFileMetadata.ts @@ -0,0 +1,32 @@ +import { inject, injectable } from 'inversify' +import { Logger } from 'winston' +import TYPES from 
'../../../Bootstrap/Types' +import { FileDownloaderInterface } from '../../Services/FileDownloaderInterface' +import { UseCaseInterface } from '../UseCaseInterface' +import { GetFileMetadataDTO } from './GetFileMetadataDTO' +import { GetFileMetadataResponse } from './GetFileMetadataResponse' + +@injectable() +export class GetFileMetadata implements UseCaseInterface { + constructor( + @inject(TYPES.FileDownloader) private fileDownloader: FileDownloaderInterface, + @inject(TYPES.Logger) private logger: Logger, + ) {} + + async execute(dto: GetFileMetadataDTO): Promise { + try { + const size = await this.fileDownloader.getFileSize(`${dto.userUuid}/${dto.resourceRemoteIdentifier}`) + + return { + success: true, + size, + } + } catch (error) { + this.logger.error(`Could not get file metadata for resource: ${dto.userUuid}/${dto.resourceRemoteIdentifier}`) + return { + success: false, + message: 'Could not get file metadata.', + } + } + } +} diff --git a/packages/files/src/Domain/UseCase/GetFileMetadata/GetFileMetadataDTO.ts b/packages/files/src/Domain/UseCase/GetFileMetadata/GetFileMetadataDTO.ts new file mode 100644 index 000000000..37263cda0 --- /dev/null +++ b/packages/files/src/Domain/UseCase/GetFileMetadata/GetFileMetadataDTO.ts @@ -0,0 +1,4 @@ +export type GetFileMetadataDTO = { + userUuid: string + resourceRemoteIdentifier: string +} diff --git a/packages/files/src/Domain/UseCase/GetFileMetadata/GetFileMetadataResponse.ts b/packages/files/src/Domain/UseCase/GetFileMetadata/GetFileMetadataResponse.ts new file mode 100644 index 000000000..a1f18812a --- /dev/null +++ b/packages/files/src/Domain/UseCase/GetFileMetadata/GetFileMetadataResponse.ts @@ -0,0 +1,9 @@ +export type GetFileMetadataResponse = + | { + success: true + size: number + } + | { + success: false + message: string + } diff --git a/packages/files/src/Domain/UseCase/MarkFilesToBeRemoved/MarkFilesToBeRemoved.spec.ts b/packages/files/src/Domain/UseCase/MarkFilesToBeRemoved/MarkFilesToBeRemoved.spec.ts new file mode 100644 index 000000000..72afb2dcf --- /dev/null +++ b/packages/files/src/Domain/UseCase/MarkFilesToBeRemoved/MarkFilesToBeRemoved.spec.ts @@ -0,0 +1,39 @@ +import 'reflect-metadata' +import { Logger } from 'winston' +import { FileRemoverInterface } from '../../Services/FileRemoverInterface' + +import { MarkFilesToBeRemoved } from './MarkFilesToBeRemoved' + +describe('MarkFilesToBeRemoved', () => { + let fileRemover: FileRemoverInterface + let logger: Logger + + const createUseCase = () => new MarkFilesToBeRemoved(fileRemover, logger) + + beforeEach(() => { + fileRemover = {} as jest.Mocked + fileRemover.markFilesToBeRemoved = jest.fn() + + logger = {} as jest.Mocked + logger.debug = jest.fn() + logger.error = jest.fn() + logger.warn = jest.fn() + }) + + it('should mark files for being removed', async () => { + expect(await createUseCase().execute({ userUuid: '1-2-3' })).toEqual({ success: true }) + + expect(fileRemover.markFilesToBeRemoved).toHaveBeenCalledWith('1-2-3') + }) + + it('should indicate if marking files for being removed goes wrong', async () => { + fileRemover.markFilesToBeRemoved = jest.fn().mockImplementation(() => { + throw new Error('Oops') + }) + + expect(await createUseCase().execute({ userUuid: '1-2-3' })).toEqual({ + success: false, + message: 'Could not mark resources for removal', + }) + }) +}) diff --git a/packages/files/src/Domain/UseCase/MarkFilesToBeRemoved/MarkFilesToBeRemoved.ts b/packages/files/src/Domain/UseCase/MarkFilesToBeRemoved/MarkFilesToBeRemoved.ts new file mode 100644 index 
000000000..a6c7af331 --- /dev/null +++ b/packages/files/src/Domain/UseCase/MarkFilesToBeRemoved/MarkFilesToBeRemoved.ts @@ -0,0 +1,36 @@ +import { inject, injectable } from 'inversify' +import { Logger } from 'winston' + +import TYPES from '../../../Bootstrap/Types' +import { FileRemoverInterface } from '../../Services/FileRemoverInterface' +import { UseCaseInterface } from '../UseCaseInterface' +import { MarkFilesToBeRemovedDTO } from './MarkFilesToBeRemovedDTO' +import { MarkFilesToBeRemovedResponse } from './MarkFilesToBeRemovedResponse' + +@injectable() +export class MarkFilesToBeRemoved implements UseCaseInterface { + constructor( + @inject(TYPES.FileRemover) private fileRemover: FileRemoverInterface, + @inject(TYPES.Logger) private logger: Logger, + ) {} + + async execute(dto: MarkFilesToBeRemovedDTO): Promise { + try { + this.logger.debug(`Marking files for later removal for user: ${dto.userUuid}`) + + const filesRemoved = await this.fileRemover.markFilesToBeRemoved(dto.userUuid) + + return { + success: true, + filesRemoved, + } + } catch (error) { + this.logger.error(`Could not mark resources for removal: ${dto.userUuid} - ${(error as Error).message}`) + + return { + success: false, + message: 'Could not mark resources for removal', + } + } + } +} diff --git a/packages/files/src/Domain/UseCase/MarkFilesToBeRemoved/MarkFilesToBeRemovedDTO.ts b/packages/files/src/Domain/UseCase/MarkFilesToBeRemoved/MarkFilesToBeRemovedDTO.ts new file mode 100644 index 000000000..75067ddcc --- /dev/null +++ b/packages/files/src/Domain/UseCase/MarkFilesToBeRemoved/MarkFilesToBeRemovedDTO.ts @@ -0,0 +1,5 @@ +import { Uuid } from '@standardnotes/common' + +export type MarkFilesToBeRemovedDTO = { + userUuid: Uuid +} diff --git a/packages/files/src/Domain/UseCase/MarkFilesToBeRemoved/MarkFilesToBeRemovedResponse.ts b/packages/files/src/Domain/UseCase/MarkFilesToBeRemoved/MarkFilesToBeRemovedResponse.ts new file mode 100644 index 000000000..ac2d7886f --- /dev/null +++ b/packages/files/src/Domain/UseCase/MarkFilesToBeRemoved/MarkFilesToBeRemovedResponse.ts @@ -0,0 +1,11 @@ +import { RemovedFileDescription } from '../../File/RemovedFileDescription' + +export type MarkFilesToBeRemovedResponse = + | { + success: true + filesRemoved: Array + } + | { + success: false + message: string + } diff --git a/packages/files/src/Domain/UseCase/RemoveFile/RemoveFile.spec.ts b/packages/files/src/Domain/UseCase/RemoveFile/RemoveFile.spec.ts new file mode 100644 index 000000000..bdd580bb6 --- /dev/null +++ b/packages/files/src/Domain/UseCase/RemoveFile/RemoveFile.spec.ts @@ -0,0 +1,63 @@ +import 'reflect-metadata' + +import { DomainEventPublisherInterface, FileRemovedEvent } from '@standardnotes/domain-events' +import { Logger } from 'winston' +import { DomainEventFactoryInterface } from '../../Event/DomainEventFactoryInterface' + +import { RemoveFile } from './RemoveFile' +import { FileRemoverInterface } from '../../Services/FileRemoverInterface' + +describe('RemoveFile', () => { + let fileRemover: FileRemoverInterface + let domainEventPublisher: DomainEventPublisherInterface + let domainEventFactory: DomainEventFactoryInterface + let logger: Logger + + const createUseCase = () => new RemoveFile(fileRemover, domainEventPublisher, domainEventFactory, logger) + + beforeEach(() => { + fileRemover = {} as jest.Mocked + fileRemover.remove = jest.fn().mockReturnValue(413) + + domainEventPublisher = {} as jest.Mocked + domainEventPublisher.publish = jest.fn() + + domainEventFactory = {} as jest.Mocked + 
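Aside: the MarkFilesToBeRemovedResponse union above carries a description of every file staged for removal, including its byte size. A hypothetical consumer sketch that totals the bytes freed for a user; the helper name is illustrative and not part of this changeset:

import { MarkFilesToBeRemoved } from './MarkFilesToBeRemoved'

// Hypothetical consumer: sums the byte sizes of the files staged for removal,
// e.g. so a separate accounting service can release the user's storage quota.
export const bytesStagedForRemoval = async (
  markFilesToBeRemoved: MarkFilesToBeRemoved,
  userUuid: string,
): Promise<number> => {
  const result = await markFilesToBeRemoved.execute({ userUuid })

  if (!result.success) {
    throw new Error(result.message)
  }

  return result.filesRemoved.reduce((sum, file) => sum + file.fileByteSize, 0)
}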
domainEventFactory.createFileRemovedEvent = jest.fn().mockReturnValue({} as jest.Mocked) + + logger = {} as jest.Mocked + logger.debug = jest.fn() + logger.error = jest.fn() + logger.warn = jest.fn() + }) + + it('should indicate of an error in removing fails', async () => { + fileRemover.remove = jest.fn().mockImplementation(() => { + throw new Error('oops') + }) + + expect( + await createUseCase().execute({ + resourceRemoteIdentifier: '2-3-4', + userUuid: '1-2-3', + regularSubscriptionUuid: '3-4-5', + }), + ).toEqual({ + success: false, + message: 'Could not remove resource', + }) + + expect(domainEventPublisher.publish).not.toHaveBeenCalled() + }) + + it('should remove a file', async () => { + await createUseCase().execute({ + resourceRemoteIdentifier: '2-3-4', + userUuid: '1-2-3', + regularSubscriptionUuid: '3-4-5', + }) + + expect(fileRemover.remove).toHaveBeenCalledWith('1-2-3/2-3-4') + expect(domainEventPublisher.publish).toHaveBeenCalled() + }) +}) diff --git a/packages/files/src/Domain/UseCase/RemoveFile/RemoveFile.ts b/packages/files/src/Domain/UseCase/RemoveFile/RemoveFile.ts new file mode 100644 index 000000000..4e75b09f1 --- /dev/null +++ b/packages/files/src/Domain/UseCase/RemoveFile/RemoveFile.ts @@ -0,0 +1,51 @@ +import { DomainEventPublisherInterface } from '@standardnotes/domain-events' +import { inject, injectable } from 'inversify' +import { Logger } from 'winston' + +import TYPES from '../../../Bootstrap/Types' +import { DomainEventFactoryInterface } from '../../Event/DomainEventFactoryInterface' +import { FileRemoverInterface } from '../../Services/FileRemoverInterface' +import { UseCaseInterface } from '../UseCaseInterface' +import { RemoveFileDTO } from './RemoveFileDTO' +import { RemoveFileResponse } from './RemoveFileResponse' + +@injectable() +export class RemoveFile implements UseCaseInterface { + constructor( + @inject(TYPES.FileRemover) private fileRemover: FileRemoverInterface, + @inject(TYPES.DomainEventPublisher) private domainEventPublisher: DomainEventPublisherInterface, + @inject(TYPES.DomainEventFactory) private domainEventFactory: DomainEventFactoryInterface, + @inject(TYPES.Logger) private logger: Logger, + ) {} + + async execute(dto: RemoveFileDTO): Promise { + try { + this.logger.debug(`Removing file: ${dto.resourceRemoteIdentifier}`) + + const filePath = `${dto.userUuid}/${dto.resourceRemoteIdentifier}` + + const removedFileSize = await this.fileRemover.remove(filePath) + + await this.domainEventPublisher.publish( + this.domainEventFactory.createFileRemovedEvent({ + userUuid: dto.userUuid, + filePath: `${dto.userUuid}/${dto.resourceRemoteIdentifier}`, + fileName: dto.resourceRemoteIdentifier, + fileByteSize: removedFileSize, + regularSubscriptionUuid: dto.regularSubscriptionUuid, + }), + ) + + return { + success: true, + } + } catch (error) { + this.logger.error(`Could not remove resource: ${dto.resourceRemoteIdentifier} - ${(error as Error).message}`) + + return { + success: false, + message: 'Could not remove resource', + } + } + } +} diff --git a/packages/files/src/Domain/UseCase/RemoveFile/RemoveFileDTO.ts b/packages/files/src/Domain/UseCase/RemoveFile/RemoveFileDTO.ts new file mode 100644 index 000000000..5e3d10c49 --- /dev/null +++ b/packages/files/src/Domain/UseCase/RemoveFile/RemoveFileDTO.ts @@ -0,0 +1,7 @@ +import { Uuid } from '@standardnotes/common' + +export type RemoveFileDTO = { + userUuid: Uuid + resourceRemoteIdentifier: string + regularSubscriptionUuid: Uuid +} diff --git 
a/packages/files/src/Domain/UseCase/RemoveFile/RemoveFileResponse.ts b/packages/files/src/Domain/UseCase/RemoveFile/RemoveFileResponse.ts new file mode 100644 index 000000000..2ed8da997 --- /dev/null +++ b/packages/files/src/Domain/UseCase/RemoveFile/RemoveFileResponse.ts @@ -0,0 +1,8 @@ +export type RemoveFileResponse = + | { + success: true + } + | { + success: false + message: string + } diff --git a/packages/files/src/Domain/UseCase/StreamDownloadFile/StreamDownloadFile.spec.ts b/packages/files/src/Domain/UseCase/StreamDownloadFile/StreamDownloadFile.spec.ts new file mode 100644 index 000000000..235062fe5 --- /dev/null +++ b/packages/files/src/Domain/UseCase/StreamDownloadFile/StreamDownloadFile.spec.ts @@ -0,0 +1,48 @@ +import 'reflect-metadata' + +import { Readable } from 'stream' +import { Logger } from 'winston' +import { FileDownloaderInterface } from '../../Services/FileDownloaderInterface' + +import { StreamDownloadFile } from './StreamDownloadFile' + +describe('StreamDownloadFile', () => { + let fileDownloader: FileDownloaderInterface + let logger: Logger + + const createUseCase = () => new StreamDownloadFile(fileDownloader, logger) + + beforeEach(() => { + fileDownloader = {} as jest.Mocked + fileDownloader.createDownloadStream = jest.fn().mockReturnValue(new Readable()) + + logger = {} as jest.Mocked + logger.error = jest.fn() + }) + + it('should stream download file contents from S3', async () => { + const result = await createUseCase().execute({ + userUuid: '2-3-4', + resourceRemoteIdentifier: '1-2-3', + startRange: 0, + endRange: 200, + }) + + expect(result.success).toBeTruthy() + }) + + it('should not stream download file contents from S3 if it fails', async () => { + fileDownloader.createDownloadStream = jest.fn().mockImplementation(() => { + throw new Error('oops') + }) + + const result = await createUseCase().execute({ + userUuid: '2-3-4', + resourceRemoteIdentifier: '1-2-3', + startRange: 0, + endRange: 200, + }) + + expect(result.success).toBeFalsy() + }) +}) diff --git a/packages/files/src/Domain/UseCase/StreamDownloadFile/StreamDownloadFile.ts b/packages/files/src/Domain/UseCase/StreamDownloadFile/StreamDownloadFile.ts new file mode 100644 index 000000000..19e55546a --- /dev/null +++ b/packages/files/src/Domain/UseCase/StreamDownloadFile/StreamDownloadFile.ts @@ -0,0 +1,39 @@ +import { inject, injectable } from 'inversify' +import { Logger } from 'winston' +import TYPES from '../../../Bootstrap/Types' +import { FileDownloaderInterface } from '../../Services/FileDownloaderInterface' +import { UseCaseInterface } from '../UseCaseInterface' +import { StreamDownloadFileDTO } from './StreamDownloadFileDTO' +import { StreamDownloadFileResponse } from './StreamDownloadFileResponse' + +@injectable() +export class StreamDownloadFile implements UseCaseInterface { + constructor( + @inject(TYPES.FileDownloader) private fileDownloader: FileDownloaderInterface, + @inject(TYPES.Logger) private logger: Logger, + ) {} + + async execute(dto: StreamDownloadFileDTO): Promise { + try { + const readStream = this.fileDownloader.createDownloadStream( + `${dto.userUuid}/${dto.resourceRemoteIdentifier}`, + dto.startRange, + dto.endRange, + ) + + return { + success: true, + readStream, + } + } catch (error) { + this.logger.error( + `Could not create a download stream for resource: ${dto.userUuid}/${dto.resourceRemoteIdentifier}`, + ) + + return { + success: false, + message: 'Could not create download stream', + } + } + } +} diff --git 
a/packages/files/src/Domain/UseCase/StreamDownloadFile/StreamDownloadFileDTO.ts b/packages/files/src/Domain/UseCase/StreamDownloadFile/StreamDownloadFileDTO.ts new file mode 100644 index 000000000..bc17fe208 --- /dev/null +++ b/packages/files/src/Domain/UseCase/StreamDownloadFile/StreamDownloadFileDTO.ts @@ -0,0 +1,6 @@ +export type StreamDownloadFileDTO = { + userUuid: string + resourceRemoteIdentifier: string + startRange: number + endRange: number +} diff --git a/packages/files/src/Domain/UseCase/StreamDownloadFile/StreamDownloadFileResponse.ts b/packages/files/src/Domain/UseCase/StreamDownloadFile/StreamDownloadFileResponse.ts new file mode 100644 index 000000000..97d06c0fd --- /dev/null +++ b/packages/files/src/Domain/UseCase/StreamDownloadFile/StreamDownloadFileResponse.ts @@ -0,0 +1,11 @@ +import { Readable } from 'stream' + +export type StreamDownloadFileResponse = + | { + success: true + readStream: Readable + } + | { + success: false + message: string + } diff --git a/packages/files/src/Domain/UseCase/UploadFileChunk/UploadFileChunk.spec.ts b/packages/files/src/Domain/UseCase/UploadFileChunk/UploadFileChunk.spec.ts new file mode 100644 index 000000000..fdd131a76 --- /dev/null +++ b/packages/files/src/Domain/UseCase/UploadFileChunk/UploadFileChunk.spec.ts @@ -0,0 +1,84 @@ +import 'reflect-metadata' +import { Logger } from 'winston' +import { FileUploaderInterface } from '../../Services/FileUploaderInterface' +import { UploadRepositoryInterface } from '../../Upload/UploadRepositoryInterface' + +import { UploadFileChunk } from './UploadFileChunk' + +describe('UploadFileChunk', () => { + let fileUploader: FileUploaderInterface + let uploadRepository: UploadRepositoryInterface + let logger: Logger + + const createUseCase = () => new UploadFileChunk(fileUploader, uploadRepository, logger) + + beforeEach(() => { + fileUploader = {} as jest.Mocked + fileUploader.uploadFileChunk = jest.fn().mockReturnValue('ETag123') + + uploadRepository = {} as jest.Mocked + uploadRepository.retrieveUploadSessionId = jest.fn().mockReturnValue('123') + uploadRepository.storeUploadChunkResult = jest.fn() + + logger = {} as jest.Mocked + logger.debug = jest.fn() + logger.error = jest.fn() + logger.warn = jest.fn() + }) + + it('should not upload a data chunk to a non existing file upload session', async () => { + uploadRepository.retrieveUploadSessionId = jest.fn().mockReturnValue(undefined) + + await createUseCase().execute({ + chunkId: 2, + data: new Uint8Array([123]), + resourceRemoteIdentifier: '2-3-4', + userUuid: '1-2-3', + }) + + expect(fileUploader.uploadFileChunk).not.toHaveBeenCalled() + expect(uploadRepository.storeUploadChunkResult).not.toHaveBeenCalled() + }) + + it('should indicate of an error in uploading the chunk', async () => { + uploadRepository.retrieveUploadSessionId = jest.fn().mockImplementation(() => { + throw new Error('oops') + }) + + expect( + await createUseCase().execute({ + chunkId: 2, + data: new Uint8Array([123]), + resourceRemoteIdentifier: '2-3-4', + userUuid: '1-2-3', + }), + ).toEqual({ + success: false, + message: 'Could not upload file chunk', + }) + + expect(fileUploader.uploadFileChunk).not.toHaveBeenCalled() + expect(uploadRepository.storeUploadChunkResult).not.toHaveBeenCalled() + }) + + it('should upload a data chunk to an existing file upload session', async () => { + await createUseCase().execute({ + chunkId: 2, + data: new Uint8Array([123]), + resourceRemoteIdentifier: '2-3-4', + userUuid: '1-2-3', + }) + + 
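Aside on how UploadFileChunk is meant to be driven: a client slices the file into chunks and calls the use case once per chunk with a 1-based chunk id, matching the S3 PartNumber semantics used later in this diff. A minimal, hypothetical driver sketch; the 5 MiB default is an assumption, chosen because S3 multipart parts other than the last must be at least 5 MiB:

import { UploadFileChunk } from './UploadFileChunk'

// Hypothetical driver: splits `data` into fixed-size chunks and feeds them to the
// use case. Chunk ids start at 1; the final chunk may be smaller than `chunkSize`.
export const uploadInChunks = async (
  uploadFileChunk: UploadFileChunk,
  userUuid: string,
  resourceRemoteIdentifier: string,
  data: Uint8Array,
  chunkSize = 5 * 1024 * 1024,
): Promise<void> => {
  for (let offset = 0, chunkId = 1; offset < data.byteLength; offset += chunkSize, chunkId++) {
    const result = await uploadFileChunk.execute({
      userUuid,
      resourceRemoteIdentifier,
      chunkId,
      data: data.subarray(offset, offset + chunkSize),
    })

    if (!result.success) {
      throw new Error(result.message)
    }
  }
}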
expect(fileUploader.uploadFileChunk).toHaveBeenCalledWith({ + chunkId: 2, + data: new Uint8Array([123]), + filePath: '1-2-3/2-3-4', + uploadId: '123', + }) + expect(uploadRepository.storeUploadChunkResult).toHaveBeenCalledWith('123', { + tag: 'ETag123', + chunkId: 2, + chunkSize: 1, + }) + }) +}) diff --git a/packages/files/src/Domain/UseCase/UploadFileChunk/UploadFileChunk.ts b/packages/files/src/Domain/UseCase/UploadFileChunk/UploadFileChunk.ts new file mode 100644 index 000000000..9bd5444a4 --- /dev/null +++ b/packages/files/src/Domain/UseCase/UploadFileChunk/UploadFileChunk.ts @@ -0,0 +1,64 @@ +import { inject, injectable } from 'inversify' +import { Logger } from 'winston' + +import TYPES from '../../../Bootstrap/Types' +import { UseCaseInterface } from '../UseCaseInterface' +import { UploadFileChunkDTO } from './UploadFileChunkDTO' +import { UploadFileChunkResponse } from './UploadFileChunkResponse' +import { FileUploaderInterface } from '../../Services/FileUploaderInterface' +import { UploadRepositoryInterface } from '../../Upload/UploadRepositoryInterface' + +@injectable() +export class UploadFileChunk implements UseCaseInterface { + constructor( + @inject(TYPES.FileUploader) private fileUploader: FileUploaderInterface, + @inject(TYPES.UploadRepository) private uploadRepository: UploadRepositoryInterface, + @inject(TYPES.Logger) private logger: Logger, + ) {} + + async execute(dto: UploadFileChunkDTO): Promise { + try { + this.logger.debug( + `Starting upload file chunk ${dto.chunkId} with ${dto.data.byteLength} bytes for resource: ${dto.resourceRemoteIdentifier}`, + ) + + const filePath = `${dto.userUuid}/${dto.resourceRemoteIdentifier}` + + const uploadId = await this.uploadRepository.retrieveUploadSessionId(filePath) + if (uploadId === undefined) { + this.logger.warn(`Could not find upload session for file path: ${filePath}`) + + return { + success: false, + message: 'Could not find upload session', + } + } + + const uploadFileChunkETag = await this.fileUploader.uploadFileChunk({ + uploadId, + data: dto.data, + chunkId: dto.chunkId, + filePath, + }) + + await this.uploadRepository.storeUploadChunkResult(uploadId, { + tag: uploadFileChunkETag, + chunkId: dto.chunkId, + chunkSize: dto.data.byteLength, + }) + + return { + success: true, + } + } catch (error) { + this.logger.error( + `Could not upload file chunk for resource: ${dto.resourceRemoteIdentifier} - ${(error as Error).message}`, + ) + + return { + success: false, + message: 'Could not upload file chunk', + } + } + } +} diff --git a/packages/files/src/Domain/UseCase/UploadFileChunk/UploadFileChunkDTO.ts b/packages/files/src/Domain/UseCase/UploadFileChunk/UploadFileChunkDTO.ts new file mode 100644 index 000000000..31fa673b5 --- /dev/null +++ b/packages/files/src/Domain/UseCase/UploadFileChunk/UploadFileChunkDTO.ts @@ -0,0 +1,8 @@ +import { ChunkId } from '../../Upload/ChunkId' + +export type UploadFileChunkDTO = { + data: Uint8Array + chunkId: ChunkId + userUuid: string + resourceRemoteIdentifier: string +} diff --git a/packages/files/src/Domain/UseCase/UploadFileChunk/UploadFileChunkResponse.ts b/packages/files/src/Domain/UseCase/UploadFileChunk/UploadFileChunkResponse.ts new file mode 100644 index 000000000..4b85509cd --- /dev/null +++ b/packages/files/src/Domain/UseCase/UploadFileChunk/UploadFileChunkResponse.ts @@ -0,0 +1,8 @@ +export type UploadFileChunkResponse = + | { + success: true + } + | { + success: false + message: string + } diff --git a/packages/files/src/Domain/UseCase/UseCaseInterface.ts 
b/packages/files/src/Domain/UseCase/UseCaseInterface.ts
new file mode 100644
index 000000000..7c8405a9a
--- /dev/null
+++ b/packages/files/src/Domain/UseCase/UseCaseInterface.ts
@@ -0,0 +1,3 @@
+export interface UseCaseInterface {
+  execute(...args: any[]): Promise<Record<string, unknown>>
+}
diff --git a/packages/files/src/Infra/FS/FSFileDownloader.ts b/packages/files/src/Infra/FS/FSFileDownloader.ts
new file mode 100644
index 000000000..019c1f5a0
--- /dev/null
+++ b/packages/files/src/Infra/FS/FSFileDownloader.ts
@@ -0,0 +1,19 @@
+import { Readable } from 'stream'
+import { createReadStream, promises } from 'fs'
+import { inject, injectable } from 'inversify'
+
+import { FileDownloaderInterface } from '../../Domain/Services/FileDownloaderInterface'
+import TYPES from '../../Bootstrap/Types'
+
+@injectable()
+export class FSFileDownloader implements FileDownloaderInterface {
+  constructor(@inject(TYPES.FILE_UPLOAD_PATH) private fileUploadPath: string) {}
+
+  async getFileSize(filePath: string): Promise<number> {
+    return (await promises.stat(`${this.fileUploadPath}/${filePath}`)).size
+  }
+
+  createDownloadStream(filePath: string, startRange: number, endRange: number): Readable {
+    return createReadStream(`${this.fileUploadPath}/${filePath}`, { start: startRange, end: endRange })
+  }
+}
diff --git a/packages/files/src/Infra/FS/FSFileRemover.ts b/packages/files/src/Infra/FS/FSFileRemover.ts
new file mode 100644
index 000000000..8ae509413
--- /dev/null
+++ b/packages/files/src/Infra/FS/FSFileRemover.ts
@@ -0,0 +1,26 @@
+import { inject, injectable } from 'inversify'
+import { promises } from 'fs'
+
+import { FileRemoverInterface } from '../../Domain/Services/FileRemoverInterface'
+import { RemovedFileDescription } from '../../Domain/File/RemovedFileDescription'
+import TYPES from '../../Bootstrap/Types'
+
+@injectable()
+export class FSFileRemover implements FileRemoverInterface {
+  constructor(@inject(TYPES.FILE_UPLOAD_PATH) private fileUploadPath: string) {}
+
+  async markFilesToBeRemoved(userUuid: string): Promise<Array<RemovedFileDescription>> {
+    await promises.rmdir(`${this.fileUploadPath}/${userUuid}`)
+
+    return []
+  }
+
+  async remove(filePath: string): Promise<number> {
+    const fullPath = `${this.fileUploadPath}/${filePath}`
+    const fileSize = (await promises.stat(fullPath)).size
+
+    await promises.rm(fullPath)
+
+    return fileSize
+  }
+}
diff --git a/packages/files/src/Infra/FS/FSFileUploader.ts b/packages/files/src/Infra/FS/FSFileUploader.ts
new file mode 100644
index 000000000..f1a976213
--- /dev/null
+++ b/packages/files/src/Infra/FS/FSFileUploader.ts
@@ -0,0 +1,67 @@
+import { promises } from 'fs'
+import { dirname } from 'path'
+import { inject, injectable } from 'inversify'
+
+import { FileUploaderInterface } from '../../Domain/Services/FileUploaderInterface'
+import { UploadChunkResult } from '../../Domain/Upload/UploadChunkResult'
+import { Logger } from 'winston'
+import TYPES from '../../Bootstrap/Types'
+
+@injectable()
+export class FSFileUploader implements FileUploaderInterface {
+  private inMemoryChunks: Map<string, Map<number, Uint8Array>>
+
+  constructor(
+    @inject(TYPES.FILE_UPLOAD_PATH) private fileUploadPath: string,
+    @inject(TYPES.Logger) private logger: Logger,
+  ) {
+    this.inMemoryChunks = new Map<string, Map<number, Uint8Array>>()
+  }
+
+  async uploadFileChunk(dto: {
+    uploadId: string
+    data: Uint8Array
+    filePath: string
+    chunkId: number
+  }): Promise<string> {
+    if (!this.inMemoryChunks.has(dto.uploadId)) {
+      this.inMemoryChunks.set(dto.uploadId, new Map())
+    }
+
+    const fileChunks = this.inMemoryChunks.get(dto.uploadId) as Map<number, Uint8Array>
+
+    this.logger.debug(`FS storing file chunk ${dto.chunkId} in memory for ${dto.uploadId}`)
+
+    fileChunks.set(dto.chunkId, dto.data)
+
+    return dto.uploadId
+  }
+
+  async finishUploadSession(
+    uploadId: string,
+    filePath: string,
+    _uploadChunkResults: UploadChunkResult[],
+  ): Promise<void> {
+    this.logger.debug(`FS finishing upload for ${uploadId}`)
+
+    const fileChunks = this.inMemoryChunks.get(uploadId)
+    if (!fileChunks) {
+      throw new Error(`Could not find chunks for upload ${uploadId}`)
+    }
+
+    const orderedKeys = [...fileChunks.keys()].sort((a, b) => a - b)
+    for (const orderedKey of orderedKeys) {
+      await promises.appendFile(`${this.fileUploadPath}/${filePath}`, fileChunks.get(orderedKey) as Uint8Array)
+    }
+
+    this.inMemoryChunks.delete(uploadId)
+  }
+
+  async createUploadSession(filePath: string): Promise<string> {
+    const fullPath = `${this.fileUploadPath}/${filePath}`
+
+    await promises.mkdir(dirname(fullPath), { recursive: true })
+
+    return fullPath
+  }
+}
diff --git a/packages/files/src/Infra/Redis/RedisUploadRepository.spec.ts b/packages/files/src/Infra/Redis/RedisUploadRepository.spec.ts
new file mode 100644
index 000000000..a17df5f8d
--- /dev/null
+++ b/packages/files/src/Infra/Redis/RedisUploadRepository.spec.ts
@@ -0,0 +1,60 @@
+import 'reflect-metadata'
+
+import * as IORedis from 'ioredis'
+
+import { RedisUploadRepository } from './RedisUploadRepository'
+
+describe('RedisUploadRepository', () => {
+  let redisClient: IORedis.Redis
+
+  const createRepository = () => new RedisUploadRepository(redisClient)
+
+  beforeEach(() => {
+    redisClient = {} as jest.Mocked<IORedis.Redis>
+    redisClient.setex = jest.fn()
+    redisClient.get = jest.fn().mockReturnValue('123')
+    redisClient.lpush = jest.fn()
+    redisClient.expire = jest.fn()
+    redisClient.lrange = jest.fn().mockReturnValue(['{"tag":"123","chunkId":3}', '{"tag":"123","chunkId":1}'])
+  })
+
+  it('should store an upload session', async () => {
+    await createRepository().storeUploadSession('1-2-3/2-3-4', '123')
+
+    expect(redisClient.setex).toHaveBeenCalledWith('upload-session:1-2-3/2-3-4', 7200, '123')
+  })
+
+  it('should retrieve an upload session id', async () => {
+    await createRepository().retrieveUploadSessionId('1-2-3/2-3-4')
+
+    expect(redisClient.get).toHaveBeenCalledWith('upload-session:1-2-3/2-3-4')
+  })
+
+  it('should return undefined on a non existing upload session', async () => {
+    redisClient.get = jest.fn().mockReturnValue(null)
+
+    expect(await createRepository().retrieveUploadSessionId('1-2-3/2-3-4')).toBeUndefined()
+  })
+
+  it('should store an upload chunk result', async () => {
+    await createRepository().storeUploadChunkResult('123', { tag: '123', chunkId: 3, chunkSize: 100 })
+
+    expect(redisClient.lpush).toHaveBeenCalledWith('upload-chunks:123', '{"tag":"123","chunkId":3,"chunkSize":100}')
+    expect(redisClient.expire).toHaveBeenCalledWith('upload-chunks:123', 7200)
+  })
+
+  it('should retrieve upload chunk results', async () => {
+    expect(await createRepository().retrieveUploadChunkResults('123')).toEqual([
+      {
+        tag: '123',
+        chunkId: 1,
+      },
+      {
+        tag: '123',
+        chunkId: 3,
+      },
+    ])
+
+    expect(redisClient.lrange).toHaveBeenCalledWith('upload-chunks:123', 0, -1)
+  })
+})
diff --git a/packages/files/src/Infra/Redis/RedisUploadRepository.ts b/packages/files/src/Infra/Redis/RedisUploadRepository.ts
new file mode 100644
index 000000000..931650d8d
--- /dev/null
+++ b/packages/files/src/Infra/Redis/RedisUploadRepository.ts
@@ -0,0 +1,50 @@
+import * as IORedis from 'ioredis'
+import { inject, injectable } from 'inversify'
+import TYPES from '../../Bootstrap/Types'
+import { UploadRepositoryInterface } from '../../Domain/Upload/UploadRepositoryInterface'
+import { UploadChunkResult } from '../../Domain/Upload/UploadChunkResult'
+
+@injectable()
+export class RedisUploadRepository implements UploadRepositoryInterface {
+  private readonly UPLOAD_SESSION_PREFIX = 'upload-session'
+  private readonly UPLOAD_CHUNKS_PREFIX = 'upload-chunks'
+  private readonly UPLOAD_SESSION_DEFAULT_TTL = 7200
+
+  constructor(@inject(TYPES.Redis) private redisClient: IORedis.Redis) {}
+
+  async storeUploadSession(filePath: string, uploadId: string): Promise<void> {
+    await this.redisClient.setex(`${this.UPLOAD_SESSION_PREFIX}:${filePath}`, this.UPLOAD_SESSION_DEFAULT_TTL, uploadId)
+  }
+
+  async retrieveUploadSessionId(filePath: string): Promise<string | undefined> {
+    const uploadId = await this.redisClient.get(`${this.UPLOAD_SESSION_PREFIX}:${filePath}`)
+    if (!uploadId) {
+      return undefined
+    }
+
+    return uploadId
+  }
+
+  async storeUploadChunkResult(uploadId: string, uploadChunkResult: UploadChunkResult): Promise<void> {
+    await this.redisClient.lpush(`${this.UPLOAD_CHUNKS_PREFIX}:${uploadId}`, JSON.stringify(uploadChunkResult))
+    await this.redisClient.expire(`${this.UPLOAD_CHUNKS_PREFIX}:${uploadId}`, this.UPLOAD_SESSION_DEFAULT_TTL)
+  }
+
+  async retrieveUploadChunkResults(uploadId: string): Promise<UploadChunkResult[]> {
+    const stringifiedUploadChunkResults = await this.redisClient.lrange(
+      `${this.UPLOAD_CHUNKS_PREFIX}:${uploadId}`,
+      0,
+      -1,
+    )
+    const uploadChunksResults: UploadChunkResult[] = []
+    for (const stringifiedUploadChunkResult of stringifiedUploadChunkResults) {
+      uploadChunksResults.push(JSON.parse(stringifiedUploadChunkResult))
+    }
+
+    const sortedResults = uploadChunksResults.sort((a, b) => {
+      return a.chunkId - b.chunkId
+    })
+
+    return sortedResults
+  }
+}
diff --git a/packages/files/src/Infra/S3/S3FileDownloader.spec.ts b/packages/files/src/Infra/S3/S3FileDownloader.spec.ts
new file mode 100644
index 000000000..a04e68b41
--- /dev/null
+++ b/packages/files/src/Infra/S3/S3FileDownloader.spec.ts
@@ -0,0 +1,33 @@
+import 'reflect-metadata'
+
+import * as AWS from 'aws-sdk'
+import { Readable } from 'stream'
+
+import { S3FileDownloader } from './S3FileDownloader'
+
+describe('S3FileDownloader', () => {
+  let s3Client: AWS.S3
+  const s3BuckeName = 'test'
+
+  const createService = () => new S3FileDownloader(s3Client, s3BuckeName)
+
+  beforeEach(() => {
+    const awsRequest = {} as jest.Mocked<AWS.Request<AWS.S3.GetObjectOutput, AWS.AWSError>>
+    awsRequest.createReadStream = jest.fn().mockReturnValue(new Readable())
+
+    s3Client = {} as jest.Mocked<AWS.S3>
+    s3Client.getObject = jest.fn().mockReturnValue(awsRequest)
+
+    const headRequest = {} as jest.Mocked<AWS.Request<AWS.S3.HeadObjectOutput, AWS.AWSError>>
+    headRequest.promise = jest.fn().mockReturnValue(Promise.resolve({ ContentLength: 200 }))
+    s3Client.headObject = jest.fn().mockReturnValue(headRequest)
+  })
+
+  it('should create a download stream', () => {
+    expect(createService().createDownloadStream('test.txt', 0, 200)).toBeInstanceOf(Readable)
+  })
+
+  it('should get file size', async () => {
+    expect(await createService().getFileSize('test.txt')).toEqual(200)
+  })
+})
diff --git a/packages/files/src/Infra/S3/S3FileDownloader.ts b/packages/files/src/Infra/S3/S3FileDownloader.ts
new file mode 100644
index 000000000..6557116f1
--- /dev/null
+++ b/packages/files/src/Infra/S3/S3FileDownloader.ts
@@ -0,0 +1,32 @@
+import { inject, injectable } from 'inversify'
+import * as AWS from 'aws-sdk'
+import { Readable } from 'stream'
+
+import TYPES from '../../Bootstrap/Types'
+import { FileDownloaderInterface } from '../../Domain/Services/FileDownloaderInterface'
+
+@injectable() +export class S3FileDownloader implements FileDownloaderInterface { + constructor(@inject(TYPES.S3) private s3Client: AWS.S3, @inject(TYPES.S3_BUCKET_NAME) private s3BuckeName: string) {} + + createDownloadStream(filePath: string, startRange: number, endRange: number): Readable { + return this.s3Client + .getObject({ + Bucket: this.s3BuckeName, + Key: filePath, + Range: `bytes=${startRange}-${endRange}`, + }) + .createReadStream() + } + + async getFileSize(filePath: string): Promise { + const head = await this.s3Client + .headObject({ + Bucket: this.s3BuckeName, + Key: filePath, + }) + .promise() + + return head.ContentLength as number + } +} diff --git a/packages/files/src/Infra/S3/S3FileRemover.spec.ts b/packages/files/src/Infra/S3/S3FileRemover.spec.ts new file mode 100644 index 000000000..a3f1d0536 --- /dev/null +++ b/packages/files/src/Infra/S3/S3FileRemover.spec.ts @@ -0,0 +1,113 @@ +import 'reflect-metadata' + +import * as AWS from 'aws-sdk' + +import { S3FileRemover } from './S3FileRemover' + +describe('S3FileRemover', () => { + let s3Client: AWS.S3 + const s3BuckeName = 'test' + + const createService = () => new S3FileRemover(s3Client, s3BuckeName) + + beforeEach(() => { + const deleteObjectRequest = {} as jest.Mocked> + deleteObjectRequest.promise = jest.fn() + + s3Client = {} as jest.Mocked + s3Client.deleteObject = jest.fn().mockReturnValue(deleteObjectRequest) + + const headRequest = {} as jest.Mocked> + headRequest.promise = jest.fn().mockReturnValue(Promise.resolve({ ContentLength: 200 })) + s3Client.headObject = jest.fn().mockReturnValue(headRequest) + }) + + it('should delete a file', async () => { + expect(await createService().remove('123/234')).toEqual(200) + + expect(s3Client.deleteObject).toHaveBeenCalledWith({ + Bucket: 'test', + Key: '123/234', + }) + }) + + it('should mark user files for removal', async () => { + const copyObjectRequest = {} as jest.Mocked> + copyObjectRequest.promise = jest.fn() + + s3Client.copyObject = jest.fn().mockReturnValue(copyObjectRequest) + + const listObjectsRequest = {} as jest.Mocked> + listObjectsRequest.promise = jest.fn().mockReturnValue({ + Contents: [ + { + Key: '123/2-3-4', + Size: 123, + }, + { + Key: '123/3-4-5', + Size: 234, + }, + {}, + ], + } as jest.Mocked) + + s3Client.listObjectsV2 = jest.fn().mockReturnValue(listObjectsRequest) + + expect(await createService().markFilesToBeRemoved('123')).toEqual([ + { + fileByteSize: 123, + fileName: '2-3-4', + filePath: '123/2-3-4', + userUuid: '123', + }, + { + fileByteSize: 234, + fileName: '3-4-5', + filePath: '123/3-4-5', + userUuid: '123', + }, + ]) + + expect(s3Client.copyObject).toHaveBeenCalledTimes(2) + expect(s3Client.copyObject).toHaveBeenNthCalledWith(1, { + Bucket: 'test', + CopySource: 'test/123/2-3-4', + Key: 'expiration-chamber/123/2-3-4', + StorageClass: 'DEEP_ARCHIVE', + }) + expect(s3Client.copyObject).toHaveBeenNthCalledWith(2, { + Bucket: 'test', + CopySource: 'test/123/3-4-5', + Key: 'expiration-chamber/123/3-4-5', + StorageClass: 'DEEP_ARCHIVE', + }) + + expect(s3Client.deleteObject).toHaveBeenCalledTimes(2) + expect(s3Client.deleteObject).toHaveBeenNthCalledWith(1, { + Bucket: 'test', + Key: '123/2-3-4', + }) + expect(s3Client.deleteObject).toHaveBeenNthCalledWith(2, { + Bucket: 'test', + Key: '123/3-4-5', + }) + }) + + it('should not mark user files for removal if there none', async () => { + const copyObjectRequest = {} as jest.Mocked> + copyObjectRequest.promise = jest.fn() + + s3Client.copyObject = jest.fn().mockReturnValue(copyObjectRequest) 
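Aside on the S3FileDownloader shown earlier in this diff: `createDownloadStream` passes an inclusive byte range straight through as an S3 `Range` header, so `0` to `99` yields exactly the first 100 bytes. A hypothetical sketch of piping such a range into an HTTP response; the function name and import paths are illustrative, not part of this changeset:

import { ServerResponse } from 'http'

import { S3FileDownloader } from './S3FileDownloader'

// Hypothetical wiring: streams the first 100 bytes of an object to an HTTP response.
// S3 byte ranges are inclusive, so 0-99 covers 100 bytes.
export const streamFirstHundredBytes = (
  downloader: S3FileDownloader,
  filePath: string,
  response: ServerResponse,
): void => {
  const readStream = downloader.createDownloadStream(filePath, 0, 99)
  readStream.pipe(response)
}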
+ + const listObjectsRequest = {} as jest.Mocked> + listObjectsRequest.promise = jest.fn().mockReturnValue({} as jest.Mocked) + + s3Client.listObjectsV2 = jest.fn().mockReturnValue(listObjectsRequest) + + expect(await createService().markFilesToBeRemoved('123')).toEqual([]) + + expect(s3Client.copyObject).not.toHaveBeenCalled() + expect(s3Client.deleteObject).not.toHaveBeenCalled() + }) +}) diff --git a/packages/files/src/Infra/S3/S3FileRemover.ts b/packages/files/src/Infra/S3/S3FileRemover.ts new file mode 100644 index 000000000..72931c888 --- /dev/null +++ b/packages/files/src/Infra/S3/S3FileRemover.ts @@ -0,0 +1,79 @@ +import { inject, injectable } from 'inversify' +import * as AWS from 'aws-sdk' + +import TYPES from '../../Bootstrap/Types' +import { FileRemoverInterface } from '../../Domain/Services/FileRemoverInterface' +import { RemovedFileDescription } from '../../Domain/File/RemovedFileDescription' + +@injectable() +export class S3FileRemover implements FileRemoverInterface { + constructor(@inject(TYPES.S3) private s3Client: AWS.S3, @inject(TYPES.S3_BUCKET_NAME) private s3BuckeName: string) {} + + async markFilesToBeRemoved(userUuid: string): Promise> { + const filesResponse = await this.s3Client + .listObjectsV2({ + Bucket: this.s3BuckeName, + Prefix: `${userUuid}/`, + }) + .promise() + + if (filesResponse.Contents === undefined) { + return [] + } + + const files = filesResponse.Contents + + const removedFileDescriptions: Array = [] + + for (const file of files) { + if (file.Key === undefined) { + continue + } + + await this.s3Client + .copyObject({ + Bucket: this.s3BuckeName, + Key: `expiration-chamber/${file.Key}`, + CopySource: `${this.s3BuckeName}/${file.Key}`, + StorageClass: 'DEEP_ARCHIVE', + }) + .promise() + + await this.s3Client + .deleteObject({ + Bucket: this.s3BuckeName, + Key: file.Key, + }) + .promise() + + removedFileDescriptions.push({ + fileByteSize: file.Size as number, + fileName: file.Key.replace(`${userUuid}/`, ''), + filePath: file.Key, + userUuid, + }) + } + + return removedFileDescriptions + } + + async remove(filePath: string): Promise { + const head = await this.s3Client + .headObject({ + Bucket: this.s3BuckeName, + Key: filePath, + }) + .promise() + + const fileSize = head.ContentLength as number + + await this.s3Client + .deleteObject({ + Bucket: this.s3BuckeName, + Key: filePath, + }) + .promise() + + return fileSize + } +} diff --git a/packages/files/src/Infra/S3/S3FileUploader.spec.ts b/packages/files/src/Infra/S3/S3FileUploader.spec.ts new file mode 100644 index 000000000..a2fe26043 --- /dev/null +++ b/packages/files/src/Infra/S3/S3FileUploader.spec.ts @@ -0,0 +1,70 @@ +import 'reflect-metadata' +import * as AWS from 'aws-sdk' + +import { S3FileUploader } from './S3FileUploader' + +describe('S3FileUploader', () => { + let s3Client: AWS.S3 + const s3BuckeName = 'test-bucket' + + const createUploader = () => new S3FileUploader(s3Client, s3BuckeName) + + beforeEach(() => { + s3Client = {} as jest.Mocked + s3Client.createMultipartUpload = jest.fn().mockReturnValue({ + promise: jest.fn().mockReturnValue({ UploadId: '1-2-3' }), + }) + s3Client.uploadPart = jest.fn().mockReturnValue({ + promise: jest.fn().mockReturnValue({ ETag: '1-2-3' }), + }) + s3Client.completeMultipartUpload = jest.fn().mockReturnValue({ + promise: jest.fn().mockReturnValue(true), + }) + }) + + it('should create an upload session', async () => { + await createUploader().createUploadSession('1-2-3/2-3-4') + + expect(s3Client.createMultipartUpload).toHaveBeenCalledWith({ + ACL: 
'private', + Bucket: 'test-bucket', + Key: '1-2-3/2-3-4', + StorageClass: 'INTELLIGENT_TIERING', + }) + }) + + it('should finish an upload session', async () => { + await createUploader().finishUploadSession('123', '1-2-3/2-3-4', [{ chunkId: 1, tag: '123123', chunkSize: 100 }]) + + expect(s3Client.completeMultipartUpload).toHaveBeenCalledWith({ + Bucket: 'test-bucket', + Key: '1-2-3/2-3-4', + MultipartUpload: { + Parts: [ + { + ETag: '123123', + PartNumber: 1, + }, + ], + }, + UploadId: '123', + }) + }) + + it('should upload a data chunk to an upload session', async () => { + await createUploader().uploadFileChunk({ + chunkId: 1, + data: new Uint8Array([123]), + filePath: '1-2-3/2-3-4', + uploadId: '123', + }) + + expect(s3Client.uploadPart).toHaveBeenCalledWith({ + Body: new Uint8Array([123]), + Bucket: 'test-bucket', + Key: '1-2-3/2-3-4', + PartNumber: 1, + UploadId: '123', + }) + }) +}) diff --git a/packages/files/src/Infra/S3/S3FileUploader.ts b/packages/files/src/Infra/S3/S3FileUploader.ts new file mode 100644 index 000000000..bc7da9161 --- /dev/null +++ b/packages/files/src/Infra/S3/S3FileUploader.ts @@ -0,0 +1,66 @@ +import { inject, injectable } from 'inversify' +import * as AWS from 'aws-sdk' +import TYPES from '../../Bootstrap/Types' +import { FileUploaderInterface } from '../../Domain/Services/FileUploaderInterface' +import { UploadId } from '../../Domain/Upload/UploadId' +import { UploadChunkResult } from '../../Domain/Upload/UploadChunkResult' +import { ChunkId } from '../../Domain/Upload/ChunkId' + +@injectable() +export class S3FileUploader implements FileUploaderInterface { + constructor(@inject(TYPES.S3) private s3Client: AWS.S3, @inject(TYPES.S3_BUCKET_NAME) private s3BuckeName: string) {} + + async createUploadSession(filePath: string): Promise { + const uploadSessionCreationResult = await this.s3Client + .createMultipartUpload({ + Bucket: this.s3BuckeName, + Key: filePath, + ACL: 'private', + StorageClass: 'INTELLIGENT_TIERING', + }) + .promise() + + return uploadSessionCreationResult.UploadId as string + } + + async uploadFileChunk(dto: { + uploadId: string + data: Uint8Array + filePath: string + chunkId: ChunkId + }): Promise { + const uploadResult = await this.s3Client + .uploadPart({ + Body: dto.data, + Bucket: this.s3BuckeName, + Key: dto.filePath, + PartNumber: dto.chunkId, + UploadId: dto.uploadId, + }) + .promise() + + return uploadResult.ETag as string + } + + async finishUploadSession( + uploadId: string, + filePath: string, + uploadChunkResults: UploadChunkResult[], + ): Promise { + const multipartUploadParts = uploadChunkResults.map((uploadChunkResult) => ({ + ETag: uploadChunkResult.tag, + PartNumber: uploadChunkResult.chunkId, + })) + + await this.s3Client + .completeMultipartUpload({ + Bucket: this.s3BuckeName, + Key: filePath, + MultipartUpload: { + Parts: multipartUploadParts, + }, + UploadId: uploadId, + }) + .promise() + } +} diff --git a/packages/files/test-setup.ts b/packages/files/test-setup.ts new file mode 100644 index 000000000..8b1378917 --- /dev/null +++ b/packages/files/test-setup.ts @@ -0,0 +1 @@ + diff --git a/packages/files/tsconfig.json b/packages/files/tsconfig.json new file mode 100644 index 000000000..d87b89eeb --- /dev/null +++ b/packages/files/tsconfig.json @@ -0,0 +1,13 @@ +{ + "extends": "../../tsconfig.json", + "compilerOptions": { + "composite": true, + "outDir": "./dist", + }, + "include": [ + "src/**/*", + "bin/**/*", + "migrations/**/*", + ], + "references": [] +} diff --git a/packages/files/uploads/.gitkeep 
b/packages/files/uploads/.gitkeep new file mode 100644 index 000000000..e69de29bb diff --git a/packages/files/wait-for.sh b/packages/files/wait-for.sh new file mode 100755 index 000000000..72c73894e --- /dev/null +++ b/packages/files/wait-for.sh @@ -0,0 +1,17 @@ +#!/bin/sh + +set -e + +host="$1" +shift +port="$1" +shift +cmd="$@" + +while ! nc -vz $host $port; do + >&2 echo "waiting for $host:$port..." + sleep 1 +done + +>&2 echo "$host:$port is up - executing command" +exec $cmd diff --git a/tsconfig.json b/tsconfig.json index abae2e428..a9c25d446 100644 --- a/tsconfig.json +++ b/tsconfig.json @@ -30,6 +30,9 @@ }, { "path": "./packages/syncing-server" + }, + { + "path": "./packages/files" } ] } diff --git a/yarn.lock b/yarn.lock index 002b3ca88..cd14f1a2c 100644 --- a/yarn.lock +++ b/yarn.lock @@ -1218,6 +1218,15 @@ __metadata: languageName: node linkType: hard +"@newrelic/aws-sdk@npm:^3.1.0": + version: 3.1.0 + resolution: "@newrelic/aws-sdk@npm:3.1.0" + peerDependencies: + newrelic: ">=6.11.0" + checksum: 5601d90c78f82d3216d9cacb664c7a74c1b06acfff44471e758a878a36345ac46449896f9ea4a0b44673b7a72308ee7717d999c184e6e87f3478d5b2d3a14d0c + languageName: node + linkType: hard + "@newrelic/aws-sdk@npm:^4.0.1, @newrelic/aws-sdk@npm:^4.1.1": version: 4.1.2 resolution: "@newrelic/aws-sdk@npm:4.1.2" @@ -1229,6 +1238,17 @@ __metadata: languageName: node linkType: hard +"@newrelic/koa@npm:^5.0.0": + version: 5.0.0 + resolution: "@newrelic/koa@npm:5.0.0" + dependencies: + methods: ^1.1.2 + peerDependencies: + newrelic: ">=6.11.0" + checksum: e98d921b96d043817b623bf83752bdae4e8ca9e594e47ad23109cb516d9a8715ca4b79e4949c5fc5e76ca806d99b2e46e2b7afa2861e0af408f6d647b18f292f + languageName: node + linkType: hard + "@newrelic/koa@npm:^6.0.1, @newrelic/koa@npm:^6.1.1": version: 6.1.2 resolution: "@newrelic/koa@npm:6.1.2" @@ -1249,6 +1269,17 @@ __metadata: languageName: node linkType: hard +"@newrelic/native-metrics@npm:^6.0.0": + version: 6.0.2 + resolution: "@newrelic/native-metrics@npm:6.0.2" + dependencies: + nan: ^2.14.2 + node-gyp: latest + semver: ^5.5.1 + checksum: 78f92bbe7feb662699b1a148a869b3d963653a16d3c6ffc63159c4b8bea7905e26140e55631444e167dbd4291e6b7c2261bcd3ce4b8917852fe1d92ffb3936f9 + languageName: node + linkType: hard + "@newrelic/native-metrics@npm:^7.0.1": version: 7.1.2 resolution: "@newrelic/native-metrics@npm:7.1.2" @@ -1273,6 +1304,17 @@ __metadata: languageName: node linkType: hard +"@newrelic/superagent@npm:^4.0.0": + version: 4.0.0 + resolution: "@newrelic/superagent@npm:4.0.0" + dependencies: + methods: ^1.1.2 + peerDependencies: + newrelic: ">=6.11.0" + checksum: 5fb257ac0530f91bae58265678500c96c28f164bd6fcec5c0dff51958c8e72bc721dd04d1d2c48bace9e077b57b233c47efb8af61f2a005b71d4636e29ae9728 + languageName: node + linkType: hard + "@newrelic/superagent@npm:^5.0.1, @newrelic/superagent@npm:^5.1.0": version: 5.1.1 resolution: "@newrelic/superagent@npm:5.1.1" @@ -1874,7 +1916,7 @@ __metadata: languageName: unknown linkType: soft -"@standardnotes/auth@npm:^3.19.2, @standardnotes/auth@npm:^3.19.3": +"@standardnotes/auth@npm:^3.18.9, @standardnotes/auth@npm:^3.19.2, @standardnotes/auth@npm:^3.19.3": version: 3.19.3 resolution: "@standardnotes/auth@npm:3.19.3" dependencies: @@ -1884,14 +1926,24 @@ __metadata: languageName: node linkType: hard -"@standardnotes/common@npm:^1.19.1, @standardnotes/common@npm:^1.22.0, @standardnotes/common@npm:^1.23.0": +"@standardnotes/common@npm:^1.19.1, @standardnotes/common@npm:^1.19.4, @standardnotes/common@npm:^1.22.0, @standardnotes/common@npm:^1.23.0": 
version: 1.23.0 resolution: "@standardnotes/common@npm:1.23.0" checksum: 9c51bf76e7b9b28862e355aa66d0af6658e9baf9701c986d6f3b17d382b90e8da03350a4ab72b10cd16b16f1e739b8200991b3561719456972f3ce273dac3986 languageName: node linkType: hard -"@standardnotes/domain-events-infra@npm:^1.4.135, @standardnotes/domain-events-infra@npm:^1.5.0, @standardnotes/domain-events-infra@npm:^1.5.2": +"@standardnotes/config@npm:2.0.1": + version: 2.0.1 + resolution: "@standardnotes/config@npm:2.0.1" + dependencies: + "@typescript-eslint/eslint-plugin": ^4.14.0 + "@typescript-eslint/parser": ^4.14.0 + checksum: 5284e034f267019257163af30c8603d13af3213eeb0575e789b49ba08390a01a72d559387120320a77e17a18dd3ab0454d0f5cebefb05a43b14ebfc00c7ec342 + languageName: node + linkType: hard + +"@standardnotes/domain-events-infra@npm:^1.4.135, @standardnotes/domain-events-infra@npm:^1.4.93, @standardnotes/domain-events-infra@npm:^1.5.0, @standardnotes/domain-events-infra@npm:^1.5.2": version: 1.5.2 resolution: "@standardnotes/domain-events-infra@npm:1.5.2" dependencies: @@ -1906,7 +1958,7 @@ __metadata: languageName: node linkType: hard -"@standardnotes/domain-events@npm:^2.31.1, @standardnotes/domain-events@npm:^2.32.0, @standardnotes/domain-events@npm:^2.32.2": +"@standardnotes/domain-events@npm:^2.27.6, @standardnotes/domain-events@npm:^2.31.1, @standardnotes/domain-events@npm:^2.32.0, @standardnotes/domain-events@npm:^2.32.2": version: 2.32.2 resolution: "@standardnotes/domain-events@npm:2.32.2" dependencies: @@ -1938,6 +1990,56 @@ __metadata: languageName: node linkType: hard +"@standardnotes/files-server@workspace:packages/files": + version: 0.0.0-use.local + resolution: "@standardnotes/files-server@workspace:packages/files" + dependencies: + "@newrelic/native-metrics": 7.0.2 + "@sentry/node": ^6.16.1 + "@standardnotes/auth": ^3.18.9 + "@standardnotes/common": ^1.19.4 + "@standardnotes/config": 2.0.1 + "@standardnotes/domain-events": ^2.27.6 + "@standardnotes/domain-events-infra": ^1.4.93 + "@standardnotes/sncrypto-common": ^1.3.0 + "@standardnotes/sncrypto-node": ^1.3.0 + "@standardnotes/time": ^1.4.5 + "@types/connect-busboy": ^1.0.0 + "@types/cors": ^2.8.9 + "@types/express": ^4.17.11 + "@types/ioredis": ^4.28.10 + "@types/jest": ^28.1.3 + "@types/jsonwebtoken": ^8.5.0 + "@types/newrelic": ^7.0.1 + "@types/prettyjson": ^0.0.29 + "@types/uuid": ^8.3.0 + "@typescript-eslint/eslint-plugin": ^5.29.0 + aws-sdk: ^2.1158.0 + connect-busboy: ^1.0.0 + cors: ^2.8.5 + dayjs: ^1.11.3 + dotenv: ^8.2.0 + eslint: ^8.14.0 + eslint-plugin-prettier: ^4.0.0 + express: ^4.17.1 + express-winston: ^4.0.5 + helmet: ^4.3.1 + inversify: ^6.0.1 + inversify-express-utils: ^6.4.3 + ioredis: ^5.0.6 + jest: ^28.1.1 + jsonwebtoken: ^8.5.1 + newrelic: ^7.3.1 + nodemon: ^2.0.16 + prettyjson: ^1.2.1 + reflect-metadata: ^0.1.13 + ts-jest: ^28.0.1 + ts-node: ^10.4.0 + uuid: ^8.3.2 + winston: ^3.3.3 + languageName: unknown + linkType: soft + "@standardnotes/models@npm:^1.11.10": version: 1.11.10 resolution: "@standardnotes/models@npm:1.11.10" @@ -2051,14 +2153,14 @@ __metadata: languageName: node linkType: hard -"@standardnotes/sncrypto-common@npm:^1.8.1, @standardnotes/sncrypto-common@npm:^1.9.0": +"@standardnotes/sncrypto-common@npm:^1.3.0, @standardnotes/sncrypto-common@npm:^1.8.1, @standardnotes/sncrypto-common@npm:^1.9.0": version: 1.9.0 resolution: "@standardnotes/sncrypto-common@npm:1.9.0" checksum: 42252d71984b52756dff44ec3721961858e9f4227ca6555c0d60551852cb5f0a938b2b4969177c23c85d34e7f182369393f8b795afc65cff65b1c30b139f8f68 languageName: node 
linkType: hard -"@standardnotes/sncrypto-node@npm:^1.8.1": +"@standardnotes/sncrypto-node@npm:^1.3.0, @standardnotes/sncrypto-node@npm:^1.8.1": version: 1.8.3 resolution: "@standardnotes/sncrypto-node@npm:1.8.3" dependencies: @@ -2121,7 +2223,7 @@ __metadata: languageName: unknown linkType: soft -"@standardnotes/time@npm:^1.6.8, @standardnotes/time@npm:^1.7.0": +"@standardnotes/time@npm:^1.4.5, @standardnotes/time@npm:^1.6.8, @standardnotes/time@npm:^1.7.0": version: 1.7.0 resolution: "@standardnotes/time@npm:1.7.0" dependencies: @@ -2252,6 +2354,26 @@ __metadata: languageName: node linkType: hard +"@types/busboy@npm:*": + version: 1.5.0 + resolution: "@types/busboy@npm:1.5.0" + dependencies: + "@types/node": "*" + checksum: ffa7bf25c0395f6927526b7d97e70cd2df789e4ca0d231e41855fb08542fa236891ce457d83cc50cac6e5cef6be092ab80597070dcf1413f736462690a23e987 + languageName: node + linkType: hard + +"@types/connect-busboy@npm:^1.0.0": + version: 1.0.0 + resolution: "@types/connect-busboy@npm:1.0.0" + dependencies: + "@types/busboy": "*" + "@types/express": "*" + "@types/node": "*" + checksum: ccbf7bc42d2fd65aefabcba51247ccd6580633601092619dfacb026b7d3ffe4ab291087fa181fae614ce3201b0d130c51aadf5253c71ced7c79964b3c67d0bf8 + languageName: node + linkType: hard + "@types/connect@npm:*": version: 3.4.35 resolution: "@types/connect@npm:3.4.35" @@ -2288,7 +2410,7 @@ __metadata: languageName: node linkType: hard -"@types/express@npm:^4.17.11, @types/express@npm:^4.17.9": +"@types/express@npm:*, @types/express@npm:^4.17.11, @types/express@npm:^4.17.9": version: 4.17.13 resolution: "@types/express@npm:4.17.13" dependencies: @@ -2372,7 +2494,7 @@ __metadata: languageName: node linkType: hard -"@types/json-schema@npm:^7.0.9": +"@types/json-schema@npm:^7.0.7, @types/json-schema@npm:^7.0.9": version: 7.0.11 resolution: "@types/json-schema@npm:7.0.11" checksum: 527bddfe62db9012fccd7627794bd4c71beb77601861055d87e3ee464f2217c85fca7a4b56ae677478367bbd248dbde13553312b7d4dbc702a2f2bbf60c4018d @@ -2425,7 +2547,7 @@ __metadata: languageName: node linkType: hard -"@types/newrelic@npm:^7.0.2": +"@types/newrelic@npm:^7.0.1, @types/newrelic@npm:^7.0.2": version: 7.0.3 resolution: "@types/newrelic@npm:7.0.3" checksum: 31156f61c5cf6c22e3cd227966499d758e4af278a2b0097194578feee9579339473f7a08b07556530046a2f64ba449656a220a553116b475c4bbd8cca177dfd3 @@ -2546,6 +2668,28 @@ __metadata: languageName: node linkType: hard +"@typescript-eslint/eslint-plugin@npm:^4.14.0": + version: 4.33.0 + resolution: "@typescript-eslint/eslint-plugin@npm:4.33.0" + dependencies: + "@typescript-eslint/experimental-utils": 4.33.0 + "@typescript-eslint/scope-manager": 4.33.0 + debug: ^4.3.1 + functional-red-black-tree: ^1.0.1 + ignore: ^5.1.8 + regexpp: ^3.1.0 + semver: ^7.3.5 + tsutils: ^3.21.0 + peerDependencies: + "@typescript-eslint/parser": ^4.0.0 + eslint: ^5.0.0 || ^6.0.0 || ^7.0.0 + peerDependenciesMeta: + typescript: + optional: true + checksum: d74855d0a5ffe0b2f362ec02fcd9301d39a53fb4155b9bd0cb15a0a31d065143129ebf98df9d86af4b6f74de1d423a4c0d8c0095520844068117453afda5bc4f + languageName: node + linkType: hard + "@typescript-eslint/eslint-plugin@npm:^5.29.0": version: 5.29.0 resolution: "@typescript-eslint/eslint-plugin@npm:5.29.0" @@ -2569,6 +2713,39 @@ __metadata: languageName: node linkType: hard +"@typescript-eslint/experimental-utils@npm:4.33.0": + version: 4.33.0 + resolution: "@typescript-eslint/experimental-utils@npm:4.33.0" + dependencies: + "@types/json-schema": ^7.0.7 + "@typescript-eslint/scope-manager": 4.33.0 + 
"@typescript-eslint/types": 4.33.0 + "@typescript-eslint/typescript-estree": 4.33.0 + eslint-scope: ^5.1.1 + eslint-utils: ^3.0.0 + peerDependencies: + eslint: "*" + checksum: f859800ada0884f92db6856f24efcb1d073ac9883ddc2b1aa9339f392215487895bed8447ebce3741e8141bb32e545244abef62b73193ba9a8a0527c523aabae + languageName: node + linkType: hard + +"@typescript-eslint/parser@npm:^4.14.0": + version: 4.33.0 + resolution: "@typescript-eslint/parser@npm:4.33.0" + dependencies: + "@typescript-eslint/scope-manager": 4.33.0 + "@typescript-eslint/types": 4.33.0 + "@typescript-eslint/typescript-estree": 4.33.0 + debug: ^4.3.1 + peerDependencies: + eslint: ^5.0.0 || ^6.0.0 || ^7.0.0 + peerDependenciesMeta: + typescript: + optional: true + checksum: 102457eae1acd516211098fea081c8a2ed728522bbda7f5a557b6ef23d88970514f9a0f6285d53fca134d3d4d7d17822b5d5e12438d5918df4d1f89cc9e67d57 + languageName: node + linkType: hard + "@typescript-eslint/parser@npm:^5.29.0": version: 5.29.0 resolution: "@typescript-eslint/parser@npm:5.29.0" @@ -2586,6 +2763,16 @@ __metadata: languageName: node linkType: hard +"@typescript-eslint/scope-manager@npm:4.33.0": + version: 4.33.0 + resolution: "@typescript-eslint/scope-manager@npm:4.33.0" + dependencies: + "@typescript-eslint/types": 4.33.0 + "@typescript-eslint/visitor-keys": 4.33.0 + checksum: 9a25fb7ba7c725ea7227a24d315b0f6aacbad002e2549a049edf723c1d3615c22f5c301f0d7d615b377f2cdf2f3519d97e79af0c459de6ef8d2aaf0906dff13e + languageName: node + linkType: hard + "@typescript-eslint/scope-manager@npm:5.29.0": version: 5.29.0 resolution: "@typescript-eslint/scope-manager@npm:5.29.0" @@ -2612,6 +2799,13 @@ __metadata: languageName: node linkType: hard +"@typescript-eslint/types@npm:4.33.0": + version: 4.33.0 + resolution: "@typescript-eslint/types@npm:4.33.0" + checksum: 3baae1ca35872421b4eb60f5d3f3f32dc1d513f2ae0a67dee28c7d159fd7a43ed0d11a8a5a0f0c2d38507ffa036fc7c511cb0f18a5e8ac524b3ebde77390ec53 + languageName: node + linkType: hard + "@typescript-eslint/types@npm:5.29.0": version: 5.29.0 resolution: "@typescript-eslint/types@npm:5.29.0" @@ -2619,6 +2813,24 @@ __metadata: languageName: node linkType: hard +"@typescript-eslint/typescript-estree@npm:4.33.0": + version: 4.33.0 + resolution: "@typescript-eslint/typescript-estree@npm:4.33.0" + dependencies: + "@typescript-eslint/types": 4.33.0 + "@typescript-eslint/visitor-keys": 4.33.0 + debug: ^4.3.1 + globby: ^11.0.3 + is-glob: ^4.0.1 + semver: ^7.3.5 + tsutils: ^3.21.0 + peerDependenciesMeta: + typescript: + optional: true + checksum: 2566984390c76bd95f43240057215c068c69769e406e27aba41e9f21fd300074d6772e4983fa58fe61e80eb5550af1548d2e31e80550d92ba1d051bb00fe6f5c + languageName: node + linkType: hard + "@typescript-eslint/typescript-estree@npm:5.29.0": version: 5.29.0 resolution: "@typescript-eslint/typescript-estree@npm:5.29.0" @@ -2653,6 +2865,16 @@ __metadata: languageName: node linkType: hard +"@typescript-eslint/visitor-keys@npm:4.33.0": + version: 4.33.0 + resolution: "@typescript-eslint/visitor-keys@npm:4.33.0" + dependencies: + "@typescript-eslint/types": 4.33.0 + eslint-visitor-keys: ^2.0.0 + checksum: 59953e474ad4610c1aa23b2b1a964445e2c6201521da6367752f37939d854352bbfced5c04ea539274065e012b1337ba3ffa49c2647a240a4e87155378ba9873 + languageName: node + linkType: hard + "@typescript-eslint/visitor-keys@npm:5.29.0": version: 5.29.0 resolution: "@typescript-eslint/visitor-keys@npm:5.29.0" @@ -2731,6 +2953,13 @@ __metadata: languageName: node linkType: hard +"agent-base@npm:5": + version: 5.1.1 + resolution: "agent-base@npm:5.1.1" 
+ checksum: 61ae789f3019f1dc10e8cba6d3ae9826949299a4e54aaa1cfa2fa37c95a108e70e95423b963bb987d7891a703fd9a5c383a506f4901819f3ee56f3147c0aa8ab + languageName: node + linkType: hard + "agent-base@npm:6, agent-base@npm:^6.0.2": version: 6.0.2 resolution: "agent-base@npm:6.0.2" @@ -3303,6 +3532,15 @@ __metadata: languageName: node linkType: hard +"busboy@npm:^1.0.0": + version: 1.6.0 + resolution: "busboy@npm:1.6.0" + dependencies: + streamsearch: ^1.1.0 + checksum: 32801e2c0164e12106bf236291a00795c3c4e4b709ae02132883fe8478ba2ae23743b11c5735a0aae8afe65ac4b6ca4568b91f0d9fed1fdbc32ede824a73746e + languageName: node + linkType: hard + "byte-size@npm:^7.0.1": version: 7.0.1 resolution: "byte-size@npm:7.0.1" @@ -3440,7 +3678,7 @@ __metadata: languageName: node linkType: hard -"chalk@npm:^2.0.0": +"chalk@npm:^2.0.0, chalk@npm:^2.4.2": version: 2.4.2 resolution: "chalk@npm:2.4.2" dependencies: @@ -3705,7 +3943,7 @@ __metadata: languageName: node linkType: hard -"colors@npm:^1.1.2": +"colors@npm:1.4.0, colors@npm:^1.1.2": version: 1.4.0 resolution: "colors@npm:1.4.0" checksum: 98aa2c2418ad87dedf25d781be69dc5fc5908e279d9d30c34d8b702e586a0474605b3a189511482b9d5ed0d20c867515d22749537f7bc546256c6014f3ebdcec @@ -3785,6 +4023,15 @@ __metadata: languageName: node linkType: hard +"connect-busboy@npm:^1.0.0": + version: 1.0.0 + resolution: "connect-busboy@npm:1.0.0" + dependencies: + busboy: ^1.0.0 + checksum: e4a8cece06735e2c3e6ae4d49c61ef96ec28d14f559e9b6cfdea2c4a9be62dbe8f921d361278d426d11836cddf5b2ea32cd3b15870e34d9495f670896178ec1c + languageName: node + linkType: hard + "console-control-strings@npm:^1.1.0": version: 1.1.0 resolution: "console-control-strings@npm:1.1.0" @@ -3974,7 +4221,7 @@ __metadata: languageName: node linkType: hard -"cors@npm:2.8.5": +"cors@npm:2.8.5, cors@npm:^2.8.5": version: 2.8.5 resolution: "cors@npm:2.8.5" dependencies: @@ -4319,6 +4566,13 @@ __metadata: languageName: node linkType: hard +"dotenv@npm:^8.2.0": + version: 8.6.0 + resolution: "dotenv@npm:8.6.0" + checksum: 38e902c80b0666ab59e9310a3d24ed237029a7ce34d976796349765ac96b8d769f6df19090f1f471b77a25ca391971efde8a1ea63bb83111bd8bec8e5cc9b2cd + languageName: node + linkType: hard + "duplexer3@npm:^0.1.4": version: 0.1.4 resolution: "duplexer3@npm:0.1.4" @@ -4710,6 +4964,18 @@ __metadata: languageName: node linkType: hard +"express-winston@npm:^4.0.5": + version: 4.2.0 + resolution: "express-winston@npm:4.2.0" + dependencies: + chalk: ^2.4.2 + lodash: ^4.17.21 + peerDependencies: + winston: ">=3.x <4" + checksum: 029529107f0bb72363c87d83d8c05a997bcbb4f8431d47dd2ab7f61fa165a56747c1e2e3c8621cbacb0190798a9b5cb9975506dcdfd6e27036fab1d8d193ec09 + languageName: node + linkType: hard + "express@npm:4.17.1": version: 4.17.1 resolution: "express@npm:4.17.1" @@ -5314,7 +5580,7 @@ __metadata: languageName: node linkType: hard -"globby@npm:^11.1.0": +"globby@npm:^11.0.3, globby@npm:^11.1.0": version: 11.1.0 resolution: "globby@npm:11.1.0" dependencies: @@ -5430,6 +5696,13 @@ __metadata: languageName: node linkType: hard +"helmet@npm:^4.3.1": + version: 4.6.0 + resolution: "helmet@npm:4.6.0" + checksum: 139ad678d1cab207b043c206f50f6744eff2ef1f463e4626d36718b45b337485c77d10260ef9d89d292fa678da5153d86b08172b3b365cc8e680241015ed3a49 + languageName: node + linkType: hard + "highlight.js@npm:^10.7.1": version: 10.7.3 resolution: "highlight.js@npm:10.7.3" @@ -5544,6 +5817,16 @@ __metadata: languageName: node linkType: hard +"https-proxy-agent@npm:^4.0.0": + version: 4.0.0 + resolution: "https-proxy-agent@npm:4.0.0" + dependencies: + agent-base: 
5 + debug: 4 + checksum: 19471d5aae3e747b1c98b17556647e2a1362e68220c6b19585a8527498f32e62e03c41d2872d059d8720d56846bd7460a80ac06f876bccfa786468ff40dd5eef + languageName: node + linkType: hard + "https-proxy-agent@npm:^5.0.0": version: 5.0.1 resolution: "https-proxy-agent@npm:5.0.1" @@ -5618,7 +5901,7 @@ __metadata: languageName: node linkType: hard -"ignore@npm:^5.2.0": +"ignore@npm:^5.1.8, ignore@npm:^5.2.0": version: 5.2.0 resolution: "ignore@npm:5.2.0" checksum: 6b1f926792d614f64c6c83da3a1f9c83f6196c2839aa41e1e32dd7b8d174cef2e329d75caabb62cb61ce9dc432f75e67d07d122a037312db7caa73166a1bdb77 @@ -7188,7 +7471,7 @@ __metadata: languageName: node linkType: hard -"methods@npm:~1.1.2": +"methods@npm:^1.1.2, methods@npm:~1.1.2": version: 1.1.2 resolution: "methods@npm:1.1.2" checksum: 0917ff4041fa8e2f2fda5425a955fe16ca411591fbd123c0d722fcf02b73971ed6f764d85f0a6f547ce49ee0221ce2c19a5fa692157931cecb422984f1dcd13a @@ -7549,6 +7832,32 @@ __metadata: languageName: node linkType: hard +"newrelic@npm:^7.3.1": + version: 7.5.2 + resolution: "newrelic@npm:7.5.2" + dependencies: + "@grpc/grpc-js": ^1.2.11 + "@grpc/proto-loader": ^0.5.6 + "@newrelic/aws-sdk": ^3.1.0 + "@newrelic/koa": ^5.0.0 + "@newrelic/native-metrics": ^6.0.0 + "@newrelic/superagent": ^4.0.0 + "@tyriar/fibonacci-heap": ^2.0.7 + async: ^3.2.0 + concat-stream: ^2.0.0 + https-proxy-agent: ^4.0.0 + json-stringify-safe: ^5.0.0 + readable-stream: ^3.6.0 + semver: ^5.3.0 + dependenciesMeta: + "@newrelic/native-metrics": + optional: true + bin: + newrelic-naming-rules: bin/test-naming-rules.js + checksum: f6c67dbb7dfc265eaf46ece7bb5fc0fdab4cdb84010be724f52e5d903474d1c108a35f743a801c0f9ef2a3fa9d1c1c17a1b32bf091f3d4f1fcd323d8130a1e36 + languageName: node + linkType: hard + "newrelic@npm:^8.8.0": version: 8.14.1 resolution: "newrelic@npm:8.14.1" @@ -8437,6 +8746,18 @@ __metadata: languageName: node linkType: hard +"prettyjson@npm:^1.2.1": + version: 1.2.5 + resolution: "prettyjson@npm:1.2.5" + dependencies: + colors: 1.4.0 + minimist: ^1.2.0 + bin: + prettyjson: bin/prettyjson + checksum: e36e8ae4f77065160028fea33c6ae8e91936f86a4fd1751ceb19f88f336bc54e6f7d232ece38d4da3f7734e5c5e1fc3114a0d92da1987e7cf1515dcb29d447d1 + languageName: node + linkType: hard + "proc-log@npm:^2.0.0": version: 2.0.1 resolution: "proc-log@npm:2.0.1" @@ -8838,7 +9159,7 @@ __metadata: languageName: node linkType: hard -"regexpp@npm:^3.2.0": +"regexpp@npm:^3.1.0, regexpp@npm:^3.2.0": version: 3.2.0 resolution: "regexpp@npm:3.2.0" checksum: a78dc5c7158ad9ddcfe01aa9144f46e192ddbfa7b263895a70a5c6c73edd9ce85faf7c0430e59ac38839e1734e275b9c3de5c57ee3ab6edc0e0b1bdebefccef8 @@ -9469,6 +9790,13 @@ __metadata: languageName: node linkType: hard +"streamsearch@npm:^1.1.0": + version: 1.1.0 + resolution: "streamsearch@npm:1.1.0" + checksum: 1cce16cea8405d7a233d32ca5e00a00169cc0e19fbc02aa839959985f267335d435c07f96e5e0edd0eadc6d39c98d5435fb5bbbdefc62c41834eadc5622ad942 + languageName: node + linkType: hard + "strict-uri-encode@npm:^2.0.0": version: 2.0.0 resolution: "strict-uri-encode@npm:2.0.0" @@ -9877,7 +10205,7 @@ __metadata: languageName: node linkType: hard -"ts-node@npm:^10.8.0, ts-node@npm:^10.8.1": +"ts-node@npm:^10.4.0, ts-node@npm:^10.8.0, ts-node@npm:^10.8.1": version: 10.8.1 resolution: "ts-node@npm:10.8.1" dependencies: @@ -10476,7 +10804,7 @@ __metadata: languageName: node linkType: hard -"winston@npm:^3.6.0": +"winston@npm:^3.3.3, winston@npm:^3.6.0": version: 3.7.2 resolution: "winston@npm:3.7.2" dependencies: