Compare commits

..

7 Commits

Author  SHA1  Message  Date
Karol Sójko  55f8f65c3f  wip  2022-12-13 13:27:19 +01:00
Karol Sójko  3953dbc6b4  feat(settings): add unsubscribe token for muting emails  2022-12-13 13:18:15 +01:00
Karol Sójko  0b205287d1  fix(settings): binding for controller  2022-12-13 13:18:15 +01:00
Karol Sójko  4f0bc57b1a  feat(settings): add controller for muting all emails  2022-12-13 13:18:15 +01:00
Karol Sójko  7d43316597  feat(settings): add mutting all emails use case  2022-12-13 13:18:15 +01:00
Karol Sójko  65d31f011b  chore: remove settings dependency from all packages  2022-12-13 13:18:13 +01:00
Karol Sójko  80dd6efae3  feat(settings): replace setting with a domain entity  2022-12-13 13:13:28 +01:00
4814 changed files with 153384 additions and 96381 deletions


@@ -1,13 +0,0 @@
dist
coverage
.yarn/*
!.yarn/cache
!.yarn/patches
!.yarn/plugins
!.yarn/releases
!.yarn/unplugged
!.yarn/sdks
!.yarn/versions
data/*


@@ -1,26 +1,6 @@
######
# DB #
######
DB_HOST=db
DB_PORT=3306
DB_USERNAME=std_notes_user
DB_PASSWORD=changeme123
DB_DATABASE=standard_notes_db
DB_TYPE=mysql
#########
# CACHE #
#########
REDIS_PORT=6379
REDIS_HOST=cache
CACHE_TYPE=redis
########
# KEYS #
########
AUTH_JWT_SECRET=
AUTH_SERVER_ENCRYPTION_SERVER_KEY=
VALET_TOKEN_SECRET=


@@ -1,5 +1,6 @@
{
"root": true,
"parser": "@typescript-eslint/parser",
"parserOptions": {
"project": "./tsconfig.json"
},

30
.github/ci.env vendored

@@ -1,30 +0,0 @@
PUBLIC_FILES_SERVER_URL=http://localhost:3125
DB_HOST=db
DB_USERNAME=std_notes_user
DB_PASSWORD=changeme123
DB_DATABASE=standard_notes_db
DB_PORT=3306
DB_DEBUG_LEVEL=all
DB_SQLITE_DATABASE_PATH=standard_notes_db
REDIS_PORT=6379
REDIS_HOST=cache
AUTH_SERVER_ACCESS_TOKEN_AGE=4
AUTH_SERVER_REFRESH_TOKEN_AGE=10
AUTH_SERVER_EPHEMERAL_SESSION_AGE=300
SYNCING_SERVER_REVISIONS_FREQUENCY=2
AUTH_SERVER_LOG_LEVEL=debug
SYNCING_SERVER_LOG_LEVEL=debug
FILES_SERVER_LOG_LEVEL=debug
REVISIONS_SERVER_LOG_LEVEL=debug
API_GATEWAY_LOG_LEVEL=debug
MYSQL_DATABASE=standard_notes_db
MYSQL_USER=std_notes_user
MYSQL_PASSWORD=changeme123
MYSQL_ROOT_PASSWORD=changeme123
AUTH_JWT_SECRET=f95259c5e441f5a4646d76422cfb3df4c4488842901aa50b6c51b8be2e0040e9
AUTH_SERVER_ENCRYPTION_SERVER_KEY=1087415dfde3093797f9a7ca93a49e7d7aa1861735eb0d32aae9c303b8c3d060
VALET_TOKEN_SECRET=4b886819ebe1e908077c6cae96311b48a8416bd60cc91c03060e15bdf6b30d1f
SYNCING_SERVER_CONTENT_SIZE_TRANSFER_LIMIT=1000000


@@ -9,134 +9,86 @@ updates:
directory: "/" directory: "/"
schedule: schedule:
interval: "daily" interval: "daily"
allow:
- dependency-type: "direct"
- package-ecosystem: "npm" - package-ecosystem: "npm"
directory: "/packages/analytics" directory: "/packages/analytics"
schedule: schedule:
interval: "daily" interval: "daily"
allow:
- dependency-type: "direct"
- package-ecosystem: "npm" - package-ecosystem: "npm"
directory: "/packages/api-gateway" directory: "/packages/api-gateway"
schedule: schedule:
interval: "daily" interval: "daily"
allow:
- dependency-type: "direct"
- package-ecosystem: "npm" - package-ecosystem: "npm"
directory: "/packages/auth" directory: "/packages/auth"
schedule: schedule:
interval: "daily" interval: "daily"
allow:
- dependency-type: "direct"
- package-ecosystem: "npm" - package-ecosystem: "npm"
directory: "/packages/common" directory: "/packages/common"
schedule: schedule:
interval: "daily" interval: "daily"
allow:
- dependency-type: "direct"
- package-ecosystem: "npm"
directory: "/packages/domain-core"
schedule:
interval: "daily"
allow:
- dependency-type: "direct"
- package-ecosystem: "npm" - package-ecosystem: "npm"
directory: "/packages/domain-events" directory: "/packages/domain-events"
schedule: schedule:
interval: "daily" interval: "daily"
allow:
- dependency-type: "direct"
- package-ecosystem: "npm" - package-ecosystem: "npm"
directory: "/packages/domain-events-infra" directory: "/packages/domain-events-infra"
schedule: schedule:
interval: "daily" interval: "daily"
allow:
- dependency-type: "direct" - package-ecosystem: "npm"
directory: "/packages/event-store"
schedule:
interval: "daily"
- package-ecosystem: "npm" - package-ecosystem: "npm"
directory: "/packages/files" directory: "/packages/files"
schedule: schedule:
interval: "daily" interval: "daily"
allow:
- dependency-type: "direct"
- package-ecosystem: "npm"
directory: "/packages/home-server"
schedule:
interval: "daily"
allow:
- dependency-type: "direct"
- package-ecosystem: "npm" - package-ecosystem: "npm"
directory: "/packages/predicates" directory: "/packages/predicates"
schedule: schedule:
interval: "daily" interval: "daily"
allow:
- dependency-type: "direct"
- package-ecosystem: "npm"
directory: "/packages/revisions"
schedule:
interval: "daily"
allow:
- dependency-type: "direct"
- package-ecosystem: "npm" - package-ecosystem: "npm"
directory: "/packages/scheduler" directory: "/packages/scheduler"
schedule: schedule:
interval: "daily" interval: "daily"
allow:
- dependency-type: "direct"
- package-ecosystem: "npm" - package-ecosystem: "npm"
directory: "/packages/security" directory: "/packages/security"
schedule: schedule:
interval: "daily" interval: "daily"
allow:
- dependency-type: "direct"
- package-ecosystem: "npm" - package-ecosystem: "npm"
directory: "/packages/settings" directory: "/packages/settings"
schedule: schedule:
interval: "daily" interval: "daily"
allow:
- dependency-type: "direct"
- package-ecosystem: "npm" - package-ecosystem: "npm"
directory: "/packages/sncrypto-node" directory: "/packages/sncrypto-node"
schedule: schedule:
interval: "daily" interval: "daily"
allow:
- dependency-type: "direct"
- package-ecosystem: "npm" - package-ecosystem: "npm"
directory: "/packages/syncing-server" directory: "/packages/syncing-server"
schedule: schedule:
interval: "daily" interval: "daily"
allow:
- dependency-type: "direct"
- package-ecosystem: "npm" - package-ecosystem: "npm"
directory: "/packages/time" directory: "/packages/time"
schedule: schedule:
interval: "daily" interval: "daily"
allow:
- dependency-type: "direct"
- package-ecosystem: "npm" - package-ecosystem: "npm"
directory: "/packages/websockets" directory: "/packages/workspace"
schedule: schedule:
interval: "daily" interval: "daily"
allow:
- dependency-type: "direct"
- package-ecosystem: "github-actions" - package-ecosystem: "github-actions"
directory: "/" directory: "/"


@@ -17,6 +17,23 @@ jobs:
with:
service_name: analytics
workspace_name: "@standardnotes/analytics"
e2e_tag_parameter_name: analytics_image_tag
deploy_web: false
package_path: packages/analytics
secrets: inherit
newrelic:
needs: call_server_application_workflow
runs-on: ubuntu-latest
steps:
- name: Create New Relic deployment marker for Worker
uses: newrelic/deployment-marker-action@v1
with:
accountId: ${{ secrets.NEW_RELIC_ACCOUNT_ID }}
apiKey: ${{ secrets.NEW_RELIC_API_KEY }}
applicationId: ${{ secrets.NEW_RELIC_APPLICATION_ID_ANALYTICS_WORKER_PROD }}
revision: "${{ github.sha }}"
description: "Automated Deployment via Github Actions"
user: "${{ github.actor }}"


@@ -17,6 +17,21 @@ jobs:
with:
service_name: api-gateway
workspace_name: "@standardnotes/api-gateway"
e2e_tag_parameter_name: api_gateway_image_tag
deploy_worker: false
package_path: packages/api-gateway
secrets: inherit
newrelic:
needs: call_server_application_workflow
runs-on: ubuntu-latest
steps:
- name: Create New Relic deployment marker for Web
uses: newrelic/deployment-marker-action@v1
with:
accountId: ${{ secrets.NEW_RELIC_ACCOUNT_ID }}
apiKey: ${{ secrets.NEW_RELIC_API_KEY }}
applicationId: ${{ secrets.NEW_RELIC_APPLICATION_ID_API_GATEWAY_WEB_PROD }}
revision: "${{ github.sha }}"
description: "Automated Deployment via Github Actions"
user: "${{ github.actor }}"


@@ -17,5 +17,30 @@ jobs:
with:
service_name: auth
workspace_name: "@standardnotes/auth-server"
e2e_tag_parameter_name: auth_image_tag
package_path: packages/auth
secrets: inherit
newrelic:
needs: call_server_application_workflow
runs-on: ubuntu-latest
steps:
- name: Create New Relic deployment marker for Web
uses: newrelic/deployment-marker-action@v1
with:
accountId: ${{ secrets.NEW_RELIC_ACCOUNT_ID }}
apiKey: ${{ secrets.NEW_RELIC_API_KEY }}
applicationId: ${{ secrets.NEW_RELIC_APPLICATION_ID_AUTH_WEB_PROD }}
revision: "${{ github.sha }}"
description: "Automated Deployment via Github Actions"
user: "${{ github.actor }}"
- name: Create New Relic deployment marker for Worker
uses: newrelic/deployment-marker-action@v1
with:
accountId: ${{ secrets.NEW_RELIC_ACCOUNT_ID }}
apiKey: ${{ secrets.NEW_RELIC_API_KEY }}
applicationId: ${{ secrets.NEW_RELIC_APPLICATION_ID_AUTH_WORKER_PROD }}
revision: "${{ github.sha }}"
description: "Automated Deployment via Github Actions"
user: "${{ github.actor }}"


@@ -24,7 +24,7 @@ jobs:
steps:
- name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@v3
uses: aws-actions/configure-aws-credentials@v1
with:
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
@@ -50,6 +50,6 @@ jobs:
uses: aws-actions/amazon-ecs-deploy-task-definition@v1
with:
task-definition: ${{ steps.task-def-prod.outputs.task-definition }}
service: ${{ inputs.service_name }}
service: ${{ inputs.service_name }}-prod
cluster: prod
wait-for-service-stability: true


@@ -6,6 +6,9 @@ on:
service_name:
required: true
type: string
bundle_dir:
required: true
type: string
package_path:
required: true
type: string
@@ -27,11 +30,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: '3.11'
- name: Cache build
id: cache-build
@@ -39,6 +38,7 @@ jobs:
with:
path: |
packages/**/dist
${{ inputs.bundle_dir }}
key: ${{ runner.os }}-${{ inputs.service_name }}-build-${{ github.sha }}
- name: Set up Node
@@ -47,21 +47,22 @@ jobs:
registry-url: 'https://registry.npmjs.org'
node-version-file: '.nvmrc'
- name: Install dependencies
run: yarn install --immutable
- name: Build
if: steps.cache-build.outputs.cache-hit != 'true'
run: yarn build
run: yarn build ${{ inputs.package_path }}
- name: Bundle
if: steps.cache-build.outputs.cache-hit != 'true'
run: yarn workspace ${{ inputs.workspace_name }} bundle --no-compress --output-directory ${{ inputs.bundle_dir }}
- name: Login to Docker Hub
uses: docker/login-action@v3
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@v3
uses: aws-actions/configure-aws-credentials@v1
with:
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
@@ -69,7 +70,7 @@ jobs:
- name: Login to Amazon ECR
id: login-ecr
uses: aws-actions/amazon-ecr-login@v2
uses: aws-actions/amazon-ecr-login@v1
- name: Set up QEMU
uses: docker/setup-qemu-action@master
@@ -81,11 +82,11 @@ jobs:
uses: docker/setup-buildx-action@master
- name: Publish Docker image
uses: docker/build-push-action@v4
uses: docker/build-push-action@v3
with:
builder: ${{ steps.buildx.outputs.name }}
context: .
context: ${{ inputs.bundle_dir }}
file: ${{ inputs.package_path }}/Dockerfile
file: ${{ inputs.bundle_dir }}/${{ inputs.package_path }}/Dockerfile
platforms: linux/amd64,linux/arm64
push: true
tags: |


@@ -1,26 +0,0 @@
name: Reusable Run E2E Test Suite Workflow
on:
workflow_call:
inputs:
snjs_image_tag:
type: string
default: latest
description: The Docker image tag used for SNJS container
suite:
type: string
default: all
description: The test suite to run
jobs:
e2e-self-hosted:
uses: standardnotes/server/.github/workflows/e2e-self-hosted.yml@main
with:
snjs_image_tag: ${{ inputs.snjs_image_tag }}
suite: ${{ inputs.suite }}
e2e-home-server:
uses: standardnotes/server/.github/workflows/e2e-home-server.yml@main
with:
snjs_image_tag: ${{ inputs.snjs_image_tag }}
suite: ${{ inputs.suite }}


@@ -1,47 +0,0 @@
name: Reusable Publish Docker Self Hosting Image Workflow
on:
workflow_call:
secrets:
DOCKER_USERNAME:
required: true
DOCKER_PASSWORD:
required: true
AWS_ACCESS_KEY_ID:
required: true
AWS_SECRET_ACCESS_KEY:
required: true
jobs:
publish:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Login to Docker Hub
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Set up QEMU
uses: docker/setup-qemu-action@master
with:
platforms: all
- name: Set up Docker Buildx
id: buildx
uses: docker/setup-buildx-action@master
- name: Publish Docker image
uses: docker/build-push-action@v4
with:
builder: ${{ steps.buildx.outputs.name }}
context: .
file: Dockerfile
platforms: linux/amd64,linux/arm64
push: true
tags: |
standardnotes/server:latest
standardnotes/server:${{ github.sha }}


@@ -17,6 +17,9 @@ on:
required: false
default: true
type: boolean
e2e_tag_parameter_name:
required: false
type: string
package_path:
required: true
type: string
@@ -33,11 +36,177 @@ on:
required: true
jobs:
build:
runs-on: ubuntu-latest
outputs:
temp_dir: ${{ steps.bundle-dir.outputs.temp_dir }}
steps:
- uses: actions/checkout@v3
- name: Create Bundle Dir
id: bundle-dir
run: echo "temp_dir=$(mktemp -d -t ${{ inputs.service_name }}-${{ github.sha }}-XXXXXXX)" >> $GITHUB_OUTPUT
- name: Cache build
id: cache-build
uses: actions/cache@v3
with:
path: |
packages/**/dist
${{ steps.bundle-dir.outputs.temp_dir }}
key: ${{ runner.os }}-${{ inputs.service_name }}-build-${{ github.sha }}
- name: Set up Node
uses: actions/setup-node@v3
with:
registry-url: 'https://registry.npmjs.org'
node-version-file: '.nvmrc'
- name: Build
run: yarn build ${{ inputs.package_path }}
- name: Bundle
run: yarn workspace ${{ inputs.workspace_name }} bundle --no-compress --output-directory ${{ steps.bundle-dir.outputs.temp_dir }}
lint:
runs-on: ubuntu-latest
needs: build
steps:
- uses: actions/checkout@v3
- name: Cache build
id: cache-build
uses: actions/cache@v3
with:
path: |
packages/**/dist
${{ needs.build.outputs.temp_dir }}
key: ${{ runner.os }}-${{ inputs.service_name }}-build-${{ github.sha }}
- name: Set up Node
uses: actions/setup-node@v3
with:
registry-url: 'https://registry.npmjs.org'
node-version-file: '.nvmrc'
- name: Build
if: steps.cache-build.outputs.cache-hit != 'true'
run: yarn build ${{ inputs.package_path }}
- name: Lint
run: yarn lint:${{ inputs.service_name }}
test:
runs-on: ubuntu-latest
needs: build
steps:
- uses: actions/checkout@v3
- name: Cache build
id: cache-build
uses: actions/cache@v3
with:
path: |
packages/**/dist
${{ needs.build.outputs.temp_dir }}
key: ${{ runner.os }}-${{ inputs.service_name }}-build-${{ github.sha }}
- name: Set up Node
uses: actions/setup-node@v3
with:
registry-url: 'https://registry.npmjs.org'
node-version-file: '.nvmrc'
- name: Build
if: steps.cache-build.outputs.cache-hit != 'true'
run: yarn build ${{ inputs.package_path }}
- name: Test
run: yarn test ${{ inputs.package_path }}
e2e:
runs-on: ubuntu-latest
needs: build
steps:
- uses: actions/checkout@v3
- name: Cache build
id: cache-build
uses: actions/cache@v3
with:
path: |
packages/**/dist
${{ needs.build.outputs.temp_dir }}
key: ${{ runner.os }}-${{ inputs.service_name }}-build-${{ github.sha }}
- name: Set up Node
uses: actions/setup-node@v3
with:
registry-url: 'https://registry.npmjs.org'
node-version-file: '.nvmrc'
- name: Build
if: steps.cache-build.outputs.cache-hit != 'true'
run: yarn build ${{ inputs.package_path }}
- name: Bundle
if: steps.cache-build.outputs.cache-hit != 'true'
run: yarn workspace ${{ inputs.workspace_name }} bundle --no-compress --output-directory ${{ needs.build.outputs.temp_dir }}
- name: Login to Docker Hub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Set up QEMU
uses: docker/setup-qemu-action@master
with:
platforms: all
- name: Set up Docker Buildx
id: buildx
uses: docker/setup-buildx-action@master
- name: Publish Docker image for E2E testing
uses: docker/build-push-action@v3
with:
builder: ${{ steps.buildx.outputs.name }}
context: ${{ needs.build.outputs.temp_dir }}
file: ${{ needs.build.outputs.temp_dir }}/${{ inputs.package_path }}/Dockerfile
platforms: linux/amd64,linux/arm64
push: true
tags: standardnotes/${{ inputs.service_name }}:${{ github.sha }}
- name: Run E2E test suite
uses: convictional/trigger-workflow-and-wait@master
with:
owner: standardnotes
repo: e2e
github_token: ${{ secrets.CI_PAT_TOKEN }}
workflow_file_name: testing-with-stable-client.yml
wait_interval: 30
client_payload: '{"${{ inputs.e2e_tag_parameter_name }}": "${{ github.sha }}"}'
propagate_failure: true
trigger_workflow: true
wait_workflow: true
publish:
needs: [ build, test, lint, e2e ]
name: Publish Docker Image
uses: standardnotes/server/.github/workflows/common-docker-image.yml@main
with:
service_name: ${{ inputs.service_name }}
bundle_dir: ${{ needs.build.outputs.temp_dir }}
package_path: ${{ inputs.package_path }}
workspace_name: ${{ inputs.workspace_name }}
secrets: inherit


@@ -1,105 +0,0 @@
name: E2E Home Server Test Suite
on:
workflow_call:
inputs:
snjs_image_tag:
type: string
default: latest
description: The Docker image tag used for SNJS container
suite:
type: string
default: all
description: The test suite to run
jobs:
e2e-home-server:
name: (Home Server) E2E Test Suite
strategy:
fail-fast: false
matrix:
db_type: [mysql, sqlite]
cache_type: [redis, memory]
runs-on: ubuntu-latest
services:
snjs:
image: standardnotes/snjs:${{ inputs.snjs_image_tag }}
ports:
- 9001:9001
cache:
image: redis
ports:
- 6379:6379
db:
image: mysql
ports:
- 3306:3306
env:
MYSQL_ROOT_PASSWORD: root
MYSQL_DATABASE: standardnotes
MYSQL_USER: standardnotes
MYSQL_PASSWORD: standardnotes
steps:
- uses: actions/checkout@v4
- name: Set up Node
uses: actions/setup-node@v3
with:
registry-url: 'https://registry.npmjs.org'
node-version-file: '.nvmrc'
- name: Install Dependencies
run: yarn install --immutable
- name: Build
run: yarn build
- name: Copy dotenv file
run: cp packages/home-server/.env.sample packages/home-server/.env
- name: Fill in env variables
run: |
sed -i "s/JWT_SECRET=/JWT_SECRET=$(openssl rand -hex 32)/g" packages/home-server/.env
sed -i "s/AUTH_JWT_SECRET=/AUTH_JWT_SECRET=$(openssl rand -hex 32)/g" packages/home-server/.env
sed -i "s/ENCRYPTION_SERVER_KEY=/ENCRYPTION_SERVER_KEY=$(openssl rand -hex 32)/g" packages/home-server/.env
sed -i "s/PSEUDO_KEY_PARAMS_KEY=/PSEUDO_KEY_PARAMS_KEY=$(openssl rand -hex 32)/g" packages/home-server/.env
sed -i "s/VALET_TOKEN_SECRET=/VALET_TOKEN_SECRET=$(openssl rand -hex 32)/g" packages/home-server/.env
echo "ACCESS_TOKEN_AGE=4" >> packages/home-server/.env
echo "REFRESH_TOKEN_AGE=10" >> packages/home-server/.env
echo "REVISIONS_FREQUENCY=2" >> packages/home-server/.env
echo "CONTENT_SIZE_TRANSFER_LIMIT=1000000" >> packages/home-server/.env
echo "DB_HOST=localhost" >> packages/home-server/.env
echo "DB_PORT=3306" >> packages/home-server/.env
echo "DB_DATABASE=standardnotes" >> packages/home-server/.env
echo "DB_SQLITE_DATABASE_PATH=homeserver.db" >> packages/home-server/.env
echo "DB_USERNAME=standardnotes" >> packages/home-server/.env
echo "DB_PASSWORD=standardnotes" >> packages/home-server/.env
echo "DB_TYPE=${{ matrix.db_type }}" >> packages/home-server/.env
echo "DB_DEBUG_LEVEL=all" >> packages/home-server/.env
echo "REDIS_URL=redis://localhost:6379" >> packages/home-server/.env
echo "CACHE_TYPE=${{ matrix.cache_type }}" >> packages/home-server/.env
echo "FILES_SERVER_URL=http://localhost:3123" >> packages/home-server/.env
echo "E2E_TESTING=true" >> packages/home-server/.env
- name: Run Server
run: nohup yarn workspace @standardnotes/home-server start > logs/output.log 2>&1 &
env:
PORT: 3123
- name: Wait for server to start
run: for i in {1..30}; do curl -s http://localhost:3123/healthcheck && break || sleep 1; done
- name: Run E2E Test Suite
run: yarn dlx mocha-headless-chrome --timeout 3600000 -f http://localhost:9001/mocha/test.html?suite=${{ inputs.suite }}
- name: Archive failed run logs
if: ${{ failure() }}
uses: actions/upload-artifact@v3
with:
name: home-server-failure-logs-${{ inputs.suite }}-${{ matrix.db_type }}-${{ matrix.cache_type }}
retention-days: 5
path: |
logs/output.log


@@ -1,70 +0,0 @@
name: E2E Self Hosted Test Suite
on:
workflow_call:
inputs:
snjs_image_tag:
type: string
default: latest
description: The Docker image tag used for SNJS container
suite:
type: string
default: all
description: The test suite to run
jobs:
e2e:
name: (Self Hosting) E2E Test Suite
strategy:
fail-fast: false
matrix:
service_proxy_type: [http, grpc]
runs-on: ubuntu-latest
services:
snjs:
image: standardnotes/snjs:${{ inputs.snjs_image_tag }}
ports:
- 9001:9001
steps:
- uses: actions/checkout@v4
- name: Set up Node
uses: actions/setup-node@v3
with:
registry-url: 'https://registry.npmjs.org'
node-version-file: '.nvmrc'
- name: Install
run: yarn install --immutable
- name: Run Server
run: docker compose -f docker-compose.ci.yml up -d
env:
DB_TYPE: mysql
CACHE_TYPE: redis
SERVICE_PROXY_TYPE: ${{ matrix.service_proxy_type }}
- name: Output Server Logs to File
run: docker compose -f docker-compose.ci.yml logs -f > logs/docker-compose.log 2>&1 &
env:
DB_TYPE: mysql
CACHE_TYPE: redis
SERVICE_PROXY_TYPE: ${{ matrix.service_proxy_type }}
- name: Wait for server to start
run: docker/is-available.sh http://localhost:3123 $(pwd)/logs
- name: Run E2E Test Suite
run: yarn dlx mocha-headless-chrome --timeout 3600000 -f http://localhost:9001/mocha/test.html?suite=${{ inputs.suite }}
- name: Archive failed run logs
if: ${{ failure() }}
uses: actions/upload-artifact@v3
with:
name: self-hosted-failure-logs-${{ inputs.suite }}
retention-days: 5
path: |
logs/*.err
logs/*.log


@@ -1,33 +0,0 @@
name: E2E Test Suite
run-name: E2E ${{ inputs.suite }} Test Suite against ${{ inputs.ref_name }} by ${{ inputs.author }}
on:
schedule:
- cron: '0 */12 * * *'
workflow_dispatch:
inputs:
snjs_image_tag:
type: string
default: latest
description: The Docker image tag used for SNJS container
suite:
type: string
default: all
description: The test suite to run
author:
type: string
default: unknown
description: The author that triggered the workflow
ref_name:
type: string
default: unknown
description: The ref name from which the workflow was triggered
jobs:
e2e:
name: E2E
uses: standardnotes/server/.github/workflows/common-e2e.yml@main
with:
snjs_image_tag: ${{ inputs.snjs_image_tag || 'latest' }}
suite: ${{ inputs.suite || 'all' }}

39
.github/workflows/event-store.yml vendored Normal file

@@ -0,0 +1,39 @@
name: Event Store
concurrency:
group: event-store
cancel-in-progress: true
on:
push:
tags:
- '*standardnotes/event-store*'
workflow_dispatch:
jobs:
call_server_application_workflow:
name: Server Application
uses: standardnotes/server/.github/workflows/common-server-application.yml@main
with:
service_name: event-store
workspace_name: "@standardnotes/event-store"
e2e_tag_parameter_name: event_store_image_tag
deploy_web: false
package_path: packages/event-store
secrets: inherit
newrelic:
needs: call_server_application_workflow
runs-on: ubuntu-latest
steps:
- name: Create New Relic deployment marker for Worker
uses: newrelic/deployment-marker-action@v1
with:
accountId: ${{ secrets.NEW_RELIC_ACCOUNT_ID }}
apiKey: ${{ secrets.NEW_RELIC_API_KEY }}
applicationId: ${{ secrets.NEW_RELIC_APPLICATION_ID_EVENT_STORE_PROD }}
revision: "${{ github.sha }}"
description: "Automated Deployment via Github Actions"
user: "${{ github.actor }}"


@@ -17,5 +17,30 @@ jobs:
with:
service_name: files
workspace_name: "@standardnotes/files-server"
e2e_tag_parameter_name: files_image_tag
package_path: packages/files
secrets: inherit
newrelic:
needs: call_server_application_workflow
runs-on: ubuntu-latest
steps:
- name: Create New Relic deployment marker for Web
uses: newrelic/deployment-marker-action@v1
with:
accountId: ${{ secrets.NEW_RELIC_ACCOUNT_ID }}
apiKey: ${{ secrets.NEW_RELIC_API_KEY }}
applicationId: ${{ secrets.NEW_RELIC_APPLICATION_ID_FILES_WEB_PROD }}
revision: "${{ github.sha }}"
description: "Automated Deployment via Github Actions"
user: "${{ github.actor }}"
- name: Create New Relic deployment marker for Worker
uses: newrelic/deployment-marker-action@v1
with:
accountId: ${{ secrets.NEW_RELIC_ACCOUNT_ID }}
apiKey: ${{ secrets.NEW_RELIC_API_KEY }}
applicationId: ${{ secrets.NEW_RELIC_APPLICATION_ID_FILES_WORKER_PROD }}
revision: "${{ github.sha }}"
description: "Automated Deployment via Github Actions"
user: "${{ github.actor }}"


@@ -6,107 +6,19 @@ on:
- main
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Cache build
id: cache-build
uses: actions/cache@v3
with:
path: |
packages/**/dist
key: ${{ runner.os }}-build-${{ github.sha }}
- name: Set up Node
uses: actions/setup-node@v3
with:
registry-url: 'https://registry.npmjs.org'
node-version-file: '.nvmrc'
- name: Install
run: yarn install
- name: Build
run: yarn build
lint:
runs-on: ubuntu-latest
needs: build
steps:
- uses: actions/checkout@v4
- name: Cache build
id: cache-build
uses: actions/cache@v3
with:
path: |
packages/**/dist
key: ${{ runner.os }}-build-${{ github.sha }}
- name: Set up Node
uses: actions/setup-node@v3
with:
registry-url: 'https://registry.npmjs.org'
node-version-file: '.nvmrc'
- name: Install
run: yarn install
- name: Build
if: steps.cache-build.outputs.cache-hit != 'true'
run: yarn build
- name: Lint
run: yarn lint
test:
runs-on: ubuntu-latest
needs: build
steps:
- uses: actions/checkout@v4
- name: Cache build
id: cache-build
uses: actions/cache@v3
with:
path: |
packages/**/dist
key: ${{ runner.os }}-build-${{ github.sha }}
- name: Set up Node
uses: actions/setup-node@v3
with:
registry-url: 'https://registry.npmjs.org'
node-version-file: '.nvmrc'
- name: Install
run: yarn install
- name: Build
if: steps.cache-build.outputs.cache-hit != 'true'
run: yarn build
- name: Test
run: yarn test
e2e-base:
needs: build
name: E2E Base Suite
uses: standardnotes/server/.github/workflows/common-e2e.yml@main
with:
snjs_image_tag: 'latest'
suite: 'base'
e2e-vaults:
needs: build
name: E2E Vaults Suite
uses: standardnotes/server/.github/workflows/common-e2e.yml@main
with:
snjs_image_tag: 'latest'
suite: 'vaults'
test:
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v3
- name: Set up Node
uses: actions/setup-node@v3
with:
registry-url: 'https://registry.npmjs.org'
node-version-file: '.nvmrc'
- name: ESLint
run: yarn lint
- name: Build
run: yarn build
- name: Test
run: yarn test


@@ -4,149 +4,24 @@ on:
push:
branches: [ main ]
permissions:
id-token: write
jobs:
build:
release_and_publish:
if: contains(github.event.head_commit.message, 'chore(release)') == false
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Cache build
id: cache-build
uses: actions/cache@v3
with:
path: |
packages/**/dist
key: ${{ runner.os }}-build-${{ github.sha }}
- name: Set up Node
uses: actions/setup-node@v3
with:
registry-url: 'https://registry.npmjs.org'
node-version-file: '.nvmrc'
- name: Install
run: yarn install --immutable
- name: Build
run: yarn build
lint:
runs-on: ubuntu-latest
needs: build
steps:
- uses: actions/checkout@v4
- name: Cache build
id: cache-build
uses: actions/cache@v3
with:
path: |
packages/**/dist
key: ${{ runner.os }}-build-${{ github.sha }}
- name: Set up Node
uses: actions/setup-node@v3
with:
registry-url: 'https://registry.npmjs.org'
node-version-file: '.nvmrc'
- name: Install
run: yarn install --immutable
- name: Build
if: steps.cache-build.outputs.cache-hit != 'true'
run: yarn build
- name: Lint
run: yarn lint
test:
runs-on: ubuntu-latest
needs: build
steps:
- uses: actions/checkout@v4
- name: Cache build
id: cache-build
uses: actions/cache@v3
with:
path: |
packages/**/dist
key: ${{ runner.os }}-build-${{ github.sha }}
- name: Set up Node
uses: actions/setup-node@v3
with:
registry-url: 'https://registry.npmjs.org'
node-version-file: '.nvmrc'
- name: Install
run: yarn install --immutable
- name: Build
if: steps.cache-build.outputs.cache-hit != 'true'
run: yarn build
- name: Test
run: yarn test
e2e-base:
needs: build
name: E2E Base Suite
uses: standardnotes/server/.github/workflows/common-e2e.yml@main
with:
snjs_image_tag: 'latest'
suite: 'base'
e2e-vaults:
needs: build
name: E2E Vaults Suite
uses: standardnotes/server/.github/workflows/common-e2e.yml@main
with:
snjs_image_tag: 'latest'
suite: 'vaults'
publish-self-hosting:
needs: [ test, lint, e2e-base, e2e-vaults ]
name: Publish Self Hosting Docker Image
uses: standardnotes/server/.github/workflows/common-self-hosting.yml@main
secrets: inherit
publish-services:
needs: [ test, lint, e2e-base, e2e-vaults ]
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v4
uses: actions/checkout@v3
with:
token: ${{ secrets.CI_PAT_TOKEN }}
fetch-depth: 0
- name: Cache build
id: cache-build
uses: actions/cache@v3
with:
path: |
packages/**/dist
key: ${{ runner.os }}-build-${{ github.sha }}
- name: Setup git config
run: |
git config --global user.name "standardci"
git config --global user.email "ci@standardnotes.com"
- name: Import GPG key
uses: crazy-max/ghaction-import-gpg@v6
uses: crazy-max/ghaction-import-gpg@v5
with:
gpg_private_key: ${{ secrets.GPG_PRIVATE_KEY }}
passphrase: ${{ secrets.PASSPHRASE }}
@@ -159,11 +34,7 @@ jobs:
registry-url: 'https://registry.npmjs.org'
node-version-file: '.nvmrc'
- name: Install
- name: Build packages
run: yarn install --immutable
- name: Build
if: steps.cache-build.outputs.cache-hit != 'true'
run: yarn build
- name: Bump version
@@ -173,3 +44,4 @@ jobs:
run: yarn publish
env:
NODE_AUTH_TOKEN: ${{ secrets.CI_NPM_TOKEN }}


@@ -17,5 +17,30 @@ jobs:
with:
service_name: revisions
workspace_name: "@standardnotes/revisions-server"
e2e_tag_parameter_name: revisions_image_tag
package_path: packages/revisions
secrets: inherit
newrelic:
needs: call_server_application_workflow
runs-on: ubuntu-latest
steps:
- name: Create New Relic deployment marker for Web
uses: newrelic/deployment-marker-action@v1
with:
accountId: ${{ secrets.NEW_RELIC_ACCOUNT_ID }}
apiKey: ${{ secrets.NEW_RELIC_API_KEY }}
applicationId: ${{ secrets.NEW_RELIC_APPLICATION_ID_REVISIONS_WEB_PROD }}
revision: "${{ github.sha }}"
description: "Automated Deployment via Github Actions"
user: "${{ github.actor }}"
- name: Create New Relic deployment marker for Worker
uses: newrelic/deployment-marker-action@v1
with:
accountId: ${{ secrets.NEW_RELIC_ACCOUNT_ID }}
apiKey: ${{ secrets.NEW_RELIC_API_KEY }}
applicationId: ${{ secrets.NEW_RELIC_APPLICATION_ID_REVISIONS_WORKER_PROD }}
revision: "${{ github.sha }}"
description: "Automated Deployment via Github Actions"
user: "${{ github.actor }}"


@@ -17,6 +17,23 @@ jobs:
with:
service_name: scheduler
workspace_name: "@standardnotes/scheduler-server"
e2e_tag_parameter_name: scheduler_image_tag
deploy_web: false
package_path: packages/scheduler
secrets: inherit
newrelic:
needs: call_server_application_workflow
runs-on: ubuntu-latest
steps:
- name: Create New Relic deployment marker for Worker
uses: newrelic/deployment-marker-action@v1
with:
accountId: ${{ secrets.NEW_RELIC_ACCOUNT_ID }}
apiKey: ${{ secrets.NEW_RELIC_API_KEY }}
applicationId: ${{ secrets.NEW_RELIC_APPLICATION_ID_SCHEDULER_WORKER_PROD }}
revision: "${{ github.sha }}"
description: "Automated Deployment via Github Actions"
user: "${{ github.actor }}"


@@ -17,5 +17,30 @@ jobs:
with:
service_name: syncing-server-js
workspace_name: "@standardnotes/syncing-server"
e2e_tag_parameter_name: syncing_server_js_image_tag
package_path: packages/syncing-server
secrets: inherit
newrelic:
needs: call_server_application_workflow
runs-on: ubuntu-latest
steps:
- name: Create New Relic deployment marker for Web
uses: newrelic/deployment-marker-action@v1
with:
accountId: ${{ secrets.NEW_RELIC_ACCOUNT_ID }}
apiKey: ${{ secrets.NEW_RELIC_API_KEY }}
applicationId: ${{ secrets.NEW_RELIC_APPLICATION_ID_SYNCING_SERVER_WEB_PROD }}
revision: "${{ github.sha }}"
description: "Automated Deployment via Github Actions"
user: "${{ github.actor }}"
- name: Create New Relic deployment marker for Worker
uses: newrelic/deployment-marker-action@v1
with:
accountId: ${{ secrets.NEW_RELIC_ACCOUNT_ID }}
apiKey: ${{ secrets.NEW_RELIC_API_KEY }}
applicationId: ${{ secrets.NEW_RELIC_APPLICATION_ID_SYNCING_SERVER_WORKER_PROD }}
revision: "${{ github.sha }}"
description: "Automated Deployment via Github Actions"
user: "${{ github.actor }}"


@@ -17,5 +17,30 @@ jobs:
with:
service_name: websockets
workspace_name: "@standardnotes/websockets-server"
e2e_tag_parameter_name: websockets_image_tag
package_path: packages/websockets
secrets: inherit
newrelic:
needs: call_server_application_workflow
runs-on: ubuntu-latest
steps:
- name: Create New Relic deployment marker for Web
uses: newrelic/deployment-marker-action@v1
with:
accountId: ${{ secrets.NEW_RELIC_ACCOUNT_ID }}
apiKey: ${{ secrets.NEW_RELIC_API_KEY }}
applicationId: ${{ secrets.NEW_RELIC_APPLICATION_ID_WEBSOCKETS_WEB_PROD }}
revision: "${{ github.sha }}"
description: "Automated Deployment via Github Actions"
user: "${{ github.actor }}"
- name: Create New Relic deployment marker for Worker
uses: newrelic/deployment-marker-action@v1
with:
accountId: ${{ secrets.NEW_RELIC_ACCOUNT_ID }}
apiKey: ${{ secrets.NEW_RELIC_API_KEY }}
applicationId: ${{ secrets.NEW_RELIC_APPLICATION_ID_WEBSOCKETS_WORKER_PROD }}
revision: "${{ github.sha }}"
description: "Automated Deployment via Github Actions"
user: "${{ github.actor }}"

48
.github/workflows/workspace.yml vendored Normal file

@@ -0,0 +1,48 @@
name: Workspace Server
concurrency:
group: workspace
cancel-in-progress: true
on:
push:
tags:
- '*standardnotes/workspace-server*'
workflow_dispatch:
jobs:
call_server_application_workflow:
name: Server Application
uses: standardnotes/server/.github/workflows/common-server-application.yml@main
with:
service_name: workspace
workspace_name: "@standardnotes/workspace-server"
e2e_tag_parameter_name: workspace_image_tag
package_path: packages/workspace
secrets: inherit
newrelic:
needs: call_server_application_workflow
runs-on: ubuntu-latest
steps:
- name: Create New Relic deployment marker for Web
uses: newrelic/deployment-marker-action@v1
with:
accountId: ${{ secrets.NEW_RELIC_ACCOUNT_ID }}
apiKey: ${{ secrets.NEW_RELIC_API_KEY }}
applicationId: ${{ secrets.NEW_RELIC_APPLICATION_ID_WORKSPACE_WEB_PROD }}
revision: "${{ github.sha }}"
description: "Automated Deployment via Github Actions"
user: "${{ github.actor }}"
- name: Create New Relic deployment marker for Worker
uses: newrelic/deployment-marker-action@v1
with:
accountId: ${{ secrets.NEW_RELIC_ACCOUNT_ID }}
apiKey: ${{ secrets.NEW_RELIC_API_KEY }}
applicationId: ${{ secrets.NEW_RELIC_APPLICATION_ID_WORKSPACE_WORKER_PROD }}
revision: "${{ github.sha }}"
description: "Automated Deployment via Github Actions"
user: "${{ github.actor }}"

11
.gitignore vendored

@@ -4,26 +4,19 @@
dist
coverage
.env
newrelic_agent.log
.yarn/*
!.yarn/cache
!.yarn/patches
!.yarn/plugins
!.yarn/releases
!.yarn/unplugged
!.yarn/sdks
!.yarn/versions
yarn.build-error.log
packages/files/uploads/*
!packages/files/uploads/.gitkeep
data/*
!data/.gitkeep
packages/**/data/*
logs/*
!logs/.gitkeep
docker-compose.yml

2
.nvmrc

@@ -1 +1 @@
20.10.0
18.12.1

19051
.pnp.cjs generated

File diff suppressed because one or more lines are too long

273
.pnp.loader.mjs generated

@@ -1,31 +1,28 @@
import fs from 'fs';
import { URL as URL$1, fileURLToPath, pathToFileURL } from 'url'; import { URL as URL$1, fileURLToPath, pathToFileURL } from 'url';
import fs from 'fs';
import path from 'path'; import path from 'path';
import moduleExports, { Module } from 'module';
import { createHash } from 'crypto'; import { createHash } from 'crypto';
import { EOL } from 'os'; import { EOL } from 'os';
import moduleExports, { isBuiltin } from 'module';
import assert from 'assert'; import assert from 'assert';
const SAFE_TIME = 456789e3; const SAFE_TIME = 456789e3;
const PortablePath = { const PortablePath = {
root: `/`, root: `/`,
dot: `.`, dot: `.`
parent: `..`
}; };
const npath = Object.create(path); const npath = Object.create(path);
const ppath = Object.create(path.posix); const ppath = Object.create(path.posix);
npath.cwd = () => process.cwd(); npath.cwd = () => process.cwd();
ppath.cwd = process.platform === `win32` ? () => toPortablePath(process.cwd()) : process.cwd; ppath.cwd = () => toPortablePath(process.cwd());
if (process.platform === `win32`) { ppath.resolve = (...segments) => {
ppath.resolve = (...segments) => { if (segments.length > 0 && ppath.isAbsolute(segments[0])) {
if (segments.length > 0 && ppath.isAbsolute(segments[0])) { return path.posix.resolve(...segments);
return path.posix.resolve(...segments); } else {
} else { return path.posix.resolve(ppath.cwd(), ...segments);
return path.posix.resolve(ppath.cwd(), ...segments); }
} };
};
}
const contains = function(pathUtils, from, to) { const contains = function(pathUtils, from, to) {
from = pathUtils.normalize(from); from = pathUtils.normalize(from);
to = pathUtils.normalize(to); to = pathUtils.normalize(to);
@@ -39,13 +36,17 @@ const contains = function(pathUtils, from, to) {
return null; return null;
} }
}; };
npath.fromPortablePath = fromPortablePath;
npath.toPortablePath = toPortablePath;
npath.contains = (from, to) => contains(npath, from, to); npath.contains = (from, to) => contains(npath, from, to);
ppath.contains = (from, to) => contains(ppath, from, to); ppath.contains = (from, to) => contains(ppath, from, to);
const WINDOWS_PATH_REGEXP = /^([a-zA-Z]:.*)$/; const WINDOWS_PATH_REGEXP = /^([a-zA-Z]:.*)$/;
const UNC_WINDOWS_PATH_REGEXP = /^\/\/(\.\/)?(.*)$/; const UNC_WINDOWS_PATH_REGEXP = /^\/\/(\.\/)?(.*)$/;
const PORTABLE_PATH_REGEXP = /^\/([a-zA-Z]:.*)$/; const PORTABLE_PATH_REGEXP = /^\/([a-zA-Z]:.*)$/;
const UNC_PORTABLE_PATH_REGEXP = /^\/unc\/(\.dot\/)?(.*)$/; const UNC_PORTABLE_PATH_REGEXP = /^\/unc\/(\.dot\/)?(.*)$/;
function fromPortablePathWin32(p) { function fromPortablePath(p) {
if (process.platform !== `win32`)
return p;
let portablePathMatch, uncPortablePathMatch; let portablePathMatch, uncPortablePathMatch;
if (portablePathMatch = p.match(PORTABLE_PATH_REGEXP)) if (portablePathMatch = p.match(PORTABLE_PATH_REGEXP))
p = portablePathMatch[1]; p = portablePathMatch[1];
@@ -55,7 +56,9 @@ function fromPortablePathWin32(p) {
return p; return p;
return p.replace(/\//g, `\\`); return p.replace(/\//g, `\\`);
} }
function toPortablePathWin32(p) { function toPortablePath(p) {
if (process.platform !== `win32`)
return p;
p = p.replace(/\\/g, `/`); p = p.replace(/\\/g, `/`);
let windowsPathMatch, uncWindowsPathMatch; let windowsPathMatch, uncWindowsPathMatch;
if (windowsPathMatch = p.match(WINDOWS_PATH_REGEXP)) if (windowsPathMatch = p.match(WINDOWS_PATH_REGEXP))
@@ -64,10 +67,6 @@ function toPortablePathWin32(p) {
p = `/unc/${uncWindowsPathMatch[1] ? `.dot/` : ``}${uncWindowsPathMatch[2]}`; p = `/unc/${uncWindowsPathMatch[1] ? `.dot/` : ``}${uncWindowsPathMatch[2]}`;
return p; return p;
} }
const toPortablePath = process.platform === `win32` ? toPortablePathWin32 : (p) => p;
const fromPortablePath = process.platform === `win32` ? fromPortablePathWin32 : (p) => p;
npath.fromPortablePath = fromPortablePath;
npath.toPortablePath = toPortablePath;
function convertPath(targetPathUtils, sourcePath) { function convertPath(targetPathUtils, sourcePath) {
return targetPathUtils === npath ? fromPortablePath(sourcePath) : toPortablePath(sourcePath); return targetPathUtils === npath ? fromPortablePath(sourcePath) : toPortablePath(sourcePath);
} }
@@ -89,6 +88,7 @@ async function copyPromise(destinationFs, destination, sourceFs, source, opts) {
})); }));
} }
async function copyImpl(prelayout, postlayout, destinationFs, destination, sourceFs, source, opts) { async function copyImpl(prelayout, postlayout, destinationFs, destination, sourceFs, source, opts) {
var _a, _b, _c;
const destinationStat = opts.didParentExist ? await maybeLStat(destinationFs, destination) : null; const destinationStat = opts.didParentExist ? await maybeLStat(destinationFs, destination) : null;
const sourceStat = await sourceFs.lstatPromise(source); const sourceStat = await sourceFs.lstatPromise(source);
const { atime, mtime } = opts.stableTime ? { atime: defaultTime, mtime: defaultTime } : sourceStat; const { atime, mtime } = opts.stableTime ? { atime: defaultTime, mtime: defaultTime } : sourceStat;
@@ -114,8 +114,8 @@ async function copyImpl(prelayout, postlayout, destinationFs, destination, sourc
throw new Error(`Unsupported file type (${sourceStat.mode})`); throw new Error(`Unsupported file type (${sourceStat.mode})`);
} }
} }
if (opts.linkStrategy?.type !== `HardlinkFromIndex` || !sourceStat.isFile()) { if (((_a = opts.linkStrategy) == null ? void 0 : _a.type) !== `HardlinkFromIndex` || !sourceStat.isFile()) {
if (updated || destinationStat?.mtime?.getTime() !== mtime.getTime() || destinationStat?.atime?.getTime() !== atime.getTime()) { if (updated || ((_b = destinationStat == null ? void 0 : destinationStat.mtime) == null ? void 0 : _b.getTime()) !== mtime.getTime() || ((_c = destinationStat == null ? void 0 : destinationStat.atime) == null ? void 0 : _c.getTime()) !== atime.getTime()) {
postlayout.push(() => destinationFs.lutimesPromise(destination, atime, mtime)); postlayout.push(() => destinationFs.lutimesPromise(destination, atime, mtime));
updated = true; updated = true;
} }
@@ -185,7 +185,7 @@ async function copyFileViaIndex(prelayout, postlayout, destinationFs, destinatio
let indexStat = await maybeLStat(destinationFs, indexPath); let indexStat = await maybeLStat(destinationFs, indexPath);
if (destinationStat) { if (destinationStat) {
const isDestinationHardlinkedFromIndex = indexStat && destinationStat.dev === indexStat.dev && destinationStat.ino === indexStat.ino; const isDestinationHardlinkedFromIndex = indexStat && destinationStat.dev === indexStat.dev && destinationStat.ino === indexStat.ino;
const isIndexModified = indexStat?.mtimeMs !== defaultTimeMs; const isIndexModified = (indexStat == null ? void 0 : indexStat.mtimeMs) !== defaultTimeMs;
if (isDestinationHardlinkedFromIndex) { if (isDestinationHardlinkedFromIndex) {
if (isIndexModified && linkStrategy.autoRepair) { if (isIndexModified && linkStrategy.autoRepair) {
atomicBehavior = 0 /* Lock */; atomicBehavior = 0 /* Lock */;
@@ -255,7 +255,8 @@ async function copyFileDirect(prelayout, postlayout, destinationFs, destination,
return true; return true;
} }
async function copyFile(prelayout, postlayout, destinationFs, destination, destinationStat, sourceFs, source, sourceStat, opts) { async function copyFile(prelayout, postlayout, destinationFs, destination, destinationStat, sourceFs, source, sourceStat, opts) {
if (opts.linkStrategy?.type === `HardlinkFromIndex`) { var _a;
if (((_a = opts.linkStrategy) == null ? void 0 : _a.type) === `HardlinkFromIndex`) {
return copyFileViaIndex(prelayout, postlayout, destinationFs, destination, destinationStat, sourceFs, source, sourceStat, opts, opts.linkStrategy); return copyFileViaIndex(prelayout, postlayout, destinationFs, destination, destinationStat, sourceFs, source, sourceStat, opts, opts.linkStrategy);
} else { } else {
return copyFileDirect(prelayout, postlayout, destinationFs, destination, destinationStat, sourceFs, source, sourceStat, opts); return copyFileDirect(prelayout, postlayout, destinationFs, destination, destinationStat, sourceFs, source, sourceStat, opts);
@@ -385,7 +386,7 @@ class FakeFS {
throw error; throw error;
} }
} }
createdDirectory ??= subPath; createdDirectory ?? (createdDirectory = subPath);
if (chmod != null) if (chmod != null)
await this.chmodPromise(subPath, chmod); await this.chmodPromise(subPath, chmod);
if (utimes != null) { if (utimes != null) {
@@ -416,7 +417,7 @@ class FakeFS {
throw error; throw error;
} }
} }
createdDirectory ??= subPath; createdDirectory ?? (createdDirectory = subPath);
if (chmod != null) if (chmod != null)
this.chmodSync(subPath, chmod); this.chmodSync(subPath, chmod);
if (utimes != null) { if (utimes != null) {
@@ -611,14 +612,12 @@ class FakeFS {
throw error; throw error;
} }
} }
async writeJsonPromise(p, data, { compact = false } = {}) { async writeJsonPromise(p, data) {
const space = compact ? 0 : 2; return await this.writeFilePromise(p, `${JSON.stringify(data, null, 2)}
return await this.writeFilePromise(p, `${JSON.stringify(data, null, space)}
`); `);
} }
writeJsonSync(p, data, { compact = false } = {}) { writeJsonSync(p, data) {
const space = compact ? 0 : 2; return this.writeFileSync(p, `${JSON.stringify(data, null, 2)}
return this.writeFileSync(p, `${JSON.stringify(data, null, space)}
`); `);
} }
async preserveTimePromise(p, cb) { async preserveTimePromise(p, cb) {
@@ -852,7 +851,7 @@ class ProxiedFS extends FakeFS {
readFileSync(p, encoding) { readFileSync(p, encoding) {
return this.baseFs.readFileSync(this.fsMapToBase(p), encoding); return this.baseFs.readFileSync(this.fsMapToBase(p), encoding);
} }
readdirPromise(p, opts) { async readdirPromise(p, opts) {
return this.baseFs.readdirPromise(this.mapToBase(p), opts); return this.baseFs.readdirPromise(this.mapToBase(p), opts);
} }
readdirSync(p, opts) { readdirSync(p, opts) {
@@ -902,12 +901,6 @@ class ProxiedFS extends FakeFS {
} }
} }
function direntToPortable(dirent) {
const portableDirent = dirent;
if (typeof dirent.path === `string`)
portableDirent.path = npath.toPortablePath(dirent.path);
return portableDirent;
}
class NodeFS extends BasePortableFakeFS { class NodeFS extends BasePortableFakeFS {
constructor(realFs = fs) { constructor(realFs = fs) {
super(); super();
@@ -938,24 +931,12 @@ class NodeFS extends BasePortableFakeFS {
this.realFs.opendir(npath.fromPortablePath(p), this.makeCallback(resolve, reject)); this.realFs.opendir(npath.fromPortablePath(p), this.makeCallback(resolve, reject));
} }
}).then((dir) => { }).then((dir) => {
const dirWithFixedPath = dir; return Object.defineProperty(dir, `path`, { value: p, configurable: true, writable: true });
Object.defineProperty(dirWithFixedPath, `path`, {
value: p,
configurable: true,
writable: true
});
return dirWithFixedPath;
}); });
} }
opendirSync(p, opts) { opendirSync(p, opts) {
const dir = typeof opts !== `undefined` ? this.realFs.opendirSync(npath.fromPortablePath(p), opts) : this.realFs.opendirSync(npath.fromPortablePath(p)); const dir = typeof opts !== `undefined` ? this.realFs.opendirSync(npath.fromPortablePath(p), opts) : this.realFs.opendirSync(npath.fromPortablePath(p));
const dirWithFixedPath = dir; return Object.defineProperty(dir, `path`, { value: p, configurable: true, writable: true });
Object.defineProperty(dirWithFixedPath, `path`, {
value: p,
configurable: true,
writable: true
});
return dirWithFixedPath;
} }
async readPromise(fd, buffer, offset = 0, length = 0, position = -1) { async readPromise(fd, buffer, offset = 0, length = 0, position = -1) {
return await new Promise((resolve, reject) => { return await new Promise((resolve, reject) => {
@@ -1233,32 +1214,16 @@ class NodeFS extends BasePortableFakeFS {
} }
async readdirPromise(p, opts) { async readdirPromise(p, opts) {
return await new Promise((resolve, reject) => { return await new Promise((resolve, reject) => {
if (opts) { if (opts == null ? void 0 : opts.withFileTypes) {
if (opts.recursive && process.platform === `win32`) { this.realFs.readdir(npath.fromPortablePath(p), { withFileTypes: true }, this.makeCallback(resolve, reject));
if (opts.withFileTypes) {
this.realFs.readdir(npath.fromPortablePath(p), opts, this.makeCallback((results) => resolve(results.map(direntToPortable)), reject));
} else {
this.realFs.readdir(npath.fromPortablePath(p), opts, this.makeCallback((results) => resolve(results.map(npath.toPortablePath)), reject));
}
} else {
this.realFs.readdir(npath.fromPortablePath(p), opts, this.makeCallback(resolve, reject));
}
} else { } else {
this.realFs.readdir(npath.fromPortablePath(p), this.makeCallback(resolve, reject)); this.realFs.readdir(npath.fromPortablePath(p), this.makeCallback((value) => resolve(value), reject));
} }
}); });
} }
readdirSync(p, opts) { readdirSync(p, opts) {
if (opts) { if (opts == null ? void 0 : opts.withFileTypes) {
if (opts.recursive && process.platform === `win32`) { return this.realFs.readdirSync(npath.fromPortablePath(p), { withFileTypes: true });
if (opts.withFileTypes) {
return this.realFs.readdirSync(npath.fromPortablePath(p), opts).map(direntToPortable);
} else {
return this.realFs.readdirSync(npath.fromPortablePath(p), opts).map(npath.toPortablePath);
}
} else {
return this.realFs.readdirSync(npath.fromPortablePath(p), opts);
}
} else { } else {
return this.realFs.readdirSync(npath.fromPortablePath(p)); return this.realFs.readdirSync(npath.fromPortablePath(p));
} }
@@ -1392,10 +1357,8 @@ class VirtualFS extends ProxiedFS {
} }
} }
const [major, minor] = process.versions.node.split(`.`).map((value) => parseInt(value, 10)); const builtinModules = new Set(Module.builtinModules || Object.keys(process.binding(`natives`)));
const WATCH_MODE_MESSAGE_USES_ARRAYS = major > 19 || major === 19 && minor >= 2 || major === 18 && minor >= 13; const isBuiltinModule = (request) => request.startsWith(`node:`) || builtinModules.has(request);
const HAS_LAZY_LOADED_TRANSLATORS = major === 20 && minor < 6 || major === 19 && minor >= 3;
function readPackageScope(checkPath) { function readPackageScope(checkPath) {
const rootSeparatorIndex = checkPath.indexOf(npath.sep); const rootSeparatorIndex = checkPath.indexOf(npath.sep);
let separatorIndex; let separatorIndex;
@@ -1421,6 +1384,11 @@ function readPackage(requestPath) {
return JSON.parse(fs.readFileSync(jsonPath, `utf8`)); return JSON.parse(fs.readFileSync(jsonPath, `utf8`));
} }
const [major, minor] = process.versions.node.split(`.`).map((value) => parseInt(value, 10));
const HAS_CONSOLIDATED_HOOKS = major > 16 || major === 16 && minor >= 12;
const HAS_UNFLAGGED_JSON_MODULES = major > 17 || major === 17 && minor >= 5 || major === 16 && minor >= 15;
const HAS_JSON_IMPORT_ASSERTION_REQUIREMENT = major > 17 || major === 17 && minor >= 1 || major === 16 && minor > 14;
async function tryReadFile$1(path2) { async function tryReadFile$1(path2) {
try { try {
return await fs.promises.readFile(path2, `utf8`); return await fs.promises.readFile(path2, `utf8`);
@@ -1456,7 +1424,11 @@ function getFileFormat(filepath) {
); );
} }
case `.json`: { case `.json`: {
return `json`; if (HAS_UNFLAGGED_JSON_MODULES)
return `json`;
throw new Error(
`Unknown file extension ".json" for ${filepath}`
);
} }
case `.js`: { case `.js`: {
const pkg = readPackageScope(filepath); const pkg = readPackageScope(filepath);
@@ -1477,32 +1449,54 @@ function getFileFormat(filepath) {
} }
} }
async function load$1(urlString, context, nextLoad) { async function getFormat$1(resolved, context, defaultGetFormat) {
const url = tryParseURL(resolved);
if ((url == null ? void 0 : url.protocol) !== `file:`)
return defaultGetFormat(resolved, context, defaultGetFormat);
const format = getFileFormat(fileURLToPath(url));
if (format) {
return {
format
};
}
return defaultGetFormat(resolved, context, defaultGetFormat);
}
async function getSource$1(urlString, context, defaultGetSource) {
const url = tryParseURL(urlString); const url = tryParseURL(urlString);
if (url?.protocol !== `file:`) if ((url == null ? void 0 : url.protocol) !== `file:`)
return defaultGetSource(urlString, context, defaultGetSource);
return {
source: await fs.promises.readFile(fileURLToPath(url), `utf8`)
};
}
async function load$1(urlString, context, nextLoad) {
var _a;
const url = tryParseURL(urlString);
if ((url == null ? void 0 : url.protocol) !== `file:`)
return nextLoad(urlString, context, nextLoad); return nextLoad(urlString, context, nextLoad);
const filePath = fileURLToPath(url); const filePath = fileURLToPath(url);
const format = getFileFormat(filePath); const format = getFileFormat(filePath);
if (!format) if (!format)
return nextLoad(urlString, context, nextLoad); return nextLoad(urlString, context, nextLoad);
if (format === `json` && context.importAssertions?.type !== `json`) { if (HAS_JSON_IMPORT_ASSERTION_REQUIREMENT && format === `json` && ((_a = context.importAssertions) == null ? void 0 : _a.type) !== `json`) {
const err = new TypeError(`[ERR_IMPORT_ASSERTION_TYPE_MISSING]: Module "${urlString}" needs an import assertion of type "json"`); const err = new TypeError(`[ERR_IMPORT_ASSERTION_TYPE_MISSING]: Module "${urlString}" needs an import assertion of type "json"`);
err.code = `ERR_IMPORT_ASSERTION_TYPE_MISSING`; err.code = `ERR_IMPORT_ASSERTION_TYPE_MISSING`;
throw err; throw err;
} }
if (process.env.WATCH_REPORT_DEPENDENCIES && process.send) { if (process.env.WATCH_REPORT_DEPENDENCIES && process.send) {
const pathToSend = pathToFileURL(
npath.fromPortablePath(
VirtualFS.resolveVirtual(npath.toPortablePath(filePath))
)
).href;
process.send({ process.send({
"watch:import": WATCH_MODE_MESSAGE_USES_ARRAYS ? [pathToSend] : pathToSend "watch:import": pathToFileURL(
npath.fromPortablePath(
VirtualFS.resolveVirtual(npath.toPortablePath(filePath))
)
).href
}); });
} }
return { return {
format, format,
source: format === `commonjs` ? void 0 : await fs.promises.readFile(filePath, `utf8`), source: await fs.promises.readFile(filePath, `utf8`),
shortCircuit: true shortCircuit: true
}; };
} }
@@ -1886,7 +1880,12 @@ function patternKeyCompare(a, b) {
return 1; return 1;
return 0; return 0;
} }
function packageImportsResolve({ name, base, conditions, readFileSyncFn }) { function packageImportsResolve({
name,
base,
conditions,
readFileSyncFn
}) {
if (name === "#" || StringPrototypeStartsWith(name, "#/") || StringPrototypeEndsWith(name, "/")) { if (name === "#" || StringPrototypeStartsWith(name, "#/") || StringPrototypeEndsWith(name, "/")) {
const reason = "is not a valid internal imports specifier name"; const reason = "is not a valid internal imports specifier name";
throw new ERR_INVALID_MODULE_SPECIFIER(name, reason, fileURLToPath(base)); throw new ERR_INVALID_MODULE_SPECIFIER(name, reason, fileURLToPath(base));
@@ -1983,7 +1982,7 @@ async function resolvePrivateRequest(specifier, issuer, context, nextResolve) {
} }
async function resolve$1(originalSpecifier, context, nextResolve) { async function resolve$1(originalSpecifier, context, nextResolve) {
const { findPnpApi } = moduleExports; const { findPnpApi } = moduleExports;
if (!findPnpApi || isBuiltin(originalSpecifier)) if (!findPnpApi || isBuiltinModule(originalSpecifier))
return nextResolve(originalSpecifier, context, nextResolve); return nextResolve(originalSpecifier, context, nextResolve);
let specifier = originalSpecifier; let specifier = originalSpecifier;
const url = tryParseURL(specifier, isRelativeRegexp.test(specifier) ? context.parentURL : void 0); const url = tryParseURL(specifier, isRelativeRegexp.test(specifier) ? context.parentURL : void 0);
@@ -1993,7 +1992,7 @@ async function resolve$1(originalSpecifier, context, nextResolve) {
specifier = fileURLToPath(url); specifier = fileURLToPath(url);
} }
const { parentURL, conditions = [] } = context; const { parentURL, conditions = [] } = context;
const issuer = parentURL && tryParseURL(parentURL)?.protocol === `file:` ? fileURLToPath(parentURL) : process.cwd(); const issuer = parentURL ? fileURLToPath(parentURL) : process.cwd();
const pnpapi = findPnpApi(issuer) ?? (url ? findPnpApi(specifier) : null); const pnpapi = findPnpApi(issuer) ?? (url ? findPnpApi(specifier) : null);
if (!pnpapi) if (!pnpapi)
return nextResolve(originalSpecifier, context, nextResolve); return nextResolve(originalSpecifier, context, nextResolve);
@@ -2003,7 +2002,7 @@ async function resolve$1(originalSpecifier, context, nextResolve) {
let allowLegacyResolve = false; let allowLegacyResolve = false;
if (dependencyNameMatch) { if (dependencyNameMatch) {
const [, dependencyName, subPath] = dependencyNameMatch; const [, dependencyName, subPath] = dependencyNameMatch;
if (subPath === `` && dependencyName !== `pnpapi`) { if (subPath === ``) {
const resolved = pnpapi.resolveToUnqualified(`${dependencyName}/package.json`, issuer); const resolved = pnpapi.resolveToUnqualified(`${dependencyName}/package.json`, issuer);
if (resolved) { if (resolved) {
const content = await tryReadFile$1(resolved); const content = await tryReadFile$1(resolved);
@@ -2014,17 +2013,10 @@ async function resolve$1(originalSpecifier, context, nextResolve) {
} }
} }
} }
let result; const result = pnpapi.resolveRequest(specifier, issuer, {
try { conditions: new Set(conditions),
result = pnpapi.resolveRequest(specifier, issuer, { extensions: allowLegacyResolve ? void 0 : []
conditions: new Set(conditions), });
extensions: allowLegacyResolve ? void 0 : []
});
} catch (err) {
if (err instanceof Error && `code` in err && err.code === `MODULE_NOT_FOUND`)
err.code = `ERR_MODULE_NOT_FOUND`;
throw err;
}
if (!result) if (!result)
throw new Error(`Resolving '${specifier}' from '${issuer}' failed`); throw new Error(`Resolving '${specifier}' from '${issuer}' failed`);
const resultURL = pathToFileURL(result); const resultURL = pathToFileURL(result);
@@ -2040,51 +2032,36 @@ async function resolve$1(originalSpecifier, context, nextResolve) {
}; };
} }
if (!HAS_LAZY_LOADED_TRANSLATORS) { const binding = process.binding(`fs`);
const binding = process.binding(`fs`); const originalfstat = binding.fstat;
const originalReadFile = binding.readFileUtf8 || binding.readFileSync; const ZIP_MASK = 4278190080;
if (originalReadFile) { const ZIP_MAGIC = 704643072;
binding[originalReadFile.name] = function(...args) { binding.fstat = function(...args) {
try { const [fd, useBigint, req] = args;
return fs.readFileSync(args[0], { if ((fd & ZIP_MASK) === ZIP_MAGIC && useBigint === false && req === void 0) {
encoding: `utf8`, try {
flag: args[1] const stats = fs.fstatSync(fd);
}); return new Float64Array([
} catch { stats.dev,
} stats.mode,
return originalReadFile.apply(this, args); stats.nlink,
}; stats.uid,
} else { stats.gid,
const binding2 = process.binding(`fs`); stats.rdev,
const originalfstat = binding2.fstat; stats.blksize,
const ZIP_MASK = 4278190080; stats.ino,
const ZIP_MAGIC = 704643072; stats.size,
binding2.fstat = function(...args) { stats.blocks
const [fd, useBigint, req] = args; ]);
if ((fd & ZIP_MASK) === ZIP_MAGIC && useBigint === false && req === void 0) { } catch {
try { }
const stats = fs.fstatSync(fd);
return new Float64Array([
stats.dev,
stats.mode,
stats.nlink,
stats.uid,
stats.gid,
stats.rdev,
stats.blksize,
stats.ino,
stats.size,
stats.blocks
]);
} catch {
}
}
return originalfstat.apply(this, args);
};
} }
} return originalfstat.apply(this, args);
};
const resolve = resolve$1; const resolve = resolve$1;
const load = load$1; const getFormat = HAS_CONSOLIDATED_HOOKS ? void 0 : getFormat$1;
const getSource = HAS_CONSOLIDATED_HOOKS ? void 0 : getSource$1;
const load = HAS_CONSOLIDATED_HOOKS ? load$1 : void 0;
export { load, resolve }; export { getFormat, getSource, load, resolve };


@@ -6,6 +6,5 @@
"eslint.nodePath": ".yarn/sdks", "eslint.nodePath": ".yarn/sdks",
"prettier.prettierPath": ".yarn/sdks/prettier/index.js", "prettier.prettierPath": ".yarn/sdks/prettier/index.js",
"typescript.tsdk": ".yarn/sdks/typescript/lib", "typescript.tsdk": ".yarn/sdks/typescript/lib",
"typescript.enablePromptUseWorkspaceTsdk": true, "typescript.enablePromptUseWorkspaceTsdk": true
"terraform.languageServer.enable": false
} }

Some files were not shown because too many files have changed in this diff Show More