Compare commits

..

15 Commits

Author SHA1 Message Date
standardci
23b8cdc4a1 chore(release): publish new version
- @standardnotes/home-server@1.15.6
- @standardnotes/revisions-server@1.27.0
- @standardnotes/syncing-server@1.86.0
2023-08-29 10:54:17 +00:00
Karol Sójko
2646b756a9 feat(revisions): add MongoDB support (#715)
* feat(revisions): add MongoDB support

* fix: add missing mongodb from revisions

* fix: mongodb bson imports
2023-08-29 12:19:55 +02:00
Karol Sójko
28e058c6e8 fix: remove redundant vaults enabled flag 2023-08-29 08:28:40 +02:00
standardci
8dea171115 chore(release): publish new version
- @standardnotes/api-gateway@1.72.1
- @standardnotes/auth-server@1.135.2
- @standardnotes/files-server@1.22.2
- @standardnotes/home-server@1.15.5
- @standardnotes/revisions-server@1.26.12
- @standardnotes/syncing-server@1.85.1
2023-08-28 14:09:12 +00:00
Karol Sójko
aef9254713 fix: allow self hosted to use new model of items (#714)
* fix: allow self hosted to use new model of items

* fix: env sample

* fix: binding
2023-08-28 15:34:07 +02:00
Karol Sójko
31b7396006 fix: enable vault tests for all suites (#713)
* fix: enable vault tests for all suites

* fix: rename test suite
2023-08-28 14:29:01 +02:00
standardci
be0a2649da chore(release): publish new version
- @standardnotes/home-server@1.15.4
- @standardnotes/syncing-server@1.85.0
2023-08-28 12:17:41 +00:00
Karol Sójko
bf8f91f83d feat(syncing-server): distinguish between legacy and current items model usage (#712)
* feat(syncing-server): turn mysql items model into legacy

* fix: rename MySQL model to SQL model to include SQLite option

* feat(syncing-server): distinguish between legacy and current items model usage
2023-08-28 13:48:27 +02:00
Karol Sójko
effdfebc19 feat(syncing-server): turn mysql items model into legacy (#711)
* feat(syncing-server): turn mysql items model into legacy

* fix: rename MySQL model to SQL model to include SQLite option

* fix: rename mysqlitem to sqlitem
2023-08-28 12:28:48 +02:00
standardci
f4816e6c9a chore(release): publish new version
- @standardnotes/auth-server@1.135.1
- @standardnotes/home-server@1.15.3
- @standardnotes/syncing-server@1.84.2
2023-08-25 13:41:14 +00:00
Karol Sójko
152a5cbd27 fix(syncing-server): items sorting in MongoDB (#710) 2023-08-25 15:01:17 +02:00
Karol Sójko
1488763115 fix(syncing-server): logs severity on creating duplicates 2023-08-25 12:24:43 +02:00
Karol Sójko
bbb35d16fc fix(auth): account enumeration with pseudo u2f and mfa (#709) 2023-08-25 12:05:16 +02:00
standardci
ef07045ee9 chore(release): publish new version
- @standardnotes/home-server@1.15.2
- @standardnotes/syncing-server@1.84.1
2023-08-25 09:16:28 +00:00
Karol Sójko
3ba673b424 fix(syncing-server): handling mixed values of deleted flag in MongoDB (#708) 2023-08-25 10:45:14 +02:00
83 changed files with 1504 additions and 287 deletions

View File

@@ -19,7 +19,7 @@ on:
jobs:
e2e:
name: (Docker) E2E Test Suite
name: (Self Hosting) E2E Test Suite
strategy:
fail-fast: false
matrix:
@@ -56,17 +56,8 @@ jobs:
- name: Wait for server to start
run: docker/is-available.sh http://localhost:3123 $(pwd)/logs
- name: Define if vault tests are enabled
id: vaults
run: |
if [ "${{ matrix.secondary_db_enabled }}" = "true" ] && [ "${{ matrix.transition_mode_enabled }}" = "true" ]; then
echo "vault-tests=enabled" >> $GITHUB_OUTPUT
else
echo "vault-tests=disabled" >> $GITHUB_OUTPUT
fi
- name: Run E2E Test Suite
run: yarn dlx mocha-headless-chrome --timeout 1800000 -f http://localhost:9001/mocha/test.html?vaults=${{ steps.vaults.outputs.vault-tests }}
run: yarn dlx mocha-headless-chrome --timeout 1800000 -f http://localhost:9001/mocha/test.html
- name: Show logs on failure
if: ${{ failure() }}
@@ -170,17 +161,8 @@ jobs:
- name: Wait for server to start
run: for i in {1..30}; do curl -s http://localhost:3123/healthcheck && break || sleep 1; done
- name: Define if vault tests are enabled
id: vaults
run: |
if [ "${{ matrix.secondary_db_enabled }}" = "true" ] && [ "${{ matrix.transition_mode_enabled }}" = "true" ]; then
echo "vault-tests=enabled" >> $GITHUB_OUTPUT
else
echo "vault-tests=disabled" >> $GITHUB_OUTPUT
fi
- name: Run E2E Test Suite
run: yarn dlx mocha-headless-chrome --timeout 1800000 -f http://localhost:9001/mocha/test.html?vaults=${{ steps.vaults.outputs.vault-tests }}
run: yarn dlx mocha-headless-chrome --timeout 1800000 -f http://localhost:9001/mocha/test.html
- name: Show logs on failure
if: ${{ failure() }}

152
.pnp.cjs generated
View File

@@ -3602,6 +3602,16 @@ const RAW_RUNTIME_STATE =
"linkType": "HARD"\
}]\
]],\
["@mongodb-js/saslprep", [\
["npm:1.1.0", {\
"packageLocation": "./.yarn/cache/@mongodb-js-saslprep-npm-1.1.0-3906c025b8-2cf6d124d4.zip/node_modules/@mongodb-js/saslprep/",\
"packageDependencies": [\
["@mongodb-js/saslprep", "npm:1.1.0"],\
["sparse-bitfield", "npm:3.0.3"]\
],\
"linkType": "HARD"\
}]\
]],\
["@mrleebo/prisma-ast", [\
["npm:0.5.2", {\
"packageLocation": "./.yarn/cache/@mrleebo-prisma-ast-npm-0.5.2-538c9d793e-69a7f3c188.zip/node_modules/@mrleebo/prisma-ast/",\
@@ -4997,6 +5007,7 @@ const RAW_RUNTIME_STATE =
["inversify", "npm:6.0.1"],\
["inversify-express-utils", "npm:6.4.3"],\
["jest", "virtual:fd909b174d079e30b336c4ce72c38a88c1e447767b1a8dd7655e07719a1e31b97807f0931368724fc78897ff15e6a6d00b83316c0f76d11f85111f342e08bb79#npm:29.5.0"],\
["mongodb", "virtual:365b8c88cdf194291829ee28b79556e2328175d26a621363e703848100bea0042e9500db2a1206c9bbc3a4a76a1d169639ef774b2ea3a1a98584a9936b58c6be#npm:6.0.0"],\
["mysql2", "npm:3.3.3"],\
["newrelic", "npm:10.1.2"],\
["npm-check-updates", "npm:16.10.12"],\
@@ -5191,7 +5202,7 @@ const RAW_RUNTIME_STATE =
["inversify-express-utils", "npm:6.4.3"],\
["jest", "virtual:fd909b174d079e30b336c4ce72c38a88c1e447767b1a8dd7655e07719a1e31b97807f0931368724fc78897ff15e6a6d00b83316c0f76d11f85111f342e08bb79#npm:29.5.0"],\
["jsonwebtoken", "npm:9.0.0"],\
["mongodb", "virtual:67ad3a1ca34e24ce4821cc48979e98af0c3e5dd7aabc7ad0b5d22d1d977d6f943f81c9f141a420105ebdc61ef777e508a96c7946081decd98f8c30543d468b33#npm:5.7.0"],\
["mongodb", "virtual:365b8c88cdf194291829ee28b79556e2328175d26a621363e703848100bea0042e9500db2a1206c9bbc3a4a76a1d169639ef774b2ea3a1a98584a9936b58c6be#npm:6.0.0"],\
["mysql2", "npm:3.3.3"],\
["newrelic", "npm:10.1.2"],\
["nodemon", "npm:2.0.22"],\
@@ -5202,7 +5213,7 @@ const RAW_RUNTIME_STATE =
["semver", "npm:7.5.1"],\
["sqlite3", "virtual:31b5a94a105c89c9294c3d524a7f8929fe63ee5a2efadf21951ca4c0cfd2ecf02e8f4ef5a066bbda091f1e3a56e57c6749069a080618c96b22e51131a330fc4a#npm:5.1.6"],\
["ts-jest", "virtual:fd909b174d079e30b336c4ce72c38a88c1e447767b1a8dd7655e07719a1e31b97807f0931368724fc78897ff15e6a6d00b83316c0f76d11f85111f342e08bb79#npm:29.1.0"],\
["typeorm", "virtual:67ad3a1ca34e24ce4821cc48979e98af0c3e5dd7aabc7ad0b5d22d1d977d6f943f81c9f141a420105ebdc61ef777e508a96c7946081decd98f8c30543d468b33#npm:0.3.16"],\
["typeorm", "virtual:365b8c88cdf194291829ee28b79556e2328175d26a621363e703848100bea0042e9500db2a1206c9bbc3a4a76a1d169639ef774b2ea3a1a98584a9936b58c6be#npm:0.3.16"],\
["typescript", "patch:typescript@npm%3A5.0.4#optional!builtin<compat/typescript>::version=5.0.4&hash=b5f058"],\
["ua-parser-js", "npm:1.0.35"],\
["uuid", "npm:9.0.0"],\
@@ -7096,10 +7107,10 @@ const RAW_RUNTIME_STATE =
}]\
]],\
["bson", [\
["npm:5.4.0", {\
"packageLocation": "./.yarn/cache/bson-npm-5.4.0-2f854c8216-2c913a45c0.zip/node_modules/bson/",\
["npm:6.0.0", {\
"packageLocation": "./.yarn/cache/bson-npm-6.0.0-7b3cba060e-7290998ee8.zip/node_modules/bson/",\
"packageDependencies": [\
["bson", "npm:5.4.0"]\
["bson", "npm:6.0.0"]\
],\
"linkType": "HARD"\
}]\
@@ -12330,43 +12341,50 @@ const RAW_RUNTIME_STATE =
}]\
]],\
["mongodb", [\
["npm:5.7.0", {\
"packageLocation": "./.yarn/cache/mongodb-npm-5.7.0-c5e415a2e7-23a291ffe7.zip/node_modules/mongodb/",\
["npm:6.0.0", {\
"packageLocation": "./.yarn/cache/mongodb-npm-6.0.0-7c1e74de91-daec6dc9dc.zip/node_modules/mongodb/",\
"packageDependencies": [\
["mongodb", "npm:5.7.0"]\
["mongodb", "npm:6.0.0"]\
],\
"linkType": "SOFT"\
}],\
["virtual:67ad3a1ca34e24ce4821cc48979e98af0c3e5dd7aabc7ad0b5d22d1d977d6f943f81c9f141a420105ebdc61ef777e508a96c7946081decd98f8c30543d468b33#npm:5.7.0", {\
"packageLocation": "./.yarn/__virtual__/mongodb-virtual-eb0cd47e23/0/cache/mongodb-npm-5.7.0-c5e415a2e7-23a291ffe7.zip/node_modules/mongodb/",\
["virtual:365b8c88cdf194291829ee28b79556e2328175d26a621363e703848100bea0042e9500db2a1206c9bbc3a4a76a1d169639ef774b2ea3a1a98584a9936b58c6be#npm:6.0.0", {\
"packageLocation": "./.yarn/__virtual__/mongodb-virtual-789f2eaaac/0/cache/mongodb-npm-6.0.0-7c1e74de91-daec6dc9dc.zip/node_modules/mongodb/",\
"packageDependencies": [\
["mongodb", "virtual:67ad3a1ca34e24ce4821cc48979e98af0c3e5dd7aabc7ad0b5d22d1d977d6f943f81c9f141a420105ebdc61ef777e508a96c7946081decd98f8c30543d468b33#npm:5.7.0"],\
["mongodb", "virtual:365b8c88cdf194291829ee28b79556e2328175d26a621363e703848100bea0042e9500db2a1206c9bbc3a4a76a1d169639ef774b2ea3a1a98584a9936b58c6be#npm:6.0.0"],\
["@aws-sdk/credential-providers", null],\
["@mongodb-js/saslprep", "npm:1.1.0"],\
["@mongodb-js/zstd", null],\
["@types/aws-sdk__credential-providers", null],\
["@types/gcp-metadata", null],\
["@types/kerberos", null],\
["@types/mongodb-client-encryption", null],\
["@types/mongodb-js__zstd", null],\
["@types/snappy", null],\
["bson", "npm:5.4.0"],\
["@types/socks", null],\
["bson", "npm:6.0.0"],\
["gcp-metadata", null],\
["kerberos", null],\
["mongodb-client-encryption", null],\
["mongodb-connection-string-url", "npm:2.6.0"],\
["saslprep", "npm:1.0.3"],\
["snappy", null],\
["socks", "npm:2.7.1"]\
["socks", null]\
],\
"packagePeers": [\
"@aws-sdk/credential-providers",\
"@mongodb-js/zstd",\
"@types/aws-sdk__credential-providers",\
"@types/gcp-metadata",\
"@types/kerberos",\
"@types/mongodb-client-encryption",\
"@types/mongodb-js__zstd",\
"@types/snappy",\
"@types/socks",\
"gcp-metadata",\
"kerberos",\
"mongodb-client-encryption",\
"snappy"\
"snappy",\
"socks"\
],\
"linkType": "HARD"\
}]\
@@ -14341,16 +14359,6 @@ const RAW_RUNTIME_STATE =
"linkType": "HARD"\
}]\
]],\
["saslprep", [\
["npm:1.0.3", {\
"packageLocation": "./.yarn/cache/saslprep-npm-1.0.3-8db649c346-23ebcda091.zip/node_modules/saslprep/",\
"packageDependencies": [\
["saslprep", "npm:1.0.3"],\
["sparse-bitfield", "npm:3.0.3"]\
],\
"linkType": "HARD"\
}]\
]],\
["schema-utils", [\
["npm:3.1.2", {\
"packageLocation": "./.yarn/cache/schema-utils-npm-3.1.2-d97c6dc247-11d35f997e.zip/node_modules/schema-utils/",\
@@ -15821,99 +15829,7 @@ const RAW_RUNTIME_STATE =
["hdb-pool", null],\
["ioredis", null],\
["mkdirp", "npm:2.1.6"],\
["mongodb", null],\
["mssql", null],\
["mysql2", "npm:3.3.3"],\
["oracledb", null],\
["pg", null],\
["pg-native", null],\
["pg-query-stream", null],\
["redis", null],\
["reflect-metadata", "npm:0.1.13"],\
["sha.js", "npm:2.4.11"],\
["sql.js", null],\
["sqlite3", "virtual:31b5a94a105c89c9294c3d524a7f8929fe63ee5a2efadf21951ca4c0cfd2ecf02e8f4ef5a066bbda091f1e3a56e57c6749069a080618c96b22e51131a330fc4a#npm:5.1.6"],\
["ts-node", null],\
["tslib", "npm:2.5.2"],\
["typeorm-aurora-data-api-driver", null],\
["uuid", "npm:9.0.0"],\
["yargs", "npm:17.7.2"]\
],\
"packagePeers": [\
"@google-cloud/spanner",\
"@sap/hana-client",\
"@types/better-sqlite3",\
"@types/google-cloud__spanner",\
"@types/hdb-pool",\
"@types/ioredis",\
"@types/mongodb",\
"@types/mssql",\
"@types/mysql2",\
"@types/oracledb",\
"@types/pg-native",\
"@types/pg-query-stream",\
"@types/pg",\
"@types/redis",\
"@types/sap__hana-client",\
"@types/sql.js",\
"@types/sqlite3",\
"@types/ts-node",\
"@types/typeorm-aurora-data-api-driver",\
"better-sqlite3",\
"hdb-pool",\
"ioredis",\
"mongodb",\
"mssql",\
"mysql2",\
"oracledb",\
"pg-native",\
"pg-query-stream",\
"pg",\
"redis",\
"sql.js",\
"sqlite3",\
"ts-node",\
"typeorm-aurora-data-api-driver"\
],\
"linkType": "HARD"\
}],\
["virtual:67ad3a1ca34e24ce4821cc48979e98af0c3e5dd7aabc7ad0b5d22d1d977d6f943f81c9f141a420105ebdc61ef777e508a96c7946081decd98f8c30543d468b33#npm:0.3.16", {\
"packageLocation": "./.yarn/__virtual__/typeorm-virtual-13b6364fde/0/cache/typeorm-npm-0.3.16-5ac12a7afc-19803f935e.zip/node_modules/typeorm/",\
"packageDependencies": [\
["typeorm", "virtual:67ad3a1ca34e24ce4821cc48979e98af0c3e5dd7aabc7ad0b5d22d1d977d6f943f81c9f141a420105ebdc61ef777e508a96c7946081decd98f8c30543d468b33#npm:0.3.16"],\
["@google-cloud/spanner", null],\
["@sap/hana-client", null],\
["@sqltools/formatter", "npm:1.2.5"],\
["@types/better-sqlite3", null],\
["@types/google-cloud__spanner", null],\
["@types/hdb-pool", null],\
["@types/ioredis", null],\
["@types/mongodb", null],\
["@types/mssql", null],\
["@types/mysql2", null],\
["@types/oracledb", null],\
["@types/pg", null],\
["@types/pg-native", null],\
["@types/pg-query-stream", null],\
["@types/redis", null],\
["@types/sap__hana-client", null],\
["@types/sql.js", null],\
["@types/sqlite3", null],\
["@types/ts-node", null],\
["@types/typeorm-aurora-data-api-driver", null],\
["app-root-path", "npm:3.1.0"],\
["better-sqlite3", null],\
["buffer", "npm:6.0.3"],\
["chalk", "npm:4.1.2"],\
["cli-highlight", "npm:2.1.11"],\
["date-fns", "npm:2.30.0"],\
["debug", "virtual:ac3d8e680759ce54399273724d44e041d6c9b73454d191d411a8c44bb27e22f02aaf6ed9d3ad0ac1c298eac4833cff369c9c7b84c573016112c4f84be2cd8543#npm:4.3.4"],\
["dotenv", "npm:16.1.3"],\
["glob", "npm:8.1.0"],\
["hdb-pool", null],\
["ioredis", null],\
["mkdirp", "npm:2.1.6"],\
["mongodb", "virtual:67ad3a1ca34e24ce4821cc48979e98af0c3e5dd7aabc7ad0b5d22d1d977d6f943f81c9f141a420105ebdc61ef777e508a96c7946081decd98f8c30543d468b33#npm:5.7.0"],\
["mongodb", "virtual:365b8c88cdf194291829ee28b79556e2328175d26a621363e703848100bea0042e9500db2a1206c9bbc3a4a76a1d169639ef774b2ea3a1a98584a9936b58c6be#npm:6.0.0"],\
["mssql", null],\
["mysql2", "npm:3.3.3"],\
["oracledb", null],\

Binary file not shown.

View File

@@ -2,6 +2,8 @@
# Setup environment variables
export MODE="self-hosted"
#########
# PORTS #
#########

View File

@@ -1,4 +1,4 @@
MODE=microservice # microservice | home-server
MODE=microservice # microservice | home-server | self-hosted
LOG_LEVEL=debug
NODE_ENV=development
VERSION=development

View File

@@ -3,6 +3,12 @@
All notable changes to this project will be documented in this file.
See [Conventional Commits](https://conventionalcommits.org) for commit guidelines.
## [1.72.1](https://github.com/standardnotes/api-gateway/compare/@standardnotes/api-gateway@1.72.0...@standardnotes/api-gateway@1.72.1) (2023-08-28)
### Bug Fixes
* allow self hosted to use new model of items ([#714](https://github.com/standardnotes/api-gateway/issues/714)) ([aef9254](https://github.com/standardnotes/api-gateway/commit/aef9254713560c00a90a3e84e3cd94417e8f30d2))
# [1.72.0](https://github.com/standardnotes/api-gateway/compare/@standardnotes/api-gateway@1.71.1...@standardnotes/api-gateway@1.72.0) (2023-08-24)
### Features

View File

@@ -1,6 +1,6 @@
{
"name": "@standardnotes/api-gateway",
"version": "1.72.0",
"version": "1.72.1",
"engines": {
"node": ">=18.0.0 <21.0.0"
},

View File

@@ -1,4 +1,4 @@
MODE=microservice # microservice | home-server
MODE=microservice # microservice | home-server | self-hosted
LOG_LEVEL=debug
NODE_ENV=development
VERSION=development

View File

@@ -3,6 +3,18 @@
All notable changes to this project will be documented in this file.
See [Conventional Commits](https://conventionalcommits.org) for commit guidelines.
## [1.135.2](https://github.com/standardnotes/server/compare/@standardnotes/auth-server@1.135.1...@standardnotes/auth-server@1.135.2) (2023-08-28)
### Bug Fixes
* allow self hosted to use new model of items ([#714](https://github.com/standardnotes/server/issues/714)) ([aef9254](https://github.com/standardnotes/server/commit/aef9254713560c00a90a3e84e3cd94417e8f30d2))
## [1.135.1](https://github.com/standardnotes/server/compare/@standardnotes/auth-server@1.135.0...@standardnotes/auth-server@1.135.1) (2023-08-25)
### Bug Fixes
* **auth:** account enumeration with pseudo u2f and mfa ([#709](https://github.com/standardnotes/server/issues/709)) ([bbb35d1](https://github.com/standardnotes/server/commit/bbb35d16fc4f6a57fe774a648fbda13ec64a8865))
# [1.135.0](https://github.com/standardnotes/server/compare/@standardnotes/auth-server@1.134.0...@standardnotes/auth-server@1.135.0) (2023-08-24)
### Features

View File

@@ -1,6 +1,6 @@
{
"name": "@standardnotes/auth-server",
"version": "1.135.0",
"version": "1.135.2",
"engines": {
"node": ">=18.0.0 <21.0.0"
},

View File

@@ -257,7 +257,7 @@ describe('VerifyMFA', () => {
})
it('should not pass if user is not found and pseudo u2f is required', async () => {
booleanSelector.select = jest.fn().mockReturnValueOnce(false).mockReturnValueOnce(true)
booleanSelector.select = jest.fn().mockReturnValueOnce(true).mockReturnValueOnce(true)
userRepository.findOneByUsernameOrEmail = jest.fn().mockReturnValue(null)
expect(

View File

@@ -48,33 +48,33 @@ export class VerifyMFA implements UseCaseInterface {
const user = await this.userRepository.findOneByUsernameOrEmail(username)
if (user == null) {
const mfaSelectorHash = crypto
const secondFactorSelectorHash = crypto
.createHash('sha256')
.update(`mfa-selector-${dto.email}${this.pseudoKeyParamsKey}`)
.digest('hex')
const u2fSelectorHash = crypto
.createHash('sha256')
.update(`u2f-selector-${dto.email}${this.pseudoKeyParamsKey}`)
.update(`second-factor-selector-${dto.email}${this.pseudoKeyParamsKey}`)
.digest('hex')
const isPseudoMFARequired = this.booleanSelector.select(mfaSelectorHash, [true, false])
const isPseudoSecondFactorRequired = this.booleanSelector.select(secondFactorSelectorHash, [true, false])
if (isPseudoSecondFactorRequired) {
const u2fSelectorHash = crypto
.createHash('sha256')
.update(`u2f-selector-${dto.email}${this.pseudoKeyParamsKey}`)
.digest('hex')
const isPseudoU2FRequired = this.booleanSelector.select(u2fSelectorHash, [true, false])
const isPseudoU2FRequired = this.booleanSelector.select(u2fSelectorHash, [true, false])
if (isPseudoMFARequired) {
return {
success: false,
errorTag: ErrorTag.MfaRequired,
errorMessage: 'Please enter your two-factor authentication code.',
errorPayload: { mfa_key: `mfa_${uuidv4()}` },
}
}
if (isPseudoU2FRequired) {
return {
success: false,
errorTag: ErrorTag.U2FRequired,
errorMessage: 'Please authenticate with your U2F device.',
if (isPseudoU2FRequired) {
return {
success: false,
errorTag: ErrorTag.U2FRequired,
errorMessage: 'Please authenticate with your U2F device.',
}
} else {
return {
success: false,
errorTag: ErrorTag.MfaRequired,
errorMessage: 'Please enter your two-factor authentication code.',
errorPayload: { mfa_key: `mfa_${uuidv4()}` },
}
}
}

View File

@@ -1,4 +1,4 @@
MODE=microservice # microservice | home-server
MODE=microservice # microservice | home-server | self-hosted
LOG_LEVEL=debug
NODE_ENV=development
VERSION=development

View File

@@ -3,6 +3,12 @@
All notable changes to this project will be documented in this file.
See [Conventional Commits](https://conventionalcommits.org) for commit guidelines.
## [1.22.2](https://github.com/standardnotes/files/compare/@standardnotes/files-server@1.22.1...@standardnotes/files-server@1.22.2) (2023-08-28)
### Bug Fixes
* allow self hosted to use new model of items ([#714](https://github.com/standardnotes/files/issues/714)) ([aef9254](https://github.com/standardnotes/files/commit/aef9254713560c00a90a3e84e3cd94417e8f30d2))
## [1.22.1](https://github.com/standardnotes/files/compare/@standardnotes/files-server@1.22.0...@standardnotes/files-server@1.22.1) (2023-08-24)
**Note:** Version bump only for package @standardnotes/files-server

View File

@@ -1,6 +1,6 @@
{
"name": "@standardnotes/files-server",
"version": "1.22.1",
"version": "1.22.2",
"engines": {
"node": ">=18.0.0 <21.0.0"
},

View File

@@ -3,6 +3,26 @@
All notable changes to this project will be documented in this file.
See [Conventional Commits](https://conventionalcommits.org) for commit guidelines.
## [1.15.6](https://github.com/standardnotes/server/compare/@standardnotes/home-server@1.15.5...@standardnotes/home-server@1.15.6) (2023-08-29)
**Note:** Version bump only for package @standardnotes/home-server
## [1.15.5](https://github.com/standardnotes/server/compare/@standardnotes/home-server@1.15.4...@standardnotes/home-server@1.15.5) (2023-08-28)
**Note:** Version bump only for package @standardnotes/home-server
## [1.15.4](https://github.com/standardnotes/server/compare/@standardnotes/home-server@1.15.3...@standardnotes/home-server@1.15.4) (2023-08-28)
**Note:** Version bump only for package @standardnotes/home-server
## [1.15.3](https://github.com/standardnotes/server/compare/@standardnotes/home-server@1.15.2...@standardnotes/home-server@1.15.3) (2023-08-25)
**Note:** Version bump only for package @standardnotes/home-server
## [1.15.2](https://github.com/standardnotes/server/compare/@standardnotes/home-server@1.15.1...@standardnotes/home-server@1.15.2) (2023-08-25)
**Note:** Version bump only for package @standardnotes/home-server
## [1.15.1](https://github.com/standardnotes/server/compare/@standardnotes/home-server@1.15.0...@standardnotes/home-server@1.15.1) (2023-08-24)
**Note:** Version bump only for package @standardnotes/home-server

View File

@@ -1,6 +1,6 @@
{
"name": "@standardnotes/home-server",
"version": "1.15.1",
"version": "1.15.6",
"engines": {
"node": ">=18.0.0 <21.0.0"
},

View File

@@ -1,4 +1,4 @@
MODE=microservice # microservice | home-server
MODE=microservice # microservice | home-server | self-hosted
LOG_LEVEL=info
NODE_ENV=development
VERSION=development
@@ -33,3 +33,11 @@ NEW_RELIC_NO_CONFIG_FILE=true
NEW_RELIC_DISTRIBUTED_TRACING_ENABLED=false
NEW_RELIC_LOG_ENABLED=false
NEW_RELIC_LOG_LEVEL=info
# (Optional) Mongo Setup
SECONDARY_DB_ENABLED=false
MONGO_HOST=
MONGO_PORT=
MONGO_USERNAME=
MONGO_PASSWORD=
MONGO_DATABASE=

View File

@@ -3,6 +3,18 @@
All notable changes to this project will be documented in this file.
See [Conventional Commits](https://conventionalcommits.org) for commit guidelines.
# [1.27.0](https://github.com/standardnotes/server/compare/@standardnotes/revisions-server@1.26.12...@standardnotes/revisions-server@1.27.0) (2023-08-29)
### Features
* **revisions:** add MongoDB support ([#715](https://github.com/standardnotes/server/issues/715)) ([2646b75](https://github.com/standardnotes/server/commit/2646b756a95c425bd406622bfe3a9aa4c490d537))
## [1.26.12](https://github.com/standardnotes/server/compare/@standardnotes/revisions-server@1.26.11...@standardnotes/revisions-server@1.26.12) (2023-08-28)
### Bug Fixes
* allow self hosted to use new model of items ([#714](https://github.com/standardnotes/server/issues/714)) ([aef9254](https://github.com/standardnotes/server/commit/aef9254713560c00a90a3e84e3cd94417e8f30d2))
## [1.26.11](https://github.com/standardnotes/server/compare/@standardnotes/revisions-server@1.26.10...@standardnotes/revisions-server@1.26.11) (2023-08-24)
**Note:** Version bump only for package @standardnotes/revisions-server

View File

@@ -1,6 +1,6 @@
{
"name": "@standardnotes/revisions-server",
"version": "1.26.11",
"version": "1.27.0",
"engines": {
"node": ">=18.0.0 <21.0.0"
},
@@ -40,6 +40,7 @@
"express": "^4.18.2",
"inversify": "^6.0.1",
"inversify-express-utils": "^6.4.3",
"mongodb": "^6.0.0",
"mysql2": "^3.0.1",
"reflect-metadata": "0.1.13",
"sqlite3": "^5.1.6",

View File

@@ -1,15 +1,13 @@
import { ControllerContainer, ControllerContainerInterface, MapperInterface } from '@standardnotes/domain-core'
import { Container, interfaces } from 'inversify'
import { Repository } from 'typeorm'
import { MongoRepository, Repository } from 'typeorm'
import * as winston from 'winston'
import { Revision } from '../Domain/Revision/Revision'
import { RevisionMetadata } from '../Domain/Revision/RevisionMetadata'
import { RevisionRepositoryInterface } from '../Domain/Revision/RevisionRepositoryInterface'
import { TypeORMRevisionRepository } from '../Infra/TypeORM/TypeORMRevisionRepository'
import { TypeORMRevision } from '../Infra/TypeORM/TypeORMRevision'
import { RevisionMetadataPersistenceMapper } from '../Mapping/RevisionMetadataPersistenceMapper'
import { RevisionPersistenceMapper } from '../Mapping/RevisionPersistenceMapper'
import { TypeORMRevisionRepository } from '../Infra/TypeORM/SQLRevisionRepository'
import { TypeORMRevision } from '../Infra/TypeORM/SQLRevision'
import { AppDataSource } from './DataSource'
import { Env } from './Env'
import TYPES from './Types'
@@ -21,8 +19,7 @@ import { DeleteRevision } from '../Domain/UseCase/DeleteRevision/DeleteRevision'
import { GetRequiredRoleToViewRevision } from '../Domain/UseCase/GetRequiredRoleToViewRevision/GetRequiredRoleToViewRevision'
import { GetRevision } from '../Domain/UseCase/GetRevision/GetRevision'
import { GetRevisionsMetada } from '../Domain/UseCase/GetRevisionsMetada/GetRevisionsMetada'
import { RevisionHttpMapper } from '../Mapping/RevisionHttpMapper'
import { RevisionMetadataHttpMapper } from '../Mapping/RevisionMetadataHttpMapper'
import { RevisionMetadataHttpMapper } from '../Mapping/Http/RevisionMetadataHttpMapper'
import { S3Client } from '@aws-sdk/client-s3'
import { SQSClient, SQSClientConfig } from '@aws-sdk/client-sqs'
import {
@@ -44,9 +41,16 @@ import { RevisionsCopyRequestedEventHandler } from '../Domain/Handler/RevisionsC
import { CopyRevisions } from '../Domain/UseCase/CopyRevisions/CopyRevisions'
import { FSDumpRepository } from '../Infra/FS/FSDumpRepository'
import { S3DumpRepository } from '../Infra/S3/S3ItemDumpRepository'
import { RevisionItemStringMapper } from '../Mapping/RevisionItemStringMapper'
import { RevisionItemStringMapper } from '../Mapping/Backup/RevisionItemStringMapper'
import { BaseRevisionsController } from '../Infra/InversifyExpress/Base/BaseRevisionsController'
import { Transform } from 'stream'
import { MongoDBRevision } from '../Infra/TypeORM/MongoDB/MongoDBRevision'
import { MongoDBRevisionRepository } from '../Infra/TypeORM/MongoDB/MongoDBRevisionRepository'
import { SQLRevisionMetadataPersistenceMapper } from '../Mapping/Persistence/SQL/SQLRevisionMetadataPersistenceMapper'
import { SQLRevisionPersistenceMapper } from '../Mapping/Persistence/SQL/SQLRevisionPersistenceMapper'
import { MongoDBRevisionMetadataPersistenceMapper } from '../Mapping/Persistence/MongoDB/MongoDBRevisionMetadataPersistenceMapper'
import { MongoDBRevisionPersistenceMapper } from '../Mapping/Persistence/MongoDB/MongoDBRevisionPersistenceMapper'
import { RevisionHttpMapper } from '../Mapping/Http/RevisionHttpMapper'
export class ContainerConfigLoader {
async load(configuration?: {
@@ -62,6 +66,7 @@ export class ContainerConfigLoader {
env.load()
const isConfiguredForHomeServer = env.get('MODE', true) === 'home-server'
const isSecondaryDatabaseEnabled = env.get('SECONDARY_DB_ENABLED', true) === 'true'
const container = new Container({
defaultScope: 'Singleton',
@@ -101,11 +106,19 @@ export class ContainerConfigLoader {
// Map
container
.bind<MapperInterface<RevisionMetadata, TypeORMRevision>>(TYPES.Revisions_RevisionMetadataPersistenceMapper)
.toDynamicValue(() => new RevisionMetadataPersistenceMapper())
.bind<MapperInterface<RevisionMetadata, TypeORMRevision>>(TYPES.Revisions_SQLRevisionMetadataPersistenceMapper)
.toConstantValue(new SQLRevisionMetadataPersistenceMapper())
container
.bind<MapperInterface<Revision, TypeORMRevision>>(TYPES.Revisions_RevisionPersistenceMapper)
.toDynamicValue(() => new RevisionPersistenceMapper())
.bind<MapperInterface<Revision, TypeORMRevision>>(TYPES.Revisions_SQLRevisionPersistenceMapper)
.toConstantValue(new SQLRevisionPersistenceMapper())
container
.bind<MapperInterface<RevisionMetadata, MongoDBRevision>>(
TYPES.Revisions_MongoDBRevisionMetadataPersistenceMapper,
)
.toConstantValue(new MongoDBRevisionMetadataPersistenceMapper())
container
.bind<MapperInterface<Revision, MongoDBRevision>>(TYPES.Revisions_MongoDBRevisionPersistenceMapper)
.toConstantValue(new MongoDBRevisionPersistenceMapper())
// ORM
container
@@ -115,14 +128,35 @@ export class ContainerConfigLoader {
// Repositories
container
.bind<RevisionRepositoryInterface>(TYPES.Revisions_RevisionRepository)
.toDynamicValue((context: interfaces.Context) => {
return new TypeORMRevisionRepository(
context.container.get(TYPES.Revisions_ORMRevisionRepository),
context.container.get(TYPES.Revisions_RevisionMetadataPersistenceMapper),
context.container.get(TYPES.Revisions_RevisionPersistenceMapper),
context.container.get(TYPES.Revisions_Logger),
.toConstantValue(
new TypeORMRevisionRepository(
container.get<Repository<TypeORMRevision>>(TYPES.Revisions_ORMRevisionRepository),
container.get<MapperInterface<RevisionMetadata, TypeORMRevision>>(
TYPES.Revisions_SQLRevisionMetadataPersistenceMapper,
),
container.get<MapperInterface<Revision, TypeORMRevision>>(TYPES.Revisions_SQLRevisionPersistenceMapper),
container.get<winston.Logger>(TYPES.Revisions_Logger),
),
)
if (isSecondaryDatabaseEnabled) {
container
.bind<MongoRepository<MongoDBRevision>>(TYPES.Revisions_ORMMongoRevisionRepository)
.toConstantValue(appDataSource.getMongoRepository(MongoDBRevision))
container
.bind<RevisionRepositoryInterface>(TYPES.Revisions_MongoDBRevisionRepository)
.toConstantValue(
new MongoDBRevisionRepository(
container.get<MongoRepository<MongoDBRevision>>(TYPES.Revisions_ORMMongoRevisionRepository),
container.get<MapperInterface<RevisionMetadata, MongoDBRevision>>(
TYPES.Revisions_MongoDBRevisionMetadataPersistenceMapper,
),
container.get<MapperInterface<Revision, MongoDBRevision>>(TYPES.Revisions_MongoDBRevisionPersistenceMapper),
container.get<winston.Logger>(TYPES.Revisions_Logger),
),
)
})
}
container.bind<TimerInterface>(TYPES.Revisions_Timer).toDynamicValue(() => new Timer())

View File

@@ -1,25 +1,66 @@
import { DataSource, EntityTarget, LoggerOptions, ObjectLiteral, Repository } from 'typeorm'
import { DataSource, EntityTarget, LoggerOptions, MongoRepository, ObjectLiteral, Repository } from 'typeorm'
import { MysqlConnectionOptions } from 'typeorm/driver/mysql/MysqlConnectionOptions'
import { TypeORMRevision } from '../Infra/TypeORM/TypeORMRevision'
import { TypeORMRevision } from '../Infra/TypeORM/SQLRevision'
import { Env } from './Env'
import { SqliteConnectionOptions } from 'typeorm/driver/sqlite/SqliteConnectionOptions'
import { MongoDBRevision } from '../Infra/TypeORM/MongoDB/MongoDBRevision'
// NOTE(review): this block is diff residue — it interleaves pre-refactor
// (`dataSource`) and post-refactor (`_dataSource`) member/statement lines and
// retains raw `@@` hunk headers, so it is not valid TypeScript as-is. The
// `_dataSource` variants appear to be the surviving code — TODO confirm
// against the repository head.
export class AppDataSource {
private dataSource: DataSource | undefined
// Primary (SQL) connection, built lazily by the `dataSource` getter.
private _dataSource: DataSource | undefined
// Secondary (MongoDB) connection, only created when SECONDARY_DB_ENABLED=true.
private _secondaryDataSource: DataSource | undefined
constructor(private env: Env) {}
// Returns a repository bound to the primary connection; throws if the
// connection has not been constructed yet.
getRepository<Entity extends ObjectLiteral>(target: EntityTarget<Entity>): Repository<Entity> {
if (!this.dataSource) {
if (!this._dataSource) {
throw new Error('DataSource not initialized')
}
// NOTE(review): duplicated pre/post-refactor returns from the diff.
return this.dataSource.getRepository(target)
return this._dataSource.getRepository(target)
}
// Returns a Mongo repository bound to the secondary connection; requires the
// `secondaryDataSource` getter to have been evaluated first.
getMongoRepository<Entity extends ObjectLiteral>(target: EntityTarget<Entity>): MongoRepository<Entity> {
if (!this._secondaryDataSource) {
throw new Error('Secondary DataSource not initialized')
}
return this._secondaryDataSource.getMongoRepository(target)
}
// Initializes the primary connection and, when configured, the secondary
// MongoDB connection.
async initialize(): Promise<void> {
await this.dataSource.initialize()
const secondaryDataSource = this.secondaryDataSource
if (secondaryDataSource) {
await secondaryDataSource.initialize()
}
}
// Lazily builds the MongoDB DataSource from MONGO_* env vars; returns
// undefined when SECONDARY_DB_ENABLED is not the string 'true'.
get secondaryDataSource(): DataSource | undefined {
this.env.load()
if (this.env.get('SECONDARY_DB_ENABLED', true) !== 'true') {
return undefined
}
this._secondaryDataSource = new DataSource({
type: 'mongodb',
host: this.env.get('MONGO_HOST'),
authSource: 'admin',
port: parseInt(this.env.get('MONGO_PORT')),
username: this.env.get('MONGO_USERNAME'),
// second arg `true` presumably marks the variable optional — TODO confirm Env.get contract
password: this.env.get('MONGO_PASSWORD', true),
database: this.env.get('MONGO_DATABASE'),
entities: [MongoDBRevision],
retryWrites: false,
// NOTE(review): synchronize: true auto-creates collections/indexes on
// startup — confirm this is intended for production deployments.
synchronize: true,
})
return this._secondaryDataSource
}
// Lazily builds the primary SQL DataSource (MySQL or SQLite per DB_TYPE).
// NOTE(review): the body below is truncated/mangled by the diff (`@@` hunks,
// duplicated assignments, and an `await` inside a getter).
get dataSource(): DataSource {
this.env.load()
const isConfiguredForMySQL = this.env.get('DB_TYPE') === 'mysql'
@@ -74,7 +115,7 @@ export class AppDataSource {
database: inReplicaMode ? undefined : this.env.get('DB_DATABASE'),
}
this.dataSource = new DataSource(mySQLDataSourceOptions)
this._dataSource = new DataSource(mySQLDataSourceOptions)
} else {
const sqliteDataSourceOptions: SqliteConnectionOptions = {
...commonDataSourceOptions,
@@ -84,9 +125,9 @@ export class AppDataSource {
busyErrorRetry: 2000,
}
this.dataSource = new DataSource(sqliteDataSourceOptions)
this._dataSource = new DataSource(sqliteDataSourceOptions)
}
await this.dataSource.initialize()
return this._dataSource
}
}

View File

@@ -5,15 +5,20 @@ const TYPES = {
Revisions_S3: Symbol.for('Revisions_S3'),
Revisions_Env: Symbol.for('Revisions_Env'),
// Map
Revisions_RevisionMetadataPersistenceMapper: Symbol.for('Revisions_RevisionMetadataPersistenceMapper'),
Revisions_RevisionPersistenceMapper: Symbol.for('Revisions_RevisionPersistenceMapper'),
Revisions_SQLRevisionMetadataPersistenceMapper: Symbol.for('Revisions_SQLRevisionMetadataPersistenceMapper'),
Revisions_SQLRevisionPersistenceMapper: Symbol.for('Revisions_SQLRevisionPersistenceMapper'),
Revisions_MongoDBRevisionMetadataPersistenceMapper: Symbol.for('Revisions_MongoDBRevisionMetadataPersistenceMapper'),
Revisions_MongoDBRevisionPersistenceMapper: Symbol.for('Revisions_MongoDBRevisionPersistenceMapper'),
Revisions_RevisionItemStringMapper: Symbol.for('Revisions_RevisionItemStringMapper'),
Revisions_RevisionHttpMapper: Symbol.for('Revisions_RevisionHttpMapper'),
Revisions_RevisionMetadataHttpMapper: Symbol.for('Revisions_RevisionMetadataHttpMapper'),
// ORM
Revisions_ORMRevisionRepository: Symbol.for('Revisions_ORMRevisionRepository'),
// Mongo
Revisions_ORMMongoRevisionRepository: Symbol.for('Revisions_ORMMongoRevisionRepository'),
// Repositories
Revisions_RevisionRepository: Symbol.for('Revisions_RevisionRepository'),
Revisions_MongoDBRevisionRepository: Symbol.for('Revisions_MongoDBRevisionRepository'),
Revisions_DumpRepository: Symbol.for('Revisions_DumpRepository'),
// env vars
Revisions_AUTH_JWT_SECRET: Symbol.for('Revisions_AUTH_JWT_SECRET'),

View File

@@ -0,0 +1,40 @@
import { BSON } from 'mongodb'
import { Column, Entity, Index, ObjectIdColumn } from 'typeorm'
// TypeORM entity mapped to the MongoDB `revisions` collection.
// Nullable string fields mirror the SQL revision schema; `declare` is used
// because TypeORM populates the fields at hydration time.
@Entity({ name: 'revisions' })
export class MongoDBRevision {
// Document id stored as a BSON UUID (not an ObjectId and not a hex string).
@ObjectIdColumn()
declare _id: BSON.UUID
// UUID of the item this revision belongs to.
@Column()
@Index('item_uuid_on_revisions')
declare itemUuid: string
// Owning user's UUID; null for revisions not (yet) attributed to a user.
@Column()
@Index('user_uuid_on_revisions')
declare userUuid: string | null
// Encrypted item content payload.
@Column()
declare content: string | null
@Column()
declare contentType: string | null
// Identifier of the items key used to encrypt the content.
@Column()
declare itemsKeyId: string | null
// Encrypted per-item key.
@Column()
declare encItemKey: string | null
@Column()
declare authHash: string | null
// Date the revision's content was originally created (day granularity in SQL).
@Column()
declare creationDate: Date
@Column()
declare createdAt: Date
@Column()
declare updatedAt: Date
}

View File

@@ -0,0 +1,123 @@
import { MapperInterface, Uuid } from '@standardnotes/domain-core'
import { MongoRepository } from 'typeorm'
import { BSON } from 'mongodb'
import { Logger } from 'winston'
import { MongoDBRevision } from './MongoDBRevision'
import { Revision } from '../../../Domain/Revision/Revision'
import { RevisionMetadata } from '../../../Domain/Revision/RevisionMetadata'
import { RevisionRepositoryInterface } from '../../../Domain/Revision/RevisionRepositoryInterface'
/**
 * MongoDB-backed implementation of RevisionRepositoryInterface.
 *
 * Query-shape convention: TypeORM's `find`/`findOne` take find-options with a
 * `where` key, while the native-driver passthrough methods (`deleteOne`,
 * `deleteMany`, `updateOne`, `updateMany`) take a raw MongoDB filter document.
 * Mixing these shapes up makes the filter match a literal `where` field — i.e.
 * nothing.
 */
export class MongoDBRevisionRepository implements RevisionRepositoryInterface {
  constructor(
    private mongoRepository: MongoRepository<MongoDBRevision>,
    private revisionMetadataMapper: MapperInterface<RevisionMetadata, MongoDBRevision>,
    private revisionMapper: MapperInterface<Revision, MongoDBRevision>,
    private logger: Logger,
  ) {}

  /** Removes every revision owned by the given user. */
  async removeByUserUuid(userUuid: Uuid): Promise<void> {
    // Fix: deleteMany is a native-driver passthrough and expects a raw filter.
    // The previous `{ where: ... }` wrapper matched no documents, so user
    // revisions were never removed.
    await this.mongoRepository.deleteMany({ userUuid: { $eq: userUuid.value } })
  }

  /** Removes a single revision, scoped to the owning user. */
  async removeOneByUuid(revisionUuid: Uuid, userUuid: Uuid): Promise<void> {
    // Fix: deleteOne also takes a raw filter — no `where` wrapper.
    await this.mongoRepository.deleteOne({
      $and: [
        { _id: { $eq: BSON.UUID.createFromHexString(revisionUuid.value) } },
        { userUuid: { $eq: userUuid.value } },
      ],
    })
  }

  /**
   * Finds a revision by id, scoped to the owning user.
   * Returns null when not found.
   */
  async findOneByUuid(revisionUuid: Uuid, userUuid: Uuid): Promise<Revision | null> {
    const persistence = await this.mongoRepository.findOne({
      where: {
        $and: [
          { _id: { $eq: BSON.UUID.createFromHexString(revisionUuid.value) } },
          { userUuid: { $eq: userUuid.value } },
        ],
      },
    })
    if (persistence === null) {
      return null
    }

    return this.revisionMapper.toDomain(persistence)
  }

  /**
   * Returns all revisions of an item. Documents that fail domain mapping are
   * logged and skipped rather than failing the whole read.
   */
  async findByItemUuid(itemUuid: Uuid): Promise<Revision[]> {
    const persistence = await this.mongoRepository.find({
      where: {
        itemUuid: { $eq: itemUuid.value },
      },
    })

    const revisions: Revision[] = []
    for (const revision of persistence) {
      try {
        revisions.push(this.revisionMapper.toDomain(revision))
      } catch (error) {
        this.logger.error(`Failed to map revision ${revision._id.toHexString()} to domain: ${(error as Error).message}`)
      }
    }

    return revisions
  }

  /**
   * Returns revision metadata (no content) for an item, newest first,
   * scoped to the owning user. Unmappable documents are logged and skipped.
   */
  async findMetadataByItemId(itemUuid: Uuid, userUuid: Uuid): Promise<RevisionMetadata[]> {
    const persistence = await this.mongoRepository.find({
      select: ['_id', 'contentType', 'createdAt', 'updatedAt'],
      where: {
        $and: [{ itemUuid: { $eq: itemUuid.value } }, { userUuid: { $eq: userUuid.value } }],
      },
      order: {
        createdAt: 'DESC',
      },
    })

    const revisions: RevisionMetadata[] = []
    for (const revision of persistence) {
      try {
        revisions.push(this.revisionMetadataMapper.toDomain(revision))
      } catch (error) {
        this.logger.error(`Failed to map revision ${revision._id.toHexString()} to domain: ${(error as Error).message}`)
      }
    }

    return revisions
  }

  /** Re-assigns ownership of all of an item's revisions to the given user. */
  async updateUserUuid(itemUuid: Uuid, userUuid: Uuid): Promise<void> {
    await this.mongoRepository.updateMany(
      {
        itemUuid: { $eq: itemUuid.value },
      },
      {
        $set: {
          userUuid: userUuid.value,
        },
      },
    )
  }

  /**
   * Upserts a revision keyed by its id. The `_id` is excluded from `$set`
   * because it is immutable in MongoDB and supplied by the filter/upsert.
   */
  async save(revision: Revision): Promise<Revision> {
    const persistence = this.revisionMapper.toProjection(revision)
    const { _id, ...rest } = persistence

    await this.mongoRepository.updateOne(
      { _id: { $eq: _id } },
      {
        $set: rest,
      },
      { upsert: true },
    )

    return revision
  }
}

View File

@@ -5,7 +5,7 @@ import { Logger } from 'winston'
import { Revision } from '../../Domain/Revision/Revision'
import { RevisionMetadata } from '../../Domain/Revision/RevisionMetadata'
import { RevisionRepositoryInterface } from '../../Domain/Revision/RevisionRepositoryInterface'
import { TypeORMRevision } from './TypeORMRevision'
import { TypeORMRevision } from './SQLRevision'
export class TypeORMRevisionRepository implements RevisionRepositoryInterface {
constructor(

View File

@@ -1,6 +1,6 @@
import { MapperInterface, Dates, Uuid, ContentType } from '@standardnotes/domain-core'
import { Revision } from '../Domain/Revision/Revision'
import { Revision } from '../../Domain/Revision/Revision'
export class RevisionItemStringMapper implements MapperInterface<Revision, string> {
toDomain(projection: string): Revision {

View File

@@ -1,6 +1,6 @@
import { MapperInterface } from '@standardnotes/domain-core'
import { Revision } from '../Domain/Revision/Revision'
import { Revision } from '../../Domain/Revision/Revision'
export class RevisionHttpMapper
implements

View File

@@ -1,6 +1,6 @@
import { MapperInterface, SyncUseCaseInterface } from '@standardnotes/domain-core'
import { RevisionMetadata } from '../Domain/Revision/RevisionMetadata'
import { RevisionMetadata } from '../../Domain/Revision/RevisionMetadata'
export class RevisionMetadataHttpMapper
implements

View File

@@ -0,0 +1,41 @@
import { MapperInterface, Dates, UniqueEntityId, ContentType } from '@standardnotes/domain-core'
import { RevisionMetadata } from '../../../Domain/Revision/RevisionMetadata'
import { MongoDBRevision } from '../../../Infra/TypeORM/MongoDB/MongoDBRevision'
/**
 * Maps MongoDBRevision documents to RevisionMetadata domain objects.
 * Read-only: persistence mapping (toProjection) is intentionally unsupported,
 * as metadata is only ever projected out of full revision documents.
 */
export class MongoDBRevisionMetadataPersistenceMapper implements MapperInterface<RevisionMetadata, MongoDBRevision> {
  toDomain(projection: MongoDBRevision): RevisionMetadata {
    const contentTypeOrError = ContentType.create(projection.contentType)
    if (contentTypeOrError.isFailed()) {
      throw new Error(`Could not create content type: ${contentTypeOrError.getError()}`)
    }
    const contentType = contentTypeOrError.getValue()

    // The driver may hydrate date fields as strings depending on how the
    // document was written; normalize to Date before building the value object.
    const createdAt = projection.createdAt instanceof Date ? projection.createdAt : new Date(projection.createdAt)
    const updatedAt = projection.updatedAt instanceof Date ? projection.updatedAt : new Date(projection.updatedAt)
    const datesOrError = Dates.create(createdAt, updatedAt)
    if (datesOrError.isFailed()) {
      throw new Error(`Could not create dates: ${datesOrError.getError()}`)
    }
    const dates = datesOrError.getValue()

    const revisionMetadataOrError = RevisionMetadata.create(
      {
        contentType,
        dates,
      },
      // Domain ids are hex-string UUIDs; the document stores a BSON UUID.
      new UniqueEntityId(projection._id.toHexString()),
    )
    if (revisionMetadataOrError.isFailed()) {
      // Fix: corrected typo in the error message ("metdata" -> "metadata").
      throw new Error(`Could not create revision metadata: ${revisionMetadataOrError.getError()}`)
    }

    return revisionMetadataOrError.getValue()
  }

  toProjection(_domain: RevisionMetadata): MongoDBRevision {
    throw new Error('Method not implemented.')
  }
}

View File

@@ -0,0 +1,74 @@
import { MapperInterface, Dates, UniqueEntityId, Uuid, ContentType } from '@standardnotes/domain-core'
import { MongoDBRevision } from '../../../Infra/TypeORM/MongoDB/MongoDBRevision'
import { Revision } from '../../../Domain/Revision/Revision'
import { BSON } from 'mongodb'
/**
 * Bidirectional mapper between Revision domain objects and MongoDBRevision
 * persistence documents.
 */
export class MongoDBRevisionPersistenceMapper implements MapperInterface<Revision, MongoDBRevision> {
  toDomain(projection: MongoDBRevision): Revision {
    const contentTypeOrError = ContentType.create(projection.contentType)
    if (contentTypeOrError.isFailed()) {
      // Fix: error messages previously said "typeorm revision" — a copy-paste
      // from the SQL mapper; corrected to name this (MongoDB) mapper.
      throw new Error(`Could not map mongodb revision to domain revision: ${contentTypeOrError.getError()}`)
    }
    const contentType = contentTypeOrError.getValue()

    // Consistency with MongoDBRevisionMetadataPersistenceMapper: the driver
    // may hydrate date fields as strings; normalize to Date first.
    const createdAt = projection.createdAt instanceof Date ? projection.createdAt : new Date(projection.createdAt)
    const updatedAt = projection.updatedAt instanceof Date ? projection.updatedAt : new Date(projection.updatedAt)
    const datesOrError = Dates.create(createdAt, updatedAt)
    if (datesOrError.isFailed()) {
      throw new Error(`Could not map mongodb revision to domain revision: ${datesOrError.getError()}`)
    }
    const dates = datesOrError.getValue()

    const itemUuidOrError = Uuid.create(projection.itemUuid)
    if (itemUuidOrError.isFailed()) {
      throw new Error(`Could not map mongodb revision to domain revision: ${itemUuidOrError.getError()}`)
    }
    const itemUuid = itemUuidOrError.getValue()

    // userUuid is nullable — unattributed revisions stay unowned in the domain.
    let userUuid = null
    if (projection.userUuid !== null) {
      const userUuidOrError = Uuid.create(projection.userUuid)
      if (userUuidOrError.isFailed()) {
        throw new Error(`Could not map mongodb revision to domain revision: ${userUuidOrError.getError()}`)
      }
      userUuid = userUuidOrError.getValue()
    }

    const revisionOrError = Revision.create(
      {
        authHash: projection.authHash,
        content: projection.content,
        contentType,
        // passed through as-is; presumably already a Date — TODO confirm
        creationDate: projection.creationDate,
        encItemKey: projection.encItemKey,
        itemsKeyId: projection.itemsKeyId,
        itemUuid,
        userUuid,
        dates,
      },
      // Domain id is the hex form of the document's BSON UUID.
      new UniqueEntityId(projection._id.toHexString()),
    )
    if (revisionOrError.isFailed()) {
      throw new Error(`Could not map mongodb revision to domain revision: ${revisionOrError.getError()}`)
    }

    return revisionOrError.getValue()
  }

  toProjection(domain: Revision): MongoDBRevision {
    const mongoDBRevision = new MongoDBRevision()

    mongoDBRevision.authHash = domain.props.authHash
    mongoDBRevision.content = domain.props.content
    mongoDBRevision.contentType = domain.props.contentType.value
    mongoDBRevision.createdAt = domain.props.dates.createdAt
    mongoDBRevision.updatedAt = domain.props.dates.updatedAt
    mongoDBRevision.creationDate = domain.props.creationDate
    mongoDBRevision.encItemKey = domain.props.encItemKey
    mongoDBRevision.itemUuid = domain.props.itemUuid.value
    mongoDBRevision.itemsKeyId = domain.props.itemsKeyId
    mongoDBRevision.userUuid = domain.props.userUuid ? domain.props.userUuid.value : null
    // Convert the domain's hex-string id back into a BSON UUID for storage.
    mongoDBRevision._id = BSON.UUID.createFromHexString(domain.id.toString())

    return mongoDBRevision
  }
}

View File

@@ -1,9 +1,9 @@
import { MapperInterface, Dates, UniqueEntityId, ContentType } from '@standardnotes/domain-core'
import { RevisionMetadata } from '../Domain/Revision/RevisionMetadata'
import { TypeORMRevision } from '../Infra/TypeORM/TypeORMRevision'
import { RevisionMetadata } from '../../../Domain/Revision/RevisionMetadata'
import { TypeORMRevision } from '../../../Infra/TypeORM/SQLRevision'
export class RevisionMetadataPersistenceMapper implements MapperInterface<RevisionMetadata, TypeORMRevision> {
export class SQLRevisionMetadataPersistenceMapper implements MapperInterface<RevisionMetadata, TypeORMRevision> {
toDomain(projection: TypeORMRevision): RevisionMetadata {
const contentTypeOrError = ContentType.create(projection.contentType)
if (contentTypeOrError.isFailed()) {

View File

@@ -1,8 +1,9 @@
import { MapperInterface, Dates, UniqueEntityId, Uuid, ContentType } from '@standardnotes/domain-core'
import { Revision } from '../Domain/Revision/Revision'
import { TypeORMRevision } from '../Infra/TypeORM/TypeORMRevision'
export class RevisionPersistenceMapper implements MapperInterface<Revision, TypeORMRevision> {
import { Revision } from '../../../Domain/Revision/Revision'
import { TypeORMRevision } from '../../../Infra/TypeORM/SQLRevision'
export class SQLRevisionPersistenceMapper implements MapperInterface<Revision, TypeORMRevision> {
toDomain(projection: TypeORMRevision): Revision {
const contentTypeOrError = ContentType.create(projection.contentType)
if (contentTypeOrError.isFailed()) {

View File

@@ -1,4 +1,4 @@
MODE=microservice # microservice | home-server | self-hosted
LOG_LEVEL=info
NODE_ENV=development
VERSION=development

View File

@@ -3,6 +3,38 @@
All notable changes to this project will be documented in this file.
See [Conventional Commits](https://conventionalcommits.org) for commit guidelines.
# [1.86.0](https://github.com/standardnotes/syncing-server-js/compare/@standardnotes/syncing-server@1.85.1...@standardnotes/syncing-server@1.86.0) (2023-08-29)
### Features
* **revisions:** add MongoDB support ([#715](https://github.com/standardnotes/syncing-server-js/issues/715)) ([2646b75](https://github.com/standardnotes/syncing-server-js/commit/2646b756a95c425bd406622bfe3a9aa4c490d537))
## [1.85.1](https://github.com/standardnotes/syncing-server-js/compare/@standardnotes/syncing-server@1.85.0...@standardnotes/syncing-server@1.85.1) (2023-08-28)
### Bug Fixes
* allow self hosted to use new model of items ([#714](https://github.com/standardnotes/syncing-server-js/issues/714)) ([aef9254](https://github.com/standardnotes/syncing-server-js/commit/aef9254713560c00a90a3e84e3cd94417e8f30d2))
# [1.85.0](https://github.com/standardnotes/syncing-server-js/compare/@standardnotes/syncing-server@1.84.2...@standardnotes/syncing-server@1.85.0) (2023-08-28)
### Features
* **syncing-server:** distinguish between legacy and current items model usage ([#712](https://github.com/standardnotes/syncing-server-js/issues/712)) ([bf8f91f](https://github.com/standardnotes/syncing-server-js/commit/bf8f91f83d9d206ebfbcd9b2c9318786bd0040da))
* **syncing-server:** turn mysql items model into legacy ([#711](https://github.com/standardnotes/syncing-server-js/issues/711)) ([effdfeb](https://github.com/standardnotes/syncing-server-js/commit/effdfebc193c66d830bb4d516d408a9c126f3d62))
## [1.84.2](https://github.com/standardnotes/syncing-server-js/compare/@standardnotes/syncing-server@1.84.1...@standardnotes/syncing-server@1.84.2) (2023-08-25)
### Bug Fixes
* **syncing-server:** items sorting in MongoDB ([#710](https://github.com/standardnotes/syncing-server-js/issues/710)) ([152a5cb](https://github.com/standardnotes/syncing-server-js/commit/152a5cbd27375adbad8b070d1778b256a6dce1f4))
* **syncing-server:** logs severity on creating duplicates ([1488763](https://github.com/standardnotes/syncing-server-js/commit/14887631153b78117ec7433353bb32709209a617))
## [1.84.1](https://github.com/standardnotes/syncing-server-js/compare/@standardnotes/syncing-server@1.84.0...@standardnotes/syncing-server@1.84.1) (2023-08-25)
### Bug Fixes
* **syncing-server:** handling mixed values of deleted flag in MongoDB ([#708](https://github.com/standardnotes/syncing-server-js/issues/708)) ([3ba673b](https://github.com/standardnotes/syncing-server-js/commit/3ba673b424ae3bb6b64b2360323d7373636c6cd5))
# [1.84.0](https://github.com/standardnotes/syncing-server-js/compare/@standardnotes/syncing-server@1.83.0...@standardnotes/syncing-server@1.84.0) (2023-08-24)
### Features

View File

@@ -0,0 +1,50 @@
import { MigrationInterface, QueryRunner } from 'typeorm'
// Initial schema migration: creates the `items`, `revisions` and
// `item_revisions` tables (idempotent via IF NOT EXISTS), after first
// repairing timestamp columns left behind by a legacy, pre-TypeORM schema.
// Applied migrations must remain byte-exact; only comments are added here.
export class initDatabase1606470249552 implements MigrationInterface {
name = 'initDatabase1606470249552'
public async up(queryRunner: QueryRunner): Promise<void> {
// Legacy installs may already have `items` without the *_timestamp columns;
// backfill them before the idempotent CREATEs below.
await this.fixUpdatedAtTimestampsFromLegacyMigration(queryRunner)
await queryRunner.query(
'CREATE TABLE IF NOT EXISTS `items` (`uuid` varchar(36) NOT NULL, `duplicate_of` varchar(36) NULL, `items_key_id` varchar(255) NULL, `content` mediumtext NULL, `content_type` varchar(255) NULL, `enc_item_key` text NULL, `auth_hash` varchar(255) NULL, `user_uuid` varchar(36) NULL, `deleted` tinyint(1) NULL DEFAULT 0, `last_user_agent` text NULL, `created_at` datetime(6) NOT NULL, `updated_at` datetime(6) NOT NULL, `created_at_timestamp` BIGINT NOT NULL, `updated_at_timestamp` BIGINT NOT NULL, INDEX `index_items_on_content_type` (`content_type`), INDEX `index_items_on_user_uuid` (`user_uuid`), INDEX `index_items_on_deleted` (`deleted`), INDEX `updated_at_timestamp` (`updated_at_timestamp`), INDEX `index_items_on_updated_at` (`updated_at`), INDEX `user_uuid_and_updated_at_timestamp_and_created_at_timestamp` (`user_uuid`, `updated_at_timestamp`, `created_at_timestamp`), INDEX `index_items_on_user_uuid_and_updated_at_and_created_at` (`user_uuid`, `updated_at`, `created_at`), INDEX `index_items_on_user_uuid_and_content_type` (`user_uuid`, `content_type`), PRIMARY KEY (`uuid`)) ENGINE=InnoDB',
)
await queryRunner.query(
'CREATE TABLE IF NOT EXISTS `revisions` (`uuid` varchar(36) NOT NULL, `item_uuid` varchar(36) NULL, `content` mediumtext NULL, `content_type` varchar(255) NULL, `items_key_id` varchar(255) NULL, `enc_item_key` text NULL, `auth_hash` varchar(255) NULL, `creation_date` date NULL, `created_at` datetime(6) NULL, `updated_at` datetime(6) NULL, INDEX `index_revisions_on_item_uuid` (`item_uuid`), INDEX `index_revisions_on_creation_date` (`creation_date`), INDEX `index_revisions_on_created_at` (`created_at`), PRIMARY KEY (`uuid`)) ENGINE=InnoDB',
)
await queryRunner.query(
'CREATE TABLE IF NOT EXISTS `item_revisions` (`uuid` varchar(36) NOT NULL, `item_uuid` varchar(36) NOT NULL, `revision_uuid` varchar(36) NOT NULL, INDEX `index_item_revisions_on_item_uuid` (`item_uuid`), INDEX `index_item_revisions_on_revision_uuid` (`revision_uuid`), PRIMARY KEY (`uuid`)) ENGINE=InnoDB',
)
}
// Irreversible by design: the initial schema is never rolled back.
public async down(_queryRunner: QueryRunner): Promise<void> {
return
}
// Adds the BIGINT microsecond *_timestamp columns plus their indexes to a
// pre-existing legacy `items` table; no-ops when the table is absent or the
// columns already exist.
private async fixUpdatedAtTimestampsFromLegacyMigration(queryRunner: QueryRunner): Promise<void> {
const itemsTableExistsQueryResult = await queryRunner.manager.query(
'SELECT COUNT(*) as count FROM information_schema.tables WHERE table_schema = DATABASE() AND table_name = "items"',
)
const itemsTableExists = itemsTableExistsQueryResult[0].count === 1
if (!itemsTableExists) {
return
}
const updatedAtTimestampColumnExistsQueryResult = await queryRunner.manager.query(
'SELECT COUNT(*) as count FROM information_schema.columns WHERE table_schema = DATABASE() AND table_name = "items" AND column_name = "updated_at_timestamp"',
)
const updatedAtTimestampColumnExists = updatedAtTimestampColumnExistsQueryResult[0].count === 1
if (updatedAtTimestampColumnExists) {
return
}
await queryRunner.query('ALTER TABLE `items` ADD COLUMN `updated_at_timestamp` BIGINT NOT NULL')
await queryRunner.query('ALTER TABLE `items` ADD COLUMN `created_at_timestamp` BIGINT NOT NULL')
await queryRunner.query(
'ALTER TABLE `items` ADD INDEX `user_uuid_and_updated_at_timestamp_and_created_at_timestamp` (`user_uuid`, `updated_at_timestamp`, `created_at_timestamp`)',
)
await queryRunner.query('ALTER TABLE `items` ADD INDEX `updated_at_timestamp` (`updated_at_timestamp`)')
// Backfill: seconds-resolution datetimes scaled to microseconds.
await queryRunner.query('UPDATE `items` SET `created_at_timestamp` = UNIX_TIMESTAMP(`created_at`) * 1000000')
await queryRunner.query('UPDATE `items` SET `updated_at_timestamp` = UNIX_TIMESTAMP(`updated_at`) * 1000000')
}
}

View File

@@ -0,0 +1,15 @@
import { MigrationInterface, QueryRunner } from 'typeorm'
// Migration: creates the `extension_settings` table (per-extension email
// muting). Applied migrations must remain byte-exact; comments only.
export class addExtensionSettings1617615657558 implements MigrationInterface {
name = 'addExtensionSettings1617615657558'
public async up(queryRunner: QueryRunner): Promise<void> {
await queryRunner.query(
'CREATE TABLE IF NOT EXISTS `extension_settings` (`uuid` varchar(36) NOT NULL, `extension_id` varchar(255) NULL, `mute_emails` tinyint(1) NULL DEFAULT 0, `created_at` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP, `updated_at` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP, INDEX `index_extension_settings_on_extension_id` (`extension_id`), PRIMARY KEY (`uuid`)) ENGINE=InnoDB',
)
}
// Intentionally irreversible: down is a no-op.
public async down(_queryRunner: QueryRunner): Promise<void> {
return
}
}

View File

@@ -0,0 +1,27 @@
import { MigrationInterface, QueryRunner } from 'typeorm'
// Migration: drops two unused indexes on `items`, guarding each DROP with a
// SHOW INDEX existence check so the migration is safe to re-run.
export class dropUnusedIndexes1629964808297 implements MigrationInterface {
name = 'dropUnusedIndexes1629964808297'
public async up(queryRunner: QueryRunner): Promise<void> {
const indexItemsOnUserAndTimestamp = await queryRunner.manager.query(
'SHOW INDEX FROM `items` where `key_name` = "index_items_on_user_uuid_and_updated_at_and_created_at"',
)
const indexItemsOnUserAndTimestampExists = indexItemsOnUserAndTimestamp && indexItemsOnUserAndTimestamp.length > 0
if (indexItemsOnUserAndTimestampExists) {
await queryRunner.query('ALTER TABLE `items` DROP INDEX index_items_on_user_uuid_and_updated_at_and_created_at')
}
const indexItemsOnUpdatedAt = await queryRunner.manager.query(
'SHOW INDEX FROM `items` where `key_name` = "index_items_on_updated_at"',
)
const indexItemsOnUpdatedAtExists = indexItemsOnUpdatedAt && indexItemsOnUpdatedAt.length > 0
if (indexItemsOnUpdatedAtExists) {
await queryRunner.query('ALTER TABLE `items` DROP INDEX index_items_on_updated_at')
}
}
// Intentionally irreversible: the dropped indexes are not recreated.
public async down(): Promise<void> {
return
}
}

View File

@@ -0,0 +1,11 @@
import { MigrationInterface, QueryRunner } from 'typeorm'
// Migration: adds a composite (user_uuid, deleted) index on `items` to speed
// up the integrity-hash calculation query.
export class refactorCalculatingIntegrityHash1630318893601 implements MigrationInterface {
public async up(queryRunner: QueryRunner): Promise<void> {
await queryRunner.query('ALTER TABLE `items` ADD INDEX `user_uuid_and_deleted` (`user_uuid`, `deleted`)')
}
// Intentionally irreversible: down is a no-op.
public async down(): Promise<void> {
return
}
}

View File

@@ -0,0 +1,12 @@
import { MigrationInterface, QueryRunner } from 'typeorm'
// Migration: makes `items.content_type` NOT NULL, first backfilling NULLs
// with the sentinel value "Unknown" so the column change cannot fail.
export class restrictContentType1630417724617 implements MigrationInterface {
public async up(queryRunner: QueryRunner): Promise<void> {
await queryRunner.query('UPDATE `items` SET content_type = "Unknown" WHERE `content_type` IS NULL')
await queryRunner.query('ALTER TABLE `items` CHANGE `content_type` `content_type` varchar(255) NOT NULL')
}
// Intentionally irreversible: down is a no-op.
public async down(): Promise<void> {
return
}
}

View File

@@ -0,0 +1,26 @@
import { MigrationInterface, QueryRunner } from 'typeorm'
import { v4 } from 'uuid'
// Migration: for every item_revisions row pointing at a revision owned by a
// different item (the result of item duplication), clones the revision for
// the proper item and repoints the join row at the clone.
export class addRevisionForDuplicatedItems1631529502150 implements MigrationInterface {
public async up(queryRunner: QueryRunner): Promise<void> {
// Join rows whose revision belongs to a different item than the join says.
const itemRevisions = await queryRunner.manager.query(
'SELECT r.uuid as originalRevisionUuid, ir.item_uuid as properItemUuid, ir.uuid as relationUuid FROM revisions r INNER JOIN item_revisions ir ON ir.revision_uuid = r.uuid AND ir.item_uuid <> r.item_uuid',
)
for (const itemRevision of itemRevisions) {
const revisionUuid = v4()
// NOTE(review): values are interpolated directly into SQL. They originate
// from this database's own uuid columns / uuid.v4(), so injection risk is
// low, but parameterized queries would be safer. Left byte-exact because
// applied migrations must not change.
await queryRunner.manager.query(
`INSERT INTO revisions (uuid, item_uuid, content, content_type, items_key_id, enc_item_key, auth_hash, creation_date, created_at, updated_at) SELECT "${revisionUuid}", "${itemRevision['properItemUuid']}", content, content_type, items_key_id, enc_item_key, auth_hash, creation_date, created_at, updated_at FROM revisions WHERE uuid = "${itemRevision['originalRevisionUuid']}"`,
)
await queryRunner.manager.query(
`UPDATE item_revisions SET revision_uuid = "${revisionUuid}" WHERE uuid = "${itemRevision['relationUuid']}"`,
)
}
}
// Intentionally irreversible: the cloned revisions are not removed.
public async down(): Promise<void> {
return
}
}

View File

@@ -0,0 +1,13 @@
import { MigrationInterface, QueryRunner } from 'typeorm'
// Migration: drops the `item_revisions` joining table, made redundant once
// revisions carry their item relation directly. down recreates the empty
// table structure (data is not restored).
export class dropItemRevisionsJoiningTable1631530260504 implements MigrationInterface {
public async up(queryRunner: QueryRunner): Promise<void> {
await queryRunner.query('DROP TABLE `item_revisions`')
}
public async down(queryRunner: QueryRunner): Promise<void> {
await queryRunner.query(
'CREATE TABLE `item_revisions` (`uuid` varchar(36) NOT NULL, `item_uuid` varchar(36) NOT NULL, `revision_uuid` varchar(36) NOT NULL, INDEX `index_item_revisions_on_item_uuid` (`item_uuid`), INDEX `index_item_revisions_on_revision_uuid` (`revision_uuid`), PRIMARY KEY (`uuid`)) ENGINE=InnoDB',
)
}
}

View File

@@ -0,0 +1,36 @@
import { MigrationInterface, QueryRunner } from 'typeorm'
// Migration: data cleanup before foreign keys are introduced — deletes items
// whose user no longer exists (when a `users` table is present in this
// schema), then revisions whose item no longer exists, then NULL-keyed rows.
export class cleanupOrphanItemsAndRevisions1632219307742 implements MigrationInterface {
public async up(queryRunner: QueryRunner): Promise<void> {
// `users` lives in the same schema only for bundled deployments; guard.
const usersTableExistsQueryResult = await queryRunner.manager.query(
'SELECT COUNT(*) as count FROM information_schema.tables WHERE table_schema = DATABASE() AND table_name = "users"',
)
const usersTableExists = usersTableExistsQueryResult[0].count === 1
if (usersTableExists) {
const orphanedItems = await queryRunner.manager.query(
'SELECT i.uuid as uuid FROM items i LEFT JOIN users u ON i.user_uuid = u.uuid WHERE u.uuid IS NULL',
)
// NOTE(review): uuids are interpolated into SQL; they come from this
// database's own uuid columns, so risk is low — left byte-exact because
// applied migrations must not change.
for (const orphanedItem of orphanedItems) {
await queryRunner.manager.query(`DELETE FROM revisions WHERE item_uuid = "${orphanedItem['uuid']}"`)
await queryRunner.manager.query(`DELETE FROM items WHERE uuid = "${orphanedItem['uuid']}"`)
}
}
await queryRunner.manager.query('DELETE FROM items WHERE user_uuid IS NULL')
const orphanedRevisions = await queryRunner.manager.query(
'SELECT r.uuid as uuid FROM revisions r LEFT JOIN items i ON r.item_uuid = i.uuid WHERE i.uuid IS NULL',
)
for (const orphanedRevision of orphanedRevisions) {
await queryRunner.manager.query(`DELETE FROM revisions WHERE uuid = "${orphanedRevision['uuid']}"`)
}
await queryRunner.manager.query('DELETE FROM revisions WHERE item_uuid IS NULL')
}
// Intentionally irreversible: deleted orphans cannot be restored.
public async down(): Promise<void> {
return
}
}

View File

@@ -0,0 +1,28 @@
import { MigrationInterface, QueryRunner } from 'typeorm'
// Migration: hardens the revisions->items relation — makes the key columns
// NOT NULL and adds an ON DELETE CASCADE foreign key, replacing the plain
// item_uuid index (guarded, so re-runs do not fail on a missing index).
export class addRevisionsItemsRelation1632221263106 implements MigrationInterface {
name = 'addRevisionsItemsRelation1632221263106'
public async up(queryRunner: QueryRunner): Promise<void> {
const indexRevisionsOnItemUuid = await queryRunner.manager.query(
'SHOW INDEX FROM `revisions` where `key_name` = "index_revisions_on_item_uuid"',
)
const indexRevisionsOnItemUuidExists = indexRevisionsOnItemUuid && indexRevisionsOnItemUuid.length > 0
if (indexRevisionsOnItemUuidExists) {
await queryRunner.query('DROP INDEX `index_revisions_on_item_uuid` ON `revisions`')
}
await queryRunner.query('ALTER TABLE `revisions` CHANGE `item_uuid` `item_uuid` varchar(36) NOT NULL')
await queryRunner.query('ALTER TABLE `items` CHANGE `user_uuid` `user_uuid` varchar(36) NOT NULL')
await queryRunner.query(
'ALTER TABLE `revisions` ADD CONSTRAINT `FK_ab3b92e54701fe3010022a31d90` FOREIGN KEY (`item_uuid`) REFERENCES `items`(`uuid`) ON DELETE CASCADE ON UPDATE NO ACTION',
)
}
// Reverses each step: drops the FK, restores nullability and the old index.
public async down(queryRunner: QueryRunner): Promise<void> {
await queryRunner.query('ALTER TABLE `revisions` DROP FOREIGN KEY `FK_ab3b92e54701fe3010022a31d90`')
await queryRunner.query('ALTER TABLE `items` CHANGE `user_uuid` `user_uuid` varchar(36) NULL')
await queryRunner.query('ALTER TABLE `revisions` CHANGE `item_uuid` `item_uuid` varchar(36) NULL')
await queryRunner.query('CREATE INDEX `index_revisions_on_item_uuid` ON `revisions` (`item_uuid`)')
}
}

View File

@@ -0,0 +1,13 @@
import { MigrationInterface, QueryRunner } from 'typeorm'
// Migration: adds a nullable unsigned `content_size` column to `items` for
// tracking payload size.
export class addItemContentSize1637738491169 implements MigrationInterface {
name = 'addItemContentSize1637738491169'
public async up(queryRunner: QueryRunner): Promise<void> {
await queryRunner.query('ALTER TABLE `items` ADD `content_size` INT UNSIGNED NULL')
}
public async down(queryRunner: QueryRunner): Promise<void> {
await queryRunner.query('ALTER TABLE `items` DROP COLUMN `content_size`')
}
}

View File

@@ -0,0 +1,11 @@
import { MigrationInterface, QueryRunner } from 'typeorm'
// Migration: drops the `extension_settings` table introduced by
// addExtensionSettings1617615657558, which is no longer used.
export class removeExtensionSettings1639134926025 implements MigrationInterface {
public async up(queryRunner: QueryRunner): Promise<void> {
await queryRunner.query('DROP TABLE `extension_settings`')
}
// Intentionally irreversible: down is a no-op.
public async down(): Promise<void> {
return
}
}

View File

@@ -0,0 +1,11 @@
import { MigrationInterface, QueryRunner } from 'typeorm'
// Migration: purges legacy "SF|Extension" items, a content type no longer
// supported by the sync service.
export class removeSfExtensionItems1642073387521 implements MigrationInterface {
public async up(queryRunner: QueryRunner): Promise<void> {
await queryRunner.manager.query('DELETE FROM items WHERE content_type = "SF|Extension"')
}
// Intentionally irreversible: deleted rows cannot be restored.
public async down(): Promise<void> {
return
}
}

View File

@@ -0,0 +1,11 @@
import { MigrationInterface, QueryRunner } from 'typeorm'
// Migration: drops the `last_user_agent` column from `items` (privacy /
// unused-data cleanup).
export class removeUserAgent1647501696205 implements MigrationInterface {
public async up(queryRunner: QueryRunner): Promise<void> {
await queryRunner.query('ALTER TABLE `items` DROP COLUMN `last_user_agent`')
}
// Intentionally irreversible: the dropped column is not recreated.
public async down(): Promise<void> {
return
}
}

View File

@@ -0,0 +1,13 @@
import { MigrationInterface, QueryRunner } from 'typeorm'
// Migration: adds a nullable `updated_with_session` uuid column to `items`,
// recording which session last modified the item.
export class addUpdatedWithSession1654518291191 implements MigrationInterface {
name = 'addUpdatedWithSession1654518291191'
public async up(queryRunner: QueryRunner): Promise<void> {
await queryRunner.query('ALTER TABLE `items` ADD `updated_with_session` varchar(36) NULL')
}
public async down(queryRunner: QueryRunner): Promise<void> {
await queryRunner.query('ALTER TABLE `items` DROP COLUMN `updated_with_session`')
}
}

View File

@@ -0,0 +1,16 @@
import { MigrationInterface, QueryRunner } from 'typeorm'
// Migration: creates the `notifications` table (typed payloads per user,
// bigint microsecond timestamps), indexed by user_uuid.
export class AddNotifications1689671563304 implements MigrationInterface {
name = 'AddNotifications1689671563304'
public async up(queryRunner: QueryRunner): Promise<void> {
await queryRunner.query(
'CREATE TABLE IF NOT EXISTS `notifications` (`uuid` varchar(36) NOT NULL, `user_uuid` varchar(36) NOT NULL, `type` varchar(36) NOT NULL, `payload` text NOT NULL, `created_at_timestamp` bigint NOT NULL, `updated_at_timestamp` bigint NOT NULL, INDEX `index_notifications_on_user_uuid` (`user_uuid`), PRIMARY KEY (`uuid`)) ENGINE=InnoDB',
)
}
// Reverses creation: drops the index, then the table.
public async down(queryRunner: QueryRunner): Promise<void> {
await queryRunner.query('DROP INDEX `index_notifications_on_user_uuid` ON `notifications`')
await queryRunner.query('DROP TABLE `notifications`')
}
}

View File

@@ -0,0 +1,25 @@
import { MigrationInterface, QueryRunner } from 'typeorm'
// Migration: creates the `shared_vault_associations` and
// `key_system_associations` tables linking items to shared vaults / key
// systems, each indexed by both sides of the association.
export class AddSharedVaultAndKeySystemAssociations1689671563305 implements MigrationInterface {
name = 'AddSharedVaultAndKeySystemAssociations1689671563305'
public async up(queryRunner: QueryRunner): Promise<void> {
await queryRunner.query(
'CREATE TABLE `shared_vault_associations` (`uuid` varchar(36) NOT NULL, `shared_vault_uuid` varchar(36) NOT NULL, `item_uuid` varchar(36) NOT NULL, `last_edited_by` varchar(36) NOT NULL, `created_at_timestamp` bigint NOT NULL, `updated_at_timestamp` bigint NOT NULL, INDEX `shared_vault_uuid_on_shared_vault_associations` (`shared_vault_uuid`), INDEX `item_uuid_on_shared_vault_associations` (`item_uuid`), PRIMARY KEY (`uuid`)) ENGINE=InnoDB',
)
await queryRunner.query(
'CREATE TABLE `key_system_associations` (`uuid` varchar(36) NOT NULL, `key_system_uuid` varchar(36) NOT NULL, `item_uuid` varchar(36) NOT NULL, `created_at_timestamp` bigint NOT NULL, `updated_at_timestamp` bigint NOT NULL, INDEX `key_system_uuid_on_key_system_associations` (`key_system_uuid`), INDEX `item_uuid_on_key_system_associations` (`item_uuid`), PRIMARY KEY (`uuid`)) ENGINE=InnoDB',
)
}
// Reverses creation in dependency order: indexes first, then tables.
public async down(queryRunner: QueryRunner): Promise<void> {
await queryRunner.query('DROP INDEX `item_uuid_on_key_system_associations` ON `key_system_associations`')
await queryRunner.query('DROP INDEX `key_system_uuid_on_key_system_associations` ON `key_system_associations`')
await queryRunner.query('DROP TABLE `key_system_associations`')
await queryRunner.query('DROP INDEX `item_uuid_on_shared_vault_associations` ON `shared_vault_associations`')
await queryRunner.query(
'DROP INDEX `shared_vault_uuid_on_shared_vault_associations` ON `shared_vault_associations`',
)
await queryRunner.query('DROP TABLE `shared_vault_associations`')
}
}

View File

@@ -0,0 +1,29 @@
import { MigrationInterface, QueryRunner } from 'typeorm'

/**
 * Creates the shared-vault domain tables: `shared_vaults` (owned per user,
 * with file-upload accounting columns), `shared_vault_users` (membership
 * with a permission level) and `shared_vault_invites` (pending invitations
 * carrying an encrypted message).
 */
export class AddSharedVaultsWithUsersAndInvites1689677728282 implements MigrationInterface {
  name = 'AddSharedVaultsWithUsersAndInvites1689677728282'

  public async up(queryRunner: QueryRunner): Promise<void> {
    const createTableStatements = [
      'CREATE TABLE `shared_vaults` (`uuid` varchar(36) NOT NULL, `user_uuid` varchar(36) NOT NULL, `file_upload_bytes_used` int NOT NULL, `file_upload_bytes_limit` int NOT NULL, `created_at_timestamp` bigint NOT NULL, `updated_at_timestamp` bigint NOT NULL, INDEX `user_uuid_on_shared_vaults` (`user_uuid`), PRIMARY KEY (`uuid`)) ENGINE=InnoDB',
      'CREATE TABLE `shared_vault_users` (`uuid` varchar(36) NOT NULL, `shared_vault_uuid` varchar(36) NOT NULL, `user_uuid` varchar(36) NOT NULL, `permission` varchar(24) NOT NULL, `created_at_timestamp` bigint NOT NULL, `updated_at_timestamp` bigint NOT NULL, INDEX `shared_vault_uuid_on_shared_vault_users` (`shared_vault_uuid`), INDEX `user_uuid_on_shared_vault_users` (`user_uuid`), PRIMARY KEY (`uuid`)) ENGINE=InnoDB',
      'CREATE TABLE `shared_vault_invites` (`uuid` varchar(36) NOT NULL, `shared_vault_uuid` varchar(36) NOT NULL, `user_uuid` varchar(36) NOT NULL, `sender_uuid` varchar(36) NOT NULL, `encrypted_message` text NOT NULL, `permission` varchar(24) NOT NULL, `created_at_timestamp` bigint NOT NULL, `updated_at_timestamp` bigint NOT NULL, INDEX `shared_vault_uuid_on_shared_vault_invites` (`shared_vault_uuid`), INDEX `user_uuid_on_shared_vault_invites` (`user_uuid`), INDEX `sender_uuid_on_shared_vault_invites` (`sender_uuid`), PRIMARY KEY (`uuid`)) ENGINE=InnoDB',
    ]

    for (const statement of createTableStatements) {
      await queryRunner.query(statement)
    }
  }

  public async down(queryRunner: QueryRunner): Promise<void> {
    // Reverse creation order; each table's indexes are dropped before it.
    const teardownStatements = [
      'DROP INDEX `sender_uuid_on_shared_vault_invites` ON `shared_vault_invites`',
      'DROP INDEX `user_uuid_on_shared_vault_invites` ON `shared_vault_invites`',
      'DROP INDEX `shared_vault_uuid_on_shared_vault_invites` ON `shared_vault_invites`',
      'DROP TABLE `shared_vault_invites`',
      'DROP INDEX `user_uuid_on_shared_vault_users` ON `shared_vault_users`',
      'DROP INDEX `shared_vault_uuid_on_shared_vault_users` ON `shared_vault_users`',
      'DROP TABLE `shared_vault_users`',
      'DROP INDEX `user_uuid_on_shared_vaults` ON `shared_vaults`',
      'DROP TABLE `shared_vaults`',
    ]

    for (const statement of teardownStatements) {
      await queryRunner.query(statement)
    }
  }
}

View File

@@ -0,0 +1,17 @@
import { MigrationInterface, QueryRunner } from 'typeorm'

/**
 * Creates the `messages` table for encrypted user-to-user messages,
 * indexed for lookup by recipient and by sender. The optional
 * `replaceability_identifier` is nullable.
 */
export class AddMessages1689745128577 implements MigrationInterface {
  name = 'AddMessages1689745128577'

  public async up(queryRunner: QueryRunner): Promise<void> {
    const createMessagesTable =
      'CREATE TABLE `messages` (`uuid` varchar(36) NOT NULL, `recipient_uuid` varchar(36) NOT NULL, `sender_uuid` varchar(36) NOT NULL, `encrypted_message` text NOT NULL, `replaceability_identifier` varchar(255) NULL, `created_at_timestamp` bigint NOT NULL, `updated_at_timestamp` bigint NOT NULL, INDEX `recipient_uuid_on_messages` (`recipient_uuid`), INDEX `sender_uuid_on_messages` (`sender_uuid`), PRIMARY KEY (`uuid`)) ENGINE=InnoDB'

    await queryRunner.query(createMessagesTable)
  }

  public async down(queryRunner: QueryRunner): Promise<void> {
    // Drop the secondary indexes first, then the table itself.
    const teardownStatements = [
      'DROP INDEX `sender_uuid_on_messages` ON `messages`',
      'DROP INDEX `recipient_uuid_on_messages` ON `messages`',
      'DROP TABLE `messages`',
    ]

    for (const statement of teardownStatements) {
      await queryRunner.query(statement)
    }
  }
}

View File

@@ -0,0 +1,27 @@
import { MigrationInterface, QueryRunner } from 'typeorm'

/**
 * Renames `key_system_associations.key_system_uuid` to
 * `key_system_identifier`, rebuilding the index on the renamed column.
 * The index must be dropped before the rename and recreated after.
 */
export class RenameKeyMessageIdentifier1689746180559 implements MigrationInterface {
  name = 'RenameKeyMessageIdentifier1689746180559'

  public async up(queryRunner: QueryRunner): Promise<void> {
    const renameStatements = [
      'DROP INDEX `key_system_uuid_on_key_system_associations` ON `key_system_associations`',
      'ALTER TABLE `key_system_associations` CHANGE `key_system_uuid` `key_system_identifier` varchar(36) NOT NULL',
      'CREATE INDEX `key_system_identifier_on_key_system_associations` ON `key_system_associations` (`key_system_identifier`)',
    ]

    for (const statement of renameStatements) {
      await queryRunner.query(statement)
    }
  }

  public async down(queryRunner: QueryRunner): Promise<void> {
    // Mirror image of up(): drop index, rename the column back, re-index.
    const revertStatements = [
      'DROP INDEX `key_system_identifier_on_key_system_associations` ON `key_system_associations`',
      'ALTER TABLE `key_system_associations` CHANGE `key_system_identifier` `key_system_uuid` varchar(36) NOT NULL',
      'CREATE INDEX `key_system_uuid_on_key_system_associations` ON `key_system_associations` (`key_system_uuid`)',
    ]

    for (const statement of revertStatements) {
      await queryRunner.query(statement)
    }
  }
}

View File

@@ -0,0 +1,23 @@
import { MigrationInterface, QueryRunner } from 'typeorm'

/**
 * Deletes all legacy `SN|Privileges` items in fixed-size batches, each batch
 * in its own transaction so a huge table does not hold one giant transaction
 * open for the whole cleanup.
 *
 * Fixes over the previous revision:
 * - SQL string literals now use single quotes. Double quotes are identifier
 *   quotes under MySQL's ANSI_QUOTES SQL mode, where the old statements
 *   would error out.
 * - A failed batch now rolls its transaction back instead of leaving the
 *   query runner inside an open transaction.
 */
export class DeletePrivileges1690900526061 implements MigrationInterface {
  public async up(queryRunner: QueryRunner): Promise<void> {
    const itemsWithPrivilegesContentTypeQueryResult = await queryRunner.manager.query(
      "SELECT COUNT(*) as count FROM items i WHERE i.content_type = 'SN|Privileges'",
    )
    // Drivers may return COUNT(*) as a string (BIGINT); coerce to a number,
    // matching the unary-plus idiom used by sibling migrations.
    const itemsWithPrivilegesContentTypeCount = +itemsWithPrivilegesContentTypeQueryResult[0].count

    const batchSize = 1_000
    const batchCount = Math.ceil(itemsWithPrivilegesContentTypeCount / batchSize)

    for (let batchIndex = 0; batchIndex < batchCount; batchIndex++) {
      await queryRunner.startTransaction()
      try {
        // DELETE ... LIMIT removes up to batchSize rows per iteration.
        await queryRunner.manager.query(`DELETE FROM items WHERE content_type = 'SN|Privileges' LIMIT ${batchSize}`)
        await queryRunner.commitTransaction()
      } catch (error) {
        // Leave the runner in a clean state before propagating the failure.
        await queryRunner.rollbackTransaction()
        throw error
      }
    }
  }

  public async down(): Promise<void> {
    // Data deletion is irreversible; nothing to restore.
    return
  }
}

View File

@@ -0,0 +1,11 @@
import { MigrationInterface, QueryRunner } from 'typeorm'

/**
 * Reclassifies items with the placeholder content type 'Unknown' as 'Note'.
 *
 * Fix over the previous revision: the string literals now use single quotes.
 * Double quotes are identifier quotes under MySQL's ANSI_QUOTES SQL mode,
 * where the old statement would be interpreted as column references and fail.
 */
export class UpdateUnknownContent1690975361562 implements MigrationInterface {
  public async up(queryRunner: QueryRunner): Promise<void> {
    await queryRunner.manager.query("UPDATE items SET content_type = 'Note' WHERE content_type = 'Unknown'")
  }

  public async down(): Promise<void> {
    // Irreversible: the original 'Unknown' rows are indistinguishable from
    // genuine notes after the update.
    return
  }
}

View File

@@ -0,0 +1,22 @@
import { MigrationInterface, QueryRunner } from 'typeorm'

/**
 * Drops the legacy foreign key from `revisions` to `items`, if the
 * `revisions` table still exists in this schema. Best effort: a missing
 * constraint is logged and ignored rather than failing the migration run.
 *
 * Fixes over the previous revision:
 * - The existence check compared `count === 1` strictly, but drivers may
 *   return COUNT(*) as a string (BIGINT), in which case the strict equality
 *   was always false and the foreign key was never dropped. The value is now
 *   coerced with unary plus, matching sibling migrations.
 * - The table-name literal uses single quotes; double quotes are identifier
 *   quotes under MySQL's ANSI_QUOTES SQL mode.
 */
export class RemoveRevisionsForeignKey1692176803410 implements MigrationInterface {
  public async up(queryRunner: QueryRunner): Promise<void> {
    const revisionsTableExistsQueryResult = await queryRunner.manager.query(
      "SELECT COUNT(*) as count FROM information_schema.tables WHERE table_schema = DATABASE() AND table_name = 'revisions'",
    )
    const revisionsTableExists = +revisionsTableExistsQueryResult[0].count > 0

    if (revisionsTableExists) {
      try {
        await queryRunner.query('ALTER TABLE `revisions` DROP FOREIGN KEY `FK_ab3b92e54701fe3010022a31d90`')
      } catch (error) {
        // The constraint may already be absent on some deployments.
        // eslint-disable-next-line no-console
        console.log('Error dropping foreign key: ', (error as Error).message)
      }
    }
  }

  public async down(): Promise<void> {
    // Intentionally not reversible: the foreign key is not recreated.
    return
  }
}

View File

@@ -0,0 +1,21 @@
import { MigrationInterface, QueryRunner } from 'typeorm'

/**
 * Removes the association tables (`key_system_associations` and
 * `shared_vault_associations`) that were superseded by columns on the
 * items table. Indexes are dropped before their owning table.
 */
export class RemoveAssociations1692264556858 implements MigrationInterface {
  public async up(queryRunner: QueryRunner): Promise<void> {
    const teardownStatements = [
      'DROP INDEX `key_system_identifier_on_key_system_associations` ON `key_system_associations`',
      'DROP INDEX `item_uuid_on_key_system_associations` ON `key_system_associations`',
      'DROP TABLE `key_system_associations`',
      'DROP INDEX `item_uuid_on_shared_vault_associations` ON `shared_vault_associations`',
      'DROP INDEX `shared_vault_uuid_on_shared_vault_associations` ON `shared_vault_associations`',
      'DROP TABLE `shared_vault_associations`',
    ]

    for (const statement of teardownStatements) {
      await queryRunner.query(statement)
    }
  }

  public async down(): Promise<void> {
    // Irreversible: the dropped tables and their data are not restored.
    return
  }
}

View File

@@ -0,0 +1,13 @@
import { MigrationInterface, QueryRunner } from 'typeorm'

/**
 * Drops the `file_upload_bytes_limit` column from `shared_vaults`;
 * the upload limit is no longer tracked per vault.
 */
export class RemoveSharedVaultLimit1692619430384 implements MigrationInterface {
  name = 'RemoveSharedVaultLimit1692619430384'

  public async up(queryRunner: QueryRunner): Promise<void> {
    const dropLimitColumn = 'ALTER TABLE `shared_vaults` DROP COLUMN `file_upload_bytes_limit`'

    await queryRunner.query(dropLimitColumn)
  }

  public async down(queryRunner: QueryRunner): Promise<void> {
    // Restores the column's shape only; previous values are lost.
    const restoreLimitColumn = 'ALTER TABLE `shared_vaults` ADD `file_upload_bytes_limit` int NOT NULL'

    await queryRunner.query(restoreLimitColumn)
  }
}

View File

@@ -0,0 +1,19 @@
import { MigrationInterface, QueryRunner } from 'typeorm'

/**
 * Adds shared-vault columns to `items` (`last_edited_by`,
 * `shared_vault_uuid`, `key_system_identifier`, all nullable) plus an
 * index on `shared_vault_uuid` for vault-scoped lookups.
 */
export class AddSharedVaultInformation1693219736168 implements MigrationInterface {
  name = 'AddSharedVaultInformation1693219736168'

  public async up(queryRunner: QueryRunner): Promise<void> {
    const addColumnStatements = [
      'ALTER TABLE `items` ADD `last_edited_by` varchar(36) NULL',
      'ALTER TABLE `items` ADD `shared_vault_uuid` varchar(36) NULL',
      'ALTER TABLE `items` ADD `key_system_identifier` varchar(36) NULL',
      'CREATE INDEX `index_items_on_shared_vault_uuid` ON `items` (`shared_vault_uuid`)',
    ]

    for (const statement of addColumnStatements) {
      await queryRunner.query(statement)
    }
  }

  public async down(queryRunner: QueryRunner): Promise<void> {
    // Reverse order of up(): index first, then the columns.
    const removeColumnStatements = [
      'DROP INDEX `index_items_on_shared_vault_uuid` ON `items`',
      'ALTER TABLE `items` DROP COLUMN `key_system_identifier`',
      'ALTER TABLE `items` DROP COLUMN `shared_vault_uuid`',
      'ALTER TABLE `items` DROP COLUMN `last_edited_by`',
    ]

    for (const statement of removeColumnStatements) {
      await queryRunner.query(statement)
    }
  }
}

View File

@@ -0,0 +1,65 @@
import { MigrationInterface, QueryRunner } from 'typeorm'
// SQLite counterpart of the MySQL shared-vault migration: the `items` table
// is rebuilt via the temporary-table copy-and-rename pattern to add the new
// nullable columns `last_edited_by`, `shared_vault_uuid` and
// `key_system_identifier`. Presumably auto-generated by TypeORM — the exact
// statement order is load-bearing; do not reorder.
export class AddSharedVaultInformation1693220037441 implements MigrationInterface {
name = 'AddSharedVaultInformation1693220037441'
// Rebuilds `items` with the three new columns, preserving all existing rows
// (the new columns default to NULL because the INSERT below copies only the
// original column list).
public async up(queryRunner: QueryRunner): Promise<void> {
// All existing indexes must be dropped before the table swap; they are
// recreated on the new table at the end of this method.
await queryRunner.query('DROP INDEX "index_items_on_user_uuid_and_content_type"')
await queryRunner.query('DROP INDEX "user_uuid_and_updated_at_timestamp_and_created_at_timestamp"')
await queryRunner.query('DROP INDEX "user_uuid_and_deleted"')
await queryRunner.query('DROP INDEX "updated_at_timestamp"')
await queryRunner.query('DROP INDEX "index_items_on_deleted"')
await queryRunner.query('DROP INDEX "index_items_on_user_uuid"')
await queryRunner.query('DROP INDEX "index_items_on_content_type"')
// New table shape: original columns plus the three shared-vault columns.
await queryRunner.query(
'CREATE TABLE "temporary_items" ("uuid" varchar PRIMARY KEY NOT NULL, "duplicate_of" varchar(36), "items_key_id" varchar(255), "content" text, "content_type" varchar(255), "content_size" integer, "enc_item_key" text, "auth_hash" varchar(255), "user_uuid" varchar(36) NOT NULL, "deleted" tinyint(1) DEFAULT (0), "created_at" datetime(6) NOT NULL, "updated_at" datetime(6) NOT NULL, "created_at_timestamp" bigint NOT NULL, "updated_at_timestamp" bigint NOT NULL, "updated_with_session" varchar(36), "last_edited_by" varchar(36), "shared_vault_uuid" varchar(36), "key_system_identifier" varchar(36))',
)
// Copy only the pre-existing columns; new columns are left NULL.
await queryRunner.query(
'INSERT INTO "temporary_items"("uuid", "duplicate_of", "items_key_id", "content", "content_type", "content_size", "enc_item_key", "auth_hash", "user_uuid", "deleted", "created_at", "updated_at", "created_at_timestamp", "updated_at_timestamp", "updated_with_session") SELECT "uuid", "duplicate_of", "items_key_id", "content", "content_type", "content_size", "enc_item_key", "auth_hash", "user_uuid", "deleted", "created_at", "updated_at", "created_at_timestamp", "updated_at_timestamp", "updated_with_session" FROM "items"',
)
// Swap the rebuilt table into place.
await queryRunner.query('DROP TABLE "items"')
await queryRunner.query('ALTER TABLE "temporary_items" RENAME TO "items"')
// Recreate the indexes dropped above.
await queryRunner.query(
'CREATE INDEX "index_items_on_user_uuid_and_content_type" ON "items" ("user_uuid", "content_type") ',
)
await queryRunner.query(
'CREATE INDEX "user_uuid_and_updated_at_timestamp_and_created_at_timestamp" ON "items" ("user_uuid", "updated_at_timestamp", "created_at_timestamp") ',
)
await queryRunner.query('CREATE INDEX "user_uuid_and_deleted" ON "items" ("user_uuid", "deleted") ')
await queryRunner.query('CREATE INDEX "updated_at_timestamp" ON "items" ("updated_at_timestamp") ')
await queryRunner.query('CREATE INDEX "index_items_on_deleted" ON "items" ("deleted") ')
await queryRunner.query('CREATE INDEX "index_items_on_user_uuid" ON "items" ("user_uuid") ')
await queryRunner.query('CREATE INDEX "index_items_on_content_type" ON "items" ("content_type") ')
}
// Performs the reverse rebuild back to the original column set. Note that
// this INSERT copies the new columns into a table that does not declare
// them only via the column list of the SELECT — the recreated "items" table
// below re-declares all columns including the shared-vault ones.
public async down(queryRunner: QueryRunner): Promise<void> {
// NOTE(review): up() does not create "index_items_on_shared_vault_uuid"
// or "user_uuid_on_shared_vaults" — these drops presumably target indexes
// created by other migrations; confirm they exist at this point in the
// migration sequence before relying on down().
await queryRunner.query('DROP INDEX "index_items_on_shared_vault_uuid"')
await queryRunner.query('DROP INDEX "user_uuid_and_deleted"')
await queryRunner.query('DROP INDEX "user_uuid_on_shared_vaults"')
await queryRunner.query('DROP INDEX "index_items_on_content_type"')
await queryRunner.query('DROP INDEX "index_items_on_user_uuid"')
await queryRunner.query('DROP INDEX "index_items_on_deleted"')
await queryRunner.query('DROP INDEX "updated_at_timestamp"')
await queryRunner.query('DROP INDEX "user_uuid_and_updated_at_timestamp_and_created_at_timestamp"')
await queryRunner.query('DROP INDEX "index_items_on_user_uuid_and_content_type"')
await queryRunner.query('ALTER TABLE "items" RENAME TO "temporary_items"')
await queryRunner.query(
'CREATE TABLE "items" ("uuid" varchar PRIMARY KEY NOT NULL, "duplicate_of" varchar(36), "items_key_id" varchar(255), "content" text, "content_type" varchar(255), "content_size" integer, "enc_item_key" text, "auth_hash" varchar(255), "user_uuid" varchar(36) NOT NULL, "deleted" tinyint(1) DEFAULT (0), "created_at" datetime(6) NOT NULL, "updated_at" datetime(6) NOT NULL, "created_at_timestamp" bigint NOT NULL, "updated_at_timestamp" bigint NOT NULL, "updated_with_session" varchar(36), "last_edited_by" varchar(36), "shared_vault_uuid" varchar(36), "key_system_identifier" varchar(36))',
)
await queryRunner.query(
'INSERT INTO "items"("uuid", "duplicate_of", "items_key_id", "content", "content_type", "content_size", "enc_item_key", "auth_hash", "user_uuid", "deleted", "created_at", "updated_at", "created_at_timestamp", "updated_at_timestamp", "updated_with_session", "last_edited_by", "shared_vault_uuid", "key_system_identifier") SELECT "uuid", "duplicate_of", "items_key_id", "content", "content_type", "content_size", "enc_item_key", "auth_hash", "user_uuid", "deleted", "created_at", "updated_at", "created_at_timestamp", "updated_at_timestamp", "updated_with_session", "last_edited_by", "shared_vault_uuid", "key_system_identifier" FROM "temporary_items"',
)
await queryRunner.query('DROP TABLE "temporary_items"')
// Recreate the original index set on the restored table.
await queryRunner.query('CREATE INDEX "index_items_on_content_type" ON "items" ("content_type") ')
await queryRunner.query('CREATE INDEX "index_items_on_user_uuid" ON "items" ("user_uuid") ')
await queryRunner.query('CREATE INDEX "index_items_on_deleted" ON "items" ("deleted") ')
await queryRunner.query('CREATE INDEX "updated_at_timestamp" ON "items" ("updated_at_timestamp") ')
await queryRunner.query(
'CREATE INDEX "user_uuid_and_updated_at_timestamp_and_created_at_timestamp" ON "items" ("user_uuid", "updated_at_timestamp", "created_at_timestamp") ',
)
await queryRunner.query(
'CREATE INDEX "index_items_on_user_uuid_and_content_type" ON "items" ("user_uuid", "content_type") ',
)
await queryRunner.query('CREATE INDEX "user_uuid_and_deleted" ON "items" ("user_uuid", "deleted") ')
}
}

View File

@@ -1,6 +1,6 @@
{
"name": "@standardnotes/syncing-server",
"version": "1.84.0",
"version": "1.86.0",
"engines": {
"node": ">=18.0.0 <21.0.0"
},
@@ -49,7 +49,7 @@
"inversify": "^6.0.1",
"inversify-express-utils": "^6.4.3",
"jsonwebtoken": "^9.0.0",
"mongodb": "^5.7.0",
"mongodb": "^6.0.0",
"mysql2": "^3.0.1",
"nodemon": "^2.0.19",
"prettyjson": "^1.2.5",

View File

@@ -6,7 +6,7 @@ import TYPES from './Types'
import { AppDataSource } from './DataSource'
import { SNSClient, SNSClientConfig } from '@aws-sdk/client-sns'
import { ItemRepositoryInterface } from '../Domain/Item/ItemRepositoryInterface'
import { TypeORMItemRepository } from '../Infra/TypeORM/TypeORMItemRepository'
import { SQLLegacyItemRepository } from '../Infra/TypeORM/SQLLegacyItemRepository'
import { MongoRepository, Repository } from 'typeorm'
import { Item } from '../Domain/Item/Item'
import {
@@ -61,8 +61,8 @@ import { S3ItemBackupService } from '../Infra/S3/S3ItemBackupService'
import { ControllerContainer, ControllerContainerInterface, MapperInterface } from '@standardnotes/domain-core'
import { BaseItemsController } from '../Infra/InversifyExpressUtils/Base/BaseItemsController'
import { Transform } from 'stream'
import { TypeORMItem } from '../Infra/TypeORM/TypeORMItem'
import { ItemPersistenceMapper } from '../Mapping/Persistence/ItemPersistenceMapper'
import { SQLLegacyItem } from '../Infra/TypeORM/SQLLegacyItem'
import { SQLLegacyItemPersistenceMapper } from '../Mapping/Persistence/SQLLegacyItemPersistenceMapper'
import { ItemHttpRepresentation } from '../Mapping/Http/ItemHttpRepresentation'
import { ItemHttpMapper } from '../Mapping/Http/ItemHttpMapper'
import { SavedItemHttpRepresentation } from '../Mapping/Http/SavedItemHttpRepresentation'
@@ -158,6 +158,9 @@ import { TransitionItemsFromPrimaryToSecondaryDatabaseForUser } from '../Domain/
import { SharedVaultFileMovedEventHandler } from '../Domain/Handler/SharedVaultFileMovedEventHandler'
import { TransitionStatusUpdatedEventHandler } from '../Domain/Handler/TransitionStatusUpdatedEventHandler'
import { TriggerTransitionFromPrimaryToSecondaryDatabaseForUser } from '../Domain/UseCase/Transition/TriggerTransitionFromPrimaryToSecondaryDatabaseForUser/TriggerTransitionFromPrimaryToSecondaryDatabaseForUser'
import { SQLItem } from '../Infra/TypeORM/SQLItem'
import { SQLItemPersistenceMapper } from '../Mapping/Persistence/SQLItemPersistenceMapper'
import { SQLItemRepository } from '../Infra/TypeORM/SQLItemRepository'
export class ContainerConfigLoader {
private readonly DEFAULT_CONTENT_SIZE_TRANSFER_LIMIT = 10_000_000
@@ -210,6 +213,8 @@ export class ContainerConfigLoader {
container.bind<TimerInterface>(TYPES.Sync_Timer).toConstantValue(new Timer())
const isConfiguredForHomeServer = env.get('MODE', true) === 'home-server'
const isConfiguredForSelfHosting = env.get('MODE', true) === 'self-hosted'
const isConfiguredForHomeServerOrSelfHosting = isConfiguredForHomeServer || isConfiguredForSelfHosting
const isSecondaryDatabaseEnabled = env.get('SECONDARY_DB_ENABLED', true) === 'true'
container.bind<Env>(TYPES.Sync_Env).toConstantValue(env)
@@ -303,8 +308,11 @@ export class ContainerConfigLoader {
// Mapping
container
.bind<MapperInterface<Item, TypeORMItem>>(TYPES.Sync_ItemPersistenceMapper)
.toConstantValue(new ItemPersistenceMapper())
.bind<MapperInterface<Item, SQLLegacyItem>>(TYPES.Sync_SQLLegacyItemPersistenceMapper)
.toConstantValue(new SQLLegacyItemPersistenceMapper())
container
.bind<MapperInterface<Item, SQLItem>>(TYPES.Sync_SQLItemPersistenceMapper)
.toConstantValue(new SQLItemPersistenceMapper())
container
.bind<MapperInterface<ItemHash, ItemHashHttpRepresentation>>(TYPES.Sync_ItemHashHttpMapper)
.toConstantValue(new ItemHashHttpMapper())
@@ -360,8 +368,11 @@ export class ContainerConfigLoader {
// ORM
container
.bind<Repository<TypeORMItem>>(TYPES.Sync_ORMItemRepository)
.toDynamicValue(() => appDataSource.getRepository(TypeORMItem))
.bind<Repository<SQLLegacyItem>>(TYPES.Sync_ORMLegacyItemRepository)
.toDynamicValue(() => appDataSource.getRepository(SQLLegacyItem))
container
.bind<Repository<SQLItem>>(TYPES.Sync_ORMItemRepository)
.toConstantValue(appDataSource.getRepository(SQLItem))
container
.bind<Repository<TypeORMSharedVault>>(TYPES.Sync_ORMSharedVaultRepository)
.toConstantValue(appDataSource.getRepository(TypeORMSharedVault))
@@ -401,19 +412,25 @@ export class ContainerConfigLoader {
// Repositories
container
.bind<ItemRepositoryInterface>(TYPES.Sync_MySQLItemRepository)
.bind<ItemRepositoryInterface>(TYPES.Sync_SQLItemRepository)
.toConstantValue(
new TypeORMItemRepository(
container.get<Repository<TypeORMItem>>(TYPES.Sync_ORMItemRepository),
container.get<MapperInterface<Item, TypeORMItem>>(TYPES.Sync_ItemPersistenceMapper),
container.get<Logger>(TYPES.Sync_Logger),
),
isConfiguredForHomeServerOrSelfHosting
? new SQLItemRepository(
container.get<Repository<SQLItem>>(TYPES.Sync_ORMItemRepository),
container.get<MapperInterface<Item, SQLItem>>(TYPES.Sync_SQLItemPersistenceMapper),
container.get<Logger>(TYPES.Sync_Logger),
)
: new SQLLegacyItemRepository(
container.get<Repository<SQLLegacyItem>>(TYPES.Sync_ORMLegacyItemRepository),
container.get<MapperInterface<Item, SQLLegacyItem>>(TYPES.Sync_SQLLegacyItemPersistenceMapper),
container.get<Logger>(TYPES.Sync_Logger),
),
)
container
.bind<ItemRepositoryResolverInterface>(TYPES.Sync_ItemRepositoryResolver)
.toConstantValue(
new TypeORMItemRepositoryResolver(
container.get<ItemRepositoryInterface>(TYPES.Sync_MySQLItemRepository),
container.get<ItemRepositoryInterface>(TYPES.Sync_SQLItemRepository),
isSecondaryDatabaseEnabled ? container.get<ItemRepositoryInterface>(TYPES.Sync_MongoDBItemRepository) : null,
),
)
@@ -777,7 +794,7 @@ export class ContainerConfigLoader {
)
.toConstantValue(
new TransitionItemsFromPrimaryToSecondaryDatabaseForUser(
container.get<ItemRepositoryInterface>(TYPES.Sync_MySQLItemRepository),
container.get<ItemRepositoryInterface>(TYPES.Sync_SQLItemRepository),
isSecondaryDatabaseEnabled ? container.get<ItemRepositoryInterface>(TYPES.Sync_MongoDBItemRepository) : null,
container.get<TimerInterface>(TYPES.Sync_Timer),
container.get<Logger>(TYPES.Sync_Logger),
@@ -843,7 +860,7 @@ export class ContainerConfigLoader {
.bind<DuplicateItemSyncedEventHandler>(TYPES.Sync_DuplicateItemSyncedEventHandler)
.toConstantValue(
new DuplicateItemSyncedEventHandler(
container.get<ItemRepositoryInterface>(TYPES.Sync_MySQLItemRepository),
container.get<ItemRepositoryInterface>(TYPES.Sync_SQLItemRepository),
isSecondaryDatabaseEnabled ? container.get<ItemRepositoryInterface>(TYPES.Sync_MongoDBItemRepository) : null,
container.get<DomainEventFactoryInterface>(TYPES.Sync_DomainEventFactory),
container.get<DomainEventPublisherInterface>(TYPES.Sync_DomainEventPublisher),
@@ -854,7 +871,7 @@ export class ContainerConfigLoader {
.bind<AccountDeletionRequestedEventHandler>(TYPES.Sync_AccountDeletionRequestedEventHandler)
.toConstantValue(
new AccountDeletionRequestedEventHandler(
container.get<ItemRepositoryInterface>(TYPES.Sync_MySQLItemRepository),
container.get<ItemRepositoryInterface>(TYPES.Sync_SQLItemRepository),
isSecondaryDatabaseEnabled ? container.get<ItemRepositoryInterface>(TYPES.Sync_MongoDBItemRepository) : null,
container.get<Logger>(TYPES.Sync_Logger),
),
@@ -863,7 +880,7 @@ export class ContainerConfigLoader {
.bind<ItemRevisionCreationRequestedEventHandler>(TYPES.Sync_ItemRevisionCreationRequestedEventHandler)
.toConstantValue(
new ItemRevisionCreationRequestedEventHandler(
container.get<ItemRepositoryInterface>(TYPES.Sync_MySQLItemRepository),
container.get<ItemRepositoryInterface>(TYPES.Sync_SQLItemRepository),
isSecondaryDatabaseEnabled ? container.get<ItemRepositoryInterface>(TYPES.Sync_MongoDBItemRepository) : null,
container.get<ItemBackupServiceInterface>(TYPES.Sync_ItemBackupService),
container.get<DomainEventFactoryInterface>(TYPES.Sync_DomainEventFactory),
@@ -918,7 +935,7 @@ export class ContainerConfigLoader {
.toConstantValue(
new ExtensionsHttpService(
container.get<AxiosInstance>(TYPES.Sync_HTTPClient),
container.get<ItemRepositoryInterface>(TYPES.Sync_MySQLItemRepository),
container.get<ItemRepositoryInterface>(TYPES.Sync_SQLItemRepository),
isSecondaryDatabaseEnabled ? container.get<ItemRepositoryInterface>(TYPES.Sync_MongoDBItemRepository) : null,
container.get<ContentDecoderInterface>(TYPES.Sync_ContentDecoder),
container.get<DomainEventPublisherInterface>(TYPES.Sync_DomainEventPublisher),
@@ -964,7 +981,7 @@ export class ContainerConfigLoader {
.bind<EmailBackupRequestedEventHandler>(TYPES.Sync_EmailBackupRequestedEventHandler)
.toConstantValue(
new EmailBackupRequestedEventHandler(
container.get<ItemRepositoryInterface>(TYPES.Sync_MySQLItemRepository),
container.get<ItemRepositoryInterface>(TYPES.Sync_SQLItemRepository),
isSecondaryDatabaseEnabled
? container.get<ItemRepositoryInterface>(TYPES.Sync_MongoDBItemRepository)
: null,

View File

@@ -2,13 +2,14 @@ import { DataSource, EntityTarget, LoggerOptions, MongoRepository, ObjectLiteral
import { MysqlConnectionOptions } from 'typeorm/driver/mysql/MysqlConnectionOptions'
import { Env } from './Env'
import { SqliteConnectionOptions } from 'typeorm/driver/sqlite/SqliteConnectionOptions'
import { TypeORMItem } from '../Infra/TypeORM/TypeORMItem'
import { SQLLegacyItem } from '../Infra/TypeORM/SQLLegacyItem'
import { TypeORMNotification } from '../Infra/TypeORM/TypeORMNotification'
import { TypeORMSharedVault } from '../Infra/TypeORM/TypeORMSharedVault'
import { TypeORMSharedVaultUser } from '../Infra/TypeORM/TypeORMSharedVaultUser'
import { TypeORMSharedVaultInvite } from '../Infra/TypeORM/TypeORMSharedVaultInvite'
import { TypeORMMessage } from '../Infra/TypeORM/TypeORMMessage'
import { MongoDBItem } from '../Infra/TypeORM/MongoDBItem'
import { SQLItem } from '../Infra/TypeORM/SQLItem'
export class AppDataSource {
private _dataSource: DataSource | undefined
@@ -67,22 +68,30 @@ export class AppDataSource {
this.env.load()
const isConfiguredForMySQL = this.env.get('DB_TYPE') === 'mysql'
const isConfiguredForHomeServerOrSelfHosting =
this.env.get('MODE', true) === 'home-server' || this.env.get('MODE', true) === 'self-hosted'
const maxQueryExecutionTime = this.env.get('DB_MAX_QUERY_EXECUTION_TIME', true)
? +this.env.get('DB_MAX_QUERY_EXECUTION_TIME', true)
: 45_000
const migrationsSourceDirectoryName = isConfiguredForMySQL
? isConfiguredForHomeServerOrSelfHosting
? 'mysql'
: 'mysql-legacy'
: 'sqlite'
const commonDataSourceOptions = {
maxQueryExecutionTime,
entities: [
TypeORMItem,
isConfiguredForHomeServerOrSelfHosting ? SQLItem : SQLLegacyItem,
TypeORMNotification,
TypeORMSharedVault,
TypeORMSharedVaultUser,
TypeORMSharedVaultInvite,
TypeORMMessage,
],
migrations: [`${__dirname}/../../migrations/${isConfiguredForMySQL ? 'mysql' : 'sqlite'}/*.js`],
migrations: [`${__dirname}/../../migrations/${migrationsSourceDirectoryName}/*.js`],
migrationsRun: true,
logging: <LoggerOptions>this.env.get('DB_DEBUG_LEVEL', true) ?? 'info',
}

View File

@@ -8,7 +8,7 @@ const TYPES = {
Sync_Env: Symbol.for('Sync_Env'),
// Repositories
Sync_ItemRepositoryResolver: Symbol.for('Sync_ItemRepositoryResolver'),
Sync_MySQLItemRepository: Symbol.for('Sync_MySQLItemRepository'),
Sync_SQLItemRepository: Symbol.for('Sync_SQLItemRepository'),
Sync_MongoDBItemRepository: Symbol.for('Sync_MongoDBItemRepository'),
Sync_SharedVaultRepository: Symbol.for('Sync_SharedVaultRepository'),
Sync_SharedVaultInviteRepository: Symbol.for('Sync_SharedVaultInviteRepository'),
@@ -17,6 +17,7 @@ const TYPES = {
Sync_MessageRepository: Symbol.for('Sync_MessageRepository'),
// ORM
Sync_ORMItemRepository: Symbol.for('Sync_ORMItemRepository'),
Sync_ORMLegacyItemRepository: Symbol.for('Sync_ORMLegacyItemRepository'),
Sync_ORMSharedVaultRepository: Symbol.for('Sync_ORMSharedVaultRepository'),
Sync_ORMSharedVaultInviteRepository: Symbol.for('Sync_ORMSharedVaultInviteRepository'),
Sync_ORMSharedVaultUserRepository: Symbol.for('Sync_ORMSharedVaultUserRepository'),
@@ -131,7 +132,8 @@ const TYPES = {
Sync_MessagePersistenceMapper: Symbol.for('Sync_MessagePersistenceMapper'),
Sync_MessageHttpMapper: Symbol.for('Sync_MessageHttpMapper'),
Sync_NotificationHttpMapper: Symbol.for('Sync_NotificationHttpMapper'),
Sync_ItemPersistenceMapper: Symbol.for('Sync_ItemPersistenceMapper'),
Sync_SQLLegacyItemPersistenceMapper: Symbol.for('Sync_SQLLegacyItemPersistenceMapper'),
Sync_SQLItemPersistenceMapper: Symbol.for('Sync_SQLItemPersistenceMapper'),
Sync_MongoDBItemPersistenceMapper: Symbol.for('Sync_MongoDBItemPersistenceMapper'),
Sync_ItemHttpMapper: Symbol.for('Sync_ItemHttpMapper'),
Sync_ItemHashHttpMapper: Symbol.for('Sync_ItemHashHttpMapper'),

View File

@@ -74,6 +74,7 @@ describe('DuplicateItemSyncedEventHandler', () => {
logger = {} as jest.Mocked<Logger>
logger.warn = jest.fn()
logger.debug = jest.fn()
event = {} as jest.Mocked<DuplicateItemSyncedEvent>
event.createdAt = new Date(1)

View File

@@ -31,13 +31,13 @@ export class DuplicateItemSyncedEventHandler implements DomainEventHandlerInterf
const item = await itemRepository.findByUuidAndUserUuid(event.payload.itemUuid, event.payload.userUuid)
if (item === null) {
this.logger.warn(`Could not find item with uuid ${event.payload.itemUuid}`)
this.logger.debug(`Could not find item with uuid ${event.payload.itemUuid}`)
return
}
if (!item.props.duplicateOf) {
this.logger.warn(`Item ${event.payload.itemUuid} does not point to any duplicate`)
this.logger.debug(`Item ${event.payload.itemUuid} does not point to any duplicate`)
return
}

View File

@@ -58,6 +58,7 @@ export class MongoDBItem {
declare lastEditedBy: string | null
@Column()
@Index('index_items_on_shared_vault_uuid')
declare sharedVaultUuid: string | null
@Column()

View File

@@ -48,7 +48,7 @@ export class MongoDBItemRepository implements ItemRepositoryInterface {
async findContentSizeForComputingTransferLimit(query: ItemQuery): Promise<ItemContentSizeDescriptor[]> {
const options = this.createFindOptions(query)
const rawItems = await this.mongoRepository.find({
select: ['uuid', 'contentSize'],
select: ['_id', 'contentSize'],
...options,
})
@@ -175,7 +175,11 @@ export class MongoDBItemRepository implements ItemRepositoryInterface {
where: undefined,
}
if (query.sortBy !== undefined && query.sortOrder !== undefined) {
options.order = { [query.sortBy]: query.sortOrder }
const sortBySnakeToCamelCase = query.sortBy
.toLowerCase()
.replace(/([-_][a-z])/g, (group) => group.toUpperCase().replace('-', '').replace('_', ''))
options.order = { [sortBySnakeToCamelCase]: query.sortOrder }
}
if (query.uuids && query.uuids.length > 0) {
@@ -185,7 +189,9 @@ export class MongoDBItemRepository implements ItemRepositoryInterface {
}
}
if (query.deleted !== undefined) {
options.where = { ...options.where, deleted: { $eq: query.deleted } }
const deletedMixedValues = query.deleted === true ? [true, 1] : [false, 0]
options.where = { ...options.where, deleted: { $in: deletedMixedValues } }
}
if (query.contentType) {
if (Array.isArray(query.contentType)) {

View File

@@ -0,0 +1,31 @@
import { Column, Entity, Index } from 'typeorm'
import { SQLLegacyItem } from './SQLLegacyItem'
// Current (non-legacy) SQL entity for the `items` table. Extends
// SQLLegacyItem with the three shared-vault columns introduced by the
// AddSharedVaultInformation migrations; all three are nullable because
// items outside a shared vault do not carry them.
@Entity({ name: 'items' })
export class SQLItem extends SQLLegacyItem {
// UUID of the user who last edited this item, when edited in a shared
// vault context; NULL otherwise.
@Column({
type: 'varchar',
name: 'last_edited_by',
length: 36,
nullable: true,
})
declare lastEditedBy: string | null
// UUID of the shared vault containing this item; indexed for
// vault-scoped queries. NULL for items not in a shared vault.
@Column({
type: 'varchar',
name: 'shared_vault_uuid',
length: 36,
nullable: true,
})
@Index('index_items_on_shared_vault_uuid')
declare sharedVaultUuid: string | null
// Identifier of the key system used to encrypt this item, if any.
@Column({
type: 'varchar',
name: 'key_system_identifier',
length: 36,
nullable: true,
})
declare keySystemIdentifier: string | null
}

View File

@@ -0,0 +1,78 @@
import { Repository, SelectQueryBuilder } from 'typeorm'
import { MapperInterface } from '@standardnotes/domain-core'
import { Logger } from 'winston'
import { Item } from '../../Domain/Item/Item'
import { SQLItem } from './SQLItem'
import { SQLLegacyItemRepository } from './SQLLegacyItemRepository'
import { ItemQuery } from '../../Domain/Item/ItemQuery'
/**
 * Repository for the current items model (SQLItem). Inherits all CRUD
 * behavior from SQLLegacyItemRepository and only overrides query building,
 * so that shared-vault filters (includeSharedVaultUuids /
 * exclusiveSharedVaultUuids) participate in item lookups.
 */
export class SQLItemRepository extends SQLLegacyItemRepository {
  constructor(
    protected override ormRepository: Repository<SQLItem>,
    protected override mapper: MapperInterface<Item, SQLItem>,
    protected override logger: Logger,
  ) {
    super(ormRepository, mapper, logger)
  }

  /**
   * Translates an ItemQuery into a TypeORM SelectQueryBuilder over SQLItem.
   * The base where-clause is chosen by precedence: inclusive shared-vault
   * filter first, then exclusive shared-vault filter, then plain user filter;
   * every remaining criterion is appended with andWhere.
   */
  protected override createFindAllQueryBuilder(query: ItemQuery): SelectQueryBuilder<SQLItem> {
    const builder = this.ormRepository.createQueryBuilder('item')

    if (query.sortBy !== undefined && query.sortOrder !== undefined) {
      // NOTE(review): sortBy is interpolated into the column reference —
      // assumed to come from a validated internal query object, not raw
      // client input; confirm at the call sites.
      builder.orderBy(`item.${query.sortBy}`, query.sortOrder)
    }

    if (query.includeSharedVaultUuids !== undefined && query.includeSharedVaultUuids.length > 0) {
      // Inclusive mode: the user's own items plus items from the listed
      // vaults (or vault items only when no user is given).
      if (query.userUuid) {
        builder.where('(item.user_uuid = :userUuid OR item.shared_vault_uuid IN (:...includeSharedVaultUuids))', {
          userUuid: query.userUuid,
          includeSharedVaultUuids: query.includeSharedVaultUuids,
        })
      } else {
        builder.where('item.shared_vault_uuid IN (:...includeSharedVaultUuids)', {
          includeSharedVaultUuids: query.includeSharedVaultUuids,
        })
      }
    } else if (query.exclusiveSharedVaultUuids !== undefined && query.exclusiveSharedVaultUuids.length > 0) {
      // Exclusive mode: only items that live in the listed vaults.
      builder.where('item.shared_vault_uuid IN (:...exclusiveSharedVaultUuids)', {
        exclusiveSharedVaultUuids: query.exclusiveSharedVaultUuids,
      })
    } else if (query.userUuid !== undefined) {
      builder.where('item.user_uuid = :userUuid', { userUuid: query.userUuid })
    }

    if (query.uuids && query.uuids.length > 0) {
      builder.andWhere('item.uuid IN (:...uuids)', { uuids: query.uuids })
    }

    if (query.deleted !== undefined) {
      builder.andWhere('item.deleted = :deleted', { deleted: query.deleted })
    }

    if (query.contentType) {
      // A single content type or a list of them may be supplied.
      Array.isArray(query.contentType)
        ? builder.andWhere('item.content_type IN (:...contentTypes)', { contentTypes: query.contentType })
        : builder.andWhere('item.content_type = :contentType', { contentType: query.contentType })
    }

    if (query.lastSyncTime && query.syncTimeComparison) {
      // syncTimeComparison is an operator string (e.g. '>', '>=') supplied by
      // internal sync logic, hence interpolated rather than bound.
      builder.andWhere(`item.updated_at_timestamp ${query.syncTimeComparison} :lastSyncTime`, {
        lastSyncTime: query.lastSyncTime,
      })
    }

    if (query.createdBetween !== undefined) {
      builder.andWhere('item.created_at >= :createdAfter AND item.created_at <= :createdBefore', {
        createdAfter: query.createdBetween[0].toISOString(),
        createdBefore: query.createdBetween[1].toISOString(),
      })
    }

    if (query.offset !== undefined) {
      builder.skip(query.offset)
    }

    if (query.limit !== undefined) {
      builder.take(query.limit)
    }

    return builder
  }
}

View File

@@ -8,7 +8,7 @@ import { Column, Entity, Index, PrimaryGeneratedColumn } from 'typeorm'
'createdAtTimestamp',
])
@Index('user_uuid_and_deleted', ['userUuid', 'deleted'])
export class TypeORMItem {
export class SQLLegacyItem {
@PrimaryGeneratedColumn('uuid')
declare uuid: string

View File

@@ -6,14 +6,14 @@ import { Item } from '../../Domain/Item/Item'
import { ItemQuery } from '../../Domain/Item/ItemQuery'
import { ItemRepositoryInterface } from '../../Domain/Item/ItemRepositoryInterface'
import { ExtendedIntegrityPayload } from '../../Domain/Item/ExtendedIntegrityPayload'
import { TypeORMItem } from './TypeORMItem'
import { SQLLegacyItem } from './SQLLegacyItem'
import { ItemContentSizeDescriptor } from '../../Domain/Item/ItemContentSizeDescriptor'
export class TypeORMItemRepository implements ItemRepositoryInterface {
export class SQLLegacyItemRepository implements ItemRepositoryInterface {
constructor(
private ormRepository: Repository<TypeORMItem>,
private mapper: MapperInterface<Item, TypeORMItem>,
private logger: Logger,
protected ormRepository: Repository<SQLLegacyItem>,
protected mapper: MapperInterface<Item, SQLLegacyItem>,
protected logger: Logger,
) {}
async save(item: Item): Promise<void> {
@@ -179,7 +179,7 @@ export class TypeORMItemRepository implements ItemRepositoryInterface {
.execute()
}
private createFindAllQueryBuilder(query: ItemQuery): SelectQueryBuilder<TypeORMItem> {
protected createFindAllQueryBuilder(query: ItemQuery): SelectQueryBuilder<SQLLegacyItem> {
const queryBuilder = this.ormRepository.createQueryBuilder('item')
if (query.sortBy !== undefined && query.sortOrder !== undefined) {

View File

@@ -5,13 +5,13 @@ import { ItemRepositoryResolverInterface } from '../../Domain/Item/ItemRepositor
export class TypeORMItemRepositoryResolver implements ItemRepositoryResolverInterface {
constructor(
private mysqlItemRepository: ItemRepositoryInterface,
private sqlItemRepository: ItemRepositoryInterface,
private mongoDbItemRepository: ItemRepositoryInterface | null,
) {}
resolve(roleNames: RoleNameCollection): ItemRepositoryInterface {
if (!this.mongoDbItemRepository) {
return this.mysqlItemRepository
return this.sqlItemRepository
}
const transitionRoleName = RoleName.create(RoleName.NAMES.TransitionUser).getValue()
@@ -20,6 +20,6 @@ export class TypeORMItemRepositoryResolver implements ItemRepositoryResolverInte
return this.mongoDbItemRepository
}
return this.mysqlItemRepository
return this.sqlItemRepository
}
}

View File

@@ -99,7 +99,7 @@ export class MongoDBItemPersistenceMapper implements MapperInterface<Item, Mongo
encItemKey: projection.encItemKey,
authHash: projection.authHash,
userUuid,
deleted: projection.deleted,
deleted: !!projection.deleted,
dates,
timestamps,
updatedWithSession,
@@ -127,7 +127,7 @@ export class MongoDBItemPersistenceMapper implements MapperInterface<Item, Mongo
mongoDbItem.encItemKey = domain.props.encItemKey
mongoDbItem.authHash = domain.props.authHash
mongoDbItem.userUuid = domain.props.userUuid.value
mongoDbItem.deleted = domain.props.deleted
mongoDbItem.deleted = !!domain.props.deleted
mongoDbItem.createdAt = domain.props.dates.createdAt
mongoDbItem.updatedAt = domain.props.dates.updatedAt
mongoDbItem.createdAtTimestamp = domain.props.timestamps.createdAt

View File

@@ -0,0 +1,148 @@
import { Timestamps, MapperInterface, UniqueEntityId, Uuid, ContentType, Dates } from '@standardnotes/domain-core'
import { Item } from '../../Domain/Item/Item'
import { SQLItem } from '../../Infra/TypeORM/SQLItem'
import { KeySystemAssociation } from '../../Domain/KeySystem/KeySystemAssociation'
import { SharedVaultAssociation } from '../../Domain/SharedVault/SharedVaultAssociation'
/**
 * Maps between the Item domain entity and the SQLItem persistence projection
 * (current items model with shared-vault and key-system columns).
 *
 * toDomain throws when any projected value fails domain validation; callers
 * treat a throw as a corrupt row.
 */
export class SQLItemPersistenceMapper implements MapperInterface<Item, SQLItem> {
  toDomain(projection: SQLItem): Item {
    const uuidOrError = Uuid.create(projection.uuid)
    if (uuidOrError.isFailed()) {
      throw new Error(`Failed to create item from projection: ${uuidOrError.getError()}`)
    }
    const uuid = uuidOrError.getValue()

    let duplicateOf = null
    if (projection.duplicateOf) {
      const duplicateOfOrError = Uuid.create(projection.duplicateOf)
      if (duplicateOfOrError.isFailed()) {
        throw new Error(`Failed to create item from projection: ${duplicateOfOrError.getError()}`)
      }
      duplicateOf = duplicateOfOrError.getValue()
    }

    const contentTypeOrError = ContentType.create(projection.contentType)
    if (contentTypeOrError.isFailed()) {
      throw new Error(`Failed to create item from projection: ${contentTypeOrError.getError()}`)
    }
    const contentType = contentTypeOrError.getValue()

    const userUuidOrError = Uuid.create(projection.userUuid)
    if (userUuidOrError.isFailed()) {
      throw new Error(`Failed to create item from projection: ${userUuidOrError.getError()}`)
    }
    const userUuid = userUuidOrError.getValue()

    const datesOrError = Dates.create(projection.createdAt, projection.updatedAt)
    if (datesOrError.isFailed()) {
      throw new Error(`Failed to create item from projection: ${datesOrError.getError()}`)
    }
    const dates = datesOrError.getValue()

    const timestampsOrError = Timestamps.create(projection.createdAtTimestamp, projection.updatedAtTimestamp)
    if (timestampsOrError.isFailed()) {
      throw new Error(`Failed to create item from projection: ${timestampsOrError.getError()}`)
    }
    const timestamps = timestampsOrError.getValue()

    let updatedWithSession = null
    if (projection.updatedWithSession) {
      const updatedWithSessionOrError = Uuid.create(projection.updatedWithSession)
      if (updatedWithSessionOrError.isFailed()) {
        throw new Error(`Failed to create item from projection: ${updatedWithSessionOrError.getError()}`)
      }
      updatedWithSession = updatedWithSessionOrError.getValue()
    }

    // A shared vault association is only reconstructed when BOTH columns are
    // present; a row with only one of them is treated as unassociated.
    // NOTE(review): confirm a half-populated pair cannot legitimately occur.
    let sharedVaultAssociation: SharedVaultAssociation | undefined = undefined
    if (projection.sharedVaultUuid && projection.lastEditedBy) {
      const sharedVaultUuidOrError = Uuid.create(projection.sharedVaultUuid)
      if (sharedVaultUuidOrError.isFailed()) {
        throw new Error(`Failed to create item from projection: ${sharedVaultUuidOrError.getError()}`)
      }
      const sharedVaultUuid = sharedVaultUuidOrError.getValue()

      const lastEditedByOrError = Uuid.create(projection.lastEditedBy)
      if (lastEditedByOrError.isFailed()) {
        throw new Error(`Failed to create item from projection: ${lastEditedByOrError.getError()}`)
      }
      const lastEditedBy = lastEditedByOrError.getValue()

      const sharedVaultAssociationOrError = SharedVaultAssociation.create({
        sharedVaultUuid,
        lastEditedBy,
      })
      if (sharedVaultAssociationOrError.isFailed()) {
        throw new Error(`Failed to create item from projection: ${sharedVaultAssociationOrError.getError()}`)
      }
      sharedVaultAssociation = sharedVaultAssociationOrError.getValue()
    }

    let keySystemAssociation: KeySystemAssociation | undefined = undefined
    if (projection.keySystemIdentifier) {
      const keySystemAssociationOrError = KeySystemAssociation.create(projection.keySystemIdentifier)
      if (keySystemAssociationOrError.isFailed()) {
        throw new Error(`Failed to create item from projection: ${keySystemAssociationOrError.getError()}`)
      }
      keySystemAssociation = keySystemAssociationOrError.getValue()
    }

    const itemOrError = Item.create(
      {
        duplicateOf,
        itemsKeyId: projection.itemsKeyId,
        content: projection.content,
        contentType,
        contentSize: projection.contentSize ?? undefined,
        encItemKey: projection.encItemKey,
        authHash: projection.authHash,
        userUuid,
        // Coerce to a strict boolean: SQL drivers may hydrate the deleted
        // column as 0/1 rather than true/false. Mirrors the identical fix
        // applied in MongoDBItemPersistenceMapper.
        deleted: !!projection.deleted,
        dates,
        timestamps,
        updatedWithSession,
        sharedVaultAssociation,
        keySystemAssociation,
      },
      new UniqueEntityId(uuid.value),
    )
    if (itemOrError.isFailed()) {
      throw new Error(`Failed to create item from projection: ${itemOrError.getError()}`)
    }

    return itemOrError.getValue()
  }

  toProjection(domain: Item): SQLItem {
    const typeorm = new SQLItem()

    typeorm.uuid = domain.id.toString()
    typeorm.duplicateOf = domain.props.duplicateOf ? domain.props.duplicateOf.value : null
    typeorm.itemsKeyId = domain.props.itemsKeyId
    typeorm.content = domain.props.content
    typeorm.contentType = domain.props.contentType.value
    typeorm.contentSize = domain.props.contentSize ?? null
    typeorm.encItemKey = domain.props.encItemKey
    typeorm.authHash = domain.props.authHash
    typeorm.userUuid = domain.props.userUuid.value
    // Defensive boolean coercion, consistent with toDomain and with the
    // MongoDB mapper's handling of the same flag.
    typeorm.deleted = !!domain.props.deleted
    typeorm.createdAt = domain.props.dates.createdAt
    typeorm.updatedAt = domain.props.dates.updatedAt
    typeorm.createdAtTimestamp = domain.props.timestamps.createdAt
    typeorm.updatedAtTimestamp = domain.props.timestamps.updatedAt
    typeorm.updatedWithSession = domain.props.updatedWithSession ? domain.props.updatedWithSession.value : null
    // The shared-vault pair is written together or not at all, matching the
    // both-or-nothing read in toDomain.
    typeorm.lastEditedBy = domain.props.sharedVaultAssociation
      ? domain.props.sharedVaultAssociation.props.lastEditedBy.value
      : null
    typeorm.sharedVaultUuid = domain.props.sharedVaultAssociation
      ? domain.props.sharedVaultAssociation.props.sharedVaultUuid.value
      : null
    typeorm.keySystemIdentifier = domain.props.keySystemAssociation
      ? domain.props.keySystemAssociation.props.keySystemIdentifier
      : null

    return typeorm
  }
}

View File

@@ -2,10 +2,10 @@ import { Timestamps, MapperInterface, UniqueEntityId, Uuid, ContentType, Dates }
import { Item } from '../../Domain/Item/Item'
import { TypeORMItem } from '../../Infra/TypeORM/TypeORMItem'
import { SQLLegacyItem } from '../../Infra/TypeORM/SQLLegacyItem'
export class ItemPersistenceMapper implements MapperInterface<Item, TypeORMItem> {
toDomain(projection: TypeORMItem): Item {
export class SQLLegacyItemPersistenceMapper implements MapperInterface<Item, SQLLegacyItem> {
toDomain(projection: SQLLegacyItem): Item {
const uuidOrError = Uuid.create(projection.uuid)
if (uuidOrError.isFailed()) {
throw new Error(`Failed to create item from projection: ${uuidOrError.getError()}`)
@@ -78,8 +78,8 @@ export class ItemPersistenceMapper implements MapperInterface<Item, TypeORMItem>
return itemOrError.getValue()
}
toProjection(domain: Item): TypeORMItem {
const typeorm = new TypeORMItem()
toProjection(domain: Item): SQLLegacyItem {
const typeorm = new SQLLegacyItem()
typeorm.uuid = domain.id.toString()
typeorm.duplicateOf = domain.props.duplicateOf ? domain.props.duplicateOf.value : null

View File

@@ -2666,6 +2666,15 @@ __metadata:
languageName: node
linkType: hard
"@mongodb-js/saslprep@npm:^1.1.0":
version: 1.1.0
resolution: "@mongodb-js/saslprep@npm:1.1.0"
dependencies:
sparse-bitfield: "npm:^3.0.3"
checksum: 2cf6d124d48d517716eb3a18a09de27bd9b190863692234494954bc7d80cf69e65f6c3165f7d4bbf399c3e70a7e195ac8fb93fbc720f01250d7d987f681d8708
languageName: node
linkType: hard
"@mrleebo/prisma-ast@npm:^0.5.2":
version: 0.5.2
resolution: "@mrleebo/prisma-ast@npm:0.5.2"
@@ -3901,6 +3910,7 @@ __metadata:
inversify: "npm:^6.0.1"
inversify-express-utils: "npm:^6.4.3"
jest: "npm:^29.5.0"
mongodb: "npm:^6.0.0"
mysql2: "npm:^3.0.1"
newrelic: "npm:^10.1.2"
npm-check-updates: "npm:^16.0.1"
@@ -4101,7 +4111,7 @@ __metadata:
inversify-express-utils: "npm:^6.4.3"
jest: "npm:^29.5.0"
jsonwebtoken: "npm:^9.0.0"
mongodb: "npm:^5.7.0"
mongodb: "npm:^6.0.0"
mysql2: "npm:^3.0.1"
newrelic: "npm:^10.1.2"
nodemon: "npm:^2.0.19"
@@ -5646,10 +5656,10 @@ __metadata:
languageName: node
linkType: hard
"bson@npm:^5.4.0":
version: 5.4.0
resolution: "bson@npm:5.4.0"
checksum: 2c913a45c05bf8f1f8120c05e0e4ac9a864928853193c4794634b0c941a7d64397b9cbfe9fa9aba7249eb89d075911c5953efbb1be6b4e0848a0760660dca628
"bson@npm:^6.0.0":
version: 6.0.0
resolution: "bson@npm:6.0.0"
checksum: 7290998ee8eb7d105f9168e5940a6a04743001fe39674d897d802da31c8b326a2934b9e782ba1650906264513fdd27777a802451e69193ba10d6163032214d0a
languageName: node
linkType: hard
@@ -10243,35 +10253,37 @@ __metadata:
languageName: node
linkType: hard
"mongodb@npm:^5.7.0":
version: 5.7.0
resolution: "mongodb@npm:5.7.0"
"mongodb@npm:^6.0.0":
version: 6.0.0
resolution: "mongodb@npm:6.0.0"
dependencies:
bson: "npm:^5.4.0"
"@mongodb-js/saslprep": "npm:^1.1.0"
bson: "npm:^6.0.0"
mongodb-connection-string-url: "npm:^2.6.0"
saslprep: "npm:^1.0.3"
socks: "npm:^2.7.1"
peerDependencies:
"@aws-sdk/credential-providers": ^3.201.0
"@aws-sdk/credential-providers": ^3.188.0
"@mongodb-js/zstd": ^1.1.0
gcp-metadata: ^5.2.0
kerberos: ^2.0.1
mongodb-client-encryption: ">=2.3.0 <3"
mongodb-client-encryption: ">=6.0.0 <7"
snappy: ^7.2.2
dependenciesMeta:
saslprep:
optional: true
socks: ^2.7.1
peerDependenciesMeta:
"@aws-sdk/credential-providers":
optional: true
"@mongodb-js/zstd":
optional: true
gcp-metadata:
optional: true
kerberos:
optional: true
mongodb-client-encryption:
optional: true
snappy:
optional: true
checksum: 23a291ffe7e990f25b527f2d4bd1a848b866211596cc30a36cbe86d773f3bcd74d688aa0a7158b35e24271264d15c35832fcced639b81df4cab7303cdd8442c0
socks:
optional: true
checksum: daec6dc9dc937a9a38c1b1605ef93088928821615499d8f297cf41fcea774682ab1e6a645c43fb34b584b3a5077c74e650d6d9fbf474010a20f5f93279f492d5
languageName: node
linkType: hard
@@ -12077,15 +12089,6 @@ __metadata:
languageName: node
linkType: hard
"saslprep@npm:^1.0.3":
version: 1.0.3
resolution: "saslprep@npm:1.0.3"
dependencies:
sparse-bitfield: "npm:^3.0.3"
checksum: 23ebcda091621541fb9db9635ff36b9be81dc35a79a2adbf2a8309e162bcc9607513488aa3a9da757f11e856592ab8a727ac45c98c6084ff93d627509a882b84
languageName: node
linkType: hard
"schema-utils@npm:^3.1.1, schema-utils@npm:^3.1.2":
version: 3.1.2
resolution: "schema-utils@npm:3.1.2"
@@ -12375,7 +12378,7 @@ __metadata:
languageName: node
linkType: hard
"socks@npm:^2.6.2, socks@npm:^2.7.1":
"socks@npm:^2.6.2":
version: 2.7.1
resolution: "socks@npm:2.7.1"
dependencies: