Compare commits


13 Commits

Author SHA1 Message Date
standardci
ca57c8e7b5 chore(release): publish new version
 - @standardnotes/auth-server@1.141.1
 - @standardnotes/home-server@1.15.34
 - @standardnotes/revisions-server@1.33.6
 - @standardnotes/syncing-server@1.95.1
2023-09-11 13:00:46 +00:00
Karol Sójko
a82b9a0c8a fix: disable running migrations in worker mode of a given service 2023-09-11 14:27:52 +02:00
standardci
ea7e9d73c4 chore(release): publish new version
 - @standardnotes/api-gateway@1.74.4
 - @standardnotes/auth-server@1.141.0
 - @standardnotes/home-server@1.15.33
 - @standardnotes/revisions-server@1.33.5
2023-09-11 11:23:06 +00:00
Karol Sójko
117b7b4b99 fix(revisions): removing queries 2023-09-11 12:42:40 +02:00
Karol Sójko
b4bf11d9da fix(revisions): legacy table syncing and select for metadata 2023-09-11 12:23:11 +02:00
Karol Sójko
0306e10469 fix: db debug level on e2e 2023-09-11 12:03:15 +02:00
Karol Sójko
0ab47013f2 fix(api-gateway): awaiting for other services to start 2023-09-11 12:00:43 +02:00
Karol Sójko
836883b82d fix(revisions): rename table only if exists 2023-09-11 11:56:57 +02:00
Karol Sójko
ed671be9c5 fix(revisions): conflict with table naming 2023-09-11 11:52:39 +02:00
Karol Sójko
9676a2586c fix(revisions): add item_uuid to revisions metadata http representation 2023-09-11 11:39:42 +02:00
Karol Sójko
e95ba61c7f feat(auth): add procedure to transition users created between dates (#816) 2023-09-11 10:40:40 +02:00
standardci
a0718aea26 chore(release): publish new version
 - @standardnotes/home-server@1.15.32
 - @standardnotes/revisions-server@1.33.4
2023-09-11 07:17:47 +00:00
Karol Sójko
156fa7a618 fix(revisions): add shared vault uuid to revision metadata http representation 2023-09-11 08:44:18 +02:00
52 changed files with 429 additions and 160 deletions

.github/ci.env vendored
View File

@@ -4,6 +4,7 @@ DB_USERNAME=std_notes_user
DB_PASSWORD=changeme123
DB_DATABASE=standard_notes_db
DB_PORT=3306
DB_DEBUG_LEVEL=all
DB_SQLITE_DATABASE_PATH=standard_notes_db
REDIS_PORT=6379
REDIS_HOST=cache

View File

@@ -141,6 +141,7 @@ jobs:
echo "DB_USERNAME=standardnotes" >> packages/home-server/.env
echo "DB_PASSWORD=standardnotes" >> packages/home-server/.env
echo "DB_TYPE=${{ matrix.db_type }}" >> packages/home-server/.env
echo "DB_DEBUG_LEVEL=all" >> packages/home-server/.env
echo "REDIS_URL=redis://localhost:6379" >> packages/home-server/.env
echo "CACHE_TYPE=${{ matrix.cache_type }}" >> packages/home-server/.env
echo "SECONDARY_DB_ENABLED=${{ matrix.secondary_db_enabled }}" >> packages/home-server/.env

View File

@@ -3,6 +3,12 @@
All notable changes to this project will be documented in this file.
See [Conventional Commits](https://conventionalcommits.org) for commit guidelines.
## [1.74.4](https://github.com/standardnotes/api-gateway/compare/@standardnotes/api-gateway@1.74.3...@standardnotes/api-gateway@1.74.4) (2023-09-11)
### Bug Fixes
* **api-gateway:** awaiting for other services to start ([0ab4701](https://github.com/standardnotes/api-gateway/commit/0ab47013f210dca7aa404966798011947fb5c362))
## [1.74.3](https://github.com/standardnotes/api-gateway/compare/@standardnotes/api-gateway@1.74.2...@standardnotes/api-gateway@1.74.3) (2023-09-08)
**Note:** Version bump only for package @standardnotes/api-gateway
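The related fix appears a few files below: the server startup script now also waits for the revisions and syncing servers, in addition to auth and files, before launching docker/entrypoint-server.js.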

View File

@@ -1,6 +1,6 @@
{
"name": "@standardnotes/api-gateway",
"version": "1.74.3",
"version": "1.74.4",
"engines": {
"node": ">=18.0.0 <21.0.0"
},

View File

@@ -2,5 +2,8 @@
set -euo pipefail
sh supervisor/wait-for.sh localhost $AUTH_SERVER_PORT
sh supervisor/wait-for.sh localhost $FILES_SERVER_PORT
sh supervisor/wait-for.sh localhost $REVISIONS_SERVER_PORT
sh supervisor/wait-for.sh localhost $SYNCING_SERVER_PORT
node docker/entrypoint-server.js

View File

@@ -3,6 +3,18 @@
All notable changes to this project will be documented in this file.
See [Conventional Commits](https://conventionalcommits.org) for commit guidelines.
## [1.141.1](https://github.com/standardnotes/server/compare/@standardnotes/auth-server@1.141.0...@standardnotes/auth-server@1.141.1) (2023-09-11)
### Bug Fixes
* disable running migrations in worker mode of a given service ([a82b9a0](https://github.com/standardnotes/server/commit/a82b9a0c8a023ba8a450ff9e34bcd62f928fcab3))
# [1.141.0](https://github.com/standardnotes/server/compare/@standardnotes/auth-server@1.140.0...@standardnotes/auth-server@1.141.0) (2023-09-11)
### Features
* **auth:** add procedure to transition users created between dates ([#816](https://github.com/standardnotes/server/issues/816)) ([e95ba61](https://github.com/standardnotes/server/commit/e95ba61c7f769736698ebbc38179d6dc05a8cc5e))
# [1.140.0](https://github.com/standardnotes/server/compare/@standardnotes/auth-server@1.139.0...@standardnotes/auth-server@1.140.0) (2023-09-08)
### Bug Fixes

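In practice, the worker-mode migration fix threads a mode flag from every worker entrypoint down to the data source; a minimal sketch of the pattern, using the names from the auth-server diffs below:

// Worker entrypoints now construct the container in 'worker' mode.
const container = new ContainerConfigLoader('worker')

// Inside ContainerConfigLoader.load(), only 'server' mode triggers migrations:
const appDataSource = new AppDataSource({ env, runMigrations: this.mode === 'server' })
// ...and AppDataSource sets migrationsRun: this.configuration.runMigrations accordingly.

The long-running server process keeps the default 'server' mode, so schema migrations are applied by it rather than by every worker.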
View File

@@ -75,7 +75,7 @@ const requestBackups = async (
})
}
const container = new ContainerConfigLoader()
const container = new ContainerConfigLoader('worker')
void container.load().then((container) => {
dayjs.extend(utc)

View File

@@ -18,7 +18,7 @@ const cleanup = async (
await cleanupExpiredSessions.execute({ date })
}
const container = new ContainerConfigLoader()
const container = new ContainerConfigLoader('worker')
void container.load().then((container) => {
const env: Env = new Env()
env.load()

View File

@@ -8,7 +8,7 @@ import TYPES from '../src/Bootstrap/Types'
import { Env } from '../src/Bootstrap/Env'
import { PersistStatistics } from '../src/Domain/UseCase/PersistStatistics/PersistStatistics'
const container = new ContainerConfigLoader()
const container = new ContainerConfigLoader('worker')
void container.load().then((container) => {
const env: Env = new Env()
env.load()

View File

@@ -0,0 +1,66 @@
import 'reflect-metadata'
import { Logger } from 'winston'
import * as dayjs from 'dayjs'
import * as utc from 'dayjs/plugin/utc'
import { ContainerConfigLoader } from '../src/Bootstrap/Container'
import TYPES from '../src/Bootstrap/Types'
import { Env } from '../src/Bootstrap/Env'
import { DomainEventPublisherInterface } from '@standardnotes/domain-events'
import { DomainEventFactoryInterface } from '../src/Domain/Event/DomainEventFactoryInterface'
import { UserRepositoryInterface } from '../src/Domain/User/UserRepositoryInterface'
const inputArgs = process.argv.slice(2)
const startDateString = inputArgs[0]
const endDateString = inputArgs[1]
const requestTransition = async (
userRepository: UserRepositoryInterface,
logger: Logger,
domainEventFactory: DomainEventFactoryInterface,
domainEventPublisher: DomainEventPublisherInterface,
): Promise<void> => {
const startDate = new Date(startDateString)
const endDate = new Date(endDateString)
const users = await userRepository.findAllCreatedBetween(startDate, endDate)
logger.info(`Found ${users.length} users created between ${startDateString} and ${endDateString}`)
for (const user of users) {
const transitionRequestedEvent = domainEventFactory.createTransitionRequestedEvent({ userUuid: user.uuid })
await domainEventPublisher.publish(transitionRequestedEvent)
}
}
const container = new ContainerConfigLoader('worker')
void container.load().then((container) => {
dayjs.extend(utc)
const env: Env = new Env()
env.load()
const logger: Logger = container.get(TYPES.Auth_Logger)
logger.info(`Starting transition request for users created between ${startDateString} and ${endDateString}`)
const userRepository: UserRepositoryInterface = container.get(TYPES.Auth_UserRepository)
const domainEventFactory: DomainEventFactoryInterface = container.get(TYPES.Auth_DomainEventFactory)
const domainEventPublisher: DomainEventPublisherInterface = container.get(TYPES.Auth_DomainEventPublisher)
Promise.resolve(requestTransition(userRepository, logger, domainEventFactory, domainEventPublisher))
.then(() => {
logger.info(`Finished transition request for users created between ${startDateString} and ${endDateString}`)
process.exit(0)
})
.catch((error) => {
logger.error(
`Error while requesting transition for users created between ${startDateString} and ${endDateString}: ${error}`,
)
process.exit(1)
})
})
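This new transition script reads two positional arguments, loads every user created in that window via findAllCreatedBetween, and publishes one transition-requested event per user, exiting with code 0 on success and 1 on failure. The argument strings go straight into new Date(), so ISO-style dates such as 2023-01-01 (an illustrative value, not one taken from this change) are the safe format.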

View File

@@ -63,7 +63,7 @@ const requestBackups = async (
return
}
const container = new ContainerConfigLoader()
const container = new ContainerConfigLoader('worker')
void container.load().then((container) => {
dayjs.extend(utc)

View File

@@ -9,7 +9,7 @@ import { DomainEventSubscriberFactoryInterface } from '@standardnotes/domain-eve
import * as dayjs from 'dayjs'
import * as utc from 'dayjs/plugin/utc'
const container = new ContainerConfigLoader()
const container = new ContainerConfigLoader('worker')
void container.load().then((container) => {
dayjs.extend(utc)

View File

@@ -0,0 +1,11 @@
'use strict'
const path = require('path')
const pnp = require(path.normalize(path.resolve(__dirname, '../../..', '.pnp.cjs'))).setup()
const index = require(path.normalize(path.resolve(__dirname, '../dist/bin/transition.js')))
Object.defineProperty(exports, '__esModule', { value: true })
exports.default = index

View File

@@ -55,6 +55,13 @@ case "$COMMAND" in
node docker/entrypoint-backup.js one_drive daily
;;
'transition' )
START_DATE=$1 && shift 1
END_DATE=$1 && shift 1
echo "[Docker] Starting Transition..."
node docker/entrypoint-transition.js $START_DATE $END_DATE
;;
* )
echo "[Docker] Unknown command"
;;
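With this wiring, the transition job can be started through the entrypoint's command dispatch, for example entrypoint.sh transition 2023-01-01 2023-09-01 (script name and dates are illustrative); the two dates are forwarded verbatim to node docker/entrypoint-transition.js, the wrapper added earlier in this set of changes.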

View File

@@ -1,6 +1,6 @@
{
"name": "@standardnotes/auth-server",
"version": "1.140.0",
"version": "1.141.1",
"engines": {
"node": ">=18.0.0 <21.0.0"
},

View File

@@ -274,6 +274,8 @@ import { UserAddedToSharedVaultEventHandler } from '../Domain/Handler/UserAddedT
import { UserRemovedFromSharedVaultEventHandler } from '../Domain/Handler/UserRemovedFromSharedVaultEventHandler'
export class ContainerConfigLoader {
constructor(private mode: 'server' | 'worker' = 'server') {}
async load(configuration?: {
controllerConatiner?: ControllerContainerInterface
directCallDomainEventPublisher?: DirectCallDomainEventPublisher
@@ -310,7 +312,7 @@ export class ContainerConfigLoader {
}
container.bind<winston.Logger>(TYPES.Auth_Logger).toConstantValue(logger)
const appDataSource = new AppDataSource(env)
const appDataSource = new AppDataSource({ env, runMigrations: this.mode === 'server' })
await appDataSource.initialize()
logger.debug('Database initialized')

View File

@@ -23,7 +23,12 @@ import { TypeORMSharedVaultUser } from '../Infra/TypeORM/TypeORMSharedVaultUser'
export class AppDataSource {
private _dataSource: DataSource | undefined
constructor(private env: Env) {}
constructor(
private configuration: {
env: Env
runMigrations: boolean
},
) {}
getRepository<Entity extends ObjectLiteral>(target: EntityTarget<Entity>): Repository<Entity> {
if (!this._dataSource) {
@@ -38,12 +43,12 @@ export class AppDataSource {
}
get dataSource(): DataSource {
this.env.load()
this.configuration.env.load()
const isConfiguredForMySQL = this.env.get('DB_TYPE') === 'mysql'
const isConfiguredForMySQL = this.configuration.env.get('DB_TYPE') === 'mysql'
const maxQueryExecutionTime = this.env.get('DB_MAX_QUERY_EXECUTION_TIME', true)
? +this.env.get('DB_MAX_QUERY_EXECUTION_TIME', true)
const maxQueryExecutionTime = this.configuration.env.get('DB_MAX_QUERY_EXECUTION_TIME', true)
? +this.configuration.env.get('DB_MAX_QUERY_EXECUTION_TIME', true)
: 45_000
const commonDataSourceOptions = {
@@ -68,28 +73,28 @@ export class AppDataSource {
TypeORMSharedVaultUser,
],
migrations: [`${__dirname}/../../migrations/${isConfiguredForMySQL ? 'mysql' : 'sqlite'}/*.js`],
migrationsRun: true,
logging: <LoggerOptions>this.env.get('DB_DEBUG_LEVEL', true) ?? 'info',
migrationsRun: this.configuration.runMigrations,
logging: <LoggerOptions>this.configuration.env.get('DB_DEBUG_LEVEL', true) ?? 'info',
}
if (isConfiguredForMySQL) {
const inReplicaMode = this.env.get('DB_REPLICA_HOST', true) ? true : false
const inReplicaMode = this.configuration.env.get('DB_REPLICA_HOST', true) ? true : false
const replicationConfig = {
master: {
host: this.env.get('DB_HOST'),
port: parseInt(this.env.get('DB_PORT')),
username: this.env.get('DB_USERNAME'),
password: this.env.get('DB_PASSWORD'),
database: this.env.get('DB_DATABASE'),
host: this.configuration.env.get('DB_HOST'),
port: parseInt(this.configuration.env.get('DB_PORT')),
username: this.configuration.env.get('DB_USERNAME'),
password: this.configuration.env.get('DB_PASSWORD'),
database: this.configuration.env.get('DB_DATABASE'),
},
slaves: [
{
host: this.env.get('DB_REPLICA_HOST', true),
port: parseInt(this.env.get('DB_PORT')),
username: this.env.get('DB_USERNAME'),
password: this.env.get('DB_PASSWORD'),
database: this.env.get('DB_DATABASE'),
host: this.configuration.env.get('DB_REPLICA_HOST', true),
port: parseInt(this.configuration.env.get('DB_PORT')),
username: this.configuration.env.get('DB_USERNAME'),
password: this.configuration.env.get('DB_PASSWORD'),
database: this.configuration.env.get('DB_DATABASE'),
},
],
removeNodeErrorCount: 10,
@@ -103,11 +108,11 @@ export class AppDataSource {
supportBigNumbers: true,
bigNumberStrings: false,
replication: inReplicaMode ? replicationConfig : undefined,
host: inReplicaMode ? undefined : this.env.get('DB_HOST'),
port: inReplicaMode ? undefined : parseInt(this.env.get('DB_PORT')),
username: inReplicaMode ? undefined : this.env.get('DB_USERNAME'),
password: inReplicaMode ? undefined : this.env.get('DB_PASSWORD'),
database: inReplicaMode ? undefined : this.env.get('DB_DATABASE'),
host: inReplicaMode ? undefined : this.configuration.env.get('DB_HOST'),
port: inReplicaMode ? undefined : parseInt(this.configuration.env.get('DB_PORT')),
username: inReplicaMode ? undefined : this.configuration.env.get('DB_USERNAME'),
password: inReplicaMode ? undefined : this.configuration.env.get('DB_PASSWORD'),
database: inReplicaMode ? undefined : this.configuration.env.get('DB_DATABASE'),
}
this._dataSource = new DataSource(mySQLDataSourceOptions)
@@ -115,7 +120,7 @@ export class AppDataSource {
const sqliteDataSourceOptions: SqliteConnectionOptions = {
...commonDataSourceOptions,
type: 'sqlite',
database: this.env.get('DB_SQLITE_DATABASE_PATH'),
database: this.configuration.env.get('DB_SQLITE_DATABASE_PATH'),
enableWAL: true,
busyErrorRetry: 2000,
}

View File

@@ -4,4 +4,4 @@ import { Env } from './Env'
const env: Env = new Env()
env.load()
export const MigrationsDataSource = new AppDataSource(env).dataSource
export const MigrationsDataSource = new AppDataSource({ env, runMigrations: true }).dataSource

View File

@@ -8,6 +8,7 @@ export interface UserRepositoryInterface {
streamTeam(memberEmail?: Email): Promise<ReadStream>
findOneByUuid(uuid: Uuid): Promise<User | null>
findOneByUsernameOrEmail(usernameOrEmail: Email | Username): Promise<User | null>
findAllCreatedBetween(start: Date, end: Date): Promise<User[]>
save(user: User): Promise<User>
remove(user: User): Promise<User>
}

View File

@@ -14,6 +14,13 @@ export class TypeORMUserRepository implements UserRepositoryInterface {
private ormRepository: Repository<User>,
) {}
async findAllCreatedBetween(start: Date, end: Date): Promise<User[]> {
return this.ormRepository
.createQueryBuilder('user')
.where('user.created_at BETWEEN :start AND :end', { start, end })
.getMany()
}
async save(user: User): Promise<User> {
return this.ormRepository.save(user)
}

View File

@@ -3,6 +3,18 @@
All notable changes to this project will be documented in this file.
See [Conventional Commits](https://conventionalcommits.org) for commit guidelines.
## [1.15.34](https://github.com/standardnotes/server/compare/@standardnotes/home-server@1.15.33...@standardnotes/home-server@1.15.34) (2023-09-11)
**Note:** Version bump only for package @standardnotes/home-server
## [1.15.33](https://github.com/standardnotes/server/compare/@standardnotes/home-server@1.15.32...@standardnotes/home-server@1.15.33) (2023-09-11)
**Note:** Version bump only for package @standardnotes/home-server
## [1.15.32](https://github.com/standardnotes/server/compare/@standardnotes/home-server@1.15.31...@standardnotes/home-server@1.15.32) (2023-09-11)
**Note:** Version bump only for package @standardnotes/home-server
## [1.15.31](https://github.com/standardnotes/server/compare/@standardnotes/home-server@1.15.30...@standardnotes/home-server@1.15.31) (2023-09-08)
**Note:** Version bump only for package @standardnotes/home-server

View File

@@ -1,6 +1,6 @@
{
"name": "@standardnotes/home-server",
"version": "1.15.31",
"version": "1.15.34",
"engines": {
"node": ">=18.0.0 <21.0.0"
},

View File

@@ -3,6 +3,28 @@
All notable changes to this project will be documented in this file.
See [Conventional Commits](https://conventionalcommits.org) for commit guidelines.
## [1.33.6](https://github.com/standardnotes/server/compare/@standardnotes/revisions-server@1.33.5...@standardnotes/revisions-server@1.33.6) (2023-09-11)
### Bug Fixes
* disable running migrations in worker mode of a given service ([a82b9a0](https://github.com/standardnotes/server/commit/a82b9a0c8a023ba8a450ff9e34bcd62f928fcab3))
## [1.33.5](https://github.com/standardnotes/server/compare/@standardnotes/revisions-server@1.33.4...@standardnotes/revisions-server@1.33.5) (2023-09-11)
### Bug Fixes
* **revisions:** add item_uuid to revisions metadata http representation ([9676a25](https://github.com/standardnotes/server/commit/9676a2586cabc344f3c244a5a20d4e1d57ac4c35))
* **revisions:** conflict with table naming ([ed671be](https://github.com/standardnotes/server/commit/ed671be9c56eb8d54b8dd52a69d5e30e07203aa4))
* **revisions:** legacy table syncing and select for metadata ([b4bf11d](https://github.com/standardnotes/server/commit/b4bf11d9da44cc1d980b6922f532ff487fe582df))
* **revisions:** removing queries ([117b7b4](https://github.com/standardnotes/server/commit/117b7b4b99e09aebfd081433646ec05c2ebf308f))
* **revisions:** rename table only if exists ([836883b](https://github.com/standardnotes/server/commit/836883b82dd436d0e9b3ae3b70f734ceb508e227))
## [1.33.4](https://github.com/standardnotes/server/compare/@standardnotes/revisions-server@1.33.3...@standardnotes/revisions-server@1.33.4) (2023-09-11)
### Bug Fixes
* **revisions:** add shared vault uuid to revision metadata http representation ([156fa7a](https://github.com/standardnotes/server/commit/156fa7a618b9f33a839580019d0398c1b838697d))
## [1.33.3](https://github.com/standardnotes/server/compare/@standardnotes/revisions-server@1.33.2...@standardnotes/revisions-server@1.33.3) (2023-09-08)
**Note:** Version bump only for package @standardnotes/revisions-server
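Most of the revisions-server diffs that follow stem from two threads of work. First, the SQLRevision entity now maps to a revisions_revisions table instead of revisions:

@Entity({ name: 'revisions_revisions' })

so the MySQL and SQLite migrations are rewritten around that name, and a new RenameRevisionsTable1694425333972 migration renames an existing revisions table only if it is present. Second, item_uuid and shared_vault_uuid are added to the metadata selects, persistence mappers, and HTTP representation.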

View File

@@ -7,7 +7,7 @@ import { Env } from '../src/Bootstrap/Env'
import { DomainEventSubscriberFactoryInterface } from '@standardnotes/domain-events'
import { ContainerConfigLoader } from '../src/Bootstrap/Container'
const container = new ContainerConfigLoader()
const container = new ContainerConfigLoader('worker')
void container.load().then((container) => {
const env: Env = new Env()
env.load()

View File

@@ -4,19 +4,19 @@ export class init1669113322388 implements MigrationInterface {
name = 'init1669113322388'
public async up(queryRunner: QueryRunner): Promise<void> {
await this.syncSchemaBetweenLegacyRevisions(queryRunner)
await queryRunner.query(
'CREATE TABLE IF NOT EXISTS `revisions` (`uuid` varchar(36) NOT NULL, `item_uuid` varchar(36) NOT NULL, `user_uuid` varchar(36) NOT NULL, `content` mediumtext NULL, `content_type` varchar(255) NULL, `items_key_id` varchar(255) NULL, `enc_item_key` text NULL, `auth_hash` varchar(255) NULL, `creation_date` date NULL, `created_at` datetime(6) NULL, `updated_at` datetime(6) NULL, INDEX `item_uuid` (`item_uuid`), INDEX `user_uuid` (`user_uuid`), INDEX `creation_date` (`creation_date`), INDEX `created_at` (`created_at`), PRIMARY KEY (`uuid`)) ENGINE=InnoDB',
'CREATE TABLE `revisions_revisions` (`uuid` varchar(36) NOT NULL, `item_uuid` varchar(36) NOT NULL, `user_uuid` varchar(36) NULL, `content` mediumtext NULL, `content_type` varchar(255) NULL, `items_key_id` varchar(255) NULL, `enc_item_key` text NULL, `auth_hash` varchar(255) NULL, `creation_date` date NULL, `created_at` datetime(6) NULL, `updated_at` datetime(6) NULL, INDEX `item_uuid` (`item_uuid`), INDEX `user_uuid` (`user_uuid`), INDEX `creation_date` (`creation_date`), INDEX `created_at` (`created_at`), PRIMARY KEY (`uuid`)) ENGINE=InnoDB',
)
await this.syncSchemaBetweenLegacyRevisions(queryRunner)
}
public async down(queryRunner: QueryRunner): Promise<void> {
await queryRunner.query('DROP INDEX `created_at` ON `revisions`')
await queryRunner.query('DROP INDEX `creation_date` ON `revisions`')
await queryRunner.query('DROP INDEX `user_uuid` ON `revisions`')
await queryRunner.query('DROP INDEX `item_uuid` ON `revisions`')
await queryRunner.query('DROP TABLE `revisions`')
await queryRunner.query('DROP INDEX `created_at` ON `revisions_revisions`')
await queryRunner.query('DROP INDEX `creation_date` ON `revisions_revisions`')
await queryRunner.query('DROP INDEX `user_uuid` ON `revisions_revisions`')
await queryRunner.query('DROP INDEX `item_uuid` ON `revisions_revisions`')
await queryRunner.query('DROP TABLE `revisions_revisions`')
}
private async syncSchemaBetweenLegacyRevisions(queryRunner: QueryRunner): Promise<void> {
@@ -28,14 +28,10 @@ export class init1669113322388 implements MigrationInterface {
return
}
const revisionsTableHasUserUuidColumnQueryResult = await queryRunner.manager.query(
'SELECT COUNT(*) as count FROM information_schema.columns WHERE table_schema = DATABASE() AND table_name = "revisions" AND column_name = "user_uuid"',
await queryRunner.query(
'INSERT INTO `revisions_revisions`(`uuid`, `item_uuid`, `user_uuid`, `content`, `content_type`, `items_key_id`, `enc_item_key`, `auth_hash`, `creation_date`, `created_at`, `updated_at`) SELECT `uuid`, `item_uuid`, NULL, `content`, `content_type`, `items_key_id`, `enc_item_key`, `auth_hash`, `creation_date`, `created_at`, `updated_at` FROM `revisions`',
)
const revisionsTableHasUserUuidColumn = revisionsTableHasUserUuidColumnQueryResult[0].count === 1
if (revisionsTableHasUserUuidColumn) {
return
}
await queryRunner.query('ALTER TABLE `revisions` ADD COLUMN `user_uuid` varchar(36) NULL')
await queryRunner.query('DROP TABLE `revisions`')
}
}

View File

@@ -5,24 +5,24 @@ export class removeDateIndexes1669636497932 implements MigrationInterface {
public async up(queryRunner: QueryRunner): Promise<void> {
const indexRevisionsOnCreatedAt = await queryRunner.manager.query(
'SHOW INDEX FROM `revisions` where `key_name` = "created_at"',
'SHOW INDEX FROM `revisions_revisions` where `key_name` = "created_at"',
)
const indexRevisionsOnCreatedAtExist = indexRevisionsOnCreatedAt && indexRevisionsOnCreatedAt.length > 0
if (indexRevisionsOnCreatedAtExist) {
await queryRunner.query('DROP INDEX `created_at` ON `revisions`')
await queryRunner.query('DROP INDEX `created_at` ON `revisions_revisions`')
}
const indexRevisionsOnCreationDate = await queryRunner.manager.query(
'SHOW INDEX FROM `revisions` where `key_name` = "creation_date"',
'SHOW INDEX FROM `revisions_revisions` where `key_name` = "creation_date"',
)
const indexRevisionsOnCreationDateAtExist = indexRevisionsOnCreationDate && indexRevisionsOnCreationDate.length > 0
if (indexRevisionsOnCreationDateAtExist) {
await queryRunner.query('DROP INDEX `creation_date` ON `revisions`')
await queryRunner.query('DROP INDEX `creation_date` ON `revisions_revisions`')
}
}
public async down(queryRunner: QueryRunner): Promise<void> {
await queryRunner.query('CREATE INDEX `creation_date` ON `revisions` (`creation_date`)')
await queryRunner.query('CREATE INDEX `created_at` ON `revisions` (`created_at`)')
await queryRunner.query('CREATE INDEX `creation_date` ON `revisions_revisions` (`creation_date`)')
await queryRunner.query('CREATE INDEX `created_at` ON `revisions_revisions` (`created_at`)')
}
}

View File

@@ -4,10 +4,10 @@ export class makeUserUuidNullable1669735585016 implements MigrationInterface {
name = 'makeUserUuidNullable1669735585016'
public async up(queryRunner: QueryRunner): Promise<void> {
await queryRunner.query('ALTER TABLE `revisions` CHANGE `user_uuid` `user_uuid` varchar(36) NULL')
await queryRunner.query('ALTER TABLE `revisions_revisions` CHANGE `user_uuid` `user_uuid` varchar(36) NULL')
}
public async down(queryRunner: QueryRunner): Promise<void> {
await queryRunner.query('ALTER TABLE `revisions` CHANGE `user_uuid` `user_uuid` varchar(36) NOT NULL')
await queryRunner.query('ALTER TABLE `revisions_revisions` CHANGE `user_uuid` `user_uuid` varchar(36) NOT NULL')
}
}

View File

@@ -2,16 +2,18 @@ import { MigrationInterface, QueryRunner } from 'typeorm'
export class AddSharedVaultInformation1693915383950 implements MigrationInterface {
public async up(queryRunner: QueryRunner): Promise<void> {
await queryRunner.query('ALTER TABLE `revisions` ADD `edited_by` varchar(36) NULL')
await queryRunner.query('ALTER TABLE `revisions` ADD `shared_vault_uuid` varchar(36) NULL')
await queryRunner.query('ALTER TABLE `revisions` ADD `key_system_identifier` varchar(36) NULL')
await queryRunner.query('CREATE INDEX `index_revisions_on_shared_vault_uuid` ON `revisions` (`shared_vault_uuid`)')
await queryRunner.query('ALTER TABLE `revisions_revisions` ADD `edited_by` varchar(36) NULL')
await queryRunner.query('ALTER TABLE `revisions_revisions` ADD `shared_vault_uuid` varchar(36) NULL')
await queryRunner.query('ALTER TABLE `revisions_revisions` ADD `key_system_identifier` varchar(36) NULL')
await queryRunner.query(
'CREATE INDEX `index_revisions_on_shared_vault_uuid` ON `revisions_revisions` (`shared_vault_uuid`)',
)
}
public async down(queryRunner: QueryRunner): Promise<void> {
await queryRunner.query('DROP INDEX `index_revisions_on_shared_vault_uuid` ON `revisions`')
await queryRunner.query('ALTER TABLE `revisions` DROP COLUMN `key_system_identifier`')
await queryRunner.query('ALTER TABLE `revisions` DROP COLUMN `shared_vault_uuid`')
await queryRunner.query('ALTER TABLE `revisions` DROP COLUMN `last_edited_by`')
await queryRunner.query('DROP INDEX `index_revisions_on_shared_vault_uuid` ON `revisions_revisions`')
await queryRunner.query('ALTER TABLE `revisions_revisions` DROP COLUMN `key_system_identifier`')
await queryRunner.query('ALTER TABLE `revisions_revisions` DROP COLUMN `shared_vault_uuid`')
await queryRunner.query('ALTER TABLE `revisions_revisions` DROP COLUMN `last_edited_by`')
}
}

View File

@@ -0,0 +1,19 @@
import { MigrationInterface, QueryRunner } from 'typeorm'
export class RenameRevisionsTable1694425333972 implements MigrationInterface {
name = 'RenameRevisionsTable1694425333972'
public async up(queryRunner: QueryRunner): Promise<void> {
const revisionsTableExistsQueryResult = await queryRunner.manager.query(
'SELECT COUNT(*) as count FROM information_schema.tables WHERE table_schema = DATABASE() AND table_name = "revisions"',
)
const revisionsTableExists = revisionsTableExistsQueryResult[0].count === 1
if (revisionsTableExists) {
await queryRunner.query('RENAME TABLE `revisions` TO `revisions_revisions`')
}
}
public async down(): Promise<void> {
return
}
}

View File

@@ -5,15 +5,15 @@ export class initialBoilerplate1682678053275 implements MigrationInterface {
public async up(queryRunner: QueryRunner): Promise<void> {
await queryRunner.query(
'CREATE TABLE "revisions" ("uuid" varchar PRIMARY KEY NOT NULL, "item_uuid" varchar(36) NOT NULL, "user_uuid" varchar(36), "content" text, "content_type" varchar(255), "items_key_id" varchar(255), "enc_item_key" text, "auth_hash" varchar(255), "creation_date" date, "created_at" datetime(6), "updated_at" datetime(6))',
'CREATE TABLE "revisions_revisions" ("uuid" varchar PRIMARY KEY NOT NULL, "item_uuid" varchar(36) NOT NULL, "user_uuid" varchar(36), "content" text, "content_type" varchar(255), "items_key_id" varchar(255), "enc_item_key" text, "auth_hash" varchar(255), "creation_date" date, "created_at" datetime(6), "updated_at" datetime(6))',
)
await queryRunner.query('CREATE INDEX "item_uuid" ON "revisions" ("item_uuid") ')
await queryRunner.query('CREATE INDEX "user_uuid" ON "revisions" ("user_uuid") ')
await queryRunner.query('CREATE INDEX "item_uuid" ON "revisions_revisions" ("item_uuid") ')
await queryRunner.query('CREATE INDEX "user_uuid" ON "revisions_revisions" ("user_uuid") ')
}
public async down(queryRunner: QueryRunner): Promise<void> {
await queryRunner.query('DROP INDEX "user_uuid"')
await queryRunner.query('DROP INDEX "item_uuid"')
await queryRunner.query('DROP TABLE "revisions"')
await queryRunner.query('DROP TABLE "revisions_revisions"')
}
}

View File

@@ -10,28 +10,30 @@ export class AddSharedVaultInformation1693915775491 implements MigrationInterfac
'CREATE TABLE "temporary_revisions" ("uuid" varchar PRIMARY KEY NOT NULL, "item_uuid" varchar(36) NOT NULL, "user_uuid" varchar(36), "content" text, "content_type" varchar(255), "items_key_id" varchar(255), "enc_item_key" text, "auth_hash" varchar(255), "creation_date" date, "created_at" datetime(6), "updated_at" datetime(6), "edited_by" varchar(36), "shared_vault_uuid" varchar(36), "key_system_identifier" varchar(36))',
)
await queryRunner.query(
'INSERT INTO "temporary_revisions"("uuid", "item_uuid", "user_uuid", "content", "content_type", "items_key_id", "enc_item_key", "auth_hash", "creation_date", "created_at", "updated_at") SELECT "uuid", "item_uuid", "user_uuid", "content", "content_type", "items_key_id", "enc_item_key", "auth_hash", "creation_date", "created_at", "updated_at" FROM "revisions"',
'INSERT INTO "temporary_revisions"("uuid", "item_uuid", "user_uuid", "content", "content_type", "items_key_id", "enc_item_key", "auth_hash", "creation_date", "created_at", "updated_at") SELECT "uuid", "item_uuid", "user_uuid", "content", "content_type", "items_key_id", "enc_item_key", "auth_hash", "creation_date", "created_at", "updated_at" FROM "revisions_revisions"',
)
await queryRunner.query('DROP TABLE "revisions_revisions"')
await queryRunner.query('ALTER TABLE "temporary_revisions" RENAME TO "revisions_revisions"')
await queryRunner.query('CREATE INDEX "user_uuid" ON "revisions_revisions" ("user_uuid") ')
await queryRunner.query('CREATE INDEX "item_uuid" ON "revisions_revisions" ("item_uuid") ')
await queryRunner.query(
'CREATE INDEX "index_revisions_on_shared_vault_uuid" ON "revisions_revisions" ("shared_vault_uuid") ',
)
await queryRunner.query('DROP TABLE "revisions"')
await queryRunner.query('ALTER TABLE "temporary_revisions" RENAME TO "revisions"')
await queryRunner.query('CREATE INDEX "user_uuid" ON "revisions" ("user_uuid") ')
await queryRunner.query('CREATE INDEX "item_uuid" ON "revisions" ("item_uuid") ')
await queryRunner.query('CREATE INDEX "index_revisions_on_shared_vault_uuid" ON "revisions" ("shared_vault_uuid") ')
}
public async down(queryRunner: QueryRunner): Promise<void> {
await queryRunner.query('DROP INDEX "index_revisions_on_shared_vault_uuid"')
await queryRunner.query('DROP INDEX "item_uuid"')
await queryRunner.query('DROP INDEX "user_uuid"')
await queryRunner.query('ALTER TABLE "revisions" RENAME TO "temporary_revisions"')
await queryRunner.query('ALTER TABLE "revisions_revisions" RENAME TO "temporary_revisions"')
await queryRunner.query(
'CREATE TABLE "revisions" ("uuid" varchar PRIMARY KEY NOT NULL, "item_uuid" varchar(36) NOT NULL, "user_uuid" varchar(36), "content" text, "content_type" varchar(255), "items_key_id" varchar(255), "enc_item_key" text, "auth_hash" varchar(255), "creation_date" date, "created_at" datetime(6), "updated_at" datetime(6))',
'CREATE TABLE "revisions_revisions" ("uuid" varchar PRIMARY KEY NOT NULL, "item_uuid" varchar(36) NOT NULL, "user_uuid" varchar(36), "content" text, "content_type" varchar(255), "items_key_id" varchar(255), "enc_item_key" text, "auth_hash" varchar(255), "creation_date" date, "created_at" datetime(6), "updated_at" datetime(6))',
)
await queryRunner.query(
'INSERT INTO "revisions"("uuid", "item_uuid", "user_uuid", "content", "content_type", "items_key_id", "enc_item_key", "auth_hash", "creation_date", "created_at", "updated_at") SELECT "uuid", "item_uuid", "user_uuid", "content", "content_type", "items_key_id", "enc_item_key", "auth_hash", "creation_date", "created_at", "updated_at" FROM "temporary_revisions"',
'INSERT INTO "revisions_revisions"("uuid", "item_uuid", "user_uuid", "content", "content_type", "items_key_id", "enc_item_key", "auth_hash", "creation_date", "created_at", "updated_at") SELECT "uuid", "item_uuid", "user_uuid", "content", "content_type", "items_key_id", "enc_item_key", "auth_hash", "creation_date", "created_at", "updated_at" FROM "temporary_revisions"',
)
await queryRunner.query('DROP TABLE "temporary_revisions"')
await queryRunner.query('CREATE INDEX "item_uuid" ON "revisions" ("item_uuid") ')
await queryRunner.query('CREATE INDEX "user_uuid" ON "revisions" ("user_uuid") ')
await queryRunner.query('CREATE INDEX "item_uuid" ON "revisions_revisions" ("item_uuid") ')
await queryRunner.query('CREATE INDEX "user_uuid" ON "revisions_revisions" ("user_uuid") ')
}
}

View File

@@ -1,6 +1,6 @@
{
"name": "@standardnotes/revisions-server",
"version": "1.33.3",
"version": "1.33.6",
"engines": {
"node": ">=18.0.0 <21.0.0"
},

View File

@@ -70,6 +70,8 @@ import { RemoveRevisionsFromSharedVault } from '../Domain/UseCase/RemoveRevision
import { ItemRemovedFromSharedVaultEventHandler } from '../Domain/Handler/ItemRemovedFromSharedVaultEventHandler'
export class ContainerConfigLoader {
constructor(private mode: 'server' | 'worker' = 'server') {}
async load(configuration?: {
controllerConatiner?: ControllerContainerInterface
directCallDomainEventPublisher?: DirectCallDomainEventPublisher
@@ -115,7 +117,7 @@ export class ContainerConfigLoader {
container.bind<TimerInterface>(TYPES.Revisions_Timer).toDynamicValue(() => new Timer())
const appDataSource = new AppDataSource(env)
const appDataSource = new AppDataSource({ env, runMigrations: this.mode === 'server' })
await appDataSource.initialize()
logger.debug('Database initialized')

View File

@@ -12,7 +12,12 @@ export class AppDataSource {
private _dataSource: DataSource | undefined
private _secondaryDataSource: DataSource | undefined
constructor(private env: Env) {}
constructor(
private configuration: {
env: Env
runMigrations: boolean
},
) {}
getRepository<Entity extends ObjectLiteral>(target: EntityTarget<Entity>): Repository<Entity> {
if (!this._dataSource) {
@@ -39,20 +44,20 @@ export class AppDataSource {
}
get secondaryDataSource(): DataSource | undefined {
this.env.load()
this.configuration.env.load()
if (this.env.get('SECONDARY_DB_ENABLED', true) !== 'true') {
if (this.configuration.env.get('SECONDARY_DB_ENABLED', true) !== 'true') {
return undefined
}
this._secondaryDataSource = new DataSource({
type: 'mongodb',
host: this.env.get('MONGO_HOST'),
host: this.configuration.env.get('MONGO_HOST'),
authSource: 'admin',
port: parseInt(this.env.get('MONGO_PORT')),
username: this.env.get('MONGO_USERNAME'),
password: this.env.get('MONGO_PASSWORD', true),
database: this.env.get('MONGO_DATABASE'),
port: parseInt(this.configuration.env.get('MONGO_PORT')),
username: this.configuration.env.get('MONGO_USERNAME'),
password: this.configuration.env.get('MONGO_PASSWORD', true),
database: this.configuration.env.get('MONGO_DATABASE'),
entities: [MongoDBRevision],
retryWrites: false,
synchronize: true,
@@ -62,15 +67,16 @@ export class AppDataSource {
}
get dataSource(): DataSource {
this.env.load()
this.configuration.env.load()
const isConfiguredForMySQL = this.env.get('DB_TYPE') === 'mysql'
const isConfiguredForMySQL = this.configuration.env.get('DB_TYPE') === 'mysql'
const isConfiguredForHomeServerOrSelfHosting =
this.env.get('MODE', true) === 'home-server' || this.env.get('MODE', true) === 'self-hosted'
this.configuration.env.get('MODE', true) === 'home-server' ||
this.configuration.env.get('MODE', true) === 'self-hosted'
const maxQueryExecutionTime = this.env.get('DB_MAX_QUERY_EXECUTION_TIME', true)
? +this.env.get('DB_MAX_QUERY_EXECUTION_TIME', true)
const maxQueryExecutionTime = this.configuration.env.get('DB_MAX_QUERY_EXECUTION_TIME', true)
? +this.configuration.env.get('DB_MAX_QUERY_EXECUTION_TIME', true)
: 45_000
const migrationsSourceDirectoryName = isConfiguredForMySQL
@@ -83,28 +89,28 @@ export class AppDataSource {
maxQueryExecutionTime,
entities: [isConfiguredForHomeServerOrSelfHosting ? SQLRevision : SQLLegacyRevision],
migrations: [`${__dirname}/../../migrations/${migrationsSourceDirectoryName}/*.js`],
migrationsRun: true,
logging: <LoggerOptions>this.env.get('DB_DEBUG_LEVEL', true) ?? 'info',
migrationsRun: this.configuration.runMigrations,
logging: <LoggerOptions>this.configuration.env.get('DB_DEBUG_LEVEL', true) ?? 'info',
}
if (isConfiguredForMySQL) {
const inReplicaMode = this.env.get('DB_REPLICA_HOST', true) ? true : false
const inReplicaMode = this.configuration.env.get('DB_REPLICA_HOST', true) ? true : false
const replicationConfig = {
master: {
host: this.env.get('DB_HOST'),
port: parseInt(this.env.get('DB_PORT')),
username: this.env.get('DB_USERNAME'),
password: this.env.get('DB_PASSWORD'),
database: this.env.get('DB_DATABASE'),
host: this.configuration.env.get('DB_HOST'),
port: parseInt(this.configuration.env.get('DB_PORT')),
username: this.configuration.env.get('DB_USERNAME'),
password: this.configuration.env.get('DB_PASSWORD'),
database: this.configuration.env.get('DB_DATABASE'),
},
slaves: [
{
host: this.env.get('DB_REPLICA_HOST', true),
port: parseInt(this.env.get('DB_PORT')),
username: this.env.get('DB_USERNAME'),
password: this.env.get('DB_PASSWORD'),
database: this.env.get('DB_DATABASE'),
host: this.configuration.env.get('DB_REPLICA_HOST', true),
port: parseInt(this.configuration.env.get('DB_PORT')),
username: this.configuration.env.get('DB_USERNAME'),
password: this.configuration.env.get('DB_PASSWORD'),
database: this.configuration.env.get('DB_DATABASE'),
},
],
removeNodeErrorCount: 10,
@@ -118,11 +124,11 @@ export class AppDataSource {
supportBigNumbers: true,
bigNumberStrings: false,
replication: inReplicaMode ? replicationConfig : undefined,
host: inReplicaMode ? undefined : this.env.get('DB_HOST'),
port: inReplicaMode ? undefined : parseInt(this.env.get('DB_PORT')),
username: inReplicaMode ? undefined : this.env.get('DB_USERNAME'),
password: inReplicaMode ? undefined : this.env.get('DB_PASSWORD'),
database: inReplicaMode ? undefined : this.env.get('DB_DATABASE'),
host: inReplicaMode ? undefined : this.configuration.env.get('DB_HOST'),
port: inReplicaMode ? undefined : parseInt(this.configuration.env.get('DB_PORT')),
username: inReplicaMode ? undefined : this.configuration.env.get('DB_USERNAME'),
password: inReplicaMode ? undefined : this.configuration.env.get('DB_PASSWORD'),
database: inReplicaMode ? undefined : this.configuration.env.get('DB_DATABASE'),
}
this._dataSource = new DataSource(mySQLDataSourceOptions)
@@ -130,7 +136,7 @@ export class AppDataSource {
const sqliteDataSourceOptions: SqliteConnectionOptions = {
...commonDataSourceOptions,
type: 'sqlite',
database: this.env.get('DB_SQLITE_DATABASE_PATH'),
database: this.configuration.env.get('DB_SQLITE_DATABASE_PATH'),
enableWAL: true,
busyErrorRetry: 2000,
}

View File

@@ -4,4 +4,4 @@ import { Env } from './Env'
const env: Env = new Env()
env.load()
export const MigrationsDataSource = new AppDataSource(env).dataSource
export const MigrationsDataSource = new AppDataSource({ env, runMigrations: true }).dataSource

View File

@@ -1,6 +1,8 @@
import { ContentType, Dates } from '@standardnotes/domain-core'
import { ContentType, Dates, Uuid } from '@standardnotes/domain-core'
export interface RevisionMetadataProps {
contentType: ContentType
itemUuid: Uuid
sharedVaultUuid: Uuid | null
dates: Dates
}

View File

@@ -126,7 +126,7 @@ export class MongoDBRevisionRepository implements RevisionRepositoryInterface {
let persistence = []
if (sharedVaultUuids.length > 0) {
persistence = await this.mongoRepository.find({
select: ['_id', 'contentType', 'createdAt', 'updatedAt'],
select: ['_id', 'contentType', 'createdAt', 'updatedAt', 'sharedVaultUuid', 'itemUuid'],
where: {
$and: [
{ itemUuid: { $eq: itemUuid.value } },
@@ -144,7 +144,7 @@ export class MongoDBRevisionRepository implements RevisionRepositoryInterface {
})
} else {
persistence = await this.mongoRepository.find({
select: ['_id', 'contentType', 'createdAt', 'updatedAt'],
select: ['_id', 'contentType', 'createdAt', 'updatedAt', 'sharedVaultUuid', 'itemUuid'],
where: {
$and: [{ itemUuid: { $eq: itemUuid.value } }, { userUuid: { $eq: userUuid.value } }],
},

View File

@@ -129,6 +129,7 @@ export class SQLLegacyRevisionRepository implements RevisionRepositoryInterface
.addSelect('content_type', 'contentType')
.addSelect('created_at', 'createdAt')
.addSelect('updated_at', 'updatedAt')
.addSelect('item_uuid', 'itemUuid')
.where('item_uuid = :itemUuid', { itemUuid: itemUuid.value })
.andWhere('user_uuid = :userUuid', { userUuid: userUuid.value })
.orderBy('created_at', 'DESC')

View File

@@ -2,7 +2,7 @@ import { Column, Entity, Index } from 'typeorm'
import { SQLLegacyRevision } from './SQLLegacyRevision'
@Entity({ name: 'revisions' })
@Entity({ name: 'revisions_revisions' })
export class SQLRevision extends SQLLegacyRevision {
@Column({
type: 'varchar',

View File

@@ -17,6 +17,27 @@ export class SQLRevisionRepository extends SQLLegacyRevisionRepository {
super(ormRepository, revisionMetadataMapper, revisionMapper, logger)
}
override async removeByUserUuid(userUuid: Uuid): Promise<void> {
await this.ormRepository
.createQueryBuilder()
.delete()
.from('revisions_revisions')
.where('user_uuid = :userUuid', { userUuid: userUuid.value })
.execute()
}
override async removeOneByUuid(revisionUuid: Uuid, userUuid: Uuid): Promise<void> {
await this.ormRepository
.createQueryBuilder()
.delete()
.from('revisions_revisions')
.where('uuid = :revisionUuid AND user_uuid = :userUuid', {
userUuid: userUuid.value,
revisionUuid: revisionUuid.value,
})
.execute()
}
override async findOneByUuid(revisionUuid: Uuid, userUuid: Uuid, sharedVaultUuids: Uuid[]): Promise<Revision | null> {
const queryBuilder = this.ormRepository.createQueryBuilder()
@@ -71,6 +92,8 @@ export class SQLRevisionRepository extends SQLLegacyRevisionRepository {
.addSelect('content_type', 'contentType')
.addSelect('created_at', 'createdAt')
.addSelect('updated_at', 'updatedAt')
.addSelect('shared_vault_uuid', 'sharedVaultUuid')
.addSelect('item_uuid', 'itemUuid')
.orderBy('created_at', 'DESC')
if (sharedVaultUuids.length > 0) {

View File

@@ -15,10 +15,12 @@ export class RevisionMetadataHttpMapper
toProjection(domain: RevisionMetadata): RevisionMetadataHttpRepresentation {
return {
uuid: domain.id.toString(),
item_uuid: domain.props.itemUuid.value,
content_type: domain.props.contentType.value as string,
created_at: domain.props.dates.createdAt.toISOString(),
updated_at: domain.props.dates.updatedAt.toISOString(),
required_role: this.getRequiredRoleToViewRevision.execute({ createdAt: domain.props.dates.createdAt }).getValue(),
shared_vault_uuid: domain.props.sharedVaultUuid ? domain.props.sharedVaultUuid.value : null,
}
}
}

View File

@@ -1,7 +1,9 @@
export interface RevisionMetadataHttpRepresentation {
uuid: string
item_uuid: string
content_type: string
created_at: string
updated_at: string
required_role: string
shared_vault_uuid: string | null
}
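Putting the interface and the mapper change together, a single metadata entry in the HTTP response now looks roughly like this; all values are made up for illustration, including required_role, which in the real code comes from the getRequiredRoleToViewRevision use case:

const example: RevisionMetadataHttpRepresentation = {
  uuid: '7c9e3d7a-1b2f-4c8e-9a6d-2f1e5b7c9d01',
  item_uuid: '0b4f2a6e-8c1d-4e7a-b3f5-6d9c2e8a1f44', // newly exposed
  content_type: 'Note',
  created_at: '2023-09-10T12:00:00.000Z',
  updated_at: '2023-09-10T12:00:00.000Z',
  required_role: 'CORE_USER', // hypothetical value
  shared_vault_uuid: null, // newly exposed; a uuid string when the revision belongs to a shared vault
}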

View File

@@ -1,4 +1,4 @@
import { MapperInterface, Dates, UniqueEntityId, ContentType } from '@standardnotes/domain-core'
import { MapperInterface, Dates, UniqueEntityId, ContentType, Uuid } from '@standardnotes/domain-core'
import { RevisionMetadata } from '../../../Domain/Revision/RevisionMetadata'
import { MongoDBRevision } from '../../../Infra/TypeORM/MongoDB/MongoDBRevision'
@@ -20,10 +20,27 @@ export class MongoDBRevisionMetadataPersistenceMapper implements MapperInterface
}
const dates = datesOrError.getValue()
let sharedVaultUuid = null
if (projection.sharedVaultUuid) {
const sharedVaultUuidOrError = Uuid.create(projection.sharedVaultUuid)
if (sharedVaultUuidOrError.isFailed()) {
throw new Error(`Could not create shared vault uuid: ${sharedVaultUuidOrError.getError()}`)
}
sharedVaultUuid = sharedVaultUuidOrError.getValue()
}
const itemUuidOrError = Uuid.create(projection.itemUuid)
if (itemUuidOrError.isFailed()) {
throw new Error(`Could not create item uuid: ${itemUuidOrError.getError()}`)
}
const itemUuid = itemUuidOrError.getValue()
const revisionMetadataOrError = RevisionMetadata.create(
{
contentType,
dates,
sharedVaultUuid,
itemUuid,
},
new UniqueEntityId(projection._id.toHexString()),
)

View File

@@ -1,4 +1,4 @@
import { MapperInterface, Dates, UniqueEntityId, ContentType } from '@standardnotes/domain-core'
import { MapperInterface, Dates, UniqueEntityId, ContentType, Uuid } from '@standardnotes/domain-core'
import { RevisionMetadata } from '../../../Domain/Revision/RevisionMetadata'
import { SQLLegacyRevision } from '../../../Infra/TypeORM/SQL/SQLLegacyRevision'
@@ -22,10 +22,18 @@ export class SQLLegacyRevisionMetadataPersistenceMapper
}
const dates = datesOrError.getValue()
const itemUuidOrError = Uuid.create(projection.itemUuid)
if (itemUuidOrError.isFailed()) {
throw new Error(`Could not create item uuid: ${itemUuidOrError.getError()}`)
}
const itemUuid = itemUuidOrError.getValue()
const revisionMetadataOrError = RevisionMetadata.create(
{
contentType,
dates,
sharedVaultUuid: null,
itemUuid,
},
new UniqueEntityId(projection.uuid),
)

View File

@@ -1,4 +1,4 @@
import { MapperInterface, Dates, UniqueEntityId, ContentType } from '@standardnotes/domain-core'
import { MapperInterface, Dates, UniqueEntityId, ContentType, Uuid } from '@standardnotes/domain-core'
import { RevisionMetadata } from '../../../Domain/Revision/RevisionMetadata'
import { SQLRevision } from '../../../Infra/TypeORM/SQL/SQLRevision'
@@ -20,10 +20,27 @@ export class SQLRevisionMetadataPersistenceMapper implements MapperInterface<Rev
}
const dates = datesOrError.getValue()
let sharedVaultUuid = null
if (projection.sharedVaultUuid) {
const sharedVaultUuidOrError = Uuid.create(projection.sharedVaultUuid)
if (sharedVaultUuidOrError.isFailed()) {
throw new Error(`Could not create shared vault uuid: ${sharedVaultUuidOrError.getError()}`)
}
sharedVaultUuid = sharedVaultUuidOrError.getValue()
}
const itemUuidOrError = Uuid.create(projection.itemUuid)
if (itemUuidOrError.isFailed()) {
throw new Error(`Could not create item uuid: ${itemUuidOrError.getError()}`)
}
const itemUuid = itemUuidOrError.getValue()
const revisionMetadataOrError = RevisionMetadata.create(
{
contentType,
dates,
sharedVaultUuid,
itemUuid,
},
new UniqueEntityId(projection.uuid),
)

View File

@@ -3,6 +3,12 @@
All notable changes to this project will be documented in this file.
See [Conventional Commits](https://conventionalcommits.org) for commit guidelines.
## [1.95.1](https://github.com/standardnotes/syncing-server-js/compare/@standardnotes/syncing-server@1.95.0...@standardnotes/syncing-server@1.95.1) (2023-09-11)
### Bug Fixes
* disable running migrations in worker mode of a given service ([a82b9a0](https://github.com/standardnotes/syncing-server-js/commit/a82b9a0c8a023ba8a450ff9e34bcd62f928fcab3))
# [1.95.0](https://github.com/standardnotes/syncing-server-js/compare/@standardnotes/syncing-server@1.94.0...@standardnotes/syncing-server@1.95.0) (2023-09-08)
### Features

View File

@@ -24,7 +24,7 @@ const requestTransition = async (
return
}
const container = new ContainerConfigLoader()
const container = new ContainerConfigLoader('worker')
void container.load().then((container) => {
const env: Env = new Env()
env.load()

View File

@@ -7,7 +7,7 @@ import { Env } from '../src/Bootstrap/Env'
import { DomainEventSubscriberFactoryInterface } from '@standardnotes/domain-events'
import { ContainerConfigLoader } from '../src/Bootstrap/Container'
const container = new ContainerConfigLoader()
const container = new ContainerConfigLoader('worker')
void container.load().then((container) => {
const env: Env = new Env()
env.load()

View File

@@ -1,6 +1,6 @@
{
"name": "@standardnotes/syncing-server",
"version": "1.95.0",
"version": "1.95.1",
"engines": {
"node": ">=18.0.0 <21.0.0"
},

View File

@@ -173,6 +173,8 @@ export class ContainerConfigLoader {
private readonly DEFAULT_MAX_ITEMS_LIMIT = 300
private readonly DEFAULT_FILE_UPLOAD_PATH = `${__dirname}/../../uploads`
constructor(private mode: 'server' | 'worker' = 'server') {}
async load(configuration?: {
controllerConatiner?: ControllerContainerInterface
directCallDomainEventPublisher?: DirectCallDomainEventPublisher
@@ -211,7 +213,7 @@ export class ContainerConfigLoader {
}
container.bind<winston.Logger>(TYPES.Sync_Logger).toConstantValue(logger)
const appDataSource = new AppDataSource(env)
const appDataSource = new AppDataSource({ env, runMigrations: this.mode === 'server' })
await appDataSource.initialize()
logger.debug('Database initialized')

View File

@@ -15,7 +15,12 @@ export class AppDataSource {
private _dataSource: DataSource | undefined
private _secondaryDataSource: DataSource | undefined
constructor(private env: Env) {}
constructor(
private configuration: {
env: Env
runMigrations: boolean
},
) {}
getRepository<Entity extends ObjectLiteral>(target: EntityTarget<Entity>): Repository<Entity> {
if (!this._dataSource) {
@@ -42,20 +47,20 @@ export class AppDataSource {
}
get secondaryDataSource(): DataSource | undefined {
this.env.load()
this.configuration.env.load()
if (this.env.get('SECONDARY_DB_ENABLED', true) !== 'true') {
if (this.configuration.env.get('SECONDARY_DB_ENABLED', true) !== 'true') {
return undefined
}
this._secondaryDataSource = new DataSource({
type: 'mongodb',
host: this.env.get('MONGO_HOST'),
host: this.configuration.env.get('MONGO_HOST'),
authSource: 'admin',
port: parseInt(this.env.get('MONGO_PORT')),
username: this.env.get('MONGO_USERNAME'),
password: this.env.get('MONGO_PASSWORD', true),
database: this.env.get('MONGO_DATABASE'),
port: parseInt(this.configuration.env.get('MONGO_PORT')),
username: this.configuration.env.get('MONGO_USERNAME'),
password: this.configuration.env.get('MONGO_PASSWORD', true),
database: this.configuration.env.get('MONGO_DATABASE'),
entities: [MongoDBItem],
retryWrites: false,
synchronize: true,
@@ -65,14 +70,15 @@ export class AppDataSource {
}
get dataSource(): DataSource {
this.env.load()
this.configuration.env.load()
const isConfiguredForMySQL = this.env.get('DB_TYPE') === 'mysql'
const isConfiguredForMySQL = this.configuration.env.get('DB_TYPE') === 'mysql'
const isConfiguredForHomeServerOrSelfHosting =
this.env.get('MODE', true) === 'home-server' || this.env.get('MODE', true) === 'self-hosted'
this.configuration.env.get('MODE', true) === 'home-server' ||
this.configuration.env.get('MODE', true) === 'self-hosted'
const maxQueryExecutionTime = this.env.get('DB_MAX_QUERY_EXECUTION_TIME', true)
? +this.env.get('DB_MAX_QUERY_EXECUTION_TIME', true)
const maxQueryExecutionTime = this.configuration.env.get('DB_MAX_QUERY_EXECUTION_TIME', true)
? +this.configuration.env.get('DB_MAX_QUERY_EXECUTION_TIME', true)
: 45_000
const migrationsSourceDirectoryName = isConfiguredForMySQL
@@ -92,28 +98,28 @@ export class AppDataSource {
TypeORMMessage,
],
migrations: [`${__dirname}/../../migrations/${migrationsSourceDirectoryName}/*.js`],
migrationsRun: true,
logging: <LoggerOptions>this.env.get('DB_DEBUG_LEVEL', true) ?? 'info',
migrationsRun: this.configuration.runMigrations,
logging: <LoggerOptions>this.configuration.env.get('DB_DEBUG_LEVEL', true) ?? 'info',
}
if (isConfiguredForMySQL) {
const inReplicaMode = this.env.get('DB_REPLICA_HOST', true) ? true : false
const inReplicaMode = this.configuration.env.get('DB_REPLICA_HOST', true) ? true : false
const replicationConfig = {
master: {
host: this.env.get('DB_HOST'),
port: parseInt(this.env.get('DB_PORT')),
username: this.env.get('DB_USERNAME'),
password: this.env.get('DB_PASSWORD'),
database: this.env.get('DB_DATABASE'),
host: this.configuration.env.get('DB_HOST'),
port: parseInt(this.configuration.env.get('DB_PORT')),
username: this.configuration.env.get('DB_USERNAME'),
password: this.configuration.env.get('DB_PASSWORD'),
database: this.configuration.env.get('DB_DATABASE'),
},
slaves: [
{
host: this.env.get('DB_REPLICA_HOST', true),
port: parseInt(this.env.get('DB_PORT')),
username: this.env.get('DB_USERNAME'),
password: this.env.get('DB_PASSWORD'),
database: this.env.get('DB_DATABASE'),
host: this.configuration.env.get('DB_REPLICA_HOST', true),
port: parseInt(this.configuration.env.get('DB_PORT')),
username: this.configuration.env.get('DB_USERNAME'),
password: this.configuration.env.get('DB_PASSWORD'),
database: this.configuration.env.get('DB_DATABASE'),
},
],
removeNodeErrorCount: 10,
@@ -127,11 +133,11 @@ export class AppDataSource {
supportBigNumbers: true,
bigNumberStrings: false,
replication: inReplicaMode ? replicationConfig : undefined,
host: inReplicaMode ? undefined : this.env.get('DB_HOST'),
port: inReplicaMode ? undefined : parseInt(this.env.get('DB_PORT')),
username: inReplicaMode ? undefined : this.env.get('DB_USERNAME'),
password: inReplicaMode ? undefined : this.env.get('DB_PASSWORD'),
database: inReplicaMode ? undefined : this.env.get('DB_DATABASE'),
host: inReplicaMode ? undefined : this.configuration.env.get('DB_HOST'),
port: inReplicaMode ? undefined : parseInt(this.configuration.env.get('DB_PORT')),
username: inReplicaMode ? undefined : this.configuration.env.get('DB_USERNAME'),
password: inReplicaMode ? undefined : this.configuration.env.get('DB_PASSWORD'),
database: inReplicaMode ? undefined : this.configuration.env.get('DB_DATABASE'),
}
this._dataSource = new DataSource(mySQLDataSourceOptions)
@@ -139,7 +145,7 @@ export class AppDataSource {
const sqliteDataSourceOptions: SqliteConnectionOptions = {
...commonDataSourceOptions,
type: 'sqlite',
database: this.env.get('DB_SQLITE_DATABASE_PATH'),
database: this.configuration.env.get('DB_SQLITE_DATABASE_PATH'),
enableWAL: true,
busyErrorRetry: 2000,
}

View File

@@ -4,4 +4,4 @@ import { Env } from './Env'
const env: Env = new Env()
env.load()
export const MigrationsDataSource = new AppDataSource(env).dataSource
export const MigrationsDataSource = new AppDataSource({ env, runMigrations: true }).dataSource