namespace JWLMerge.BackupFileServices
{
    using System;
    using System.Collections.Generic;
    using System.IO;
    using System.IO.Compression;
    using System.Linq;
    using System.Security.Cryptography;
    using System.Text;
    using Events;
    using Exceptions;
    using Helpers;
    using Models;
    using Models.Database;
    using Models.ManifestFile;
    using Newtonsoft.Json;
    using Newtonsoft.Json.Linq;
    using Serilog;

    /// <summary>
    /// Loads, merges and writes "jwlibrary" backup files, i.e. zip archives
    /// containing a "manifest.json" and a SQLite user database.
    /// </summary>
    public sealed class BackupFileService : IBackupFileService
    {
        private const int ManifestVersionSupported = 1;
        private const int DatabaseVersionSupported = 5;
        private const string ManifestEntryName = "manifest.json";
        private const string DatabaseEntryName = "userData.db";

        private readonly Merger _merger = new Merger();

        /// <summary>
        /// Raised with human-readable progress messages during long operations.
        /// </summary>
        public event EventHandler<ProgressEventArgs> ProgressEvent;

        public BackupFileService()
        {
            // forward the merger's progress reports to our own subscribers
            _merger.ProgressEvent += MergerProgressEvent;
        }

        /// <summary>
        /// Loads the specified backup file.
        /// </summary>
        /// <param name="backupFilePath">Path of the jwlibrary file.</param>
        /// <returns>The parsed <see cref="BackupFile"/> (manifest and database).</returns>
        /// <exception cref="ArgumentNullException">Path is null or empty.</exception>
        /// <exception cref="BackupFileServicesException">File missing, or its manifest/database is unsupported.</exception>
        public BackupFile Load(string backupFilePath)
        {
            if (string.IsNullOrEmpty(backupFilePath))
            {
                throw new ArgumentNullException(nameof(backupFilePath));
            }

            if (!File.Exists(backupFilePath))
            {
                throw new BackupFileServicesException($"File does not exist: {backupFilePath}");
            }

            ProgressMessage($"Loading {backupFilePath}");

            // ZipArchive takes ownership of the stream and disposes it (leaveOpen defaults to false)
            using (var archive = new ZipArchive(File.OpenRead(backupFilePath), ZipArchiveMode.Read))
            {
                var manifest = ReadManifest(archive);
                var database = ReadDatabase(archive, manifest.UserDataBackup.DatabaseName);

                return new BackupFile
                {
                    Manifest = manifest,
                    Database = database
                };
            }
        }

        /// <summary>
        /// Creates a blank backup file.
        /// </summary>
        /// <returns>An empty <see cref="BackupFile"/>.</returns>
        public BackupFile CreateBlank()
        {
            ProgressMessage("Creating blank file...");
            return new BackupFile();
        }

        /// <summary>
        /// Writes the specified backup to a new "jwlibrary" file.
        /// </summary>
        /// <param name="backup">The backup data to write.</param>
        /// <param name="newDatabaseFilePath">Path of the jwlibrary file to create.</param>
        /// <param name="originalJwlibraryFilePathForSchema">
        /// An existing jwlibrary file whose database supplies the schema for the new database.
        /// </param>
        public void WriteNewDatabase(
            BackupFile backup,
            string newDatabaseFilePath,
            string originalJwlibraryFilePathForSchema)
        {
            ProgressMessage("Writing merged database file...");

            using (var memoryStream = new MemoryStream())
            {
                using (var archive = new ZipArchive(memoryStream, ZipArchiveMode.Create, true))
                {
                    Log.Logger.Debug("Created ZipArchive");

                    var tmpDatabaseFileName = ExtractDatabaseToFile(originalJwlibraryFilePathForSchema);
                    try
                    {
                        // note: this also updates backup.Manifest.UserDataBackup.Hash so that
                        // the manifest describes the database actually stored in the archive.
                        AddDatabaseEntryToArchive(archive, backup, tmpDatabaseFileName);
                    }
                    finally
                    {
                        Log.Logger.Debug("Deleting {tmpDatabaseFileName}", tmpDatabaseFileName);
                        File.Delete(tmpDatabaseFileName);
                    }

                    // the manifest must be written after the hash has been calculated above
                    AddManifestEntryToArchive(archive, backup.Manifest);
                }

                using (var fileStream = new FileStream(newDatabaseFilePath, FileMode.Create))
                {
                    ProgressMessage("Finishing...");

                    memoryStream.Seek(0, SeekOrigin.Begin);
                    memoryStream.CopyTo(fileStream);
                }
            }
        }

        /// <summary>
        /// Merges the specified backup files.
        /// </summary>
        /// <param name="files">Paths of the jwlibrary files to merge.</param>
        /// <returns>The merged backup file.</returns>
        public BackupFile Merge(IReadOnlyCollection<string> files)
        {
            ProgressMessage($"Merging {files.Count} backup files...");

            int fileNumber = 1;
            var originals = new List<BackupFile>();
            foreach (var file in files)
            {
                Log.Logger.Debug("Merging file {fileNumber} = {fileName}", fileNumber++, file);
                Log.Logger.Debug("============");

                var backupFile = Load(file);
                Clean(backupFile);
                originals.Add(backupFile);
            }

            // just pick the first manifest as the basis for the
            // manifest in the final merged file...
            var newManifest = UpdateManifest(originals.First().Manifest);

            var mergedDatabase = MergeDatabases(originals);
            return new BackupFile { Manifest = newManifest, Database = mergedDatabase };
        }

        /// <summary>
        /// Clones the given manifest and rebrands it for the merged output file.
        /// </summary>
        private Manifest UpdateManifest(Manifest manifestToBaseOn)
        {
            Log.Logger.Debug("Updating manifest");

            Manifest result = manifestToBaseOn.Clone();

            // local date is intentional here - it is used to name the user-visible output file
            DateTime now = DateTime.Now;
            string simpleDateString = $"{now.Year}-{now.Month:D2}-{now.Day:D2}";

            result.Name = $"merged_{simpleDateString}";
            result.CreationDate = simpleDateString;
            result.UserDataBackup.DeviceName = "JWLMerge";
            result.UserDataBackup.DatabaseName = DatabaseEntryName;

            Log.Logger.Debug("Updated manifest");

            return result;
        }

        /// <summary>
        /// Merges the databases of the given backup files into a single database.
        /// </summary>
        private Database MergeDatabases(IEnumerable<BackupFile> jwlibraryFiles)
        {
            ProgressMessage("Merging databases...");
            return _merger.Merge(jwlibraryFiles.Select(x => x.Database));
        }

        private void MergerProgressEvent(object sender, ProgressEventArgs e)
        {
            OnProgressEvent(e);
        }

        /// <summary>
        /// Removes inaccessible rows from the given backup file prior to merging.
        /// </summary>
        private void Clean(BackupFile backupFile)
        {
            Log.Logger.Debug("Cleaning backup file {backupFile}", backupFile.Manifest.Name);

            var cleaner = new Cleaner(backupFile);
            int rowsRemoved = cleaner.Clean();
            if (rowsRemoved > 0)
            {
                ProgressMessage($"Removed {rowsRemoved} inaccessible rows");
            }
        }

        /// <summary>
        /// Extracts the named database entry from the archive and reads it.
        /// </summary>
        private Database ReadDatabase(ZipArchive archive, string databaseName)
        {
            ProgressMessage($"Reading database {databaseName}...");

            var databaseEntry = archive.Entries.FirstOrDefault(x => x.Name.Equals(databaseName));
            if (databaseEntry == null)
            {
                throw new BackupFileServicesException("Could not find database entry in jwlibrary file");
            }

            Database result;

            // SQLite needs a real file, so extract the entry to a temporary location first
            var tmpFile = Path.GetTempFileName();
            try
            {
                Log.Logger.Debug("Extracting database to {tmpFile}", tmpFile);
                databaseEntry.ExtractToFile(tmpFile, overwrite: true);

                DataAccessLayer dataAccessLayer = new DataAccessLayer(tmpFile);
                result = dataAccessLayer.ReadDatabase();
            }
            finally
            {
                Log.Logger.Debug("Deleting {tmpFile}", tmpFile);
                File.Delete(tmpFile);
            }

            return result;
        }

        /// <summary>
        /// Extracts the user database from the given jwlibrary file into a temporary file.
        /// </summary>
        /// <returns>Path of the temporary file. The caller is responsible for deleting it.</returns>
        private string ExtractDatabaseToFile(string jwlibraryFile)
        {
            Log.Logger.Debug("Opening ZipArchive {jwlibraryFile}", jwlibraryFile);

            using (var archive = new ZipArchive(File.OpenRead(jwlibraryFile), ZipArchiveMode.Read))
            {
                var manifest = ReadManifest(archive);

                var databaseEntry = archive.Entries.FirstOrDefault(x => x.Name.Equals(manifest.UserDataBackup.DatabaseName));
                if (databaseEntry == null)
                {
                    // previously a missing entry caused a NullReferenceException below
                    throw new BackupFileServicesException("Could not find database entry in jwlibrary file");
                }

                var tmpFile = Path.GetTempFileName();
                databaseEntry.ExtractToFile(tmpFile, overwrite: true);

                Log.Logger.Information("Created temp file: {tmpDatabaseFileName}", tmpFile);
                return tmpFile;
            }
        }

        /// <summary>
        /// Reads and validates the manifest entry of the archive.
        /// </summary>
        /// <exception cref="BackupFileServicesException">
        /// Manifest missing, or its manifest/database schema version is unsupported.
        /// </exception>
        private Manifest ReadManifest(ZipArchive archive)
        {
            ProgressMessage("Reading manifest...");

            var manifestEntry = archive.Entries.FirstOrDefault(x => x.Name.Equals(ManifestEntryName));
            if (manifestEntry == null)
            {
                throw new BackupFileServicesException("Could not find manifest entry in jwlibrary file");
            }

            using (StreamReader stream = new StreamReader(manifestEntry.Open()))
            {
                var fileContents = stream.ReadToEnd();

                Log.Logger.Debug("Parsing manifest");
                dynamic data = JObject.Parse(fileContents);

                // validate versions before attempting full deserialization
                int manifestVersion = data.version ?? 0;
                if (!SupportManifestVersion(manifestVersion))
                {
                    throw new BackupFileServicesException($"Manifest version {manifestVersion} is not supported");
                }

                int databaseVersion = data.userDataBackup.schemaVersion ?? 0;
                if (!SupportDatabaseVersion(databaseVersion))
                {
                    throw new BackupFileServicesException($"Database version {databaseVersion} is not supported");
                }

                var result = JsonConvert.DeserializeObject<Manifest>(fileContents);

                var prettyJson = JsonConvert.SerializeObject(result, Formatting.Indented);
                Log.Logger.Debug("Parsed manifest {manifestJson}", prettyJson);

                return result;
            }
        }

        private bool SupportDatabaseVersion(int version)
        {
            return version == DatabaseVersionSupported;
        }

        private bool SupportManifestVersion(int version)
        {
            return version == ManifestVersionSupported;
        }

        /// <summary>
        /// Generates the sha256 database hash that is required in the manifest.json file.
        /// </summary>
        /// <param name="databaseFilePath">The database file path.</param>
        /// <returns>The hash as a lower-case hex string.</returns>
        private string GenerateDatabaseHash(string databaseFilePath)
        {
            ProgressMessage("Generating database hash...");

            using (var fileStream = new FileStream(databaseFilePath, FileMode.Open, FileAccess.Read))
            using (var bufferedStream = new BufferedStream(fileStream))
            using (var sha256 = SHA256.Create())
            {
                byte[] hash = sha256.ComputeHash(bufferedStream);

                var sb = new StringBuilder(2 * hash.Length);
                foreach (byte b in hash)
                {
                    sb.AppendFormat("{0:x2}", b);
                }

                return sb.ToString();
            }
        }

        /// <summary>
        /// Serializes the manifest into the archive as "manifest.json".
        /// </summary>
        private void AddManifestEntryToArchive(ZipArchive archive, Manifest manifest)
        {
            var manifestEntry = archive.CreateEntry(ManifestEntryName);

            // the writer must be disposed so its buffer is flushed into the entry;
            // previously the StreamWriter was never flushed and the entry could be truncated.
            using (var entryStream = manifestEntry.Open())
            using (var streamWriter = new StreamWriter(entryStream))
            {
                streamWriter.Write(JsonConvert.SerializeObject(manifest));
            }
        }

        /// <summary>
        /// Builds the merged database file, records its hash in the manifest, and
        /// stores it in the archive as "userData.db".
        /// </summary>
        /// <remarks>
        /// The hash is deliberately calculated on the newly-populated database file
        /// (the file actually placed in the archive), not on the schema-source file -
        /// hashing the latter would leave the manifest inconsistent with the archive.
        /// </remarks>
        private void AddDatabaseEntryToArchive(
            ZipArchive archive,
            BackupFile backup,
            string originalDatabaseFilePathForSchema)
        {
            ProgressMessage("Adding database to archive...");

            var tmpDatabaseFile = CreateTemporaryDatabaseFile(backup.Database, originalDatabaseFilePathForSchema);
            try
            {
                backup.Manifest.UserDataBackup.Hash = GenerateDatabaseHash(tmpDatabaseFile);
                archive.CreateEntryFromFile(tmpDatabaseFile, DatabaseEntryName);
            }
            finally
            {
                File.Delete(tmpDatabaseFile);
            }
        }

        /// <summary>
        /// Creates a temporary database file containing the given data, using the
        /// schema of the database at <paramref name="originalDatabaseFilePathForSchema"/>.
        /// </summary>
        /// <returns>Path of the temporary file. The caller is responsible for deleting it.</returns>
        private string CreateTemporaryDatabaseFile(
            Database backupDatabase,
            string originalDatabaseFilePathForSchema)
        {
            string tmpFile = Path.GetTempFileName();

            Log.Logger.Debug("Creating temporary database file {tmpFile}", tmpFile);

            {
                var dataAccessLayer = new DataAccessLayer(originalDatabaseFilePathForSchema);
                dataAccessLayer.CreateEmptyClone(tmpFile);
            }

            {
                var dataAccessLayer = new DataAccessLayer(tmpFile);
                dataAccessLayer.PopulateTables(backupDatabase);
            }

            return tmpFile;
        }

        private void OnProgressEvent(ProgressEventArgs e)
        {
            ProgressEvent?.Invoke(this, e);
        }

        private void OnProgressEvent(string message)
        {
            OnProgressEvent(new ProgressEventArgs { Message = message });
        }

        /// <summary>
        /// Logs the message and raises it as a progress event.
        /// </summary>
        private void ProgressMessage(string logMessage)
        {
            Log.Logger.Information(logMessage);
            OnProgressEvent(logMessage);
        }
    }
}

namespace JWLMerge.BackupFileServices.Events
{
    using System;

    /// <summary>
    /// Event arguments carrying a human-readable progress message.
    /// </summary>
    public class ProgressEventArgs : EventArgs
    {
        // the message to show to the user
        public string Message { get; set; }
    }
}

namespace JWLMerge.BackupFileServices.Exceptions
{
    using System;

    /// <summary>
    /// Exception thrown for errors detected while reading, merging or writing backup files.
    /// </summary>
    [Serializable]
    public class BackupFileServicesException : Exception
    {
        public BackupFileServicesException(string errorMessage)
            : base(errorMessage)
        {
        }
    }
}
namespace JWLMerge.BackupFileServices.Helpers
{
    using System.Collections.Generic;
    using System.Linq;
    using Models;
    using Serilog;

    /// <summary>
    /// Cleans jwlibrary files by removing redundant or anomalous database rows.
    /// </summary>
    internal class Cleaner
    {
        private readonly BackupFile _backupFile;

        public Cleaner(BackupFile backupFile)
        {
            _backupFile = backupFile;
        }

        /// <summary>
        /// Cleans the data, removing unused rows.
        /// </summary>
        /// <returns>Number of rows removed.</returns>
        public int Clean()
        {
            return CleanBlockRanges() + CleanLocations();
        }

        /// <summary>
        /// Collects the ids of all existing user marks.
        /// </summary>
        private HashSet<int> GetUserMarkIdsInUse()
        {
            var inUse = new HashSet<int>();

            foreach (var mark in _backupFile.Database.UserMarks)
            {
                inUse.Add(mark.UserMarkId);
            }

            return inUse;
        }

        /// <summary>
        /// Collects every location id referenced by a bookmark, note or user mark.
        /// </summary>
        private HashSet<int> GetLocationIdsInUse()
        {
            var inUse = new HashSet<int>();

            foreach (var bookmark in _backupFile.Database.Bookmarks)
            {
                inUse.Add(bookmark.LocationId);
                inUse.Add(bookmark.PublicationLocationId);
            }

            foreach (var note in _backupFile.Database.Notes)
            {
                // a note's location is optional
                if (note.LocationId != null)
                {
                    inUse.Add(note.LocationId.Value);
                }
            }

            foreach (var mark in _backupFile.Database.UserMarks)
            {
                inUse.Add(mark.LocationId);
            }

            Log.Logger.Debug($"Found {inUse.Count} location Ids in use");

            return inUse;
        }

        /// <summary>
        /// Cleans the locations.
        /// </summary>
        /// <returns>Number of location rows removed.</returns>
        private int CleanLocations()
        {
            var locations = _backupFile.Database.Locations;
            if (!locations.Any())
            {
                return 0;
            }

            var usedIds = GetLocationIdsInUse();
            var removedCount = 0;

            // Enumerable.Reverse snapshots the list, so removal during iteration is safe
            foreach (var location in Enumerable.Reverse(locations))
            {
                if (usedIds.Contains(location.LocationId))
                {
                    continue;
                }

                Log.Logger.Debug($"Removing redundant location id: {location.LocationId}");
                locations.Remove(location);
                ++removedCount;
            }

            return removedCount;
        }

        /// <summary>
        /// Cleans the block ranges.
        /// </summary>
        /// <returns>Number of ranges removed.</returns>
        private int CleanBlockRanges()
        {
            var ranges = _backupFile.Database.BlockRanges;
            if (!ranges.Any())
            {
                return 0;
            }

            var userMarkIdsInUse = GetUserMarkIdsInUse();
            var seenUserMarkIds = new HashSet<int>();
            var removedCount = 0;

            // Enumerable.Reverse snapshots the list, so removal during iteration is safe;
            // iterating in reverse also means that where a UserMarkId is duplicated the
            // LAST occurrence in the table is the one that is kept.
            foreach (var range in Enumerable.Reverse(ranges))
            {
                if (!userMarkIdsInUse.Contains(range.UserMarkId))
                {
                    Log.Logger.Debug($"Removing redundant range: {range.BlockRangeId}");
                    ranges.Remove(range);
                    ++removedCount;
                }
                else if (!seenUserMarkIds.Add(range.UserMarkId))
                {
                    // don't know how to handle this situation - we are expecting
                    // a unique constraint on the UserMarkId column but have found
                    // occasional duplication!
                    Log.Logger.Debug($"Removing redundant range (duplicate UserMarkId): {range.BlockRangeId}");
                    ranges.Remove(range);
                    ++removedCount;
                }
            }

            return removedCount;
        }
    }
}
namespace JWLMerge.BackupFileServices.Helpers
{
    using System;
    using System.Collections.Generic;
    using System.Data.SQLite;
    using System.Linq;
    using System.Reflection;
    using System.Text;
    using Models.Database;
    using Serilog;

    /// <summary>
    /// Isolates all data access to the SQLite database embedded in jwlibrary files.
    /// Table and column names are derived from the model types via reflection, so the
    /// model property names must match the database schema exactly.
    /// </summary>
    internal class DataAccessLayer
    {
        private readonly string _databaseFilePath;

        public DataAccessLayer(string databaseFilePath)
        {
            _databaseFilePath = databaseFilePath;
        }

        /// <summary>
        /// Creates a new empty database using the schema from the current database.
        /// </summary>
        /// <param name="cloneFilePath">The clone file path (the new database).</param>
        public void CreateEmptyClone(string cloneFilePath)
        {
            Log.Logger.Debug($"Creating empty clone: {cloneFilePath}");

            using (var source = CreateConnection(_databaseFilePath))
            using (var destination = CreateConnection(cloneFilePath))
            {
                // copy schema + data, then delete the data, leaving just the schema
                source.BackupDatabase(destination, "main", "main", -1, null, -1);
                ClearData(destination);
            }
        }

        /// <summary>
        /// Populates the current database using the specified data.
        /// </summary>
        /// <param name="dataToUse">The data to use.</param>
        /// <remarks>
        /// NOTE(review): Bookmark rows are read by <see cref="ReadDatabase"/> but are
        /// not written here - confirm this omission is intentional.
        /// </remarks>
        public void PopulateTables(Database dataToUse)
        {
            using (var connection = CreateConnection())
            {
                PopulateTable(connection, dataToUse.Locations);
                PopulateTable(connection, dataToUse.Notes);
                PopulateTable(connection, dataToUse.UserMarks);
                PopulateTable(connection, dataToUse.Tags);
                PopulateTable(connection, dataToUse.TagMaps);
                PopulateTable(connection, dataToUse.BlockRanges);
            }
        }

        /// <summary>
        /// Reads the current database into an in-memory model.
        /// </summary>
        /// <returns>The populated <see cref="Database"/>.</returns>
        public Database ReadDatabase()
        {
            var result = new Database();

            using (var connection = CreateConnection())
            {
                // LastModified is a single-row table
                result.LastModified = ReadAllRows(connection, ReadLastModified).FirstOrDefault();
                result.Locations = ReadAllRows(connection, ReadLocation);
                result.Notes = ReadAllRows(connection, ReadNote);
                result.Tags = ReadAllRows(connection, ReadTag);
                result.TagMaps = ReadAllRows(connection, ReadTagMap);
                result.BlockRanges = ReadAllRows(connection, ReadBlockRange);
                result.Bookmarks = ReadAllRows(connection, ReadBookmark);
                result.UserMarks = ReadAllRows(connection, ReadUserMark);
            }

            return result;
        }

        /// <summary>
        /// Reads all rows of the table whose name matches <typeparamref name="TRowType"/>.
        /// </summary>
        private List<TRowType> ReadAllRows<TRowType>(
            SQLiteConnection connection,
            Func<SQLiteDataReader, TRowType> readRowFunction)
        {
            using (SQLiteCommand cmd = connection.CreateCommand())
            {
                var result = new List<TRowType>();

                // table name comes from our own type names, never from user input
                var tableName = typeof(TRowType).Name;

                cmd.CommandText = $"select * from {tableName}";
                Log.Logger.Debug($"SQL: {cmd.CommandText}");

                using (var reader = cmd.ExecuteReader())
                {
                    while (reader.Read())
                    {
                        result.Add(readRowFunction(reader));
                    }
                }

                Log.Logger.Debug($"SQL resultset count: {result.Count}");

                return result;
            }
        }

        private string ReadString(SQLiteDataReader reader, string columnName)
        {
            return reader[columnName].ToString();
        }

        private string ReadNullableString(SQLiteDataReader reader, string columnName)
        {
            var value = reader[columnName];
            return value == DBNull.Value ? null : value.ToString();
        }

        private int ReadInt(SQLiteDataReader reader, string columnName)
        {
            return Convert.ToInt32(reader[columnName]);
        }

        private int? ReadNullableInt(SQLiteDataReader reader, string columnName)
        {
            var value = reader[columnName];
            return value == DBNull.Value ? (int?)null : Convert.ToInt32(value);
        }

        private Location ReadLocation(SQLiteDataReader reader)
        {
            return new Location
            {
                LocationId = ReadInt(reader, "LocationId"),
                BookNumber = ReadNullableInt(reader, "BookNumber"),
                ChapterNumber = ReadNullableInt(reader, "ChapterNumber"),
                DocumentId = ReadNullableInt(reader, "DocumentId"),
                Track = ReadNullableInt(reader, "Track"),
                IssueTagNumber = ReadInt(reader, "IssueTagNumber"),
                KeySymbol = ReadString(reader, "KeySymbol"),
                MepsLanguage = ReadInt(reader, "MepsLanguage"),
                Type = ReadInt(reader, "Type"),
                Title = ReadNullableString(reader, "Title")
            };
        }

        private Note ReadNote(SQLiteDataReader reader)
        {
            return new Note
            {
                NoteId = ReadInt(reader, "NoteId"),
                Guid = ReadString(reader, "Guid"),
                UserMarkId = ReadNullableInt(reader, "UserMarkId"),
                LocationId = ReadNullableInt(reader, "LocationId"),
                Title = ReadNullableString(reader, "Title"),
                Content = ReadNullableString(reader, "Content"),
                LastModified = ReadString(reader, "LastModified"),
                BlockType = ReadInt(reader, "BlockType"),
                BlockIdentifier = ReadNullableInt(reader, "BlockIdentifier")
            };
        }

        private Tag ReadTag(SQLiteDataReader reader)
        {
            return new Tag
            {
                TagId = ReadInt(reader, "TagId"),
                Type = ReadInt(reader, "Type"),
                Name = ReadString(reader, "Name")
            };
        }

        private TagMap ReadTagMap(SQLiteDataReader reader)
        {
            return new TagMap
            {
                TagMapId = ReadInt(reader, "TagMapId"),
                Type = ReadInt(reader, "Type"),
                TypeId = ReadInt(reader, "TypeId"),
                TagId = ReadInt(reader, "TagId"),
                Position = ReadInt(reader, "Position")
            };
        }

        private BlockRange ReadBlockRange(SQLiteDataReader reader)
        {
            return new BlockRange
            {
                BlockRangeId = ReadInt(reader, "BlockRangeId"),
                BlockType = ReadInt(reader, "BlockType"),
                Identifier = ReadInt(reader, "Identifier"),
                StartToken = ReadNullableInt(reader, "StartToken"),
                EndToken = ReadNullableInt(reader, "EndToken"),
                UserMarkId = ReadInt(reader, "UserMarkId")
            };
        }

        private Bookmark ReadBookmark(SQLiteDataReader reader)
        {
            return new Bookmark
            {
                BookmarkId = ReadInt(reader, "BookmarkId"),
                LocationId = ReadInt(reader, "LocationId"),
                PublicationLocationId = ReadInt(reader, "PublicationLocationId"),
                Slot = ReadInt(reader, "Slot"),
                Title = ReadString(reader, "Title"),
                Snippet = ReadNullableString(reader, "Snippet"),
                BlockType = ReadInt(reader, "BlockType"),
                BlockIdentifier = ReadInt(reader, "BlockIdentifier")
            };
        }

        private LastModified ReadLastModified(SQLiteDataReader reader)
        {
            return new LastModified
            {
                TimeLastModified = ReadString(reader, "LastModified")
            };
        }

        private UserMark ReadUserMark(SQLiteDataReader reader)
        {
            return new UserMark
            {
                UserMarkId = ReadInt(reader, "UserMarkId"),
                ColorIndex = ReadInt(reader, "ColorIndex"),
                LocationId = ReadInt(reader, "LocationId"),
                StyleIndex = ReadInt(reader, "StyleIndex"),
                UserMarkGuid = ReadString(reader, "UserMarkGuid"),
                Version = ReadInt(reader, "Version")
            };
        }

        private SQLiteConnection CreateConnection()
        {
            return CreateConnection(_databaseFilePath);
        }

        /// <summary>
        /// Creates and opens a connection to the database at the given path.
        /// </summary>
        private SQLiteConnection CreateConnection(string filePath)
        {
            var connectionString = $"Data Source={filePath};Version=3;";
            Log.Logger.Debug("SQL create connection: {connection}", connectionString);

            var connection = new SQLiteConnection(connectionString);
            connection.Open();
            return connection;
        }

        /// <summary>
        /// Deletes all user data from the database, leaving only the schema.
        /// </summary>
        private void ClearData(SQLiteConnection connection)
        {
            ClearTable(connection, "UserMark");
            ClearTable(connection, "TagMap");
            ClearTable(connection, "Tag");
            ClearTable(connection, "Note");
            ClearTable(connection, "Location");
            ClearTable(connection, "Bookmark");
            ClearTable(connection, "BlockRange");

            UpdateLastModified(connection);

            // reclaim the space freed by the deletes above
            VacuumDatabase(connection);
        }

        private void VacuumDatabase(SQLiteConnection connection)
        {
            using (var command = connection.CreateCommand())
            {
                command.CommandText = "vacuum;";
                Log.Logger.Debug($"SQL: {command.CommandText}");

                command.ExecuteNonQuery();
            }
        }

        private void UpdateLastModified(SQLiteConnection connection)
        {
            using (var command = connection.CreateCommand())
            {
                command.CommandText = "delete from LastModified; insert into LastModified default values";
                Log.Logger.Debug($"SQL: {command.CommandText}");

                command.ExecuteNonQuery();
            }
        }

        private void ClearTable(SQLiteConnection connection, string tableName)
        {
            using (var command = connection.CreateCommand())
            {
                command.CommandText = $"delete from {tableName}";
                Log.Logger.Debug($"SQL: {command.CommandText}");

                command.ExecuteNonQuery();
            }
        }

        /// <summary>
        /// Inserts all of the given rows into the table whose name matches
        /// <typeparamref name="TRowType"/>, inside a single transaction.
        /// </summary>
        private void PopulateTable<TRowType>(SQLiteConnection connection, List<TRowType> rows)
        {
            var tableName = typeof(TRowType).Name;
            var columnNames = GetColumnNames<TRowType>();
            var columnNamesCsv = string.Join(",", columnNames);
            var paramNames = GetParamNames(columnNames);
            var paramNamesCsv = string.Join(",", paramNames);

            // the insert statement is identical for every row, so build it once
            // rather than rebuilding it inside the loop
            var insertSql = $"insert into {tableName} ({columnNamesCsv}) values ({paramNamesCsv})";

            using (var transaction = connection.BeginTransaction())
            {
                foreach (var row in rows)
                {
                    using (SQLiteCommand cmd = connection.CreateCommand())
                    {
                        cmd.CommandText = insertSql;
                        AddPopulateTableParams(cmd, columnNames, paramNames, row);

                        cmd.ExecuteNonQuery();
                    }
                }

                transaction.Commit();
            }
        }

        /// <summary>
        /// Binds one parameter per column, reading values from the row via reflection.
        /// </summary>
        private void AddPopulateTableParams<TRowType>(
            SQLiteCommand cmd,
            List<string> columnNames,
            List<string> paramNames,
            TRowType row)
        {
            for (int n = 0; n < columnNames.Count; ++n)
            {
                var value = row.GetType().GetProperty(columnNames[n])?.GetValue(row);
                cmd.Parameters.AddWithValue(paramNames[n], value);
            }
        }

        private List<string> GetParamNames(IReadOnlyCollection<string> columnNames)
        {
            return columnNames.Select(columnName => $"@{columnName}").ToList();
        }

        /// <summary>
        /// Column names are assumed to equal the model type's property names.
        /// </summary>
        private List<string> GetColumnNames<TRowType>()
        {
            PropertyInfo[] properties = typeof(TRowType).GetProperties();
            return properties.Select(property => property.Name).ToList();
        }
    }
}

namespace JWLMerge.BackupFileServices.Helpers
{
    using System.Collections.Generic;

    /// <summary>
    /// Used by the <see cref="Merger"/> to map old id values to their new
    /// values in the merged database.
    /// </summary>
    internal class IdTranslator
    {
        private readonly Dictionary<int, int> _ids;

        public IdTranslator()
        {
            _ids = new Dictionary<int, int>();
        }

        /// <summary>
        /// Gets the translated id, or 0 if no translation has been recorded.
        /// </summary>
        public int GetTranslatedId(int oldId)
        {
            return _ids.TryGetValue(oldId, out var translatedId) ? translatedId : 0;
        }

        public void Add(int oldId, int translatedId)
        {
            _ids[oldId] = translatedId;
        }

        public void Clear()
        {
            _ids.Clear();
        }
    }
}
namespace JWLMerge.BackupFileServices.Helpers
{
    using System;
    using System.Collections.Generic;
    using Events;
    using Models.Database;
    using Serilog;

    /// <summary>
    /// Merges the SQLite databases.
    /// </summary>
    internal sealed class Merger
    {
        private readonly IdTranslator _translatedLocationIds = new IdTranslator();
        private readonly IdTranslator _translatedTagIds = new IdTranslator();
        private readonly IdTranslator _translatedUserMarkIds = new IdTranslator();
        private readonly IdTranslator _translatedNoteIds = new IdTranslator();

        // running maximum ids in the destination database; these persist across
        // all source databases within a single Merge call so ids never collide
        private int _maxLocationId;
        private int _maxUserMarkId;
        private int _maxNoteId;
        private int _maxTagId;
        private int _maxTagMapId;
        private int _maxBlockRangeId;

        public event EventHandler<ProgressEventArgs> ProgressEvent;

        /// <summary>
        /// Merges the specified databases into a single new database.
        /// </summary>
        /// <param name="databasesToMerge">The databases to merge.</param>
        /// <returns>The merged database.</returns>
        public Database Merge(IEnumerable<Database> databasesToMerge)
        {
            var result = new Database();
            result.InitBlank();

            ClearMaxIds();

            foreach (var database in databasesToMerge)
            {
                Merge(database, result);
            }

            return result;
        }

        private void ClearMaxIds()
        {
            _maxLocationId = 0;
            _maxUserMarkId = 0;
            _maxNoteId = 0;
            _maxTagId = 0;
            _maxTagMapId = 0;
            _maxBlockRangeId = 0;
        }

        /// <summary>
        /// Merges a single source database into the destination. The phase order
        /// matters: user marks and notes must be merged before the tag map and
        /// block ranges, which reference them via the id translators.
        /// </summary>
        private void Merge(Database source, Database destination)
        {
            ClearTranslators();

            MergeUserMarks(source, destination);
            MergeNotes(source, destination);
            MergeTags(source, destination);
            MergeTagMap(source, destination);
            MergeBlockRanges(source, destination);

            destination.ReinitializeIndexes();
        }

        private void ClearTranslators()
        {
            // translations are only valid for a single source database
            _translatedLocationIds.Clear();
            _translatedTagIds.Clear();
            _translatedUserMarkIds.Clear();
            _translatedNoteIds.Clear();
        }

        private void MergeBlockRanges(Database source, Database destination)
        {
            ProgressMessage("Merging block ranges...");

            foreach (var sourceRange in source.BlockRanges)
            {
                var translatedUserMarkId = _translatedUserMarkIds.GetTranslatedId(sourceRange.UserMarkId);

                // only insert if the destination doesn't already have a range for this user mark
                if (destination.FindBlockRange(translatedUserMarkId) == null)
                {
                    InsertBlockRange(sourceRange, destination);
                }
            }
        }

        private void MergeTagMap(Database source, Database destination)
        {
            ProgressMessage("Merging tag map...");

            foreach (var sourceTagMap in source.TagMaps)
            {
                // only type 1 entries (a tag on a note) are merged
                if (sourceTagMap.Type != 1)
                {
                    continue;
                }

                var translatedTagId = _translatedTagIds.GetTranslatedId(sourceTagMap.TagId);
                var translatedNoteId = _translatedNoteIds.GetTranslatedId(sourceTagMap.TypeId);

                if (destination.FindTagMap(translatedTagId, translatedNoteId) == null)
                {
                    InsertTagMap(sourceTagMap, destination);
                }
            }
        }

        private void MergeTags(Database source, Database destination)
        {
            ProgressMessage("Merging tags...");

            foreach (var sourceTag in source.Tags)
            {
                // tags are matched by name
                var existingTag = destination.FindTag(sourceTag.Name);
                if (existingTag == null)
                {
                    InsertTag(sourceTag, destination);
                }
                else
                {
                    _translatedTagIds.Add(sourceTag.TagId, existingTag.TagId);
                }
            }
        }

        private void MergeUserMarks(Database source, Database destination)
        {
            ProgressMessage("Merging user marks...");

            foreach (var sourceUserMark in source.UserMarks)
            {
                // user marks are matched by guid
                var existingUserMark = destination.FindUserMark(sourceUserMark.UserMarkGuid);
                if (existingUserMark == null)
                {
                    // make sure the location the user mark refers to exists first.
                    // NOTE(review): assumes source.FindLocation always succeeds for a
                    // referenced location id - confirm.
                    var referencedLocation = source.FindLocation(sourceUserMark.LocationId);

                    InsertLocation(referencedLocation, destination);
                    InsertUserMark(sourceUserMark, destination);
                }
                else
                {
                    // user mark already exists in destination...
                    _translatedUserMarkIds.Add(sourceUserMark.UserMarkId, existingUserMark.UserMarkId);
                }
            }
        }

        /// <summary>
        /// Inserts the location into the destination unless it was already translated.
        /// </summary>
        private void InsertLocation(Location location, Database destination)
        {
            if (_translatedLocationIds.GetTranslatedId(location.LocationId) != 0)
            {
                return;
            }

            Location newLocation = location.Clone();
            newLocation.LocationId = ++_maxLocationId;
            destination.Locations.Add(newLocation);

            _translatedLocationIds.Add(location.LocationId, newLocation.LocationId);
        }

        private void InsertUserMark(UserMark userMark, Database destination)
        {
            UserMark newUserMark = userMark.Clone();
            newUserMark.UserMarkId = ++_maxUserMarkId;
            newUserMark.LocationId = _translatedLocationIds.GetTranslatedId(userMark.LocationId);
            destination.UserMarks.Add(newUserMark);

            _translatedUserMarkIds.Add(userMark.UserMarkId, newUserMark.UserMarkId);
        }

        private void InsertTag(Tag tag, Database destination)
        {
            Tag newTag = tag.Clone();
            newTag.TagId = ++_maxTagId;
            destination.Tags.Add(newTag);

            _translatedTagIds.Add(tag.TagId, newTag.TagId);
        }

        private void InsertTagMap(TagMap tagMap, Database destination)
        {
            TagMap newTagMap = tagMap.Clone();
            newTagMap.TagMapId = ++_maxTagMapId;
            newTagMap.TagId = _translatedTagIds.GetTranslatedId(tagMap.TagId);
            newTagMap.TypeId = _translatedNoteIds.GetTranslatedId(tagMap.TypeId);

            destination.TagMaps.Add(newTagMap);
        }

        private void InsertNote(Note note, Database destination)
        {
            Note newNote = note.Clone();
            newNote.NoteId = ++_maxNoteId;

            // a note's user mark and location are both optional
            if (note.UserMarkId != null)
            {
                newNote.UserMarkId = _translatedUserMarkIds.GetTranslatedId(note.UserMarkId.Value);
            }

            if (note.LocationId != null)
            {
                newNote.LocationId = _translatedLocationIds.GetTranslatedId(note.LocationId.Value);
            }

            destination.Notes.Add(newNote);
            _translatedNoteIds.Add(note.NoteId, newNote.NoteId);
        }

        private void InsertBlockRange(BlockRange range, Database destination)
        {
            BlockRange newRange = range.Clone();
            newRange.BlockRangeId = ++_maxBlockRangeId;

            newRange.UserMarkId = _translatedUserMarkIds.GetTranslatedId(range.UserMarkId);
            destination.BlockRanges.Add(newRange);
        }

        private void MergeNotes(Database source, Database destination)
        {
            ProgressMessage("Merging notes...");

            foreach (var sourceNote in source.Notes)
            {
                // notes are matched by guid
                var existingNote = destination.FindNote(sourceNote.Guid);
                if (existingNote == null)
                {
                    // a new note; ensure its location (if any) exists in the destination first...
                    if (sourceNote.LocationId != null &&
                        _translatedLocationIds.GetTranslatedId(sourceNote.LocationId.Value) == 0)
                    {
                        InsertLocation(source.FindLocation(sourceNote.LocationId.Value), destination);
                    }

                    InsertNote(sourceNote, destination);
                }
                else
                {
                    // note already exists in destination; keep whichever copy was edited most recently
                    if (sourceNote.GetLastModifiedDateTime() > existingNote.GetLastModifiedDateTime())
                    {
                        UpdateNote(sourceNote, existingNote);
                    }

                    _translatedNoteIds.Add(sourceNote.NoteId, existingNote.NoteId);
                }
            }
        }

        /// <summary>
        /// Copies the editable fields of the source note onto the destination note.
        /// </summary>
        private void UpdateNote(Note source, Note destination)
        {
            destination.Title = source.Title;
            destination.Content = source.Content;
            destination.LastModified = source.LastModified;
        }

        private void OnProgressEvent(string message)
        {
            ProgressEvent?.Invoke(this, new ProgressEventArgs { Message = message });
        }

        /// <summary>
        /// Logs the message and raises it as a progress event.
        /// </summary>
        private void ProgressMessage(string logMessage)
        {
            Log.Logger.Information(logMessage);
            OnProgressEvent(logMessage);
        }
    }
}
using Models; + + /// + /// The BackupFileService interface. + /// + public interface IBackupFileService + { + event EventHandler ProgressEvent; + + /// + /// Loads the specified backup file. + /// + /// + /// The backup file path. + /// + /// + /// The . + /// + BackupFile Load(string backupFilePath); + + /// + /// Merges the specified backup files. + /// + /// The files. + /// Merged file + BackupFile Merge(IReadOnlyCollection files); + + /// + /// Creates a blank backup file. + /// + /// + /// A . + /// + BackupFile CreateBlank(); + + /// + /// Writes the specified backup to a "jwlibrary" file. + /// + /// The backup data. + /// The new database file path. + /// The original jwlibrary file path on which to base the new schema. + void WriteNewDatabase( + BackupFile backup, + string newDatabaseFilePath, + string originalJwlibraryFilePathForSchema); + } +} diff --git a/JWLMerge.BackupFileServices/JWLMerge.BackupFileServices.csproj b/JWLMerge.BackupFileServices/JWLMerge.BackupFileServices.csproj new file mode 100644 index 0000000..ed5599b --- /dev/null +++ b/JWLMerge.BackupFileServices/JWLMerge.BackupFileServices.csproj @@ -0,0 +1,100 @@ + + + + + Debug + AnyCPU + {83446629-CDBB-43FF-B628-1B8A3A9603C3} + Library + Properties + JWLMerge.BackupFileServices + JWLMerge.BackupFileServices + v4.6.2 + 512 + + + + + true + full + false + bin\Debug\ + DEBUG;TRACE + prompt + 4 + + + pdbonly + true + bin\Release\ + TRACE + prompt + 4 + + + + + ..\packages\Newtonsoft.Json.10.0.3\lib\net45\Newtonsoft.Json.dll + + + ..\packages\Serilog.2.6.0\lib\net46\Serilog.dll + + + ..\packages\Serilog.Sinks.File.3.2.0\lib\net45\Serilog.Sinks.File.dll + + + ..\packages\Serilog.Sinks.RollingFile.3.3.0\lib\net45\Serilog.Sinks.RollingFile.dll + + + + + ..\packages\System.Data.SQLite.Core.1.0.106.0\lib\net46\System.Data.SQLite.dll + + + + + + + + + + + + + Properties\SolutionInfo.cs + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + This project references NuGet package(s) that are 
missing on this computer. Use NuGet Package Restore to download them. For more information, see http://go.microsoft.com/fwlink/?LinkID=322105. The missing file is {0}. + + + + \ No newline at end of file diff --git a/JWLMerge.BackupFileServices/Models/BackupFile.cs b/JWLMerge.BackupFileServices/Models/BackupFile.cs new file mode 100644 index 0000000..af74088 --- /dev/null +++ b/JWLMerge.BackupFileServices/Models/BackupFile.cs @@ -0,0 +1,14 @@ +namespace JWLMerge.BackupFileServices.Models +{ + using ManifestFile; + + /// + /// The Backup file. + /// + public class BackupFile + { + public Manifest Manifest { get; set; } + + public Database.Database Database { get; set; } + } +} diff --git a/JWLMerge.BackupFileServices/Models/Database/BlockRange.cs b/JWLMerge.BackupFileServices/Models/Database/BlockRange.cs new file mode 100644 index 0000000..795eeef --- /dev/null +++ b/JWLMerge.BackupFileServices/Models/Database/BlockRange.cs @@ -0,0 +1,46 @@ +namespace JWLMerge.BackupFileServices.Models.Database +{ + public class BlockRange + { + /// + /// The block range identifier. + /// + public int BlockRangeId { get; set; } + + /// + /// The block type (1 or 2). + /// 1 = Publication + /// 2 = Bible + /// + public int BlockType { get; set; } + + /// + /// The paragraph or verse identifier. + /// i.e. the one-based paragraph (or verse if a Bible chapter) within the document. + /// + public int Identifier { get; set; } + + /// + /// The start token. + /// i.e. the zero-based word in a sentence that marks the start of the highlight. + /// + public int? StartToken { get; set; } + + /// + /// The end token. + /// i.e. the zero-based word in a sentence that marks the end of the highlight (inclusive). + /// + public int? EndToken { get; set; } + + /// + /// The user mark identifier. 
+ /// Refers to userMark.UserMarkId + /// + public int UserMarkId { get; set; } + + public BlockRange Clone() + { + return (BlockRange)MemberwiseClone(); + } + } +} diff --git a/JWLMerge.BackupFileServices/Models/Database/Bookmark.cs b/JWLMerge.BackupFileServices/Models/Database/Bookmark.cs new file mode 100644 index 0000000..8ee43fd --- /dev/null +++ b/JWLMerge.BackupFileServices/Models/Database/Bookmark.cs @@ -0,0 +1,52 @@ +namespace JWLMerge.BackupFileServices.Models.Database +{ + public class Bookmark + { + /// + /// The bookmark identifier. + /// + public int BookmarkId { get; set; } + + /// + /// The location identifier. + /// Refers to Location.LocationId + /// + public int LocationId { get; set; } + + /// + /// The publication location identifier. + /// Refers to Location.LocationId (with Location.Type = 1) + /// + public int PublicationLocationId { get; set; } + + /// + /// The slot in which the bookmark appears. + /// i.e. the zero-based order in which it is listed in the UI. + /// + public int Slot { get; set; } + + /// + /// The title text. + /// + public string Title { get; set; } + + /// + /// A snippet of the bookmarked text (can be null) + /// + public string Snippet { get; set; } + + /// + /// The block type. + /// 1 = Publication + /// 2 = Bible + /// + public int BlockType { get; set; } + + /// + /// The block identifier. + /// The paragraph or verse identifier. + /// i.e. the one-based paragraph (or verse if a Bible chapter) within the document. + /// + public int? 
BlockIdentifier { get; set; } + } +} diff --git a/JWLMerge.BackupFileServices/Models/Database/Database.cs b/JWLMerge.BackupFileServices/Models/Database/Database.cs new file mode 100644 index 0000000..0a1b2f4 --- /dev/null +++ b/JWLMerge.BackupFileServices/Models/Database/Database.cs @@ -0,0 +1,133 @@ +namespace JWLMerge.BackupFileServices.Models.Database +{ + using System; + using System.Collections.Generic; + using System.Linq; + + public class Database + { + private Lazy> _noteIndex; + private Lazy> _userMarksIndex; + private Lazy> _locationsIndex; + private Lazy> _tagsIndex; + private Lazy> _tagMapIndex; + private Lazy> _blockRangeIndex; + + public Database() + { + ReinitializeIndexes(); + } + + public void ReinitializeIndexes() + { + _noteIndex = new Lazy>(NoteIndexValueFactory); + _userMarksIndex = new Lazy>(UserMarkIndexValueFactory); + _locationsIndex = new Lazy>(LocationsIndexValueFactory); + _tagsIndex = new Lazy>(TagIndexValueFactory); + _tagMapIndex = new Lazy>(TagMapIndexValueFactory); + _blockRangeIndex = new Lazy>(BlockRangeIndexValueFactory); + } + + public void InitBlank() + { + LastModified = new LastModified(); + Locations = new List(); + Notes = new List(); + Tags = new List(); + TagMaps = new List(); + BlockRanges = new List(); + UserMarks = new List(); + } + + public LastModified LastModified { get; set; } + + public List Locations { get; set; } + + public List Notes { get; set; } + + public List Tags { get; set; } + + public List TagMaps { get; set; } + + public List BlockRanges { get; set; } + + public List Bookmarks { get; set; } + + public List UserMarks { get; set; } + + public Note FindNote(string guid) + { + return _noteIndex.Value.TryGetValue(guid, out var note) ? note : null; + } + + public UserMark FindUserMark(string guid) + { + return _userMarksIndex.Value.TryGetValue(guid, out var userMark) ? userMark : null; + } + + public Tag FindTag(string tagName) + { + return _tagsIndex.Value.TryGetValue(tagName, out var tag) ? 
tag : null; + } + + public TagMap FindTagMap(int tagId, int noteId) + { + return _tagMapIndex.Value.TryGetValue(GetTagMapKey(tagId, noteId), out var tag) ? tag : null; + } + + public Location FindLocation(int locationId) + { + return _locationsIndex.Value.TryGetValue(locationId, out var location) ? location : null; + } + + public BlockRange FindBlockRange(int userMarkId) + { + // note that we find a block range by userMarkId. The BlockRange.UserMarkId column + // isn't marked as a unique index, but we assume it should be. + return _blockRangeIndex.Value.TryGetValue(userMarkId, out var range) ? range : null; + } + + private Dictionary NoteIndexValueFactory() + { + return Notes.ToDictionary(note => note.Guid); + } + + private Dictionary UserMarkIndexValueFactory() + { + return UserMarks.ToDictionary(userMark => userMark.UserMarkGuid); + } + + private Dictionary LocationsIndexValueFactory() + { + return Locations.ToDictionary(location => location.LocationId); + } + + private Dictionary BlockRangeIndexValueFactory() + { + return BlockRanges.ToDictionary(range => range.UserMarkId); + } + + private Dictionary TagIndexValueFactory() + { + return Tags.ToDictionary(tag => tag.Name); + } + + private string GetTagMapKey(int tagId, int noteId) + { + return $"{tagId}-{noteId}"; + } + + private Dictionary TagMapIndexValueFactory() + { + var result = new Dictionary(); + + foreach (var tagMap in TagMaps) + { + string key = GetTagMapKey(tagMap.TagId, tagMap.TypeId); + result.Add(key, tagMap); + } + + return result; + } + } +} diff --git a/JWLMerge.BackupFileServices/Models/Database/LastModified.cs b/JWLMerge.BackupFileServices/Models/Database/LastModified.cs new file mode 100644 index 0000000..391f515 --- /dev/null +++ b/JWLMerge.BackupFileServices/Models/Database/LastModified.cs @@ -0,0 +1,13 @@ +namespace JWLMerge.BackupFileServices.Models.Database +{ + using Newtonsoft.Json; + + public class LastModified + { + /// + /// Time stamp when the database was last modified. 
+ /// + [JsonProperty(PropertyName = "LastModified")] + public string TimeLastModified { get; set; } + } +} diff --git a/JWLMerge.BackupFileServices/Models/Database/Location.cs b/JWLMerge.BackupFileServices/Models/Database/Location.cs new file mode 100644 index 0000000..8b70a2d --- /dev/null +++ b/JWLMerge.BackupFileServices/Models/Database/Location.cs @@ -0,0 +1,65 @@ +namespace JWLMerge.BackupFileServices.Models.Database +{ + /// + /// The Location table row. + /// + public class Location + { + /// + /// The location identifier. + /// + public int LocationId { get; set; } + + /// + /// The Bible book number (or null if not Bible). + /// + public int? BookNumber { get; set; } + + /// + /// The Bible chapter number (or null if not Bible). + /// + public int? ChapterNumber { get; set; } + + /// + /// The JWL document identifier. + /// + public int? DocumentId { get; set; } + + /// + /// The track. Semantics unknown! + /// + public int? Track { get; set; } + + /// + /// A refernce to the publication issue (if applicable), e.g. "20171100" + /// + public int IssueTagNumber { get; set; } + + /// + /// The JWL publication key symbol. + /// + public string KeySymbol { get; set; } + + /// + /// The MEPS identifier for the publication language. + /// + public int MepsLanguage { get; set; } + + /// + /// The type. + /// 0 = standard location entry + /// 1 = reference to a publication (see Bookmark.PublicationLocationId) + /// + public int Type { get; set; } + + /// + /// A location title (nullable). 
+ /// + public string Title { get; set; } + + public Location Clone() + { + return (Location)MemberwiseClone(); + } + } +} diff --git a/JWLMerge.BackupFileServices/Models/Database/Note.cs b/JWLMerge.BackupFileServices/Models/Database/Note.cs new file mode 100644 index 0000000..ceeee60 --- /dev/null +++ b/JWLMerge.BackupFileServices/Models/Database/Note.cs @@ -0,0 +1,71 @@ +namespace JWLMerge.BackupFileServices.Models.Database +{ + using System; + + public class Note + { + /// + /// The note identifier. + /// + public int NoteId { get; set; } + + /// + /// A Guid (that should assist in merging notes). + /// + public string Guid { get; set; } + + /// + /// The user mark identifier (if the note is associated with user-highlighting). + /// A reference to UserMark.UserMarkId + /// + public int? UserMarkId { get; set; } + + /// + /// The location identifier (if the note is associated with a location - which it usually is) + /// + public int? LocationId { get; set; } + + /// + /// The user-defined note title. + /// + public string Title { get; set; } + + /// + /// The user-defined note content. + /// + public string Content { get; set; } + + /// + /// Time stamp when the note was last edited. ISO 8601 format. + /// + public string LastModified { get; set; } + + /// + /// The type of block associated with the note. + /// Valid values are possibly 0, 1, and 2. + /// Best guess at semantics: + /// 0 = The note is associated with the document rather than a block of text within it. + /// 1 = The note is associated with a paragraph in a publication. + /// 2 = The note is associated with a verse in the Bible. + /// In all cases, see also the UserMarkId which may better define the associated block of text. + /// + public int BlockType { get; set; } + + /// + /// The block identifier. Helps to locate the block of text associated with the note. + /// If the BlockType is 1 (a publication), then BlockIdentifier denotes the paragraph number. 
+ /// If the BlockType is 2 (the Bible), then BlockIdentifier denotes the verse number. + /// + public int? BlockIdentifier { get; set; } + + public DateTime GetLastModifiedDateTime() + { + return DateTime.Parse(LastModified); + } + + public Note Clone() + { + return (Note)MemberwiseClone(); + } + } +} diff --git a/JWLMerge.BackupFileServices/Models/Database/Tag.cs b/JWLMerge.BackupFileServices/Models/Database/Tag.cs new file mode 100644 index 0000000..9de03dc --- /dev/null +++ b/JWLMerge.BackupFileServices/Models/Database/Tag.cs @@ -0,0 +1,26 @@ +namespace JWLMerge.BackupFileServices.Models.Database +{ + public class Tag + { + /// + /// The tag identifier. + /// + public int TagId { get; set; } + + /// + /// The tag type. + /// There appear to be 2 tag types (0 = Favourite, 1 = User-defined). + /// + public int Type { get; set; } + + /// + /// The name of the tag. + /// + public string Name { get; set; } + + public Tag Clone() + { + return (Tag)MemberwiseClone(); + } + } +} diff --git a/JWLMerge.BackupFileServices/Models/Database/TagMap.cs b/JWLMerge.BackupFileServices/Models/Database/TagMap.cs new file mode 100644 index 0000000..111f618 --- /dev/null +++ b/JWLMerge.BackupFileServices/Models/Database/TagMap.cs @@ -0,0 +1,39 @@ +namespace JWLMerge.BackupFileServices.Models.Database +{ + public class TagMap + { + /// + /// The tag map identifier. + /// + public int TagMapId { get; set; } + + /// + /// The type of data that the tag is attached to. + /// Currently it looks like there is only 1 'type' - a Note (value = 1). + /// + public int Type { get; set; } + + /// + /// The identifier of the data that the tag is attached to. + /// Currently it looks like this always refers to Note.NoteId + /// + public int TypeId { get; set; } + + /// + /// The tag identifier. + /// Refers to Tag.TagId. + /// + public int TagId { get; set; } + + /// + /// The zero-based position of the tag map entry (among all entries having the same TagId). 
+ /// (Tagged items can be ordered in the JWL application.) + /// + public int Position { get; set; } + + public TagMap Clone() + { + return (TagMap)MemberwiseClone(); + } + } +} diff --git a/JWLMerge.BackupFileServices/Models/Database/UserMark.cs b/JWLMerge.BackupFileServices/Models/Database/UserMark.cs new file mode 100644 index 0000000..e741c50 --- /dev/null +++ b/JWLMerge.BackupFileServices/Models/Database/UserMark.cs @@ -0,0 +1,41 @@ +namespace JWLMerge.BackupFileServices.Models.Database +{ + public class UserMark + { + /// + /// The user mark identifier. + /// + public int UserMarkId { get; set; } + + /// + /// The index of the marking (highlight) color. + /// + public int ColorIndex { get; set; } + + /// + /// The location identifier. + /// Refers to Location.LocationId + /// + public int LocationId { get; set; } + + /// + /// The style index (unused?) + /// + public int StyleIndex { get; set; } + + /// + /// The guid. Useful in merging! + /// + public string UserMarkGuid { get; set; } + + /// + /// The highlight version. Semantics unknown! + /// + public int Version { get; set; } + + public UserMark Clone() + { + return (UserMark)MemberwiseClone(); + } + } +} diff --git a/JWLMerge.BackupFileServices/Models/ManifestFile/Manifest.cs b/JWLMerge.BackupFileServices/Models/ManifestFile/Manifest.cs new file mode 100644 index 0000000..75fb015 --- /dev/null +++ b/JWLMerge.BackupFileServices/Models/ManifestFile/Manifest.cs @@ -0,0 +1,38 @@ +namespace JWLMerge.BackupFileServices.Models.ManifestFile +{ + /// + /// The manifest file. + /// + public class Manifest + { + /// + /// The name of the backup file (without the "jwlibrary" extension). + /// + public string Name { get; set; } + + /// + /// The local creation date in the form "YYYY-MM-DD" + /// + public string CreationDate { get; set; } + + /// + /// The manifest schema version. + /// + public int Version { get; set; } + + /// + /// The type. Semantics unknown! 
+ /// + public int Type { get; set; } + + /// + /// Details of the backup database. + /// + public UserDataBackup UserDataBackup { get; set; } + + public Manifest Clone() + { + return (Manifest)MemberwiseClone(); + } + } +} diff --git a/JWLMerge.BackupFileServices/Models/ManifestFile/UserDataBackup.cs b/JWLMerge.BackupFileServices/Models/ManifestFile/UserDataBackup.cs new file mode 100644 index 0000000..9619861 --- /dev/null +++ b/JWLMerge.BackupFileServices/Models/ManifestFile/UserDataBackup.cs @@ -0,0 +1,36 @@ +namespace JWLMerge.BackupFileServices.Models.ManifestFile +{ + /// + /// The user data backup. + /// Part of the manifest. + /// + public class UserDataBackup + { + /// + /// The last modified date of the database in ISO 8601, e.g. "2018-01-17T14:37:27+00:00" + /// Corresponds to the value in the LastModifiedDate table. + /// + public string LastModifiedDate { get; set; } + + /// + /// The name of the source device (e.g. the name of the PC). + /// + public string DeviceName { get; set; } + + /// + /// The database name (always "userData.db"?) + /// + public string DatabaseName { get; set; } + + /// + /// A sha256 hash of the associated database file. + /// + public string Hash { get; set; } + + /// + /// The database schema version. + /// Note that the database records its own schema version in the user_version header pragma. 
+ /// + public int SchemaVersion { get; set; } + } +} diff --git a/JWLMerge.BackupFileServices/Properties/AssemblyInfo.cs b/JWLMerge.BackupFileServices/Properties/AssemblyInfo.cs new file mode 100644 index 0000000..bbb626e --- /dev/null +++ b/JWLMerge.BackupFileServices/Properties/AssemblyInfo.cs @@ -0,0 +1,9 @@ +using System.Reflection; +using System.Runtime.InteropServices; + +[assembly: AssemblyTitle("JWLMerge.BackupFileServices")] +[assembly: AssemblyDescription("")] +[assembly: AssemblyConfiguration("")] + +[assembly: ComVisible(false)] + diff --git a/JWLMerge.BackupFileServices/packages.config b/JWLMerge.BackupFileServices/packages.config new file mode 100644 index 0000000..e2da8e6 --- /dev/null +++ b/JWLMerge.BackupFileServices/packages.config @@ -0,0 +1,8 @@ + + + + + + + + \ No newline at end of file diff --git a/JWLMerge.sln b/JWLMerge.sln new file mode 100644 index 0000000..c436bd9 --- /dev/null +++ b/JWLMerge.sln @@ -0,0 +1,42 @@ + +Microsoft Visual Studio Solution File, Format Version 12.00 +# Visual Studio 15 +VisualStudioVersion = 15.0.27130.2024 +MinimumVisualStudioVersion = 10.0.40219.1 +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "JWLMerge.BackupFileServices", "JWLMerge.BackupFileServices\JWLMerge.BackupFileServices.csproj", "{83446629-CDBB-43FF-B628-1B8A3A9603C3}" +EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "JWL Database Schemas", "JWL Database Schemas", "{DA7BDF58-CFEA-489C-B18C-944D9986758D}" + ProjectSection(SolutionItems) = preProject + readme.txt = readme.txt + Version005.txt = Version005.txt + EndProjectSection +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "JWLMergeCLI", "JWLMergeCLI\JWLMergeCLI.csproj", "{2CBBA1C2-72C9-4287-A262-EC1D2A2F6E56}" +EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Solution Items", "Solution Items", "{DA506BB2-675E-47BE-BC54-ED6EAE369243}" + ProjectSection(SolutionItems) = preProject + SolutionInfo.cs = SolutionInfo.cs + EndProjectSection 
+EndProject +Global + GlobalSection(SolutionConfigurationPlatforms) = preSolution + Debug|Any CPU = Debug|Any CPU + Release|Any CPU = Release|Any CPU + EndGlobalSection + GlobalSection(ProjectConfigurationPlatforms) = postSolution + {83446629-CDBB-43FF-B628-1B8A3A9603C3}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {83446629-CDBB-43FF-B628-1B8A3A9603C3}.Debug|Any CPU.Build.0 = Debug|Any CPU + {83446629-CDBB-43FF-B628-1B8A3A9603C3}.Release|Any CPU.ActiveCfg = Release|Any CPU + {83446629-CDBB-43FF-B628-1B8A3A9603C3}.Release|Any CPU.Build.0 = Release|Any CPU + {2CBBA1C2-72C9-4287-A262-EC1D2A2F6E56}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {2CBBA1C2-72C9-4287-A262-EC1D2A2F6E56}.Debug|Any CPU.Build.0 = Debug|Any CPU + {2CBBA1C2-72C9-4287-A262-EC1D2A2F6E56}.Release|Any CPU.ActiveCfg = Release|Any CPU + {2CBBA1C2-72C9-4287-A262-EC1D2A2F6E56}.Release|Any CPU.Build.0 = Release|Any CPU + EndGlobalSection + GlobalSection(SolutionProperties) = preSolution + HideSolutionNode = FALSE + EndGlobalSection + GlobalSection(ExtensibilityGlobals) = postSolution + SolutionGuid = {25CF4C37-61F2-4F17-B1D5-F2EF80D9A55B} + EndGlobalSection +EndGlobal diff --git a/JWLMergeCLI/App.config b/JWLMergeCLI/App.config new file mode 100644 index 0000000..b50c74f --- /dev/null +++ b/JWLMergeCLI/App.config @@ -0,0 +1,6 @@ + + + + + + \ No newline at end of file diff --git a/JWLMergeCLI/Exceptions/JWLMergeCLIException.cs b/JWLMergeCLI/Exceptions/JWLMergeCLIException.cs new file mode 100644 index 0000000..46ad2b9 --- /dev/null +++ b/JWLMergeCLI/Exceptions/JWLMergeCLIException.cs @@ -0,0 +1,14 @@ +namespace JWLMergeCLI.Exceptions +{ + using System; + + [Serializable] + // ReSharper disable once InconsistentNaming + public class JWLMergeCLIException : Exception + { + public JWLMergeCLIException(string message) + : base(message) + { + } + } +} diff --git a/JWLMergeCLI/JWLMergeCLI.csproj b/JWLMergeCLI/JWLMergeCLI.csproj new file mode 100644 index 0000000..0a9a4c1 --- /dev/null +++ 
b/JWLMergeCLI/JWLMergeCLI.csproj @@ -0,0 +1,86 @@ + + + + + Debug + AnyCPU + {2CBBA1C2-72C9-4287-A262-EC1D2A2F6E56} + Exe + JWLMergeCLI + JWLMergeCLI + v4.6.2 + 512 + true + + + + + AnyCPU + true + full + false + bin\Debug\ + DEBUG;TRACE + prompt + 4 + + + AnyCPU + pdbonly + true + bin\Release\ + TRACE + prompt + 4 + + + + ..\packages\Serilog.2.6.0\lib\net46\Serilog.dll + + + ..\packages\Serilog.Sinks.File.3.2.0\lib\net45\Serilog.Sinks.File.dll + + + ..\packages\Serilog.Sinks.RollingFile.3.3.0\lib\net45\Serilog.Sinks.RollingFile.dll + + + + + ..\packages\System.Data.SQLite.Core.1.0.106.0\lib\net46\System.Data.SQLite.dll + + + + + + + + + + + + Properties\SolutionInfo.cs + + + + + + + + + + + + + {83446629-cdbb-43ff-b628-1b8a3a9603c3} + JWLMerge.BackupFileServices + + + + + + + This project references NuGet package(s) that are missing on this computer. Use NuGet Package Restore to download them. For more information, see http://go.microsoft.com/fwlink/?LinkID=322105. The missing file is {0}. + + + + \ No newline at end of file diff --git a/JWLMergeCLI/MainApp.cs b/JWLMergeCLI/MainApp.cs new file mode 100644 index 0000000..677a1e7 --- /dev/null +++ b/JWLMergeCLI/MainApp.cs @@ -0,0 +1,79 @@ +namespace JWLMergeCLI +{ + using System; + using System.Collections.Generic; + using System.IO; + using System.Linq; + using Exceptions; + using JWLMerge.BackupFileServices; + using JWLMerge.BackupFileServices.Events; + using Serilog; + + /// + /// The main app. + /// + internal sealed class MainApp + { + public event EventHandler ProgressEvent; + + /// + /// Runs the app. 
+ /// + /// Program arguments + public void Run(string[] args) + { + var files = GetInputFiles(args); + + IBackupFileService backupFileService = new BackupFileService(); + backupFileService.ProgressEvent += BackupFileServiceProgress; + + var backup = backupFileService.Merge(files); + string outputFileName = $"{backup.Manifest.Name}.jwlibrary"; + backupFileService.WriteNewDatabase(backup, outputFileName, files.First()); + + var logMessage = $"{files.Count} backup files merged to {outputFileName}"; + Log.Logger.Information(logMessage); + OnProgressEvent(logMessage); + } + + private void BackupFileServiceProgress(object sender, ProgressEventArgs e) + { + OnProgressEvent(e); + } + + private IReadOnlyCollection GetInputFiles(string[] args) + { + OnProgressEvent("Checking files exist..."); + + var result = new List(); + + foreach (var arg in args) + { + if (!File.Exists(arg)) + { + throw new JWLMergeCLIException($"File does not exist: {arg}"); + } + + Log.Logger.Debug("Found file: {file}", arg); + result.Add(arg); + } + + if (result.Count < 2) + { + throw new JWLMergeCLIException("Specify at least 2 files to merge"); + } + + return result; + } + + private void OnProgressEvent(ProgressEventArgs e) + { + ProgressEvent?.Invoke(this, e); + } + + private void OnProgressEvent(string message) + { + OnProgressEvent(new ProgressEventArgs { Message = message }); + } + } +} diff --git a/JWLMergeCLI/Program.cs b/JWLMergeCLI/Program.cs new file mode 100644 index 0000000..470f511 --- /dev/null +++ b/JWLMergeCLI/Program.cs @@ -0,0 +1,81 @@ +namespace JWLMergeCLI +{ + using System; + using Serilog; + + public class Program + { + /// + /// The main entry point. + /// + /// + /// The args. 
+ /// + public static void Main(string[] args) + { + Log.Logger = new LoggerConfiguration() + .WriteTo.RollingFile("logs\\log-{Date}.txt") + .MinimumLevel.Debug() + .CreateLogger(); + + try + { + Log.Logger.Information("Started"); + + if (args.Length < 2) + { + ShowUsage(); + } + else + { + var app = new MainApp(); + app.ProgressEvent += AppProgress; + app.Run(args); + } + + Environment.ExitCode = 0; + Log.Logger.Information("Finished"); + } + // ReSharper disable once CatchAllClause + catch (Exception ex) + { + Log.Logger.Error(ex, "Error"); + + Console.ForegroundColor = ConsoleColor.Red; + Console.WriteLine(ex.Message); + Console.ResetColor(); + Environment.ExitCode = 1; + } + + Log.CloseAndFlush(); + } + + private static void ShowUsage() + { + Console.ForegroundColor = ConsoleColor.Gray; + Console.WriteLine("Description:"); + Console.ResetColor(); + Console.WriteLine(" JWLMergeCLI is used to merge the contents of 2 or more jwlibrary backup"); + Console.WriteLine(" files. These files are produced by the JW Library backup command and"); + Console.WriteLine(" contain your personal study notes and highlighting."); + Console.WriteLine(); + + Console.ForegroundColor = ConsoleColor.Gray; + Console.WriteLine("Usage:"); + Console.ResetColor(); + Console.WriteLine(" JWLMergeCLI ..."); + Console.WriteLine(); + + Console.ForegroundColor = ConsoleColor.Gray; + Console.WriteLine("An example:"); + Console.ResetColor(); + Console.WriteLine(" JWLMergeCLI \"C:\\Backup_PC16.jwlibrary\" \"C:\\Backup_iPad.jwlibrary\""); + Console.WriteLine(); + } + + private static void AppProgress(object sender, JWLMerge.BackupFileServices.Events.ProgressEventArgs e) + { + Console.WriteLine(e.Message); + } + } +} diff --git a/JWLMergeCLI/Properties/AssemblyInfo.cs b/JWLMergeCLI/Properties/AssemblyInfo.cs new file mode 100644 index 0000000..dc7d7f8 --- /dev/null +++ b/JWLMergeCLI/Properties/AssemblyInfo.cs @@ -0,0 +1,12 @@ +using System.Reflection; +using System.Runtime.InteropServices; + +// 
General Information about an assembly is controlled through the following +// set of attributes. Change these attribute values to modify the information +// associated with an assembly. +[assembly: AssemblyTitle("JWLMergeCLI")] +[assembly: AssemblyDescription("")] +[assembly: AssemblyConfiguration("")] + +[assembly: ComVisible(false)] + diff --git a/JWLMergeCLI/packages.config b/JWLMergeCLI/packages.config new file mode 100644 index 0000000..b65adad --- /dev/null +++ b/JWLMergeCLI/packages.config @@ -0,0 +1,7 @@ + + + + + + + \ No newline at end of file diff --git a/SolutionInfo.cs b/SolutionInfo.cs new file mode 100644 index 0000000..3cefacf --- /dev/null +++ b/SolutionInfo.cs @@ -0,0 +1,10 @@ +using System.Reflection; + +[assembly: AssemblyCompany("Antony Corbett")] +[assembly: AssemblyProduct("JWLMerge")] +[assembly: AssemblyCopyright("Copyright © 2018")] +[assembly: AssemblyTrademark("")] +[assembly: AssemblyCulture("")] + +[assembly: AssemblyVersion("1.0.0.6")] + diff --git a/Version005.txt b/Version005.txt new file mode 100644 index 0000000..3bcf1ac --- /dev/null +++ b/Version005.txt @@ -0,0 +1,88 @@ +CREATE TABLE BlockRange ( + BlockRangeId INTEGER NOT NULL PRIMARY KEY, + BlockType INTEGER NOT NULL, + Identifier INTEGER NOT NULL, + StartToken INTEGER, + EndToken INTEGER, + UserMarkId INTEGER NOT NULL, + CHECK (BlockType BETWEEN 1 AND 2), + FOREIGN KEY(UserMarkId) REFERENCES UserMark(UserMarkId) + ); + +CREATE TABLE Bookmark( + BookmarkId INTEGER NOT NULL PRIMARY KEY, + LocationId INTEGER NOT NULL, + PublicationLocationId INTEGER NOT NULL, + Slot INTEGER NOT NULL, + Title TEXT NOT NULL, + Snippet TEXT, + BlockType INTEGER NOT NULL DEFAULT 0, + BlockIdentifier INTEGER, + FOREIGN KEY(LocationId) REFERENCES Location(LocationId), + FOREIGN KEY(PublicationLocationId) REFERENCES Location(LocationId), + CONSTRAINT PublicationLocationId_Slot UNIQUE (PublicationLocationId, Slot)); + +CREATE TABLE LastModified(LastModified TEXT NOT NULL 
DEFAULT(strftime('%Y-%m-%dT%H:%M:%SZ', 'now'))); + +CREATE TABLE Location ( + LocationId INTEGER NOT NULL PRIMARY KEY, + BookNumber INTEGER, + ChapterNumber INTEGER, + DocumentId INTEGER, + Track INTEGER, + IssueTagNumber INTEGER NOT NULL DEFAULT 0, + KeySymbol TEXT NOT NULL, + MepsLanguage INTEGER NOT NULL, + Type INTEGER NOT NULL, + Title TEXT, + CHECK ( + (Type = 0 AND (DocumentId IS NOT NULL AND DocumentId != 0) AND BookNumber IS NULL AND ChapterNumber IS NULL AND Track IS NULL) OR + (Type = 0 AND DocumentId IS NULL AND (BookNumber IS NOT NULL AND BookNumber != 0) AND (ChapterNumber IS NOT NULL AND ChapterNumber != 0) AND Track IS NULL) OR + (Type = 1 AND BookNumber IS NULL AND ChapterNumber IS NULL AND DocumentId IS NULL AND Track IS NULL) OR + (Type IN (2, 3) AND BookNumber IS NULL AND ChapterNumber IS NULL) + ) + ); + +CREATE TABLE Note( NoteId INTEGER NOT NULL PRIMARY KEY, Guid TEXT NOT NULL UNIQUE, UserMarkId INTEGER, LocationId INTEGER, Title TEXT, Content TEXT, LastModified TEXT NOT NULL DEFAULT(strftime('%Y-%m-%dT%H:%M:%SZ', 'now')), BlockType INTEGER NOT NULL DEFAULT 0, BlockIdentifier INTEGER, CHECK ((BlockType = 0 AND BlockIdentifier IS NULL) OR (BlockType != 0 AND BlockIdentifier IS NOT NULL))); + +CREATE TABLE Tag(TagId INTEGER NOT NULL PRIMARY KEY, Type INTEGER NOT NULL,Name TEXT NOT NULL,UNIQUE (Type, Name), CHECK (length(Name) > 0)); + +CREATE TABLE TagMap ( + TagMapId INTEGER NOT NULL PRIMARY KEY, + Type INTEGER NOT NULL, + TypeId INTEGER NOT NULL, + TagId INTEGER NOT NULL, + Position INTEGER NOT NULL, + FOREIGN KEY(TagId) REFERENCES Tag(TagId) + CONSTRAINT Type_TypeId_TagId_Position UNIQUE (Type, TypeId, TagId, Position)); + +CREATE TABLE UserMark ( + UserMarkId INTEGER NOT NULL PRIMARY KEY, + ColorIndex INTEGER NOT NULL, + LocationId INTEGER NOT NULL, + StyleIndex INTEGER NOT NULL, + UserMarkGuid TEXT NOT NULL UNIQUE, + Version INTEGER NOT NULL, + CHECK (LocationId > 0), + FOREIGN KEY(LocationId) REFERENCES Location(LocationId) + ); + +CREATE 
INDEX IX_BlockRange_UserMarkId ON BlockRange(UserMarkId); + +CREATE INDEX IX_Location_KeySymbol_MepsLanguage_BookNumber_ChapterNumber ON + Location(KeySymbol, MepsLanguage, BookNumber, ChapterNumber); + +CREATE INDEX IX_Location_MepsLanguage_DocumentId ON Location(MepsLanguage, DocumentId); + +CREATE INDEX IX_Note_LastModified_LocationId ON Note(LastModified, LocationId); + +CREATE INDEX IX_Note_LocationId_BlockIdentifier ON Note(LocationId, BlockIdentifier); + +CREATE INDEX IX_TagMap_TagId ON TagMap(TagId); + +CREATE INDEX IX_TagMap_TypeId_TagId_Position ON TagMap(TypeId, Type, TagId, Position); + +CREATE INDEX IX_Tag_Name_Type_TagId ON Tag(Name, Type, TagId); + +CREATE INDEX IX_UserMark_LocationId ON UserMark(LocationId); + diff --git a/readme.txt b/readme.txt new file mode 100644 index 0000000..da3f8d1 --- /dev/null +++ b/readme.txt @@ -0,0 +1 @@ +This is a record of the sqlite database schema used by the userData.db file. The DDL is extracted using SQLite Expert. \ No newline at end of file