Merge remote-tracking branch 'upstream/dev' into fix/blocked-contacts-issues

Morgan Pretty 2023-10-03 13:43:57 +11:00
commit ed33e1f2e2
51 changed files with 596 additions and 113 deletions

View file

@ -38,18 +38,25 @@ extension ProjectState {
.contains("print("),
.contains("NSLog("),
.contains("SNLog("),
.contains("SNLogNotTests("),
.contains("owsFailDebug("),
.contains("#imageLiteral(resourceName:"),
.contains("UIImage(named:"),
.contains("UIImage(systemName:"),
.contains("[UIImage imageNamed:"),
.contains("UIFont(name:"),
.contains(".dateFormat ="),
.contains(".accessibilityLabel ="),
.contains(".accessibilityValue ="),
.contains(".accessibilityIdentifier ="),
.contains("accessibilityIdentifier:"),
.contains("accessibilityLabel:"),
.contains("Accessibility(identifier:"),
.contains("Accessibility(label:"),
.contains("NSAttributedString.Key("),
.contains("Notification.Name("),
.contains("Notification.Key("),
.contains("DispatchQueue("),
.containsAnd("identifier:", .previousLine(numEarlier: 1, .contains("Accessibility("))),
.containsAnd("label:", .previousLine(numEarlier: 1, .contains("Accessibility("))),
.containsAnd("label:", .previousLine(numEarlier: 2, .contains("Accessibility("))),
@ -57,7 +64,8 @@ extension ProjectState {
.regex(".*static var databaseTableName: String"),
.regex("Logger\\..*\\("),
.regex("OWSLogger\\..*\\("),
.regex("case .* = ")
.regex("case .* = "),
.regex("Error.*\\(")
]
}
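For context, the patterns above appear to be exclusion rules for the project's string-lint script, and the second hunk adds an `Error.*\(` regex to them. A hedged sketch of a line that new rule would skip (the error type and message are hypothetical, not taken from the repo):

// Matched by .regex("Error.*\\("): constructing an error with an associated value,
// presumably so the string literal is not flagged as user-facing copy
throw MigrationError.failed("unexpected schema state")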

View file

@ -509,6 +509,8 @@
FD17D7E527F6A09900122BE0 /* Identity.swift in Sources */ = {isa = PBXBuildFile; fileRef = FD17D7E427F6A09900122BE0 /* Identity.swift */; };
FD17D7E727F6A16700122BE0 /* _003_YDBToGRDBMigration.swift in Sources */ = {isa = PBXBuildFile; fileRef = FD17D7E627F6A16700122BE0 /* _003_YDBToGRDBMigration.swift */; };
FD17D7EA27F6A1C600122BE0 /* SUKLegacy.swift in Sources */ = {isa = PBXBuildFile; fileRef = FD17D7E927F6A1C600122BE0 /* SUKLegacy.swift */; };
FD19363F2ACA66DE004BCF0F /* DatabaseSpec.swift in Sources */ = {isa = PBXBuildFile; fileRef = FD19363E2ACA66DE004BCF0F /* DatabaseSpec.swift */; };
FD1936412ACA7BD8004BCF0F /* Result+Utilities.swift in Sources */ = {isa = PBXBuildFile; fileRef = FD1936402ACA7BD8004BCF0F /* Result+Utilities.swift */; };
FD1A94FB2900D1C2000D73D3 /* PersistableRecord+Utilities.swift in Sources */ = {isa = PBXBuildFile; fileRef = FD1A94FA2900D1C2000D73D3 /* PersistableRecord+Utilities.swift */; };
FD1A94FE2900D2EA000D73D3 /* PersistableRecordUtilitiesSpec.swift in Sources */ = {isa = PBXBuildFile; fileRef = FD1A94FD2900D2EA000D73D3 /* PersistableRecordUtilitiesSpec.swift */; };
FD1C98E4282E3C5B00B76F9E /* UINavigationBar+Utilities.swift in Sources */ = {isa = PBXBuildFile; fileRef = FD1C98E3282E3C5B00B76F9E /* UINavigationBar+Utilities.swift */; };
@ -1656,6 +1658,8 @@
FD17D7E427F6A09900122BE0 /* Identity.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Identity.swift; sourceTree = "<group>"; };
FD17D7E627F6A16700122BE0 /* _003_YDBToGRDBMigration.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = _003_YDBToGRDBMigration.swift; sourceTree = "<group>"; };
FD17D7E927F6A1C600122BE0 /* SUKLegacy.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SUKLegacy.swift; sourceTree = "<group>"; };
FD19363E2ACA66DE004BCF0F /* DatabaseSpec.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = DatabaseSpec.swift; sourceTree = "<group>"; };
FD1936402ACA7BD8004BCF0F /* Result+Utilities.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "Result+Utilities.swift"; sourceTree = "<group>"; };
FD1A94FA2900D1C2000D73D3 /* PersistableRecord+Utilities.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "PersistableRecord+Utilities.swift"; sourceTree = "<group>"; };
FD1A94FD2900D2EA000D73D3 /* PersistableRecordUtilitiesSpec.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PersistableRecordUtilitiesSpec.swift; sourceTree = "<group>"; };
FD1C98E3282E3C5B00B76F9E /* UINavigationBar+Utilities.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "UINavigationBar+Utilities.swift"; sourceTree = "<group>"; };
@ -3581,6 +3585,7 @@
FDF222082818D2B0000A4995 /* NSAttributedString+Utilities.swift */,
FD8ECF912938552800C0D1BB /* Threading.swift */,
FD8ECF93293856AF00C0D1BB /* Randomness.swift */,
FD1936402ACA7BD8004BCF0F /* Result+Utilities.swift */,
C33FDB38255A580B00E217F9 /* OWSBackgroundTask.h */,
C33FDC1B255A581F00E217F9 /* OWSBackgroundTask.m */,
FD29598C2A43BC0B00888A17 /* Version.swift */,
@ -3756,6 +3761,14 @@
path = LegacyDatabase;
sourceTree = "<group>";
};
FD19363D2ACA66CF004BCF0F /* Database */ = {
isa = PBXGroup;
children = (
FD19363E2ACA66DE004BCF0F /* DatabaseSpec.swift */,
);
path = Database;
sourceTree = "<group>";
};
FD1A94FC2900D2DB000D73D3 /* Utilities */ = {
isa = PBXGroup;
children = (
@ -3934,6 +3947,7 @@
isa = PBXGroup;
children = (
FD71161228D00D5300B47552 /* Conversations */,
FD19363D2ACA66CF004BCF0F /* Database */,
FD71161828D00E0100B47552 /* Settings */,
);
path = SessionTests;
@ -5806,6 +5820,7 @@
FD97B2402A3FEB050027DD57 /* ARC4RandomNumberGenerator.swift in Sources */,
FD37EA1128AB34B3003AE748 /* TypedTableAlteration.swift in Sources */,
FD30036E2A3AE26000B5A5FB /* CExceptionHelper.mm in Sources */,
FD1936412ACA7BD8004BCF0F /* Result+Utilities.swift in Sources */,
C3D9E4DA256778410040E4F3 /* UIImage+OWS.m in Sources */,
C32C600F256E07F5003C73A2 /* NSUserDefaults+OWS.m in Sources */,
FDE658A329418E2F00A33BC1 /* KeyPair.swift in Sources */,
@ -6309,6 +6324,7 @@
FD71161728D00DA400B47552 /* ThreadSettingsViewModelSpec.swift in Sources */,
FD2AAAF028ED57B500A49611 /* SynchronousStorage.swift in Sources */,
FD23CE292A6775650000B97C /* MockCrypto.swift in Sources */,
FD19363F2ACA66DE004BCF0F /* DatabaseSpec.swift in Sources */,
FD23CE332A67C4D90000B97C /* MockNetwork.swift in Sources */,
FD71161528D00D6700B47552 /* ThreadDisappearingMessagesViewModelSpec.swift in Sources */,
FD23CE2D2A678E1E0000B97C /* MockCaches.swift in Sources */,

View file

@ -3,7 +3,7 @@ import GRDB
import SessionUtilitiesKit
public enum SNMessagingKit: MigratableTarget { // Just to make the external API nice
public static func migrations(_ db: Database) -> TargetMigrations {
public static func migrations() -> TargetMigrations {
return TargetMigrations(
identifier: .messagingKit,
migrations: [

View file

@ -9,6 +9,14 @@ enum _001_InitialSetupMigration: Migration {
static let identifier: String = "initialSetup" // stringlint:disable
static let needsConfigSync: Bool = false
static let minExpectedRunDuration: TimeInterval = 0.1
static let fetchedTables: [(TableRecord & FetchableRecord).Type] = []
static let createdOrAlteredTables: [(TableRecord & FetchableRecord).Type] = [
Contact.self, Profile.self, SessionThread.self, DisappearingMessagesConfiguration.self,
ClosedGroup.self, ClosedGroupKeyPair.self, OpenGroup.self, Capability.self, BlindedIdLookup.self,
GroupMember.self, Interaction.self, RecipientState.self, Attachment.self,
InteractionAttachment.self, Quote.self, LinkPreview.self, ControlMessageProcessRecord.self,
ThreadTypingIndicator.self
]
public static let fullTextSearchTokenizer: FTS5TokenizerDescriptor = {
// Define the tokenizer to be used in all the FTS tables

View file

@ -12,6 +12,8 @@ enum _002_SetupStandardJobs: Migration {
static let identifier: String = "SetupStandardJobs" // stringlint:disable
static let needsConfigSync: Bool = false
static let minExpectedRunDuration: TimeInterval = 0.1
static let fetchedTables: [(TableRecord & FetchableRecord).Type] = []
static let createdOrAlteredTables: [(TableRecord & FetchableRecord).Type] = []
static func migrate(_ db: Database) throws {
// Start by adding the jobs that don't have collections (in the jobs like these

View file

@ -16,6 +16,8 @@ enum _003_YDBToGRDBMigration: Migration {
static let identifier: String = "YDBToGRDBMigration"
static let needsConfigSync: Bool = true
static let minExpectedRunDuration: TimeInterval = 20
static let fetchedTables: [(TableRecord & FetchableRecord).Type] = [Identity.self]
static let createdOrAlteredTables: [(TableRecord & FetchableRecord).Type] = []
static func migrate(_ db: Database) throws {
guard let dbConnection: YapDatabaseConnection = SUKLegacy.newDatabaseConnection() else {

View file

@ -11,6 +11,8 @@ enum _004_RemoveLegacyYDB: Migration {
static let identifier: String = "RemoveLegacyYDB" // stringlint:disable
static let needsConfigSync: Bool = false
static let minExpectedRunDuration: TimeInterval = 0.1
static let fetchedTables: [(TableRecord & FetchableRecord).Type] = []
static let createdOrAlteredTables: [(TableRecord & FetchableRecord).Type] = []
static func migrate(_ db: Database) throws {
try? SUKLegacy.deleteLegacyDatabaseFilesAndKey()

View file

@ -10,6 +10,8 @@ enum _005_FixDeletedMessageReadState: Migration {
static let identifier: String = "FixDeletedMessageReadState" // stringlint:disable
static let needsConfigSync: Bool = false
static let minExpectedRunDuration: TimeInterval = 0.01
static let fetchedTables: [(TableRecord & FetchableRecord).Type] = []
static let createdOrAlteredTables: [(TableRecord & FetchableRecord).Type] = []
static func migrate(_ db: Database) throws {
_ = try Interaction

View file

@ -11,6 +11,8 @@ enum _006_FixHiddenModAdminSupport: Migration {
static let identifier: String = "FixHiddenModAdminSupport" // stringlint:disable
static let needsConfigSync: Bool = false
static let minExpectedRunDuration: TimeInterval = 0.01
static let fetchedTables: [(TableRecord & FetchableRecord).Type] = []
static let createdOrAlteredTables: [(TableRecord & FetchableRecord).Type] = [GroupMember.self]
static func migrate(_ db: Database) throws {
try db.alter(table: GroupMember.self) { t in

View file

@ -10,6 +10,8 @@ enum _007_HomeQueryOptimisationIndexes: Migration {
static let identifier: String = "HomeQueryOptimisationIndexes" // stringlint:disable
static let needsConfigSync: Bool = false
static let minExpectedRunDuration: TimeInterval = 0.01
static let fetchedTables: [(TableRecord & FetchableRecord).Type] = []
static let createdOrAlteredTables: [(TableRecord & FetchableRecord).Type] = []
static func migrate(_ db: Database) throws {
try db.create(

View file

@ -10,6 +10,8 @@ enum _008_EmojiReacts: Migration {
static let identifier: String = "EmojiReacts" // stringlint:disable
static let needsConfigSync: Bool = false
static let minExpectedRunDuration: TimeInterval = 0.01
static let fetchedTables: [(TableRecord & FetchableRecord).Type] = []
static let createdOrAlteredTables: [(TableRecord & FetchableRecord).Type] = [Reaction.self]
static func migrate(_ db: Database) throws {
try db.create(table: Reaction.self) { t in

View file

@ -9,6 +9,8 @@ enum _009_OpenGroupPermission: Migration {
static let identifier: String = "OpenGroupPermission" // stringlint:disable
static let needsConfigSync: Bool = false
static let minExpectedRunDuration: TimeInterval = 0.01
static let fetchedTables: [(TableRecord & FetchableRecord).Type] = []
static let createdOrAlteredTables: [(TableRecord & FetchableRecord).Type] = [OpenGroup.self]
static func migrate(_ db: GRDB.Database) throws {
try db.alter(table: OpenGroup.self) { t in

View file

@ -11,6 +11,8 @@ enum _010_AddThreadIdToFTS: Migration {
static let identifier: String = "AddThreadIdToFTS" // stringlint:disable
static let needsConfigSync: Bool = false
static let minExpectedRunDuration: TimeInterval = 3
static let fetchedTables: [(TableRecord & FetchableRecord).Type] = []
static let createdOrAlteredTables: [(TableRecord & FetchableRecord).Type] = []
static func migrate(_ db: Database) throws {
// Can't actually alter a virtual table in SQLite so we need to drop and recreate it,

View file

@ -11,6 +11,8 @@ enum _011_AddPendingReadReceipts: Migration {
static let identifier: String = "AddPendingReadReceipts" // stringlint:disable
static let needsConfigSync: Bool = false
static let minExpectedRunDuration: TimeInterval = 0.01
static let fetchedTables: [(TableRecord & FetchableRecord).Type] = []
static let createdOrAlteredTables: [(TableRecord & FetchableRecord).Type] = [PendingReadReceipt.self]
static func migrate(_ db: Database) throws {
try db.create(table: PendingReadReceipt.self) { t in

View file

@ -10,6 +10,8 @@ enum _012_AddFTSIfNeeded: Migration {
static let identifier: String = "AddFTSIfNeeded" // stringlint:disable
static let needsConfigSync: Bool = false
static let minExpectedRunDuration: TimeInterval = 0.01
static let fetchedTables: [(TableRecord & FetchableRecord).Type] = []
static let createdOrAlteredTables: [(TableRecord & FetchableRecord).Type] = []
static func migrate(_ db: Database) throws {
// Fix an issue where the fullTextSearchTable was dropped unintentionally and global search stopped working.

View file

@ -14,6 +14,12 @@ enum _013_SessionUtilChanges: Migration {
static let identifier: String = "SessionUtilChanges"
static let needsConfigSync: Bool = true
static let minExpectedRunDuration: TimeInterval = 0.4
static let fetchedTables: [(TableRecord & FetchableRecord).Type] = [
GroupMember.self, ClosedGroupKeyPair.self, SessionThread.self
]
static let createdOrAlteredTables: [(TableRecord & FetchableRecord).Type] = [
SessionThread.self, Profile.self, GroupMember.self, ClosedGroupKeyPair.self, ConfigDump.self
]
static func migrate(_ db: Database) throws {
// Add `markedAsUnread` to the thread table

View file

@ -11,6 +11,11 @@ enum _014_GenerateInitialUserConfigDumps: Migration {
static let identifier: String = "GenerateInitialUserConfigDumps" // stringlint:disable
static let needsConfigSync: Bool = true
static let minExpectedRunDuration: TimeInterval = 4.0
static let fetchedTables: [(TableRecord & FetchableRecord).Type] = [
Identity.self, SessionThread.self, Contact.self, Profile.self, ClosedGroup.self,
OpenGroup.self, DisappearingMessagesConfiguration.self, GroupMember.self, ConfigDump.self
]
static let createdOrAlteredTables: [(TableRecord & FetchableRecord).Type] = []
static func migrate(_ db: Database) throws {
// If we have no ed25519 key then there is no need to create cached dump data

View file

@ -11,6 +11,10 @@ enum _015_BlockCommunityMessageRequests: Migration {
static let needsConfigSync: Bool = false
static let minExpectedRunDuration: TimeInterval = 0.01
static var requirements: [MigrationRequirement] = [.sessionUtilStateLoaded]
static let fetchedTables: [(TableRecord & FetchableRecord).Type] = [
Identity.self, Setting.self
]
static let createdOrAlteredTables: [(TableRecord & FetchableRecord).Type] = [Profile.self]
static func migrate(_ db: Database) throws {
// Add the new 'Profile' properties

View file

@ -12,6 +12,8 @@ enum _016_MakeBrokenProfileTimestampsNullable: Migration {
static let needsConfigSync: Bool = false
static let minExpectedRunDuration: TimeInterval = 0.1
static var requirements: [MigrationRequirement] = [.sessionUtilStateLoaded]
static let fetchedTables: [(TableRecord & FetchableRecord).Type] = []
static let createdOrAlteredTables: [(TableRecord & FetchableRecord).Type] = [Profile.self]
static func migrate(_ db: Database) throws {
/// SQLite doesn't support altering columns after creation so we need to create a new table with the setup we

View file

@ -138,12 +138,12 @@ public extension Profile {
try container.encode(id, forKey: .id)
try container.encode(name, forKey: .name)
try container.encode(lastNameUpdate, forKey: .lastNameUpdate)
try container.encodeIfPresent(lastNameUpdate, forKey: .lastNameUpdate)
try container.encodeIfPresent(nickname, forKey: .nickname)
try container.encodeIfPresent(profilePictureUrl, forKey: .profilePictureUrl)
try container.encodeIfPresent(profilePictureFileName, forKey: .profilePictureFileName)
try container.encodeIfPresent(profileEncryptionKey, forKey: .profileEncryptionKey)
try container.encode(lastProfilePictureUpdate, forKey: .lastProfilePictureUpdate)
try container.encodeIfPresent(lastProfilePictureUpdate, forKey: .lastProfilePictureUpdate)
try container.encodeIfPresent(blocksCommunityMessageRequests, forKey: .blocksCommunityMessageRequests)
try container.encodeIfPresent(lastBlocksCommunityMessageRequests, forKey: .lastBlocksCommunityMessageRequests)
}
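The switch from `encode` to `encodeIfPresent` above matters once the encoded properties are optional: `encode` writes an explicit null for a nil value while `encodeIfPresent` omits the key. A hedged sketch with `JSONEncoder` (MiniProfile is an illustrative stand-in, assuming `lastNameUpdate` is optional; it is not the real model):

import Foundation

struct MiniProfile: Encodable {
    let id: String
    let lastNameUpdate: TimeInterval?

    enum CodingKeys: String, CodingKey { case id, lastNameUpdate }

    func encode(to encoder: Encoder) throws {
        var container = encoder.container(keyedBy: CodingKeys.self)
        try container.encode(id, forKey: .id)
        // encodeIfPresent drops the key entirely when the value is nil
        try container.encodeIfPresent(lastNameUpdate, forKey: .lastNameUpdate)
    }
}

let json = try JSONEncoder().encode(MiniProfile(id: "05...", lastNameUpdate: nil))
print(String(data: json, encoding: .utf8)!) // {"id":"05..."}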
@ -334,9 +334,9 @@ public extension Profile {
guard id.count > 8 else { return id }
switch truncating {
case .start: return "...\(id.suffix(8))"
case .middle: return "\(id.prefix(4))...\(id.suffix(4))"
case .end: return "\(id.prefix(8))..."
case .start: return "...\(id.suffix(8))" //stringlint:disable
case .middle: return "\(id.prefix(4))...\(id.suffix(4))" //stringlint:disable
case .end: return "\(id.prefix(8))..." //stringlint:disable
}
}

View file

@ -136,7 +136,7 @@ public enum SessionUtil {
) throws -> UnsafeMutablePointer<config_object>? {
// Setup initial variables (including getting the memory address for any cached data)
var conf: UnsafeMutablePointer<config_object>? = nil
let error: UnsafeMutablePointer<CChar>? = nil
var error: [CChar] = [CChar](repeating: 0, count: 256)
let cachedDump: (data: UnsafePointer<UInt8>, length: Int)? = cachedData?.withUnsafeBytes { unsafeBytes in
return unsafeBytes.baseAddress.map {
(
@ -146,33 +146,26 @@ public enum SessionUtil {
}
}
// No need to deallocate the `cachedDump.data` as it'll automatically be cleaned up by
// the `cachedDump` lifecycle, but need to deallocate the `error` if it gets set
defer {
error?.deallocate()
}
// Try to create the object
var secretKey: [UInt8] = ed25519SecretKey
let result: Int32 = {
switch variant {
case .userProfile:
return user_profile_init(&conf, &secretKey, cachedDump?.data, (cachedDump?.length ?? 0), error)
return user_profile_init(&conf, &secretKey, cachedDump?.data, (cachedDump?.length ?? 0), &error)
case .contacts:
return contacts_init(&conf, &secretKey, cachedDump?.data, (cachedDump?.length ?? 0), error)
return contacts_init(&conf, &secretKey, cachedDump?.data, (cachedDump?.length ?? 0), &error)
case .convoInfoVolatile:
return convo_info_volatile_init(&conf, &secretKey, cachedDump?.data, (cachedDump?.length ?? 0), error)
return convo_info_volatile_init(&conf, &secretKey, cachedDump?.data, (cachedDump?.length ?? 0), &error)
case .userGroups:
return user_groups_init(&conf, &secretKey, cachedDump?.data, (cachedDump?.length ?? 0), error)
return user_groups_init(&conf, &secretKey, cachedDump?.data, (cachedDump?.length ?? 0), &error)
}
}()
guard result == 0 else {
let errorString: String = (error.map { String(cString: $0) } ?? "unknown error") // stringlint:disable
SNLog("[SessionUtil Error] Unable to create \(variant.rawValue) config object: \(errorString)")
SNLog("[SessionUtil Error] Unable to create \(variant.rawValue) config object: \(String(cString: error))")
throw SessionUtilError.unableToCreateConfigObject
}
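The change above replaces a nil `UnsafeMutablePointer<CChar>?` (which the C init functions could never write into) with a fixed-size `[CChar]` buffer; passing `&error` bridges the array to a `char *`. A minimal sketch of the pattern, with `strcpy` standing in for the libSession `*_init` calls:

import Foundation

var error: [CChar] = [CChar](repeating: 0, count: 256)

// A C function that receives `char *` writes a NUL-terminated message into the buffer
"unable to create config object".withCString { message in
    _ = strcpy(&error, message)
}

print(String(cString: error)) // unable to create config object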

View file

@ -25,7 +25,7 @@ class MessageSendJobSpec: QuickSpec {
@TestState var interactionAttachment: InteractionAttachment!
@TestState var mockStorage: Storage! = SynchronousStorage(
customWriter: try! DatabaseQueue(),
customMigrationTargets: [
migrationTargets: [
SNUtilitiesKit.self,
SNMessagingKit.self
],

View file

@ -18,7 +18,7 @@ class OpenGroupAPISpec: QuickSpec {
@TestState var mockStorage: Storage! = SynchronousStorage(
customWriter: try! DatabaseQueue(),
customMigrationTargets: [
migrationTargets: [
SNUtilitiesKit.self,
SNMessagingKit.self
],

View file

@ -98,7 +98,7 @@ class OpenGroupManagerSpec: QuickSpec {
@TestState var mockStorage: Storage! = SynchronousStorage(
customWriter: try! DatabaseQueue(),
customMigrationTargets: [
migrationTargets: [
SNUtilitiesKit.self,
SNMessagingKit.self
],

View file

@ -16,7 +16,7 @@ class MessageReceiverDecryptionSpec: QuickSpec {
@TestState var mockStorage: Storage! = SynchronousStorage(
customWriter: try! DatabaseQueue(),
customMigrationTargets: [
migrationTargets: [
SNUtilitiesKit.self,
SNMessagingKit.self
],

View file

@ -16,7 +16,7 @@ class MessageSenderEncryptionSpec: QuickSpec {
@TestState var mockStorage: Storage! = SynchronousStorage(
customWriter: try! DatabaseQueue(),
customMigrationTargets: [
migrationTargets: [
SNUtilitiesKit.self,
SNMessagingKit.self
],

View file

@ -5,7 +5,7 @@ import GRDB
import SessionUtilitiesKit
public enum SNSnodeKit: MigratableTarget { // Just to make the external API nice
public static func migrations(_ db: Database) -> TargetMigrations {
public static func migrations() -> TargetMigrations {
return TargetMigrations(
identifier: .snodeKit,
migrations: [

View file

@ -9,6 +9,10 @@ enum _001_InitialSetupMigration: Migration {
static let identifier: String = "initialSetup" // stringlint:disable
static let needsConfigSync: Bool = false
static let minExpectedRunDuration: TimeInterval = 0.1
static let fetchedTables: [(TableRecord & FetchableRecord).Type] = []
static let createdOrAlteredTables: [(TableRecord & FetchableRecord).Type] = [
Snode.self, SnodeSet.self, SnodeReceivedMessageInfo.self
]
static func migrate(_ db: Database) throws {
try db.create(table: Snode.self) { t in

View file

@ -11,6 +11,8 @@ enum _002_SetupStandardJobs: Migration {
static let identifier: String = "SetupStandardJobs" // stringlint:disable
static let needsConfigSync: Bool = false
static let minExpectedRunDuration: TimeInterval = 0.1
static let fetchedTables: [(TableRecord & FetchableRecord).Type] = []
static let createdOrAlteredTables: [(TableRecord & FetchableRecord).Type] = []
static func migrate(_ db: Database) throws {
try autoreleasepool {

View file

@ -11,6 +11,8 @@ enum _003_YDBToGRDBMigration: Migration {
static let target: TargetMigrations.Identifier = .snodeKit
static let identifier: String = "YDBToGRDBMigration"
static let needsConfigSync: Bool = false
static let fetchedTables: [(TableRecord & FetchableRecord).Type] = []
static let createdOrAlteredTables: [(TableRecord & FetchableRecord).Type] = []
/// This migration can take a while if it's a very large database or there are lots of closed groups (want this to account
/// for about 10% of the progress bar so we intentionally have a higher `minExpectedRunDuration` so show more

View file

@ -9,6 +9,8 @@ enum _004_FlagMessageHashAsDeletedOrInvalid: Migration {
static let target: TargetMigrations.Identifier = .snodeKit
static let identifier: String = "FlagMessageHashAsDeletedOrInvalid" // stringlint:disable
static let needsConfigSync: Bool = false
static let fetchedTables: [(TableRecord & FetchableRecord).Type] = []
static let createdOrAlteredTables: [(TableRecord & FetchableRecord).Type] = [SnodeReceivedMessageInfo.self]
/// This migration adds a flag to the `SnodeReceivedMessageInfo` so that when deleting interactions we can
/// ignore their hashes when subsequently trying to fetch new messages (which results in the storage server returning

View file

@ -16,7 +16,7 @@ class ThreadDisappearingMessagesSettingsViewModelSpec: QuickSpec {
@TestState var mockStorage: Storage! = SynchronousStorage(
customWriter: try! DatabaseQueue(),
customMigrationTargets: [
migrationTargets: [
SNUtilitiesKit.self,
SNSnodeKit.self,
SNMessagingKit.self,

View file

@ -16,7 +16,7 @@ class ThreadSettingsViewModelSpec: QuickSpec {
@TestState var mockStorage: Storage! = SynchronousStorage(
customWriter: try! DatabaseQueue(),
customMigrationTargets: [
migrationTargets: [
SNUtilitiesKit.self,
SNSnodeKit.self,
SNMessagingKit.self,

View file

@ -0,0 +1,341 @@
// Copyright © 2023 Rangeproof Pty Ltd. All rights reserved.
import Foundation
import GRDB
import Quick
import Nimble
import SessionUIKit
import SessionSnodeKit
import SessionMessagingKit
@testable import Session
@testable import SessionUtilitiesKit
class DatabaseSpec: QuickSpec {
fileprivate static let ignoredTables: Set<String> = [
"sqlite_sequence", "grdb_migrations", "*_fts*"
]
override class func spec() {
// MARK: Configuration
@TestState var dependencies: Dependencies! = Dependencies()
@TestState var mockStorage: Storage! = SynchronousStorage(customWriter: try! DatabaseQueue())
@TestState var initialResult: Result<Void, Error>! = nil
@TestState var finalResult: Result<Void, Error>! = nil
let allMigrations: [Storage.KeyedMigration] = SynchronousStorage.sortedMigrationInfo(
migrationTargets: [
SNUtilitiesKit.self,
SNSnodeKit.self,
SNMessagingKit.self,
SNUIKit.self
]
)
let dynamicTests: [MigrationTest] = MigrationTest.extractTests(allMigrations)
let allDatabaseTypes: [(TableRecord & FetchableRecord).Type] = MigrationTest.extractDatabaseTypes(allMigrations)
MigrationTest.explicitValues = [
// Specific enum values needed
TableColumn(SessionThread.self, .notificationSound): 1000,
TableColumn(ConfigDump.self, .variant): "userProfile",
// libSession will throw if we try to insert a community with an invalid
// 'server' value or a room that is too long
TableColumn(OpenGroup.self, .server): "https://www.oxen.io",
TableColumn(OpenGroup.self, .roomToken): "testRoom",
// libSession will fail to load state if the ConfigDump data is invalid
TableColumn(ConfigDump.self, .data): Data()
]
// MARK: - a Database
describe("a Database") {
beforeEach {
// FIXME: These should be mocked out instead of set this way
dependencies.caches.mutate(cache: .general) { $0.encodedPublicKey = "05\(TestConstants.publicKey)" }
SessionUtil.clearMemoryState()
}
// MARK: -- can be created from an empty state
it("can be created from an empty state") {
mockStorage.perform(
migrationTargets: [
SNUtilitiesKit.self,
SNSnodeKit.self,
SNMessagingKit.self,
SNUIKit.self
],
async: false,
onProgressUpdate: nil,
onMigrationRequirement: { _, _ in },
onComplete: { result, _ in initialResult = result }
)
expect(initialResult).to(beSuccess())
}
// MARK: -- can still parse the database types
it("can still parse the database types") {
mockStorage.perform(
sortedMigrations: allMigrations,
async: false,
onProgressUpdate: nil,
onMigrationRequirement: { _, _ in },
onComplete: { result, _ in initialResult = result }
)
expect(initialResult).to(beSuccess())
// Generate dummy data (fetching below won't do anything)
expect(try MigrationTest.generateDummyData(mockStorage, nullsWherePossible: false)).toNot(throwError())
// Fetch the records which are required by the migrations or were modified by them to
// ensure the decoding is also still working correctly
mockStorage.read { db in
allDatabaseTypes.forEach { table in
expect { try table.fetchAll(db) }.toNot(throwError())
}
}
}
// MARK: -- can still parse the database types setting null where possible
it("can still parse the database types setting null where possible") {
mockStorage.perform(
sortedMigrations: allMigrations,
async: false,
onProgressUpdate: nil,
onMigrationRequirement: { _, _ in },
onComplete: { result, _ in initialResult = result }
)
expect(initialResult).to(beSuccess())
// Generate dummy data (fetching below won't do anything)
expect(try MigrationTest.generateDummyData(mockStorage, nullsWherePossible: true)).toNot(throwError())
// Fetch the records which are required by the migrations or were modified by them to
// ensure the decoding is also still working correctly
mockStorage.read { db in
allDatabaseTypes.forEach { table in
expect { try table.fetchAll(db) }.toNot(throwError())
}
}
}
// MARK: -- can migrate from X to Y
dynamicTests.forEach { test in
it("can migrate from \(test.initialMigrationKey) to \(test.finalMigrationKey)") {
mockStorage.perform(
sortedMigrations: test.initialMigrations,
async: false,
onProgressUpdate: nil,
onMigrationRequirement: { _, _ in },
onComplete: { result, _ in initialResult = result }
)
expect(initialResult).to(beSuccess())
// Generate dummy data (otherwise structural issues or invalid foreign keys won't error)
expect(try MigrationTest.generateDummyData(mockStorage, nullsWherePossible: false)).toNot(throwError())
// Perform the target migrations to ensure the migrations themselves worked correctly
mockStorage.perform(
sortedMigrations: test.migrationsToTest,
async: false,
onProgressUpdate: nil,
onMigrationRequirement: { _, _ in },
onComplete: { result, _ in finalResult = result }
)
expect(finalResult).to(beSuccess())
/// Ensure all of the `fetchedTables` records can still be decoded correctly after the migrations have completed (since
/// we perform multiple migrations above it's possible these won't work after the `initialMigrations` but actually will
/// work when required as an intermediate migration could have satisfied the data requirements)
mockStorage.read { db in
test.migrationsToTest.forEach { _, _, migration in
migration.fetchedTables.forEach { table in
expect { try table.fetchAll(db) }.toNot(throwError())
}
}
}
}
}
}
}
}
// MARK: - Convenience
private extension Database.ColumnType {
init(rawValue: Any) {
switch rawValue as? String {
case .some(let value): self = Database.ColumnType(rawValue: value)
case .none: self = Database.ColumnType.any
}
}
}
private struct TableColumn: Hashable {
let tableName: String
let columnName: String
init<T: TableRecord & ColumnExpressible>(_ type: T.Type, _ column: T.Columns) {
self.tableName = T.databaseTableName
self.columnName = column.name
}
init?(_ tableName: String, _ columnName: Any?) {
guard let finalColumnName: String = columnName as? String else { return nil }
self.tableName = tableName
self.columnName = finalColumnName
}
}
private class MigrationTest {
static var explicitValues: [TableColumn: (any DatabaseValueConvertible)] = [:]
let initialMigrations: [Storage.KeyedMigration]
let migrationsToTest: [Storage.KeyedMigration]
var initialMigrationKey: String { return (initialMigrations.last?.key ?? "an empty database") }
var finalMigrationKey: String { return (migrationsToTest.last?.key ?? "invalid") }
private init(
initialMigrations: [Storage.KeyedMigration],
migrationsToTest: [Storage.KeyedMigration]
) {
self.initialMigrations = initialMigrations
self.migrationsToTest = migrationsToTest
}
// MARK: - Test Data
static func extractTests(_ allMigrations: [Storage.KeyedMigration]) -> [MigrationTest] {
return (0..<(allMigrations.count - 1))
.flatMap { index -> [MigrationTest] in
((index + 1)..<allMigrations.count).map { targetMigrationIndex -> MigrationTest in
MigrationTest(
initialMigrations: Array(allMigrations[0..<index]),
migrationsToTest: Array(allMigrations[index..<targetMigrationIndex])
)
}
}
}
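// Worked example of the pairs generated above, added for clarity (hypothetical migrations A, B, C, D):
//   initial: []      migrationsToTest: [A], [A, B], [A, B, C]
//   initial: [A]     migrationsToTest: [B], [B, C]
//   initial: [A, B]  migrationsToTest: [C]
// i.e. tests named "can migrate from an empty database to A/B/C", "from A to B/C" and "from B to C".
// The newest migration (D) never appears as a pairwise destination; it is exercised by the
// full-run "can still parse the database types" tests above.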
static func extractDatabaseTypes(_ allMigrations: [Storage.KeyedMigration]) -> [(TableRecord & FetchableRecord).Type] {
return allMigrations
.reduce(into: [:]) { result, next in
next.migration.fetchedTables.forEach { table in
result[ObjectIdentifier(table).hashValue] = table
}
next.migration.createdOrAlteredTables.forEach { table in
result[ObjectIdentifier(table).hashValue] = table
}
}
.values
.asArray()
}
// MARK: - Mock Data
static func generateDummyData(_ storage: Storage, nullsWherePossible: Bool) throws {
var generationError: Error? = nil
// The `PRAGMA foreign_keys` is a no-op within a transaction so we have to do it outside of one
try storage.testDbWriter?.writeWithoutTransaction { db in try db.execute(sql: "PRAGMA foreign_keys = OFF") }
storage.write { db in
do {
try MigrationTest.generateDummyData(db, nullsWherePossible: nullsWherePossible)
try db.checkForeignKeys()
}
catch { generationError = error }
}
try storage.testDbWriter?.writeWithoutTransaction { db in try db.execute(sql: "PRAGMA foreign_keys = ON") }
// Throw the error if there was one
if let error: Error = generationError { throw error }
}
private static func generateDummyData(_ db: Database, nullsWherePossible: Bool) throws {
// Fetch table schema information
let disallowedPrefixes: Set<String> = DatabaseSpec.ignoredTables
.filter { $0.hasPrefix("*") && !$0.hasSuffix("*") }
.map { String($0[$0.index(after: $0.startIndex)...]) }
.asSet()
let disallowedSuffixes: Set<String> = DatabaseSpec.ignoredTables
.filter { $0.hasSuffix("*") && !$0.hasPrefix("*") }
.map { String($0[$0.startIndex..<$0.index(before: $0.endIndex)]) }
.asSet()
let disallowedContains: Set<String> = DatabaseSpec.ignoredTables
.filter { $0.hasPrefix("*") && $0.hasSuffix("*") }
.map { String($0[$0.index(after: $0.startIndex)..<$0.index(before: $0.endIndex)]) }
.asSet()
let tables: [Row] = try Row
.fetchAll(db, sql: "SELECT * from sqlite_schema WHERE type = 'table'")
.filter { tableInfo -> Bool in
guard let name: String = tableInfo["name"] else { return false }
return (
!DatabaseSpec.ignoredTables.contains(name) &&
!disallowedPrefixes.contains(where: { name.hasPrefix($0) }) &&
!disallowedSuffixes.contains(where: { name.hasSuffix($0) }) &&
!disallowedContains.contains(where: { name.contains($0) })
)
}
// Generate data via schema inspection for all other tables
try tables.forEach { tableInfo in
switch tableInfo["name"] as? String {
case .none: throw StorageError.generic
case Identity.databaseTableName:
// If there is an 'Identity' table then insert "proper" identity info (otherwise mock
// data might get deleted as invalid in libSession migrations)
try [
Identity(variant: .x25519PublicKey, data: Data.data(fromHex: TestConstants.publicKey)!),
Identity(variant: .x25519PrivateKey, data: Data.data(fromHex: TestConstants.privateKey)!),
Identity(variant: .ed25519PublicKey, data: Data.data(fromHex: TestConstants.edPublicKey)!),
Identity(variant: .ed25519SecretKey, data: Data.data(fromHex: TestConstants.edSecretKey)!)
].forEach { try $0.insert(db) }
case .some(let name):
// No need to insert dummy data if it already exists in the table
guard try Int.fetchOne(db, sql: "SELECT COUNT(*) FROM '\(name)'") == 0 else { return }
let columnInfo: [Row] = try Row.fetchAll(db, sql: "PRAGMA table_info('\(name)');")
let validNames: [String] = columnInfo.compactMap { $0["name"].map { "'\($0)'" } }
let columnNames: String = validNames.joined(separator: ", ")
let columnArgs: String = validNames.map { _ in "?" }.joined(separator: ", ")
try db.execute(
sql: "INSERT INTO \(name) (\(columnNames)) VALUES (\(columnArgs))",
arguments: StatementArguments(columnInfo.map { column in
// If we want to allow setting nulls (and the column is nullable but not a primary
// key) then use null for its value
guard !nullsWherePossible || column["notnull"] != 0 || column["pk"] == 1 else {
return nil
}
// If this column has an explicitly defined value then use that
if
let key: TableColumn = TableColumn(name, column["name"]),
let explicitValue: (any DatabaseValueConvertible) = MigrationTest.explicitValues[key]
{
return explicitValue
}
// Otherwise generate some mock data (trying to use potentially real values in case
// something is a primary/foreign key)
switch Database.ColumnType(rawValue: column["type"]) {
case .text: return "05\(TestConstants.publicKey)"
case .blob: return Data([1, 2, 3])
case .boolean: return false
case .integer, .numeric, .double, .real: return 1
case .date, .datetime: return Date(timeIntervalSince1970: 1234567890)
case .any: return nil
default: return nil
}
})
)
}
}
}
}
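A small sketch of the `PRAGMA foreign_keys` caveat noted in `generateDummyData` above (assuming GRDB's default configuration, which enables foreign keys; SQLite ignores changes to this pragma while a transaction is open):

import GRDB

let queue: DatabaseQueue = try! DatabaseQueue()

try queue.write { db in // write {} wraps the block in a transaction
    try db.execute(sql: "PRAGMA foreign_keys = OFF")
    // Still enabled: the pragma is a no-op inside the transaction
    print(try Bool.fetchOne(db, sql: "PRAGMA foreign_keys") as Any) // Optional(true)
}

try queue.writeWithoutTransaction { db in
    try db.execute(sql: "PRAGMA foreign_keys = OFF")
    print(try Bool.fetchOne(db, sql: "PRAGMA foreign_keys") as Any) // Optional(false)
}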

View file

@ -15,7 +15,7 @@ class NotificationContentViewModelSpec: QuickSpec {
// MARK: Configuration
@TestState var mockStorage: Storage! = SynchronousStorage(
customWriter: try! DatabaseQueue(),
customMigrationTargets: [
migrationTargets: [
SNUtilitiesKit.self,
SNSnodeKit.self,
SNMessagingKit.self,

View file

@ -5,7 +5,7 @@ import GRDB
import SessionUtilitiesKit
public enum SNUIKit: MigratableTarget {
public static func migrations(_ db: Database) -> TargetMigrations {
public static func migrations() -> TargetMigrations {
return TargetMigrations(
identifier: .uiKit,
migrations: [

View file

@ -11,6 +11,8 @@ enum _001_ThemePreferences: Migration {
static let identifier: String = "ThemePreferences" // stringlint:disable
static let needsConfigSync: Bool = false
static let minExpectedRunDuration: TimeInterval = 0.1
static let fetchedTables: [(TableRecord & FetchableRecord).Type] = [Identity.self]
static let createdOrAlteredTables: [(TableRecord & FetchableRecord).Type] = []
static func migrate(_ db: Database) throws {
// Determine if the user was matching the system setting (previously the absence of this value

View file

@ -5,10 +5,10 @@ import GRDB
public enum SNUtilitiesKit: MigratableTarget { // Just to make the external API nice
public static var isRunningTests: Bool {
ProcessInfo.processInfo.environment["XCTestConfigurationFilePath"] != nil
ProcessInfo.processInfo.environment["XCTestConfigurationFilePath"] != nil // stringlint:disable
}
public static func migrations(_ db: Database) -> TargetMigrations {
public static func migrations() -> TargetMigrations {
return TargetMigrations(
identifier: .utilitiesKit,
migrations: [

View file

@ -8,6 +8,10 @@ enum _001_InitialSetupMigration: Migration {
static let identifier: String = "initialSetup" // stringlint:disable
static let needsConfigSync: Bool = false
static let minExpectedRunDuration: TimeInterval = 0.1
static let fetchedTables: [(TableRecord & FetchableRecord).Type] = []
static let createdOrAlteredTables: [(TableRecord & FetchableRecord).Type] = [
Identity.self, Job.self, JobDependencies.self, Setting.self
]
static func migrate(_ db: Database) throws {
try db.create(table: Identity.self) { t in

View file

@ -10,6 +10,8 @@ enum _002_SetupStandardJobs: Migration {
static let identifier: String = "SetupStandardJobs" // stringlint:disable
static let needsConfigSync: Bool = false
static let minExpectedRunDuration: TimeInterval = 0.1
static let fetchedTables: [(TableRecord & FetchableRecord).Type] = []
static let createdOrAlteredTables: [(TableRecord & FetchableRecord).Type] = []
static func migrate(_ db: Database) throws {
try autoreleasepool {

View file

@ -9,6 +9,8 @@ enum _003_YDBToGRDBMigration: Migration {
static let identifier: String = "YDBToGRDBMigration" // stringlint:disable
static let needsConfigSync: Bool = false
static let minExpectedRunDuration: TimeInterval = 0.1
static let fetchedTables: [(TableRecord & FetchableRecord).Type] = []
static let createdOrAlteredTables: [(TableRecord & FetchableRecord).Type] = []
static func migrate(_ db: Database) throws {
guard let dbConnection: YapDatabaseConnection = SUKLegacy.newDatabaseConnection() else {

View file

@ -9,6 +9,8 @@ enum _004_AddJobPriority: Migration {
static let identifier: String = "AddJobPriority" // stringlint:disable
static let needsConfigSync: Bool = false
static let minExpectedRunDuration: TimeInterval = 0.1
static let fetchedTables: [(TableRecord & FetchableRecord).Type] = []
static let createdOrAlteredTables: [(TableRecord & FetchableRecord).Type] = [Job.self]
static func migrate(_ db: Database) throws {
// Add `priority` to the job table

View file

@ -56,23 +56,16 @@ open class Storage {
fileprivate var dbWriter: DatabaseWriter?
internal var testDbWriter: DatabaseWriter? { dbWriter }
private var unprocessedMigrationRequirements: Atomic<[MigrationRequirement]> = Atomic(MigrationRequirement.allCases)
private var migrator: DatabaseMigrator?
private var migrationProgressUpdater: Atomic<((String, CGFloat) -> ())>?
private var migrationRequirementProcesser: Atomic<(Database?, MigrationRequirement) -> ()>?
// MARK: - Initialization
public init(
customWriter: DatabaseWriter? = nil,
customMigrationTargets: [MigratableTarget.Type]? = nil
) {
configureDatabase(customWriter: customWriter, customMigrationTargets: customMigrationTargets)
public init(customWriter: DatabaseWriter? = nil) {
configureDatabase(customWriter: customWriter)
}
private func configureDatabase(
customWriter: DatabaseWriter? = nil,
customMigrationTargets: [MigratableTarget.Type]? = nil
) {
private func configureDatabase(customWriter: DatabaseWriter? = nil) {
// Create the database directory if needed and ensure its protection level is set before attempting to
// create the database KeySpec or the database itself
OWSFileSystem.ensureDirectoryExists(Storage.sharedDatabaseDirectoryPath)
@ -83,13 +76,6 @@ open class Storage {
dbWriter = customWriter
isValid = true
Storage.internalHasCreatedValidInstance.mutate { $0 = true }
perform(
migrationTargets: (customMigrationTargets ?? []),
async: false,
onProgressUpdate: nil,
onMigrationRequirement: { _, _ in },
onComplete: { _, _ in }
)
return
}
@ -148,6 +134,8 @@ open class Storage {
// MARK: - Migrations
public typealias KeyedMigration = (key: String, identifier: TargetMigrations.Identifier, migration: Migration.Type)
public static func appliedMigrationIdentifiers(_ db: Database) -> Set<String> {
let migrator: DatabaseMigrator = DatabaseMigrator()
@ -155,12 +143,50 @@ open class Storage {
.defaulting(to: [])
}
public static func sortedMigrationInfo(migrationTargets: [MigratableTarget.Type]) -> [KeyedMigration] {
typealias MigrationInfo = (identifier: TargetMigrations.Identifier, migrations: TargetMigrations.MigrationSet)
return migrationTargets
.map { target -> TargetMigrations in target.migrations() }
.sorted()
.reduce(into: [[MigrationInfo]]()) { result, next in
next.migrations.enumerated().forEach { index, migrationSet in
if result.count <= index {
result.append([])
}
result[index] = (result[index] + [(next.identifier, migrationSet)])
}
}
.reduce(into: []) { result, next in
next.forEach { identifier, migrations in
result.append(contentsOf: migrations.map { (identifier.key(with: $0), identifier, $0) })
}
}
}
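// Worked example, added for clarity (hypothetical targets A and B, not real identifiers): if A's
// migrations() returns the migration sets [[a1, a2], [a3]], B's returns [[b1], [b2]], and A
// orders before B after the sort, the first reduce groups them by set index as
// [[(A, [a1, a2]), (B, [b1])], [(A, [a3]), (B, [b2])]] and the second flattens that into keyed
// migrations in the order a1, a2, b1, a3, b2; every target's first migration set is ordered
// before any target's second set.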
public func perform(
migrationTargets: [MigratableTarget.Type],
async: Bool = true,
onProgressUpdate: ((CGFloat, TimeInterval) -> ())?,
onMigrationRequirement: @escaping (Database?, MigrationRequirement) -> (),
onComplete: @escaping (Swift.Result<Void, Error>, Bool) -> ()
) {
perform(
sortedMigrations: Storage.sortedMigrationInfo(migrationTargets: migrationTargets),
async: async,
onProgressUpdate: onProgressUpdate,
onMigrationRequirement: onMigrationRequirement,
onComplete: onComplete
)
}
internal func perform(
sortedMigrations: [KeyedMigration],
async: Bool,
onProgressUpdate: ((CGFloat, TimeInterval) -> ())?,
onMigrationRequirement: @escaping (Database?, MigrationRequirement) -> (),
onComplete: @escaping (Swift.Result<Void, Error>, Bool) -> ()
) {
guard isValid, let dbWriter: DatabaseWriter = dbWriter else {
let error: Error = (startupError ?? StorageError.startupFailed)
@ -169,68 +195,34 @@ open class Storage {
return
}
typealias MigrationInfo = (identifier: TargetMigrations.Identifier, migrations: TargetMigrations.MigrationSet)
let maybeSortedMigrationInfo: [MigrationInfo]? = try? dbWriter
.read { db -> [MigrationInfo] in
migrationTargets
.map { target -> TargetMigrations in target.migrations(db) }
.sorted()
.reduce(into: [[MigrationInfo]]()) { result, next in
next.migrations.enumerated().forEach { index, migrationSet in
if result.count <= index {
result.append([])
}
result[index] = (result[index] + [(next.identifier, migrationSet)])
}
}
.reduce(into: []) { result, next in result.append(contentsOf: next) }
}
guard let sortedMigrationInfo: [MigrationInfo] = maybeSortedMigrationInfo else {
SNLog("[Database Error] Statup failed with error: Unable to prepare migrations")
onComplete(.failure(StorageError.startupFailed), false)
return
}
// Setup and run any required migrations
migrator = { [weak self] in
var migrator: DatabaseMigrator = DatabaseMigrator()
sortedMigrationInfo.forEach { migrationInfo in
migrationInfo.migrations.forEach { migration in
migrator.registerMigration(self, targetIdentifier: migrationInfo.identifier, migration: migration)
}
}
return migrator
}()
var migrator: DatabaseMigrator = DatabaseMigrator()
sortedMigrations.forEach { _, identifier, migration in
migrator.registerMigration(self, targetIdentifier: identifier, migration: migration)
}
// Determine which migrations need to be performed and gather the relevant settings needed to
// inform the app of progress/states
let completedMigrations: [String] = (try? dbWriter.read { db in try migrator?.completedMigrations(db) })
let completedMigrations: [String] = (try? dbWriter.read { db in try migrator.completedMigrations(db) })
.defaulting(to: [])
let unperformedMigrations: [(key: String, migration: Migration.Type)] = sortedMigrationInfo
let unperformedMigrations: [KeyedMigration] = sortedMigrations
.reduce(into: []) { result, next in
next.migrations.forEach { migration in
let key: String = next.identifier.key(with: migration)
guard !completedMigrations.contains(key) else { return }
result.append((key, migration))
}
guard !completedMigrations.contains(next.key) else { return }
result.append(next)
}
let migrationToDurationMap: [String: TimeInterval] = unperformedMigrations
.reduce(into: [:]) { result, next in
result[next.key] = next.migration.minExpectedRunDuration
}
let unperformedMigrationDurations: [TimeInterval] = unperformedMigrations
.map { _, migration in migration.minExpectedRunDuration }
.map { _, _, migration in migration.minExpectedRunDuration }
let totalMinExpectedDuration: TimeInterval = migrationToDurationMap.values.reduce(0, +)
let needsConfigSync: Bool = unperformedMigrations
.contains(where: { _, migration in migration.needsConfigSync })
.contains(where: { _, _, migration in migration.needsConfigSync })
self.migrationProgressUpdater = Atomic({ targetKey, progress in
guard let migrationIndex: Int = unperformedMigrations.firstIndex(where: { key, _ in key == targetKey }) else {
guard let migrationIndex: Int = unperformedMigrations.firstIndex(where: { key, _, _ in key == targetKey }) else {
return
}
@ -250,14 +242,22 @@ open class Storage {
let migrationCompleted: (Swift.Result<Void, Error>) -> () = { [weak self] result in
// Process any unprocessed requirements which need to be processed before completion
// then clear out the state
self?.unprocessedMigrationRequirements.wrappedValue
.filter { $0.shouldProcessAtCompletionIfNotRequired }
.forEach { self?.migrationRequirementProcesser?.wrappedValue(nil, $0) }
let requirementProcessor: ((Database?, MigrationRequirement) -> ())? = self?.migrationRequirementProcesser?.wrappedValue
let remainingMigrationRequirements: [MigrationRequirement] = (self?.unprocessedMigrationRequirements.wrappedValue
.filter { $0.shouldProcessAtCompletionIfNotRequired })
.defaulting(to: [])
self?.migrationsCompleted.mutate { $0 = true }
self?.migrationProgressUpdater = nil
self?.migrationRequirementProcesser = nil
SUKLegacy.clearLegacyDatabaseInstance()
// Process any remaining migration requirements
if !remainingMigrationRequirements.isEmpty {
self?.write { db in
remainingMigrationRequirements.forEach { requirementProcessor?(db, $0) }
}
}
// Reset in case there is a requirement on a migration which runs when returning from
// the background
self?.unprocessedMigrationRequirements.mutate { $0 = MigrationRequirement.allCases }
@ -289,13 +289,9 @@ open class Storage {
}
// Note: The non-async migration should only be used for unit tests
guard async else {
do { try self.migrator?.migrate(dbWriter) }
catch { migrationCompleted(Swift.Result<Void, Error>.failure(error)) }
return
}
guard async else { return migrationCompleted(Result(try migrator.migrate(dbWriter))) }
self.migrator?.asyncMigrate(dbWriter) { result in
migrator.asyncMigrate(dbWriter) { result in
let finalResult: Swift.Result<Void, Error> = {
switch result {
case .failure(let error): return .failure(error)

View file

@ -10,6 +10,14 @@ public protocol Migration {
static var minExpectedRunDuration: TimeInterval { get }
static var requirements: [MigrationRequirement] { get }
/// This includes any tables which are fetched from as part of the migration so that we can test they can still be parsed
/// correctly within migration tests
static var fetchedTables: [(TableRecord & FetchableRecord).Type] { get }
/// This includes any tables which are created or altered as part of the migration so that we can test they can still be parsed
/// correctly within migration tests
static var createdOrAlteredTables: [(TableRecord & FetchableRecord).Type] { get }
static func migrate(_ db: Database) throws
}
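A hedged sketch of a migration conforming to the extended protocol, mirroring the migrations earlier in this diff (the identifier, table, and SQL below are illustrative only; the surrounding SessionUtilitiesKit types such as Migration, TargetMigrations and Job are assumed to be in scope):

import Foundation
import GRDB

enum _00X_ExampleMigration: Migration {
    static let target: TargetMigrations.Identifier = .utilitiesKit
    static let identifier: String = "ExampleMigration" // stringlint:disable
    static let needsConfigSync: Bool = false
    static let minExpectedRunDuration: TimeInterval = 0.1
    // Tables read during the migration, so DatabaseSpec can check they still decode
    static let fetchedTables: [(TableRecord & FetchableRecord).Type] = [Job.self]
    // Tables created or altered by the migration
    static let createdOrAlteredTables: [(TableRecord & FetchableRecord).Type] = [Job.self]

    static func migrate(_ db: Database) throws {
        // Plain SQL keeps the sketch independent of the project's typed table helpers;
        // the table and column names here are hypothetical
        try db.execute(sql: "ALTER TABLE job ADD COLUMN exampleFlag BOOLEAN NOT NULL DEFAULT 0")
    }
}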

View file

@ -4,7 +4,7 @@ import Foundation
import GRDB
public protocol MigratableTarget {
static func migrations(_ db: Database) -> TargetMigrations
static func migrations() -> TargetMigrations
}
public struct TargetMigrations: Comparable {

View file

@ -0,0 +1,26 @@
// Copyright © 2023 Rangeproof Pty Ltd. All rights reserved.
import Foundation
public extension Result where Failure == Error {
init(_ closure: @autoclosure () throws -> Success) {
do { self = Result.success(try closure()) }
catch { self = Result.failure(error) }
}
func onFailure(closure: (Failure) -> ()) -> Result<Success, Failure> {
switch self {
case .success: break
case .failure(let failure): closure(failure)
}
return self
}
func successOrThrow() throws -> Success {
switch self {
case .success(let value): return value
case .failure(let error): throw error
}
}
}
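A brief usage sketch of these helpers (the throwing function and error type are hypothetical); `Storage.perform` uses the same wrapping for its non-async migration call earlier in this diff:

import Foundation

enum DemoError: Error { case empty }

func firstValue(in values: [Int]) throws -> Int {
    guard let first = values.first else { throw DemoError.empty }
    return first
}

do {
    // Wrap a throwing call in a Result, observe failures, then unwrap back into throwing code
    let result: Result<Int, Error> = Result(try firstValue(in: [1, 2, 3]))
    let value: Int = try result
        .onFailure { error in print("lookup failed: \(error)") }
        .successOrThrow()
    print(value) // 1
} catch {
    print("unexpected error: \(error)")
}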

View file

@ -14,7 +14,7 @@ class IdentitySpec: QuickSpec {
@TestState var mockStorage: Storage! = SynchronousStorage(
customWriter: try! DatabaseQueue(),
customMigrationTargets: [
migrationTargets: [
SNUtilitiesKit.self
]
)

View file

@ -15,7 +15,7 @@ class PersistableRecordUtilitiesSpec: QuickSpec {
@TestState var customWriter: DatabaseQueue! = try! DatabaseQueue()
@TestState var mockStorage: Storage! = SynchronousStorage(
customWriter: customWriter,
customMigrationTargets: [
migrationTargets: [
TestTarget.self
]
)
@ -669,6 +669,8 @@ fileprivate enum TestInsertTestTypeMigration: Migration {
static let identifier: String = "TestInsertTestType"
static let needsConfigSync: Bool = false
static let minExpectedRunDuration: TimeInterval = 0
static let fetchedTables: [(TableRecord & FetchableRecord).Type] = []
static let createdOrAlteredTables: [(FetchableRecord & TableRecord).Type] = [TestType.self, MutableTestType.self]
static func migrate(_ db: Database) throws {
try db.create(table: TestType.self) { t in
@ -687,6 +689,8 @@ fileprivate enum TestAddColumnMigration: Migration {
static let identifier: String = "TestAddColumn"
static let needsConfigSync: Bool = false
static let minExpectedRunDuration: TimeInterval = 0
static let fetchedTables: [(TableRecord & FetchableRecord).Type] = []
static let createdOrAlteredTables: [(FetchableRecord & TableRecord).Type] = [TestType.self, MutableTestType.self]
static func migrate(_ db: Database) throws {
try db.alter(table: TestType.self) { t in
@ -700,7 +704,7 @@ fileprivate enum TestAddColumnMigration: Migration {
}
fileprivate struct TestTarget: MigratableTarget {
static func migrations(_ db: Database) -> TargetMigrations {
static func migrations() -> TargetMigrations {
return TargetMigrations(
identifier: .test,
migrations: (0..<100)

View file

@ -38,7 +38,7 @@ class JobRunnerSpec: QuickSpec {
)
@TestState var mockStorage: Storage! = SynchronousStorage(
customWriter: try! DatabaseQueue(),
customMigrationTargets: [
migrationTargets: [
SNUtilitiesKit.self
],
initialData: { db in

View file

@ -2,9 +2,8 @@
import Foundation
import GRDB
import SessionUtilitiesKit
@testable import SessionMessagingKit
@testable import SessionUtilitiesKit
class MockJobRunner: Mock<JobRunnerType>, JobRunnerType {
// MARK: - Configuration

View file

@ -8,10 +8,33 @@ import GRDB
class SynchronousStorage: Storage {
public init(
customWriter: DatabaseWriter? = nil,
customMigrationTargets: [MigratableTarget.Type]? = nil,
migrationTargets: [MigratableTarget.Type]? = nil,
migrations: [Storage.KeyedMigration]? = nil,
initialData: ((Database) throws -> ())? = nil
) {
super.init(customWriter: customWriter, customMigrationTargets: customMigrationTargets)
super.init(customWriter: customWriter)
// Process any migration targets first
if let migrationTargets: [MigratableTarget.Type] = migrationTargets {
perform(
migrationTargets: migrationTargets,
async: false,
onProgressUpdate: nil,
onMigrationRequirement: { _, _ in },
onComplete: { _, _ in }
)
}
// Then process any provided migration info
if let migrations: [Storage.KeyedMigration] = migrations {
perform(
sortedMigrations: migrations,
async: false,
onProgressUpdate: nil,
onMigrationRequirement: { _, _ in },
onComplete: { _, _ in }
)
}
write { db in try initialData?(db) }
}
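Tying the pieces together, a hedged usage sketch mirroring DatabaseSpec earlier in this diff (module imports assumed; the prefix length is arbitrary): tests can now run either full targets or a pre-sorted slice of keyed migrations against an in-memory database.

import GRDB

// Run the full targets, as most specs in this commit do
let fullStorage: SynchronousStorage = SynchronousStorage(
    customWriter: try! DatabaseQueue(),
    migrationTargets: [SNUtilitiesKit.self, SNSnodeKit.self, SNMessagingKit.self]
)

// Or run only the first few keyed migrations
let sortedMigrations: [Storage.KeyedMigration] = SynchronousStorage.sortedMigrationInfo(
    migrationTargets: [SNUtilitiesKit.self, SNSnodeKit.self, SNMessagingKit.self]
)
let partialStorage: SynchronousStorage = SynchronousStorage(
    customWriter: try! DatabaseQueue(),
    migrations: Array(sortedMigrations.prefix(3))
)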