Merge remote-tracking branch 'upstream/dev' into disappearing-message-redesign

# Conflicts:
#	_SharedTestUtilities/MockJobRunner.swift
Morgan Pretty 2023-10-03 15:00:35 +11:00
commit 90cd3fb5e0
65 changed files with 774 additions and 212 deletions

View File

@ -38,12 +38,14 @@ extension ProjectState {
.contains("print("),
.contains("NSLog("),
.contains("SNLog("),
.contains("SNLogNotTests("),
.contains("owsFailDebug("),
.contains("#imageLiteral(resourceName:"),
.contains("UIImage(named:"),
.contains("UIImage(systemName:"),
.contains("[UIImage imageNamed:"),
.contains("UIFont(name:"),
.contains(".dateFormat ="),
.contains(".accessibilityLabel ="),
.contains(".accessibilityValue ="),
.contains(".accessibilityIdentifier ="),
@ -51,6 +53,10 @@ extension ProjectState {
.contains("accessibilityLabel:"),
.contains("Accessibility(identifier:"),
.contains("Accessibility(label:"),
.contains("NSAttributedString.Key("),
.contains("Notification.Name("),
.contains("Notification.Key("),
.contains("DispatchQueue("),
.containsAnd("identifier:", .previousLine(numEarlier: 1, .contains("Accessibility("))),
.containsAnd("label:", .previousLine(numEarlier: 1, .contains("Accessibility("))),
.containsAnd("label:", .previousLine(numEarlier: 2, .contains("Accessibility("))),
@ -58,7 +64,8 @@ extension ProjectState {
.regex(".*static var databaseTableName: String"),
.regex("Logger\\..*\\("),
.regex("OWSLogger\\..*\\("),
.regex("case .* = ")
.regex("case .* = "),
.regex("Error.*\\(")
]
}
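
The patterns above form a small line-matching DSL used by the project's lint tooling. As a rough, self-contained sketch (the MatchRule type and its cases below are illustrative assumptions, not the project's actual implementation), rules like these could be modelled as:

import Foundation

// Hypothetical model of the line-matching rules listed above; the real
// implementation lives in the project's lint script and may differ.
indirect enum MatchRule {
    case contains(String)
    case regex(String)
    case containsAnd(String, numEarlier: Int, MatchRule)

    /// Returns true when `lines[index]` (and, for `containsAnd`, an earlier line) matches.
    func matches(_ lines: [String], at index: Int) -> Bool {
        switch self {
            case .contains(let value):
                return lines[index].contains(value)

            case .regex(let pattern):
                return lines[index].range(of: pattern, options: .regularExpression) != nil

            case .containsAnd(let value, let numEarlier, let previousRule):
                guard index - numEarlier >= 0 else { return false }

                return lines[index].contains(value) &&
                    previousRule.matches(lines, at: index - numEarlier)
        }
    }
}

// Example: exclude an `identifier:` line only when the line above contains "Accessibility(".
let rule: MatchRule = .containsAnd("identifier:", numEarlier: 1, .contains("Accessibility("))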

View File

@ -515,6 +515,8 @@
FD17D7E527F6A09900122BE0 /* Identity.swift in Sources */ = {isa = PBXBuildFile; fileRef = FD17D7E427F6A09900122BE0 /* Identity.swift */; };
FD17D7E727F6A16700122BE0 /* _003_YDBToGRDBMigration.swift in Sources */ = {isa = PBXBuildFile; fileRef = FD17D7E627F6A16700122BE0 /* _003_YDBToGRDBMigration.swift */; };
FD17D7EA27F6A1C600122BE0 /* SUKLegacy.swift in Sources */ = {isa = PBXBuildFile; fileRef = FD17D7E927F6A1C600122BE0 /* SUKLegacy.swift */; };
FD19363F2ACA66DE004BCF0F /* DatabaseSpec.swift in Sources */ = {isa = PBXBuildFile; fileRef = FD19363E2ACA66DE004BCF0F /* DatabaseSpec.swift */; };
FD1936412ACA7BD8004BCF0F /* Result+Utilities.swift in Sources */ = {isa = PBXBuildFile; fileRef = FD1936402ACA7BD8004BCF0F /* Result+Utilities.swift */; };
FD1A94FB2900D1C2000D73D3 /* PersistableRecord+Utilities.swift in Sources */ = {isa = PBXBuildFile; fileRef = FD1A94FA2900D1C2000D73D3 /* PersistableRecord+Utilities.swift */; };
FD1A94FE2900D2EA000D73D3 /* PersistableRecordUtilitiesSpec.swift in Sources */ = {isa = PBXBuildFile; fileRef = FD1A94FD2900D2EA000D73D3 /* PersistableRecordUtilitiesSpec.swift */; };
FD1C98E4282E3C5B00B76F9E /* UINavigationBar+Utilities.swift in Sources */ = {isa = PBXBuildFile; fileRef = FD1C98E3282E3C5B00B76F9E /* UINavigationBar+Utilities.swift */; };
@ -1675,6 +1677,8 @@
FD17D7E427F6A09900122BE0 /* Identity.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Identity.swift; sourceTree = "<group>"; };
FD17D7E627F6A16700122BE0 /* _003_YDBToGRDBMigration.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = _003_YDBToGRDBMigration.swift; sourceTree = "<group>"; };
FD17D7E927F6A1C600122BE0 /* SUKLegacy.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SUKLegacy.swift; sourceTree = "<group>"; };
FD19363E2ACA66DE004BCF0F /* DatabaseSpec.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = DatabaseSpec.swift; sourceTree = "<group>"; };
FD1936402ACA7BD8004BCF0F /* Result+Utilities.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "Result+Utilities.swift"; sourceTree = "<group>"; };
FD1A94FA2900D1C2000D73D3 /* PersistableRecord+Utilities.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "PersistableRecord+Utilities.swift"; sourceTree = "<group>"; };
FD1A94FD2900D2EA000D73D3 /* PersistableRecordUtilitiesSpec.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PersistableRecordUtilitiesSpec.swift; sourceTree = "<group>"; };
FD1C98E3282E3C5B00B76F9E /* UINavigationBar+Utilities.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "UINavigationBar+Utilities.swift"; sourceTree = "<group>"; };
@ -3612,6 +3616,7 @@
FDF222082818D2B0000A4995 /* NSAttributedString+Utilities.swift */,
FD8ECF912938552800C0D1BB /* Threading.swift */,
FD8ECF93293856AF00C0D1BB /* Randomness.swift */,
FD1936402ACA7BD8004BCF0F /* Result+Utilities.swift */,
C33FDB38255A580B00E217F9 /* OWSBackgroundTask.h */,
C33FDC1B255A581F00E217F9 /* OWSBackgroundTask.m */,
FD29598C2A43BC0B00888A17 /* Version.swift */,
@ -3789,6 +3794,14 @@
path = LegacyDatabase;
sourceTree = "<group>";
};
FD19363D2ACA66CF004BCF0F /* Database */ = {
isa = PBXGroup;
children = (
FD19363E2ACA66DE004BCF0F /* DatabaseSpec.swift */,
);
path = Database;
sourceTree = "<group>";
};
FD1A94FC2900D2DB000D73D3 /* Utilities */ = {
isa = PBXGroup;
children = (
@ -3968,6 +3981,7 @@
isa = PBXGroup;
children = (
FD71161228D00D5300B47552 /* Conversations */,
FD19363D2ACA66CF004BCF0F /* Database */,
FD71161828D00E0100B47552 /* Settings */,
);
path = SessionTests;
@ -5851,6 +5865,7 @@
FD97B2402A3FEB050027DD57 /* ARC4RandomNumberGenerator.swift in Sources */,
FD37EA1128AB34B3003AE748 /* TypedTableAlteration.swift in Sources */,
FD30036E2A3AE26000B5A5FB /* CExceptionHelper.mm in Sources */,
FD1936412ACA7BD8004BCF0F /* Result+Utilities.swift in Sources */,
C3D9E4DA256778410040E4F3 /* UIImage+OWS.m in Sources */,
C32C600F256E07F5003C73A2 /* NSUserDefaults+OWS.m in Sources */,
FDE658A329418E2F00A33BC1 /* KeyPair.swift in Sources */,
@ -6362,6 +6377,7 @@
FD71161728D00DA400B47552 /* ThreadSettingsViewModelSpec.swift in Sources */,
FD2AAAF028ED57B500A49611 /* SynchronousStorage.swift in Sources */,
FD23CE292A6775650000B97C /* MockCrypto.swift in Sources */,
FD19363F2ACA66DE004BCF0F /* DatabaseSpec.swift in Sources */,
FD23CE332A67C4D90000B97C /* MockNetwork.swift in Sources */,
FD71161528D00D6700B47552 /* ThreadDisappearingMessagesViewModelSpec.swift in Sources */,
FD23CE2D2A678E1E0000B97C /* MockCaches.swift in Sources */,
@ -6648,7 +6664,7 @@
"CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer";
CODE_SIGN_STYLE = Automatic;
COPY_PHASE_STRIP = NO;
CURRENT_PROJECT_VERSION = 426;
CURRENT_PROJECT_VERSION = 427;
DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym";
DEVELOPMENT_TEAM = SUQ8J2PCT7;
FRAMEWORK_SEARCH_PATHS = "$(inherited)";
@ -6672,7 +6688,7 @@
"@executable_path/Frameworks",
"@executable_path/../../Frameworks",
);
MARKETING_VERSION = 2.4.2;
MARKETING_VERSION = 2.4.3;
MTL_ENABLE_DEBUG_INFO = YES;
PRODUCT_BUNDLE_IDENTIFIER = "com.loki-project.loki-messenger.ShareExtension";
PRODUCT_NAME = "$(TARGET_NAME)";
@ -6720,7 +6736,7 @@
"CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer";
CODE_SIGN_STYLE = Automatic;
COPY_PHASE_STRIP = NO;
CURRENT_PROJECT_VERSION = 426;
CURRENT_PROJECT_VERSION = 427;
DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym";
DEVELOPMENT_TEAM = SUQ8J2PCT7;
ENABLE_NS_ASSERTIONS = NO;
@ -6749,7 +6765,7 @@
"@executable_path/Frameworks",
"@executable_path/../../Frameworks",
);
MARKETING_VERSION = 2.4.2;
MARKETING_VERSION = 2.4.3;
MTL_ENABLE_DEBUG_INFO = NO;
PRODUCT_BUNDLE_IDENTIFIER = "com.loki-project.loki-messenger.ShareExtension";
PRODUCT_NAME = "$(TARGET_NAME)";
@ -6785,7 +6801,7 @@
"CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer";
CODE_SIGN_STYLE = Automatic;
COPY_PHASE_STRIP = NO;
CURRENT_PROJECT_VERSION = 426;
CURRENT_PROJECT_VERSION = 427;
DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym";
DEVELOPMENT_TEAM = SUQ8J2PCT7;
FRAMEWORK_SEARCH_PATHS = "$(inherited)";
@ -6808,7 +6824,7 @@
"@executable_path/Frameworks",
"@executable_path/../../Frameworks",
);
MARKETING_VERSION = 2.4.2;
MARKETING_VERSION = 2.4.3;
MTL_ENABLE_DEBUG_INFO = INCLUDE_SOURCE;
MTL_FAST_MATH = YES;
PRODUCT_BUNDLE_IDENTIFIER = "com.loki-project.loki-messenger.NotificationServiceExtension";
@ -6859,7 +6875,7 @@
"CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer";
CODE_SIGN_STYLE = Automatic;
COPY_PHASE_STRIP = NO;
CURRENT_PROJECT_VERSION = 426;
CURRENT_PROJECT_VERSION = 427;
DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym";
DEVELOPMENT_TEAM = SUQ8J2PCT7;
ENABLE_NS_ASSERTIONS = NO;
@ -6887,7 +6903,7 @@
"@executable_path/Frameworks",
"@executable_path/../../Frameworks",
);
MARKETING_VERSION = 2.4.2;
MARKETING_VERSION = 2.4.3;
MTL_ENABLE_DEBUG_INFO = NO;
MTL_FAST_MATH = YES;
PRODUCT_BUNDLE_IDENTIFIER = "com.loki-project.loki-messenger.NotificationServiceExtension";
@ -7819,7 +7835,7 @@
CODE_SIGN_ENTITLEMENTS = Session/Meta/Signal.entitlements;
CODE_SIGN_IDENTITY = "iPhone Developer";
"CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer";
CURRENT_PROJECT_VERSION = 426;
CURRENT_PROJECT_VERSION = 427;
DEVELOPMENT_TEAM = SUQ8J2PCT7;
FRAMEWORK_SEARCH_PATHS = (
"$(inherited)",
@ -7857,7 +7873,7 @@
"$(SRCROOT)",
);
LLVM_LTO = NO;
MARKETING_VERSION = 2.4.2;
MARKETING_VERSION = 2.4.3;
OTHER_LDFLAGS = "$(inherited)";
OTHER_SWIFT_FLAGS = "$(inherited) \"-D\" \"COCOAPODS\" \"-DDEBUG\"";
PRODUCT_BUNDLE_IDENTIFIER = "com.loki-project.loki-messenger";
@ -7890,7 +7906,7 @@
CODE_SIGN_ENTITLEMENTS = Session/Meta/Signal.entitlements;
CODE_SIGN_IDENTITY = "iPhone Developer";
"CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer";
CURRENT_PROJECT_VERSION = 426;
CURRENT_PROJECT_VERSION = 427;
DEVELOPMENT_TEAM = SUQ8J2PCT7;
FRAMEWORK_SEARCH_PATHS = (
"$(inherited)",
@ -7928,7 +7944,7 @@
"$(SRCROOT)",
);
LLVM_LTO = NO;
MARKETING_VERSION = 2.4.2;
MARKETING_VERSION = 2.4.3;
OTHER_LDFLAGS = "$(inherited)";
PRODUCT_BUNDLE_IDENTIFIER = "com.loki-project.loki-messenger";
PRODUCT_NAME = Session;

View File

@ -114,7 +114,7 @@ final class IncomingCallBanner: UIView, UIGestureRecognizerDelegate {
publicKey: call.sessionId,
threadVariant: .contact,
customImageData: nil,
profile: Storage.shared.read { db in Profile.fetchOrCreate(db, id: call.sessionId) },
profile: Storage.shared.read { [sessionId = call.sessionId] db in Profile.fetchOrCreate(db, id: sessionId) },
additionalProfile: nil
)
displayNameLabel.text = call.contactName
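
The change above moves call.sessionId into the closure's capture list, so the database read closure copies just the string instead of capturing the whole call object. A minimal sketch of that capture-list pattern, with hypothetical Call and read stand-ins:

import Foundation

final class Call {
    let sessionId: String
    init(sessionId: String) { self.sessionId = sessionId }
}

// Stand-in for Storage.shared.read { db in ... }.
func read<T>(_ work: (String) -> T) -> T { return work("db") }

let call: Call = Call(sessionId: "05abc123")

// Capturing only the value needed: the closure copies the `sessionId` String,
// so it no longer holds a reference to `call` while the read runs.
let summary: String = read { [sessionId = call.sessionId] db in
    "Fetched profile for \(sessionId) via \(db)"
}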

View File

@ -324,7 +324,7 @@ class AppDelegate: UIResponder, UIApplicationDelegate, UNUserNotificationCenterD
// the user is in an invalid state (and should have already been shown a modal)
guard success else { return }
SNLog("RootViewController ready, readying remaining processes")
SNLog("RootViewController ready for state: \(Onboarding.State.current), readying remaining processes")
self?.initialLaunchFailed = false
/// Trigger any launch-specific jobs and start the JobRunner with `JobRunner.appDidFinishLaunching()` some

View File

@ -130,10 +130,6 @@ final class LinkDeviceVC: BaseVC, UIPageViewControllerDataSource, UIPageViewCont
// MARK: - Interaction
@objc private func close() {
dismiss(animated: true, completion: nil)
}
func controller(_ controller: QRCodeScanningViewController, didDetectQRCodeWith string: String, onError: (() -> ())?) {
let seed = Data(hex: string)
continueWithSeed(seed, onError: onError)

View File

@ -121,7 +121,7 @@ enum Onboarding {
.eraseToAnyPublisher()
}
enum State {
enum State: CustomStringConvertible {
case newUser
case missingName
case completed
@ -138,6 +138,14 @@ enum Onboarding {
// Otherwise we have enough for a full user and can start the app
return .completed
}
var description: String {
switch self {
case .newUser: return "New User" // stringlint:disable
case .missingName: return "Missing Name" // stringlint:disable
case .completed: return "Completed" // stringlint:disable
}
}
}
enum Flow {
@ -145,7 +153,7 @@ enum Onboarding {
/// If the user returns to an earlier screen during Onboarding we might need to clear out a partially created
/// account (eg. returning from the PN setting screen to the seed entry screen when linking a device)
func unregister() {
func unregister(using dependencies: Dependencies = Dependencies()) {
// Clear the in-memory state from SessionUtil
SessionUtil.clearMemoryState()
@ -165,6 +173,9 @@ enum Onboarding {
profileNameRetrievalIdentifier.mutate { $0 = nil }
profileNameRetrievalPublisher.mutate { $0 = nil }
// Clear the cached 'encodedPublicKey' if needed
dependencies.caches.mutate(cache: .general) { $0.encodedPublicKey = nil }
UserDefaults.standard[.hasSyncedInitialConfiguration] = false
}
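
unregister now takes a Dependencies argument with a default value, the usual pattern for making a call site test-injectable without changing production callers. A small sketch, assuming a hypothetical Dependencies container:

import Foundation

// Hypothetical stand-in for the app's dependency container.
struct Dependencies {
    var now: () -> Date = { Date() }
}

// Production callers use the default; tests inject a customised container.
func unregister(using dependencies: Dependencies = Dependencies()) {
    print("Unregistered at \(dependencies.now())")
}

unregister()                                                             // real dependencies
unregister(using: Dependencies(now: { Date(timeIntervalSince1970: 0) })) // fixed clock in tests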

View File

@ -8,6 +8,8 @@ import SignalUtilitiesKit
final class SeedVC: BaseVC {
public static func mnemonic() throws -> String {
let dbIsValid: Bool = Storage.shared.isValid
let dbHasRead: Bool = Storage.shared.hasSuccessfullyRead
let dbHasWritten: Bool = Storage.shared.hasSuccessfullyWritten
let dbIsSuspendedUnsafe: Bool = Storage.shared.isSuspendedUnsafe
if let hexEncodedSeed: String = Identity.fetchHexEncodedSeed() {
@ -19,6 +21,8 @@ final class SeedVC: BaseVC {
let hasStoredEdKeyPair: Bool = (Identity.fetchUserEd25519KeyPair() != nil)
let dbStates: [String] = [
"dbIsValid: \(dbIsValid)", // stringlint:disable
"dbHasRead: \(dbHasRead)", // stringlint:disable
"dbHasWritten: \(dbHasWritten)", // stringlint:disable
"dbIsSuspendedUnsafe: \(dbIsSuspendedUnsafe)", // stringlint:disable
"storedSeed: false", // stringlint:disable
"userPublicKey: \(hasStoredPublicKey)", // stringlint:disable

View File

@ -8,7 +8,7 @@ import SessionUIKit
import SignalUtilitiesKit
import SessionUtilitiesKit
class BlockedContactsViewModel: SessionTableViewModel<NoNav, BlockedContactsViewModel.Section, Profile> {
class BlockedContactsViewModel: SessionTableViewModel<NoNav, BlockedContactsViewModel.Section, BlockedContactsViewModel.DataModel> {
// MARK: - Section
public enum Section: SessionTableSection {
@ -39,10 +39,14 @@ class BlockedContactsViewModel: SessionTableViewModel<NoNav, BlockedContactsView
// doesn't stutter (it should load basically immediately but without this there is a
// distinct stutter)
_pagedDataObserver = PagedDatabaseObserver(
pagedTable: Profile.self,
pagedTable: Contact.self,
pageSize: BlockedContactsViewModel.pageSize,
idColumn: .id,
observedChanges: [
PagedData.ObservedChanges(
table: Contact.self,
columns: [.id, .isBlocked]
),
PagedData.ObservedChanges(
table: Profile.self,
columns: [
@ -50,16 +54,12 @@ class BlockedContactsViewModel: SessionTableViewModel<NoNav, BlockedContactsView
.name,
.nickname,
.profilePictureFileName
]
),
PagedData.ObservedChanges(
table: Contact.self,
columns: [.isBlocked],
],
joinToPagedType: {
let profile: TypedTableAlias<Profile> = TypedTableAlias()
let contact: TypedTableAlias<Contact> = TypedTableAlias()
let profile: TypedTableAlias<Profile> = TypedTableAlias()
return SQL("JOIN \(Contact.self) ON \(contact[.id]) = \(profile[.id])")
return SQL("JOIN \(Profile.self) ON \(profile[.id]) = \(contact[.id])")
}()
)
],
@ -101,7 +101,7 @@ class BlockedContactsViewModel: SessionTableViewModel<NoNav, BlockedContactsView
private let contactDataSubject: CurrentValueSubject<([SectionModel], StagedChangeset<[SectionModel]>), Never> = CurrentValueSubject(([], StagedChangeset()))
private let selectedContactIdsSubject: CurrentValueSubject<Set<String>, Never> = CurrentValueSubject([])
private var _pagedDataObserver: PagedDatabaseObserver<Profile, DataModel>?
private var _pagedDataObserver: PagedDatabaseObserver<Contact, DataModel>?
public override var pagedDataObserver: TransactionObserver? { _pagedDataObserver }
public override var observableTableData: ObservableData { _observableTableData }
@ -138,26 +138,32 @@ class BlockedContactsViewModel: SessionTableViewModel<NoNav, BlockedContactsView
section: .contacts,
elements: data
.sorted { lhs, rhs -> Bool in
lhs.profile.displayName() < rhs.profile.displayName()
let lhsValue: String = (lhs.profile?.displayName() ?? lhs.id)
let rhsValue: String = (rhs.profile?.displayName() ?? rhs.id)
return (lhsValue < rhsValue)
}
.map { [weak self] model -> SessionCell.Info<Profile> in
.map { [weak self] model -> SessionCell.Info<DataModel> in
SessionCell.Info(
id: model.profile,
leftAccessory: .profile(id: model.profile.id, profile: model.profile),
title: model.profile.displayName(),
id: model,
leftAccessory: .profile(id: model.id, profile: model.profile),
title: (
model.profile?.displayName() ??
Profile.truncated(id: model.id, truncating: .middle)
),
rightAccessory: .radio(
isSelected: {
self?.selectedContactIdsSubject.value.contains(model.profile.id) == true
self?.selectedContactIdsSubject.value.contains(model.id) == true
}
),
onTap: {
var updatedSelectedIds: Set<String> = (self?.selectedContactIdsSubject.value ?? [])
if !updatedSelectedIds.contains(model.profile.id) {
updatedSelectedIds.insert(model.profile.id)
if !updatedSelectedIds.contains(model.id) {
updatedSelectedIds.insert(model.id)
}
else {
updatedSelectedIds.remove(model.profile.id)
updatedSelectedIds.remove(model.id)
}
self?.selectedContactIdsSubject.send(updatedSelectedIds)
@ -182,7 +188,7 @@ class BlockedContactsViewModel: SessionTableViewModel<NoNav, BlockedContactsView
guard
let section: BlockedContactsViewModel.SectionModel = self.tableData
.first(where: { section in section.model == .contacts }),
let info: SessionCell.Info<Profile> = section.elements
let info: SessionCell.Info<DataModel> = section.elements
.first(where: { info in info.id.id == contactId })
else { return contactId }
@ -262,20 +268,22 @@ class BlockedContactsViewModel: SessionTableViewModel<NoNav, BlockedContactsView
public typealias Columns = CodingKeys
public enum CodingKeys: String, CodingKey, ColumnExpression, CaseIterable {
case rowId
case id
case profile
}
public var differenceIdentifier: String { profile.id }
public var id: String { profile.id }
public var differenceIdentifier: String { id }
public let rowId: Int64
public let profile: Profile
public let id: String
public let profile: Profile?
static func query(
filterSQL: SQL,
orderSQL: SQL
) -> (([Int64]) -> any FetchRequest<DataModel>) {
return { rowIds -> any FetchRequest<DataModel> in
let contact: TypedTableAlias<Contact> = TypedTableAlias()
let profile: TypedTableAlias<Profile> = TypedTableAlias()
/// **Note:** The `numColumnsBeforeProfile` value **MUST** match the number of fields before
@ -283,15 +291,17 @@ class BlockedContactsViewModel: SessionTableViewModel<NoNav, BlockedContactsView
/// parse and might throw
///
/// Explicitly set default values for the fields ignored for search results
let numColumnsBeforeProfile: Int = 1
let numColumnsBeforeProfile: Int = 2
let request: SQLRequest<DataModel> = """
SELECT
\(profile[.rowId]) AS \(DataModel.Columns.rowId),
\(contact[.rowId]) AS \(DataModel.Columns.rowId),
\(contact[.id]),
\(profile.allColumns)
FROM \(Profile.self)
WHERE \(profile[.rowId]) IN \(rowIds)
FROM \(Contact.self)
LEFT JOIN \(Profile.self) ON \(profile[.id]) = \(contact[.id])
WHERE \(contact[.rowId]) IN \(rowIds)
ORDER BY \(orderSQL)
"""
@ -309,10 +319,10 @@ class BlockedContactsViewModel: SessionTableViewModel<NoNav, BlockedContactsView
}
static var optimisedJoinSQL: SQL = {
let profile: TypedTableAlias<Profile> = TypedTableAlias()
let contact: TypedTableAlias<Contact> = TypedTableAlias()
let profile: TypedTableAlias<Profile> = TypedTableAlias()
return SQL("JOIN \(Contact.self) ON \(contact[.id]) = \(profile[.id])")
return SQL("LEFT JOIN \(Profile.self) ON \(profile[.id]) = \(contact[.id])")
}()
static var filterSQL: SQL = {
@ -322,9 +332,10 @@ class BlockedContactsViewModel: SessionTableViewModel<NoNav, BlockedContactsView
}()
static let orderSQL: SQL = {
let contact: TypedTableAlias<Contact> = TypedTableAlias()
let profile: TypedTableAlias<Profile> = TypedTableAlias()
return SQL("IFNULL(IFNULL(\(profile[.nickname]), \(profile[.name])), \(profile[.id])) ASC")
return SQL("IFNULL(IFNULL(\(profile[.nickname]), \(profile[.name])), \(contact[.id])) ASC")
}()
}
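
With this change the blocked-contacts list is paged from the Contact table and Profile is LEFT JOINed in, so a blocked contact without a profile row still appears (the UI falls back to a truncated id). A hedged, de-interpolated approximation of the SQLRequest built above; table and column names assume GRDB's default record names:

let blockedContactsQuery: String = """
    SELECT
        contact.rowid AS rowId,
        contact.id,
        profile.*
    FROM contact
    LEFT JOIN profile ON profile.id = contact.id
    WHERE contact.rowid IN (:pagedRowIds) -- blocking itself is filtered via the separate filterSQL
    ORDER BY IFNULL(IFNULL(profile.nickname, profile.name), contact.id) ASC
    """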

View File

@ -77,13 +77,16 @@ class BlockedContactCell: UITableViewCell {
public func update(with cellViewModel: BlockedContactsViewModel.DataModel, isSelected: Bool) {
profilePictureView.update(
publicKey: cellViewModel.profile.id,
publicKey: cellViewModel.id,
threadVariant: .contact,
customImageData: nil,
profile: cellViewModel.profile,
additionalProfile: nil
)
selectionView.text = cellViewModel.profile.displayName()
selectionView.text = (
cellViewModel.profile?.displayName() ??
Profile.truncated(id: cellViewModel.id, truncating: .middle)
)
selectionView.update(isSelected: isSelected)
}
}

View File

@ -3,7 +3,7 @@ import GRDB
import SessionUtilitiesKit
public enum SNMessagingKit: MigratableTarget { // Just to make the external API nice
public static func migrations(_ db: Database) -> TargetMigrations {
public static func migrations() -> TargetMigrations {
return TargetMigrations(
identifier: .messagingKit,
migrations: [

View File

@ -9,6 +9,14 @@ enum _001_InitialSetupMigration: Migration {
static let identifier: String = "initialSetup" // stringlint:disable
static let needsConfigSync: Bool = false
static let minExpectedRunDuration: TimeInterval = 0.1
static let fetchedTables: [(TableRecord & FetchableRecord).Type] = []
static let createdOrAlteredTables: [(TableRecord & FetchableRecord).Type] = [
Contact.self, Profile.self, SessionThread.self, DisappearingMessagesConfiguration.self,
ClosedGroup.self, ClosedGroupKeyPair.self, OpenGroup.self, Capability.self, BlindedIdLookup.self,
GroupMember.self, Interaction.self, RecipientState.self, Attachment.self,
InteractionAttachment.self, Quote.self, LinkPreview.self, ControlMessageProcessRecord.self,
ThreadTypingIndicator.self
]
public static let fullTextSearchTokenizer: FTS5TokenizerDescriptor = {
// Define the tokenizer to be used in all the FTS tables
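
Each migration touched by this commit now declares fetchedTables and createdOrAlteredTables. The updated Migration protocol itself is not shown in the diff, so the following is only an assumed, partial sketch of what the added requirements presumably look like:

import Foundation
import GRDB

// Assumed, partial sketch: each migration lists the tables it reads and the tables it
// creates or alters, so the new DatabaseSpec can re-decode those records after running
// migration chains.
protocol Migration {
    static var identifier: String { get }
    static var needsConfigSync: Bool { get }
    static var minExpectedRunDuration: TimeInterval { get }
    static var fetchedTables: [(TableRecord & FetchableRecord).Type] { get }
    static var createdOrAlteredTables: [(TableRecord & FetchableRecord).Type] { get }

    static func migrate(_ db: Database) throws
}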

View File

@ -12,6 +12,8 @@ enum _002_SetupStandardJobs: Migration {
static let identifier: String = "SetupStandardJobs" // stringlint:disable
static let needsConfigSync: Bool = false
static let minExpectedRunDuration: TimeInterval = 0.1
static let fetchedTables: [(TableRecord & FetchableRecord).Type] = []
static let createdOrAlteredTables: [(TableRecord & FetchableRecord).Type] = []
static func migrate(_ db: Database) throws {
// Start by adding the jobs that don't have collections (in the jobs like these

View File

@ -16,6 +16,8 @@ enum _003_YDBToGRDBMigration: Migration {
static let identifier: String = "YDBToGRDBMigration"
static let needsConfigSync: Bool = true
static let minExpectedRunDuration: TimeInterval = 20
static let fetchedTables: [(TableRecord & FetchableRecord).Type] = [Identity.self]
static let createdOrAlteredTables: [(TableRecord & FetchableRecord).Type] = []
static func migrate(_ db: Database) throws {
guard let dbConnection: YapDatabaseConnection = SUKLegacy.newDatabaseConnection() else {

View File

@ -11,6 +11,8 @@ enum _004_RemoveLegacyYDB: Migration {
static let identifier: String = "RemoveLegacyYDB" // stringlint:disable
static let needsConfigSync: Bool = false
static let minExpectedRunDuration: TimeInterval = 0.1
static let fetchedTables: [(TableRecord & FetchableRecord).Type] = []
static let createdOrAlteredTables: [(TableRecord & FetchableRecord).Type] = []
static func migrate(_ db: Database) throws {
try? SUKLegacy.deleteLegacyDatabaseFilesAndKey()

View File

@ -10,6 +10,8 @@ enum _005_FixDeletedMessageReadState: Migration {
static let identifier: String = "FixDeletedMessageReadState" // stringlint:disable
static let needsConfigSync: Bool = false
static let minExpectedRunDuration: TimeInterval = 0.01
static let fetchedTables: [(TableRecord & FetchableRecord).Type] = []
static let createdOrAlteredTables: [(TableRecord & FetchableRecord).Type] = []
static func migrate(_ db: Database) throws {
_ = try Interaction

View File

@ -11,6 +11,8 @@ enum _006_FixHiddenModAdminSupport: Migration {
static let identifier: String = "FixHiddenModAdminSupport" // stringlint:disable
static let needsConfigSync: Bool = false
static let minExpectedRunDuration: TimeInterval = 0.01
static let fetchedTables: [(TableRecord & FetchableRecord).Type] = []
static let createdOrAlteredTables: [(TableRecord & FetchableRecord).Type] = [GroupMember.self]
static func migrate(_ db: Database) throws {
try db.alter(table: GroupMember.self) { t in

View File

@ -10,6 +10,8 @@ enum _007_HomeQueryOptimisationIndexes: Migration {
static let identifier: String = "HomeQueryOptimisationIndexes" // stringlint:disable
static let needsConfigSync: Bool = false
static let minExpectedRunDuration: TimeInterval = 0.01
static let fetchedTables: [(TableRecord & FetchableRecord).Type] = []
static let createdOrAlteredTables: [(TableRecord & FetchableRecord).Type] = []
static func migrate(_ db: Database) throws {
try db.create(

View File

@ -10,6 +10,8 @@ enum _008_EmojiReacts: Migration {
static let identifier: String = "EmojiReacts" // stringlint:disable
static let needsConfigSync: Bool = false
static let minExpectedRunDuration: TimeInterval = 0.01
static let fetchedTables: [(TableRecord & FetchableRecord).Type] = []
static let createdOrAlteredTables: [(TableRecord & FetchableRecord).Type] = [Reaction.self]
static func migrate(_ db: Database) throws {
try db.create(table: Reaction.self) { t in

View File

@ -9,6 +9,8 @@ enum _009_OpenGroupPermission: Migration {
static let identifier: String = "OpenGroupPermission" // stringlint:disable
static let needsConfigSync: Bool = false
static let minExpectedRunDuration: TimeInterval = 0.01
static let fetchedTables: [(TableRecord & FetchableRecord).Type] = []
static let createdOrAlteredTables: [(TableRecord & FetchableRecord).Type] = [OpenGroup.self]
static func migrate(_ db: GRDB.Database) throws {
try db.alter(table: OpenGroup.self) { t in

View File

@ -11,6 +11,8 @@ enum _010_AddThreadIdToFTS: Migration {
static let identifier: String = "AddThreadIdToFTS" // stringlint:disable
static let needsConfigSync: Bool = false
static let minExpectedRunDuration: TimeInterval = 3
static let fetchedTables: [(TableRecord & FetchableRecord).Type] = []
static let createdOrAlteredTables: [(TableRecord & FetchableRecord).Type] = []
static func migrate(_ db: Database) throws {
// Can't actually alter a virtual table in SQLite so we need to drop and recreate it,

View File

@ -11,6 +11,8 @@ enum _011_AddPendingReadReceipts: Migration {
static let identifier: String = "AddPendingReadReceipts" // stringlint:disable
static let needsConfigSync: Bool = false
static let minExpectedRunDuration: TimeInterval = 0.01
static let fetchedTables: [(TableRecord & FetchableRecord).Type] = []
static let createdOrAlteredTables: [(TableRecord & FetchableRecord).Type] = [PendingReadReceipt.self]
static func migrate(_ db: Database) throws {
try db.create(table: PendingReadReceipt.self) { t in

View File

@ -10,6 +10,8 @@ enum _012_AddFTSIfNeeded: Migration {
static let identifier: String = "AddFTSIfNeeded" // stringlint:disable
static let needsConfigSync: Bool = false
static let minExpectedRunDuration: TimeInterval = 0.01
static let fetchedTables: [(TableRecord & FetchableRecord).Type] = []
static let createdOrAlteredTables: [(TableRecord & FetchableRecord).Type] = []
static func migrate(_ db: Database) throws {
// Fix an issue that the fullTextSearchTable was dropped unintentionally and global search won't work.

View File

@ -14,6 +14,12 @@ enum _013_SessionUtilChanges: Migration {
static let identifier: String = "SessionUtilChanges"
static let needsConfigSync: Bool = true
static let minExpectedRunDuration: TimeInterval = 0.4
static let fetchedTables: [(TableRecord & FetchableRecord).Type] = [
GroupMember.self, ClosedGroupKeyPair.self, SessionThread.self
]
static let createdOrAlteredTables: [(TableRecord & FetchableRecord).Type] = [
SessionThread.self, Profile.self, GroupMember.self, ClosedGroupKeyPair.self, ConfigDump.self
]
static func migrate(_ db: Database) throws {
// Add `markedAsUnread` to the thread table

View File

@ -11,6 +11,11 @@ enum _014_GenerateInitialUserConfigDumps: Migration {
static let identifier: String = "GenerateInitialUserConfigDumps" // stringlint:disable
static let needsConfigSync: Bool = true
static let minExpectedRunDuration: TimeInterval = 4.0
static let fetchedTables: [(TableRecord & FetchableRecord).Type] = [
Identity.self, SessionThread.self, Contact.self, Profile.self, ClosedGroup.self,
OpenGroup.self, DisappearingMessagesConfiguration.self, GroupMember.self, ConfigDump.self
]
static let createdOrAlteredTables: [(TableRecord & FetchableRecord).Type] = []
static func migrate(_ db: Database) throws {
// If we have no ed25519 key then there is no need to create cached dump data

View File

@ -11,6 +11,10 @@ enum _015_BlockCommunityMessageRequests: Migration {
static let needsConfigSync: Bool = false
static let minExpectedRunDuration: TimeInterval = 0.01
static var requirements: [MigrationRequirement] = [.sessionUtilStateLoaded]
static let fetchedTables: [(TableRecord & FetchableRecord).Type] = [
Identity.self, Setting.self
]
static let createdOrAlteredTables: [(TableRecord & FetchableRecord).Type] = [Profile.self]
static func migrate(_ db: Database) throws {
// Add the new 'Profile' properties

View File

@ -12,6 +12,8 @@ enum _016_MakeBrokenProfileTimestampsNullable: Migration {
static let needsConfigSync: Bool = false
static let minExpectedRunDuration: TimeInterval = 0.1
static var requirements: [MigrationRequirement] = [.sessionUtilStateLoaded]
static let fetchedTables: [(TableRecord & FetchableRecord).Type] = []
static let createdOrAlteredTables: [(TableRecord & FetchableRecord).Type] = [Profile.self]
static func migrate(_ db: Database) throws {
/// SQLite doesn't support altering columns after creation so we need to create a new table with the setup we

View File

@ -10,6 +10,12 @@ enum _017_DisappearingMessagesConfiguration: Migration {
static let needsConfigSync: Bool = false
static let minExpectedRunDuration: TimeInterval = 0.1
static var requirements: [MigrationRequirement] = [.sessionUtilStateLoaded]
static let fetchedTables: [(TableRecord & FetchableRecord).Type] = [
Identity.self, DisappearingMessagesConfiguration.self
]
static let createdOrAlteredTables: [(TableRecord & FetchableRecord).Type] = [
DisappearingMessagesConfiguration.self, Contact.self
]
static func migrate(_ db: GRDB.Database) throws {
try db.alter(table: DisappearingMessagesConfiguration.self) { t in

View File

@ -138,12 +138,12 @@ public extension Profile {
try container.encode(id, forKey: .id)
try container.encode(name, forKey: .name)
try container.encode(lastNameUpdate, forKey: .lastNameUpdate)
try container.encodeIfPresent(lastNameUpdate, forKey: .lastNameUpdate)
try container.encodeIfPresent(nickname, forKey: .nickname)
try container.encodeIfPresent(profilePictureUrl, forKey: .profilePictureUrl)
try container.encodeIfPresent(profilePictureFileName, forKey: .profilePictureFileName)
try container.encodeIfPresent(profileEncryptionKey, forKey: .profileEncryptionKey)
try container.encode(lastProfilePictureUpdate, forKey: .lastProfilePictureUpdate)
try container.encodeIfPresent(lastProfilePictureUpdate, forKey: .lastProfilePictureUpdate)
try container.encodeIfPresent(blocksCommunityMessageRequests, forKey: .blocksCommunityMessageRequests)
try container.encodeIfPresent(lastBlocksCommunityMessageRequests, forKey: .lastBlocksCommunityMessageRequests)
}
@ -334,9 +334,9 @@ public extension Profile {
guard id.count > 8 else { return id }
switch truncating {
case .start: return "...\(id.suffix(8))"
case .middle: return "\(id.prefix(4))...\(id.suffix(4))"
case .end: return "\(id.prefix(8))..."
case .start: return "...\(id.suffix(8))" //stringlint:disable
case .middle: return "\(id.prefix(4))...\(id.suffix(4))" //stringlint:disable
case .end: return "\(id.prefix(8))..." //stringlint:disable
}
}
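
For reference, a standalone restatement of the truncation rules above together with example outputs (the 16-character id is illustrative):

import Foundation

enum Truncation { case start, middle, end }

func truncated(id: String, truncating: Truncation) -> String {
    guard id.count > 8 else { return id }

    switch truncating {
        case .start: return "...\(id.suffix(8))"
        case .middle: return "\(id.prefix(4))...\(id.suffix(4))"
        case .end: return "\(id.prefix(8))..."
    }
}

// For id = "0512345678abcdef":
//   .start  -> "...78abcdef"
//   .middle -> "0512...cdef"
//   .end    -> "05123456..."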

View File

@ -77,7 +77,7 @@ internal extension SessionUtil {
}
}
catch {
SNLog("[libSession] Failed to update/dump updated \(variant) config data due to error: \(error)")
SNLog("[SessionUtil] Failed to update/dump updated \(variant) config data due to error: \(error)")
throw error
}
@ -368,7 +368,7 @@ internal extension SessionUtil {
loopCounter += 1
guard loopCounter < maxLoopCount else {
SNLog("[libSession] Got stuck in infinite loop processing '\(variant.configMessageKind.description)' data")
SNLog("[SessionUtil] Got stuck in infinite loop processing '\(variant.configMessageKind.description)' data")
throw SessionUtilError.processingLoopLimitReached
}
}

View File

@ -45,7 +45,7 @@ public enum SessionUtil {
// MARK: - Variables
internal static func syncDedupeId(_ publicKey: String) -> String {
return "EnqueueConfigurationSyncJob-\(publicKey)"
return "EnqueueConfigurationSyncJob-\(publicKey)" // stringlint:disable
}
/// Returns `true` if there is a config which needs to be pushed, but returns `false` if the configs are all up to date or haven't been
@ -63,7 +63,7 @@ public enum SessionUtil {
public static var libSessionVersion: String { String(cString: LIBSESSION_UTIL_VERSION_STR) }
internal static func lastError(_ conf: UnsafeMutablePointer<config_object>?) -> String {
return (conf?.pointee.last_error.map { String(cString: $0) } ?? "Unknown")
return (conf?.pointee.last_error.map { String(cString: $0) } ?? "Unknown") // stringlint:disable
}
// MARK: - Loading
@ -84,7 +84,7 @@ public enum SessionUtil {
guard
let secretKey: [UInt8] = ed25519SecretKey,
SessionUtil.configStore.wrappedValue.isEmpty
else { return }
else { return SNLog("[SessionUtil] Ignoring loadState for '\(userPublicKey)' due to existing state") }
// If we weren't given a database instance then get one
guard let db: Database = db else {
@ -125,6 +125,8 @@ public enum SessionUtil {
)
}
}
SNLog("[SessionUtil] Completed loadState for '\(userPublicKey)'")
}
private static func loadState(
@ -134,7 +136,7 @@ public enum SessionUtil {
) throws -> UnsafeMutablePointer<config_object>? {
// Setup initial variables (including getting the memory address for any cached data)
var conf: UnsafeMutablePointer<config_object>? = nil
let error: UnsafeMutablePointer<CChar>? = nil
var error: [CChar] = [CChar](repeating: 0, count: 256)
let cachedDump: (data: UnsafePointer<UInt8>, length: Int)? = cachedData?.withUnsafeBytes { unsafeBytes in
return unsafeBytes.baseAddress.map {
(
@ -144,33 +146,26 @@ public enum SessionUtil {
}
}
// No need to deallocate the `cachedDump.data` as it'll automatically be cleaned up by
// the `cachedDump` lifecycle, but need to deallocate the `error` if it gets set
defer {
error?.deallocate()
}
// Try to create the object
var secretKey: [UInt8] = ed25519SecretKey
let result: Int32 = {
switch variant {
case .userProfile:
return user_profile_init(&conf, &secretKey, cachedDump?.data, (cachedDump?.length ?? 0), error)
return user_profile_init(&conf, &secretKey, cachedDump?.data, (cachedDump?.length ?? 0), &error)
case .contacts:
return contacts_init(&conf, &secretKey, cachedDump?.data, (cachedDump?.length ?? 0), error)
return contacts_init(&conf, &secretKey, cachedDump?.data, (cachedDump?.length ?? 0), &error)
case .convoInfoVolatile:
return convo_info_volatile_init(&conf, &secretKey, cachedDump?.data, (cachedDump?.length ?? 0), error)
return convo_info_volatile_init(&conf, &secretKey, cachedDump?.data, (cachedDump?.length ?? 0), &error)
case .userGroups:
return user_groups_init(&conf, &secretKey, cachedDump?.data, (cachedDump?.length ?? 0), error)
return user_groups_init(&conf, &secretKey, cachedDump?.data, (cachedDump?.length ?? 0), &error)
}
}()
guard result == 0 else {
let errorString: String = (error.map { String(cString: $0) } ?? "unknown error")
SNLog("[SessionUtil Error] Unable to create \(variant.rawValue) config object: \(errorString)")
SNLog("[SessionUtil Error] Unable to create \(variant.rawValue) config object: \(String(cString: error))")
throw SessionUtilError.unableToCreateConfigObject
}
@ -241,7 +236,7 @@ public enum SessionUtil {
var cPushData: UnsafeMutablePointer<config_push_data>!
let configCountInfo: String = {
var result: String = "Invalid"
var result: String = "Invalid" // stringlint:disable
try? CExceptionHelper.performSafely {
switch variant {
@ -261,7 +256,7 @@ public enum SessionUtil {
}
}
catch {
SNLog("[libSession] Failed to generate push data for \(variant) config data, size: \(configCountInfo), error: \(error)")
SNLog("[SessionUtil] Failed to generate push data for \(variant) config data, size: \(configCountInfo), error: \(error)")
throw error
}
@ -418,7 +413,7 @@ public enum SessionUtil {
}
}
catch {
SNLog("[libSession] Failed to process merge of \(next.key) config data")
SNLog("[SessionUtil] Failed to process merge of \(next.key) config data")
throw error
}
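
The loadState change above replaces a never-assigned UnsafeMutablePointer<CChar>? with a caller-owned [CChar] buffer passed as &error, giving the C *_init functions somewhere to write their message and removing the need for manual deallocation. A minimal sketch of the pattern, using a hypothetical fake_init stand-in for the C call:

import Foundation

// Hypothetical stand-in for a C-style init call that writes a message into a
// caller-provided error buffer and returns a non-zero status on failure.
func fake_init(_ error: UnsafeMutablePointer<CChar>?) -> Int32 {
    let message: [CChar] = Array("invalid dump".utf8CString) // includes the trailing NUL
    error?.update(from: message, count: message.count)
    return -1
}

// The caller owns the buffer, so nothing needs to be deallocated afterwards.
var error: [CChar] = [CChar](repeating: 0, count: 256)
let result: Int32 = fake_init(&error)

if result != 0 {
    // String(cString:) reads up to the NUL terminator written by the callee.
    print("init failed: \(String(cString: error))")
}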

View File

@ -25,7 +25,7 @@ class MessageSendJobSpec: QuickSpec {
@TestState var interactionAttachment: InteractionAttachment!
@TestState var mockStorage: Storage! = SynchronousStorage(
customWriter: try! DatabaseQueue(),
customMigrationTargets: [
migrationTargets: [
SNUtilitiesKit.self,
SNMessagingKit.self
],

View File

@ -18,7 +18,7 @@ class OpenGroupAPISpec: QuickSpec {
@TestState var mockStorage: Storage! = SynchronousStorage(
customWriter: try! DatabaseQueue(),
customMigrationTargets: [
migrationTargets: [
SNUtilitiesKit.self,
SNMessagingKit.self
],

View File

@ -98,7 +98,7 @@ class OpenGroupManagerSpec: QuickSpec {
@TestState var mockStorage: Storage! = SynchronousStorage(
customWriter: try! DatabaseQueue(),
customMigrationTargets: [
migrationTargets: [
SNUtilitiesKit.self,
SNMessagingKit.self
],

View File

@ -16,7 +16,7 @@ class MessageReceiverDecryptionSpec: QuickSpec {
@TestState var mockStorage: Storage! = SynchronousStorage(
customWriter: try! DatabaseQueue(),
customMigrationTargets: [
migrationTargets: [
SNUtilitiesKit.self,
SNMessagingKit.self
],

View File

@ -16,7 +16,7 @@ class MessageReceiverSpec: QuickSpec {
@TestState var mockStorage: Storage! = SynchronousStorage(
customWriter: try! DatabaseQueue(),
customMigrationTargets: [
migrationTargets: [
SNUtilitiesKit.self,
SNSnodeKit.self,
SNMessagingKit.self,

View File

@ -16,7 +16,7 @@ class MessageSenderEncryptionSpec: QuickSpec {
@TestState var mockStorage: Storage! = SynchronousStorage(
customWriter: try! DatabaseQueue(),
customMigrationTargets: [
migrationTargets: [
SNUtilitiesKit.self,
SNMessagingKit.self
],

View File

@ -5,7 +5,7 @@ import GRDB
import SessionUtilitiesKit
public enum SNSnodeKit: MigratableTarget { // Just to make the external API nice
public static func migrations(_ db: Database) -> TargetMigrations {
public static func migrations() -> TargetMigrations {
return TargetMigrations(
identifier: .snodeKit,
migrations: [

View File

@ -9,6 +9,10 @@ enum _001_InitialSetupMigration: Migration {
static let identifier: String = "initialSetup" // stringlint:disable
static let needsConfigSync: Bool = false
static let minExpectedRunDuration: TimeInterval = 0.1
static let fetchedTables: [(TableRecord & FetchableRecord).Type] = []
static let createdOrAlteredTables: [(TableRecord & FetchableRecord).Type] = [
Snode.self, SnodeSet.self, SnodeReceivedMessageInfo.self
]
static func migrate(_ db: Database) throws {
try db.create(table: Snode.self) { t in

View File

@ -11,6 +11,8 @@ enum _002_SetupStandardJobs: Migration {
static let identifier: String = "SetupStandardJobs" // stringlint:disable
static let needsConfigSync: Bool = false
static let minExpectedRunDuration: TimeInterval = 0.1
static let fetchedTables: [(TableRecord & FetchableRecord).Type] = []
static let createdOrAlteredTables: [(TableRecord & FetchableRecord).Type] = []
static func migrate(_ db: Database) throws {
try autoreleasepool {

View File

@ -11,6 +11,8 @@ enum _003_YDBToGRDBMigration: Migration {
static let target: TargetMigrations.Identifier = .snodeKit
static let identifier: String = "YDBToGRDBMigration"
static let needsConfigSync: Bool = false
static let fetchedTables: [(TableRecord & FetchableRecord).Type] = []
static let createdOrAlteredTables: [(TableRecord & FetchableRecord).Type] = []
/// This migration can take a while if it's a very large database or there are lots of closed groups (want this to account
/// for about 10% of the progress bar so we intentionally have a higher `minExpectedRunDuration` so show more

View File

@ -9,6 +9,8 @@ enum _004_FlagMessageHashAsDeletedOrInvalid: Migration {
static let target: TargetMigrations.Identifier = .snodeKit
static let identifier: String = "FlagMessageHashAsDeletedOrInvalid" // stringlint:disable
static let needsConfigSync: Bool = false
static let fetchedTables: [(TableRecord & FetchableRecord).Type] = []
static let createdOrAlteredTables: [(TableRecord & FetchableRecord).Type] = [SnodeReceivedMessageInfo.self]
/// This migration adds a flag to the `SnodeReceivedMessageInfo` so that when deleting interactions we can
/// ignore their hashes when subsequently trying to fetch new messages (which results in the storage server returning

View File

@ -16,7 +16,7 @@ class ThreadDisappearingMessagesSettingsViewModelSpec: QuickSpec {
@TestState var mockStorage: Storage! = SynchronousStorage(
customWriter: try! DatabaseQueue(),
customMigrationTargets: [
migrationTargets: [
SNUtilitiesKit.self,
SNSnodeKit.self,
SNMessagingKit.self,

View File

@ -16,7 +16,7 @@ class ThreadSettingsViewModelSpec: QuickSpec {
@TestState var mockStorage: Storage! = SynchronousStorage(
customWriter: try! DatabaseQueue(),
customMigrationTargets: [
migrationTargets: [
SNUtilitiesKit.self,
SNSnodeKit.self,
SNMessagingKit.self,

View File

@ -0,0 +1,341 @@
// Copyright © 2023 Rangeproof Pty Ltd. All rights reserved.
import Foundation
import GRDB
import Quick
import Nimble
import SessionUIKit
import SessionSnodeKit
import SessionMessagingKit
@testable import Session
@testable import SessionUtilitiesKit
class DatabaseSpec: QuickSpec {
fileprivate static let ignoredTables: Set<String> = [
"sqlite_sequence", "grdb_migrations", "*_fts*"
]
override class func spec() {
// MARK: Configuration
@TestState var dependencies: Dependencies! = Dependencies()
@TestState var mockStorage: Storage! = SynchronousStorage(customWriter: try! DatabaseQueue())
@TestState var initialResult: Result<Void, Error>! = nil
@TestState var finalResult: Result<Void, Error>! = nil
let allMigrations: [Storage.KeyedMigration] = SynchronousStorage.sortedMigrationInfo(
migrationTargets: [
SNUtilitiesKit.self,
SNSnodeKit.self,
SNMessagingKit.self,
SNUIKit.self
]
)
let dynamicTests: [MigrationTest] = MigrationTest.extractTests(allMigrations)
let allDatabaseTypes: [(TableRecord & FetchableRecord).Type] = MigrationTest.extractDatabaseTypes(allMigrations)
MigrationTest.explicitValues = [
// Specific enum values needed
TableColumn(SessionThread.self, .notificationSound): 1000,
TableColumn(ConfigDump.self, .variant): "userProfile",
// libSession will throw if we try to insert a community with an invalid
// 'server' value or a room that is too long
TableColumn(OpenGroup.self, .server): "https://www.oxen.io",
TableColumn(OpenGroup.self, .roomToken): "testRoom",
// libSession will fail to load state if the ConfigDump data is invalid
TableColumn(ConfigDump.self, .data): Data()
]
// MARK: - a Database
describe("a Database") {
beforeEach {
// FIXME: These should be mocked out instead of set this way
dependencies.caches.mutate(cache: .general) { $0.encodedPublicKey = "05\(TestConstants.publicKey)" }
SessionUtil.clearMemoryState()
}
// MARK: -- can be created from an empty state
it("can be created from an empty state") {
mockStorage.perform(
migrationTargets: [
SNUtilitiesKit.self,
SNSnodeKit.self,
SNMessagingKit.self,
SNUIKit.self
],
async: false,
onProgressUpdate: nil,
onMigrationRequirement: { _, _ in },
onComplete: { result, _ in initialResult = result }
)
expect(initialResult).to(beSuccess())
}
// MARK: -- can still parse the database types
it("can still parse the database types") {
mockStorage.perform(
sortedMigrations: allMigrations,
async: false,
onProgressUpdate: nil,
onMigrationRequirement: { _, _ in },
onComplete: { result, _ in initialResult = result }
)
expect(initialResult).to(beSuccess())
// Generate dummy data (fetching below won't do anything)
expect(try MigrationTest.generateDummyData(mockStorage, nullsWherePossible: false)).toNot(throwError())
// Fetch the records which are required by the migrations or were modified by them to
// ensure the decoding is also still working correctly
mockStorage.read { db in
allDatabaseTypes.forEach { table in
expect { try table.fetchAll(db) }.toNot(throwError())
}
}
}
// MARK: -- can still parse the database types setting null where possible
it("can still parse the database types setting null where possible") {
mockStorage.perform(
sortedMigrations: allMigrations,
async: false,
onProgressUpdate: nil,
onMigrationRequirement: { _, _ in },
onComplete: { result, _ in initialResult = result }
)
expect(initialResult).to(beSuccess())
// Generate dummy data (fetching below won't do anything)
expect(try MigrationTest.generateDummyData(mockStorage, nullsWherePossible: true)).toNot(throwError())
// Fetch the records which are required by the migrations or were modified by them to
// ensure the decoding is also still working correctly
mockStorage.read { db in
allDatabaseTypes.forEach { table in
expect { try table.fetchAll(db) }.toNot(throwError())
}
}
}
// MARK: -- can migrate from X to Y
dynamicTests.forEach { test in
it("can migrate from \(test.initialMigrationKey) to \(test.finalMigrationKey)") {
mockStorage.perform(
sortedMigrations: test.initialMigrations,
async: false,
onProgressUpdate: nil,
onMigrationRequirement: { _, _ in },
onComplete: { result, _ in initialResult = result }
)
expect(initialResult).to(beSuccess())
// Generate dummy data (otherwise structural issues or invalid foreign keys won't error)
expect(try MigrationTest.generateDummyData(mockStorage, nullsWherePossible: false)).toNot(throwError())
// Perform the target migrations to ensure the migrations themselves worked correctly
mockStorage.perform(
sortedMigrations: test.migrationsToTest,
async: false,
onProgressUpdate: nil,
onMigrationRequirement: { _, _ in },
onComplete: { result, _ in finalResult = result }
)
expect(finalResult).to(beSuccess())
/// Ensure all of the `fetchedTables` records can still be decoded correctly after the migrations have completed (since
/// we perform multiple migrations above it's possible these won't work after the `initialMigrations` but actually will
/// work when required as an intermediate migration could have satisfied the data requirements)
mockStorage.read { db in
test.migrationsToTest.forEach { _, _, migration in
migration.fetchedTables.forEach { table in
expect { try table.fetchAll(db) }.toNot(throwError())
}
}
}
}
}
}
}
}
// MARK: - Convenience
private extension Database.ColumnType {
init(rawValue: Any) {
switch rawValue as? String {
case .some(let value): self = Database.ColumnType(rawValue: value)
case .none: self = Database.ColumnType.any
}
}
}
private struct TableColumn: Hashable {
let tableName: String
let columnName: String
init<T: TableRecord & ColumnExpressible>(_ type: T.Type, _ column: T.Columns) {
self.tableName = T.databaseTableName
self.columnName = column.name
}
init?(_ tableName: String, _ columnName: Any?) {
guard let finalColumnName: String = columnName as? String else { return nil }
self.tableName = tableName
self.columnName = finalColumnName
}
}
private class MigrationTest {
static var explicitValues: [TableColumn: (any DatabaseValueConvertible)] = [:]
let initialMigrations: [Storage.KeyedMigration]
let migrationsToTest: [Storage.KeyedMigration]
var initialMigrationKey: String { return (initialMigrations.last?.key ?? "an empty database") }
var finalMigrationKey: String { return (migrationsToTest.last?.key ?? "invalid") }
private init(
initialMigrations: [Storage.KeyedMigration],
migrationsToTest: [Storage.KeyedMigration]
) {
self.initialMigrations = initialMigrations
self.migrationsToTest = migrationsToTest
}
// MARK: - Test Data
static func extractTests(_ allMigrations: [Storage.KeyedMigration]) -> [MigrationTest] {
return (0..<(allMigrations.count - 1))
.flatMap { index -> [MigrationTest] in
((index + 1)..<allMigrations.count).map { targetMigrationIndex -> MigrationTest in
MigrationTest(
initialMigrations: Array(allMigrations[0..<index]),
migrationsToTest: Array(allMigrations[index..<targetMigrationIndex])
)
}
}
}
static func extractDatabaseTypes(_ allMigrations: [Storage.KeyedMigration]) -> [(TableRecord & FetchableRecord).Type] {
return allMigrations
.reduce(into: [:]) { result, next in
next.migration.fetchedTables.forEach { table in
result[ObjectIdentifier(table).hashValue] = table
}
next.migration.createdOrAlteredTables.forEach { table in
result[ObjectIdentifier(table).hashValue] = table
}
}
.values
.asArray()
}
// MARK: - Mock Data
static func generateDummyData(_ storage: Storage, nullsWherePossible: Bool) throws {
var generationError: Error? = nil
// The `PRAGMA foreign_keys` is a no-op within a transaction so we have to do it outside of one
try storage.testDbWriter?.writeWithoutTransaction { db in try db.execute(sql: "PRAGMA foreign_keys = OFF") }
storage.write { db in
do {
try MigrationTest.generateDummyData(db, nullsWherePossible: nullsWherePossible)
try db.checkForeignKeys()
}
catch { generationError = error }
}
try storage.testDbWriter?.writeWithoutTransaction { db in try db.execute(sql: "PRAGMA foreign_keys = ON") }
// Throw the error if there was one
if let error: Error = generationError { throw error }
}
private static func generateDummyData(_ db: Database, nullsWherePossible: Bool) throws {
// Fetch table schema information
let disallowedPrefixes: Set<String> = DatabaseSpec.ignoredTables
.filter { $0.hasPrefix("*") && !$0.hasSuffix("*") }
.map { String($0[$0.index(after: $0.startIndex)...]) }
.asSet()
let disallowedSuffixes: Set<String> = DatabaseSpec.ignoredTables
.filter { $0.hasSuffix("*") && !$0.hasPrefix("*") }
.map { String($0[$0.startIndex..<$0.index(before: $0.endIndex)]) }
.asSet()
let disallowedContains: Set<String> = DatabaseSpec.ignoredTables
.filter { $0.hasPrefix("*") && $0.hasSuffix("*") }
.map { String($0[$0.index(after: $0.startIndex)..<$0.index(before: $0.endIndex)]) }
.asSet()
let tables: [Row] = try Row
.fetchAll(db, sql: "SELECT * from sqlite_schema WHERE type = 'table'")
.filter { tableInfo -> Bool in
guard let name: String = tableInfo["name"] else { return false }
return (
!DatabaseSpec.ignoredTables.contains(name) &&
!disallowedPrefixes.contains(where: { name.hasPrefix($0) }) &&
!disallowedSuffixes.contains(where: { name.hasSuffix($0) }) &&
!disallowedContains.contains(where: { name.contains($0) })
)
}
// Generate data via schema inspection for all other tables
try tables.forEach { tableInfo in
switch tableInfo["name"] as? String {
case .none: throw StorageError.generic
case Identity.databaseTableName:
// If there is an 'Identity' table then insert "proper" identity info (otherwise mock
// data might get deleted as invalid in libSession migrations)
try [
Identity(variant: .x25519PublicKey, data: Data.data(fromHex: TestConstants.publicKey)!),
Identity(variant: .x25519PrivateKey, data: Data.data(fromHex: TestConstants.privateKey)!),
Identity(variant: .ed25519PublicKey, data: Data.data(fromHex: TestConstants.edPublicKey)!),
Identity(variant: .ed25519SecretKey, data: Data.data(fromHex: TestConstants.edSecretKey)!)
].forEach { try $0.insert(db) }
case .some(let name):
// No need to insert dummy data if it already exists in the table
guard try Int.fetchOne(db, sql: "SELECT COUNT(*) FROM '\(name)'") == 0 else { return }
let columnInfo: [Row] = try Row.fetchAll(db, sql: "PRAGMA table_info('\(name)');")
let validNames: [String] = columnInfo.compactMap { $0["name"].map { "'\($0)'" } }
let columnNames: String = validNames.joined(separator: ", ")
let columnArgs: String = validNames.map { _ in "?" }.joined(separator: ", ")
try db.execute(
sql: "INSERT INTO \(name) (\(columnNames)) VALUES (\(columnArgs))",
arguments: StatementArguments(columnInfo.map { column in
// If we want to allow setting nulls (and the column is nullable but not a primary
// key) then use null for its value
guard !nullsWherePossible || column["notnull"] != 0 || column["pk"] == 1 else {
return nil
}
// If this column has an explicitly defined value then use that
if
let key: TableColumn = TableColumn(name, column["name"]),
let explicitValue: (any DatabaseValueConvertible) = MigrationTest.explicitValues[key]
{
return explicitValue
}
// Otherwise generate some mock data (trying to use potentially real values in case
// something is a primary/foreign key)
switch Database.ColumnType(rawValue: column["type"]) {
case .text: return "05\(TestConstants.publicKey)"
case .blob: return Data([1, 2, 3])
case .boolean: return false
case .integer, .numeric, .double, .real: return 1
case .date, .datetime: return Date(timeIntervalSince1970: 1234567890)
case .any: return nil
default: return nil
}
})
)
}
}
}
}
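
For a sense of the coverage extractTests produces, here is a standalone trace of its pairing logic using plain string keys (A, B, C are hypothetical migration keys):

// Mirrors the index arithmetic in `extractTests` above, with strings instead of migrations.
let allKeys: [String] = ["A", "B", "C"]

let pairs: [(initial: [String], toTest: [String])] = (0..<(allKeys.count - 1))
    .flatMap { index in
        ((index + 1)..<allKeys.count).map { target in
            (initial: Array(allKeys[0..<index]), toTest: Array(allKeys[index..<target]))
        }
    }

// pairs == [([], ["A"]), ([], ["A", "B"]), (["A"], ["B"])]
// i.e. each test runs `initialMigrations`, generates dummy data, then runs
// `migrationsToTest` on top and re-decodes the declared `fetchedTables`.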

View File

@ -15,7 +15,7 @@ class NotificationContentViewModelSpec: QuickSpec {
// MARK: Configuration
@TestState var mockStorage: Storage! = SynchronousStorage(
customWriter: try! DatabaseQueue(),
customMigrationTargets: [
migrationTargets: [
SNUtilitiesKit.self,
SNSnodeKit.self,
SNMessagingKit.self,

View File

@ -5,7 +5,7 @@ import GRDB
import SessionUtilitiesKit
public enum SNUIKit: MigratableTarget {
public static func migrations(_ db: Database) -> TargetMigrations {
public static func migrations() -> TargetMigrations {
return TargetMigrations(
identifier: .uiKit,
migrations: [

View File

@ -11,6 +11,8 @@ enum _001_ThemePreferences: Migration {
static let identifier: String = "ThemePreferences" // stringlint:disable
static let needsConfigSync: Bool = false
static let minExpectedRunDuration: TimeInterval = 0.1
static let fetchedTables: [(TableRecord & FetchableRecord).Type] = [Identity.self]
static let createdOrAlteredTables: [(TableRecord & FetchableRecord).Type] = []
static func migrate(_ db: Database) throws {
// Determine if the user was matching the system setting (previously the absence of this value

View File

@ -5,10 +5,10 @@ import GRDB
public enum SNUtilitiesKit: MigratableTarget { // Just to make the external API nice
public static var isRunningTests: Bool {
ProcessInfo.processInfo.environment["XCTestConfigurationFilePath"] != nil
ProcessInfo.processInfo.environment["XCTestConfigurationFilePath"] != nil // stringlint:disable
}
public static func migrations(_ db: Database) -> TargetMigrations {
public static func migrations() -> TargetMigrations {
return TargetMigrations(
identifier: .utilitiesKit,
migrations: [

View File

@ -118,7 +118,7 @@ public enum Mnemonic {
}
public static func decode(mnemonic: String, language: Language = .english) throws -> String {
var words: [String] = mnemonic.split(separator: " ").map { String($0) }
var words: [String] = mnemonic.components(separatedBy: .whitespacesAndNewlines)
let truncatedWordSet: [String] = language.loadTruncatedWordSet()
let prefixLength: Int = language.prefixLength
var result = ""
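
The decode change above swaps split(separator: " ") for components(separatedBy: .whitespacesAndNewlines), so mnemonics pasted with newlines or tabs between words are also accepted. A small Foundation-only illustration with made-up words:

import Foundation

let pasted: String = "stellar proxy lucky\nbanjo" // illustrative words only

// Old behaviour: only literal spaces separated words.
let spaceSplit: [String] = pasted.split(separator: " ").map { String($0) }
// ["stellar", "proxy", "lucky\nbanjo"]

// New behaviour: any whitespace or newline separates words.
let whitespaceSplit: [String] = pasted.components(separatedBy: .whitespacesAndNewlines)
// ["stellar", "proxy", "lucky", "banjo"]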

View File

@ -8,6 +8,10 @@ enum _001_InitialSetupMigration: Migration {
static let identifier: String = "initialSetup" // stringlint:disable
static let needsConfigSync: Bool = false
static let minExpectedRunDuration: TimeInterval = 0.1
static let fetchedTables: [(TableRecord & FetchableRecord).Type] = []
static let createdOrAlteredTables: [(TableRecord & FetchableRecord).Type] = [
Identity.self, Job.self, JobDependencies.self, Setting.self
]
static func migrate(_ db: Database) throws {
try db.create(table: Identity.self) { t in

View File

@ -10,6 +10,8 @@ enum _002_SetupStandardJobs: Migration {
static let identifier: String = "SetupStandardJobs" // stringlint:disable
static let needsConfigSync: Bool = false
static let minExpectedRunDuration: TimeInterval = 0.1
static let fetchedTables: [(TableRecord & FetchableRecord).Type] = []
static let createdOrAlteredTables: [(TableRecord & FetchableRecord).Type] = []
static func migrate(_ db: Database) throws {
try autoreleasepool {

View File

@ -9,6 +9,8 @@ enum _003_YDBToGRDBMigration: Migration {
static let identifier: String = "YDBToGRDBMigration" // stringlint:disable
static let needsConfigSync: Bool = false
static let minExpectedRunDuration: TimeInterval = 0.1
static let fetchedTables: [(TableRecord & FetchableRecord).Type] = []
static let createdOrAlteredTables: [(TableRecord & FetchableRecord).Type] = []
static func migrate(_ db: Database) throws {
guard let dbConnection: YapDatabaseConnection = SUKLegacy.newDatabaseConnection() else {

View File

@ -9,6 +9,8 @@ enum _004_AddJobPriority: Migration {
static let identifier: String = "AddJobPriority" // stringlint:disable
static let needsConfigSync: Bool = false
static let minExpectedRunDuration: TimeInterval = 0.1
static let fetchedTables: [(TableRecord & FetchableRecord).Type] = []
static let createdOrAlteredTables: [(TableRecord & FetchableRecord).Type] = [Job.self]
static func migrate(_ db: Database) throws {
// Add `priority` to the job table

View File

@ -41,6 +41,12 @@ open class Storage {
/// this should be taken into consideration when used
public private(set) var isSuspendedUnsafe: Bool = false
/// This property gets set the first time we successfully read from the database
public private(set) var hasSuccessfullyRead: Bool = false
/// This property gets set the first time we successfully write to the database
public private(set) var hasSuccessfullyWritten: Bool = false
public var hasCompletedMigrations: Bool { migrationsCompleted.wrappedValue }
public var currentlyRunningMigration: (identifier: TargetMigrations.Identifier, migration: Migration.Type)? {
internalCurrentlyRunningMigration.wrappedValue
@ -50,23 +56,16 @@ open class Storage {
fileprivate var dbWriter: DatabaseWriter?
internal var testDbWriter: DatabaseWriter? { dbWriter }
private var unprocessedMigrationRequirements: Atomic<[MigrationRequirement]> = Atomic(MigrationRequirement.allCases)
private var migrator: DatabaseMigrator?
private var migrationProgressUpdater: Atomic<((String, CGFloat) -> ())>?
private var migrationRequirementProcesser: Atomic<(Database?, MigrationRequirement) -> ()>?
// MARK: - Initialization
public init(
customWriter: DatabaseWriter? = nil,
customMigrationTargets: [MigratableTarget.Type]? = nil
) {
configureDatabase(customWriter: customWriter, customMigrationTargets: customMigrationTargets)
public init(customWriter: DatabaseWriter? = nil) {
configureDatabase(customWriter: customWriter)
}
private func configureDatabase(
customWriter: DatabaseWriter? = nil,
customMigrationTargets: [MigratableTarget.Type]? = nil
) {
private func configureDatabase(customWriter: DatabaseWriter? = nil) {
// Create the database directory if needed and ensure its protection level is set before attempting to
// create the database KeySpec or the database itself
OWSFileSystem.ensureDirectoryExists(Storage.sharedDatabaseDirectoryPath)
@ -77,13 +76,6 @@ open class Storage {
dbWriter = customWriter
isValid = true
Storage.internalHasCreatedValidInstance.mutate { $0 = true }
perform(
migrationTargets: (customMigrationTargets ?? []),
async: false,
onProgressUpdate: nil,
onMigrationRequirement: { _, _ in },
onComplete: { _, _ in }
)
return
}
@ -142,6 +134,8 @@ open class Storage {
// MARK: - Migrations
public typealias KeyedMigration = (key: String, identifier: TargetMigrations.Identifier, migration: Migration.Type)
public static func appliedMigrationIdentifiers(_ db: Database) -> Set<String> {
let migrator: DatabaseMigrator = DatabaseMigrator()
@ -149,12 +143,50 @@ open class Storage {
.defaulting(to: [])
}
public static func sortedMigrationInfo(migrationTargets: [MigratableTarget.Type]) -> [KeyedMigration] {
typealias MigrationInfo = (identifier: TargetMigrations.Identifier, migrations: TargetMigrations.MigrationSet)
return migrationTargets
.map { target -> TargetMigrations in target.migrations() }
.sorted()
.reduce(into: [[MigrationInfo]]()) { result, next in
next.migrations.enumerated().forEach { index, migrationSet in
if result.count <= index {
result.append([])
}
result[index] = (result[index] + [(next.identifier, migrationSet)])
}
}
.reduce(into: []) { result, next in
next.forEach { identifier, migrations in
result.append(contentsOf: migrations.map { (identifier.key(with: $0), identifier, $0) })
}
}
}
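The pair of `reduce` calls above interleaves migration sets across targets by index instead of running each target to completion. A hedged illustration (the target and migration names below are placeholders, not from the project):

// TargetA.migrations() declares sets [[_001_A], [_002_A, _003_A]]
// TargetB.migrations() declares sets [[_001_B], [_002_B]]
let sorted: [Storage.KeyedMigration] = Storage.sortedMigrationInfo(migrationTargets: [TargetA.self, TargetB.self])
// Migrations end up keyed in the order: _001_A, _001_B, _002_A, _003_A, _002_B
// i.e. every target's first set runs before any target's second set, so ordering
// between migration "phases" is preserved across targets.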
public func perform(
migrationTargets: [MigratableTarget.Type],
async: Bool = true,
onProgressUpdate: ((CGFloat, TimeInterval) -> ())?,
onMigrationRequirement: @escaping (Database?, MigrationRequirement) -> (),
onComplete: @escaping (Swift.Result<Void, Error>, Bool) -> ()
) {
perform(
sortedMigrations: Storage.sortedMigrationInfo(migrationTargets: migrationTargets),
async: async,
onProgressUpdate: onProgressUpdate,
onMigrationRequirement: onMigrationRequirement,
onComplete: onComplete
)
}
internal func perform(
sortedMigrations: [KeyedMigration],
async: Bool,
onProgressUpdate: ((CGFloat, TimeInterval) -> ())?,
onMigrationRequirement: @escaping (Database?, MigrationRequirement) -> (),
onComplete: @escaping (Swift.Result<Void, Error>, Bool) -> ()
) {
guard isValid, let dbWriter: DatabaseWriter = dbWriter else {
let error: Error = (startupError ?? StorageError.startupFailed)
@ -163,68 +195,34 @@ open class Storage {
return
}
typealias MigrationInfo = (identifier: TargetMigrations.Identifier, migrations: TargetMigrations.MigrationSet)
let maybeSortedMigrationInfo: [MigrationInfo]? = try? dbWriter
.read { db -> [MigrationInfo] in
migrationTargets
.map { target -> TargetMigrations in target.migrations(db) }
.sorted()
.reduce(into: [[MigrationInfo]]()) { result, next in
next.migrations.enumerated().forEach { index, migrationSet in
if result.count <= index {
result.append([])
}
result[index] = (result[index] + [(next.identifier, migrationSet)])
}
}
.reduce(into: []) { result, next in result.append(contentsOf: next) }
}
guard let sortedMigrationInfo: [MigrationInfo] = maybeSortedMigrationInfo else {
SNLog("[Database Error] Statup failed with error: Unable to prepare migrations")
onComplete(.failure(StorageError.startupFailed), false)
return
}
// Setup and run any required migrations
migrator = { [weak self] in
var migrator: DatabaseMigrator = DatabaseMigrator()
sortedMigrationInfo.forEach { migrationInfo in
migrationInfo.migrations.forEach { migration in
migrator.registerMigration(self, targetIdentifier: migrationInfo.identifier, migration: migration)
}
}
return migrator
}()
var migrator: DatabaseMigrator = DatabaseMigrator()
sortedMigrations.forEach { _, identifier, migration in
migrator.registerMigration(self, targetIdentifier: identifier, migration: migration)
}
// Determine which migrations need to be performed and gather the relevant settings needed to
// inform the app of progress/states
let completedMigrations: [String] = (try? dbWriter.read { db in try migrator?.completedMigrations(db) })
let completedMigrations: [String] = (try? dbWriter.read { db in try migrator.completedMigrations(db) })
.defaulting(to: [])
let unperformedMigrations: [(key: String, migration: Migration.Type)] = sortedMigrationInfo
let unperformedMigrations: [KeyedMigration] = sortedMigrations
.reduce(into: []) { result, next in
next.migrations.forEach { migration in
let key: String = next.identifier.key(with: migration)
guard !completedMigrations.contains(key) else { return }
result.append((key, migration))
}
guard !completedMigrations.contains(next.key) else { return }
result.append(next)
}
let migrationToDurationMap: [String: TimeInterval] = unperformedMigrations
.reduce(into: [:]) { result, next in
result[next.key] = next.migration.minExpectedRunDuration
}
let unperformedMigrationDurations: [TimeInterval] = unperformedMigrations
.map { _, migration in migration.minExpectedRunDuration }
.map { _, _, migration in migration.minExpectedRunDuration }
let totalMinExpectedDuration: TimeInterval = migrationToDurationMap.values.reduce(0, +)
let needsConfigSync: Bool = unperformedMigrations
.contains(where: { _, migration in migration.needsConfigSync })
.contains(where: { _, _, migration in migration.needsConfigSync })
self.migrationProgressUpdater = Atomic({ targetKey, progress in
guard let migrationIndex: Int = unperformedMigrations.firstIndex(where: { key, _ in key == targetKey }) else {
guard let migrationIndex: Int = unperformedMigrations.firstIndex(where: { key, _, _ in key == targetKey }) else {
return
}
@ -244,14 +242,22 @@ open class Storage {
let migrationCompleted: (Swift.Result<Void, Error>) -> () = { [weak self] result in
// Process any unprocessed requirements which need to be processed before completion
// then clear out the state
self?.unprocessedMigrationRequirements.wrappedValue
.filter { $0.shouldProcessAtCompletionIfNotRequired }
.forEach { self?.migrationRequirementProcesser?.wrappedValue(nil, $0) }
let requirementProcessor: ((Database?, MigrationRequirement) -> ())? = self?.migrationRequirementProcesser?.wrappedValue
let remainingMigrationRequirements: [MigrationRequirement] = (self?.unprocessedMigrationRequirements.wrappedValue
.filter { $0.shouldProcessAtCompletionIfNotRequired })
.defaulting(to: [])
self?.migrationsCompleted.mutate { $0 = true }
self?.migrationProgressUpdater = nil
self?.migrationRequirementProcesser = nil
SUKLegacy.clearLegacyDatabaseInstance()
// Process any remaining migration requirements
if !remainingMigrationRequirements.isEmpty {
self?.write { db in
remainingMigrationRequirements.forEach { requirementProcessor?(db, $0) }
}
}
// Reset in case there is a requirement on a migration which runs when returning from
// the background
self?.unprocessedMigrationRequirements.mutate { $0 = MigrationRequirement.allCases }
@ -283,13 +289,9 @@ open class Storage {
}
// Note: The non-async migration should only be used for unit tests
guard async else {
do { try self.migrator?.migrate(dbWriter) }
catch { migrationCompleted(Swift.Result<Void, Error>.failure(error)) }
return
}
guard async else { return migrationCompleted(Result(try migrator.migrate(dbWriter))) }
self.migrator?.asyncMigrate(dbWriter) { result in
migrator.asyncMigrate(dbWriter) { result in
let finalResult: Swift.Result<Void, Error> = {
switch result {
case .failure(let error): return .failure(error)
@ -456,37 +458,60 @@ open class Storage {
// MARK: - Logging Functions
typealias CallInfo = (file: String, function: String, line: Int)
private static func logSlowWrites<T>(
private enum Action {
case read
case write
case logIfSlow
}
private typealias CallInfo = (storage: Storage?, actions: [Action], file: String, function: String, line: Int)
private static func perform<T>(
info: CallInfo,
updates: @escaping (Database) throws -> T
) -> (Database) throws -> T {
return { db in
let start: CFTimeInterval = CACurrentMediaTime()
let actionName: String = (info.actions.contains(.write) ? "write" : "read")
let fileName: String = (info.file.components(separatedBy: "/").last.map { " \($0):\(info.line)" } ?? "")
let timeout: Timer = Timer.scheduledTimerOnMainThread(withTimeInterval: writeWarningThreadshold) {
$0.invalidate()
let timeout: Timer? = {
guard info.actions.contains(.logIfSlow) else { return nil }
// Don't want to log on the main thread so as to avoid confusion when debugging issues
DispatchQueue.global(qos: .default).async {
SNLog("[Storage\(fileName)] Slow write taking longer than \(writeWarningThreadshold, format: ".2", omitZeroDecimal: true)s - \(info.function)")
return Timer.scheduledTimerOnMainThread(withTimeInterval: Storage.writeWarningThreadshold) {
$0.invalidate()
// Don't want to log on the main thread so as to avoid confusion when debugging issues
DispatchQueue.global(qos: .default).async {
SNLog("[Storage\(fileName)] Slow \(actionName) taking longer than \(Storage.writeWarningThreadshold, format: ".2", omitZeroDecimal: true)s - \(info.function)")
}
}
}
}()
// If we timed out and are logging slow actions then log the actual duration to help us
// prioritise performance issues
defer {
// If we timed out then log the actual duration to help us prioritise performance issues
if !timeout.isValid {
if timeout != nil && timeout?.isValid == false {
let end: CFTimeInterval = CACurrentMediaTime()
DispatchQueue.global(qos: .default).async {
SNLog("[Storage\(fileName)] Slow write completed after \(end - start, format: ".2", omitZeroDecimal: true)s")
SNLog("[Storage\(fileName)] Slow \(actionName) completed after \(end - start, format: ".2", omitZeroDecimal: true)s")
}
}
timeout.invalidate()
timeout?.invalidate()
}
return try updates(db)
// Get the result
let result: T = try updates(db)
// Update the state flags
switch info.actions {
case [.write], [.write, .logIfSlow]: info.storage?.hasSuccessfullyWritten = true
case [.read], [.read, .logIfSlow]: info.storage?.hasSuccessfullyRead = true
default: break
}
return result
}
}
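Because the `CallInfo` now carries the owning `Storage` and the requested actions, the first successful closure of each kind flips the matching flag. A hypothetical check, assuming a configured `Storage.shared` and the `Identity` record used elsewhere in this diff:

let storage: Storage = Storage.shared
storage.read { db in try Identity.fetchOne(db) }
// storage.hasSuccessfullyRead is now true (the read closure returned without throwing)
// storage.hasSuccessfullyWritten remains false until the first successful write closure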
@ -516,9 +541,8 @@ open class Storage {
) -> T? {
guard isValid, let dbWriter: DatabaseWriter = dbWriter else { return nil }
let info: CallInfo = (fileName, functionName, lineNumber)
do { return try dbWriter.write(Storage.logSlowWrites(info: info, updates: updates)) }
let info: CallInfo = { [weak self] in (self, [.write, .logIfSlow], fileName, functionName, lineNumber) }()
do { return try dbWriter.write(Storage.perform(info: info, updates: updates)) }
catch { return Storage.logIfNeeded(error, isWrite: true) }
}
@ -549,10 +573,10 @@ open class Storage {
) {
guard isValid, let dbWriter: DatabaseWriter = dbWriter else { return }
let info: CallInfo = (fileName, functionName, lineNumber)
let info: CallInfo = { [weak self] in (self, [.write, .logIfSlow], fileName, functionName, lineNumber) }()
dbWriter.asyncWrite(
Storage.logSlowWrites(info: info, updates: updates),
Storage.perform(info: info, updates: updates),
completion: { db, result in
switch result {
case .failure(let error): Storage.logIfNeeded(error, isWrite: true)
@ -576,7 +600,7 @@ open class Storage {
.eraseToAnyPublisher()
}
let info: CallInfo = (fileName, functionName, lineNumber)
let info: CallInfo = { [weak self] in (self, [.write, .logIfSlow], fileName, functionName, lineNumber) }()
/// **Note:** GRDB does have a `writePublisher` method but it appears to asynchronously trigger
/// both the `output` and `complete` closures at the same time which causes a lot of unexpected
@ -587,7 +611,7 @@ open class Storage {
/// which behaves in a much more expected way than the GRDB `writePublisher` does
return Deferred {
Future { resolver in
do { resolver(Result.success(try dbWriter.write(Storage.logSlowWrites(info: info, updates: updates)))) }
do { resolver(Result.success(try dbWriter.write(Storage.perform(info: info, updates: updates)))) }
catch {
Storage.logIfNeeded(error, isWrite: true)
resolver(Result.failure(error))
@ -597,6 +621,9 @@ open class Storage {
}
open func readPublisher<T>(
fileName: String = #file,
functionName: String = #function,
lineNumber: Int = #line,
using dependencies: Dependencies = Dependencies(),
value: @escaping (Database) throws -> T
) -> AnyPublisher<T, Error> {
@ -605,6 +632,8 @@ open class Storage {
.eraseToAnyPublisher()
}
let info: CallInfo = { [weak self] in (self, [.read], fileName, functionName, lineNumber) }()
/// **Note:** GRDB does have a `readPublisher` method but it appears to asynchronously trigger
/// both the `output` and `complete` closures at the same time which causes a lot of unexpected
/// behaviours (this behaviour is apparently expected but still causes a number of odd behaviours in our code
@ -614,7 +643,7 @@ open class Storage {
/// which behaves in a much more expected way than the GRDB `readPublisher` does
return Deferred {
Future { resolver in
do { resolver(Result.success(try dbWriter.read(value))) }
do { resolver(Result.success(try dbWriter.read(Storage.perform(info: info, updates: value)))) }
catch {
Storage.logIfNeeded(error, isWrite: false)
resolver(Result.failure(error))
@ -624,12 +653,16 @@ open class Storage {
}
@discardableResult public func read<T>(
fileName: String = #file,
functionName: String = #function,
lineNumber: Int = #line,
using dependencies: Dependencies = Dependencies(),
_ value: (Database) throws -> T?
_ value: @escaping (Database) throws -> T?
) -> T? {
guard isValid, let dbWriter: DatabaseWriter = dbWriter else { return nil }
do { return try dbWriter.read(value) }
let info: CallInfo = { [weak self] in (self, [.read], fileName, functionName, lineNumber) }()
do { return try dbWriter.read(Storage.perform(info: info, updates: value)) }
catch { return Storage.logIfNeeded(error, isWrite: false) }
}
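Since both publishers wrap the work in `Deferred { Future { ... } }`, nothing touches the database until a subscriber attaches. A hypothetical consumer using standard Combine (the `Identity` fetch is just an example query, not from this diff):

import Combine

var cancellables: Set<AnyCancellable> = []

Storage.shared
    .readPublisher { db in try Identity.fetchOne(db) }
    .receive(on: DispatchQueue.main)
    .sink(
        receiveCompletion: { completion in
            if case .failure(let error) = completion { SNLog("Read failed: \(error)") }
        },
        receiveValue: { identity in
            // Handle the fetched value (or nil) on the main thread
            _ = identity
        }
    )
    .store(in: &cancellables)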

View File

@ -10,6 +10,14 @@ public protocol Migration {
static var minExpectedRunDuration: TimeInterval { get }
static var requirements: [MigrationRequirement] { get }
/// This includes any tables which are fetched from as part of the migration so that we can test that they can still
/// be parsed correctly within migration tests
static var fetchedTables: [(TableRecord & FetchableRecord).Type] { get }
/// This includes any tables which are created or altered as part of the migration so that we can test that they can
/// still be parsed correctly within migration tests
static var createdOrAlteredTables: [(TableRecord & FetchableRecord).Type] { get }
static func migrate(_ db: Database) throws
}

View File

@ -11,7 +11,7 @@ import DifferenceKit
/// **Note:** We **MUST** have accurate `filterSQL` and `orderSQL` values otherwise the indexing won't work
public class PagedDatabaseObserver<ObservedTable, T>: TransactionObserver where ObservedTable: TableRecord & ColumnExpressible & Identifiable, T: FetchableRecordWithRowId & Identifiable {
private let commitProcessingQueue: DispatchQueue = DispatchQueue(
label: "PagedDatabaseObserver.commitProcessingQueue",
label: "PagedDatabaseObserver.commitProcessingQueue", // stringlint:disable
qos: .userInitiated,
attributes: [] // Must be serial in order to avoid updates getting processed in the wrong order
)
@ -131,6 +131,7 @@ public class PagedDatabaseObserver<ObservedTable, T>: TransactionObserver where
// Retrieve the pagedRowId for the related value that is
// getting deleted
let pagedTableName: String = self.pagedTableName
let pagedRowIds: [Int64] = Storage.shared
.read { db in
PagedData.pagedRowIdsForRelatedRowIds(
@ -183,10 +184,13 @@ public class PagedDatabaseObserver<ObservedTable, T>: TransactionObserver where
// Store the instance variables locally to avoid unwrapping
let dataCache: DataCache<T> = self.dataCache.wrappedValue
let pageInfo: PagedData.PageInfo = self.pageInfo.wrappedValue
let pagedTableName: String = self.pagedTableName
let joinSQL: SQL? = self.joinSQL
let orderSQL: SQL = self.orderSQL
let filterSQL: SQL = self.filterSQL
let dataQuery: ([Int64]) -> any FetchRequest<T> = self.dataQuery
let associatedRecords: [ErasedAssociatedRecord] = self.associatedRecords
let observedTableChangeTypes: [String: PagedData.ObservedChanges] = self.observedTableChangeTypes
let getAssociatedDataInfo: (Database, PagedData.PageInfo) -> AssociatedDataInfo = { db, updatedPageInfo in
associatedRecords.map { associatedRecord in
let hasChanges: Bool = associatedRecord.tryUpdateForDatabaseCommit(

View File

@ -4,7 +4,7 @@ import Foundation
import GRDB
public protocol MigratableTarget {
static func migrations(_ db: Database) -> TargetMigrations
static func migrations() -> TargetMigrations
}
public struct TargetMigrations: Comparable {

View File

@ -1111,6 +1111,7 @@ public final class JobQueue: Hashable {
hasStartedAtLeastOnce.mutate { $0 = true }
// Get any pending jobs
let jobVariants: [Job.Variant] = self.jobVariants
let jobIdsAlreadyRunning: Set<Int64> = currentlyRunningJobIds.wrappedValue
let jobsAlreadyInQueue: Set<Int64> = pendingJobsQueue.wrappedValue.compactMap { $0.id }.asSet()
let jobsToRun: [Job] = dependencies.storage.read(using: dependencies) { db in
@ -1335,6 +1336,7 @@ public final class JobQueue: Hashable {
}
private func scheduleNextSoonestJob(using dependencies: Dependencies) {
let jobVariants: [Job.Variant] = self.jobVariants
let jobIdsAlreadyRunning: Set<Int64> = currentlyRunningJobIds.wrappedValue
let nextJobTimestamp: TimeInterval? = dependencies.storage.read(using: dependencies) { db in
try Job

View File

@ -0,0 +1,26 @@
// Copyright © 2023 Rangeproof Pty Ltd. All rights reserved.
import Foundation
public extension Result where Failure == Error {
init(_ closure: @autoclosure () throws -> Success) {
do { self = Result.success(try closure()) }
catch { self = Result.failure(error) }
}
func onFailure(closure: (Failure) -> ()) -> Result<Success, Failure> {
switch self {
case .success: break
case .failure(let failure): closure(failure)
}
return self
}
func successOrThrow() throws -> Success {
switch self {
case .success(let value): return value
case .failure(let error): throw error
}
}
}
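A minimal usage sketch for the new `Result` helpers (the `parseCount` function is hypothetical; the autoclosure form mirrors the `Result(try migrator.migrate(dbWriter))` call earlier in this diff):

func parseCount(from value: String) throws -> Int {
    guard let count: Int = Int(value) else { throw StorageError.generic }
    return count
}

do {
    let wrapped: Result<Int, Error> = Result(try parseCount(from: "42"))   // wraps the throwing call
    let count: Int = try wrapped
        .onFailure { error in SNLog("Parsing failed: \(error)") }          // side effect only on failure
        .successOrThrow()                                                  // unwraps or re-throws
    print("Parsed \(count)")
}
catch { SNLog("Unexpected failure: \(error)") }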

View File

@ -14,7 +14,7 @@ class IdentitySpec: QuickSpec {
@TestState var mockStorage: Storage! = SynchronousStorage(
customWriter: try! DatabaseQueue(),
customMigrationTargets: [
migrationTargets: [
SNUtilitiesKit.self
]
)

View File

@ -15,7 +15,7 @@ class PersistableRecordUtilitiesSpec: QuickSpec {
@TestState var customWriter: DatabaseQueue! = try! DatabaseQueue()
@TestState var mockStorage: Storage! = SynchronousStorage(
customWriter: customWriter,
customMigrationTargets: [
migrationTargets: [
TestTarget.self
]
)
@ -669,6 +669,8 @@ fileprivate enum TestInsertTestTypeMigration: Migration {
static let identifier: String = "TestInsertTestType"
static let needsConfigSync: Bool = false
static let minExpectedRunDuration: TimeInterval = 0
static let fetchedTables: [(TableRecord & FetchableRecord).Type] = []
static let createdOrAlteredTables: [(FetchableRecord & TableRecord).Type] = [TestType.self, MutableTestType.self]
static func migrate(_ db: Database) throws {
try db.create(table: TestType.self) { t in
@ -687,6 +689,8 @@ fileprivate enum TestAddColumnMigration: Migration {
static let identifier: String = "TestAddColumn"
static let needsConfigSync: Bool = false
static let minExpectedRunDuration: TimeInterval = 0
static let fetchedTables: [(TableRecord & FetchableRecord).Type] = []
static let createdOrAlteredTables: [(FetchableRecord & TableRecord).Type] = [TestType.self, MutableTestType.self]
static func migrate(_ db: Database) throws {
try db.alter(table: TestType.self) { t in
@ -700,7 +704,7 @@ fileprivate enum TestAddColumnMigration: Migration {
}
fileprivate struct TestTarget: MigratableTarget {
static func migrations(_ db: Database) -> TargetMigrations {
static func migrations() -> TargetMigrations {
return TargetMigrations(
identifier: .test,
migrations: (0..<100)

View File

@ -38,7 +38,7 @@ class JobRunnerSpec: QuickSpec {
)
@TestState var mockStorage: Storage! = SynchronousStorage(
customWriter: try! DatabaseQueue(),
customMigrationTargets: [
migrationTargets: [
SNUtilitiesKit.self
],
initialData: { db in

View File

@ -112,16 +112,6 @@ public enum AppSetup {
}
},
onComplete: { result, needsConfigSync in
// After the migrations have run but before the migration completion we load the
// SessionUtil state and update the 'needsConfigSync' flag based on whether the
// configs also need to be sync'ed
if Identity.userExists() {
SessionUtil.loadState(
userPublicKey: getUserHexEncodedPublicKey(),
ed25519SecretKey: Identity.fetchUserEd25519KeyPair()?.secretKey
)
}
// The 'needsConfigSync' flag should be based on whether either a migration or the
// configs need to be sync'ed
migrationsCompletion(result, (needsConfigSync || SessionUtil.needsSync))

View File

@ -2,7 +2,8 @@
import Foundation
import GRDB
import SessionUtilitiesKit
@testable import SessionUtilitiesKit
class MockJobRunner: Mock<JobRunnerType>, JobRunnerType {
// MARK: - Configuration

View File

@ -8,10 +8,33 @@ import GRDB
class SynchronousStorage: Storage {
public init(
customWriter: DatabaseWriter? = nil,
customMigrationTargets: [MigratableTarget.Type]? = nil,
migrationTargets: [MigratableTarget.Type]? = nil,
migrations: [Storage.KeyedMigration]? = nil,
initialData: ((Database) throws -> ())? = nil
) {
super.init(customWriter: customWriter, customMigrationTargets: customMigrationTargets)
super.init(customWriter: customWriter)
// Process any migration targets first
if let migrationTargets: [MigratableTarget.Type] = migrationTargets {
perform(
migrationTargets: migrationTargets,
async: false,
onProgressUpdate: nil,
onMigrationRequirement: { _, _ in },
onComplete: { _, _ in }
)
}
// Then process any provided migration info
if let migrations: [Storage.KeyedMigration] = migrations {
perform(
sortedMigrations: migrations,
async: false,
onProgressUpdate: nil,
onMigrationRequirement: { _, _ in },
onComplete: { _, _ in }
)
}
write { db in try initialData?(db) }
}
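A hypothetical test setup using the renamed parameters (it mirrors the `@TestState` declarations updated elsewhere in this diff; the seeded data is left as a comment because the records a test needs are test-specific):

let mockStorage: SynchronousStorage = SynchronousStorage(
    customWriter: try! DatabaseQueue(),
    migrationTargets: [SNUtilitiesKit.self],
    initialData: { db in
        // Migrations have already run synchronously at this point, so seed any
        // records the test needs here, e.g. try SomeTestRecord(...).insert(db)
    }
)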
@ -43,8 +66,11 @@ class SynchronousStorage: Storage {
}
@discardableResult override func read<T>(
fileName: String = #file,
functionName: String = #function,
lineNumber: Int = #line,
using dependencies: Dependencies = Dependencies(),
_ value: (Database) throws -> T?
_ value: @escaping (Database) throws -> T?
) -> T? {
guard isValid, let dbWriter: DatabaseWriter = testDbWriter else { return nil }
@ -56,16 +82,25 @@ class SynchronousStorage: Storage {
return try? dbWriter.unsafeReentrantRead(value)
}
return super.read(using: dependencies, value)
return super.read(
fileName: fileName,
functionName: functionName,
lineNumber: lineNumber,
using: dependencies,
value
)
}
// MARK: - Async Methods
override func readPublisher<T>(
fileName: String = #file,
functionName: String = #function,
lineNumber: Int = #line,
using dependencies: Dependencies = Dependencies(),
value: @escaping (Database) throws -> T
) -> AnyPublisher<T, Error> {
guard let result: T = self.read(using: dependencies, value) else {
guard let result: T = self.read(fileName: fileName, functionName: functionName, lineNumber: lineNumber, using: dependencies, value) else {
return Fail(error: StorageError.generic)
.eraseToAnyPublisher()
}