Merge pull request #718 from mpretty-cyro/fix/migration-and-release-issues

Fix migration issues and theming crashes
Morgan Pretty 2022-10-24 17:17:56 +11:00 committed by GitHub
commit 91eff4fe39
54 changed files with 1326 additions and 463 deletions

View file

@ -11,7 +11,7 @@ abstract_target 'GlobalDependencies' do
# FIXME: If https://github.com/jedisct1/swift-sodium/pull/249 gets resolved then revert this back to the standard pod
pod 'Sodium', :git => 'https://github.com/oxen-io/session-ios-swift-sodium.git', branch: 'session-build'
pod 'GRDB.swift/SQLCipher'
pod 'SQLCipher', '~> 4.0'
pod 'SQLCipher', '~> 4.5.0' # FIXME: Version 4.5.2 is crashing when accessing DB settings
# FIXME: We want to remove this once it's been long enough since the migration to GRDB
pod 'YapDatabase/SQLCipher', :git => 'https://github.com/oxen-io/session-ios-yap-database.git', branch: 'signal-release'
@ -82,6 +82,7 @@ abstract_target 'GlobalDependencies' do
target 'SessionUtilitiesKit' do
pod 'SAMKeychain'
pod 'YYImage/libwebp', git: 'https://github.com/signalapp/YYImage'
pod 'DifferenceKit'
target 'SessionUtilitiesKitTests' do
inherit! :complete

View file

@ -21,14 +21,14 @@ PODS:
- Curve25519Kit (2.1.0):
- CocoaLumberjack
- SignalCoreKit
- DifferenceKit (1.2.0):
- DifferenceKit/Core (= 1.2.0)
- DifferenceKit/UIKitExtension (= 1.2.0)
- DifferenceKit/Core (1.2.0)
- DifferenceKit/UIKitExtension (1.2.0):
- DifferenceKit (1.3.0):
- DifferenceKit/Core (= 1.3.0)
- DifferenceKit/UIKitExtension (= 1.3.0)
- DifferenceKit/Core (1.3.0)
- DifferenceKit/UIKitExtension (1.3.0):
- DifferenceKit/Core
- GRDB.swift/SQLCipher (5.26.0):
- SQLCipher (>= 3.4.0)
- GRDB.swift/SQLCipher (6.1.0):
- SQLCipher (>= 3.4.2)
- libwebp (1.2.1):
- libwebp/demux (= 1.2.1)
- libwebp/mux (= 1.2.1)
@ -154,7 +154,7 @@ DEPENDENCIES:
- SignalCoreKit (from `https://github.com/oxen-io/session-ios-core-kit`, branch `session-version`)
- SocketRocket (~> 0.5.1)
- Sodium (from `https://github.com/oxen-io/session-ios-swift-sodium.git`, branch `session-build`)
- SQLCipher (~> 4.0)
- SQLCipher (~> 4.5.0)
- SwiftProtobuf (~> 1.5.0)
- WebRTC-lib
- YapDatabase/SQLCipher (from `https://github.com/oxen-io/session-ios-yap-database.git`, branch `signal-release`)
@ -221,8 +221,8 @@ SPEC CHECKSUMS:
CocoaLumberjack: 543c79c114dadc3b1aba95641d8738b06b05b646
CryptoSwift: a532e74ed010f8c95f611d00b8bbae42e9fe7c17
Curve25519Kit: e63f9859ede02438ae3defc5e1a87e09d1ec7ee6
DifferenceKit: 5659c430bb7fe45876fa32ce5cba5d6167f0c805
GRDB.swift: 1395cb3556df6b16ed69dfc74c3886abc75d2825
DifferenceKit: ab185c4d7f9cef8af3fcf593e5b387fb81e999ca
GRDB.swift: 611778a5e113385373baeb3e2ce474887d1aadb7
libwebp: 98a37e597e40bfdb4c911fc98f2c53d0b12d05fc
Nimble: 5316ef81a170ce87baf72dd961f22f89a602ff84
NVActivityIndicatorView: 1f6c5687f1171810aa27a3296814dc2d7dec3667
@ -242,6 +242,6 @@ SPEC CHECKSUMS:
YYImage: f1ddd15ac032a58b78bbed1e012b50302d318331
ZXingObjC: fdbb269f25dd2032da343e06f10224d62f537bdb
PODFILE CHECKSUM: 430e3b57d986dc8890415294fc6cf5e4eabfce3e
PODFILE CHECKSUM: 7452ce88370eadd58d21fdf6a4c4945d6554ee95
COCOAPODS: 1.11.3

View file

@ -570,6 +570,8 @@
FD17D7E527F6A09900122BE0 /* Identity.swift in Sources */ = {isa = PBXBuildFile; fileRef = FD17D7E427F6A09900122BE0 /* Identity.swift */; };
FD17D7E727F6A16700122BE0 /* _003_YDBToGRDBMigration.swift in Sources */ = {isa = PBXBuildFile; fileRef = FD17D7E627F6A16700122BE0 /* _003_YDBToGRDBMigration.swift */; };
FD17D7EA27F6A1C600122BE0 /* SUKLegacy.swift in Sources */ = {isa = PBXBuildFile; fileRef = FD17D7E927F6A1C600122BE0 /* SUKLegacy.swift */; };
FD1A94FB2900D1C2000D73D3 /* PersistableRecord+Utilities.swift in Sources */ = {isa = PBXBuildFile; fileRef = FD1A94FA2900D1C2000D73D3 /* PersistableRecord+Utilities.swift */; };
FD1A94FE2900D2EA000D73D3 /* PersistableRecordUtilitiesSpec.swift in Sources */ = {isa = PBXBuildFile; fileRef = FD1A94FD2900D2EA000D73D3 /* PersistableRecordUtilitiesSpec.swift */; };
FD1C98E4282E3C5B00B76F9E /* UINavigationBar+Utilities.swift in Sources */ = {isa = PBXBuildFile; fileRef = FD1C98E3282E3C5B00B76F9E /* UINavigationBar+Utilities.swift */; };
FD23EA5C28ED00F80058676E /* Mock.swift in Sources */ = {isa = PBXBuildFile; fileRef = FDC290A527D860CE005DAE71 /* Mock.swift */; };
FD23EA5D28ED00FA0058676E /* TestConstants.swift in Sources */ = {isa = PBXBuildFile; fileRef = FD83B9BD27CF2243005E1583 /* TestConstants.swift */; };
@ -1683,6 +1685,8 @@
FD17D7E427F6A09900122BE0 /* Identity.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Identity.swift; sourceTree = "<group>"; };
FD17D7E627F6A16700122BE0 /* _003_YDBToGRDBMigration.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = _003_YDBToGRDBMigration.swift; sourceTree = "<group>"; };
FD17D7E927F6A1C600122BE0 /* SUKLegacy.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SUKLegacy.swift; sourceTree = "<group>"; };
FD1A94FA2900D1C2000D73D3 /* PersistableRecord+Utilities.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "PersistableRecord+Utilities.swift"; sourceTree = "<group>"; };
FD1A94FD2900D2EA000D73D3 /* PersistableRecordUtilitiesSpec.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PersistableRecordUtilitiesSpec.swift; sourceTree = "<group>"; };
FD1C98E3282E3C5B00B76F9E /* UINavigationBar+Utilities.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "UINavigationBar+Utilities.swift"; sourceTree = "<group>"; };
FD23EA6028ED0B260058676E /* CombineExtensions.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CombineExtensions.swift; sourceTree = "<group>"; };
FD245C612850664300B966DD /* Configuration.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = Configuration.swift; sourceTree = "<group>"; };
@ -3619,6 +3623,7 @@
FD17D7C627F5207C00122BE0 /* DatabaseMigrator+Utilities.swift */,
FDF22210281B5E0B000A4995 /* TableRecord+Utilities.swift */,
FDF2220E281B55E6000A4995 /* QueryInterfaceRequest+Utilities.swift */,
FD1A94FA2900D1C2000D73D3 /* PersistableRecord+Utilities.swift */,
);
path = Utilities;
sourceTree = "<group>";
@ -3652,6 +3657,14 @@
path = LegacyDatabase;
sourceTree = "<group>";
};
FD1A94FC2900D2DB000D73D3 /* Utilities */ = {
isa = PBXGroup;
children = (
FD1A94FD2900D2EA000D73D3 /* PersistableRecordUtilitiesSpec.swift */,
);
path = Utilities;
sourceTree = "<group>";
};
FD37E9C428A1C701003AE748 /* Themes */ = {
isa = PBXGroup;
children = (
@ -3698,6 +3711,7 @@
isa = PBXGroup;
children = (
FD37EA1328AB42C1003AE748 /* Models */,
FD1A94FC2900D2DB000D73D3 /* Utilities */,
);
path = Database;
sourceTree = "<group>";
@ -5336,6 +5350,7 @@
7BD477A827EC39F5004E2822 /* Atomic.swift in Sources */,
B8BC00C0257D90E30032E807 /* General.swift in Sources */,
FD17D7A127F40D2500122BE0 /* Storage.swift in Sources */,
FD1A94FB2900D1C2000D73D3 /* PersistableRecord+Utilities.swift in Sources */,
FD5D201E27B0D87C00FEA984 /* SessionId.swift in Sources */,
C32C5A24256DB7DB003C73A2 /* SNUserDefaults.swift in Sources */,
C3BBE0A72554D4DE0050F1E3 /* Promise+Retrying.swift in Sources */,
@ -5776,6 +5791,7 @@
FD23EA6328ED0B260058676E /* CombineExtensions.swift in Sources */,
FD2AAAEE28ED3E1100A49611 /* MockGeneralCache.swift in Sources */,
FD37EA1528AB42CB003AE748 /* IdentitySpec.swift in Sources */,
FD1A94FE2900D2EA000D73D3 /* PersistableRecordUtilitiesSpec.swift in Sources */,
FDC290AA27D9B6FD005DAE71 /* Mock.swift in Sources */,
);
runOnlyForDeploymentPostprocessing = 0;
@ -6016,7 +6032,7 @@
"CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer";
CODE_SIGN_STYLE = Automatic;
COPY_PHASE_STRIP = NO;
CURRENT_PROJECT_VERSION = 383;
CURRENT_PROJECT_VERSION = 385;
DEBUG_INFORMATION_FORMAT = dwarf;
DEVELOPMENT_TEAM = SUQ8J2PCT7;
FRAMEWORK_SEARCH_PATHS = "$(inherited)";
@ -6041,7 +6057,7 @@
"@executable_path/Frameworks",
"@executable_path/../../Frameworks",
);
MARKETING_VERSION = 2.2.0;
MARKETING_VERSION = 2.2.1;
MTL_ENABLE_DEBUG_INFO = YES;
PRODUCT_BUNDLE_IDENTIFIER = "com.loki-project.loki-messenger.ShareExtension";
PRODUCT_NAME = "$(TARGET_NAME)";
@ -6089,7 +6105,7 @@
"CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer";
CODE_SIGN_STYLE = Automatic;
COPY_PHASE_STRIP = NO;
CURRENT_PROJECT_VERSION = 383;
CURRENT_PROJECT_VERSION = 385;
DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym";
DEVELOPMENT_TEAM = SUQ8J2PCT7;
ENABLE_NS_ASSERTIONS = NO;
@ -6119,7 +6135,7 @@
"@executable_path/Frameworks",
"@executable_path/../../Frameworks",
);
MARKETING_VERSION = 2.2.0;
MARKETING_VERSION = 2.2.1;
MTL_ENABLE_DEBUG_INFO = NO;
PRODUCT_BUNDLE_IDENTIFIER = "com.loki-project.loki-messenger.ShareExtension";
PRODUCT_NAME = "$(TARGET_NAME)";
@ -6155,7 +6171,7 @@
"CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer";
CODE_SIGN_STYLE = Automatic;
COPY_PHASE_STRIP = NO;
CURRENT_PROJECT_VERSION = 383;
CURRENT_PROJECT_VERSION = 385;
DEBUG_INFORMATION_FORMAT = dwarf;
DEVELOPMENT_TEAM = SUQ8J2PCT7;
FRAMEWORK_SEARCH_PATHS = "$(inherited)";
@ -6178,7 +6194,7 @@
"@executable_path/Frameworks",
"@executable_path/../../Frameworks",
);
MARKETING_VERSION = 2.2.0;
MARKETING_VERSION = 2.2.1;
MTL_ENABLE_DEBUG_INFO = INCLUDE_SOURCE;
MTL_FAST_MATH = YES;
PRODUCT_BUNDLE_IDENTIFIER = "com.loki-project.loki-messenger.NotificationServiceExtension";
@ -6229,7 +6245,7 @@
"CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer";
CODE_SIGN_STYLE = Automatic;
COPY_PHASE_STRIP = NO;
CURRENT_PROJECT_VERSION = 383;
CURRENT_PROJECT_VERSION = 385;
DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym";
DEVELOPMENT_TEAM = SUQ8J2PCT7;
ENABLE_NS_ASSERTIONS = NO;
@ -6257,7 +6273,7 @@
"@executable_path/Frameworks",
"@executable_path/../../Frameworks",
);
MARKETING_VERSION = 2.2.0;
MARKETING_VERSION = 2.2.1;
MTL_ENABLE_DEBUG_INFO = NO;
MTL_FAST_MATH = YES;
PRODUCT_BUNDLE_IDENTIFIER = "com.loki-project.loki-messenger.NotificationServiceExtension";
@ -7157,7 +7173,7 @@
CODE_SIGN_ENTITLEMENTS = Session/Meta/Signal.entitlements;
CODE_SIGN_IDENTITY = "iPhone Developer";
"CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer";
CURRENT_PROJECT_VERSION = 383;
CURRENT_PROJECT_VERSION = 385;
DEVELOPMENT_TEAM = SUQ8J2PCT7;
FRAMEWORK_SEARCH_PATHS = (
"$(inherited)",
@ -7196,7 +7212,7 @@
"$(SRCROOT)",
);
LLVM_LTO = NO;
MARKETING_VERSION = 2.2.0;
MARKETING_VERSION = 2.2.1;
OTHER_LDFLAGS = "$(inherited)";
OTHER_SWIFT_FLAGS = "$(inherited) \"-D\" \"COCOAPODS\" \"-DDEBUG\"";
PRODUCT_BUNDLE_IDENTIFIER = "com.loki-project.loki-messenger";
@ -7229,7 +7245,7 @@
CODE_SIGN_ENTITLEMENTS = Session/Meta/Signal.entitlements;
CODE_SIGN_IDENTITY = "iPhone Developer";
"CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer";
CURRENT_PROJECT_VERSION = 383;
CURRENT_PROJECT_VERSION = 385;
DEVELOPMENT_TEAM = SUQ8J2PCT7;
FRAMEWORK_SEARCH_PATHS = (
"$(inherited)",
@ -7268,7 +7284,7 @@
"$(SRCROOT)",
);
LLVM_LTO = NO;
MARKETING_VERSION = 2.2.0;
MARKETING_VERSION = 2.2.1;
OTHER_LDFLAGS = "$(inherited)";
PRODUCT_BUNDLE_IDENTIFIER = "com.loki-project.loki-messenger";
PRODUCT_NAME = Session;

View file

@ -2019,7 +2019,9 @@ extension ConversationVC:
func startVoiceMessageRecording() {
// Request permission if needed
Permissions.requestMicrophonePermissionIfNeeded() { [weak self] in
self?.cancelVoiceMessageRecording()
DispatchQueue.main.async {
self?.cancelVoiceMessageRecording()
}
}
// Keep screen on
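The same fix is applied in several places below (SendMediaNavigationController, SettingsViewModel, NewDMVC, LinkDeviceVC, JoinOpenGroupVC, QRCodeVC): the Permissions callbacks can fire on a background queue, so any UI work they trigger is now explicitly dispatched back to the main queue. A minimal sketch of the pattern, using a hypothetical helper name rather than the inline DispatchQueue calls the PR actually uses:

import Foundation

// Hypothetical convenience; the PR inlines DispatchQueue.main.async at each call site instead
func performOnMain(_ work: @escaping () -> Void) {
    guard !Thread.isMainThread else { return work() }

    DispatchQueue.main.async(execute: work)
}

// Usage mirroring the call site above:
// Permissions.requestMicrophonePermissionIfNeeded { [weak self] in
//     performOnMain { self?.cancelVoiceMessageRecording() }
// }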

View file

@ -1,6 +1,7 @@
// Copyright © 2022 Rangeproof Pty Ltd. All rights reserved.
import UIKit
import AVKit
import GRDB
import DifferenceKit
import SessionUIKit

View file

@ -247,32 +247,14 @@ public class ConversationViewModel: OWSAudioPlayerDelegate {
)
],
onChangeUnsorted: { [weak self] updatedData, updatedPageInfo in
guard
let currentData: [SectionModel] = self?.interactionData,
let updatedInteractionData: [SectionModel] = self?.process(data: updatedData, for: updatedPageInfo)
else { return }
let changeset: StagedChangeset<[SectionModel]> = StagedChangeset(
source: currentData,
target: updatedInteractionData
)
// No need to do anything if there were no changes
guard !changeset.isEmpty else { return }
// Run any changes on the main thread (as they will generally trigger UI updates)
DispatchQueue.main.async {
// If we have the callback then trigger it, otherwise just store the changes to be sent
// to the callback if we ever start observing again (when we have the callback it needs
// to do the data updating as it's tied to UI updates and can cause crashes if not updated
// in the correct order)
guard let onInteractionChange: (([SectionModel], StagedChangeset<[SectionModel]>) -> ()) = self?.onInteractionChange else {
self?.unobservedInteractionDataChanges = (updatedInteractionData, changeset)
return
PagedData.processAndTriggerUpdates(
updatedData: self?.process(data: updatedData, for: updatedPageInfo),
currentDataRetriever: { self?.interactionData },
onDataChange: self?.onInteractionChange,
onUnobservedDataChange: { updatedData, changeset in
self?.unobservedInteractionDataChanges = (updatedData, changeset)
}
onInteractionChange(updatedInteractionData, changeset)
}
)
}
)
}
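The new PagedData.processAndTriggerUpdates helper that replaces this block (and the equivalent blocks in HomeViewModel, MessageRequestsViewModel, MediaGalleryViewModel and BlockedContactsViewModel below) is not part of this excerpt. The following is a minimal sketch of what it plausibly centralises, reconstructed from the inline logic removed above; the generic constraint and signature are assumptions, and the real helper may also skip the main-thread dispatch when already on the main thread (see the logic removed from HomeViewModel):

import Foundation
import DifferenceKit

enum PagedDataSketch {
    static func processAndTriggerUpdates<S: DifferentiableSection>(
        updatedData: [S]?,
        currentDataRetriever: @escaping () -> [S]?,
        onDataChange: (([S], StagedChangeset<[S]>) -> ())?,
        onUnobservedDataChange: @escaping ([S], StagedChangeset<[S]>) -> ()
    ) {
        guard
            let updatedData: [S] = updatedData,
            let currentData: [S] = currentDataRetriever()
        else { return }

        let changeset: StagedChangeset<[S]> = StagedChangeset(
            source: currentData,
            target: updatedData
        )

        // No need to do anything if there were no changes
        guard !changeset.isEmpty else { return }

        // Changes generally trigger UI updates so run them on the main thread; if no
        // observer is registered yet, store the change so it can be replayed later
        DispatchQueue.main.async {
            guard let onDataChange: (([S], StagedChangeset<[S]>) -> ()) = onDataChange else {
                onUnobservedDataChange(updatedData, changeset)
                return
            }

            onDataChange(updatedData, changeset)
        }
    }
}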

View file

@ -150,42 +150,14 @@ public class HomeViewModel {
orderSQL: SessionThreadViewModel.homeOrderSQL
),
onChangeUnsorted: { [weak self] updatedData, updatedPageInfo in
guard
let currentData: [SectionModel] = self?.threadData,
let updatedThreadData: [SectionModel] = self?.process(data: updatedData, for: updatedPageInfo)
else { return }
let changeset: StagedChangeset<[SectionModel]> = StagedChangeset(
source: currentData,
target: updatedThreadData
)
// No need to do anything if there were no changes
guard !changeset.isEmpty else { return }
let performUpdates = {
// If we have the callback then trigger it, otherwise just store the changes to be sent
// to the callback if we ever start observing again (when we have the callback it needs
// to do the data updating as it's tied to UI updates and can cause crashes if not updated
// in the correct order)
guard let onThreadChange: (([SectionModel], StagedChangeset<[SectionModel]>) -> ()) = self?.onThreadChange else {
self?.unobservedThreadDataChanges = (updatedThreadData, changeset)
return
PagedData.processAndTriggerUpdates(
updatedData: self?.process(data: updatedData, for: updatedPageInfo),
currentDataRetriever: { self?.threadData },
onDataChange: self?.onThreadChange,
onUnobservedDataChange: { updatedData, changeset in
self?.unobservedThreadDataChanges = (updatedData, changeset)
}
onThreadChange(updatedThreadData, changeset)
}
// Note: On the initial launch the data will be fetched on the main thread and we want it
// to block so don't dispatch to the next run loop
guard !Thread.isMainThread else {
return performUpdates()
}
// Run any changes on the main thread (as they will generally trigger UI updates)
DispatchQueue.main.async {
performUpdates()
}
)
}
)
@ -246,20 +218,15 @@ public class HomeViewModel {
data: currentData.flatMap { $0.elements },
for: currentPageInfo
)
let changeset: StagedChangeset<[SectionModel]> = StagedChangeset(
source: currentData,
target: updatedThreadData
PagedData.processAndTriggerUpdates(
updatedData: updatedThreadData,
currentDataRetriever: { [weak self] in (self?.unobservedThreadDataChanges?.0 ?? self?.threadData) },
onDataChange: onThreadChange,
onUnobservedDataChange: { [weak self] updatedThreadData, changeset in
self?.unobservedThreadDataChanges = (updatedThreadData, changeset)
}
)
// No need to do anything if there were no changes
guard !changeset.isEmpty else { return }
guard let onThreadChange: (([SectionModel], StagedChangeset<[SectionModel]>) -> ()) = self.onThreadChange else {
self.unobservedThreadDataChanges = (updatedThreadData, changeset)
return
}
onThreadChange(updatedThreadData, changeset)
}
// MARK: - Thread Data

View file

@ -98,32 +98,14 @@ public class MessageRequestsViewModel {
orderSQL: SessionThreadViewModel.messageRequetsOrderSQL
),
onChangeUnsorted: { [weak self] updatedData, updatedPageInfo in
guard
let currentData: [SectionModel] = self?.threadData,
let updatedThreadData: [SectionModel] = self?.process(data: updatedData, for: updatedPageInfo)
else { return }
let changeset: StagedChangeset<[SectionModel]> = StagedChangeset(
source: currentData,
target: updatedThreadData
)
// No need to do anything if there were no changes
guard !changeset.isEmpty else { return }
// Run any changes on the main thread (as they will generally trigger UI updates)
DispatchQueue.main.async {
// If we have the callback then trigger it, otherwise just store the changes to be sent
// to the callback if we ever start observing again (when we have the callback it needs
// to do the data updating as it's tied to UI updates and can cause crashes if not updated
// in the correct order)
guard let onThreadChange: (([SectionModel], StagedChangeset<[SectionModel]>) -> ()) = self?.onThreadChange else {
self?.unobservedThreadDataChanges = (updatedThreadData, changeset)
return
PagedData.processAndTriggerUpdates(
updatedData: self?.process(data: updatedData, for: updatedPageInfo),
currentDataRetriever: { self?.threadData },
onDataChange: self?.onThreadChange,
onUnobservedDataChange: { updatedData, changeset in
self?.unobservedThreadDataChanges = (updatedData, changeset)
}
onThreadChange(updatedThreadData, changeset)
}
)
}
)

View file

@ -131,8 +131,10 @@ final class NewDMVC: BaseVC, UIPageViewControllerDataSource, UIPageViewControlle
}
fileprivate func handleCameraAccessGranted() {
pages[1] = scanQRCodeWrapperVC
pageVC.setViewControllers([ scanQRCodeWrapperVC ], direction: .forward, animated: false, completion: nil)
DispatchQueue.main.async {
self.pages[1] = self.scanQRCodeWrapperVC
self.pageVC.setViewControllers([ self.scanQRCodeWrapperVC ], direction: .forward, animated: false, completion: nil)
}
}
// MARK: - Updating

View file

@ -93,32 +93,14 @@ public class MediaGalleryViewModel {
orderSQL: Item.galleryOrderSQL,
dataQuery: Item.baseQuery(orderSQL: Item.galleryOrderSQL),
onChangeUnsorted: { [weak self] updatedData, updatedPageInfo in
guard
let currentData: [SectionModel] = self?.galleryData,
let updatedGalleryData: [SectionModel] = self?.process(data: updatedData, for: updatedPageInfo)
else { return }
let changeset: StagedChangeset<[SectionModel]> = StagedChangeset(
source: currentData,
target: updatedGalleryData
)
// No need to do anything if there were no changes
guard !changeset.isEmpty else { return }
// Run any changes on the main thread (as they will generally trigger UI updates)
DispatchQueue.main.async {
// If we have the callback then trigger it, otherwise just store the changes to be sent
// to the callback if we ever start observing again (when we have the callback it needs
// to do the data updating as it's tied to UI updates and can cause crashes if not updated
// in the correct order)
guard let onGalleryChange: (([SectionModel], StagedChangeset<[SectionModel]>) -> ()) = self?.onGalleryChange else {
self?.unobservedGalleryDataChanges = (updatedGalleryData, changeset)
return
PagedData.processAndTriggerUpdates(
updatedData: self?.process(data: updatedData, for: updatedPageInfo),
currentDataRetriever: { self?.galleryData },
onDataChange: self?.onGalleryChange,
onUnobservedDataChange: { updatedData, changeset in
self?.unobservedGalleryDataChanges = (updatedData, changeset)
}
onGalleryChange(updatedGalleryData, changeset)
}
)
}
)

View file

@ -142,13 +142,17 @@ class SendMediaNavigationController: UINavigationController {
private func didTapCameraModeButton() {
Permissions.requestCameraPermissionIfNeeded { [weak self] in
self?.fadeTo(viewControllers: ((self?.captureViewController).map { [$0] } ?? []))
DispatchQueue.main.async {
self?.fadeTo(viewControllers: ((self?.captureViewController).map { [$0] } ?? []))
}
}
}
private func didTapMediaLibraryModeButton() {
Permissions.requestLibraryPermissionIfNeeded { [weak self] in
self?.fadeTo(viewControllers: ((self?.mediaLibraryViewController).map { [$0] } ?? []))
DispatchQueue.main.async {
self?.fadeTo(viewControllers: ((self?.mediaLibraryViewController).map { [$0] } ?? []))
}
}
}

View file

@ -69,8 +69,8 @@ class AppDelegate: UIResponder, UIApplicationDelegate, UNUserNotificationCenterD
minEstimatedTotalTime: minEstimatedTotalTime
)
},
migrationsCompletion: { [weak self] error, needsConfigSync in
guard error == nil else {
migrationsCompletion: { [weak self] result, needsConfigSync in
if case .failure(let error) = result {
self?.showFailedMigrationAlert(error: error)
return
}
@ -322,8 +322,8 @@ class AppDelegate: UIResponder, UIApplicationDelegate, UNUserNotificationCenterD
minEstimatedTotalTime: minEstimatedTotalTime
)
},
migrationsCompletion: { [weak self] error, needsConfigSync in
guard error == nil else {
migrationsCompletion: { [weak self] result, needsConfigSync in
if case .failure(let error) = result {
self?.showFailedMigrationAlert(error: error)
return
}
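The migrations completion now receives a Swift.Result<Database, Error> instead of an optional Error (see the matching Storage.onComplete signature change at the end of this diff). A small self-contained sketch of consuming the new shape; only the failure branch appears in the PR, the success handler here is hypothetical:

import GRDB

func handleMigrationCompletion(
    _ result: Swift.Result<Database, Error>,
    needsConfigSync: Bool,
    showFailedMigrationAlert: (Error) -> Void,
    continueLaunch: (Bool) -> Void
) {
    switch result {
        case .failure(let error): showFailedMigrationAlert(error)
        case .success: continueLaunch(needsConfigSync) // hypothetical happy path
    }
}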

View file

@ -597,7 +597,7 @@ class NotificationActionHandler {
interactionId: try thread.interactions
.select(.id)
.order(Interaction.Columns.timestampMs.desc)
.asRequest(of: Int64?.self)
.asRequest(of: Int64.self)
.fetchOne(db),
threadId: thread.id,
includingOlder: true,

View file

@ -109,8 +109,10 @@ final class LinkDeviceVC: BaseVC, UIPageViewControllerDataSource, UIPageViewCont
}
fileprivate func handleCameraAccessGranted() {
pages[1] = scanQRCodeWrapperVC
pageVC.setViewControllers([ scanQRCodeWrapperVC ], direction: .forward, animated: false, completion: nil)
DispatchQueue.main.async {
self.pages[1] = self.scanQRCodeWrapperVC
self.pageVC.setViewControllers([ self.scanQRCodeWrapperVC ], direction: .forward, animated: false, completion: nil)
}
}
// MARK: - Updating
@ -161,9 +163,15 @@ final class LinkDeviceVC: BaseVC, UIPageViewControllerDataSource, UIPageViewCont
GetSnodePoolJob.run()
NotificationCenter.default.addObserver(self, selector: #selector(handleInitialConfigurationMessageReceived), name: .initialConfigurationMessageReceived, object: nil)
ModalActivityIndicatorViewController.present(fromViewController: navigationController!) { [weak self] modal in
self?.activityIndicatorModal = modal
}
ModalActivityIndicatorViewController
.present(
// There was some crashing here due to force-unwrapping so just falling back to
// using self if there is no nav controller
fromViewController: (self.navigationController ?? self)
) { [weak self] modal in
self?.activityIndicatorModal = modal
}
}
@objc private func handleInitialConfigurationMessageReceived(_ notification: Notification) {

View file

@ -105,8 +105,10 @@ final class JoinOpenGroupVC: BaseVC, UIPageViewControllerDataSource, UIPageViewC
}
fileprivate func handleCameraAccessGranted() {
pages[1] = scanQRCodeWrapperVC
pageVC.setViewControllers([ scanQRCodeWrapperVC ], direction: .forward, animated: false, completion: nil)
DispatchQueue.main.async {
self.pages[1] = self.scanQRCodeWrapperVC
self.pageVC.setViewControllers([ self.scanQRCodeWrapperVC ], direction: .forward, animated: false, completion: nil)
}
}
// MARK: - Updating

View file

@ -62,32 +62,14 @@ public class BlockedContactsViewModel {
orderSQL: DataModel.orderSQL
),
onChangeUnsorted: { [weak self] updatedData, updatedPageInfo in
guard
let currentData: [SectionModel] = self?.contactData,
let updatedContactData: [SectionModel] = self?.process(data: updatedData, for: updatedPageInfo)
else { return }
let changeset: StagedChangeset<[SectionModel]> = StagedChangeset(
source: currentData,
target: updatedContactData
)
// No need to do anything if there were no changes
guard !changeset.isEmpty else { return }
// Run any changes on the main thread (as they will generally trigger UI updates)
DispatchQueue.main.async {
// If we have the callback then trigger it, otherwise just store the changes to be sent
// to the callback if we ever start observing again (when we have the callback it needs
// to do the data updating as it's tied to UI updates and can cause crashes if not updated
// in the correct order)
guard let onContactChange: (([SectionModel], StagedChangeset<[SectionModel]>) -> ()) = self?.onContactChange else {
self?.unobservedContactDataChanges = (updatedContactData, changeset)
return
PagedData.processAndTriggerUpdates(
updatedData: self?.process(data: updatedData, for: updatedPageInfo),
currentDataRetriever: { self?.contactData },
onDataChange: self?.onContactChange,
onUnobservedDataChange: { updatedData, changeset in
self?.unobservedContactDataChanges = (updatedData, changeset)
}
onContactChange(updatedContactData, changeset)
}
)
}
)

View file

@ -61,7 +61,7 @@ class ConversationSettingsViewModel: SessionTableViewModel<NoNav, ConversationSe
.settingBool(key: .trimOpenGroupMessagesOlderThanSixMonths)
),
onTap: {
Storage.shared.writeAsync { db in
Storage.shared.write { db in
db[.trimOpenGroupMessagesOlderThanSixMonths] = !db[.trimOpenGroupMessagesOlderThanSixMonths]
}
}
@ -79,7 +79,7 @@ class ConversationSettingsViewModel: SessionTableViewModel<NoNav, ConversationSe
.settingBool(key: .shouldAutoPlayConsecutiveAudioMessages)
),
onTap: {
Storage.shared.writeAsync { db in
Storage.shared.write { db in
db[.shouldAutoPlayConsecutiveAudioMessages] = !db[.shouldAutoPlayConsecutiveAudioMessages]
}
}

View file

@ -108,7 +108,7 @@ class NotificationSettingsViewModel: SessionTableViewModel<NoNav, NotificationSe
title: "NOTIFICATIONS_STYLE_SOUND_WHEN_OPEN_TITLE".localized(),
rightAccessory: .toggle(.settingBool(key: .playNotificationSoundInForeground)),
onTap: {
Storage.shared.writeAsync { db in
Storage.shared.write { db in
db[.playNotificationSoundInForeground] = !db[.playNotificationSoundInForeground]
}
}

View file

@ -100,7 +100,7 @@ class PrivacySettingsViewModel: SessionTableViewModel<PrivacySettingsViewModel.N
subtitle: "PRIVACY_SCREEN_SECURITY_LOCK_SESSION_DESCRIPTION".localized(),
rightAccessory: .toggle(.settingBool(key: .isScreenLockEnabled)),
onTap: {
Storage.shared.writeAsync { db in
Storage.shared.write { db in
db[.isScreenLockEnabled] = !db[.isScreenLockEnabled]
}
}
@ -116,7 +116,7 @@ class PrivacySettingsViewModel: SessionTableViewModel<PrivacySettingsViewModel.N
subtitle: "PRIVACY_READ_RECEIPTS_DESCRIPTION".localized(),
rightAccessory: .toggle(.settingBool(key: .areReadReceiptsEnabled)),
onTap: {
Storage.shared.writeAsync { db in
Storage.shared.write { db in
db[.areReadReceiptsEnabled] = !db[.areReadReceiptsEnabled]
}
}
@ -158,7 +158,7 @@ class PrivacySettingsViewModel: SessionTableViewModel<PrivacySettingsViewModel.N
},
rightAccessory: .toggle(.settingBool(key: .typingIndicatorsEnabled)),
onTap: {
Storage.shared.writeAsync { db in
Storage.shared.write { db in
db[.typingIndicatorsEnabled] = !db[.typingIndicatorsEnabled]
}
}
@ -174,7 +174,7 @@ class PrivacySettingsViewModel: SessionTableViewModel<PrivacySettingsViewModel.N
subtitle: "PRIVACY_LINK_PREVIEWS_DESCRIPTION".localized(),
rightAccessory: .toggle(.settingBool(key: .areLinkPreviewsEnabled)),
onTap: {
Storage.shared.writeAsync { db in
Storage.shared.write { db in
db[.areLinkPreviewsEnabled] = !db[.areLinkPreviewsEnabled]
}
}
@ -198,7 +198,7 @@ class PrivacySettingsViewModel: SessionTableViewModel<PrivacySettingsViewModel.N
onConfirm: { _ in Permissions.requestMicrophonePermissionIfNeeded() }
),
onTap: {
Storage.shared.writeAsync { db in
Storage.shared.write { db in
db[.areCallsEnabled] = !db[.areCallsEnabled]
}
}

View file

@ -95,8 +95,10 @@ final class QRCodeVC : BaseVC, UIPageViewControllerDataSource, UIPageViewControl
}
fileprivate func handleCameraAccessGranted() {
pages[1] = scanQRCodeWrapperVC
pageVC.setViewControllers([ scanQRCodeWrapperVC ], direction: .forward, animated: false, completion: nil)
DispatchQueue.main.async {
self.pages[1] = self.scanQRCodeWrapperVC
self.pageVC.setViewControllers([ self.scanQRCodeWrapperVC ], direction: .forward, animated: false, completion: nil)
}
}
// MARK: - Updating

View file

@ -407,12 +407,14 @@ class SettingsViewModel: SessionTableViewModel<SettingsViewModel.NavButton, Sett
private func showPhotoLibraryForAvatar() {
Permissions.requestLibraryPermissionIfNeeded { [weak self] in
let picker: UIImagePickerController = UIImagePickerController()
picker.sourceType = .photoLibrary
picker.mediaTypes = [ "public.image" ]
picker.delegate = self?.imagePickerHandler
self?.transitionToScreen(picker, transitionType: .present)
DispatchQueue.main.async {
let picker: UIImagePickerController = UIImagePickerController()
picker.sourceType = .photoLibrary
picker.mediaTypes = [ "public.image" ]
picker.delegate = self?.imagePickerHandler
self?.transitionToScreen(picker, transitionType: .present)
}
}
}
@ -446,7 +448,7 @@ class SettingsViewModel: SessionTableViewModel<SettingsViewModel.NavButton, Sett
try MessageSender.syncConfiguration(db, forceSyncNow: true).retainUntilComplete()
// Wait for the database transaction to complete before updating the UI
db.afterNextTransactionCommit { _ in
db.afterNextTransaction { _ in
DispatchQueue.main.async {
modalActivityIndicator.dismiss(completion: {})
}
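The db.afterNextTransactionCommit call above is the GRDB 5 API; GRDB 6 renames it to afterNextTransaction(onCommit:onRollback:), which is what this PR switches to here and in OpenGroupManager and MessageReceiver below. A minimal usage sketch (the function name and closure body are illustrative):

import GRDB

func scheduleAfterCommit(_ db: Database, work: @escaping () -> Void) {
    // The trailing closure is the onCommit handler; it only runs once the current
    // write transaction has actually been committed
    db.afterNextTransaction { _ in
        work()
    }
}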

View file

@ -177,7 +177,7 @@ enum _001_InitialSetupMigration: Migration {
.notNull()
}
try db.create(table: _006_FixHiddenModAdminSupport.PreMigrationGroupMember.self) { t in
try db.create(table: GroupMember.self) { t in
// Note: Since we don't know whether this will be stored against a 'ClosedGroup' or
// an 'OpenGroup' we add the foreign key constraint against the thread itself (which
// shares the same 'id' as the 'groupId') so we can cascade delete automatically

View file

@ -22,34 +22,34 @@ enum _002_SetupStandardJobs: Migration {
variant: .disappearingMessages,
behaviour: .recurringOnLaunch,
shouldBlock: true
).inserted(db)
).migrationSafeInserted(db)
_ = try Job(
variant: .failedMessageSends,
behaviour: .recurringOnLaunch,
shouldBlock: true
).inserted(db)
).migrationSafeInserted(db)
_ = try Job(
variant: .failedAttachmentDownloads,
behaviour: .recurringOnLaunch,
shouldBlock: true
).inserted(db)
).migrationSafeInserted(db)
_ = try Job(
variant: .updateProfilePicture,
behaviour: .recurringOnActive
).inserted(db)
).migrationSafeInserted(db)
_ = try Job(
variant: .retrieveDefaultOpenGroupRooms,
behaviour: .recurringOnActive
).inserted(db)
).migrationSafeInserted(db)
_ = try Job(
variant: .garbageCollection,
behaviour: .recurringOnActive
).inserted(db)
).migrationSafeInserted(db)
}
Storage.update(progress: 1, for: self, in: target) // In case this is the last migration
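The migrationSafeInserted/migrationSafeInsert/migrationSafeSave helpers used throughout these migrations come from the PersistableRecord+Utilities.swift file (and its PersistableRecordUtilitiesSpec) added by this PR; their implementation is not shown in this excerpt. Judging by the GroupMember change later in this diff ("Ignored: Didn't exist at time of migration"), they appear to insert only the columns that exist in the table at that point in the migration history, so old migrations keep working after the model gains new columns. A purely illustrative sketch of the failure mode they guard against, written with plain GRDB and hypothetical values:

import GRDB

// Inside a migration body: naming only the columns that existed when the migration
// was written keeps it stable even if the Job model later gains extra properties
func insertLegacyJob(_ db: Database) throws {
    try db.execute(
        sql: """
            INSERT INTO job (variant, behaviour, shouldBlock)
            VALUES (?, ?, ?)
        """,
        arguments: [1, 0, true] // hypothetical raw values for the example
    )
}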

View file

@ -422,7 +422,7 @@ enum _003_YDBToGRDBMigration: Migration {
profilePictureUrl: legacyContact.profilePictureURL,
profilePictureFileName: legacyContact.profilePictureFileName,
profileEncryptionKey: legacyContact.profileEncryptionKey
).insert(db)
).migrationSafeInsert(db)
/// **Note:** The below "shouldForce" flags are here to allow us to avoid having to run legacy migrations as they
/// replicate the behaviour of a number of the migrations and perform the changes if the migrations had never run
@ -490,7 +490,7 @@ enum _003_YDBToGRDBMigration: Migration {
shouldForceDidApproveMe
),
hasBeenBlocked: (!isCurrentUser && (legacyContact.hasBeenBlocked || legacyContact.isBlocked))
).insert(db)
).migrationSafeInsert(db)
}
// Increment the progress for each contact
@ -587,7 +587,7 @@ enum _003_YDBToGRDBMigration: Migration {
),
mutedUntilTimestamp: legacyThread.mutedUntilDate?.timeIntervalSince1970,
onlyNotifyForMentions: onlyNotifyForMentions
).insert(db)
).migrationSafeInsert(db)
// Disappearing Messages Configuration
if let config: SMKLegacy._DisappearingMessagesConfiguration = disappearingMessagesConfiguration[threadId] {
@ -595,12 +595,12 @@ enum _003_YDBToGRDBMigration: Migration {
threadId: threadId,
isEnabled: config.isEnabled,
durationSeconds: TimeInterval(config.durationSeconds)
).insert(db)
).migrationSafeInsert(db)
}
else {
try DisappearingMessagesConfiguration
.defaultWith(threadId)
.insert(db)
.migrationSafeInsert(db)
}
// Closed Groups
@ -618,7 +618,7 @@ enum _003_YDBToGRDBMigration: Migration {
threadId: threadId,
name: name,
formationTimestamp: TimeInterval(formationTimestamp)
).insert(db)
).migrationSafeInsert(db)
// Note: If a user has left a closed group then they won't actually have any keys
// but they should still be able to browse the old messages so we do want to allow
@ -629,7 +629,7 @@ enum _003_YDBToGRDBMigration: Migration {
publicKey: legacyKeys.publicKey,
secretKey: legacyKeys.privateKey,
receivedTimestamp: timestamp
).insert(db)
).migrationSafeInsert(db)
}
// Create the 'GroupMember' models for the group (even if the current user is no longer
@ -643,15 +643,16 @@ enum _003_YDBToGRDBMigration: Migration {
try? Profile(
id: profileId,
name: profileId
).save(db)
).migrationSafeSave(db)
}
try groupModel.groupMemberIds.forEach { memberId in
try _006_FixHiddenModAdminSupport.PreMigrationGroupMember(
try GroupMember(
groupId: threadId,
profileId: memberId,
role: .standard
).insert(db)
role: .standard,
isHidden: false // Ignored: Didn't exist at time of migration
).migrationSafeInsert(db)
if !validProfileIds.contains(memberId) {
createDummyProfile(profileId: memberId)
@ -659,11 +660,12 @@ enum _003_YDBToGRDBMigration: Migration {
}
try groupModel.groupAdminIds.forEach { adminId in
try _006_FixHiddenModAdminSupport.PreMigrationGroupMember(
try GroupMember(
groupId: threadId,
profileId: adminId,
role: .admin
).insert(db)
role: .admin,
isHidden: false // Ignored: Didn't exist at time of migration
).migrationSafeInsert(db)
if !validProfileIds.contains(adminId) {
createDummyProfile(profileId: adminId)
@ -671,11 +673,12 @@ enum _003_YDBToGRDBMigration: Migration {
}
try (closedGroupZombieMemberIds[legacyThread.uniqueId] ?? []).forEach { zombieId in
try _006_FixHiddenModAdminSupport.PreMigrationGroupMember(
try GroupMember(
groupId: threadId,
profileId: zombieId,
role: .zombie
).insert(db)
role: .zombie,
isHidden: false // Ignored: Didn't exist at time of migration
).migrationSafeInsert(db)
if !validProfileIds.contains(zombieId) {
createDummyProfile(profileId: zombieId)
@ -707,7 +710,8 @@ enum _003_YDBToGRDBMigration: Migration {
sequenceNumber: 0,
inboxLatestMessageId: 0,
outboxLatestMessageId: 0
).insert(db)
)
.migrationSafeInsert(db)
}
}
@ -930,7 +934,7 @@ enum _003_YDBToGRDBMigration: Migration {
openGroupServerMessageId: openGroupServerMessageId,
openGroupWhisperMods: false,
openGroupWhisperTo: nil
).inserted(db)
).migrationSafeInserted(db)
}
catch {
switch error {
@ -950,7 +954,7 @@ enum _003_YDBToGRDBMigration: Migration {
threadId: threadId,
variant: variant,
timestampMs: Int64.zeroingOverflow(legacyInteraction.timestamp)
)?.insert(db)
)?.migrationSafeInsert(db)
// Remove timestamps we created records for (they will be protected by unique
// constraints so don't need legacy process records)
@ -1012,7 +1016,7 @@ enum _003_YDBToGRDBMigration: Migration {
mostRecentFailureText :
nil
)
).save(db)
).migrationSafeSave(db)
}
// Handle any quote
@ -1045,7 +1049,7 @@ enum _003_YDBToGRDBMigration: Migration {
try Profile(
id: quotedMessage.authorId,
name: quotedMessage.authorId
).save(db)
).migrationSafeSave(db)
}
// Note: It looks like there is a way for a quote to not have its
@ -1093,7 +1097,7 @@ enum _003_YDBToGRDBMigration: Migration {
timestampMs: Int64.zeroingOverflow(quotedMessage.timestamp),
body: quotedMessage.body,
attachmentId: attachmentId
).insert(db)
).migrationSafeInsert(db)
}
// Handle any LinkPreview
@ -1120,7 +1124,7 @@ enum _003_YDBToGRDBMigration: Migration {
variant: linkPreviewVariant,
title: linkPreview.title,
attachmentId: attachmentId
).save(db)
).migrationSafeSave(db)
}
// Handle any attachments
@ -1156,7 +1160,7 @@ enum _003_YDBToGRDBMigration: Migration {
albumIndex: index,
interactionId: interactionId,
attachmentId: attachmentId
).insert(db)
).migrationSafeInsert(db)
}
// Increment the progress for each contact
@ -1225,7 +1229,7 @@ enum _003_YDBToGRDBMigration: Migration {
timestampMs: legacyJob.message.timestamp
)
)
)?.inserted(db)
)?.migrationSafeInserted(db)
}
}
@ -1256,7 +1260,7 @@ enum _003_YDBToGRDBMigration: Migration {
messages: [processedMessage.messageInfo],
calledFromBackgroundPoller: legacyJob.isBackgroundPoll
)
)?.inserted(db)
)?.migrationSafeInserted(db)
}
}
@ -1346,7 +1350,7 @@ enum _003_YDBToGRDBMigration: Migration {
destination: legacyJob.destination,
message: legacyJob.message.toNonLegacy()
)
)?.inserted(db)
)?.migrationSafeInserted(db)
if let oldId: String = legacyJob.id {
messageSendJobLegacyMap[oldId] = job
@ -1373,7 +1377,7 @@ enum _003_YDBToGRDBMigration: Migration {
messageSendJobId: sendJobId,
attachmentId: legacyJob.attachmentID
)
)?.inserted(db)
)?.migrationSafeInserted(db)
// Add the dependency to the relevant MessageSendJob
guard let uploadJobId: Int64 = uploadJob?.id else {
@ -1384,7 +1388,7 @@ enum _003_YDBToGRDBMigration: Migration {
try JobDependencies(
jobId: sendJobId,
dependantId: uploadJobId
).insert(db)
).migrationSafeInsert(db)
}
}
@ -1413,7 +1417,7 @@ enum _003_YDBToGRDBMigration: Migration {
details: AttachmentDownloadJob.Details(
attachmentId: legacyJob.attachmentID
)
)?.inserted(db)
)?.migrationSafeInserted(db)
}
}
@ -1429,7 +1433,7 @@ enum _003_YDBToGRDBMigration: Migration {
destination: .contact(publicKey: threadId),
timestampMsValues: timestampsMs
)
)?.inserted(db)
)?.migrationSafeInserted(db)
}
}
Storage.update(progress: 0.99, for: self, in: target)
@ -1625,7 +1629,7 @@ enum _003_YDBToGRDBMigration: Migration {
}
}(),
caption: legacyAttachment.caption
).inserted(db)
).migrationSafeInserted(db)
processedAttachmentIds.insert(legacyAttachmentId)
@ -1664,7 +1668,7 @@ enum _003_YDBToGRDBMigration: Migration {
encryptionKey: nil,
digest: nil,
caption: nil
).inserted(db)
).migrationSafeInserted(db)
processedAttachmentIds.insert(legacyAttachmentId)

View file

@ -28,41 +28,3 @@ enum _006_FixHiddenModAdminSupport: Migration {
Storage.update(progress: 1, for: self, in: target) // In case this is the last migration
}
}
// MARK: - Pre-Migration Types
extension _006_FixHiddenModAdminSupport {
internal struct PreMigrationGroupMember: Codable, PersistableRecord, TableRecord, ColumnExpressible {
public static var databaseTableName: String { "groupMember" }
public typealias Columns = CodingKeys
public enum CodingKeys: String, CodingKey, ColumnExpression {
case groupId
case profileId
case role
}
public enum Role: Int, Codable, DatabaseValueConvertible {
case standard
case zombie
case moderator
case admin
}
public let groupId: String
public let profileId: String
public let role: Role
// MARK: - Initialization
public init(
groupId: String,
profileId: String,
role: Role
) {
self.groupId = groupId
self.profileId = profileId
self.role = role
}
}
}

View file

@ -113,7 +113,9 @@ public struct ControlMessageProcessRecord: Codable, FetchableRecord, Persistable
self.serverExpirationTimestamp = serverExpirationTimestamp
}
public func insert(_ db: Database) throws {
// MARK: - Custom Database Interaction
public func willInsert(_ db: Database) throws {
// If this isn't a legacy entry then check if there is a single entry and, if so,
// try to create a "legacy entry" version of this record to see if a unique constraint
// conflict occurs
@ -132,8 +134,6 @@ public struct ControlMessageProcessRecord: Codable, FetchableRecord, Persistable
).insert(db)
}
}
try performInsert(db)
}
}

View file

@ -316,21 +316,22 @@ public struct Interaction: Codable, Identifiable, Equatable, FetchableRecord, Mu
// MARK: - Custom Database Interaction
public mutating func insert(_ db: Database) throws {
public mutating func willInsert(_ db: Database) throws {
// Automatically mark interactions which can't be unread as read so the unread count
// isn't impacted
self.wasRead = (self.wasRead || !self.variant.canBeUnread)
}
public func aroundInsert(_ db: Database, insert: () throws -> InsertionSuccess) throws {
let success: InsertionSuccess = try insert()
try performInsert(db)
// Since we need to do additional logic upon insert we can just set the 'id' value
// here directly instead of in the 'didInsert' method (if you look at the docs the
// 'db.lastInsertedRowID' value is the row id of the newly inserted row which the
// interaction uses as its id)
let interactionId: Int64 = db.lastInsertedRowID
self.id = interactionId
guard let thread: SessionThread = try? SessionThread.fetchOne(db, id: threadId) else {
guard
let threadVariant: SessionThread.Variant = try? SessionThread
.filter(id: threadId)
.select(.variant)
.asRequest(of: SessionThread.Variant.self)
.fetchOne(db)
else {
SNLog("Inserted an interaction but couldn't find it's associated thead")
return
}
@ -339,10 +340,10 @@ public struct Interaction: Codable, Identifiable, Equatable, FetchableRecord, Mu
case .standardOutgoing:
// New outgoing messages should immediately determine their recipient list
// from current thread state
switch thread.variant {
switch threadVariant {
case .contact:
try RecipientState(
interactionId: interactionId,
interactionId: success.rowID,
recipientId: threadId, // Will be the contact id
state: .sending
).insert(db)
@ -350,7 +351,7 @@ public struct Interaction: Codable, Identifiable, Equatable, FetchableRecord, Mu
case .closedGroup:
let closedGroupMemberIds: Set<String> = (try? GroupMember
.select(.profileId)
.filter(GroupMember.Columns.groupId == thread.id)
.filter(GroupMember.Columns.groupId == threadId)
.asRequest(of: String.self)
.fetchSet(db))
.defaulting(to: [])
@ -367,7 +368,7 @@ public struct Interaction: Codable, Identifiable, Equatable, FetchableRecord, Mu
.filter { memberId -> Bool in memberId != userPublicKey }
.forEach { memberId in
try RecipientState(
interactionId: interactionId,
interactionId: success.rowID,
recipientId: memberId,
state: .sending
).insert(db)
@ -378,7 +379,7 @@ public struct Interaction: Codable, Identifiable, Equatable, FetchableRecord, Mu
// we need to ensure we have a state for all threads; so for open groups
// we just use the open group id as the 'recipientId' value
try RecipientState(
interactionId: interactionId,
interactionId: success.rowID,
recipientId: threadId, // Will be the open group id
state: .sending
).insert(db)
@ -387,6 +388,10 @@ public struct Interaction: Codable, Identifiable, Equatable, FetchableRecord, Mu
default: break
}
}
public mutating func didInsert(_ inserted: InsertionSuccess) {
self.id = inserted.rowID
}
}
// MARK: - Mutation
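Both ControlMessageProcessRecord and Interaction move from overriding insert(_:) and calling performInsert(db) (the GRDB 5 pattern) to GRDB 6's persistence callbacks: willInsert mutates the record before the INSERT, aroundInsert wraps the statement and hands back an InsertionSuccess, and didInsert receives the generated row id. A minimal self-contained sketch of the pattern with illustrative record and column names (not the app's):

import GRDB

struct ExampleMessage: Codable, MutablePersistableRecord {
    static let databaseTableName: String = "exampleMessage"

    var id: Int64?
    var body: String
    var wasRead: Bool

    // Runs before the INSERT (previously done by overriding insert(_:))
    mutating func willInsert(_ db: Database) throws {
        wasRead = false
    }

    // Wraps the INSERT itself; the 'insert' closure must be called exactly once
    func aroundInsert(_ db: Database, insert: () throws -> InsertionSuccess) throws {
        let success: InsertionSuccess = try insert()

        // Dependent rows can be created here using success.rowID, as Interaction does above
        _ = success.rowID
    }

    // Runs after a successful INSERT (replaces reading db.lastInsertedRowID by hand)
    mutating func didInsert(_ inserted: InsertionSuccess) {
        self.id = inserted.rowID
    }
}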

View file

@ -125,9 +125,7 @@ public struct SessionThread: Codable, Identifiable, Equatable, FetchableRecord,
// MARK: - Custom Database Interaction
public func insert(_ db: Database) throws {
try performInsert(db)
public func willInsert(_ db: Database) throws {
db[.hasSavedThread] = true
}
}
@ -350,73 +348,3 @@ public extension SessionThread {
}
}
}
// MARK: - Objective-C Support
// FIXME: Remove when possible
@objc(SMKThread)
public class SMKThread: NSObject {
@objc(deleteAll)
public static func deleteAll() {
Storage.shared.writeAsync { db in
_ = try SessionThread.deleteAll(db)
}
}
@objc(isThreadMuted:)
public static func isThreadMuted(_ threadId: String) -> Bool {
return Storage.shared.read { db in
let mutedUntilTimestamp: TimeInterval? = try SessionThread
.select(SessionThread.Columns.mutedUntilTimestamp)
.filter(id: threadId)
.asRequest(of: TimeInterval?.self)
.fetchOne(db)
return (mutedUntilTimestamp != nil)
}
.defaulting(to: false)
}
@objc(isOnlyNotifyingForMentions:)
public static func isOnlyNotifyingForMentions(_ threadId: String) -> Bool {
return Storage.shared.read { db in
return try SessionThread
.select(SessionThread.Columns.onlyNotifyForMentions)
.filter(id: threadId)
.asRequest(of: Bool.self)
.fetchOne(db)
}
.defaulting(to: false)
}
@objc(setIsOnlyNotifyingForMentions:to:)
public static func isOnlyNotifyingForMentions(_ threadId: String, isEnabled: Bool) {
Storage.shared.write { db in
try SessionThread
.filter(id: threadId)
.updateAll(db, SessionThread.Columns.onlyNotifyForMentions.set(to: isEnabled))
}
}
@objc(mutedUntilDateFor:)
public static func mutedUntilDateFor(_ threadId: String) -> Date? {
return Storage.shared.read { db in
return try SessionThread
.select(SessionThread.Columns.mutedUntilTimestamp)
.filter(id: threadId)
.asRequest(of: TimeInterval.self)
.fetchOne(db)
}
.map { Date(timeIntervalSince1970: $0) }
}
@objc(updateWithMutedUntilDateTo:forThreadId:)
public static func updateWithMutedUntilDate(to date: Date?, threadId: String) {
Storage.shared.write { db in
try SessionThread
.filter(id: threadId)
.updateAll(db, SessionThread.Columns.mutedUntilTimestamp.set(to: date?.timeIntervalSince1970))
}
}
}

View file

@ -190,7 +190,6 @@ public enum OpenGroupAPI {
_ db: Database,
server: String,
requests: [BatchRequestInfoType],
authenticated: Bool = true,
using dependencies: SMKDependencies = SMKDependencies()
) -> Promise<[Endpoint: (OnionRequestResponseInfoType, Codable?)]> {
let requestBody: BatchRequest = requests.map { $0.toSubRequest() }
@ -205,7 +204,6 @@ public enum OpenGroupAPI {
endpoint: Endpoint.sequence,
body: requestBody
),
authenticated: authenticated,
using: dependencies
)
.decoded(as: responseTypes, on: OpenGroupAPI.workQueue, using: dependencies)
@ -229,7 +227,6 @@ public enum OpenGroupAPI {
public static func capabilities(
_ db: Database,
server: String,
authenticated: Bool = true,
using dependencies: SMKDependencies = SMKDependencies()
) -> Promise<(OnionRequestResponseInfoType, Capabilities)> {
return OpenGroupAPI
@ -239,7 +236,6 @@ public enum OpenGroupAPI {
server: server,
endpoint: .capabilities
),
authenticated: authenticated,
using: dependencies
)
.decoded(as: Capabilities.self, on: OpenGroupAPI.workQueue, using: dependencies)
@ -325,7 +321,6 @@ public enum OpenGroupAPI {
_ db: Database,
for roomToken: String,
on server: String,
authenticated: Bool = true,
using dependencies: SMKDependencies = SMKDependencies()
) -> Promise<(capabilities: (info: OnionRequestResponseInfoType, data: Capabilities), room: (info: OnionRequestResponseInfoType, data: Room))> {
let requestResponseType: [BatchRequestInfoType] = [
@ -353,7 +348,6 @@ public enum OpenGroupAPI {
db,
server: server,
requests: requestResponseType,
authenticated: authenticated,
using: dependencies
)
.map { (response: [Endpoint: (OnionRequestResponseInfoType, Codable?)]) -> (capabilities: (OnionRequestResponseInfoType, Capabilities), room: (OnionRequestResponseInfoType, Room)) in
@ -391,7 +385,6 @@ public enum OpenGroupAPI {
public static func capabilitiesAndRooms(
_ db: Database,
on server: String,
authenticated: Bool = true,
using dependencies: SMKDependencies = SMKDependencies()
) -> Promise<(capabilities: (info: OnionRequestResponseInfoType, data: Capabilities), rooms: (info: OnionRequestResponseInfoType, data: [Room]))> {
let requestResponseType: [BatchRequestInfoType] = [
@ -419,7 +412,6 @@ public enum OpenGroupAPI {
db,
server: server,
requests: requestResponseType,
authenticated: authenticated,
using: dependencies
)
.map { (response: [Endpoint: (OnionRequestResponseInfoType, Codable?)]) -> (capabilities: (OnionRequestResponseInfoType, Capabilities), rooms: (OnionRequestResponseInfoType, [Room])) in
@ -1286,8 +1278,8 @@ public enum OpenGroupAPI {
.fetchSet(db))
.defaulting(to: [])
// Check if the server supports blinded keys, if so then sign using the blinded key
if capabilities.contains(.blind) {
// If we have no capabilities or if the server supports blinded keys then sign using the blinded key
if capabilities.isEmpty || capabilities.contains(.blind) {
guard let blindedKeyPair: Box.KeyPair = dependencies.sodium.blindedKeyPair(serverPublicKey: serverPublicKey, edKeyPair: userEdKeyPair, genericHash: dependencies.genericHash) else {
return nil
}
@ -1394,7 +1386,6 @@ public enum OpenGroupAPI {
private static func send<T: Encodable>(
_ db: Database,
request: Request<T, Endpoint>,
authenticated: Bool = true,
using dependencies: SMKDependencies = SMKDependencies()
) -> Promise<(OnionRequestResponseInfoType, Data?)> {
let urlRequest: URLRequest
@ -1414,11 +1405,6 @@ public enum OpenGroupAPI {
guard let publicKey: String = maybePublicKey else { return Promise(error: OpenGroupAPIError.noPublicKey) }
// If we don't want to authenticate the request then send it immediately
guard authenticated else {
return dependencies.onionApi.sendOnionRequest(urlRequest, to: request.server, with: publicKey)
}
// Attempt to sign the request with the new auth
guard let signedRequest: URLRequest = sign(db, request: urlRequest, for: request.server, with: publicKey, using: dependencies) else {
return Promise(error: OpenGroupAPIError.signingFailed)

View file

@ -252,7 +252,6 @@ public final class OpenGroupManager: NSObject {
db,
for: roomToken,
on: targetServer,
authenticated: false,
using: dependencies
)
}
@ -469,7 +468,7 @@ public final class OpenGroupManager: NSObject {
}
}
db.afterNextTransactionCommit { db in
db.afterNextTransaction { db in
// Start the poller if needed
if dependencies.cache.pollers[server.lowercased()] == nil {
dependencies.mutableCache.mutate {
@ -935,7 +934,6 @@ public final class OpenGroupManager: NSObject {
OpenGroupAPI.capabilitiesAndRooms(
db,
on: OpenGroupAPI.defaultServer,
authenticated: false,
using: dependencies
)
}

View file

@ -376,7 +376,7 @@ public enum MessageReceiver {
// Download the profile picture if needed
if updatedProfile.profilePictureUrl != profile.profilePictureUrl || updatedProfile.profileEncryptionKey != profile.profileEncryptionKey {
db.afterNextTransactionCommit { _ in
db.afterNextTransaction { _ in
ProfileManager.downloadAvatar(for: updatedProfile)
}
}

View file

@ -361,7 +361,7 @@ public final class MessageSender {
.defaulting(to: [])
// If the server doesn't support blinding then go with an unblinded id
guard capabilities.contains(.blind) else {
guard capabilities.isEmpty || capabilities.contains(.blind) else {
return SessionId(.unblinded, publicKey: userEdKeyPair.publicKey).hexString
}
guard let blindedKeyPair: Box.KeyPair = dependencies.sodium.blindedKeyPair(serverPublicKey: openGroup.publicKey, edKeyPair: userEdKeyPair, genericHash: dependencies.genericHash) else {

View file

@ -213,7 +213,6 @@ extension OpenGroupAPI {
OpenGroupAPI.capabilities(
db,
server: server,
authenticated: false,
using: dependencies
)
}

View file

@ -38,6 +38,8 @@ public extension MentionInfo {
let request: SQLRequest<MentionInfo> = {
guard let pattern: FTS5Pattern = pattern else {
let finalLimitSQL: SQL = (limitSQL ?? "")
return """
SELECT
\(Profile.self).*,
@ -61,12 +63,13 @@ public extension MentionInfo {
)
GROUP BY \(profile[.id])
ORDER BY \(interaction[.timestampMs].desc)
\(limitSQL ?? "")
\(finalLimitSQL)
"""
}
// If we do have a search pattern then use FTS
let matchLiteral: SQL = SQL(stringLiteral: "\(Profile.Columns.nickname.name):\(pattern.rawPattern) OR \(Profile.Columns.name.name):\(pattern.rawPattern)")
let finalLimitSQL: SQL = (limitSQL ?? "")
return """
SELECT
@ -93,7 +96,7 @@ public extension MentionInfo {
WHERE \(profileFullTextSearch) MATCH '\(matchLiteral)'
GROUP BY \(profile[.id])
ORDER BY \(interaction[.timestampMs].desc)
\(limitSQL ?? "")
\(finalLimitSQL)
"""
}()

View file

@ -650,6 +650,7 @@ public extension MessageViewModel {
let groupMemberRoleColumnLiteral: SQL = SQL(stringLiteral: GroupMember.Columns.role.name)
let numColumnsBeforeLinkedRecords: Int = 20
let finalGroupSQL: SQL = (groupSQL ?? "")
let request: SQLRequest<ViewModel> = """
SELECT
\(thread[.id]) AS \(ViewModel.threadIdKey),
@ -736,7 +737,7 @@ public extension MessageViewModel {
\(SQL("\(groupMemberAdminTableLiteral).\(groupMemberRoleColumnLiteral) = \(GroupMember.Role.admin)"))
)
WHERE \(interaction.alias[Column.rowID]) IN \(rowIds)
\(groupSQL ?? "")
\(finalGroupSQL)
ORDER BY \(orderSQL)
"""

View file

@ -34,7 +34,7 @@ class OpenGroupAPISpec: QuickSpec {
beforeEach {
mockStorage = Storage(
customWriter: DatabaseQueue(),
customWriter: try! DatabaseQueue(),
customMigrations: [
SNUtilitiesKit.migrations(),
SNMessagingKit.migrations()

View file

@ -100,7 +100,7 @@ class OpenGroupManagerSpec: QuickSpec {
mockOGMCache = MockOGMCache()
mockGeneralCache = MockGeneralCache()
mockStorage = Storage(
customWriter: DatabaseQueue(),
customWriter: try! DatabaseQueue(),
customMigrations: [
SNUtilitiesKit.migrations(),
SNMessagingKit.migrations()
@ -500,7 +500,7 @@ class OpenGroupManagerSpec: QuickSpec {
db,
roomToken: "testRoom",
server: "testServer",
publicKey: "testKey",
publicKey: TestConstants.serverPublicKey,
dependencies: dependencies
)
}
@ -515,7 +515,7 @@ class OpenGroupManagerSpec: QuickSpec {
db,
roomToken: "testRoom",
server: "http://testServer",
publicKey: "testKey",
publicKey: TestConstants.serverPublicKey,
dependencies: dependencies
)
}
@ -530,7 +530,7 @@ class OpenGroupManagerSpec: QuickSpec {
db,
roomToken: "testRoom",
server: "https://testServer",
publicKey: "testKey",
publicKey: TestConstants.serverPublicKey,
dependencies: dependencies
)
}
@ -551,7 +551,7 @@ class OpenGroupManagerSpec: QuickSpec {
db,
roomToken: "testRoom",
server: "testServer",
publicKey: "testKey",
publicKey: TestConstants.serverPublicKey,
dependencies: dependencies
)
}
@ -566,7 +566,7 @@ class OpenGroupManagerSpec: QuickSpec {
db,
roomToken: "testRoom",
server: "http://testServer",
publicKey: "testKey",
publicKey: TestConstants.serverPublicKey,
dependencies: dependencies
)
}
@ -581,7 +581,7 @@ class OpenGroupManagerSpec: QuickSpec {
db,
roomToken: "testRoom",
server: "https://testServer",
publicKey: "testKey",
publicKey: TestConstants.serverPublicKey,
dependencies: dependencies
)
}
@ -602,7 +602,7 @@ class OpenGroupManagerSpec: QuickSpec {
db,
roomToken: "testRoom",
server: "testServer",
publicKey: "testKey",
publicKey: TestConstants.serverPublicKey,
dependencies: dependencies
)
}
@ -617,7 +617,7 @@ class OpenGroupManagerSpec: QuickSpec {
db,
roomToken: "testRoom",
server: "http://testServer",
publicKey: "testKey",
publicKey: TestConstants.serverPublicKey,
dependencies: dependencies
)
}
@ -632,7 +632,7 @@ class OpenGroupManagerSpec: QuickSpec {
db,
roomToken: "testRoom",
server: "https://testServer",
publicKey: "testKey",
publicKey: TestConstants.serverPublicKey,
dependencies: dependencies
)
}
@ -665,7 +665,7 @@ class OpenGroupManagerSpec: QuickSpec {
db,
roomToken: "testRoom",
server: "http://open.getsession.org",
publicKey: "testKey",
publicKey: TestConstants.serverPublicKey,
dependencies: dependencies
)
}
@ -697,7 +697,7 @@ class OpenGroupManagerSpec: QuickSpec {
db,
roomToken: "testRoom",
server: "http://116.203.70.33",
publicKey: "testKey",
publicKey: TestConstants.serverPublicKey,
dependencies: dependencies
)
}
@ -715,7 +715,7 @@ class OpenGroupManagerSpec: QuickSpec {
db,
roomToken: "testRoom",
server: "%%%",
publicKey: "testKey",
publicKey: TestConstants.serverPublicKey,
dependencies: dependencies
)
}
@ -732,7 +732,7 @@ class OpenGroupManagerSpec: QuickSpec {
db,
roomToken: "testRoom",
server: "testServer",
publicKey: "testKey",
publicKey: TestConstants.serverPublicKey,
dependencies: dependencies
)
}
@ -752,7 +752,7 @@ class OpenGroupManagerSpec: QuickSpec {
db,
roomToken: "testRoom",
server: "testServer",
publicKey: "testKey",
publicKey: TestConstants.serverPublicKey,
dependencies: dependencies
)
}
@ -785,7 +785,7 @@ class OpenGroupManagerSpec: QuickSpec {
db,
roomToken: "testRoom",
server: "testServer",
publicKey: "testKey",
publicKey: TestConstants.serverPublicKey,
isConfigMessage: false,
dependencies: dependencies
)
@ -816,7 +816,7 @@ class OpenGroupManagerSpec: QuickSpec {
db,
roomToken: "testRoom",
server: "testServer",
publicKey: "testKey",
publicKey: TestConstants.serverPublicKey,
isConfigMessage: false,
dependencies: dependencies
)
@ -853,7 +853,9 @@ class OpenGroupManagerSpec: QuickSpec {
db,
roomToken: "testRoom",
server: "testServer",
publicKey: "testKey",
publicKey: TestConstants.serverPublicKey
.replacingOccurrences(of: "c3", with: "00")
.replacingOccurrences(of: "b3", with: "00"),
isConfigMessage: false,
dependencies: dependencies
)
@ -905,7 +907,7 @@ class OpenGroupManagerSpec: QuickSpec {
db,
roomToken: "testRoom",
server: "testServer",
publicKey: "testKey",
publicKey: TestConstants.serverPublicKey,
isConfigMessage: false,
dependencies: dependencies
)

View file

@ -26,7 +26,7 @@ class MessageReceiverDecryptionSpec: QuickSpec {
describe("a MessageReceiver") {
beforeEach {
mockStorage = Storage(
customWriter: DatabaseQueue(),
customWriter: try! DatabaseQueue(),
customMigrations: [
SNUtilitiesKit.migrations(),
SNMessagingKit.migrations()

View file

@ -23,7 +23,7 @@ class MessageSenderEncryptionSpec: QuickSpec {
describe("a MessageSender") {
beforeEach {
mockStorage = Storage(
customWriter: DatabaseQueue(),
customWriter: try! DatabaseQueue(),
customMigrations: [
SNUtilitiesKit.migrations(),
SNMessagingKit.migrations()

View file

@ -18,7 +18,7 @@ enum _002_SetupStandardJobs: Migration {
variant: .getSnodePool,
behaviour: .recurringOnLaunch,
shouldBlock: true
).inserted(db)
).migrationSafeInserted(db)
// Note: We also want this job to run both onLaunch and onActive as we want it to block
// 'onLaunch', and 'onActive' doesn't support blocking jobs
@ -26,7 +26,7 @@ enum _002_SetupStandardJobs: Migration {
variant: .getSnodePool,
behaviour: .recurringOnActive,
shouldSkipLaunchBecomeActive: true
).inserted(db)
).migrationSafeInserted(db)
}
Storage.update(progress: 1, for: self, in: target) // In case this is the last migration
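For context on why these migrations move off the plain GRDB helpers: an early migration keeps running on fresh installs, but the Codable model it inserts keeps evolving, so a property added by a later migration has no backing column at the point the early migration runs and a plain insert throws. A minimal sketch of the failure mode and the fix, assuming a hypothetical LegacyJob record and the migrationSafeInsert helper introduced later in this diff:

import GRDB
// Assumes the migrationSafe helpers from SessionUtilitiesKit (added later in this diff)

// Hypothetical record: `newColumn` was added to the Swift model by a later
// migration, so the column does not exist yet when this migration runs
struct LegacyJob: Codable, MutablePersistableRecord {
    static let databaseTableName = "job"
    var variant: Int
    var newColumn: String?
}

var migrator = DatabaseMigrator()
migrator.registerMigration("002_setupStandardJobs") { db in
    try db.create(table: "job") { t in
        t.column("variant", .integer)
        // no "newColumn" yet; a later migration adds it
    }

    // A plain `insert` encodes every Codable property, including `newColumn`,
    // so the generated INSERT references a missing column and throws here:
    // try LegacyJob(variant: 1, newColumn: nil).insert(db)

    // The migration-safe variant filters the encoded keys down to the columns
    // that exist right now, so it succeeds on a fresh install:
    try LegacyJob(variant: 1, newColumn: nil).migrationSafeInsert(db)
}

let dbQueue = try DatabaseQueue()
try migrator.migrate(dbQueue)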

View file

@ -159,7 +159,7 @@ enum _003_YDBToGRDBMigration: Migration {
port: legacySnode.port,
ed25519PublicKey: legacySnode.publicKeySet.ed25519Key,
x25519PublicKey: legacySnode.publicKeySet.x25519Key
).insert(db)
).migrationSafeInsert(db)
}
Storage.update(progress: 0.96, for: self, in: target)
@ -173,7 +173,7 @@ enum _003_YDBToGRDBMigration: Migration {
nodeIndex: nodeIndex,
address: legacySnode.address,
port: legacySnode.port
).insert(db)
).migrationSafeInsert(db)
}
}
Storage.update(progress: 0.98, for: self, in: target)
@ -188,7 +188,7 @@ enum _003_YDBToGRDBMigration: Migration {
key: key,
hash: hash,
expirationDateMs: SnodeReceivedMessage.defaultExpirationSeconds
).inserted(db)
).migrationSafeInserted(db)
}
}
Storage.update(progress: 0.99, for: self, in: target)
@ -205,7 +205,7 @@ enum _003_YDBToGRDBMigration: Migration {
expirationDateMs :
SnodeReceivedMessage.defaultExpirationSeconds
)
).inserted(db)
).migrationSafeInserted(db)
}
}

View file

@ -23,7 +23,7 @@ class ThreadDisappearingMessagesViewModelSpec: QuickSpec {
beforeEach {
mockStorage = Storage(
customWriter: DatabaseQueue(),
customWriter: try! DatabaseQueue(),
customMigrations: [
SNUtilitiesKit.migrations(),
SNSnodeKit.migrations(),

View file

@ -25,7 +25,7 @@ class ThreadSettingsViewModelSpec: QuickSpec {
beforeEach {
mockStorage = SynchronousStorage(
customWriter: DatabaseQueue(),
customWriter: try! DatabaseQueue(),
customMigrations: [
SNUtilitiesKit.migrations(),
SNSnodeKit.migrations(),

View file

@ -21,7 +21,7 @@ class NotificationContentViewModelSpec: QuickSpec {
beforeEach {
mockStorage = Storage(
customWriter: DatabaseQueue(),
customWriter: try! DatabaseQueue(),
customMigrations: [
SNUtilitiesKit.migrations(),
SNSnodeKit.migrations(),

View file

@ -18,7 +18,7 @@ enum _002_SetupStandardJobs: Migration {
_ = try Job(
variant: .syncPushTokens,
behaviour: .recurringOnLaunch
).inserted(db)
).migrationSafeInserted(db)
// Note: We actually need this job to run both onLaunch and onActive as the logic differs
// slightly and there are cases where a user might not be registered in 'onLaunch' but is
@ -27,7 +27,7 @@ enum _002_SetupStandardJobs: Migration {
variant: .syncPushTokens,
behaviour: .recurringOnActive,
shouldSkipLaunchBecomeActive: true
).inserted(db)
).migrationSafeInserted(db)
}
Storage.update(progress: 1, for: self, in: target) // In case this is the last migration

View file

@ -91,27 +91,27 @@ enum _003_YDBToGRDBMigration: Migration {
try Identity(
variant: .seed,
data: Data(hex: seedHexString)
).insert(db)
).migrationSafeInsert(db)
try Identity(
variant: .ed25519SecretKey,
data: Data(hex: userEd25519SecretKeyHexString)
).insert(db)
).migrationSafeInsert(db)
try Identity(
variant: .ed25519PublicKey,
data: Data(hex: userEd25519PublicKeyHexString)
).insert(db)
).migrationSafeInsert(db)
try Identity(
variant: .x25519PrivateKey,
data: userX25519KeyPair.privateKey
).insert(db)
).migrationSafeInsert(db)
try Identity(
variant: .x25519PublicKey,
data: userX25519KeyPair.publicKey
).insert(db)
).migrationSafeInsert(db)
}
Storage.update(progress: 1, for: self, in: target) // In case this is the last migration

View file

@ -103,7 +103,7 @@ open class Storage {
migrations: [TargetMigrations],
async: Bool = true,
onProgressUpdate: ((CGFloat, TimeInterval) -> ())?,
onComplete: @escaping (Error?, Bool) -> ()
onComplete: @escaping (Swift.Result<Database, Error>, Bool) -> ()
) {
guard isValid, let dbWriter: DatabaseWriter = dbWriter else { return }
@ -179,27 +179,31 @@ open class Storage {
}
// Store the logic to run when the migration completes
let migrationCompleted: (Database, Error?) -> () = { [weak self] db, error in
let migrationCompleted: (Swift.Result<Database, Error>) -> () = { [weak self] result in
self?.hasCompletedMigrations = true
self?.migrationProgressUpdater = nil
SUKLegacy.clearLegacyDatabaseInstance()
if let error = error {
if case .failure(let error) = result {
SNLog("[Migration Error] Migration failed with error: \(error)")
}
onComplete(error, needsConfigSync)
onComplete(result, needsConfigSync)
}
// Note: The non-async migration should only be used for unit tests
guard async else {
do { try self.migrator?.migrate(dbWriter) }
catch { try? dbWriter.read { db in migrationCompleted(db, error) } }
catch {
try? dbWriter.read { db in
migrationCompleted(Swift.Result<Database, Error>.failure(error))
}
}
return
}
self.migrator?.asyncMigrate(dbWriter) { db, error in
migrationCompleted(db, error)
self.migrator?.asyncMigrate(dbWriter) { result in
migrationCompleted(result)
}
}
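The completion now hands back a Swift.Result<Database, Error> rather than an optional error, so callers branch on the result instead of nil-checking. A minimal sketch of a handler matching the new signature (the handler body and log messages are illustrative only):

import GRDB

// Hypothetical completion handler matching the new signature shown above
let onComplete: (Swift.Result<Database, Error>, Bool) -> () = { result, needsConfigSync in
    switch result {
        case .success: print("Migrations completed (needsConfigSync: \(needsConfigSync))")
        case .failure(let error): print("Migration failed: \(error)") // mirrors the SNLog above
    }
}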
@ -434,7 +438,7 @@ public extension ValueObservation {
func publisher(
in storage: Storage,
scheduling scheduler: ValueObservationScheduler = Storage.defaultPublisherScheduler
) -> AnyPublisher<Reducer.Value, Error> {
) -> AnyPublisher<Reducer.Value, Error> where Reducer: ValueReducer {
guard storage.isValid, let dbWriter: DatabaseWriter = storage.dbWriter else {
return Fail(error: StorageError.databaseInvalid).eraseToAnyPublisher()
}
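A brief usage sketch of this publisher(in:) extension, assuming Storage.shared and a "profile" table exist in the wider codebase (both are assumptions here, not shown in this diff):

import Combine
import GRDB

let cancellable = ValueObservation
    .tracking { db in try Int.fetchOne(db, sql: "SELECT COUNT(*) FROM profile") ?? 0 }
    .publisher(in: Storage.shared)   // emits a Fail publisher if storage is invalid
    .sink(
        receiveCompletion: { completion in print("Observation finished: \(completion)") },
        receiveValue: { count in print("Profile count: \(count)") }
    )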

View file

@ -2,6 +2,7 @@
import Foundation
import GRDB
import DifferenceKit
// MARK: - PagedDatabaseObserver
@ -198,6 +199,16 @@ public class PagedDatabaseObserver<ObservedTable, T>: TransactionObserver where
// Update the cache, pageInfo and the change callback
self?.dataCache.mutate { $0 = finalUpdatedDataCache }
self?.pageInfo.mutate { $0 = updatedPageInfo }
// Make sure the updates run on the main thread
guard Thread.isMainThread else {
DispatchQueue.main.async { [weak self] in
self?.onChangeUnsorted(finalUpdatedDataCache.values, updatedPageInfo)
}
return
}
self?.onChangeUnsorted(finalUpdatedDataCache.values, updatedPageInfo)
}
@ -673,7 +684,12 @@ public class PagedDatabaseObserver<ObservedTable, T>: TransactionObserver where
let updatedLimitInfo: PagedData.PageInfo = PagedData.PageInfo(
pageSize: currentPageInfo.pageSize,
pageOffset: queryInfo.updatedCacheOffset,
currentCount: (currentPageInfo.currentCount + newData.count),
currentCount: {
switch target {
case .reloadCurrent: return currentPageInfo.currentCount
default: return (currentPageInfo.currentCount + newData.count)
}
}(),
totalCount: totalCount
)
@ -726,6 +742,12 @@ public class PagedDatabaseObserver<ObservedTable, T>: TransactionObserver where
self?.isLoadingMoreData.mutate { $0 = false }
}
// Make sure the updates run on the main thread
guard Thread.isMainThread else {
DispatchQueue.main.async { triggerUpdates() }
return
}
triggerUpdates()
}
@ -996,6 +1018,56 @@ public enum PagedData {
let rowIndex: Int64
}
// MARK: - Convenience Functions
// FIXME: Would be good to clean this up further in the future (should be able to do more processing on BG threads)
public static func processAndTriggerUpdates<SectionModel: DifferentiableSection>(
updatedData: [SectionModel]?,
currentDataRetriever: @escaping (() -> [SectionModel]?),
onDataChange: (([SectionModel], StagedChangeset<[SectionModel]>) -> ())?,
onUnobservedDataChange: @escaping (([SectionModel], StagedChangeset<[SectionModel]>) -> Void)
) {
guard let updatedData: [SectionModel] = updatedData else { return }
// Note: While it would be nice to generate the changeset on a background thread, doing so
// introduces a multi-threading issue where a data change can come in while the table is
// processing multiple updates, leaving the data in a partially updated state (which makes
// the subsequent table reload crash due to inconsistent state)
let performUpdates = {
guard let currentData: [SectionModel] = currentDataRetriever() else { return }
let changeset: StagedChangeset<[SectionModel]> = StagedChangeset(
source: currentData,
target: updatedData
)
// No need to do anything if there were no changes
guard !changeset.isEmpty else { return }
// If we have the callback then trigger it, otherwise just store the changes to be sent
// to the callback if we ever start observing again (when we have the callback it needs
// to do the data updating as it's tied to UI updates and can cause crashes if not updated
// in the correct order)
guard let onDataChange: (([SectionModel], StagedChangeset<[SectionModel]>) -> ()) = onDataChange else {
onUnobservedDataChange(updatedData, changeset)
return
}
onDataChange(updatedData, changeset)
}
// No need to dispatch to the next run loop if we are already on the main thread
guard !Thread.isMainThread else {
performUpdates()
return
}
// Run any changes on the main thread (as they will generally trigger UI updates)
DispatchQueue.main.async {
performUpdates()
}
}
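A usage sketch of processAndTriggerUpdates, assuming DifferenceKit's ArraySection as the section model and purely illustrative data (the type names and values below are not from this PR); in the app the onDataChange callback is where the table/collection view reload would run:

import DifferenceKit
import SessionUtilitiesKit

extension String: Differentiable {}
typealias SectionModel = ArraySection<String, String>

var tableData: [SectionModel] = [ArraySection(model: "Section", elements: ["a", "b"])]
let updated: [SectionModel] = [ArraySection(model: "Section", elements: ["a", "b", "c"])]

PagedData.processAndTriggerUpdates(
    updatedData: updated,
    currentDataRetriever: { tableData },
    onDataChange: { newData, changeset in
        // In the app this is where `UITableView.reload(using:)` would run;
        // here we simply adopt the new data
        tableData = newData
        print("Applying \(changeset.count) change stage(s)")
    },
    onUnobservedDataChange: { newData, _ in
        // Only called while `onDataChange` is nil (i.e. nothing is observing);
        // the stored changes get replayed when observation resumes
        tableData = newData
    }
)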
// MARK: - Internal Functions
fileprivate static func totalCount(
@ -1005,10 +1077,11 @@ public enum PagedData {
filterSQL: SQL
) -> Int {
let tableNameLiteral: SQL = SQL(stringLiteral: tableName)
let finalJoinSQL: SQL = (requiredJoinSQL ?? "")
let request: SQLRequest<Int> = """
SELECT \(tableNameLiteral).rowId
FROM \(tableNameLiteral)
\(requiredJoinSQL ?? "")
\(finalJoinSQL)
WHERE \(filterSQL)
"""
@ -1027,12 +1100,14 @@ public enum PagedData {
offset: Int
) -> [Int64] {
let tableNameLiteral: SQL = SQL(stringLiteral: tableName)
let finalJoinSQL: SQL = (requiredJoinSQL ?? "")
let finalGroupSQL: SQL = (groupSQL ?? "")
let request: SQLRequest<Int64> = """
SELECT \(tableNameLiteral).rowId
FROM \(tableNameLiteral)
\(requiredJoinSQL ?? "")
\(finalJoinSQL)
WHERE \(filterSQL)
\(groupSQL ?? "")
\(finalGroupSQL)
ORDER BY \(orderSQL)
LIMIT \(limit) OFFSET \(offset)
"""
@ -1052,6 +1127,7 @@ public enum PagedData {
) -> Int? {
let tableNameLiteral: SQL = SQL(stringLiteral: tableName)
let idColumnLiteral: SQL = SQL(stringLiteral: idColumn)
let finalJoinSQL: SQL = (requiredJoinSQL ?? "")
let request: SQLRequest<Int> = """
SELECT
(data.rowIndex - 1) AS rowIndex -- Converting from 1-Indexed to 0-indexed
@ -1060,7 +1136,7 @@ public enum PagedData {
\(tableNameLiteral).\(idColumnLiteral) AS \(idColumnLiteral),
ROW_NUMBER() OVER (ORDER BY \(orderSQL)) AS rowIndex
FROM \(tableNameLiteral)
\(requiredJoinSQL ?? "")
\(finalJoinSQL)
WHERE \(filterSQL)
) AS data
WHERE \(SQL("data.\(idColumnLiteral) = \(id)"))
@ -1083,6 +1159,7 @@ public enum PagedData {
guard !rowIds.isEmpty else { return [] }
let tableNameLiteral: SQL = SQL(stringLiteral: tableName)
let finalJoinSQL: SQL = (requiredJoinSQL ?? "")
let request: SQLRequest<RowIndexInfo> = """
SELECT
data.rowId AS rowId,
@ -1092,7 +1169,7 @@ public enum PagedData {
\(tableNameLiteral).rowid AS rowid,
ROW_NUMBER() OVER (ORDER BY \(orderSQL)) AS rowIndex
FROM \(tableNameLiteral)
\(requiredJoinSQL ?? "")
\(finalJoinSQL)
WHERE \(filterSQL)
) AS data
WHERE \(SQL("data.rowid IN \(rowIds)"))

View file

@ -20,6 +20,7 @@ public struct TargetMigrations: Comparable {
case snodeKit
case messagingKit
case uiKit
case test
public static func < (lhs: Self, rhs: Self) -> Bool {
let lhsIndex: Int = (Identifier.allCases.firstIndex(of: lhs) ?? Identifier.allCases.count)

View file

@ -0,0 +1,277 @@
// Copyright © 2022 Rangeproof Pty Ltd. All rights reserved.
import Foundation
import GRDB
// MARK: - Migration Safe Functions
public extension MutablePersistableRecord where Self: TableRecord & EncodableRecord & Codable {
func migrationSafeInsert(
_ db: Database,
onConflict conflictResolution: Database.ConflictResolution? = nil
) throws {
var record = try MigrationSafeMutableRecord(db, originalRecord: self)
try record.insert(db, onConflict: conflictResolution)
}
func migrationSafeInserted(
_ db: Database,
onConflict conflictResolution: Database.ConflictResolution? = nil
) throws -> Self {
let record = try MigrationSafeMutableRecord(db, originalRecord: self)
let updatedRecord = try record.inserted(db, onConflict: conflictResolution)
return updatedRecord.originalRecord
}
func migrationSafeSave(
_ db: Database,
onConflict conflictResolution: Database.ConflictResolution? = nil
) throws {
var record = try MigrationSafeMutableRecord(db, originalRecord: self)
try record.save(db, onConflict: conflictResolution)
}
func migrationSafeSaved(
_ db: Database,
onConflict conflictResolution: Database.ConflictResolution? = nil
) throws -> Self {
let record = try MigrationSafeMutableRecord(db, originalRecord: self)
let updatedRecord = try record.saved(db, onConflict: conflictResolution)
return updatedRecord.originalRecord
}
func migrationSafeUpsert(_ db: Database) throws {
var record = try MigrationSafeMutableRecord(db, originalRecord: self)
try record.upsert(db)
}
}
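A quick standalone sketch of the "-ed" variants, assuming a hypothetical Note record and an in-memory database; the inserted/saved helpers hand back the original record with its persistence callbacks applied (e.g. the auto-incremented id):

import GRDB
// Assumes the migrationSafe helpers above are available (SessionUtilitiesKit)

struct Note: Codable, MutablePersistableRecord {
    static let databaseTableName = "note"
    var id: Int64?
    var body: String

    mutating func didInsert(_ inserted: InsertionSuccess) { id = inserted.rowID }
}

let dbQueue = try DatabaseQueue()
try dbQueue.write { db in
    try db.create(table: Note.databaseTableName) { t in
        t.autoIncrementedPrimaryKey("id")
        t.column("body", .text)
    }

    // Encodes only the columns that currently exist, then runs the normal
    // insert; `didInsert` still fires so the returned record has its id set
    let saved: Note = try Note(id: nil, body: "hello").migrationSafeInserted(db)
    print(saved.id as Any)
}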
// MARK: - MigrationSafeMutableRecord
private class MigrationSafeRecord<T: PersistableRecord & Encodable>: MigrationSafeMutableRecord<T> {}
private class MigrationSafeMutableRecord<T: MutablePersistableRecord & Encodable>: MutablePersistableRecord & Encodable {
public static var databaseTableName: String { T.databaseTableName }
fileprivate var originalRecord: T
private let availableColumnNames: [String]
init(_ db: Database, originalRecord: T) throws {
// Check the current columns in the database and filter out any properties on the object which
// don't have a corresponding column in the database
self.originalRecord = originalRecord
self.availableColumnNames = try db.columns(in: Self.databaseTableName).map(\.name)
}
func encode(to encoder: Encoder) throws {
let filteredEncoder: FilteredEncoder = FilteredEncoder(
originalEncoder: encoder,
availableKeys: availableColumnNames
)
try originalRecord.encode(to: filteredEncoder)
}
// MARK: - Persistence Callbacks
func willInsert(_ db: Database) throws {
try originalRecord.willInsert(db)
}
func aroundInsert(_ db: Database, insert: () throws -> InsertionSuccess) throws {
try originalRecord.aroundInsert(db, insert: insert)
}
func didInsert(_ inserted: InsertionSuccess) {
originalRecord.didInsert(inserted)
}
func willUpdate(_ db: Database, columns: Set<String>) throws {
try originalRecord.willUpdate(db, columns: columns)
}
func aroundUpdate(_ db: Database, columns: Set<String>, update: () throws -> PersistenceSuccess) throws {
try originalRecord.aroundUpdate(db, columns: columns, update: update)
}
func didUpdate(_ updated: PersistenceSuccess) {
originalRecord.didUpdate(updated)
}
func willSave(_ db: Database) throws {
try originalRecord.willSave(db)
}
func aroundSave(_ db: Database, save: () throws -> PersistenceSuccess) throws {
try originalRecord.aroundSave(db, save: save)
}
func didSave(_ saved: PersistenceSuccess) {
originalRecord.didSave(saved)
}
func willDelete(_ db: Database) throws {
try originalRecord.willDelete(db)
}
func aroundDelete(_ db: Database, delete: () throws -> Bool) throws {
try originalRecord.aroundDelete(db, delete: delete)
}
func didDelete(deleted: Bool) {
originalRecord.didDelete(deleted: deleted)
}
}
// MARK: - FilteredEncoder
private class FilteredEncoder: Encoder {
let originalEncoder: Encoder
let availableKeys: [String]
init(originalEncoder: Encoder, availableKeys: [String]) {
self.originalEncoder = originalEncoder
self.availableKeys = availableKeys
}
var codingPath: [CodingKey] { originalEncoder.codingPath }
var userInfo: [CodingUserInfoKey: Any] { originalEncoder.userInfo }
func container<Key>(keyedBy type: Key.Type) -> KeyedEncodingContainer<Key> where Key: CodingKey {
let container = originalEncoder.container(keyedBy: type)
let filteredContainer = FilteredKeyedEncodingContainer(
availableKeys: availableKeys,
originalContainer: container
)
return KeyedEncodingContainer(filteredContainer)
}
func unkeyedContainer() -> UnkeyedEncodingContainer { originalEncoder.unkeyedContainer() }
func singleValueContainer() -> SingleValueEncodingContainer { originalEncoder.singleValueContainer() }
}
// MARK: - FilteredKeyedEncodingContainer
private class FilteredKeyedEncodingContainer<Key: CodingKey>: KeyedEncodingContainerProtocol {
let codingPath: [CodingKey]
let availableKeys: [String]
var originalContainer: KeyedEncodingContainer<Key>
init(availableKeys: [String], originalContainer: KeyedEncodingContainer<Key>) {
self.availableKeys = availableKeys
self.codingPath = originalContainer.codingPath
self.originalContainer = originalContainer
}
func encodeNil(forKey key: Key) throws {
guard availableKeys.contains(key.stringValue) else { return }
try originalContainer.encodeNil(forKey: key)
}
func encode(_ value: Bool, forKey key: Key) throws {
guard availableKeys.contains(key.stringValue) else { return }
try originalContainer.encode(value, forKey: key)
}
func encode(_ value: String, forKey key: Key) throws {
guard availableKeys.contains(key.stringValue) else { return }
try originalContainer.encode(value, forKey: key)
}
func encode(_ value: Double, forKey key: Key) throws {
guard availableKeys.contains(key.stringValue) else { return }
try originalContainer.encode(value, forKey: key)
}
func encode(_ value: Float, forKey key: Key) throws {
guard availableKeys.contains(key.stringValue) else { return }
try originalContainer.encode(value, forKey: key)
}
func encode(_ value: Int, forKey key: Key) throws {
guard availableKeys.contains(key.stringValue) else { return }
try originalContainer.encode(value, forKey: key)
}
func encode(_ value: Int8, forKey key: Key) throws {
guard availableKeys.contains(key.stringValue) else { return }
try originalContainer.encode(value, forKey: key)
}
func encode(_ value: Int16, forKey key: Key) throws {
guard availableKeys.contains(key.stringValue) else { return }
try originalContainer.encode(value, forKey: key)
}
func encode(_ value: Int32, forKey key: Key) throws {
guard availableKeys.contains(key.stringValue) else { return }
try originalContainer.encode(value, forKey: key)
}
func encode(_ value: Int64, forKey key: Key) throws {
guard availableKeys.contains(key.stringValue) else { return }
try originalContainer.encode(value, forKey: key)
}
func encode(_ value: UInt, forKey key: Key) throws {
guard availableKeys.contains(key.stringValue) else { return }
try originalContainer.encode(value, forKey: key)
}
func encode(_ value: UInt8, forKey key: Key) throws {
guard availableKeys.contains(key.stringValue) else { return }
try originalContainer.encode(value, forKey: key)
}
func encode(_ value: UInt16, forKey key: Key) throws {
guard availableKeys.contains(key.stringValue) else { return }
try originalContainer.encode(value, forKey: key)
}
func encode(_ value: UInt32, forKey key: Key) throws {
guard availableKeys.contains(key.stringValue) else { return }
try originalContainer.encode(value, forKey: key)
}
func encode(_ value: UInt64, forKey key: Key) throws {
guard availableKeys.contains(key.stringValue) else { return }
try originalContainer.encode(value, forKey: key)
}
func encode<T>(_ value: T, forKey key: Key) throws where T: Encodable {
guard availableKeys.contains(key.stringValue) else { return }
try originalContainer.encode(value, forKey: key)
}
func nestedContainer<NestedKey>(keyedBy keyType: NestedKey.Type, forKey key: Key) -> KeyedEncodingContainer<NestedKey> where NestedKey: CodingKey {
return originalContainer.nestedContainer(keyedBy: keyType, forKey: key)
}
func nestedUnkeyedContainer(forKey key: Key) -> UnkeyedEncodingContainer {
return originalContainer.nestedUnkeyedContainer(forKey: key)
}
func superEncoder() -> Encoder {
return originalContainer.superEncoder()
}
func superEncoder(forKey key: Key) -> Encoder {
return originalContainer.superEncoder(forKey: key)
}
}

View file

@ -138,7 +138,7 @@ public final class JobRunner {
guard canStartJob else { return }
// Start the job runner if needed
db.afterNextTransactionCommit { _ in
db.afterNextTransaction { _ in
queues.wrappedValue[updatedJob.variant]?.start()
}
}
@ -154,7 +154,7 @@ public final class JobRunner {
queues.wrappedValue[job.variant]?.upsert(job, canStartJob: canStartJob)
// Start the job runner if needed
db.afterNextTransactionCommit { _ in
db.afterNextTransaction { _ in
queues.wrappedValue[job.variant]?.start()
}
}
@ -177,7 +177,7 @@ public final class JobRunner {
queues.wrappedValue[updatedJob.variant]?.insert(updatedJob, before: otherJob)
// Start the job runner if needed
db.afterNextTransactionCommit { _ in
db.afterNextTransaction { _ in
queues.wrappedValue[updatedJob.variant]?.start()
}
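These call sites appear to track GRDB 6's rename of Database.afterNextTransactionCommit(_:) to afterNextTransaction(onCommit:onRollback:); a minimal sketch of the renamed API, assuming GRDB 6.x (the table and log messages are illustrative only):

import GRDB

let dbQueue = try DatabaseQueue()
try dbQueue.write { db in
    try db.execute(sql: "CREATE TABLE IF NOT EXISTS job (id INTEGER PRIMARY KEY)")
    try db.execute(sql: "INSERT INTO job DEFAULT VALUES")

    // The trailing-closure form used above is the `onCommit` handler
    db.afterNextTransaction(
        onCommit: { _ in print("Transaction committed; safe to start the job queue") },
        onRollback: { _ in print("Transaction rolled back; job was not persisted") }
    )
}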

View file

@ -17,7 +17,7 @@ class IdentitySpec: QuickSpec {
describe("an Identity") {
beforeEach {
mockStorage = Storage(
customWriter: DatabaseQueue(),
customWriter: try! DatabaseQueue(),
customMigrations: [
SNUtilitiesKit.migrations()
]

View file

@ -0,0 +1,681 @@
// Copyright © 2022 Rangeproof Pty Ltd. All rights reserved.
import Foundation
import GRDB
import Quick
import Nimble
@testable import SessionUtilitiesKit
class PersistableRecordUtilitiesSpec: QuickSpec {
static var customWriter: DatabaseQueue!
struct TestType: Codable, FetchableRecord, PersistableRecord, TableRecord, ColumnExpressible {
public static var databaseTableName: String { "TestType" }
public typealias Columns = CodingKeys
public enum CodingKeys: String, CodingKey, ColumnExpression {
case columnA
case columnB
}
public let columnA: String
public let columnB: String?
}
struct MutableTestType: Codable, FetchableRecord, MutablePersistableRecord, TableRecord, ColumnExpressible {
public static var databaseTableName: String { "MutableTestType" }
public typealias Columns = CodingKeys
public enum CodingKeys: String, CodingKey, ColumnExpression {
case id
case columnA
case columnB
}
public var id: Int64?
public let columnA: String
public let columnB: String?
init(id: Int64? = nil, columnA: String, columnB: String?) {
self.id = id
self.columnA = columnA
self.columnB = columnB
}
mutating func didInsert(_ inserted: InsertionSuccess) {
self.id = inserted.rowID
}
}
enum TestInsertTestTypeMigration: Migration {
static let target: TargetMigrations.Identifier = .test
static let identifier: String = "TestInsertTestType"
static let needsConfigSync: Bool = false
static let minExpectedRunDuration: TimeInterval = 0
static func migrate(_ db: Database) throws {
try db.create(table: TestType.self) { t in
t.column(.columnA, .text).primaryKey()
}
try db.create(table: MutableTestType.self) { t in
t.column(.id, .integer).primaryKey(autoincrement: true)
t.column(.columnA, .text).unique()
}
}
}
enum TestAddColumnMigration: Migration {
static let target: TargetMigrations.Identifier = .test
static let identifier: String = "TestAddColumn"
static let needsConfigSync: Bool = false
static let minExpectedRunDuration: TimeInterval = 0
static func migrate(_ db: Database) throws {
try db.alter(table: TestType.self) { t in
t.add(.columnB, .text)
}
try db.alter(table: MutableTestType.self) { t in
t.add(.columnB, .text)
}
}
}
// MARK: - Spec
override func spec() {
var customWriter: DatabaseQueue!
var mockStorage: Storage!
describe("a PersistableRecord") {
beforeEach {
customWriter = try! DatabaseQueue()
PersistableRecordUtilitiesSpec.customWriter = customWriter
mockStorage = Storage(
customWriter: customWriter,
customMigrations: [
TargetMigrations(
identifier: .test,
migrations: (0..<100)
.map { _ in [] }
.appending([TestInsertTestTypeMigration.self])
)
]
)
}
afterEach {
customWriter = nil
mockStorage = nil
}
context("before running the add column migration") {
it("fails when using the standard insert") {
mockStorage.write { db in
expect {
try TestType(columnA: "Test1", columnB: "Test1B").insert(db)
}
.to(throwError())
}
}
it("fails when using the standard inserted") {
mockStorage.write { db in
expect {
try MutableTestType(columnA: "Test2", columnB: "Test2B").inserted(db)
}
.to(throwError())
}
}
it("fails when using the standard save and the item does not already exist") {
mockStorage.write { db in
expect {
try TestType(columnA: "Test3", columnB: "Test3B").save(db)
}
.to(throwError())
}
}
it("fails when using the standard saved and the item does not already exist") {
mockStorage.write { db in
expect {
try MutableTestType(columnA: "Test4", columnB: "Test4B").saved(db)
}
.to(throwError())
}
}
it("fails when using the standard upsert and the item does not already exist") {
mockStorage.write { db in
expect {
try TestType(columnA: "Test5", columnB: "Test5B").upsert(db)
}
.to(throwError())
}
}
it("fails when using the standard mutable upsert and the item does not already exist") {
mockStorage.write { db in
expect {
var result = MutableTestType(columnA: "Test6", columnB: "Test6B")
try result.upsert(db)
return result
}
.to(throwError())
}
}
it("fails when using the standard upsert and the item already exists") {
mockStorage.write { db in
expect {
try db.execute(
sql: "INSERT INTO TestType (columnA) VALUES (?)",
arguments: StatementArguments(["Test19"])
)
try TestType(columnA: "Test19", columnB: "Test19B").upsert(db)
}
.to(throwError())
}
}
it("fails when using the standard mutable upsert and the item already exists") {
mockStorage.write { db in
expect {
try db.execute(
sql: "INSERT INTO MutableTestType (columnA) VALUES (?)",
arguments: StatementArguments(["Test20"])
)
var result = MutableTestType(id: 1, columnA: "Test20", columnB: "Test20B")
try result.upsert(db)
return result
}
.to(throwError())
}
}
it("succeeds when using the migration safe insert") {
mockStorage.write { db in
expect {
try TestType(columnA: "Test7", columnB: "Test7B").migrationSafeInsert(db)
}
.toNot(throwError())
}
mockStorage.read { db in
expect(try TestType.fetchAll(db))
.toNot(beNil())
}
}
it("succeeds when using the migration safe inserted") {
mockStorage.write { db in
expect {
try MutableTestType(columnA: "Test8", columnB: "Test8B").migrationSafeInserted(db)
}
.toNot(throwError())
expect {
try MutableTestType(columnA: "Test9", columnB: "Test9B")
.migrationSafeInserted(db)
.id
}
.toNot(beNil())
}
mockStorage.read { db in
expect(try MutableTestType.fetchAll(db))
.toNot(beNil())
}
}
it("succeeds when using the migration safe save and the item does not already exist") {
mockStorage.write { db in
expect {
try TestType(columnA: "Test10", columnB: "Test10B").migrationSafeSave(db)
}
.toNot(throwError())
}
}
it("succeeds when using the migration safe saved and the item does not already exist") {
mockStorage.write { db in
expect {
try MutableTestType(columnA: "Test11", columnB: "Test11B").migrationSafeSaved(db)
}
.toNot(throwError())
expect {
try MutableTestType(columnA: "Test12", columnB: "Test12B")
.migrationSafeSaved(db)
.id
}
.toNot(beNil())
}
mockStorage.read { db in
expect(try MutableTestType.fetchAll(db))
.toNot(beNil())
}
}
it("succeeds when using the migration safe upsert and the item does not already exist") {
mockStorage.write { db in
expect {
try TestType(columnA: "Test13", columnB: "Test13B").migrationSafeUpsert(db)
}
.toNot(throwError())
}
}
it("succeeds when using the migration safe mutable upsert and the item does not already exist") {
mockStorage.write { db in
expect {
var result = MutableTestType(columnA: "Test14", columnB: "Test14B")
try result.migrationSafeUpsert(db)
return result
}
.toNot(throwError())
}
mockStorage.read { db in
expect(try MutableTestType.fetchAll(db))
.toNot(beNil())
}
}
// Note: The built-in 'update' method only updates existing columns so this shouldn't fail
it("succeeds when using the standard save and the item already exists") {
mockStorage.write { db in
expect {
try db.execute(
sql: "INSERT INTO TestType (columnA) VALUES (?)",
arguments: StatementArguments(["Test16"])
)
try TestType(columnA: "Test16", columnB: "Test16B").save(db)
}
.toNot(throwError())
}
}
// Note: The built-in 'update' method only updates existing columns so this won't fail
// due to the structure discrepancy but won't update the id as that only happens on
// insert
it("succeeds when using the standard saved and the item already exists") {
mockStorage.write { db in
expect {
try db.execute(
sql: "INSERT INTO MutableTestType (columnA) VALUES (?)",
arguments: StatementArguments(["Test17"])
)
_ = try MutableTestType(id: 1, columnA: "Test17", columnB: "Test17B").saved(db)
}
.toNot(throwError())
expect {
try db.execute(
sql: "INSERT INTO MutableTestType (columnA) VALUES (?)",
arguments: StatementArguments(["Test18"])
)
return try MutableTestType(id: 2, columnA: "Test18", columnB: "Test18B")
.saved(db)
.id
}
.toNot(beNil())
}
mockStorage.read { db in
let types: [MutableTestType]? = try MutableTestType.fetchAll(db)
expect(types).toNot(beNil())
expect(types?.compactMap { $0.id }.count).to(equal(types?.count))
}
}
}
context("after running the add column migration") {
beforeEach {
var migrator: DatabaseMigrator = DatabaseMigrator()
migrator.registerMigration(
TestAddColumnMigration.target,
migration: TestAddColumnMigration.self
)
expect { try migrator.migrate(customWriter) }
.toNot(throwError())
}
it("succeeds when using the standard insert") {
mockStorage.write { db in
expect {
try TestType(columnA: "Test1", columnB: "Test1B").insert(db)
}
.toNot(throwError())
}
mockStorage.read { db in
expect(try TestType.fetchAll(db))
.toNot(beNil())
}
}
it("succeeds when using the standard inserted") {
mockStorage.write { db in
expect {
try MutableTestType(columnA: "Test2", columnB: "Test2B").inserted(db)
}
.toNot(throwError())
}
mockStorage.read { db in
expect(try MutableTestType.fetchAll(db))
.toNot(beNil())
}
}
it("succeeds when using the standard save and the item does not already exist") {
mockStorage.write { db in
expect {
try TestType(columnA: "Test3", columnB: "Test3B").save(db)
}
.toNot(throwError())
}
}
it("succeeds when using the standard saved and the item does not already exist") {
mockStorage.write { db in
expect {
try MutableTestType(columnA: "Test3", columnB: "Test3B").saved(db)
}
.toNot(throwError())
}
}
it("succeeds when using the standard save and the item already exists") {
mockStorage.write { db in
expect {
try db.execute(
sql: "INSERT INTO TestType (columnA) VALUES (?)",
arguments: StatementArguments(["Test4"])
)
try TestType(columnA: "Test4", columnB: "Test4B").save(db)
}
.toNot(throwError())
}
}
// Note: The built-in 'update' method won't update the id as that only happens on
// insert
it("succeeds when using the standard saved and the item already exists") {
mockStorage.write { db in
expect {
try db.execute(
sql: "INSERT INTO MutableTestType (columnA) VALUES (?)",
arguments: StatementArguments(["Test5"])
)
_ = try MutableTestType(id: 1, columnA: "Test5", columnB: "Test5B").saved(db)
}
.toNot(throwError())
expect {
try db.execute(
sql: "INSERT INTO MutableTestType (columnA) VALUES (?)",
arguments: StatementArguments(["Test6"])
)
return try MutableTestType(id: 2, columnA: "Test6", columnB: "Test6B")
.saved(db)
.id
}
.toNot(beNil())
}
mockStorage.read { db in
let types: [MutableTestType]? = try MutableTestType.fetchAll(db)
expect(types).toNot(beNil())
expect(types?.compactMap { $0.id }.count).to(equal(types?.count))
}
}
it("succeeds when using the standard upsert and the item does not already exist") {
mockStorage.write { db in
expect {
try TestType(columnA: "Test7", columnB: "Test7B").upsert(db)
}
.toNot(throwError())
}
}
it("succeeds when using the standard mutable upsert and the item does not already exist") {
mockStorage.write { db in
expect {
var result = MutableTestType(columnA: "Test8", columnB: "Test8B")
try result.upsert(db)
return result
}
.toNot(throwError())
}
}
it("succeeds when using the standard upsert and the item already exists") {
mockStorage.write { db in
expect {
try db.execute(
sql: "INSERT INTO TestType (columnA) VALUES (?)",
arguments: StatementArguments(["Test9"])
)
try TestType(columnA: "Test9", columnB: "Test9B").upsert(db)
}
.toNot(throwError())
}
}
// Note: The built-in 'update' method won't update the id as that only happens on
// insert
it("succeeds when using the standard mutable upsert and the item already exists") {
mockStorage.write { db in
expect {
try db.execute(
sql: "INSERT INTO MutableTestType (columnA) VALUES (?)",
arguments: StatementArguments(["Test10"])
)
var result = MutableTestType(id: 1, columnA: "Test10", columnB: "Test10B")
try result.upsert(db)
return result
}
.toNot(throwError())
expect {
try db.execute(
sql: "INSERT INTO MutableTestType (columnA) VALUES (?)",
arguments: StatementArguments(["Test11"])
)
var result = MutableTestType(id: 2, columnA: "Test11", columnB: "Test11B")
try result.upsert(db)
return result.id
}
.toNot(beNil())
}
mockStorage.read { db in
let types: [MutableTestType]? = try MutableTestType.fetchAll(db)
expect(types).toNot(beNil())
expect(types?.compactMap { $0.id }.count).to(equal(types?.count))
}
}
it("succeeds when using the migration safe insert") {
mockStorage.write { db in
expect {
try TestType(columnA: "Test12", columnB: "Test12B").migrationSafeInsert(db)
}
.toNot(throwError())
}
mockStorage.read { db in
expect(try TestType.fetchAll(db))
.toNot(beNil())
}
}
it("succeeds when using the migration safe inserted") {
mockStorage.write { db in
expect {
try MutableTestType(columnA: "Test13", columnB: "Test13B").migrationSafeInserted(db)
}
.toNot(throwError())
expect {
try MutableTestType(columnA: "Test14", columnB: "Test14B")
.migrationSafeInserted(db)
.id
}
.toNot(beNil())
}
mockStorage.read { db in
expect(try MutableTestType.fetchAll(db))
.toNot(beNil())
}
}
it("succeeds when using the migration safe save and the item does not already exist") {
mockStorage.write { db in
expect {
try TestType(columnA: "Test15", columnB: "Test15B").migrationSafeSave(db)
}
.toNot(throwError())
}
}
it("succeeds when using the migration safe saved and the item does not already exist") {
mockStorage.write { db in
expect {
try MutableTestType(columnA: "Test16", columnB: "Test16B").migrationSafeSaved(db)
}
.toNot(throwError())
}
}
it("succeeds when using the migration safe save and the item already exists") {
mockStorage.write { db in
expect {
try db.execute(
sql: "INSERT INTO TestType (columnA) VALUES (?)",
arguments: StatementArguments(["Test17"])
)
try TestType(columnA: "Test17", columnB: "Test17B").migrationSafeSave(db)
}
.toNot(throwError())
}
}
// Note: The built-in 'update' method won't update the id as that only happens on
// insert
it("succeeds when using the migration safe saved and the item already exists") {
mockStorage.write { db in
expect {
try db.execute(
sql: "INSERT INTO MutableTestType (columnA) VALUES (?)",
arguments: StatementArguments(["Test18"])
)
_ = try MutableTestType(id: 1, columnA: "Test18", columnB: "Test18B")
.migrationSafeSaved(db)
}
.toNot(throwError())
expect {
try db.execute(
sql: "INSERT INTO MutableTestType (columnA) VALUES (?)",
arguments: StatementArguments(["Test19"])
)
return try MutableTestType(id: 2, columnA: "Test19", columnB: "Test19B")
.migrationSafeSaved(db)
.id
}
.toNot(beNil())
}
mockStorage.read { db in
let types: [MutableTestType]? = try MutableTestType.fetchAll(db)
expect(types).toNot(beNil())
expect(types?.compactMap { $0.id }.count).to(equal(types?.count))
}
}
it("succeeds when using the migration safe upsert and the item does not already exist") {
mockStorage.write { db in
expect {
try TestType(columnA: "Test20", columnB: "Test20B").migrationSafeUpsert(db)
}
.toNot(throwError())
}
}
it("succeeds when using the migration safe mutable upsert and the item does not already exist") {
mockStorage.write { db in
expect {
var result = MutableTestType(columnA: "Test21", columnB: "Test21B")
try result.migrationSafeUpsert(db)
return result
}
.toNot(throwError())
}
}
it("succeeds when using the migration safe upsert and the item already exists") {
mockStorage.write { db in
expect {
try db.execute(
sql: "INSERT INTO TestType (columnA) VALUES (?)",
arguments: StatementArguments(["Test22"])
)
try TestType(columnA: "Test22", columnB: "Test22B").migrationSafeUpsert(db)
}
.toNot(throwError())
}
}
// Note: The built-in 'update' method won't update the id as that only happens on
// insert
it("succeeds when using the migration safe mutable upsert and the item already exists") {
mockStorage.write { db in
expect {
try db.execute(
sql: "INSERT INTO MutableTestType (columnA) VALUES (?)",
arguments: StatementArguments(["Test23"])
)
var result = MutableTestType(id: 1, columnA: "Test23", columnB: "Test23B")
try result.migrationSafeUpsert(db)
return result
}
.toNot(throwError())
expect {
try db.execute(
sql: "INSERT INTO MutableTestType (columnA) VALUES (?)",
arguments: StatementArguments(["Test24"])
)
var result = MutableTestType(id: 2, columnA: "Test24", columnB: "Test24B")
try result.migrationSafeUpsert(db)
return result.id
}
.toNot(beNil())
}
mockStorage.read { db in
let types: [MutableTestType]? = try MutableTestType.fetchAll(db)
expect(types).toNot(beNil())
expect(types?.compactMap { $0.id }.count).to(equal(types?.count))
}
}
}
}
}
}

View file

@ -1,9 +1,9 @@
// Copyright © 2022 Rangeproof Pty Ltd. All rights reserved.
import Foundation
import GRDB
import SessionMessagingKit
import SessionUtilitiesKit
import UIKit
import SessionUIKit
public enum AppSetup {
@ -12,7 +12,7 @@ public enum AppSetup {
public static func setupEnvironment(
appSpecificBlock: @escaping () -> (),
migrationProgressChanged: ((CGFloat, TimeInterval) -> ())? = nil,
migrationsCompletion: @escaping (Error?, Bool) -> ()
migrationsCompletion: @escaping (Result<Database, Error>, Bool) -> ()
) {
guard !AppSetup.hasRun else { return }
@ -61,7 +61,7 @@ public enum AppSetup {
public static func runPostSetupMigrations(
backgroundTask: OWSBackgroundTask? = nil,
migrationProgressChanged: ((CGFloat, TimeInterval) -> ())? = nil,
migrationsCompletion: @escaping (Error?, Bool) -> ()
migrationsCompletion: @escaping (Result<Database, Error>, Bool) -> ()
) {
var backgroundTask: OWSBackgroundTask? = (backgroundTask ?? OWSBackgroundTask(labelStr: #function))
@ -73,9 +73,9 @@ public enum AppSetup {
SNUIKit.migrations()
],
onProgressUpdate: migrationProgressChanged,
onComplete: { error, needsConfigSync in
onComplete: { result, needsConfigSync in
DispatchQueue.main.async {
migrationsCompletion(error, needsConfigSync)
migrationsCompletion(result, needsConfigSync)
// The 'if' is only there to prevent the "variable never read" warning from showing
if backgroundTask != nil { backgroundTask = nil }