Sorted out a bunch of the config syncing logic

Updated the onboarding to attempt to retrieve the current user profile config and skip display name collection if it already exists
Updated the logic to get the snode pool and build paths immediately on launch even if the user hasn't been created yet (faster onboarding)
Removed the iOS-specific concurrent dual snode '/store' behaviour
Cleaned up the profile updating logic
Fixed an issue where the pollers could end up deadlocking the main thread if too many tried to start concurrently
Morgan Pretty 2022-12-16 16:51:08 +11:00
parent 893967e380
commit 8f3dcbc6be
67 changed files with 2314 additions and 1146 deletions

View File

@@ -502,7 +502,6 @@
 FD09797527FAB64300936362 /* ProfileManager.swift in Sources */ = {isa = PBXBuildFile; fileRef = FD09797327FAB3E200936362 /* ProfileManager.swift */; };
 FD09797727FAB7A600936362 /* Data+Image.swift in Sources */ = {isa = PBXBuildFile; fileRef = FD09797627FAB7A600936362 /* Data+Image.swift */; };
 FD09797927FAB7E800936362 /* ImageFormat.swift in Sources */ = {isa = PBXBuildFile; fileRef = FD09797827FAB7E800936362 /* ImageFormat.swift */; };
-FD09797B27FBB25900936362 /* Updatable.swift in Sources */ = {isa = PBXBuildFile; fileRef = FD09797A27FBB25900936362 /* Updatable.swift */; };
 FD09797D27FBDB2000936362 /* Notification+Utilities.swift in Sources */ = {isa = PBXBuildFile; fileRef = FD09797C27FBDB2000936362 /* Notification+Utilities.swift */; };
 FD09798127FCFEE800936362 /* SessionThread.swift in Sources */ = {isa = PBXBuildFile; fileRef = FD09798027FCFEE800936362 /* SessionThread.swift */; };
 FD09798327FD1A1500936362 /* ClosedGroup.swift in Sources */ = {isa = PBXBuildFile; fileRef = FD09798227FD1A1500936362 /* ClosedGroup.swift */; };
@@ -591,6 +590,9 @@
 FD2AAAF128ED57B500A49611 /* SynchronousStorage.swift in Sources */ = {isa = PBXBuildFile; fileRef = FD2AAAEF28ED57B500A49611 /* SynchronousStorage.swift */; };
 FD2AAAF228ED57B500A49611 /* SynchronousStorage.swift in Sources */ = {isa = PBXBuildFile; fileRef = FD2AAAEF28ED57B500A49611 /* SynchronousStorage.swift */; };
 FD2B4AFB29429D1000AB4848 /* ConfigContactsSpec.swift in Sources */ = {isa = PBXBuildFile; fileRef = FD2B4AFA29429D1000AB4848 /* ConfigContactsSpec.swift */; };
+FD2B4AFD294688D000AB4848 /* SessionUtil+Contacts.swift in Sources */ = {isa = PBXBuildFile; fileRef = FD2B4AFC294688D000AB4848 /* SessionUtil+Contacts.swift */; };
+FD2B4AFF2946C93200AB4848 /* ConfigurationSyncJob.swift in Sources */ = {isa = PBXBuildFile; fileRef = FD2B4AFE2946C93200AB4848 /* ConfigurationSyncJob.swift */; };
+FD2B4B042949887A00AB4848 /* QueryInterfaceRequest+Utilities.swift in Sources */ = {isa = PBXBuildFile; fileRef = FD2B4B032949887A00AB4848 /* QueryInterfaceRequest+Utilities.swift */; };
 FD37E9C328A1C6F3003AE748 /* ThemeManager.swift in Sources */ = {isa = PBXBuildFile; fileRef = FD37E9C228A1C6F3003AE748 /* ThemeManager.swift */; };
 FD37E9C628A1D4EC003AE748 /* Theme+ClassicDark.swift in Sources */ = {isa = PBXBuildFile; fileRef = FD37E9C528A1D4EC003AE748 /* Theme+ClassicDark.swift */; };
 FD37E9C828A1D73F003AE748 /* Theme+Colors.swift in Sources */ = {isa = PBXBuildFile; fileRef = FD37E9C728A1D73F003AE748 /* Theme+Colors.swift */; };
@@ -1656,7 +1658,6 @@
 FD09797327FAB3E200936362 /* ProfileManager.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ProfileManager.swift; sourceTree = "<group>"; };
 FD09797627FAB7A600936362 /* Data+Image.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "Data+Image.swift"; sourceTree = "<group>"; };
 FD09797827FAB7E800936362 /* ImageFormat.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ImageFormat.swift; sourceTree = "<group>"; };
-FD09797A27FBB25900936362 /* Updatable.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Updatable.swift; sourceTree = "<group>"; };
 FD09797C27FBDB2000936362 /* Notification+Utilities.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "Notification+Utilities.swift"; sourceTree = "<group>"; };
 FD09798027FCFEE800936362 /* SessionThread.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SessionThread.swift; sourceTree = "<group>"; };
 FD09798227FD1A1500936362 /* ClosedGroup.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ClosedGroup.swift; sourceTree = "<group>"; };
@@ -1707,6 +1708,9 @@
 FD28A4F527EAD44C00FF65E7 /* Storage.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Storage.swift; sourceTree = "<group>"; };
 FD2AAAEF28ED57B500A49611 /* SynchronousStorage.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SynchronousStorage.swift; sourceTree = "<group>"; };
 FD2B4AFA29429D1000AB4848 /* ConfigContactsSpec.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = ConfigContactsSpec.swift; sourceTree = "<group>"; };
+FD2B4AFC294688D000AB4848 /* SessionUtil+Contacts.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "SessionUtil+Contacts.swift"; sourceTree = "<group>"; };
+FD2B4AFE2946C93200AB4848 /* ConfigurationSyncJob.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ConfigurationSyncJob.swift; sourceTree = "<group>"; };
+FD2B4B032949887A00AB4848 /* QueryInterfaceRequest+Utilities.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "QueryInterfaceRequest+Utilities.swift"; sourceTree = "<group>"; };
 FD37E9C228A1C6F3003AE748 /* ThemeManager.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ThemeManager.swift; sourceTree = "<group>"; };
 FD37E9C528A1D4EC003AE748 /* Theme+ClassicDark.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "Theme+ClassicDark.swift"; sourceTree = "<group>"; };
 FD37E9C728A1D73F003AE748 /* Theme+Colors.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "Theme+Colors.swift"; sourceTree = "<group>"; };
@@ -2550,7 +2554,6 @@
 C33FDB22255A580900E217F9 /* OWSMediaUtils.swift */,
 C33FDB1C255A580900E217F9 /* UIImage+OWS.h */,
 C33FDB81255A581100E217F9 /* UIImage+OWS.m */,
-FD09797A27FBB25900936362 /* Updatable.swift */,
 );
 path = Media;
 sourceTree = "<group>";
@@ -3719,6 +3722,14 @@
 path = Utilities;
 sourceTree = "<group>";
 };
+FD2B4B022949886900AB4848 /* Database */ = {
+isa = PBXGroup;
+children = (
+FD2B4B032949887A00AB4848 /* QueryInterfaceRequest+Utilities.swift */,
+);
+path = Database;
+sourceTree = "<group>";
+};
 FD37E9C428A1C701003AE748 /* Themes */ = {
 isa = PBXGroup;
 children = (
@@ -4005,6 +4016,7 @@
 FD8ECF7529340F4800C0D1BB /* LibSessionUtil */ = {
 isa = PBXGroup;
 children = (
+FD2B4B022949886900AB4848 /* Database */,
 FD8ECF8E29381FB200C0D1BB /* Config Handling */,
 FD8ECF7829340F7100C0D1BB /* libsession-util.xcframework */,
 FD8ECF882935AB7200C0D1BB /* SessionUtilError.swift */,
@@ -4026,6 +4038,7 @@
 isa = PBXGroup;
 children = (
 FD8ECF8F29381FC200C0D1BB /* SessionUtil+UserProfile.swift */,
+FD2B4AFC294688D000AB4848 /* SessionUtil+Contacts.swift */,
 );
 path = "Config Handling";
 sourceTree = "<group>";
@@ -4199,6 +4212,7 @@
 FDF0B74E28079E5E004C14C5 /* SendReadReceiptsJob.swift */,
 C352A348255781F400338F3E /* AttachmentDownloadJob.swift */,
 C352A35A2557824E00338F3E /* AttachmentUploadJob.swift */,
+FD2B4AFE2946C93200AB4848 /* ConfigurationSyncJob.swift */,
 );
 path = Types;
 sourceTree = "<group>";
@@ -5468,7 +5482,6 @@
 FD17D7EA27F6A1C600122BE0 /* SUKLegacy.swift in Sources */,
 FDA8EB10280F8238002B68E5 /* Codable+Utilities.swift in Sources */,
 C352A36D2557858E00338F3E /* NSTimer+Proxying.m in Sources */,
-FD09797B27FBB25900936362 /* Updatable.swift in Sources */,
 7B7CB192271508AD0079FF93 /* CallRingTonePlayer.swift in Sources */,
 C3C2ABD22553C6C900C340D1 /* Data+SecureRandom.swift in Sources */,
 FD848B8B283DC509000E298B /* PagedDatabaseObserver.swift in Sources */,
@@ -5623,7 +5636,9 @@
 FDC4386527B4DE7600C60D73 /* RoomPollInfo.swift in Sources */,
 FD245C6B2850667400B966DD /* VisibleMessage+Profile.swift in Sources */,
 FD37EA0F28AB3330003AE748 /* _006_FixHiddenModAdminSupport.swift in Sources */,
+FD2B4AFD294688D000AB4848 /* SessionUtil+Contacts.swift in Sources */,
 7B81682328A4C1210069F315 /* UpdateTypes.swift in Sources */,
+FD2B4AFF2946C93200AB4848 /* ConfigurationSyncJob.swift in Sources */,
 FDC438A627BB113A00C60D73 /* UserUnbanRequest.swift in Sources */,
 FD5C72FB284F0EA10029977D /* MessageReceiver+DataExtractionNotification.swift in Sources */,
 FDC4386727B4E10E00C60D73 /* Capabilities.swift in Sources */,
@@ -5728,6 +5743,7 @@
 C32C5DBF256DD743003C73A2 /* ClosedGroupPoller.swift in Sources */,
 C352A35B2557824E00338F3E /* AttachmentUploadJob.swift in Sources */,
 FD5C7305284F0FF30029977D /* MessageReceiver+VisibleMessages.swift in Sources */,
+FD2B4B042949887A00AB4848 /* QueryInterfaceRequest+Utilities.swift in Sources */,
 FD09797027FA6FF300936362 /* Profile.swift in Sources */,
 FD245C56285065EA00B966DD /* SNProto.swift in Sources */,
 FD09798B27FD1CFE00936362 /* Capability.swift in Sources */,

View File

@@ -1837,7 +1837,7 @@ extension ConversationVC:
                    message: unsendRequest,
                    threadId: threadId,
                    interactionId: nil,
-                   to: .contact(publicKey: userPublicKey, namespace: .default)
+                   to: .contact(publicKey: userPublicKey)
                )
            }
            return
@@ -1856,7 +1856,7 @@ extension ConversationVC:
                    message: unsendRequest,
                    threadId: threadId,
                    interactionId: nil,
-                   to: .contact(publicKey: userPublicKey, namespace: .default)
+                   to: .contact(publicKey: userPublicKey)
                )
            }
            self?.showInputAccessoryView()
@@ -2303,8 +2303,8 @@ extension ConversationVC {
            return
        }

-       Storage.shared.writeAsync(
-           updates: { db in
+       Storage.shared
+           .writePublisher { db in
                // If we aren't creating a new thread (ie. sending a message request) then send a
                // messageRequestResponse back to the sender (this allows the sender to know that
                // they have been approved and can now use this contact in closed groups)
@@ -2321,21 +2321,22 @@ extension ConversationVC {
                }

                // Default 'didApproveMe' to true for the person approving the message request
-               try approvalData.contact
-                   .with(
-                       isApproved: true,
-                       didApproveMe: .update(approvalData.contact.didApproveMe || !isNewThread)
-                   )
-                   .save(db)
-
-               // Update the config with the approved contact
-               try MessageSender
-                   .syncConfiguration(db, forceSyncNow: true)
-                   .sinkUntilComplete()
-           },
-           completion: { _, _ in updateNavigationBackStack() }
-       )
+               try approvalData.contact.save(db)
+               try Contact
+                   .filter(id: approvalData.contact.id)
+                   .updateAllAndConfig(
+                       db,
+                       Contact.Columns.isApproved.set(to: true),
+                       Contact.Columns.didApproveMe
+                           .set(to: approvalData.contact.didApproveMe || !isNewThread)
+                   )
+           }
+           .sinkUntilComplete(
+               receiveCompletion: { _ in
+                   // Update the UI
+                   updateNavigationBackStack()
+               }
+           )
    }

    @objc func acceptMessageRequest() {

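The hunk above is the first of many call sites switching from `updateAll` (plus a manual `MessageSender.syncConfiguration` call) to a new `updateAllAndConfig` request helper, which lives in the added QueryInterfaceRequest+Utilities.swift and is not shown in this commit view. A minimal, self-contained sketch of the idea, assuming GRDB; `scheduleConfigSync` is a hypothetical stand-in for the real config-handling side effect:

import GRDB

// Sketch only: apply the column assignments and, in the same transaction,
// schedule a config sync so the change is also pushed into the shared
// libsession-util state and up to the user's swarm.
extension QueryInterfaceRequest where RowDecoder: MutablePersistableRecord {
    @discardableResult
    func updateAllAndScheduleSync(
        _ db: Database,
        _ assignments: ColumnAssignment...
    ) throws -> Int {
        let updatedCount: Int = try updateAll(db, assignments)

        // Only bother scheduling a sync if something actually changed
        if updatedCount > 0 { try scheduleConfigSync(db) }

        return updatedCount
    }

    private func scheduleConfigSync(_ db: Database) throws {
        // Placeholder: the real helper updates the in-memory config objects
        // and relies on a ConfigurationSyncJob to push the change later
    }
}

The benefit over the old pattern is that callers can no longer forget the follow-up sync: every mutation of a config-backed column goes through one choke point.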
View File

@@ -481,11 +481,7 @@ public class ConversationViewModel: OWSAudioPlayerDelegate {
        Storage.shared.writeAsync { db in
            try Contact
                .filter(id: threadId)
-               .updateAll(db, Contact.Columns.isBlocked.set(to: false))
-
-           try MessageSender
-               .syncConfiguration(db, forceSyncNow: true)
-               .sinkUntilComplete()
+               .updateAllAndConfig(db, Contact.Columns.isBlocked.set(to: false))
        }
    }

View File

@@ -152,7 +152,7 @@ class ThreadSettingsViewModel: SessionTableViewModel<ThreadSettingsViewModel.Nav
        dependencies.storage.writeAsync { db in
            try Profile
                .filter(id: threadId)
-               .updateAll(
+               .updateAllAndConfig(
                    db,
                    Profile.Columns.nickname
                        .set(to: (updatedNickname.isEmpty ? nil : editedDisplayName))
@@ -749,15 +749,13 @@ class ThreadSettingsViewModel: SessionTableViewModel<ThreadSettingsViewModel.Nav
        dependencies.storage.writeAsync(
            updates: { db in
                try Contact
-                   .fetchOrCreate(db, id: threadId)
-                   .with(isBlocked: .updateTo(isBlocked))
-                   .save(db)
+                   .filter(id: threadId)
+                   .updateAllAndConfig(
+                       db,
+                       Contact.Columns.isBlocked.set(to: isBlocked)
+                   )
            },
            completion: { [weak self] db, _ in
-               try MessageSender
-                   .syncConfiguration(db, forceSyncNow: true)
-                   .sinkUntilComplete()
-
                DispatchQueue.main.async {
                    let modal: ConfirmationModal = ConfirmationModal(
                        info: ConfirmationModal.Info(

View File

@@ -278,9 +278,11 @@ final class HomeVC: BaseVC, UITableViewDataSource, UITableViewDelegate, SeedRemi
        if Identity.userExists(), let appDelegate: AppDelegate = UIApplication.shared.delegate as? AppDelegate {
            appDelegate.startPollersIfNeeded()

-           // Do this only if we created a new Session ID, or if we already received the initial configuration message
-           if UserDefaults.standard[.hasSyncedInitialConfiguration] {
-               appDelegate.syncConfigurationIfNeeded()
+           if !Features.useSharedUtilForUserConfig {
+               // Do this only if we created a new Session ID, or if we already received the initial configuration message
+               if UserDefaults.standard[.hasSyncedInitialConfiguration] {
+                   appDelegate.syncConfigurationIfNeeded()
+               }
            }
        }
@@ -709,22 +711,21 @@
            // Delay the change to give the cell "unswipe" animation some time to complete
            DispatchQueue.global(qos: .default).asyncAfter(deadline: .now() + unswipeAnimationDelay) {
-               Storage.shared.writeAsync { db in
-                   try Contact
-                       .filter(id: threadViewModel.threadId)
-                       .updateAll(
-                           db,
-                           Contact.Columns.isBlocked.set(
-                               to: (threadViewModel.threadIsBlocked == false ?
-                                   true:
-                                   false
-                               )
-                           )
-                       )
-
-                   try MessageSender.syncConfiguration(db, forceSyncNow: true)
-                       .sinkUntilComplete()
-               }
+               Storage.shared
+                   .writePublisher { db in
+                       try Contact
+                           .filter(id: threadViewModel.threadId)
+                           .updateAllAndConfig(
+                               db,
+                               Contact.Columns.isBlocked.set(
+                                   to: (threadViewModel.threadIsBlocked == false ?
+                                       true:
+                                       false
+                                   )
+                               )
+                           )
+                   }
+                   .sinkUntilComplete()
            }
        }
        block.themeBackgroundColor = .conversationButton_swipeSecondary

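Several hunks in this commit (the approval flow above and this block/unblock swipe action) replace `Storage.shared.writeAsync` with `Storage.shared.writePublisher { ... }.sinkUntilComplete()`. A self-contained toy showing what that pattern boils down to, assuming GRDB's built-in Combine support; `ToyContact` and the table are illustrative only and `sinkUntilComplete` in the real code is a small helper that just retains the cancellable until the publisher finishes:

import Combine
import GRDB

struct ToyContact: Codable, FetchableRecord, PersistableRecord {
    static let databaseTableName = "toyContact"
    var id: String
    var isBlocked: Bool
}

var cancellables: Set<AnyCancellable> = []

func setBlocked(_ dbQueue: DatabaseQueue, id: String, isBlocked: Bool) {
    dbQueue
        .writePublisher { db in
            // Returns the number of updated rows, delivered to the sink below
            try ToyContact
                .filter(Column("id") == id)
                .updateAll(db, Column("isBlocked").set(to: isBlocked))
        }
        .sink(
            receiveCompletion: { _ in /* eg. refresh the UI once the write lands */ },
            receiveValue: { _ in }
        )
        .store(in: &cancellables)
}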
View File

@@ -195,10 +195,8 @@ public class MessageRequestsViewModel {
                    removeGroupData: true
                )

-               // Force a config sync
-               try MessageSender
-                   .syncConfiguration(db, forceSyncNow: true)
-                   .sinkUntilComplete()
+               // Trigger a config sync
+               ConfigurationSyncJob.enqueue(db)
            }
        }
@@ -227,28 +225,26 @@
        Storage.shared.writeAsync(
            updates: { db in
                // Update the contact
-               _ = try Contact
+               try Contact
                    .fetchOrCreate(db, id: threadId)
-                   .with(
-                       isApproved: false,
-                       isBlocked: true,
+                   .save(db)
+               try Contact
+                   .filter(id: threadId)
+                   .updateAllAndConfig(
+                       db,
+                       Contact.Columns.isApproved.set(to: false),
+                       Contact.Columns.isBlocked.set(to: true),
                        // Note: We set this to true so the current user will be able to send a
                        // message to the person who originally sent them the message request in
                        // the future if they unblock them
-                       didApproveMe: true
+                       Contact.Columns.didApproveMe.set(to: true)
                    )
-                   .saved(db)

                // Remove the thread
                _ = try SessionThread
                    .filter(id: threadId)
                    .deleteAll(db)
-
-               // Force a config sync
-               try MessageSender
-                   .syncConfiguration(db, forceSyncNow: true)
-                   .sinkUntilComplete()
            },
            completion: { _, _ in completion?() }
        )

View File

@@ -652,17 +652,20 @@ class AppDelegate: UIResponder, UIApplicationDelegate, UNUserNotificationCenterD
    // MARK: - Config Sync

    func syncConfigurationIfNeeded() {
+       // FIXME: Remove this once `useSharedUtilForUserConfig` is permanent
+       guard !Features.useSharedUtilForUserConfig else { return }
+
        let lastSync: Date = (UserDefaults.standard[.lastConfigurationSync] ?? .distantPast)

        guard Date().timeIntervalSince(lastSync) > (7 * 24 * 60 * 60) else { return } // Sync every 2 days

        Storage.shared
-           .writePublisherFlatMap { db in try MessageSender.syncConfiguration(db, forceSyncNow: false) }
-           .sinkUntilComplete(
-               receiveCompletion: { result in
+           .writeAsync(
+               updates: { db in ConfigurationSyncJob.enqueue(db) },
+               completion: { _, result in
                    switch result {
                        case .failure: break
-                       case .finished:
+                       case .success:
                            // Only update the 'lastConfigurationSync' timestamp if we have done the
                            // first sync (Don't want a new device config sync to override config
                            // syncs from other devices)

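The guard and completion handling in `syncConfigurationIfNeeded` distilled into a small, self-contained helper: skip the work unless enough time has passed, and only record a new timestamp once the sync actually succeeded, so a failed attempt retries on the next launch. The 7-day interval mirrors the constant in the diff above; the type itself is illustrative.

import Foundation

struct ConfigSyncThrottle {
    var lastSync: Date?
    let minInterval: TimeInterval = 7 * 24 * 60 * 60

    func shouldSync(now: Date = Date()) -> Bool {
        return now.timeIntervalSince(lastSync ?? .distantPast) > minInterval
    }

    mutating func markSynced(at date: Date = Date()) {
        // Call this only from the success branch, mirroring the `.success`
        // case in the completion handler above
        lastSync = date
    }
}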
View File

@@ -6,11 +6,25 @@ import SessionMessagingKit
import SignalUtilitiesKit

final class DisplayNameVC: BaseVC {
+   private let flow: Onboarding.Flow
+
    private var spacer1HeightConstraint: NSLayoutConstraint!
    private var spacer2HeightConstraint: NSLayoutConstraint!
    private var registerButtonBottomOffsetConstraint: NSLayoutConstraint!
    private var bottomConstraint: NSLayoutConstraint!

+   // MARK: - Initialization
+
+   init(flow: Onboarding.Flow) {
+       self.flow = flow
+
+       super.init(nibName: nil, bundle: nil)
+   }
+
+   required init?(coder: NSCoder) {
+       fatalError("init(coder:) has not been implemented")
+   }
+
    // MARK: - Components

    private lazy var displayNameTextField: TextField = {
@@ -176,11 +190,22 @@
        // Try to save the user name but ignore the result
        ProfileManager.updateLocal(
            queue: DispatchQueue.global(qos: .default),
-           profileName: displayName,
-           image: nil,
-           imageFilePath: nil
+           profileName: displayName
        )
-       let pnModeVC = PNModeVC()
+
+       // If we are not in the registration flow then we are finished and should go straight
+       // to the home screen
+       guard self.flow == .register else {
+           self.flow.completeRegistration()
+
+           // Go to the home screen
+           let homeVC: HomeVC = HomeVC()
+           self.navigationController?.setViewControllers([ homeVC ], animated: true)
+           return
+       }
+
+       // Need to get the PN mode if registering
+       let pnModeVC = PNModeVC(flow: .register)
        navigationController?.pushViewController(pnModeVC, animated: true)
    }
}

View File

@@ -91,10 +91,6 @@ final class LinkDeviceVC: BaseVC, UIPageViewControllerDataSource, UIPageViewCont
        tabBarTopConstraint.constant = navigationController!.navigationBar.height()
    }

-   deinit {
-       NotificationCenter.default.removeObserver(self)
-   }
-
    // MARK: - General

    func pageViewController(_ pageViewController: UIPageViewController, viewControllerBefore viewController: UIViewController) -> UIViewController? {
@@ -154,32 +150,17 @@
            return
        }
        let (ed25519KeyPair, x25519KeyPair) = try! Identity.generate(from: seed)
-       Onboarding.Flow.link.preregister(with: seed, ed25519KeyPair: ed25519KeyPair, x25519KeyPair: x25519KeyPair)
-       Identity.didRegister()
-
-       // Now that we have registered get the Snode pool
-       GetSnodePoolJob.run()
-
-       NotificationCenter.default.addObserver(self, selector: #selector(handleInitialConfigurationMessageReceived), name: .initialConfigurationMessageReceived, object: nil)
-
-       ModalActivityIndicatorViewController
-           .present(
-               // There was some crashing here due to force-unwrapping so just falling back to
-               // using self if there is no nav controller
-               fromViewController: (self.navigationController ?? self)
-           ) { [weak self] modal in
-               self?.activityIndicatorModal = modal
-           }
-   }
-
-   @objc private func handleInitialConfigurationMessageReceived(_ notification: Notification) {
-       DispatchQueue.main.async {
-           self.navigationController!.dismiss(animated: true) {
-               let pnModeVC = PNModeVC()
-               self.navigationController!.setViewControllers([ pnModeVC ], animated: true)
-           }
-       }
+
+       Onboarding.Flow.link
+           .preregister(
+               with: seed,
+               ed25519KeyPair: ed25519KeyPair,
+               x25519KeyPair: x25519KeyPair
+           )
+
+       // Otherwise continue on to request push notifications permissions
+       let pnModeVC: PNModeVC = PNModeVC(flow: .link)
+       self.navigationController?.pushViewController(pnModeVC, animated: true)
    }
}

View File

@@ -1,58 +1,124 @@
// Copyright © 2022 Rangeproof Pty Ltd. All rights reserved.

import Foundation
+import Combine
import Sodium
import GRDB
import SessionUtilitiesKit
import SessionMessagingKit

enum Onboarding {
+   private static let profileNameRetrievalPublisher: Atomic<AnyPublisher<String?, Error>> = {
+       let userPublicKey: String = getUserHexEncodedPublicKey()
+
+       return Atomic(
+           SnodeAPI.getSwarm(for: userPublicKey)
+               .subscribe(on: DispatchQueue.global(qos: .userInitiated))
+               .flatMap { swarm -> AnyPublisher<Void, Error> in
+                   guard let snode = swarm.randomElement() else {
+                       return Fail(error: SnodeAPIError.generic)
+                           .eraseToAnyPublisher()
+                   }
+
+                   return CurrentUserPoller.poll(
+                       namespaces: [.configUserProfile],
+                       from: snode,
+                       for: userPublicKey,
+                       on: DispatchQueue.global(qos: .userInitiated),
+                       // Note: These values mean the received messages will be
+                       // processed immediately rather than async as part of a Job
+                       calledFromBackgroundPoller: true,
+                       isBackgroundPollValid: { true }
+                   )
+               }
+               .flatMap { _ -> AnyPublisher<String?, Error> in
+                   Storage.shared.readPublisher { db in
+                       try Profile
+                           .filter(id: userPublicKey)
+                           .select(.name)
+                           .asRequest(of: String.self)
+                           .fetchOne(db)
+                   }
+               }
+               .shareReplay(1)
+               .eraseToAnyPublisher()
+       )
+   }()
+
+   public static var profileNamePublisher: AnyPublisher<String?, Error> {
+       profileNameRetrievalPublisher.wrappedValue
+   }
+
    enum Flow {
        case register, recover, link

        func preregister(with seed: Data, ed25519KeyPair: KeyPair, x25519KeyPair: KeyPair) {
-           let userDefaults = UserDefaults.standard
-           Identity.store(seed: seed, ed25519KeyPair: ed25519KeyPair, x25519KeyPair: x25519KeyPair)
            let x25519PublicKey = x25519KeyPair.hexEncodedPublicKey

+           // Create the initial shared util state (won't have been created on
+           // launch due to lack of ed25519 key)
+           SessionUtil.loadState(
+               userPublicKey: x25519PublicKey,
+               ed25519SecretKey: ed25519KeyPair.secretKey
+           )
+
+           // Store the user identity information
            Storage.shared.write { db in
-               try Contact(id: x25519PublicKey)
-                   .with(
-                       isApproved: true,
-                       didApproveMe: true
-                   )
-                   .save(db)
+               try Identity.store(
+                   db,
+                   seed: seed,
+                   ed25519KeyPair: ed25519KeyPair,
+                   x25519KeyPair: x25519KeyPair
+               )
+
+               // No need to show the seed again if the user is restoring or linking
+               db[.hasViewedSeed] = (self == .recover || self == .link)
+
+               // Create a contact for the current user and set their approval/trusted statuses so
+               // they don't get weird behaviours
+               try Contact
+                   .fetchOrCreate(db, id: x25519PublicKey)
+                   .save(db)
+               try Contact
+                   .filter(id: x25519PublicKey)
+                   .updateAllAndConfig(
+                       db,
+                       Contact.Columns.isTrusted.set(to: true),    // Always trust the current user
+                       Contact.Columns.isApproved.set(to: true),
+                       Contact.Columns.didApproveMe.set(to: true)
+                   )

                // Create the 'Note to Self' thread (not visible by default)
                try SessionThread
                    .fetchOrCreate(db, id: x25519PublicKey, variant: .contact)
                    .save(db)
-
-               // Create the initial shared util state (won't have been created on
-               // launch due to lack of ed25519 key)
-               SessionUtil.loadState(ed25519SecretKey: ed25519KeyPair.secretKey)
-
-               // No need to show the seed again if the user is restoring or linking
-               db[.hasViewedSeed] = (self == .recover || self == .link)
            }

            // Set hasSyncedInitialConfiguration to true so that when we hit the
            // home screen a configuration sync is triggered (yes, the logic is a
            // bit weird). This is needed so that if the user registers and
            // immediately links a device, there'll be a configuration in their swarm.
-           userDefaults[.hasSyncedInitialConfiguration] = (self == .register)
+           UserDefaults.standard[.hasSyncedInitialConfiguration] = (self == .register)

-           switch self {
-               case .register, .recover:
-                   // Set both lastDisplayNameUpdate and lastProfilePictureUpdate to the
-                   // current date, so that we don't overwrite what the user set in the
-                   // display name step with whatever we find in their swarm.
-                   userDefaults[.lastDisplayNameUpdate] = Date()
-                   userDefaults[.lastProfilePictureUpdate] = Date()
-
-               case .link: break
-           }
+           // Only continue if this isn't a new account
+           guard self != .register else { return }
+
+           // Fetch the
+           Onboarding.profileNamePublisher.sinkUntilComplete()
+       }
+
+       func completeRegistration() {
+           // Set the `lastDisplayNameUpdate` to the current date, so that we don't
+           // overwrite what the user set in the display name step with whatever we
+           // find in their swarm (otherwise the user could enter a display name and
+           // have it immediately overwritten due to the config request running slow)
+           UserDefaults.standard[.lastDisplayNameUpdate] = Date()
+
+           // Notify the app that registration is complete
+           Identity.didRegister()
+
+           // Now that we have registered get the Snode pool and sync push tokens
+           GetSnodePoolJob.run()
+           SyncPushTokensJob.run(uploadOnlyIfStale: false)
        }
    }
}

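`Onboarding.profileNamePublisher` above starts the swarm poll as soon as a restore or link flow preregisters, then lets a later screen subscribe to the already-in-flight (or already-finished) request; the custom `shareReplay(1)` operator provides the replay behaviour. A small sketch of the same "fetch once, await the result later" idea in plain Combine: a `Future` runs its work exactly once when created and replays the result to any subscriber that attaches afterwards. `fetchNameFromSwarm` is a hypothetical stand-in for the poll + database read in the diff.

import Combine
import Foundation

enum ProfileNameFetcher {
    // Stand-in for the swarm poll; the placeholder result keeps this runnable
    static func fetchNameFromSwarm(completion: @escaping (Result<String?, Error>) -> Void) {
        DispatchQueue.global(qos: .userInitiated).async {
            completion(.success("stored display name"))
        }
    }

    // `static let` means this is created lazily on first access and then cached,
    // so every later screen gets the same replayed result
    static let namePublisher: AnyPublisher<String?, Error> = Future<String?, Error> { promise in
        fetchNameFromSwarm(completion: promise)
    }
    .eraseToAnyPublisher()
}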
View File

@@ -1,13 +1,15 @@
// Copyright © 2022 Rangeproof Pty Ltd. All rights reserved.

import UIKit
+import Combine
import SessionUIKit
import SessionMessagingKit
import SessionSnodeKit
import SignalUtilitiesKit

final class PNModeVC: BaseVC, OptionViewDelegate {
+   private let flow: Onboarding.Flow
+
    private var optionViews: [OptionView] {
        [ apnsOptionView, backgroundPollingOptionView ]
    }
@@ -15,7 +17,19 @@ final class PNModeVC: BaseVC, OptionViewDelegate {
    private var selectedOptionView: OptionView? {
        return optionViews.first { $0.isSelected }
    }

+   // MARK: - Initialization
+
+   init(flow: Onboarding.Flow) {
+       self.flow = flow
+
+       super.init(nibName: nil, bundle: nil)
+   }
+
+   required init?(coder: NSCoder) {
+       fatalError("init(coder:) has not been implemented")
+   }
+
    // MARK: - Components

    private lazy var apnsOptionView: OptionView = {
@@ -128,14 +142,68 @@
        }
        UserDefaults.standard[.isUsingFullAPNs] = (selectedOptionView == apnsOptionView)

-       Identity.didRegister()
+       // If we are registering then we can just continue on
+       guard flow != .register else {
+           self.flow.completeRegistration()
+
+           // Go to the home screen
+           let homeVC: HomeVC = HomeVC()
+           self.navigationController?.setViewControllers([ homeVC ], animated: true)
+           return
+       }

-       // Go to the home screen
-       let homeVC: HomeVC = HomeVC()
-       self.navigationController?.setViewControllers([ homeVC ], animated: true)
+       // Check if we already have a profile name (ie. profile retrieval completed while waiting on
+       // this screen)
+       let existingProfileName: String? = Storage.shared
+           .read { db in
+               try Profile
+                   .filter(id: getUserHexEncodedPublicKey(db))
+                   .select(.name)
+                   .asRequest(of: String.self)
+                   .fetchOne(db)
+           }

-       // Now that we have registered get the Snode pool and sync push tokens
-       GetSnodePoolJob.run()
-       SyncPushTokensJob.run(uploadOnlyIfStale: false)
+       guard existingProfileName?.isEmpty != false else {
+           // If we have one then we can go straight to the home screen
+           self.flow.completeRegistration()
+
+           // Go to the home screen
+           let homeVC: HomeVC = HomeVC()
+           self.navigationController?.setViewControllers([ homeVC ], animated: true)
+           return
+       }
+
+       // If we don't have one then show a loading indicator and try to retrieve the existing name
+       ModalActivityIndicatorViewController.present(fromViewController: self) { viewController in
+           Onboarding.profileNamePublisher
+               .timeout(.seconds(10), scheduler: DispatchQueue.main, customError: { HTTPError.timeout })
+               .catch { _ -> AnyPublisher<String?, Error> in
+                   SNLog("Onboarding failed to retrieve existing profile information")
+                   return Just(nil)
+                       .setFailureType(to: Error.self)
+                       .eraseToAnyPublisher()
+               }
+               .receive(on: DispatchQueue.main)
+               .sinkUntilComplete(
+                   receiveValue: { [weak self, flow = self.flow] value in
+                       // Hide the loading indicator
+                       viewController.dismiss(animated: true)
+
+                       // If we have no display name we need to collect one
+                       guard value?.isEmpty == false else {
+                           let displayNameVC: DisplayNameVC = DisplayNameVC(flow: flow)
+                           self?.navigationController?.pushViewController(displayNameVC, animated: true)
+                           return
+                       }
+
+                       // Otherwise we are done and can go to the home screen
+                       self?.flow.completeRegistration()
+
+                       // Go to the home screen
+                       let homeVC: HomeVC = HomeVC()
+                       self?.navigationController?.setViewControllers([ homeVC ], animated: true)
+                   }
+               )
+       }
    }
}

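The timeout-and-fall-back handling in the PNModeVC hunk above, pulled out into a self-contained helper to make the behaviour explicit: if profile retrieval errors or takes longer than the timeout, log it and fall back to `nil` so onboarding moves on to the display name screen instead of hanging. `ProfileRetrievalTimeout` is a hypothetical error type used only for this sketch.

import Combine
import Foundation

struct ProfileRetrievalTimeout: Error {}

func nameOrNil(
    from fetchedName: AnyPublisher<String?, Error>,
    timeoutSeconds: Int = 10
) -> AnyPublisher<String?, Never> {
    return fetchedName
        .timeout(.seconds(timeoutSeconds), scheduler: DispatchQueue.main, customError: { ProfileRetrievalTimeout() })
        .catch { error -> Just<String?> in
            // Mirrors the SNLog call in the diff: log and carry on with no name
            print("Onboarding failed to retrieve existing profile information: \(error)")
            return Just(nil)
        }
        .eraseToAnyPublisher()
}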
View File

@@ -198,11 +198,18 @@ final class RegisterVC : BaseVC {
        animate()
    }

-   // MARK: Interaction
+   // MARK: - Interaction
+
    @objc private func register() {
-       Onboarding.Flow.register.preregister(with: seed, ed25519KeyPair: ed25519KeyPair, x25519KeyPair: x25519KeyPair)
-       let displayNameVC = DisplayNameVC()
-       navigationController!.pushViewController(displayNameVC, animated: true)
+       Onboarding.Flow.register
+           .preregister(
+               with: seed,
+               ed25519KeyPair: ed25519KeyPair,
+               x25519KeyPair: x25519KeyPair
+           )
+
+       let displayNameVC: DisplayNameVC = DisplayNameVC(flow: .register)
+       self.navigationController?.pushViewController(displayNameVC, animated: true)
    }

    @objc private func copyPublicKey() {

View File

@@ -194,22 +194,33 @@ final class RestoreVC: BaseVC {
            present(modal, animated: true)
        }

-       let mnemonic = mnemonicTextView.text!.lowercased()
+       let seed: Data
+       let keyPairs: (ed25519KeyPair: KeyPair, x25519KeyPair: KeyPair)
        do {
-           let hexEncodedSeed = try Mnemonic.decode(mnemonic: mnemonic)
-           let seed = Data(hex: hexEncodedSeed)
-           let (ed25519KeyPair, x25519KeyPair) = try! Identity.generate(from: seed)
-           Onboarding.Flow.recover.preregister(with: seed, ed25519KeyPair: ed25519KeyPair, x25519KeyPair: x25519KeyPair)
-           mnemonicTextView.resignFirstResponder()
-
-           Timer.scheduledTimer(withTimeInterval: 0.25, repeats: false) { _ in
-               let displayNameVC = DisplayNameVC()
-               self.navigationController!.pushViewController(displayNameVC, animated: true)
-           }
-       } catch let error {
+           let mnemonic: String = mnemonicTextView.text!.lowercased()
+           let hexEncodedSeed: String = try Mnemonic.decode(mnemonic: mnemonic)
+           seed = Data(hex: hexEncodedSeed)
+           keyPairs = try Identity.generate(from: seed)
+       }
+       catch let error {
            let error = error as? Mnemonic.DecodingError ?? Mnemonic.DecodingError.generic
            showError(title: error.errorDescription!)
+           return
        }
+
+       // Load in the user config and progress to the next screen
+       mnemonicTextView.resignFirstResponder()
+
+       Onboarding.Flow.recover
+           .preregister(
+               with: seed,
+               ed25519KeyPair: keyPairs.ed25519KeyPair,
+               x25519KeyPair: keyPairs.x25519KeyPair
+           )
+
+       let pnModeVC: PNModeVC = PNModeVC(flow: .recover)
+       self.navigationController?.pushViewController(pnModeVC, animated: true)
    }

    @objc private func handleLegalLabelTapped(_ tapGestureRecognizer: UITapGestureRecognizer) {

View File

@@ -246,10 +246,7 @@ class BlockedContactsViewModel: SessionTableViewModel<NoNav, BlockedContactsView
                Storage.shared.write { db in
                    _ = try Contact
                        .filter(ids: contactIds)
-                       .updateAll(db, Contact.Columns.isBlocked.set(to: false))
-
-                   // Force a config sync
-                   try MessageSender.syncConfiguration(db, forceSyncNow: true).sinkUntilComplete()
+                       .updateAllAndConfig(db, Contact.Columns.isBlocked.set(to: false))
                }

                self?.selectedContactIdsSubject.send([])

View File

@@ -4,16 +4,19 @@ import Foundation
class ImagePickerHandler: NSObject, UIImagePickerControllerDelegate & UINavigationControllerDelegate {
    private let onTransition: (UIViewController, TransitionType) -> Void
-   private let onImagePicked: (UIImage?, String?) -> Void
+   private let onImagePicked: (UIImage) -> Void
+   private let onImageFilePicked: (String) -> Void

    // MARK: - Initialization

    init(
        onTransition: @escaping (UIViewController, TransitionType) -> Void,
-       onImagePicked: @escaping (UIImage?, String?) -> Void
+       onImagePicked: @escaping (UIImage) -> Void,
+       onImageFilePicked: @escaping (String) -> Void
    ) {
        self.onTransition = onTransition
        self.onImagePicked = onImagePicked
+       self.onImageFilePicked = onImageFilePicked
    }

    // MARK: - UIImagePickerControllerDelegate
@@ -44,14 +47,14 @@ class ImagePickerHandler: NSObject, UIImagePickerControllerDelegate & UINavigati
                let viewController: CropScaleImageViewController = CropScaleImageViewController(
                    srcImage: rawAvatar,
                    successCompletion: { resultImage in
-                       self?.onImagePicked(resultImage, nil)
+                       self?.onImagePicked(resultImage)
                    }
                )
                self?.onTransition(viewController, .present)
                return
            }

-           self?.onImagePicked(nil, imageUrl.path)
+           self?.onImageFilePicked(imageUrl.path)
        }
    }
}

View File

@@ -149,8 +149,7 @@ final class NukeDataModal: Modal
    private func clearDeviceOnly() {
        ModalActivityIndicatorViewController.present(fromViewController: self, canCancel: false) { [weak self] _ in
-           Storage.shared
-               .writePublisherFlatMap { db in try MessageSender.syncConfiguration(db, forceSyncNow: true) }
+           ConfigurationSyncJob.run()
                .receive(on: DispatchQueue.main)
                .sinkUntilComplete(
                    receiveCompletion: { _ in

View File

@@ -70,13 +70,20 @@ class SettingsViewModel: SessionTableViewModel<SettingsViewModel.NavButton, Sett
    private let userSessionId: String
    private lazy var imagePickerHandler: ImagePickerHandler = ImagePickerHandler(
        onTransition: { [weak self] in self?.transitionToScreen($0, transitionType: $1) },
-       onImagePicked: { [weak self] resultImage, resultImagePath in
+       onImagePicked: { [weak self] resultImage in
+           guard let oldDisplayName: String = self?.oldDisplayName else { return }
+
            self?.updateProfile(
-               name: (self?.oldDisplayName ?? ""),
-               profilePicture: resultImage,
-               profilePictureFilePath: resultImagePath,
-               isUpdatingDisplayName: false,
-               isUpdatingProfilePicture: true
+               name: oldDisplayName,
+               avatarUpdate: .uploadImage(resultImage)
+           )
+       },
+       onImageFilePicked: { [weak self] resultImagePath in
+           guard let oldDisplayName: String = self?.oldDisplayName else { return }
+
+           self?.updateProfile(
+               name: oldDisplayName,
+               avatarUpdate: .uploadFilePath(resultImagePath)
            )
        }
    )
@@ -204,10 +211,7 @@
                    self?.oldDisplayName = updatedNickname
                    self?.updateProfile(
                        name: updatedNickname,
-                       profilePicture: nil,
-                       profilePictureFilePath: nil,
-                       isUpdatingDisplayName: true,
-                       isUpdatingProfilePicture: false
+                       avatarUpdate: .none
                    )
                }
            ]
@@ -512,17 +516,14 @@
    }

    private func removeProfileImage() {
+       let oldDisplayName: String = self.oldDisplayName
        let viewController = ModalActivityIndicatorViewController(canCancel: false) { [weak self] modalActivityIndicator in
            ProfileManager.updateLocal(
                queue: DispatchQueue.global(qos: .default),
-               profileName: (self?.oldDisplayName ?? ""),
-               image: nil,
-               imageFilePath: nil,
-               success: { db, updatedProfile in
-                   UserDefaults.standard[.lastProfilePictureUpdate] = Date()
-
-                   try MessageSender.syncConfiguration(db, forceSyncNow: true).sinkUntilComplete()
-
+               profileName: oldDisplayName,
+               avatarUpdate: .remove,
+               success: { db in
                    // Wait for the database transaction to complete before updating the UI
                    db.afterNextTransaction { _ in
                        DispatchQueue.main.async {
@@ -554,33 +555,14 @@
    fileprivate func updateProfile(
        name: String,
-       profilePicture: UIImage?,
-       profilePictureFilePath: String?,
-       isUpdatingDisplayName: Bool,
-       isUpdatingProfilePicture: Bool
+       avatarUpdate: ProfileManager.AvatarUpdate
    ) {
-       let imageFilePath: String? = (
-           profilePictureFilePath ??
-           ProfileManager.profileAvatarFilepath(id: self.userSessionId)
-       )
-
        let viewController = ModalActivityIndicatorViewController(canCancel: false) { [weak self] modalActivityIndicator in
            ProfileManager.updateLocal(
                queue: DispatchQueue.global(qos: .default),
                profileName: name,
-               image: profilePicture,
-               imageFilePath: imageFilePath,
-               success: { db, updatedProfile in
-                   if isUpdatingDisplayName {
-                       UserDefaults.standard[.lastDisplayNameUpdate] = Date()
-                   }
-
-                   if isUpdatingProfilePicture {
-                       UserDefaults.standard[.lastProfilePictureUpdate] = Date()
-                   }
-
-                   try MessageSender.syncConfiguration(db, forceSyncNow: true).sinkUntilComplete()
-
+               avatarUpdate: avatarUpdate,
+               success: { db in
                    // Wait for the database transaction to complete before updating the UI
                    db.afterNextTransaction { _ in
                        DispatchQueue.main.async {

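The cleaned-up profile update path above collapses four parameters into a single `ProfileManager.AvatarUpdate` value. That type isn't defined in any hunk shown here; its case names are visible at the call sites (`.none`, `.uploadImage`, `.uploadFilePath`, `.remove`), so its shape is presumably close to the following sketch, with the exact associated values being an assumption:

import UIKit

// Sketch only, based on the call sites in this diff
enum AvatarUpdate {
    case none                      // display-name-only update, leave the avatar alone
    case uploadImage(UIImage)      // freshly cropped image to upload
    case uploadFilePath(String)    // image already on disk, upload as-is
    case remove                    // clear the current avatar
}

Modelling the update as one enum also lets ProfileManager own the `lastDisplayNameUpdate` / `lastProfilePictureUpdate` bookkeeping that the view model previously had to remember to do.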
View File

@@ -43,7 +43,8 @@ public enum SNMessagingKit { // Just to make the external API nice
        JobRunner.add(executor: MessageReceiveJob.self, for: .messageReceive)
        JobRunner.add(executor: NotifyPushServerJob.self, for: .notifyPushServer)
        JobRunner.add(executor: SendReadReceiptsJob.self, for: .sendReadReceipts)
-       JobRunner.add(executor: AttachmentDownloadJob.self, for: .attachmentDownload)
        JobRunner.add(executor: AttachmentUploadJob.self, for: .attachmentUpload)
+       JobRunner.add(executor: AttachmentDownloadJob.self, for: .attachmentDownload)
+       JobRunner.add(executor: ConfigurationSyncJob.self, for: .configurationSync)
    }
}

View File

@@ -1639,10 +1639,10 @@ public enum SMKLegacy {
            self.message = message

            if let destString: String = _MessageSendJob.process(rawDestination, type: "contact") {
-               destination = .contact(publicKey: destString, namespace: .default)
+               destination = .contact(publicKey: destString)
            }
            else if let destString: String = _MessageSendJob.process(rawDestination, type: "closedGroup") {
-               destination = .closedGroup(groupPublicKey: destString, namespace: .legacyClosedGroup)
+               destination = .closedGroup(groupPublicKey: destString)
            }
            else if _MessageSendJob.process(rawDestination, type: "openGroup") != nil {
                // We can no longer support sending messages to legacy open groups

View File

@@ -525,7 +525,7 @@ enum _003_YDBToGRDBMigration: Migration {
                let recipientString: String = {
                    if let destination: Message.Destination = destination {
                        switch destination {
-                           case .contact(let publicKey, _): return publicKey
+                           case .contact(let publicKey): return publicKey
                            default: break
                        }
                    }
@@ -974,7 +974,7 @@
                            .map { $0 })
                        .defaulting(to: []),
                    destination: (threadVariant == .contact ?
-                       .contact(publicKey: threadId, namespace: .default) :
+                       .contact(publicKey: threadId) :
                        nil
                    ),
                    variant: variant,
@@ -989,7 +989,7 @@
                            .map { $0 })
                        .defaulting(to: []),
                    destination: (threadVariant == .contact ?
-                       .contact(publicKey: threadId, namespace: .default) :
+                       .contact(publicKey: threadId) :
                        nil
                    ),
                    variant: variant,
@@ -1278,8 +1278,8 @@
                // Fetch the threadId and interactionId this job should be associated with
                let threadId: String = {
                    switch legacyJob.destination {
-                       case .contact(let publicKey, _): return publicKey
-                       case .closedGroup(let groupPublicKey, _): return groupPublicKey
+                       case .contact(let publicKey): return publicKey
+                       case .closedGroup(let groupPublicKey): return groupPublicKey

                        case .openGroup(let roomToken, let server, _, _, _):
                            return OpenGroup.idFor(roomToken: roomToken, server: server)
@@ -1435,7 +1435,7 @@
                    behaviour: .recurring,
                    threadId: threadId,
                    details: SendReadReceiptsJob.Details(
-                       destination: .contact(publicKey: threadId, namespace: .default),
+                       destination: .contact(publicKey: threadId),
                        timestampMsValues: timestampsMs
                    )
                )?.migrationSafeInserted(db)

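The SMKLegacy and migration hunks above all reflect `Message.Destination` losing its `namespace` associated values, so call sites now bind a single value per case. A cut-down, self-contained sketch of how those match sites look after the change (only the two cases this diff touches are included; the real enum has more cases and associated values):

public enum Destination {
    case contact(publicKey: String)
    case closedGroup(groupPublicKey: String)
}

func threadId(for destination: Destination) -> String {
    switch destination {
        case .contact(let publicKey): return publicKey
        case .closedGroup(let groupPublicKey): return groupPublicKey
    }
}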
View File

@@ -17,12 +17,19 @@ enum _011_SharedUtilChanges: Migration {
        try db.create(table: ConfigDump.self) { t in
            t.column(.variant, .text)
                .notNull()
-               .primaryKey()
+           t.column(.publicKey, .text)
+               .notNull()
+               .indexed()
            t.column(.data, .blob)
                .notNull()
+           t.column(.combinedMessageHashes, .text)
+
+           t.primaryKey([.variant, .publicKey])
        }

        // If we don't have an ed25519 key then no need to create cached dump data
+       let userPublicKey: String = getUserHexEncodedPublicKey(db)
        guard let secretKey: [UInt8] = Identity.fetchUserEd25519KeyPair(db)?.secretKey else {
            Storage.update(progress: 1, for: self, in: target) // In case this is the last migration
            return
@@ -34,13 +41,57 @@
            secretKey: secretKey,
            cachedData: nil
        )
-       let confResult: SessionUtil.ConfResult = try SessionUtil.update(
+       let userProfileConfResult: SessionUtil.ConfResult = try SessionUtil.update(
            profile: Profile.fetchOrCreateCurrentUser(db),
-           in: .custom(conf: Atomic(userProfileConf))
+           in: Atomic(userProfileConf)
        )

-       if confResult.needsDump {
-           try SessionUtil.saveState(db, conf: userProfileConf, for: .userProfile)
+       if userProfileConfResult.needsDump {
+           try SessionUtil
+               .createDump(
+                   conf: userProfileConf,
+                   for: .userProfile,
+                   publicKey: userPublicKey,
+                   messageHashes: nil
+               )?
+               .save(db)
+       }
+
+       // Create a dump for the contacts data
+       struct ContactInfo: FetchableRecord, Decodable, ColumnExpressible {
+           typealias Columns = CodingKeys
+           enum CodingKeys: String, CodingKey, ColumnExpression, CaseIterable {
+               case contact
+               case profile
+           }
+
+           let contact: Contact
+           let profile: Profile?
+       }
+
+       let contactsData: [ContactInfo] = try Contact
+           .including(optional: Contact.profile)
+           .asRequest(of: ContactInfo.self)
+           .fetchAll(db)
+
+       let contactsConf: UnsafeMutablePointer<config_object>? = try SessionUtil.loadState(
+           for: .contacts,
+           secretKey: secretKey,
+           cachedData: nil
+       )
+       let contactsConfResult: SessionUtil.ConfResult = try SessionUtil.upsert(
+           contactData: contactsData.map { ($0.contact.id, $0.contact, $0.profile) },
+           in: Atomic(contactsConf)
+       )
+
+       if contactsConfResult.needsDump {
+           try SessionUtil
+               .createDump(
+                   conf: contactsConf,
+                   for: .contacts,
+                   publicKey: userPublicKey,
+                   messageHashes: nil
+               )?
+               .save(db)
        }

        Storage.update(progress: 1, for: self, in: target) // In case this is the last migration

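The key schema change in this migration is the move from `variant` alone to a composite `(variant, publicKey)` primary key, so a config variant can be dumped once per swarm rather than once globally. A self-contained GRDB example of the same table shape, with illustrative table and column names:

import GRDB

func createExampleDumpTable(_ dbQueue: DatabaseQueue) throws {
    try dbQueue.write { db in
        try db.create(table: "exampleDump") { t in
            t.column("variant", .text).notNull()
            t.column("publicKey", .text).notNull().indexed()
            t.column("data", .blob).notNull()
            t.column("combinedMessageHashes", .text)

            // Composite primary key: one dump row per (variant, publicKey) pair
            t.primaryKey(["variant", "publicKey"])
        }
    }
}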
View File

@@ -130,7 +130,7 @@ public extension BlindedIdLookup {
            if isCheckingForOutbox && !contact.isApproved {
                try Contact
                    .filter(id: contact.id)
-                   .updateAll(db, Contact.Columns.isApproved.set(to: true))
+                   .updateAllAndConfig(db, Contact.Columns.isApproved.set(to: true))
            }

            break

View File

@@ -4,6 +4,8 @@ import Foundation
import GRDB
import SessionUtilitiesKit

+/// This type is duplicate in both the database and within the SessionUtil config so should only ever have it's data changes via the
+/// `updateAllAndConfig` function. Updating it elsewhere could result in issues with syncing data between devices
public struct Contact: Codable, Identifiable, Equatable, FetchableRecord, PersistableRecord, TableRecord, ColumnExpressible {
    public static var databaseTableName: String { "contact" }
    internal static let threadForeignKey = ForeignKey([Columns.id], to: [SessionThread.Columns.id])
@@ -66,29 +68,6 @@
    }
}

-// MARK: - Convenience
-
-public extension Contact {
-    func with(
-        isTrusted: Updatable<Bool> = .existing,
-        isApproved: Updatable<Bool> = .existing,
-        isBlocked: Updatable<Bool> = .existing,
-        didApproveMe: Updatable<Bool> = .existing
-    ) -> Contact {
-        return Contact(
-            id: id,
-            isTrusted: (
-                (isTrusted ?? self.isTrusted) ||
-                self.id == getUserHexEncodedPublicKey()    // Always trust ourselves
-            ),
-            isApproved: (isApproved ?? self.isApproved),
-            isBlocked: (isBlocked ?? self.isBlocked),
-            didApproveMe: (didApproveMe ?? self.didApproveMe),
-            hasBeenBlocked: ((isBlocked ?? self.isBlocked) || self.hasBeenBlocked)
-        )
-    }
-}
-
// MARK: - GRDB Interactions

public extension Contact {

View File

@@ -5,6 +5,8 @@
import DifferenceKit
import SessionUtilitiesKit

+/// This type is duplicate in both the database and within the SessionUtil config so should only ever have it's data changes via the
+/// `updateAllAndConfig` function. Updating it elsewhere could result in issues with syncing data between devices
public struct Profile: Codable, Identifiable, Equatable, Hashable, FetchableRecord, PersistableRecord, TableRecord, ColumnExpressible, CustomStringConvertible, Differentiable {
    public static var databaseTableName: String { "profile" }
    internal static let interactionForeignKey = ForeignKey([Columns.id], to: [Interaction.Columns.authorId])
@@ -160,26 +162,6 @@
    }
}

-// MARK: - Mutation
-
-public extension Profile {
-    func with(
-        name: String? = nil,
-        profilePictureUrl: Updatable<String?> = .existing,
-        profilePictureFileName: Updatable<String?> = .existing,
-        profileEncryptionKey: Updatable<Data?> = .existing
-    ) -> Profile {
-        return Profile(
-            id: id,
-            name: (name ?? self.name),
-            nickname: self.nickname,
-            profilePictureUrl: (profilePictureUrl ?? self.profilePictureUrl),
-            profilePictureFileName: (profilePictureFileName ?? self.profilePictureFileName),
-            profileEncryptionKey: (profileEncryptionKey ?? self.profileEncryptionKey)
-        )
-    }
-}
-
// MARK: - GRDB Interactions

public extension Profile {

View File

@ -2,38 +2,81 @@
import Foundation import Foundation
import GRDB import GRDB
import SessionSnodeKit
import SessionUtilitiesKit import SessionUtilitiesKit
public struct ConfigDump: Codable, Equatable, Hashable, Identifiable, FetchableRecord, PersistableRecord, TableRecord, ColumnExpressible { public struct ConfigDump: Codable, Equatable, Hashable, FetchableRecord, PersistableRecord, TableRecord, ColumnExpressible {
public static var databaseTableName: String { "configDump" } public static var databaseTableName: String { "configDump" }
public typealias Columns = CodingKeys public typealias Columns = CodingKeys
public enum CodingKeys: String, CodingKey, ColumnExpression { public enum CodingKeys: String, CodingKey, ColumnExpression {
case variant case variant
case publicKey
case data case data
case combinedMessageHashes
} }
public enum Variant: String, Codable, DatabaseValueConvertible, CaseIterable { public enum Variant: String, Codable, DatabaseValueConvertible {
case userProfile case userProfile
case contacts case contacts
} }
public var id: Variant { variant }
/// The type of config this dump is for /// The type of config this dump is for
public let variant: Variant public let variant: Variant
/// The public key for the swarm this dump is for
///
/// **Note:** For user config items this will be an empty string
public let publicKey: String
/// The data for this dump /// The data for this dump
public let data: Data public let data: Data
/// A comma delimited array of message hashes for previously stored messages on the server
private let combinedMessageHashes: String?
/// An array of message hashes for previously stored messages on the server
var messageHashes: [String]? { ConfigDump.messageHashes(from: combinedMessageHashes) }
internal init(
variant: Variant,
publicKey: String,
data: Data,
messageHashes: [String]?
) {
self.variant = variant
self.publicKey = publicKey
self.data = data
self.combinedMessageHashes = ConfigDump.combinedMessageHashes(from: messageHashes)
}
} }
// MARK: - Convenience // MARK: - Convenience
public extension ConfigDump {
static func combinedMessageHashes(from messageHashes: [String]?) -> String? {
return messageHashes?.joined(separator: ",")
}
static func messageHashes(from combinedMessageHashes: String?) -> [String]? {
return combinedMessageHashes?.components(separatedBy: ",")
}
}
public extension ConfigDump.Variant { public extension ConfigDump.Variant {
static let userVariants: [ConfigDump.Variant] = [ .userProfile, .contacts ]
var configMessageKind: SharedConfigMessage.Kind { var configMessageKind: SharedConfigMessage.Kind {
switch self { switch self {
case .userProfile: return .userProfile case .userProfile: return .userProfile
case .contacts: return .contacts case .contacts: return .contacts
} }
} }
var namespace: SnodeAPI.Namespace {
switch self {
case .userProfile: return SnodeAPI.Namespace.configUserProfile
case .contacts: return SnodeAPI.Namespace.configContacts
}
}
} }
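
Note: a quick sketch of how the comma-delimited hash storage above round-trips (the hash values are placeholders, purely illustrative):

// Stored form is a single comma-delimited string (or nil when there are no hashes)
let combined: String? = ConfigDump.combinedMessageHashes(from: ["hashA", "hashB"])   // "hashA,hashB"
// Reading it back splits on the same delimiter
let restored: [String]? = ConfigDump.messageHashes(from: combined)                   // ["hashA", "hashB"]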

View File

@ -0,0 +1,355 @@
// Copyright © 2022 Rangeproof Pty Ltd. All rights reserved.
import Foundation
import Combine
import GRDB
import SessionUtil
import SessionSnodeKit
import SessionUtilitiesKit
public enum ConfigurationSyncJob: JobExecutor {
public static let maxFailureCount: Int = -1
public static let requiresThreadId: Bool = false
public static let requiresInteractionId: Bool = false
private static let maxRunFrequency: TimeInterval = 3
public static func run(
_ job: Job,
queue: DispatchQueue,
success: @escaping (Job, Bool) -> (),
failure: @escaping (Job, Error?, Bool) -> (),
deferred: @escaping (Job) -> ()
) {
guard Features.useSharedUtilForUserConfig else {
success(job, true)
return
}
// If we don't have a userKeyPair yet then there is no need to sync the configuration
// as the user doesn't exist yet (this will get triggered on the first launch of a
// fresh install due to the migrations getting run)
guard
let pendingSwarmConfigChanges: [SingleDestinationChanges] = Storage.shared
.read({ db -> [SessionUtil.OutgoingConfResult]? in
guard
Identity.userExists(db),
let ed25519SecretKey: [UInt8] = Identity.fetchUserEd25519KeyPair(db)?.secretKey
else { return nil }
return try SessionUtil.pendingChanges(
db,
userPublicKey: getUserHexEncodedPublicKey(db),
ed25519SecretKey: ed25519SecretKey
)
})?
.grouped(by: { $0.destination })
.map({ (destination: Message.Destination, value: [SessionUtil.OutgoingConfResult]) -> SingleDestinationChanges in
SingleDestinationChanges(
destination: destination,
messages: value,
allOldHashes: value
.map { ($0.oldMessageHashes ?? []) }
.reduce([], +)
.asSet()
)
})
else {
failure(job, StorageError.generic, false)
return
}
// If there are no pending changes then the job can just complete (next time something
// is updated we want to try and run immediately so don't schedule another run in this case)
guard !pendingSwarmConfigChanges.isEmpty else {
success(job, true)
return
}
Storage.shared
.readPublisher { db in
try pendingSwarmConfigChanges
.map { (change: SingleDestinationChanges) -> (messages: [TargetedMessage], allOldHashes: Set<String>) in
(
messages: try change.messages
.map { (outgoingConf: SessionUtil.OutgoingConfResult) -> TargetedMessage in
TargetedMessage(
sendData: try MessageSender.preparedSendData(
db,
message: outgoingConf.message,
to: change.destination,
interactionId: nil
),
namespace: outgoingConf.namespace,
oldHashes: (outgoingConf.oldMessageHashes ?? [])
)
},
allOldHashes: change.allOldHashes
)
}
}
.subscribe(on: queue)
.receive(on: queue)
.flatMap { (pendingSwarmChange: [(messages: [TargetedMessage], allOldHashes: Set<String>)]) -> AnyPublisher<[HTTP.BatchResponse], Error> in
Publishers
.MergeMany(
pendingSwarmChange
.map { (messages: [TargetedMessage], oldHashes: Set<String>) in
// Note: We do custom sending logic here because we want to batch the
// sending and deletion of messages within the same swarm
SnodeAPI
.sendConfigMessages(
messages
.compactMap { targetedMessage -> SnodeAPI.TargetedMessage? in
targetedMessage.sendData.snodeMessage
.map { ($0, targetedMessage.namespace) }
},
oldHashes: Array(oldHashes)
)
}
)
.collect()
.eraseToAnyPublisher()
}
.map { (responses: [HTTP.BatchResponse]) -> [SuccessfulChange] in
// Process the response data into an easy to understand form (this isn't strictly
// needed but the code gets convoluted without it)
zip(responses, pendingSwarmConfigChanges)
.compactMap { (batchResponse: HTTP.BatchResponse, pendingSwarmChange: SingleDestinationChanges) -> [SuccessfulChange]? in
let maybePublicKey: String? = {
switch pendingSwarmChange.destination {
case .contact(let publicKey), .closedGroup(let publicKey):
return publicKey
default: return nil
}
}()
// If we don't have a publicKey then this is an invalid config
guard let publicKey: String = maybePublicKey else { return nil }
// Need to know if we successfully deleted old messages (if we didn't then
// we want to keep the old hashes so we can delete them the next time)
let didDeleteOldConfigMessages: Bool = {
guard
let subResponse: HTTP.BatchSubResponse<DeleteMessagesResponse> = (batchResponse.responses.last as? HTTP.BatchSubResponse<DeleteMessagesResponse>),
200...299 ~= subResponse.code
else { return false }
return true
}()
return zip(batchResponse.responses, pendingSwarmChange.messages)
.reduce(into: []) { (result: inout [SuccessfulChange], next: ResponseChange) in
// If the request wasn't successful then just ignore it (the next
// config sync will try to make the changes again)
guard
let subResponse: HTTP.BatchSubResponse<SendMessagesResponse> = (next.response as? HTTP.BatchSubResponse<SendMessagesResponse>),
200...299 ~= subResponse.code,
let sendMessageResponse: SendMessagesResponse = subResponse.body
else { return }
result.append(
SuccessfulChange(
message: next.change.message,
publicKey: publicKey,
updatedHashes: (didDeleteOldConfigMessages ?
[sendMessageResponse.hash] :
(next.change.oldMessageHashes ?? [])
.appending(sendMessageResponse.hash)
)
)
)
}
}
.flatMap { $0 }
}
.map { (successfulChanges: [SuccessfulChange]) -> [ConfigDump] in
// Now that we have the successful changes, we need to mark them as pushed and
// generate any config dumps which need to be stored
successfulChanges
.compactMap { successfulChange -> ConfigDump? in
// Updating the pushed state returns a flag indicating whether the config
// needs to be dumped
guard SessionUtil.markAsPushed(message: successfulChange.message, publicKey: successfulChange.publicKey) else {
return nil
}
let variant: ConfigDump.Variant = successfulChange.message.kind.configDumpVariant
let atomicConf: Atomic<UnsafeMutablePointer<config_object>?> = SessionUtil.config(
for: variant,
publicKey: successfulChange.publicKey
)
return try? SessionUtil.createDump(
conf: atomicConf.wrappedValue,
for: variant,
publicKey: successfulChange.publicKey,
messageHashes: successfulChange.updatedHashes
)
}
}
.sinkUntilComplete(
receiveValue: { (configDumps: [ConfigDump]) in
// Flag to indicate whether the job should be finished or will run again
var shouldFinishCurrentJob: Bool = false
// Lastly we need to save the updated dumps to the database
let updatedJob: Job? = Storage.shared.write { db in
// Save the updated dumps to the database
try configDumps.forEach { try $0.save(db) }
// When we complete the 'ConfigurationSync' job we want to immediately schedule
// another one with a 'nextRunTimestamp' set to the 'maxRunFrequency' value to
// throttle the config sync requests
let nextRunTimestamp: TimeInterval = (Date().timeIntervalSince1970 + maxRunFrequency)
// If another 'ConfigurationSync' job was scheduled then update that one
// to run at 'nextRunTimestamp' and make the current job stop
if
let existingJob: Job = try? Job
.filter(Job.Columns.id != job.id)
.filter(Job.Columns.variant == Job.Variant.configurationSync)
.fetchOne(db),
!JobRunner.isCurrentlyRunning(existingJob)
{
_ = try existingJob
.with(nextRunTimestamp: nextRunTimestamp)
.saved(db)
shouldFinishCurrentJob = true
return job
}
return try job
.with(nextRunTimestamp: nextRunTimestamp)
.saved(db)
}
success((updatedJob ?? job), shouldFinishCurrentJob)
}
)
}
}
// MARK: - Convenience Types
public extension ConfigurationSyncJob {
fileprivate struct SingleDestinationChanges {
let destination: Message.Destination
let messages: [SessionUtil.OutgoingConfResult]
let allOldHashes: Set<String>
}
fileprivate struct TargetedMessage {
let sendData: MessageSender.PreparedSendData
let namespace: SnodeAPI.Namespace
let oldHashes: [String]
}
typealias ResponseChange = (response: Codable, change: SessionUtil.OutgoingConfResult)
fileprivate struct SuccessfulChange {
let message: SharedConfigMessage
let publicKey: String
let updatedHashes: [String]
}
}
// MARK: - Convenience
public extension ConfigurationSyncJob {
static func enqueue(_ db: Database? = nil) {
guard let db: Database = db else {
Storage.shared.writeAsync { ConfigurationSyncJob.enqueue($0) }
return
}
// FIXME: Remove this once `useSharedUtilForUserConfig` is permanent
guard Features.useSharedUtilForUserConfig else {
// If we don't have a userKeyPair yet then there is no need to sync the configuration
// as the user doesn't exist yet (this will get triggered on the first launch of a
// fresh install due to the migrations getting run)
guard
Identity.userExists(db),
let legacyConfigMessage: Message = try? ConfigurationMessage.getCurrent(db)
else { return }
let publicKey: String = getUserHexEncodedPublicKey(db)
JobRunner.add(
db,
job: Job(
variant: .messageSend,
threadId: publicKey,
details: MessageSendJob.Details(
destination: Message.Destination.contact(publicKey: publicKey),
message: legacyConfigMessage
)
)
)
return
}
// Upsert a config sync job (if there is already a pending one then no need
// to add another one)
JobRunner.upsert(
db,
job: ConfigurationSyncJob.createOrUpdateIfNeeded(db)
)
}
@discardableResult static func createOrUpdateIfNeeded(_ db: Database) -> Job {
// Try to get an existing job (if there is one that's not running)
if
let existingJob: Job = try? Job
.filter(Job.Columns.variant == Job.Variant.configurationSync)
.fetchOne(db),
!JobRunner.isCurrentlyRunning(existingJob)
{
return existingJob
}
// Otherwise create a new job
return Job(
variant: .configurationSync,
behaviour: .recurring
)
}
static func run() -> AnyPublisher<Void, Error> {
// FIXME: Remove this once `useSharedUtilForUserConfig` is permanent
guard Features.useSharedUtilForUserConfig else {
return Storage.shared
.writePublisher { db -> MessageSender.PreparedSendData in
// If we don't have a userKeyPair yet then there is no need to sync the configuration
// as the user doesn't exist yet (this will get triggered on the first launch of a
// fresh install due to the migrations getting run)
guard Identity.userExists(db) else { throw StorageError.generic }
let publicKey: String = getUserHexEncodedPublicKey(db)
return try MessageSender.preparedSendData(
db,
message: try ConfigurationMessage.getCurrent(db),
to: Message.Destination.contact(publicKey: publicKey),
interactionId: nil
)
}
.subscribe(on: DispatchQueue.global(qos: .userInitiated))
.receive(on: DispatchQueue.global(qos: .userInitiated))
.flatMap { MessageSender.sendImmediate(preparedSendData: $0) }
.eraseToAnyPublisher()
}
// Trigger the job emitting the result when completed
return Future { resolver in
ConfigurationSyncJob.run(
Job(variant: .configurationSync),
queue: DispatchQueue.global(qos: .userInitiated),
success: { _, _ in resolver(Result.success(())) },
failure: { _, error, _ in resolver(Result.failure(error ?? HTTPError.generic)) },
deferred: { _ in }
)
}
.eraseToAnyPublisher()
}
}
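
Note: a small usage sketch for the new job (both call shapes come from the `enqueue` implementation above); `JobRunner.upsert` avoids duplicate pending jobs and the 3 second `maxRunFrequency` reschedule throttles how often a completed sync can run again:

// From inside an existing database write
Storage.shared.write { db in
    ConfigurationSyncJob.enqueue(db)
}

// Or without a database instance (the job schedules its own `writeAsync`)
ConfigurationSyncJob.enqueue()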

View File

@ -33,7 +33,11 @@ public enum MessageReceiveJob: JobExecutor {
Storage.shared.write { db in Storage.shared.write { db in
// Send any SharedConfigMessages to the SessionUtil to handle it // Send any SharedConfigMessages to the SessionUtil to handle it
try SessionUtil.handleConfigMessages(db, messages: sharedConfigMessages) try SessionUtil.handleConfigMessages(
db,
messages: sharedConfigMessages,
publicKey: (job.threadId ?? "")
)
// Handle the remaining messages // Handle the remaining messages
var remainingMessagesToProcess: [Details.MessageInfo] = [] var remainingMessagesToProcess: [Details.MessageInfo] = []

View File

@ -9,7 +9,7 @@ public enum SendReadReceiptsJob: JobExecutor {
public static let maxFailureCount: Int = -1 public static let maxFailureCount: Int = -1
public static let requiresThreadId: Bool = false public static let requiresThreadId: Bool = false
public static let requiresInteractionId: Bool = false public static let requiresInteractionId: Bool = false
private static let minRunFrequency: TimeInterval = 3 private static let maxRunFrequency: TimeInterval = 3
public static func run( public static func run(
_ job: Job, _ job: Job,
@ -56,9 +56,9 @@ public enum SendReadReceiptsJob: JobExecutor {
case .finished: case .finished:
// When we complete the 'SendReadReceiptsJob' we want to immediately schedule // When we complete the 'SendReadReceiptsJob' we want to immediately schedule
// another one for the same thread but with a 'nextRunTimestamp' set to the // another one for the same thread but with a 'nextRunTimestamp' set to the
// 'minRunFrequency' value to throttle the read receipt requests // 'maxRunFrequency' value to throttle the read receipt requests
var shouldFinishCurrentJob: Bool = false var shouldFinishCurrentJob: Bool = false
let nextRunTimestamp: TimeInterval = (Date().timeIntervalSince1970 + minRunFrequency) let nextRunTimestamp: TimeInterval = (Date().timeIntervalSince1970 + maxRunFrequency)
let updatedJob: Job? = Storage.shared.write { db in let updatedJob: Job? = Storage.shared.write { db in
// If another 'sendReadReceipts' job was scheduled then update that one // If another 'sendReadReceipts' job was scheduled then update that one
@ -163,7 +163,7 @@ public extension SendReadReceiptsJob {
behaviour: .recurring, behaviour: .recurring,
threadId: threadId, threadId: threadId,
details: Details( details: Details(
destination: .contact(publicKey: threadId, namespace: .default), destination: .contact(publicKey: threadId),
timestampMsValues: timestampMsValues.asSet() timestampMsValues: timestampMsValues.asSet()
) )
) )

View File

@ -49,11 +49,8 @@ public enum UpdateProfilePictureJob: JobExecutor {
ProfileManager.updateLocal( ProfileManager.updateLocal(
queue: queue, queue: queue,
profileName: profile.name, profileName: profile.name,
image: nil, avatarUpdate: (profileFilePath.map { .uploadFilePath($0) } ?? .none),
imageFilePath: profileFilePath, success: { db in
success: { db, _ in
try MessageSender.syncConfiguration(db, forceSyncNow: true).sinkUntilComplete()
// Need to call the 'success' closure asynchronously on the queue to prevent a reentrancy // Need to call the 'success' closure asynchronously on the queue to prevent a reentrancy
// issue as it will write to the database and this closure is already called within // issue as it will write to the database and this closure is already called within
// another database write // another database write

View File

@ -0,0 +1,366 @@
// Copyright © 2022 Rangeproof Pty Ltd. All rights reserved.
import Foundation
import GRDB
import SessionUtil
import SessionUtilitiesKit
internal extension SessionUtil {
// MARK: - Incoming Changes
static func handleContactsUpdate(
_ db: Database,
in atomicConf: Atomic<UnsafeMutablePointer<config_object>?>,
needsDump: Bool
) throws {
typealias ContactData = [String: (contact: Contact, profile: Profile)]
guard needsDump else { return }
guard atomicConf.wrappedValue != nil else { throw SessionUtilError.nilConfigObject }
// Since we are doing direct memory manipulation we are using an `Atomic` type which has
// blocking access in its `mutate` closure
let contactData: ContactData = atomicConf.mutate { conf -> ContactData in
var contactData: ContactData = [:]
var contact: contacts_contact = contacts_contact()
let contactIterator: UnsafeMutablePointer<contacts_iterator> = contacts_iterator_new(conf)
while !contacts_iterator_done(contactIterator, &contact) {
let contactId: String = String(cString: withUnsafeBytes(of: contact.session_id) { [UInt8]($0) }
.map { CChar($0) }
.nullTerminated()
)
let contactResult: Contact = Contact(
id: contactId,
isApproved: contact.approved,
isBlocked: contact.blocked,
didApproveMe: contact.approved_me
)
let profileResult: Profile = Profile(
id: contactId,
name: (contact.name.map { String(cString: $0) } ?? ""),
nickname: contact.nickname.map { String(cString: $0) },
profilePictureUrl: contact.profile_pic.url.map { String(cString: $0) },
profileEncryptionKey: (contact.profile_pic.key != nil && contact.profile_pic.keylen > 0 ?
Data(bytes: contact.profile_pic.key, count: contact.profile_pic.keylen) :
nil
)
)
contactData[contactId] = (contactResult, profileResult)
contacts_iterator_advance(contactIterator)
}
contacts_iterator_free(contactIterator) // Need to free the iterator
return contactData
}
// The current user's contact data is handled separately so exclude it if it's present (as that's
// actually a bug)
let userPublicKey: String = getUserHexEncodedPublicKey()
let targetContactData: ContactData = contactData.filter { $0.key != userPublicKey }
// If we only updated the current user contact then no need to continue
guard !targetContactData.isEmpty else { return }
// Since we don't sync 100% of the data stored against the contact and profile objects we
// need to only update the data we do have to ensure we don't overwrite anything that doesn't
// get synced
try targetContactData
.forEach { sessionId, data in
// Note: We only update the contact and profile records if the data has actually changed
// in order to avoid triggering UI updates for every thread on the home screen (the DB
// observation system can't differentiate between update calls which do and don't change anything)
let contact: Contact = Contact.fetchOrCreate(db, id: sessionId)
let profile: Profile = Profile.fetchOrCreate(db, id: sessionId)
if
(!data.profile.name.isEmpty && profile.name != data.profile.name) ||
profile.nickname != data.profile.nickname ||
profile.profilePictureUrl != data.profile.profilePictureUrl ||
profile.profileEncryptionKey != data.profile.profileEncryptionKey
{
try profile.save(db)
try Profile
.filter(id: sessionId)
.updateAll( // Handling a config update so don't use `updateAllAndConfig`
db,
[
(data.profile.name.isEmpty || profile.name == data.profile.name ? nil :
Profile.Columns.name.set(to: data.profile.name)
),
(profile.nickname == data.profile.nickname ? nil :
Profile.Columns.nickname.set(to: data.profile.nickname)
),
(profile.profilePictureUrl == data.profile.profilePictureUrl ? nil :
Profile.Columns.profilePictureUrl.set(to: data.profile.profilePictureUrl)
),
(profile.profileEncryptionKey == data.profile.profileEncryptionKey ? nil :
Profile.Columns.profileEncryptionKey.set(to: data.profile.profileEncryptionKey)
)
].compactMap { $0 }
)
}
/// Since message requests can't be reversed, we should only handle setting `isApproved`
/// and `didApproveMe` to `true`. This should prevent some weird edge cases where a config
/// message could incorrectly swap `isApproved` and `didApproveMe` back to `false`
if
(contact.isApproved != data.contact.isApproved) ||
(contact.isBlocked != data.contact.isBlocked) ||
(contact.didApproveMe != data.contact.didApproveMe)
{
try contact.save(db)
try Contact
.filter(id: sessionId)
.updateAll( // Handling a config update so don't use `updateAllAndConfig`
db,
[
(!data.contact.isApproved ? nil :
Contact.Columns.isApproved.set(to: true)
),
Contact.Columns.isBlocked.set(to: data.contact.isBlocked),
(!data.contact.didApproveMe ? nil :
Contact.Columns.didApproveMe.set(to: true)
)
].compactMap { $0 }
)
}
}
}
// MARK: - Outgoing Changes
static func upsert(
contactData: [(id: String, contact: Contact?, profile: Profile?)],
in atomicConf: Atomic<UnsafeMutablePointer<config_object>?>
) throws -> ConfResult {
guard atomicConf.wrappedValue != nil else { throw SessionUtilError.nilConfigObject }
// The current user's contact data doesn't need to sync so exclude it
let userPublicKey: String = getUserHexEncodedPublicKey()
let targetContacts: [(id: String, contact: Contact?, profile: Profile?)] = contactData
.filter { $0.id != userPublicKey }
// If we only updated the current user contact then no need to continue
guard !targetContacts.isEmpty else { return ConfResult(needsPush: false, needsDump: false) }
// Since we are doing direct memory manipulation we are using an `Atomic` type which has
// blocking access in its `mutate` closure
return atomicConf.mutate { conf in
// Update the contact data
targetContacts
.forEach { (id, maybeContact, maybeProfile) in
var sessionId: [CChar] = id
.bytes
.map { CChar(bitPattern: $0) }
var contact: contacts_contact = contacts_contact()
guard contacts_get_or_create(conf, &contact, &sessionId) else {
SNLog("Unable to upsert contact from Config Message")
return
}
// Assign all properties to match the updated contact (if there is one)
if let updatedContact: Contact = maybeContact {
contact.approved = updatedContact.isApproved
contact.approved_me = updatedContact.didApproveMe
contact.blocked = updatedContact.isBlocked
}
// Update the profile data (if there is one)
if let updatedProfile: Profile = maybeProfile {
/// Users we have sent a message request to may not have profile info in certain situations
///
/// Note: We **MUST** store these in local variables rather than access them directly or they won't
/// exist in memory long enough to actually be assigned in the C type
let updatedName: [CChar]? = (updatedProfile.name.isEmpty ?
nil :
updatedProfile.name
.bytes
.map { CChar(bitPattern: $0) }
)
let updatedNickname: [CChar]? = updatedProfile.nickname?
.bytes
.map { CChar(bitPattern: $0) }
let updatedAvatarUrl: [CChar]? = updatedProfile.profilePictureUrl?
.bytes
.map { CChar(bitPattern: $0) }
let updatedAvatarKey: [UInt8]? = updatedProfile.profileEncryptionKey?
.bytes
let oldAvatarUrl: String? = contact.profile_pic.url.map { String(cString: $0) }
let oldAvatarKey: Data? = (contact.profile_pic.key != nil && contact.profile_pic.keylen > 0 ?
Data(bytes: contact.profile_pic.key, count: contact.profile_pic.keylen) :
nil
)
updatedName?.withUnsafeBufferPointer { contact.name = $0.baseAddress }
(updatedNickname == nil ?
contact.nickname = nil :
updatedNickname?.withUnsafeBufferPointer { contact.nickname = $0.baseAddress }
)
(updatedAvatarUrl == nil ?
contact.profile_pic.url = nil :
updatedAvatarUrl?.withUnsafeBufferPointer {
contact.profile_pic.url = $0.baseAddress
}
)
(updatedAvatarKey == nil ?
contact.profile_pic.key = nil :
updatedAvatarKey?.withUnsafeBufferPointer {
contact.profile_pic.key = $0.baseAddress
}
)
contact.profile_pic.keylen = (updatedAvatarKey?.count ?? 0)
// Download the profile picture if needed
if oldAvatarUrl != updatedProfile.profilePictureUrl || oldAvatarKey != updatedProfile.profileEncryptionKey {
ProfileManager.downloadAvatar(for: updatedProfile)
}
}
// Store the updated contact
contacts_set(conf, &contact)
}
return ConfResult(
needsPush: config_needs_push(conf),
needsDump: config_needs_dump(conf)
)
}
}
}
// MARK: - Convenience
internal extension SessionUtil {
static func updatingContacts<T>(_ db: Database, _ updated: [T]) throws -> [T] {
guard let updatedContacts: [Contact] = updated as? [Contact] else { throw StorageError.generic }
// The current user's contact data doesn't need to sync so exclude it
let userPublicKey: String = getUserHexEncodedPublicKey(db)
let targetContacts: [Contact] = updatedContacts.filter { $0.id != userPublicKey }
// If we only updated the current user contact then no need to continue
guard !targetContacts.isEmpty else { return updated }
db.afterNextTransaction { db in
do {
let atomicConf: Atomic<UnsafeMutablePointer<config_object>?> = SessionUtil.config(
for: .contacts,
publicKey: userPublicKey
)
let result: ConfResult = try SessionUtil
.upsert(
contactData: targetContacts.map { (id: $0.id, contact: $0, profile: nil) },
in: atomicConf
)
// If we don't need to dump the data then we can finish early
guard result.needsDump else { return }
try SessionUtil.saveState(
db,
keepingExistingMessageHashes: true,
configDump: try atomicConf.mutate { conf in
try SessionUtil.createDump(
conf: conf,
for: .contacts,
publicKey: userPublicKey,
messageHashes: nil
)
}
)
}
catch {
SNLog("[libSession-util] Failed to dump updated data")
}
}
return updated
}
static func updatingProfiles<T>(_ db: Database, _ updated: [T]) throws -> [T] {
guard let updatedProfiles: [Profile] = updated as? [Profile] else { throw StorageError.generic }
// We should only sync profiles which are associated with contact data to avoid including profiles
// for random people in community conversations so filter out any profiles which don't have an
// associated contact
let existingContactIds: [String] = (try? Contact
.filter(ids: updatedProfiles.map { $0.id })
.select(.id)
.asRequest(of: String.self)
.fetchAll(db))
.defaulting(to: [])
// If none of the profiles are associated with existing contacts then ignore the changes (no need
// to do a config sync)
guard !existingContactIds.isEmpty else { return updated }
// Get the user public key (updating their profile is handled separately)
let userPublicKey: String = getUserHexEncodedPublicKey(db)
db.afterNextTransaction { db in
do {
// Update the user profile first (if needed)
if let updatedUserProfile: Profile = updatedProfiles.first(where: { $0.id == userPublicKey }) {
let atomicConf: Atomic<UnsafeMutablePointer<config_object>?> = SessionUtil.config(
for: .userProfile,
publicKey: userPublicKey
)
let result: ConfResult = try SessionUtil.update(
profile: updatedUserProfile,
in: atomicConf
)
if result.needsDump {
try SessionUtil.saveState(
db,
keepingExistingMessageHashes: true,
configDump: try atomicConf.mutate { conf in
try SessionUtil.createDump(
conf: conf,
for: .userProfile,
publicKey: userPublicKey,
messageHashes: nil
)
}
)
}
}
// Then update other contacts
let atomicConf: Atomic<UnsafeMutablePointer<config_object>?> = SessionUtil.config(
for: .contacts,
publicKey: userPublicKey
)
let result: ConfResult = try SessionUtil
.upsert(
contactData: updatedProfiles
.filter { $0.id != userPublicKey }
.map { (id: $0.id, contact: nil, profile: $0) },
in: atomicConf
)
// If we don't need to dump the data then we can finish early
guard result.needsDump else { return }
try SessionUtil.saveState(
db,
keepingExistingMessageHashes: true,
configDump: try atomicConf.mutate { conf in
try SessionUtil.createDump(
conf: conf,
for: .contacts,
publicKey: userPublicKey,
messageHashes: nil
)
}
)
}
catch {
SNLog("[libSession-util] Failed to dump updated data")
}
}
return updated
}
}

View File

@ -6,22 +6,24 @@ import SessionUtil
import SessionUtilitiesKit import SessionUtilitiesKit
internal extension SessionUtil { internal extension SessionUtil {
// MARK: - Incoming Changes
static func handleUserProfileUpdate( static func handleUserProfileUpdate(
_ db: Database, _ db: Database,
in target: Target, in atomicConf: Atomic<UnsafeMutablePointer<config_object>?>,
needsDump: Bool, needsDump: Bool,
latestConfigUpdateSentTimestamp: TimeInterval latestConfigUpdateSentTimestamp: TimeInterval
) throws { ) throws {
typealias ProfileData = (profileName: String, profilePictureUrl: String?, profilePictureKey: Data?) typealias ProfileData = (profileName: String, profilePictureUrl: String?, profilePictureKey: Data?)
guard needsDump else { return } guard needsDump else { return }
guard target.conf.wrappedValue != nil else { throw SessionUtilError.nilConfigObject } guard atomicConf.wrappedValue != nil else { throw SessionUtilError.nilConfigObject }
let userPublicKey: String = getUserHexEncodedPublicKey(db) let userPublicKey: String = getUserHexEncodedPublicKey(db)
// Since we are doing direct memory manipulation we are using an `Atomic` type which has // Since we are doing direct memory manipulation we are using an `Atomic` type which has
// blocking access in it's `mutate` closure // blocking access in it's `mutate` closure
let maybeProfileData: ProfileData? = target.conf.mutate { conf -> ProfileData? in let maybeProfileData: ProfileData? = atomicConf.mutate { conf -> ProfileData? in
// A profile must have a name so if this is null then it's invalid and can be ignored // A profile must have a name so if this is null then it's invalid and can be ignored
guard let profileNamePtr: UnsafePointer<CChar> = user_profile_get_name(conf) else { guard let profileNamePtr: UnsafePointer<CChar> = user_profile_get_name(conf) else {
return nil return nil
@ -52,33 +54,55 @@ internal extension SessionUtil {
// Only save the data in the database if it's valid // Only save the data in the database if it's valid
guard let profileData: ProfileData = maybeProfileData else { return } guard let profileData: ProfileData = maybeProfileData else { return }
// Profile (also force-approve the current user in case the account got into a weird state or // Handle user profile changes
// restored directly from a migration) try ProfileManager.updateProfileIfNeeded(
try MessageReceiver.updateProfileIfNeeded(
db, db,
publicKey: userPublicKey, publicKey: userPublicKey,
name: profileData.profileName, name: profileData.profileName,
profilePictureUrl: profileData.profilePictureUrl, avatarUpdate: {
profileKey: profileData.profilePictureKey, guard
sentTimestamp: latestConfigUpdateSentTimestamp let profilePictureUrl: String = profileData.profilePictureUrl,
let profileKey: Data = profileData.profilePictureKey
else { return .none }
return .updateTo(
url: profilePictureUrl,
key: profileKey,
fileName: nil
)
}(),
sentTimestamp: latestConfigUpdateSentTimestamp,
calledFromConfigHandling: true
) )
try Contact(id: userPublicKey)
.with( // Create a contact for the current user if needed (also force-approve the current user
isApproved: true, // in case the account got into a weird state or restored directly from a migration)
didApproveMe: true let userContact: Contact = Contact.fetchOrCreate(db, id: userPublicKey)
)
.save(db) if !userContact.isTrusted || !userContact.isApproved || !userContact.didApproveMe {
try userContact.save(db)
try Contact
.filter(id: userPublicKey)
.updateAll( // Handling a config update so don't use `updateAllAndConfig`
db,
Contact.Columns.isTrusted.set(to: true), // Always trust the current user
Contact.Columns.isApproved.set(to: true),
Contact.Columns.didApproveMe.set(to: true)
)
}
} }
@discardableResult static func update( // MARK: - Outgoing Changes
static func update(
profile: Profile, profile: Profile,
in target: Target in atomicConf: Atomic<UnsafeMutablePointer<config_object>?>
) throws -> ConfResult { ) throws -> ConfResult {
guard target.conf.wrappedValue != nil else { throw SessionUtilError.nilConfigObject } guard atomicConf.wrappedValue != nil else { throw SessionUtilError.nilConfigObject }
// Since we are doing direct memory manipulation we are using an `Atomic` type which has // Since we are doing direct memory manipulation we are using an `Atomic` type which has
// blocking access in it's `mutate` closure // blocking access in it's `mutate` closure
return target.conf.mutate { conf in return atomicConf.mutate { conf in
// Update the name // Update the name
user_profile_set_name(conf, profile.name) user_profile_set_name(conf, profile.name)
@ -101,7 +125,7 @@ internal extension SessionUtil {
user_profile_set_pic(conf, profilePic) user_profile_set_pic(conf, profilePic)
} }
return ( return ConfResult(
needsPush: config_needs_push(conf), needsPush: config_needs_push(conf),
needsDump: config_needs_dump(conf) needsDump: config_needs_dump(conf)
) )
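
Note: the `avatarUpdate` parameter introduced here replaces the old `image`/`imageFilePath` pair; based on the call sites in this commit it takes (at least) the following forms, summarised as a hedged sketch rather than an exhaustive list of the enum's cases:

// No avatar change
avatarUpdate: .none

// Point the profile at an already-uploaded, encrypted picture (the config handling path above)
avatarUpdate: .updateTo(url: profilePictureUrl, key: profileKey, fileName: nil)

// Re-upload a local file (as used by UpdateProfilePictureJob earlier in this commit)
avatarUpdate: (profileFilePath.map { .uploadFilePath($0) } ?? .none)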

View File

@ -0,0 +1,73 @@
// Copyright © 2022 Rangeproof Pty Ltd. All rights reserved.
import Foundation
import GRDB
import SessionUtilitiesKit
// MARK: - GRDB
public extension QueryInterfaceRequest {
@discardableResult
func updateAllAndConfig(
_ db: Database,
_ assignments: ColumnAssignment...
) throws -> Int {
return try updateAllAndConfig(db, assignments)
}
@discardableResult
func updateAllAndConfig(
_ db: Database,
_ assignments: [ColumnAssignment]
) throws -> Int {
switch self {
case let contactRequest as QueryInterfaceRequest<Contact>:
return try contactRequest.updateAndFetchAllAndUpdateConfig(db, assignments).count
case let profileRequest as QueryInterfaceRequest<Profile>:
return try profileRequest.updateAndFetchAllAndUpdateConfig(db, assignments).count
default: return try self.updateAll(db, assignments)
}
}
}
public extension QueryInterfaceRequest where RowDecoder: FetchableRecord & TableRecord {
@discardableResult
func updateAndFetchAllAndUpdateConfig(
_ db: Database,
_ assignments: ColumnAssignment...
) throws -> [RowDecoder] {
return try updateAndFetchAllAndUpdateConfig(db, assignments)
}
@discardableResult
func updateAndFetchAllAndUpdateConfig(
_ db: Database,
_ assignments: [ColumnAssignment]
) throws -> [RowDecoder] {
defer {
db.afterNextTransaction { db in
guard
self is QueryInterfaceRequest<Contact> ||
self is QueryInterfaceRequest<Profile> ||
self is QueryInterfaceRequest<ClosedGroup>
else { return }
// If we change one of these types then we may as well automatically enqueue
// a new config sync job once the transaction completes
ConfigurationSyncJob.enqueue(db)
}
}
switch self {
case is QueryInterfaceRequest<Contact>:
return try SessionUtil.updatingContacts(db, try updateAndFetchAll(db, assignments))
case is QueryInterfaceRequest<Profile>:
return try SessionUtil.updatingProfiles(db, try updateAndFetchAll(db, assignments))
default: return try self.updateAndFetchAll(db, assignments)
}
}
}
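
Note: a usage sketch mirroring the `BlindedIdLookup` change earlier in this commit; routing column assignments on `Contact` or `Profile` through this helper also pushes the change into the in-memory config and enqueues a `ConfigurationSyncJob` once the transaction commits, while other record types fall through to the plain GRDB `updateAll`:

try Contact
    .filter(id: contact.id)
    .updateAllAndConfig(db, Contact.Columns.isApproved.set(to: true))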

View File

@ -2,73 +2,94 @@
import Foundation import Foundation
import GRDB import GRDB
import SessionSnodeKit
import SessionUtil import SessionUtil
import SessionUtilitiesKit import SessionUtilitiesKit
/*internal*/public enum SessionUtil { public enum SessionUtil {
public typealias ConfResult = (needsPush: Bool, needsDump: Bool) public struct ConfResult {
public typealias IncomingConfResult = (needsPush: Bool, needsDump: Bool, latestSentTimestamp: TimeInterval) let needsPush: Bool
let needsDump: Bool
}
enum Target { public struct IncomingConfResult {
case global(variant: ConfigDump.Variant) let needsPush: Bool
case custom(conf: Atomic<UnsafeMutablePointer<config_object>?>) let needsDump: Bool
let messageHashes: [String]
var conf: Atomic<UnsafeMutablePointer<config_object>?> { let latestSentTimestamp: TimeInterval
switch self { }
case .global(let variant): return SessionUtil.config(for: variant)
case .custom(let conf): return conf public struct OutgoingConfResult {
} let message: SharedConfigMessage
} let namespace: SnodeAPI.Namespace
let destination: Message.Destination
let oldMessageHashes: [String]?
} }
// MARK: - Configs // MARK: - Configs
private static var userProfileConfig: Atomic<UnsafeMutablePointer<config_object>?> = Atomic(nil) fileprivate static var configStore: Atomic<[ConfigKey: Atomic<UnsafeMutablePointer<config_object>?>]> = Atomic([:])
private static var contactsConfig: Atomic<UnsafeMutablePointer<config_object>?> = Atomic(nil)
public static func config(for variant: ConfigDump.Variant, publicKey: String) -> Atomic<UnsafeMutablePointer<config_object>?> {
let key: ConfigKey = ConfigKey(variant: variant, publicKey: publicKey)
return (
SessionUtil.configStore.wrappedValue[key] ??
Atomic(nil)
)
}
// MARK: - Variables // MARK: - Variables
/// Returns `true` if there is a config which needs to be pushed, but returns `false` if the configs are all up to date or haven't been
/// loaded yet (eg. fresh install)
public static var needsSync: Bool { public static var needsSync: Bool {
return ConfigDump.Variant.allCases.contains { variant in return configStore
switch variant { .wrappedValue
case .userProfile: .contains { _, atomicConf in config_needs_push(atomicConf.wrappedValue) }
return (userProfileConfig.wrappedValue.map { config_needs_push($0) } ?? false)
case .contacts:
return (contactsConfig.wrappedValue.map { config_needs_push($0) } ?? false)
}
}
}
// MARK: - Convenience
private static func config(for variant: ConfigDump.Variant) -> Atomic<UnsafeMutablePointer<config_object>?> {
switch variant {
case .userProfile: return SessionUtil.userProfileConfig
case .contacts: return SessionUtil.contactsConfig
}
} }
// MARK: - Loading // MARK: - Loading
/*internal*/public static func loadState(ed25519SecretKey: [UInt8]?) { public static func loadState(
userPublicKey: String,
ed25519SecretKey: [UInt8]?
) {
guard let secretKey: [UInt8] = ed25519SecretKey else { return } guard let secretKey: [UInt8] = ed25519SecretKey else { return }
SessionUtil.userProfileConfig.mutate { $0 = loadState(for: .userProfile, secretKey: secretKey) } // Retrieve the existing dumps from the database
SessionUtil.contactsConfig.mutate { $0 = loadState(for: .contacts, secretKey: secretKey) } let existingDumps: Set<ConfigDump> = Storage.shared
} .read { db in try ConfigDump.fetchSet(db) }
.defaulting(to: [])
private static func loadState( let existingDumpVariants: Set<ConfigDump.Variant> = existingDumps
for variant: ConfigDump.Variant, .map { $0.variant }
secretKey ed25519SecretKey: [UInt8]? .asSet()
) -> UnsafeMutablePointer<config_object>? { let missingRequiredVariants: Set<ConfigDump.Variant> = ConfigDump.Variant.userVariants
guard let secretKey: [UInt8] = ed25519SecretKey else { return nil } .asSet()
.subtracting(existingDumpVariants)
// Load any // Create the 'config_object' records for each dump
let storedDump: Data? = Storage.shared SessionUtil.configStore.mutate { confStore in
.read { db in try ConfigDump.fetchOne(db, id: variant) }? existingDumps.forEach { dump in
.data confStore[ConfigKey(variant: dump.variant, publicKey: dump.publicKey)] = Atomic(
try? SessionUtil.loadState(
return try? loadState(for: variant, secretKey: secretKey, cachedData: storedDump) for: dump.variant,
secretKey: secretKey,
cachedData: dump.data
)
)
}
missingRequiredVariants.forEach { variant in
confStore[ConfigKey(variant: variant, publicKey: userPublicKey)] = Atomic(
try? SessionUtil.loadState(
for: variant,
secretKey: secretKey,
cachedData: nil
)
)
}
}
} }
internal static func loadState( internal static func loadState(
@ -117,87 +138,165 @@ import SessionUtilitiesKit
internal static func saveState( internal static func saveState(
_ db: Database, _ db: Database,
conf: UnsafeMutablePointer<config_object>?, keepingExistingMessageHashes: Bool,
for variant: ConfigDump.Variant configDump: ConfigDump?
) throws { ) throws {
guard let configDump: ConfigDump = configDump else { return }
// If we want to keep the existing message hashes then we need
// to fetch them from the db and create a new 'ConfigDump' instance
let targetDump: ConfigDump = try {
guard keepingExistingMessageHashes else { return configDump }
let existingCombinedMessageHashes: String? = try ConfigDump
.filter(
ConfigDump.Columns.variant == configDump.variant &&
ConfigDump.Columns.publicKey == configDump.publicKey
)
.select(.combinedMessageHashes)
.asRequest(of: String.self)
.fetchOne(db)
return ConfigDump(
variant: configDump.variant,
publicKey: configDump.publicKey,
data: configDump.data,
messageHashes: ConfigDump.messageHashes(from: existingCombinedMessageHashes)
)
}()
// Actually save the dump
try targetDump.save(db)
}
internal static func createDump(
conf: UnsafeMutablePointer<config_object>?,
for variant: ConfigDump.Variant,
publicKey: String,
messageHashes: [String]?
) throws -> ConfigDump? {
guard conf != nil else { throw SessionUtilError.nilConfigObject } guard conf != nil else { throw SessionUtilError.nilConfigObject }
// If it doesn't need a dump then do nothing // If it doesn't need a dump then do nothing
guard config_needs_dump(conf) else { return } guard config_needs_dump(conf) else { return nil }
var dumpResult: UnsafeMutablePointer<UInt8>? = nil var dumpResult: UnsafeMutablePointer<UInt8>? = nil
var dumpResultLen: Int = 0 var dumpResultLen: Int = 0
config_dump(conf, &dumpResult, &dumpResultLen) config_dump(conf, &dumpResult, &dumpResultLen)
guard let dumpResult: UnsafeMutablePointer<UInt8> = dumpResult else { return } guard let dumpResult: UnsafeMutablePointer<UInt8> = dumpResult else { return nil }
let dumpData: Data = Data(bytes: dumpResult, count: dumpResultLen) let dumpData: Data = Data(bytes: dumpResult, count: dumpResultLen)
dumpResult.deallocate() dumpResult.deallocate()
try ConfigDump( return ConfigDump(
variant: variant, variant: variant,
data: dumpData publicKey: publicKey,
data: dumpData,
messageHashes: messageHashes
) )
.save(db)
} }
// MARK: - Pushes // MARK: - Pushes
public static func getChanges( public static func pendingChanges(
for variants: [ConfigDump.Variant] = ConfigDump.Variant.allCases, _ db: Database,
userPublicKey: String,
ed25519SecretKey: [UInt8] ed25519SecretKey: [UInt8]
) -> [SharedConfigMessage] { ) throws -> [OutgoingConfResult] {
return variants let existingDumpInfo: Set<DumpInfo> = try ConfigDump
.compactMap { variant -> SharedConfigMessage? in .select(.variant, .publicKey, .combinedMessageHashes)
let conf = SessionUtil.config(for: variant) .asRequest(of: DumpInfo.self)
.fetchSet(db)
// Ensure we always check the required user config types for changes even if there is no dump
// data yet (to deal with first launch cases)
return existingDumpInfo
.inserting(
contentsOf: DumpInfo.requiredUserConfigDumpInfo(userPublicKey: userPublicKey)
.filter { requiredInfo -> Bool in
!existingDumpInfo.contains(where: {
$0.variant == requiredInfo.variant &&
$0.publicKey == requiredInfo.publicKey
})
}
)
.compactMap { dumpInfo -> OutgoingConfResult? in
let key: ConfigKey = ConfigKey(variant: dumpInfo.variant, publicKey: dumpInfo.publicKey)
let atomicConf: Atomic<UnsafeMutablePointer<config_object>?> = (
SessionUtil.configStore.wrappedValue[key] ??
Atomic(nil)
)
// Check if the config needs to be pushed // Check if the config needs to be pushed
guard config_needs_push(conf.wrappedValue) else { return nil } guard config_needs_push(atomicConf.wrappedValue) else { return nil }
var toPush: UnsafeMutablePointer<UInt8>? = nil var toPush: UnsafeMutablePointer<UInt8>? = nil
var toPushLen: Int = 0 var toPushLen: Int = 0
let seqNo: Int64 = conf.mutate { config_push($0, &toPush, &toPushLen) } let seqNo: Int64 = atomicConf.mutate { config_push($0, &toPush, &toPushLen) }
guard let toPush: UnsafeMutablePointer<UInt8> = toPush else { return nil } guard let toPush: UnsafeMutablePointer<UInt8> = toPush else { return nil }
let pushData: Data = Data(bytes: toPush, count: toPushLen) let pushData: Data = Data(bytes: toPush, count: toPushLen)
toPush.deallocate() toPush.deallocate()
return SharedConfigMessage( return OutgoingConfResult(
kind: variant.configMessageKind, message: SharedConfigMessage(
seqNo: seqNo, kind: dumpInfo.variant.configMessageKind,
data: pushData seqNo: seqNo,
data: pushData
),
namespace: dumpInfo.variant.namespace,
destination: (dumpInfo.publicKey == userPublicKey ?
Message.Destination.contact(publicKey: userPublicKey) :
Message.Destination.closedGroup(groupPublicKey: dumpInfo.publicKey)
),
oldMessageHashes: dumpInfo.messageHashes
) )
} }
} }
public static func markAsPushed(messages: [SharedConfigMessage]) -> [ConfigDump.Variant: Bool] { public static func markAsPushed(
messages.reduce(into: [:]) { result, message in message: SharedConfigMessage,
let conf = SessionUtil.config(for: message.kind.configDumpVariant) publicKey: String
) -> Bool {
// Mark the config as pushed let key: ConfigKey = ConfigKey(variant: message.kind.configDumpVariant, publicKey: publicKey)
config_confirm_pushed(conf.wrappedValue, message.seqNo) let atomicConf: Atomic<UnsafeMutablePointer<config_object>?> = (
SessionUtil.configStore.wrappedValue[key] ??
// Update the result to indicate whether the config needs to be dumped Atomic(nil)
result[message.kind.configDumpVariant] = config_needs_dump(conf.wrappedValue) )
}
// Mark the config as pushed
config_confirm_pushed(atomicConf.wrappedValue, message.seqNo)
// Update the result to indicate whether the config needs to be dumped
return config_needs_dump(atomicConf.wrappedValue)
} }
// MARK: - Receiving // MARK: - Receiving
public static func handleConfigMessages( public static func handleConfigMessages(
_ db: Database, _ db: Database,
messages: [SharedConfigMessage] messages: [SharedConfigMessage],
publicKey: String
) throws { ) throws {
guard !messages.isEmpty else { return }
guard !publicKey.isEmpty else { throw MessageReceiverError.noThread }
let groupedMessages: [SharedConfigMessage.Kind: [SharedConfigMessage]] = messages let groupedMessages: [SharedConfigMessage.Kind: [SharedConfigMessage]] = messages
.grouped(by: \.kind) .grouped(by: \.kind)
// Merge the config messages into the current state // Merge the config messages into the current state
let results: [ConfigDump.Variant: IncomingConfResult] = groupedMessages let results: [ConfigDump.Variant: IncomingConfResult] = groupedMessages
.reduce(into: [:]) { result, next in .reduce(into: [:]) { result, next in
let atomicConf = SessionUtil.config(for: next.key.configDumpVariant) let key: ConfigKey = ConfigKey(variant: next.key.configDumpVariant, publicKey: publicKey)
let atomicConf: Atomic<UnsafeMutablePointer<config_object>?> = (
SessionUtil.configStore.wrappedValue[key] ??
Atomic(nil)
)
var needsPush: Bool = false var needsPush: Bool = false
var needsDump: Bool = false var needsDump: Bool = false
let messageHashes: [String] = next.value.compactMap { $0.serverHash }
let messageSentTimestamp: TimeInterval = TimeInterval( let messageSentTimestamp: TimeInterval = TimeInterval(
(next.value.compactMap { $0.sentTimestamp }.max() ?? 0) / 1000 (next.value.compactMap { $0.sentTimestamp }.max() ?? 0) / 1000
) )
@ -217,25 +316,119 @@ import SessionUtilitiesKit
} }
// Return the current state of the config // Return the current state of the config
result[next.key.configDumpVariant] = ( result[next.key.configDumpVariant] = IncomingConfResult(
needsPush: needsPush, needsPush: needsPush,
needsDump: needsDump, needsDump: needsDump,
messageHashes: messageHashes,
latestSentTimestamp: messageSentTimestamp latestSentTimestamp: messageSentTimestamp
) )
} }
// If the data needs to be dumped then apply the relevant local changes // Process the results from the merging
try results.forEach { variant, result in try results.forEach { variant, result in
let key: ConfigKey = ConfigKey(variant: variant, publicKey: publicKey)
let atomicConf: Atomic<UnsafeMutablePointer<config_object>?> = (
SessionUtil.configStore.wrappedValue[key] ??
Atomic(nil)
)
// Apply the updated states to the database
switch variant { switch variant {
case .userProfile: case .userProfile:
try SessionUtil.handleUserProfileUpdate( try SessionUtil.handleUserProfileUpdate(
db, db,
in: .global(variant: variant), in: atomicConf,
needsDump: result.needsDump, needsDump: result.needsDump,
latestConfigUpdateSentTimestamp: result.latestSentTimestamp latestConfigUpdateSentTimestamp: result.latestSentTimestamp
) )
case .contacts:
try SessionUtil.handleContactsUpdate(
db,
in: atomicConf,
needsDump: result.needsDump
)
} }
// We need to get the existing message hashes and combine them with the latest from the
// service node to ensure the next push will properly clean up old messages
let oldMessageHashes: Set<String> = try ConfigDump
.filter(
ConfigDump.Columns.variant == variant &&
ConfigDump.Columns.publicKey == publicKey
)
.select(.combinedMessageHashes)
.asRequest(of: String.self)
.fetchOne(db)
.map { ConfigDump.messageHashes(from: $0) }
.defaulting(to: [])
.asSet()
let allMessageHashes: [String] = Array(oldMessageHashes
.inserting(contentsOf: result.messageHashes.asSet()))
let messageHashesChanged: Bool = (oldMessageHashes != result.messageHashes.asSet())
// Now that the changes are applied, update the cached dumps
switch (result.needsDump, messageHashesChanged) {
case (true, _):
// The config data had changes so regenerate the dump and save it
try atomicConf
.mutate { conf -> ConfigDump? in
try SessionUtil.createDump(
conf: conf,
for: variant,
publicKey: publicKey,
messageHashes: allMessageHashes
)
}?
.save(db)
case (false, true):
// The config data didn't change but there were different messages on the service node
// so just update the message hashes so the next sync can properly remove any old ones
try ConfigDump
.filter(
ConfigDump.Columns.variant == variant &&
ConfigDump.Columns.publicKey == publicKey
)
.updateAll(
db,
ConfigDump.Columns.combinedMessageHashes
.set(to: ConfigDump.combinedMessageHashes(from: allMessageHashes))
)
default: break
}
// Now that the local state has been updated, trigger a config sync (this will push any
// pending updates and properly update the state)
if results.contains(where: { $0.value.needsPush }) {
ConfigurationSyncJob.enqueue(db)
} }
} }
} }
// MARK: - Internal Convenience
fileprivate extension SessionUtil {
struct ConfigKey: Hashable {
let variant: ConfigDump.Variant
let publicKey: String
}
struct DumpInfo: FetchableRecord, Decodable, Hashable {
let variant: ConfigDump.Variant
let publicKey: String
private let combinedMessageHashes: String?
var messageHashes: [String]? { ConfigDump.messageHashes(from: combinedMessageHashes) }
// MARK: - Convenience
static func requiredUserConfigDumpInfo(userPublicKey: String) -> Set<DumpInfo> {
return ConfigDump.Variant.userVariants
.map { DumpInfo(variant: $0, publicKey: userPublicKey, combinedMessageHashes: nil) }
.asSet()
}
}
}
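
Note: at a high level the push cycle wired up in this file looks roughly like the sketch below (simplified; `ConfigurationSyncJob` is what actually drives steps 2 and 3, and batching/error handling are omitted):

// 1. On launch, hydrate the in-memory configs from the stored dumps
SessionUtil.loadState(userPublicKey: userPublicKey, ed25519SecretKey: ed25519SecretKey)

// 2. Collect the configs which currently need pushing (one OutgoingConfResult per config)
let changes: [SessionUtil.OutgoingConfResult] = try SessionUtil.pendingChanges(
    db,
    userPublicKey: userPublicKey,
    ed25519SecretKey: ed25519SecretKey
)

// 3. Once the swarm accepts a config message, confirm the push and dump if required
let needsDump: Bool = SessionUtil.markAsPushed(
    message: sentConfigMessage,   // the SharedConfigMessage that was just stored
    publicKey: userPublicKey
)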

View File

@ -6,15 +6,9 @@ import SessionSnodeKit
import SessionUtilitiesKit import SessionUtilitiesKit
public extension Message { public extension Message {
enum Destination: Codable { enum Destination: Codable, Hashable {
case contact( case contact(publicKey: String)
publicKey: String, case closedGroup(groupPublicKey: String)
namespace: SnodeAPI.Namespace
)
case closedGroup(
groupPublicKey: String,
namespace: SnodeAPI.Namespace
)
case openGroup( case openGroup(
roomToken: String, roomToken: String,
server: String, server: String,
@ -23,13 +17,6 @@ public extension Message {
fileIds: [String]? = nil fileIds: [String]? = nil
) )
case openGroupInbox(server: String, openGroupPublicKey: String, blindedPublicKey: String) case openGroupInbox(server: String, openGroupPublicKey: String, blindedPublicKey: String)
var namespace: SnodeAPI.Namespace {
switch self {
case .contact(_, let namespace), .closedGroup(_, let namespace): return namespace
default: preconditionFailure("Attepted to retrieve namespace for invalid destination")
}
}
public static func from( public static func from(
_ db: Database, _ db: Database,
@ -50,10 +37,10 @@ public extension Message {
) )
} }
return .contact(publicKey: thread.id, namespace: .default) return .contact(publicKey: thread.id)
case .closedGroup: case .closedGroup:
return .closedGroup(groupPublicKey: thread.id, namespace: .legacyClosedGroup) return .closedGroup(groupPublicKey: thread.id)
case .openGroup: case .openGroup:
guard let openGroup: OpenGroup = try thread.openGroup.fetchOne(db) else { guard let openGroup: OpenGroup = try thread.openGroup.fetchOne(db) else {
@ -79,59 +66,5 @@ public extension Message {
default: return self default: return self
} }
} }
// MARK: - Codable
// FIXME: Remove this custom implementation after enough time has passed (added the 'namespace' properties)
public init(from decoder: Decoder) throws {
let container: KeyedDecodingContainer<CodingKeys> = try decoder.container(keyedBy: CodingKeys.self)
// Should only have a single root key so we can just switch on it to have cleaner code
switch container.allKeys.first {
case .contact:
let childContainer: KeyedDecodingContainer<ContactCodingKeys> = try container.nestedContainer(keyedBy: ContactCodingKeys.self, forKey: .contact)
self = .contact(
publicKey: try childContainer.decode(String.self, forKey: .publicKey),
namespace: (
(try? childContainer.decode(SnodeAPI.Namespace.self, forKey: .namespace)) ??
.default
)
)
case .closedGroup:
let childContainer: KeyedDecodingContainer<ClosedGroupCodingKeys> = try container.nestedContainer(keyedBy: ClosedGroupCodingKeys.self, forKey: .closedGroup)
self = .closedGroup(
groupPublicKey: try childContainer.decode(String.self, forKey: .groupPublicKey),
namespace: (
(try? childContainer.decode(SnodeAPI.Namespace.self, forKey: .namespace)) ??
.legacyClosedGroup
)
)
case .openGroup:
let childContainer: KeyedDecodingContainer<OpenGroupCodingKeys> = try container.nestedContainer(keyedBy: OpenGroupCodingKeys.self, forKey: .openGroup)
self = .openGroup(
roomToken: try childContainer.decode(String.self, forKey: .roomToken),
server: try childContainer.decode(String.self, forKey: .server),
whisperTo: try? childContainer.decode(String.self, forKey: .whisperTo),
whisperMods: try childContainer.decode(Bool.self, forKey: .whisperMods),
fileIds: try? childContainer.decode([String].self, forKey: .fileIds)
)
case .openGroupInbox:
let childContainer: KeyedDecodingContainer<OpenGroupInboxCodingKeys> = try container.nestedContainer(keyedBy: OpenGroupInboxCodingKeys.self, forKey: .openGroupInbox)
self = .openGroupInbox(
server: try childContainer.decode(String.self, forKey: .server),
openGroupPublicKey: try childContainer.decode(String.self, forKey: .openGroupPublicKey),
blindedPublicKey: try childContainer.decode(String.self, forKey: .blindedPublicKey)
)
default: throw MessageReceiverError.invalidMessage
}
}
} }
} }
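
Note: with the namespace removed from `Message.Destination`, callers resolve it from the config variant instead (see `ConfigDump.Variant.namespace` earlier in this commit); a tiny illustration:

let destination: Message.Destination = .contact(publicKey: userPublicKey)       // no namespace parameter any more
let namespace: SnodeAPI.Namespace = ConfigDump.Variant.userProfile.namespace    // resolves to .configUserProfile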

View File

@ -183,7 +183,7 @@ public extension Message {
default: return false default: return false
} }
case is ConfigurationMessage: return true case is ConfigurationMessage, is SharedConfigMessage: return true
case is UnsendRequest: return true case is UnsendRequest: return true
default: return false default: return false
} }

@ -108,15 +108,18 @@ internal extension AnyPublisher where Output == HTTP.BatchResponse, Failure == E
    func map<E: EndpointType>(
        requests: [OpenGroupAPI.BatchRequest.Info],
        toHashMapFor endpointType: E.Type
-   ) -> AnyPublisher<[E: (ResponseInfoType, Codable?)], Error> {
+   ) -> AnyPublisher<(info: ResponseInfoType, data: [E: Codable]), Error> {
        return self
-           .map { result in
-               result.enumerated()
-                   .reduce(into: [:]) { prev, next in
-                       guard let endpoint: E = requests[next.offset].endpoint as? E else { return }
-
-                       prev[endpoint] = next.element
-                   }
+           .map { result -> (info: ResponseInfoType, data: [E: Codable]) in
+               (
+                   info: result.info,
+                   data: result.responses.enumerated()
+                       .reduce(into: [:]) { prev, next in
+                           guard let endpoint: E = requests[next.offset].endpoint as? E else { return }
+
+                           prev[endpoint] = next.element
+                       }
+               )
            }
            .eraseToAnyPublisher()
    }
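The mapper now returns the overall response info once, alongside a dictionary keyed by endpoint, instead of a per-endpoint `(info, response)` pair. A rough, stand-alone sketch of that fold (stand-in types, not the real `HTTP.BatchResponse`/`EndpointType`):

// Rough sketch with stand-in types: fold the ordered sub-responses into a dictionary
// keyed by the endpoint that produced them, carrying the overall info alongside it once.
enum SketchEndpoint: Hashable { case capabilities, rooms }

struct SketchResponseInfo { let statusCode: Int }

struct SketchBatchResponse {
    let info: SketchResponseInfo
    let responses: [Any]            // one entry per request, in request order
}

func mapToEndpointData(
    requestEndpoints: [SketchEndpoint],
    response: SketchBatchResponse
) -> (info: SketchResponseInfo, data: [SketchEndpoint: Any]) {
    let data: [SketchEndpoint: Any] = response.responses.enumerated()
        .reduce(into: [:]) { result, next in
            // Ignore any response we can't match back to a request
            guard requestEndpoints.indices.contains(next.offset) else { return }

            result[requestEndpoints[next.offset]] = next.element
        }

    return (info: response.info, data: data)
}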

@ -32,7 +32,7 @@ public enum OpenGroupAPI {
hasPerformedInitialPoll: Bool, hasPerformedInitialPoll: Bool,
timeSinceLastPoll: TimeInterval, timeSinceLastPoll: TimeInterval,
using dependencies: SMKDependencies = SMKDependencies() using dependencies: SMKDependencies = SMKDependencies()
) -> AnyPublisher<[Endpoint: (ResponseInfoType, Codable?)], Error> { ) -> AnyPublisher<(info: ResponseInfoType, data: [Endpoint: Codable]), Error> {
let lastInboxMessageId: Int64 = (try? OpenGroup let lastInboxMessageId: Int64 = (try? OpenGroup
.select(.inboxLatestMessageId) .select(.inboxLatestMessageId)
.filter(OpenGroup.Columns.server == server) .filter(OpenGroup.Columns.server == server)
@ -152,7 +152,7 @@ public enum OpenGroupAPI {
server: String, server: String,
requests: [BatchRequest.Info], requests: [BatchRequest.Info],
using dependencies: SMKDependencies = SMKDependencies() using dependencies: SMKDependencies = SMKDependencies()
) -> AnyPublisher<[Endpoint: (ResponseInfoType, Codable?)], Error> { ) -> AnyPublisher<(info: ResponseInfoType, data: [Endpoint: Codable]), Error> {
let responseTypes = requests.map { $0.responseType } let responseTypes = requests.map { $0.responseType }
return OpenGroupAPI return OpenGroupAPI
@ -184,7 +184,7 @@ public enum OpenGroupAPI {
server: String, server: String,
requests: [BatchRequest.Info], requests: [BatchRequest.Info],
using dependencies: SMKDependencies = SMKDependencies() using dependencies: SMKDependencies = SMKDependencies()
) -> AnyPublisher<[Endpoint: (ResponseInfoType, Codable?)], Error> { ) -> AnyPublisher<(info: ResponseInfoType, data: [Endpoint: Codable]), Error> {
let responseTypes = requests.map { $0.responseType } let responseTypes = requests.map { $0.responseType }
return OpenGroupAPI return OpenGroupAPI
@ -339,10 +339,9 @@ public enum OpenGroupAPI {
requests: requestResponseType, requests: requestResponseType,
using: dependencies using: dependencies
) )
.flatMap { (response: [Endpoint: (ResponseInfoType, Codable?)]) -> AnyPublisher<(capabilities: (info: ResponseInfoType, data: Capabilities), room: (info: ResponseInfoType, data: Room)), Error> in .flatMap { (info: ResponseInfoType, data: [Endpoint: Codable]) -> AnyPublisher<(capabilities: (info: ResponseInfoType, data: Capabilities), room: (info: ResponseInfoType, data: Room)), Error> in
let maybeCapabilities: (info: ResponseInfoType, data: Capabilities?)? = response[.capabilities] let maybeCapabilities: HTTP.BatchSubResponse<Capabilities>? = (data[.capabilities] as? HTTP.BatchSubResponse<Capabilities>)
.map { info, data in (info, (data as? HTTP.BatchSubResponse<Capabilities>)?.body) } let maybeRoomResponse: Codable? = data
let maybeRoomResponse: (ResponseInfoType, Codable?)? = response
.first(where: { key, _ in .first(where: { key, _ in
switch key { switch key {
case .room: return true case .room: return true
@ -350,14 +349,13 @@ public enum OpenGroupAPI {
} }
}) })
.map { _, value in value } .map { _, value in value }
let maybeRoom: (info: ResponseInfoType, data: Room?)? = maybeRoomResponse let maybeRoom: HTTP.BatchSubResponse<Room>? = (maybeRoomResponse as? HTTP.BatchSubResponse<Room>)
.map { info, data in (info, (data as? HTTP.BatchSubResponse<Room>)?.body) }
guard guard
let capabilitiesInfo: ResponseInfoType = maybeCapabilities?.info, let capabilitiesInfo: ResponseInfoType = maybeCapabilities?.responseInfo,
let capabilities: Capabilities = maybeCapabilities?.data, let capabilities: Capabilities = maybeCapabilities?.body,
let roomInfo: ResponseInfoType = maybeRoom?.info, let roomInfo: ResponseInfoType = maybeRoom?.responseInfo,
let room: Room = maybeRoom?.data let room: Room = maybeRoom?.body
else { else {
return Fail(error: HTTPError.parsingFailed) return Fail(error: HTTPError.parsingFailed)
.eraseToAnyPublisher() .eraseToAnyPublisher()
@ -407,25 +405,22 @@ public enum OpenGroupAPI {
requests: requestResponseType, requests: requestResponseType,
using: dependencies using: dependencies
) )
.flatMap { (response: [Endpoint: (ResponseInfoType, Codable?)]) -> AnyPublisher<(capabilities: (info: ResponseInfoType, data: Capabilities), rooms: (info: ResponseInfoType, data: [Room])), Error> in .flatMap { (info: ResponseInfoType, data: [Endpoint: Codable]) -> AnyPublisher<(capabilities: (info: ResponseInfoType, data: Capabilities), rooms: (info: ResponseInfoType, data: [Room])), Error> in
let maybeCapabilities: (info: ResponseInfoType, data: Capabilities?)? = response[.capabilities] let maybeCapabilities: HTTP.BatchSubResponse<Capabilities>? = (data[.capabilities] as? HTTP.BatchSubResponse<Capabilities>)
.map { info, data in (info, (data as? HTTP.BatchSubResponse<Capabilities>)?.body) } let maybeRooms: HTTP.BatchSubResponse<[Room]>? = data
let maybeRoomResponse: (ResponseInfoType, Codable?)? = response
.first(where: { key, _ in .first(where: { key, _ in
switch key { switch key {
case .rooms: return true case .rooms: return true
default: return false default: return false
} }
}) })
.map { _, value in value } .map { _, value in value as? HTTP.BatchSubResponse<[Room]> }
let maybeRooms: (info: ResponseInfoType, data: [Room]?)? = maybeRoomResponse
.map { info, data in (info, (data as? HTTP.BatchSubResponse<[Room]>)?.body) }
guard guard
let capabilitiesInfo: ResponseInfoType = maybeCapabilities?.info, let capabilitiesInfo: ResponseInfoType = maybeCapabilities?.responseInfo,
let capabilities: Capabilities = maybeCapabilities?.data, let capabilities: Capabilities = maybeCapabilities?.body,
let roomsInfo: ResponseInfoType = maybeRooms?.info, let roomsInfo: ResponseInfoType = maybeRooms?.responseInfo,
let rooms: [Room] = maybeRooms?.data let rooms: [Room] = maybeRooms?.body
else { else {
return Fail(error: HTTPError.parsingFailed) return Fail(error: HTTPError.parsingFailed)
.eraseToAnyPublisher() .eraseToAnyPublisher()
@ -1263,7 +1258,9 @@ public enum OpenGroupAPI {
requests: requestResponseType, requests: requestResponseType,
using: dependencies using: dependencies
) )
.map { $0.values.map { responseInfo, _ in responseInfo } } .map { _, data -> [ResponseInfoType] in
data.values.compactMap { ($0 as? BatchSubResponseType)?.responseInfo }
}
.eraseToAnyPublisher() .eraseToAnyPublisher()
} }
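With the data keyed by endpoint, call sites cast each entry to the expected `HTTP.BatchSubResponse<T>` and read `responseInfo`/`body` from it. A small generic sketch of that cast-and-unwrap, with stand-in types rather than the real ones:

// Sketch only - simplified stand-ins for HTTP.BatchSubResponse and the Endpoint enum
struct SubResponseSketch<T> {
    let responseInfo: Int       // stand-in for ResponseInfoType
    let body: T?
}

enum PollEndpointSketch: Hashable { case capabilities, room(String) }

func typedResponse<T>(
    _ type: T.Type,
    at endpoint: PollEndpointSketch,
    in data: [PollEndpointSketch: Any]
) -> SubResponseSketch<T>? {
    // Cast fails (returns nil) when the endpoint is missing or the body type doesn't match
    return data[endpoint] as? SubResponseSketch<T>
}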

@ -68,36 +68,40 @@ public final class OpenGroupManager {
// MARK: - Polling // MARK: - Polling
public func startPolling(using dependencies: OGMDependencies = OGMDependencies()) { public func startPolling(using dependencies: OGMDependencies = OGMDependencies()) {
guard !dependencies.cache.isPolling else { return } // Run on the 'workQueue' to ensure any 'Atomic' access doesn't block the main thread
// on startup
OpenGroupAPI.workQueue.async {
guard !dependencies.cache.isPolling else { return }
let servers: Set<String> = dependencies.storage let servers: Set<String> = dependencies.storage
.read { db in .read { db in
// The default room promise creates an OpenGroup with an empty `roomToken` value, // The default room promise creates an OpenGroup with an empty `roomToken` value,
// we don't want to start a poller for this as the user hasn't actually joined a room // we don't want to start a poller for this as the user hasn't actually joined a room
try OpenGroup try OpenGroup
.select(.server) .select(.server)
.filter(OpenGroup.Columns.isActive == true) .filter(OpenGroup.Columns.isActive == true)
.filter(OpenGroup.Columns.roomToken != "") .filter(OpenGroup.Columns.roomToken != "")
.distinct() .distinct()
.asRequest(of: String.self) .asRequest(of: String.self)
.fetchSet(db) .fetchSet(db)
}
.defaulting(to: [])
dependencies.mutableCache.mutate { cache in
cache.isPolling = true
cache.pollers = servers
.reduce(into: [:]) { result, server in
result[server.lowercased()]?.stop() // Should never occur
result[server.lowercased()] = OpenGroupAPI.Poller(for: server.lowercased())
} }
.defaulting(to: [])
// Note: We loop separately here because when the cache is mocked-out for tests it dependencies.mutableCache.mutate { cache in
// doesn't actually store the value (meaning the pollers won't be started), but if cache.isPolling = true
// we do it in the 'reduce' function, the 'reduce' result will actually store the cache.pollers = servers
// poller value resulting in a bunch of OpenGroup pollers running in a way that can't .reduce(into: [:]) { result, server in
// be stopped during unit tests result[server.lowercased()]?.stop() // Should never occur
cache.pollers.forEach { _, poller in poller.startIfNeeded(using: dependencies) } result[server.lowercased()] = OpenGroupAPI.Poller(for: server.lowercased())
}
// Note: We loop separately here because when the cache is mocked-out for tests it
// doesn't actually store the value (meaning the pollers won't be started), but if
// we do it in the 'reduce' function, the 'reduce' result will actually store the
// poller value resulting in a bunch of OpenGroup pollers running in a way that can't
// be stopped during unit tests
cache.pollers.forEach { _, poller in poller.startIfNeeded(using: dependencies) }
}
} }
} }
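Hopping onto `OpenGroupAPI.workQueue` before touching the `Atomic` cache means the lock is never taken synchronously on the main thread during launch. A self-contained sketch of the pattern, with a toy `Atomic` standing in for the SessionUtilitiesKit type:

import Foundation

// Toy lock-protected box standing in for the real Atomic type
final class AtomicSketch<Value> {
    private let lock = NSLock()
    private var value: Value

    init(_ value: Value) { self.value = value }

    var wrappedValue: Value {
        lock.lock(); defer { lock.unlock() }
        return value
    }

    func mutate(_ body: (inout Value) -> Void) {
        lock.lock(); defer { lock.unlock() }
        body(&value)
    }
}

final class PollerManagerSketch {
    static let workQueue = DispatchQueue(label: "example.poller.work")
    private let isPolling: AtomicSketch<Bool> = AtomicSketch(false)

    func startPolling() {
        // Never acquire the lock synchronously on the caller's (possibly main) thread
        PollerManagerSketch.workQueue.async { [weak self] in
            guard let self = self, !self.isPolling.wrappedValue else { return }

            self.isPolling.mutate { $0 = true }
            // ... start the individual pollers here ...
        }
    }
}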

@ -7,6 +7,11 @@ import SessionUtilitiesKit
extension MessageReceiver { extension MessageReceiver {
internal static func handleConfigurationMessage(_ db: Database, message: ConfigurationMessage) throws { internal static func handleConfigurationMessage(_ db: Database, message: ConfigurationMessage) throws {
guard !Features.useSharedUtilForUserConfig else {
// TODO: Show warning prompt for X days
return
}
let userPublicKey = getUserHexEncodedPublicKey(db) let userPublicKey = getUserHexEncodedPublicKey(db)
guard message.sender == userPublicKey else { return } guard message.sender == userPublicKey else { return }
@ -21,22 +26,41 @@ extension MessageReceiver {
.defaulting(to: Date(timeIntervalSince1970: 0)) .defaulting(to: Date(timeIntervalSince1970: 0))
.timeIntervalSince1970 .timeIntervalSince1970
// Profile (also force-approve the current user in case the account got into a weird state or // Handle user profile changes
// restored directly from a migration) try ProfileManager.updateProfileIfNeeded(
try MessageReceiver.updateProfileIfNeeded(
db, db,
publicKey: userPublicKey, publicKey: userPublicKey,
name: message.displayName, name: message.displayName,
profilePictureUrl: message.profilePictureUrl, avatarUpdate: {
profileKey: message.profileKey, guard
let profilePictureUrl: String = message.profilePictureUrl,
let profileKey: Data = message.profileKey
else { return .none }
return .updateTo(
url: profilePictureUrl,
key: profileKey,
fileName: nil
)
}(),
sentTimestamp: messageSentTimestamp sentTimestamp: messageSentTimestamp
) )
try Contact(id: userPublicKey)
.with( // Create a contact for the current user if needed (also force-approve the current user
isApproved: true, // in case the account got into a weird state or restored directly from a migration)
didApproveMe: true let userContact: Contact = Contact.fetchOrCreate(db, id: userPublicKey)
)
.save(db) if !userContact.isTrusted || !userContact.isApproved || !userContact.didApproveMe {
try userContact.save(db)
try Contact
.filter(id: userPublicKey)
.updateAll( // Handling a config update so don't use `updateAllAndConfig`
db,
Contact.Columns.isTrusted.set(to: true),
Contact.Columns.isApproved.set(to: true),
Contact.Columns.didApproveMe.set(to: true)
)
}
if isInitialSync || messageSentTimestamp > lastConfigTimestamp { if isInitialSync || messageSentTimestamp > lastConfigTimestamp {
if isInitialSync { if isInitialSync {
@ -53,11 +77,10 @@ extension MessageReceiver {
// If the contact is a blinded contact then only add them if they haven't already been // If the contact is a blinded contact then only add them if they haven't already been
// unblinded // unblinded
if SessionId.Prefix(from: sessionId) == .blinded { if SessionId.Prefix(from: sessionId) == .blinded {
let hasUnblindedContact: Bool = (try? BlindedIdLookup let hasUnblindedContact: Bool = BlindedIdLookup
.filter(BlindedIdLookup.Columns.blindedId == sessionId) .filter(BlindedIdLookup.Columns.blindedId == sessionId)
.filter(BlindedIdLookup.Columns.sessionId != nil) .filter(BlindedIdLookup.Columns.sessionId != nil)
.isNotEmpty(db)) .isNotEmpty(db)
.defaulting(to: false)
if hasUnblindedContact { if hasUnblindedContact {
return return
@ -74,13 +97,21 @@ extension MessageReceiver {
profile.profilePictureUrl != contactInfo.profilePictureUrl || profile.profilePictureUrl != contactInfo.profilePictureUrl ||
profile.profileEncryptionKey != contactInfo.profileKey profile.profileEncryptionKey != contactInfo.profileKey
{ {
try profile try profile.save(db)
.with( try Profile
name: contactInfo.displayName, .filter(id: sessionId)
profilePictureUrl: .updateIf(contactInfo.profilePictureUrl), .updateAll( // Handling a config update so don't use `updateAllAndConfig`
profileEncryptionKey: .updateIf(contactInfo.profileKey) db,
[
Profile.Columns.name.set(to: contactInfo.displayName),
(contactInfo.profilePictureUrl == nil ? nil :
Profile.Columns.profilePictureUrl.set(to: contactInfo.profilePictureUrl)
),
(contactInfo.profileKey == nil ? nil :
Profile.Columns.profileEncryptionKey.set(to: contactInfo.profileKey)
)
].compactMap { $0 }
) )
.save(db)
} }
/// We only update these values if the proto actually has values for them (this is to prevent an /// We only update these values if the proto actually has values for them (this is to prevent an
@ -94,22 +125,23 @@ extension MessageReceiver {
(contactInfo.hasIsBlocked && (contact.isBlocked != contactInfo.isBlocked)) || (contactInfo.hasIsBlocked && (contact.isBlocked != contactInfo.isBlocked)) ||
(contactInfo.hasDidApproveMe && (contact.didApproveMe != contactInfo.didApproveMe)) (contactInfo.hasDidApproveMe && (contact.didApproveMe != contactInfo.didApproveMe))
{ {
try contact try contact.save(db)
.with( try Contact
isApproved: (contactInfo.hasIsApproved && contactInfo.isApproved ? .filter(id: sessionId)
true : .updateAll( // Handling a config update so don't use `updateAllAndConfig`
.existing db,
), [
isBlocked: (contactInfo.hasIsBlocked ? (!contactInfo.hasIsApproved || !contactInfo.isApproved ? nil :
.update(contactInfo.isBlocked) : Contact.Columns.isApproved.set(to: true)
.existing ),
), (!contactInfo.hasIsBlocked ? nil :
didApproveMe: (contactInfo.hasDidApproveMe && contactInfo.didApproveMe ? Contact.Columns.isBlocked.set(to: contactInfo.isBlocked)
true : ),
.existing (!contactInfo.hasDidApproveMe || !contactInfo.didApproveMe ? nil :
) Contact.Columns.didApproveMe.set(to: contactInfo.didApproveMe)
)
].compactMap { $0 }
) )
.save(db)
} }
// If the contact is blocked // If the contact is blocked
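Because this code is itself handling a config update, it applies changes with plain `updateAll` rather than `updateAllAndConfig`, and it only builds the column assignments the incoming proto actually contains. A stand-alone sketch of that optional-assignment/`compactMap` pattern (plain values standing in for GRDB column assignments):

// Stand-alone sketch; FieldUpdate is a stand-in for GRDB's ColumnAssignment and the
// real code passes the compacted array straight to updateAll/updateAllAndConfig.
struct FieldUpdate { let column: String; let value: Any }

func contactUpdates(
    hasIsApproved: Bool, isApproved: Bool,
    hasIsBlocked: Bool, isBlocked: Bool,
    hasDidApproveMe: Bool, didApproveMe: Bool
) -> [FieldUpdate] {
    let maybeUpdates: [FieldUpdate?] = [
        (!hasIsApproved || !isApproved ? nil :
            FieldUpdate(column: "isApproved", value: true)
        ),
        (!hasIsBlocked ? nil :
            FieldUpdate(column: "isBlocked", value: isBlocked)
        ),
        (!hasDidApproveMe || !didApproveMe ? nil :
            FieldUpdate(column: "didApproveMe", value: true)
        )
    ]

    // Drop the fields the incoming proto didn't actually include
    return maybeUpdates.compactMap { $0 }
}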

@ -23,12 +23,22 @@ extension MessageReceiver {
if let profile = message.profile { if let profile = message.profile {
let messageSentTimestamp: TimeInterval = (TimeInterval(message.sentTimestamp ?? 0) / 1000) let messageSentTimestamp: TimeInterval = (TimeInterval(message.sentTimestamp ?? 0) / 1000)
try MessageReceiver.updateProfileIfNeeded( try ProfileManager.updateProfileIfNeeded(
db, db,
publicKey: senderId, publicKey: senderId,
name: profile.displayName, name: profile.displayName,
profilePictureUrl: profile.profilePictureUrl, avatarUpdate: {
profileKey: profile.profileKey, guard
let profilePictureUrl: String = profile.profilePictureUrl,
let profileKey: Data = profile.profileKey
else { return .none }
return .updateTo(
url: profilePictureUrl,
key: profileKey,
fileName: nil
)
}(),
sentTimestamp: messageSentTimestamp sentTimestamp: messageSentTimestamp
) )
} }
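The `avatarUpdate:` argument is produced by an immediately-invoked closure so a guard/early-return can pick an enum case inline at the call site. A minimal sketch of the same trick with simplified types:

import Foundation

// Sketch only - simplified stand-in for ProfileManager.AvatarUpdate
enum AvatarChangeSketch {
    case none
    case updateTo(url: String, key: Data)
}

func avatarChange(profilePictureUrl: String?, profileKey: Data?) -> AvatarChangeSketch {
    return {
        guard
            let url: String = profilePictureUrl,
            let key: Data = profileKey
        else { return .none }

        return .updateTo(url: url, key: key)
    }()
}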
@ -88,8 +98,7 @@ extension MessageReceiver {
try updateContactApprovalStatusIfNeeded( try updateContactApprovalStatusIfNeeded(
db, db,
senderSessionId: senderId, senderSessionId: senderId,
threadId: nil, threadId: nil
forceConfigSync: blindedContactIds.isEmpty // Sync here if there were no blinded contacts
) )
// If there were blinded contacts which have now been resolved to this contact then we should remove // If there were blinded contacts which have now been resolved to this contact then we should remove
@ -103,8 +112,7 @@ extension MessageReceiver {
try updateContactApprovalStatusIfNeeded( try updateContactApprovalStatusIfNeeded(
db, db,
senderSessionId: userPublicKey, senderSessionId: userPublicKey,
threadId: unblindedThread.id, threadId: unblindedThread.id
forceConfigSync: true
) )
} }
@ -128,8 +136,7 @@ extension MessageReceiver {
internal static func updateContactApprovalStatusIfNeeded( internal static func updateContactApprovalStatusIfNeeded(
_ db: Database, _ db: Database,
senderSessionId: String, senderSessionId: String,
threadId: String?, threadId: String?
forceConfigSync: Bool
) throws { ) throws {
let userPublicKey: String = getUserHexEncodedPublicKey(db) let userPublicKey: String = getUserHexEncodedPublicKey(db)
@ -149,9 +156,10 @@ extension MessageReceiver {
guard !contact.isApproved else { return } guard !contact.isApproved else { return }
_ = try? contact try? contact.save(db)
.with(isApproved: true) _ = try? Contact
.saved(db) .filter(id: threadId)
.updateAllAndConfig(db, Contact.Columns.isApproved.set(to: true))
} }
else { else {
// The message was sent to the current user so flag their 'didApproveMe' as true (can't send a message to // The message was sent to the current user so flag their 'didApproveMe' as true (can't send a message to
@ -160,14 +168,10 @@ extension MessageReceiver {
guard !contact.didApproveMe else { return } guard !contact.didApproveMe else { return }
_ = try? contact try? contact.save(db)
.with(didApproveMe: true) _ = try? Contact
.saved(db) .filter(id: senderSessionId)
.updateAllAndConfig(db, Contact.Columns.didApproveMe.set(to: true))
} }
// Force a config sync to ensure all devices know the contact approval state if desired
guard forceConfigSync else { return }
try MessageSender.syncConfiguration(db, forceSyncNow: true).sinkUntilComplete()
} }
} }

@ -25,12 +25,22 @@ extension MessageReceiver {
// Update profile if needed (want to do this regardless of whether the message exists or // Update profile if needed (want to do this regardless of whether the message exists or
// not to ensure the profile info gets sync between a users devices at every chance) // not to ensure the profile info gets sync between a users devices at every chance)
if let profile = message.profile { if let profile = message.profile {
try MessageReceiver.updateProfileIfNeeded( try ProfileManager.updateProfileIfNeeded(
db, db,
publicKey: sender, publicKey: sender,
name: profile.displayName, name: profile.displayName,
profilePictureUrl: profile.profilePictureUrl, avatarUpdate: {
profileKey: profile.profileKey, guard
let profilePictureUrl: String = profile.profilePictureUrl,
let profileKey: Data = profile.profileKey
else { return .none }
return .updateTo(
url: profilePictureUrl,
key: profileKey,
fileName: nil
)
}(),
sentTimestamp: messageSentTimestamp sentTimestamp: messageSentTimestamp
) )
} }
@ -272,8 +282,7 @@ extension MessageReceiver {
try MessageReceiver.updateContactApprovalStatusIfNeeded( try MessageReceiver.updateContactApprovalStatusIfNeeded(
db, db,
senderSessionId: sender, senderSessionId: sender,
threadId: thread.id, threadId: thread.id
forceConfigSync: false
) )
} }

@ -103,7 +103,7 @@ extension MessageSender {
// the 'ClosedGroup' object we created // the 'ClosedGroup' object we created
sentTimestampMs: UInt64(floor(formationTimestamp * 1000)) sentTimestampMs: UInt64(floor(formationTimestamp * 1000))
), ),
to: .contact(publicKey: memberId, namespace: .default), to: .contact(publicKey: memberId),
interactionId: nil interactionId: nil
) )
} }
@ -197,7 +197,8 @@ extension MessageSender {
ClosedGroupControlMessage.KeyPairWrapper( ClosedGroupControlMessage.KeyPairWrapper(
publicKey: memberPublicKey, publicKey: memberPublicKey,
encryptedKeyPair: try MessageSender.encryptWithSessionProtocol( encryptedKeyPair: try MessageSender.encryptWithSessionProtocol(
plaintext, db,
plaintext: plaintext,
for: memberPublicKey for: memberPublicKey
) )
) )
@ -645,7 +646,11 @@ extension MessageSender {
let plaintext = try proto.serializedData() let plaintext = try proto.serializedData()
let thread: SessionThread = try SessionThread let thread: SessionThread = try SessionThread
.fetchOrCreate(db, id: publicKey, variant: .contact) .fetchOrCreate(db, id: publicKey, variant: .contact)
let ciphertext = try MessageSender.encryptWithSessionProtocol(plaintext, for: publicKey) let ciphertext = try MessageSender.encryptWithSessionProtocol(
db,
plaintext: plaintext,
for: publicKey
)
SNLog("Sending latest encryption key pair to: \(publicKey).") SNLog("Sending latest encryption key pair to: \(publicKey).")
try MessageSender.send( try MessageSender.send(

@ -305,82 +305,4 @@ public enum MessageReceiver {
return (contactId, .contact) return (contactId, .contact)
} }
internal static func updateProfileIfNeeded(
_ db: Database,
publicKey: String,
name: String?,
profilePictureUrl: String?,
profileKey: Data?,
sentTimestamp: TimeInterval,
dependencies: Dependencies = Dependencies()
) throws {
let isCurrentUser = (publicKey == getUserHexEncodedPublicKey(db, dependencies: dependencies))
let profile: Profile = Profile.fetchOrCreate(id: publicKey)
var updatedProfile: Profile = profile
// Name
if let name = name, name != profile.name {
let shouldUpdate: Bool
if isCurrentUser {
shouldUpdate = given(UserDefaults.standard[.lastDisplayNameUpdate]) {
sentTimestamp > $0.timeIntervalSince1970
}
.defaulting(to: true)
}
else {
shouldUpdate = true
}
if shouldUpdate {
if isCurrentUser {
UserDefaults.standard[.lastDisplayNameUpdate] = Date(timeIntervalSince1970: sentTimestamp)
}
updatedProfile = updatedProfile.with(name: name)
}
}
// Profile picture & profile key
if
let profileKey: Data = profileKey,
let profilePictureUrl: String = profilePictureUrl,
profileKey.count == ProfileManager.avatarAES256KeyByteLength,
profileKey != profile.profileEncryptionKey
{
let shouldUpdate: Bool
if isCurrentUser {
shouldUpdate = given(UserDefaults.standard[.lastProfilePictureUpdate]) {
sentTimestamp > $0.timeIntervalSince1970
}
.defaulting(to: true)
}
else {
shouldUpdate = true
}
if shouldUpdate {
if isCurrentUser {
UserDefaults.standard[.lastProfilePictureUpdate] = Date(timeIntervalSince1970: sentTimestamp)
}
updatedProfile = updatedProfile.with(
profilePictureUrl: .update(profilePictureUrl),
profileEncryptionKey: .update(profileKey)
)
}
}
// Persist any changes
if updatedProfile != profile {
try updatedProfile.save(db)
}
// Download the profile picture if needed
if updatedProfile.profilePictureUrl != profile.profilePictureUrl || updatedProfile.profileEncryptionKey != profile.profileEncryptionKey {
db.afterNextTransaction { _ in
ProfileManager.downloadAvatar(for: updatedProfile)
}
}
}
} }

@ -85,8 +85,8 @@ extension MessageSender {
let threadId: String = { let threadId: String = {
switch destination { switch destination {
case .contact(let publicKey, _): return publicKey case .contact(let publicKey): return publicKey
case .closedGroup(let groupPublicKey, _): return groupPublicKey case .closedGroup(let groupPublicKey): return groupPublicKey
case .openGroup(let roomToken, let server, _, _, _): case .openGroup(let roomToken, let server, _, _, _):
return OpenGroup.idFor(roomToken: roomToken, server: server) return OpenGroup.idFor(roomToken: roomToken, server: server)
@ -152,87 +152,4 @@ extension MessageSender {
} }
.eraseToAnyPublisher() .eraseToAnyPublisher()
} }
/// This method requires the `db` value to be passed in because if it's called within a `writeAsync` completion block
/// it will throw a "re-entrant" fatal error when attempting to write again
public static func syncConfiguration(
_ db: Database,
forceSyncNow: Bool = true
) throws -> AnyPublisher<Void, Error> {
// If we don't have a userKeyPair yet then there is no need to sync the configuration
// as the user doesn't exist yet (this will get triggered on the first launch of a
// fresh install due to the migrations getting run)
guard
Identity.userExists(db),
let ed25519SecretKey: [UInt8] = Identity.fetchUserEd25519KeyPair(db)?.secretKey
else {
return Fail(error: StorageError.generic)
.eraseToAnyPublisher()
}
let publicKey: String = getUserHexEncodedPublicKey(db)
let legacyDestination: Message.Destination = Message.Destination.contact(
publicKey: publicKey,
namespace: .default
)
let legacyConfigurationMessage = try ConfigurationMessage.getCurrent(db)
let userConfigMessageChanges: [SharedConfigMessage] = SessionUtil.getChanges(
ed25519SecretKey: ed25519SecretKey
)
let destination: Message.Destination = Message.Destination.contact(
publicKey: publicKey,
namespace: .userProfileConfig
)
guard forceSyncNow else {
JobRunner.add(
db,
job: Job(
variant: .messageSend,
threadId: publicKey,
details: MessageSendJob.Details(
destination: legacyDestination,
message: legacyConfigurationMessage
)
)
)
return Just(())
.setFailureType(to: Error.self)
.eraseToAnyPublisher()
}
let sendData: PreparedSendData = try MessageSender.preparedSendData(
db,
message: legacyConfigurationMessage,
to: legacyDestination,
interactionId: nil
)
let userConfigSendData: [PreparedSendData] = try userConfigMessageChanges
.map { message in
try MessageSender.preparedSendData(
db,
message: message,
to: destination,
interactionId: nil
)
}
/// We want to avoid blocking the db write thread so we dispatch the API call to a different thread
return Just(())
.setFailureType(to: Error.self)
.receive(on: DispatchQueue.global(qos: .userInitiated))
.flatMap { _ -> AnyPublisher<Void, Error> in
Publishers
.MergeMany(
([sendData] + userConfigSendData)
.map { MessageSender.sendImmediate(preparedSendData: $0) }
)
.collect()
.map { _ in () }
.eraseToAnyPublisher()
}
.eraseToAnyPublisher()
}
} }
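The removed `syncConfiguration` merged the legacy configuration send and any shared-config sends into one publisher that completed only once every send finished, after first hopping off the database write thread. A minimal Combine sketch of that merge-and-wait step (the send publishers themselves are placeholders, not the real `sendImmediate` API):

import Combine

// Minimal sketch: fan out every prepared send, then complete once all of them finish
func sendAll(_ sends: [AnyPublisher<Void, Error>]) -> AnyPublisher<Void, Error> {
    return Publishers
        .MergeMany(sends)
        .collect()          // gather every completion before emitting
        .map { _ in () }    // collapse back to a single Void value
        .eraseToAnyPublisher()
}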

@ -1,16 +1,18 @@
// Copyright © 2022 Rangeproof Pty Ltd. All rights reserved. // Copyright © 2022 Rangeproof Pty Ltd. All rights reserved.
import Foundation import Foundation
import GRDB
import Sodium import Sodium
import SessionUtilitiesKit import SessionUtilitiesKit
extension MessageSender { extension MessageSender {
internal static func encryptWithSessionProtocol( internal static func encryptWithSessionProtocol(
_ plaintext: Data, _ db: Database,
plaintext: Data,
for recipientHexEncodedX25519PublicKey: String, for recipientHexEncodedX25519PublicKey: String,
using dependencies: SMKDependencies = SMKDependencies() using dependencies: SMKDependencies = SMKDependencies()
) throws -> Data { ) throws -> Data {
guard let userEd25519KeyPair: Box.KeyPair = dependencies.storage.read({ db in Identity.fetchUserEd25519KeyPair(db) }) else { guard let userEd25519KeyPair: Box.KeyPair = Identity.fetchUserEd25519KeyPair(db) else {
throw MessageSenderError.noUserED25519KeyPair throw MessageSenderError.noUserED25519KeyPair
} }
@ -30,13 +32,16 @@ extension MessageSender {
} }
internal static func encryptWithSessionBlindingProtocol( internal static func encryptWithSessionBlindingProtocol(
_ plaintext: Data, _ db: Database,
plaintext: Data,
for recipientBlindedId: String, for recipientBlindedId: String,
openGroupPublicKey: String, openGroupPublicKey: String,
using dependencies: SMKDependencies = SMKDependencies() using dependencies: SMKDependencies = SMKDependencies()
) throws -> Data { ) throws -> Data {
guard SessionId.Prefix(from: recipientBlindedId) == .blinded else { throw MessageSenderError.signingFailed } guard SessionId.Prefix(from: recipientBlindedId) == .blinded else {
guard let userEd25519KeyPair: Box.KeyPair = dependencies.storage.read({ db in Identity.fetchUserEd25519KeyPair(db) }) else { throw MessageSenderError.signingFailed
}
guard let userEd25519KeyPair: Box.KeyPair = Identity.fetchUserEd25519KeyPair(db) else {
throw MessageSenderError.noUserED25519KeyPair throw MessageSenderError.noUserED25519KeyPair
} }
guard let blindedKeyPair = dependencies.sodium.blindedKeyPair(serverPublicKey: openGroupPublicKey, edKeyPair: userEd25519KeyPair, genericHash: dependencies.genericHash) else { guard let blindedKeyPair = dependencies.sodium.blindedKeyPair(serverPublicKey: openGroupPublicKey, edKeyPair: userEd25519KeyPair, genericHash: dependencies.genericHash) else {

@ -206,8 +206,8 @@ public final class MessageSender {
message.sender = userPublicKey message.sender = userPublicKey
message.recipient = { message.recipient = {
switch destination { switch destination {
case .contact(let publicKey, _): return publicKey case .contact(let publicKey): return publicKey
case .closedGroup(let groupPublicKey, _): return groupPublicKey case .closedGroup(let groupPublicKey): return groupPublicKey
case .openGroup, .openGroupInbox: preconditionFailure() case .openGroup, .openGroupInbox: preconditionFailure()
} }
}() }()
@ -283,16 +283,17 @@ public final class MessageSender {
let ciphertext: Data let ciphertext: Data
do { do {
switch destination { switch destination {
case .contact(let publicKey, _): case .contact(let publicKey):
ciphertext = try encryptWithSessionProtocol(plaintext, for: publicKey) ciphertext = try encryptWithSessionProtocol(db, plaintext: plaintext, for: publicKey)
case .closedGroup(let groupPublicKey, _): case .closedGroup(let groupPublicKey):
guard let encryptionKeyPair: ClosedGroupKeyPair = try? ClosedGroupKeyPair.fetchLatestKeyPair(db, threadId: groupPublicKey) else { guard let encryptionKeyPair: ClosedGroupKeyPair = try? ClosedGroupKeyPair.fetchLatestKeyPair(db, threadId: groupPublicKey) else {
throw MessageSenderError.noKeyPair throw MessageSenderError.noKeyPair
} }
ciphertext = try encryptWithSessionProtocol( ciphertext = try encryptWithSessionProtocol(
plaintext, db,
plaintext: plaintext,
for: SessionId(.standard, publicKey: encryptionKeyPair.publicKey.bytes).hexString for: SessionId(.standard, publicKey: encryptionKeyPair.publicKey.bytes).hexString
) )
@ -319,7 +320,7 @@ public final class MessageSender {
kind = .sessionMessage kind = .sessionMessage
senderPublicKey = "" senderPublicKey = ""
case .closedGroup(let groupPublicKey, _): case .closedGroup(let groupPublicKey):
kind = .closedGroupMessage kind = .closedGroupMessage
senderPublicKey = groupPublicKey senderPublicKey = groupPublicKey
@ -553,7 +554,8 @@ public final class MessageSender {
do { do {
ciphertext = try encryptWithSessionBlindingProtocol( ciphertext = try encryptWithSessionBlindingProtocol(
plaintext, db,
plaintext: plaintext,
for: recipientBlindedPublicKey, for: recipientBlindedPublicKey,
openGroupPublicKey: openGroupPublicKey, openGroupPublicKey: openGroupPublicKey,
using: dependencies using: dependencies
@ -636,107 +638,86 @@ public final class MessageSender {
let isMainAppActive: Bool = (UserDefaults.sharedLokiProject?[.isMainAppActive]) let isMainAppActive: Bool = (UserDefaults.sharedLokiProject?[.isMainAppActive])
.defaulting(to: false) .defaulting(to: false)
var isSuccess = false
var errorCount = 0
return SnodeAPI return SnodeAPI
.sendMessage( .sendMessage(
snodeMessage, snodeMessage,
in: destination.namespace in: {
switch destination {
case .closedGroup: return .legacyClosedGroup
default: return .`default`
}
}()
) )
.subscribe(on: DispatchQueue.global(qos: .default)) .subscribe(on: DispatchQueue.global(qos: .default))
.flatMap { result, totalCount -> AnyPublisher<Bool, Error> in .flatMap { response -> AnyPublisher<Bool, Error> in
switch result { let updatedMessage: Message = message
case .success(let response): updatedMessage.serverHash = response.1.hash
// Don't emit if we've already succeeded
guard !isSuccess else {
return Just(false)
.setFailureType(to: Error.self)
.eraseToAnyPublisher()
}
isSuccess = true
let updatedMessage: Message = message
updatedMessage.serverHash = response.1.hash
let job: Job? = Job(
variant: .notifyPushServer,
behaviour: .runOnce,
details: NotifyPushServerJob.Details(message: snodeMessage)
)
let shouldNotify: Bool = {
switch updatedMessage {
case is VisibleMessage, is UnsendRequest: return !isSyncMessage
case let callMessage as CallMessage:
switch callMessage.kind {
case .preOffer: return true
default: return false
}
let job: Job? = Job(
variant: .notifyPushServer,
behaviour: .runOnce,
details: NotifyPushServerJob.Details(message: snodeMessage)
)
let shouldNotify: Bool = {
switch updatedMessage {
case is VisibleMessage, is UnsendRequest: return !isSyncMessage
case let callMessage as CallMessage:
switch callMessage.kind {
case .preOffer: return true
default: return false default: return false
} }
}()
return dependencies.storage default: return false
.writePublisher { db -> Void in }
try MessageSender.handleSuccessfulMessageSend( }()
db,
message: updatedMessage,
to: destination,
interactionId: data.interactionId,
isSyncMessage: isSyncMessage,
using: dependencies
)
guard shouldNotify && isMainAppActive else { return () } return dependencies.storage
.writePublisher { db -> Void in
try MessageSender.handleSuccessfulMessageSend(
db,
message: updatedMessage,
to: destination,
interactionId: data.interactionId,
isSyncMessage: isSyncMessage,
using: dependencies
)
JobRunner.add(db, job: job) guard shouldNotify else { return () }
return ()
}
.flatMap { _ -> AnyPublisher<Bool, Error> in
guard shouldNotify && !isMainAppActive else {
return Just(true)
.setFailureType(to: Error.self)
.eraseToAnyPublisher()
}
guard let job: Job = job else {
return Just(true)
.setFailureType(to: Error.self)
.eraseToAnyPublisher()
}
return Future<Bool, Error> { resolver in JobRunner.add(db, job: job)
NotifyPushServerJob.run( return ()
job, }
queue: DispatchQueue.global(qos: .default), .flatMap { _ -> AnyPublisher<Bool, Error> in
success: { _, _ in resolver(Result.success(true)) }, guard shouldNotify && !isMainAppActive else {
failure: { _, _, _ in return Just(true)
// Always fulfill because the notify PN server job isn't critical. .setFailureType(to: Error.self)
resolver(Result.success(true))
},
deferred: { _ in
// Always fulfill because the notify PN server job isn't critical.
resolver(Result.success(true))
}
)
}
.eraseToAnyPublisher() .eraseToAnyPublisher()
} }
.eraseToAnyPublisher() guard let job: Job = job else {
return Just(true)
case .failure(let error):
errorCount += 1
// Only process the error if all promises failed
guard errorCount == totalCount else {
return Just(false)
.setFailureType(to: Error.self) .setFailureType(to: Error.self)
.eraseToAnyPublisher() .eraseToAnyPublisher()
} }
return Fail(error: error) return Future<Bool, Error> { resolver in
.eraseToAnyPublisher() NotifyPushServerJob.run(
} job,
queue: DispatchQueue.global(qos: .default),
success: { _, _ in resolver(Result.success(true)) },
failure: { _, _, _ in
// Always fulfill because the notify PN server job isn't critical.
resolver(Result.success(true))
},
deferred: { _ in
// Always fulfill because the notify PN server job isn't critical.
resolver(Result.success(true))
}
)
}
.eraseToAnyPublisher()
}
.eraseToAnyPublisher()
} }
.filter { $0 } .filter { $0 }
.handleEvents( .handleEvents(
@ -960,8 +941,8 @@ public final class MessageSender {
try? ControlMessageProcessRecord( try? ControlMessageProcessRecord(
threadId: { threadId: {
switch destination { switch destination {
case .contact(let publicKey, _): return publicKey case .contact(let publicKey): return publicKey
case .closedGroup(let groupPublicKey, _): return groupPublicKey case .closedGroup(let groupPublicKey): return groupPublicKey
case .openGroup(let roomToken, let server, _, _, _): case .openGroup(let roomToken, let server, _, _, _):
return OpenGroup.idFor(roomToken: roomToken, server: server) return OpenGroup.idFor(roomToken: roomToken, server: server)
@ -977,7 +958,7 @@ public final class MessageSender {
// the destination was a contact // the destination was a contact
// we didn't sync it already // we didn't sync it already
let userPublicKey = getUserHexEncodedPublicKey(db) let userPublicKey = getUserHexEncodedPublicKey(db)
if case .contact(let publicKey, let namespace) = destination, !isSyncMessage { if case .contact(let publicKey) = destination, !isSyncMessage {
if let message = message as? VisibleMessage { message.syncTarget = publicKey } if let message = message as? VisibleMessage { message.syncTarget = publicKey }
if let message = message as? ExpirationTimerUpdate { message.syncTarget = publicKey } if let message = message as? ExpirationTimerUpdate { message.syncTarget = publicKey }
@ -986,7 +967,7 @@ public final class MessageSender {
data: try prepareSendToSnodeDestination( data: try prepareSendToSnodeDestination(
db, db,
message: message, message: message,
to: .contact(publicKey: userPublicKey, namespace: namespace), to: .contact(publicKey: userPublicKey),
interactionId: interactionId, interactionId: interactionId,
userPublicKey: userPublicKey, userPublicKey: userPublicKey,
messageSendTimestamp: Int64(floor(Date().timeIntervalSince1970 * 1000)), messageSendTimestamp: Int64(floor(Date().timeIntervalSince1970 * 1000)),

@ -4,6 +4,7 @@ import Foundation
public extension Notification.Name { public extension Notification.Name {
// FIXME: Remove once `useSharedUtilForUserConfig` is permanent
static let initialConfigurationMessageReceived = Notification.Name("initialConfigurationMessageReceived") static let initialConfigurationMessageReceived = Notification.Name("initialConfigurationMessageReceived")
static let missedCall = Notification.Name("missedCall") static let missedCall = Notification.Name("missedCall")
} }
@ -14,5 +15,6 @@ public extension Notification.Key {
@objc public extension NSNotification { @objc public extension NSNotification {
// FIXME: Remove once `useSharedUtilForUserConfig` is permanent
@objc static let initialConfigurationMessageReceived = Notification.Name.initialConfigurationMessageReceived.rawValue as NSString @objc static let initialConfigurationMessageReceived = Notification.Name.initialConfigurationMessageReceived.rawValue as NSString
} }

@ -8,7 +8,7 @@ import SessionSnodeKit
import SessionUtilitiesKit import SessionUtilitiesKit
public final class CurrentUserPoller: Poller { public final class CurrentUserPoller: Poller {
public static var namespaces: [SnodeAPI.Namespace] = [.default, .userProfileConfig] public static var namespaces: [SnodeAPI.Namespace] = [.default, .configUserProfile, .configContacts]
private var targetSnode: Atomic<Snode?> = Atomic(nil) private var targetSnode: Atomic<Snode?> = Atomic(nil)
private var usedSnodes: Atomic<Set<Snode>> = Atomic([]) private var usedSnodes: Atomic<Set<Snode>> = Atomic([])

@ -8,7 +8,7 @@ import SessionUtilitiesKit
extension OpenGroupAPI { extension OpenGroupAPI {
public final class Poller { public final class Poller {
typealias PollResponse = [OpenGroupAPI.Endpoint: (info: ResponseInfoType, data: Codable?)] typealias PollResponse = (info: ResponseInfoType, data: [OpenGroupAPI.Endpoint: Codable])
private let server: String private let server: String
private var timer: Timer? = nil private var timer: Timer? = nil
@ -279,11 +279,11 @@ extension OpenGroupAPI {
using dependencies: OpenGroupManager.OGMDependencies = OpenGroupManager.OGMDependencies() using dependencies: OpenGroupManager.OGMDependencies = OpenGroupManager.OGMDependencies()
) { ) {
let server: String = self.server let server: String = self.server
let validResponses: PollResponse = response let validResponses: [OpenGroupAPI.Endpoint: Codable] = response.data
.filter { endpoint, endpointResponse in .filter { endpoint, data in
switch endpoint { switch endpoint {
case .capabilities: case .capabilities:
guard (endpointResponse.data as? HTTP.BatchSubResponse<Capabilities>)?.body != nil else { guard (data as? HTTP.BatchSubResponse<Capabilities>)?.body != nil else {
SNLog("Open group polling failed due to invalid capability data.") SNLog("Open group polling failed due to invalid capability data.")
return false return false
} }
@ -291,8 +291,8 @@ extension OpenGroupAPI {
return true return true
case .roomPollInfo(let roomToken, _): case .roomPollInfo(let roomToken, _):
guard (endpointResponse.data as? HTTP.BatchSubResponse<RoomPollInfo>)?.body != nil else { guard (data as? HTTP.BatchSubResponse<RoomPollInfo>)?.body != nil else {
switch (endpointResponse.data as? HTTP.BatchSubResponse<RoomPollInfo>)?.code { switch (data as? HTTP.BatchSubResponse<RoomPollInfo>)?.code {
case 404: SNLog("Open group polling failed to retrieve info for unknown room '\(roomToken)'.") case 404: SNLog("Open group polling failed to retrieve info for unknown room '\(roomToken)'.")
default: SNLog("Open group polling failed due to invalid room info data.") default: SNLog("Open group polling failed due to invalid room info data.")
} }
@ -303,10 +303,10 @@ extension OpenGroupAPI {
case .roomMessagesRecent(let roomToken), .roomMessagesBefore(let roomToken, _), .roomMessagesSince(let roomToken, _): case .roomMessagesRecent(let roomToken), .roomMessagesBefore(let roomToken, _), .roomMessagesSince(let roomToken, _):
guard guard
let responseData: HTTP.BatchSubResponse<[Failable<Message>]> = endpointResponse.data as? HTTP.BatchSubResponse<[Failable<Message>]>, let responseData: HTTP.BatchSubResponse<[Failable<Message>]> = data as? HTTP.BatchSubResponse<[Failable<Message>]>,
let responseBody: [Failable<Message>] = responseData.body let responseBody: [Failable<Message>] = responseData.body
else { else {
switch (endpointResponse.data as? HTTP.BatchSubResponse<[Failable<Message>]>)?.code { switch (data as? HTTP.BatchSubResponse<[Failable<Message>]>)?.code {
case 404: SNLog("Open group polling failed to retrieve messages for unknown room '\(roomToken)'.") case 404: SNLog("Open group polling failed to retrieve messages for unknown room '\(roomToken)'.")
default: SNLog("Open group polling failed due to invalid messages data.") default: SNLog("Open group polling failed due to invalid messages data.")
} }
@ -325,7 +325,7 @@ extension OpenGroupAPI {
case .inbox, .inboxSince, .outbox, .outboxSince: case .inbox, .inboxSince, .outbox, .outboxSince:
guard guard
let responseData: HTTP.BatchSubResponse<[DirectMessage]?> = endpointResponse.data as? HTTP.BatchSubResponse<[DirectMessage]?>, let responseData: HTTP.BatchSubResponse<[DirectMessage]?> = data as? HTTP.BatchSubResponse<[DirectMessage]?>,
!responseData.failedToParseBody !responseData.failedToParseBody
else { else {
SNLog("Open group polling failed due to invalid inbox/outbox data.") SNLog("Open group polling failed due to invalid inbox/outbox data.")
@ -378,12 +378,12 @@ extension OpenGroupAPI {
return (capabilities, groups) return (capabilities, groups)
} }
let changedResponses: PollResponse = validResponses let changedResponses: [OpenGroupAPI.Endpoint: Codable] = validResponses
.filter { endpoint, endpointResponse in .filter { endpoint, data in
switch endpoint { switch endpoint {
case .capabilities: case .capabilities:
guard guard
let responseData: HTTP.BatchSubResponse<Capabilities> = endpointResponse.data as? HTTP.BatchSubResponse<Capabilities>, let responseData: HTTP.BatchSubResponse<Capabilities> = data as? HTTP.BatchSubResponse<Capabilities>,
let responseBody: Capabilities = responseData.body let responseBody: Capabilities = responseData.body
else { return false } else { return false }
@ -391,7 +391,7 @@ extension OpenGroupAPI {
case .roomPollInfo(let roomToken, _): case .roomPollInfo(let roomToken, _):
guard guard
let responseData: HTTP.BatchSubResponse<RoomPollInfo> = endpointResponse.data as? HTTP.BatchSubResponse<RoomPollInfo>, let responseData: HTTP.BatchSubResponse<RoomPollInfo> = data as? HTTP.BatchSubResponse<RoomPollInfo>,
let responseBody: RoomPollInfo = responseData.body let responseBody: RoomPollInfo = responseData.body
else { return false } else { return false }
guard let existingOpenGroup: OpenGroup = currentInfo?.groups.first(where: { $0.roomToken == roomToken }) else { guard let existingOpenGroup: OpenGroup = currentInfo?.groups.first(where: { $0.roomToken == roomToken }) else {
@ -424,11 +424,11 @@ extension OpenGroupAPI {
.updateAll(db, OpenGroup.Columns.pollFailureCount.set(to: 0)) .updateAll(db, OpenGroup.Columns.pollFailureCount.set(to: 0))
} }
try changedResponses.forEach { endpoint, endpointResponse in try changedResponses.forEach { endpoint, data in
switch endpoint { switch endpoint {
case .capabilities: case .capabilities:
guard guard
let responseData: HTTP.BatchSubResponse<Capabilities> = endpointResponse.data as? HTTP.BatchSubResponse<Capabilities>, let responseData: HTTP.BatchSubResponse<Capabilities> = data as? HTTP.BatchSubResponse<Capabilities>,
let responseBody: Capabilities = responseData.body let responseBody: Capabilities = responseData.body
else { return } else { return }
@ -440,7 +440,7 @@ extension OpenGroupAPI {
case .roomPollInfo(let roomToken, _): case .roomPollInfo(let roomToken, _):
guard guard
let responseData: HTTP.BatchSubResponse<RoomPollInfo> = endpointResponse.data as? HTTP.BatchSubResponse<RoomPollInfo>, let responseData: HTTP.BatchSubResponse<RoomPollInfo> = data as? HTTP.BatchSubResponse<RoomPollInfo>,
let responseBody: RoomPollInfo = responseData.body let responseBody: RoomPollInfo = responseData.body
else { return } else { return }
@ -455,7 +455,7 @@ extension OpenGroupAPI {
case .roomMessagesRecent(let roomToken), .roomMessagesBefore(let roomToken, _), .roomMessagesSince(let roomToken, _): case .roomMessagesRecent(let roomToken), .roomMessagesBefore(let roomToken, _), .roomMessagesSince(let roomToken, _):
guard guard
let responseData: HTTP.BatchSubResponse<[Failable<Message>]> = endpointResponse.data as? HTTP.BatchSubResponse<[Failable<Message>]>, let responseData: HTTP.BatchSubResponse<[Failable<Message>]> = data as? HTTP.BatchSubResponse<[Failable<Message>]>,
let responseBody: [Failable<Message>] = responseData.body let responseBody: [Failable<Message>] = responseData.body
else { return } else { return }
@ -469,7 +469,7 @@ extension OpenGroupAPI {
case .inbox, .inboxSince, .outbox, .outboxSince: case .inbox, .inboxSince, .outbox, .outboxSince:
guard guard
let responseData: HTTP.BatchSubResponse<[DirectMessage]?> = endpointResponse.data as? HTTP.BatchSubResponse<[DirectMessage]?>, let responseData: HTTP.BatchSubResponse<[DirectMessage]?> = data as? HTTP.BatchSubResponse<[DirectMessage]?>,
!responseData.failedToParseBody !responseData.failedToParseBody
else { return } else { return }

@ -66,13 +66,17 @@ public class Poller {
    // MARK: - Private API

    internal func startIfNeeded(for publicKey: String) {
-       guard isPolling.wrappedValue[publicKey] != true else { return }
-
-       // Might be a race condition that the setUpPolling finishes too soon,
-       // and the timer is not created, if we mark the group as is polling
-       // after setUpPolling. So the poller may not work, thus misses messages
-       isPolling.mutate { $0[publicKey] = true }
-       setUpPolling(for: publicKey)
+       // Run on the 'pollerQueue' to ensure any 'Atomic' access doesn't block the main thread
+       // on startup
+       Threading.pollerQueue.async { [weak self] in
+           guard self?.isPolling.wrappedValue[publicKey] != true else { return }
+
+           // Might be a race condition that the setUpPolling finishes too soon,
+           // and the timer is not created, if we mark the group as is polling
+           // after setUpPolling. So the poller may not work, thus misses messages
+           self?.isPolling.mutate { $0[publicKey] = true }
+           self?.setUpPolling(for: publicKey)
+       }
    }
/// We want to initially trigger a poll against the target service node and then run the recursive polling, /// We want to initially trigger a poll against the target service node and then run the recursive polling,

@ -146,8 +146,8 @@ public struct MessageViewModel: FetchableRecordWithRowId, Decodable, Equatable,
// MARK: - Mutation // MARK: - Mutation
public func with( public func with(
attachments: Updatable<[Attachment]> = .existing, attachments: [Attachment]? = nil,
reactionInfo: Updatable<[ReactionInfo]> = .existing reactionInfo: [ReactionInfo]? = nil
) -> MessageViewModel { ) -> MessageViewModel {
return MessageViewModel( return MessageViewModel(
threadId: self.threadId, threadId: self.threadId,
@ -845,11 +845,9 @@ public extension MessageViewModel.AttachmentInteractionInfo {
updatedPagedDataCache = updatedPagedDataCache.upserting( updatedPagedDataCache = updatedPagedDataCache.upserting(
dataToUpdate.with( dataToUpdate.with(
attachments: .update( attachments: attachments
attachments .sorted()
.sorted() .map { $0.attachment }
.map { $0.attachment }
)
) )
) )
} }
@ -927,7 +925,7 @@ public extension MessageViewModel.ReactionInfo {
else { return } else { return }
updatedPagedDataCache = updatedPagedDataCache.upserting( updatedPagedDataCache = updatedPagedDataCache.upserting(
dataToUpdate.with(reactionInfo: .update(reactionInfo.sorted())) dataToUpdate.with(reactionInfo: reactionInfo.sorted())
) )
pagedRowIdsWithNoReactions.remove(interactionRowId) pagedRowIdsWithNoReactions.remove(interactionRowId)
} }
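`with(...)` now takes plain optionals instead of `Updatable` wrappers; a `nil` argument presumably keeps the existing value inside the copy. A stand-alone sketch of that pattern (the real initialiser copies many more fields, and note this form can no longer explicitly clear a field to nil, which was the point of the old wrapper):

// Stand-alone sketch of the optional-parameter `with` pattern; nil means "keep the existing value"
struct ViewModelSketch {
    let attachments: [String]
    let reactionInfo: [String]

    func with(
        attachments: [String]? = nil,
        reactionInfo: [String]? = nil
    ) -> ViewModelSketch {
        return ViewModelSketch(
            attachments: (attachments ?? self.attachments),
            reactionInfo: (reactionInfo ?? self.reactionInfo)
        )
    }
}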

@ -8,6 +8,28 @@ import SignalCoreKit
import SessionUtilitiesKit import SessionUtilitiesKit
public struct ProfileManager { public struct ProfileManager {
public enum AvatarUpdate {
case none
case remove
case uploadImage(UIImage)
case uploadFilePath(String)
case updateTo(url: String, key: Data, fileName: String?)
var image: UIImage? {
switch self {
case .uploadImage(let image): return image
default: return nil
}
}
var filePath: String? {
switch self {
case .uploadFilePath(let filePath): return filePath
default: return nil
}
}
}
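Callers now describe the intended avatar change with a single `AvatarUpdate` value and the update path switches over it, uploading only when there is actually a new image. An illustrative usage sketch, not the real `ProfileManager` API:

import UIKit

// Sketch only - mirrors the enum above but the handling function is illustrative
enum AvatarUpdateSketch {
    case none
    case remove
    case uploadImage(UIImage)
    case uploadFilePath(String)
    case updateTo(url: String, key: Data, fileName: String?)
}

func handle(_ update: AvatarUpdateSketch) {
    switch update {
        case .none, .remove, .updateTo:
            // No upload needed - just write the new (or cleared) url/key to the database
            print("apply profile change directly")

        case .uploadImage, .uploadFilePath:
            // Encrypt and upload first, then re-enter this flow as `.updateTo`
            print("upload avatar, then apply as .updateTo")
    }
}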
// The max bytes for a user's profile name, encoded in UTF8. // The max bytes for a user's profile name, encoded in UTF8.
// Before encrypting and submitting we NULL pad the name data to this length. // Before encrypting and submitting we NULL pad the name data to this length.
private static let nameDataLength: UInt = 64 private static let nameDataLength: UInt = 64
@ -263,77 +285,85 @@ public struct ProfileManager {
public static func updateLocal( public static func updateLocal(
queue: DispatchQueue, queue: DispatchQueue,
profileName: String, profileName: String,
image: UIImage?, avatarUpdate: AvatarUpdate = .none,
imageFilePath: String?, success: ((Database) throws -> ())? = nil,
success: ((Database, Profile) throws -> ())? = nil,
failure: ((ProfileManagerError) -> ())? = nil failure: ((ProfileManagerError) -> ())? = nil
) { ) {
prepareAndUploadAvatarImage( let userPublicKey: String = getUserHexEncodedPublicKey()
queue: queue, let isRemovingAvatar: Bool = {
image: image, switch avatarUpdate {
imageFilePath: imageFilePath, case .remove: return true
success: { fileInfo, newProfileKey in default: return false
// If we have no download url the we are removing the profile image }
guard let (downloadUrl, fileName): (String, String) = fileInfo else { }()
Storage.shared.writeAsync { db in
let existingProfile: Profile = Profile.fetchOrCreateCurrentUser(db) switch avatarUpdate {
case .none, .remove, .updateTo:
Storage.shared.writeAsync { db in
if isRemovingAvatar {
let existingProfileUrl: String? = try Profile
.filter(id: userPublicKey)
.select(.profilePictureUrl)
.asRequest(of: String.self)
.fetchOne(db)
let existingProfileFileName: String? = try Profile
.filter(id: userPublicKey)
.select(.profilePictureFileName)
.asRequest(of: String.self)
.fetchOne(db)
OWSLogger.verbose(existingProfile.profilePictureUrl != nil ? // Remove any cached avatar image value
if let fileName: String = existingProfileFileName {
profileAvatarCache.mutate { $0[fileName] = nil }
}
OWSLogger.verbose(existingProfileUrl != nil ?
"Updating local profile on service with cleared avatar." : "Updating local profile on service with cleared avatar." :
"Updating local profile on service with no avatar." "Updating local profile on service with no avatar."
) )
let updatedProfile: Profile = try existingProfile
.with(
name: profileName,
profilePictureUrl: nil,
profilePictureFileName: nil,
profileEncryptionKey: (existingProfile.profilePictureUrl != nil ?
.update(newProfileKey) :
.existing
)
)
.saved(db)
try SessionUtil.update(
profile: updatedProfile,
in: .global(variant: .userProfile)
)
SNLog("Successfully updated service with profile.")
try success?(db, updatedProfile)
} }
return
} try ProfileManager.updateProfileIfNeeded(
db,
// Update user defaults publicKey: userPublicKey,
UserDefaults.standard[.lastProfilePictureUpload] = Date() name: profileName,
avatarUpdate: avatarUpdate,
// Update the profile sentTimestamp: Date().timeIntervalSince1970
Storage.shared.writeAsync { db in )
let profile: Profile = try Profile
.fetchOrCreateCurrentUser(db)
.with(
name: profileName,
profilePictureUrl: .update(downloadUrl),
profilePictureFileName: .update(fileName),
profileEncryptionKey: .update(newProfileKey)
)
.saved(db)
SNLog("Successfully updated service with profile.") SNLog("Successfully updated service with profile.")
try success?(db, profile) try success?(db)
} }
},
failure: failure case .uploadFilePath, .uploadImage:
) prepareAndUploadAvatarImage(
queue: queue,
image: avatarUpdate.image,
imageFilePath: avatarUpdate.filePath,
success: { downloadUrl, fileName, newProfileKey in
Storage.shared.writeAsync { db in
try ProfileManager.updateProfileIfNeeded(
db,
publicKey: userPublicKey,
name: profileName,
avatarUpdate: .updateTo(url: downloadUrl, key: newProfileKey, fileName: fileName),
sentTimestamp: Date().timeIntervalSince1970
)
SNLog("Successfully updated service with profile.")
try success?(db)
}
},
failure: failure
)
}
} }
public static func prepareAndUploadAvatarImage( private static func prepareAndUploadAvatarImage(
queue: DispatchQueue, queue: DispatchQueue,
image: UIImage?, image: UIImage?,
imageFilePath: String?, imageFilePath: String?,
success: @escaping ((downloadUrl: String, fileName: String)?, Data) -> (), success: @escaping ((downloadUrl: String, fileName: String, profileKey: Data)) -> (),
failure: ((ProfileManagerError) -> ())? = nil failure: ((ProfileManagerError) -> ())? = nil
) { ) {
queue.async { queue.async {
@ -348,7 +378,9 @@ public struct ProfileManager {
avatarImageData = try { avatarImageData = try {
guard var image: UIImage = image else { guard var image: UIImage = image else {
guard let imageFilePath: String = imageFilePath else { return nil } guard let imageFilePath: String = imageFilePath else {
throw ProfileManagerError.invalidCall
}
let data: Data = try Data(contentsOf: URL(fileURLWithPath: imageFilePath)) let data: Data = try Data(contentsOf: URL(fileURLWithPath: imageFilePath))
@ -397,20 +429,8 @@ public struct ProfileManager {
// If we have no image then we should succeed (database changes happen in the callback) // If we have no image then we should succeed (database changes happen in the callback)
guard let data: Data = avatarImageData else { guard let data: Data = avatarImageData else {
// Remove any cached avatar image value failure?(ProfileManagerError.invalidCall)
let maybeExistingFileName: String? = Storage.shared return
.read { db in
try Profile
.select(.profilePictureFileName)
.asRequest(of: String.self)
.fetchOne(db)
}
if let fileName: String = maybeExistingFileName {
profileAvatarCache.mutate { $0[fileName] = nil }
}
return success(nil, newProfileKey)
} }
// If we have a new avatar image, we must first: // If we have a new avatar image, we must first:
@ -469,9 +489,124 @@ public struct ProfileManager {
profileAvatarCache.mutate { $0[fileName] = data } profileAvatarCache.mutate { $0[fileName] = data }
SNLog("Successfully uploaded avatar image.") SNLog("Successfully uploaded avatar image.")
success((downloadUrl, fileName), newProfileKey) success((downloadUrl, fileName, newProfileKey))
} }
) )
} }
} }
public static func updateProfileIfNeeded(
_ db: Database,
publicKey: String,
name: String?,
avatarUpdate: AvatarUpdate,
sentTimestamp: TimeInterval,
calledFromConfigHandling: Bool = false,
dependencies: Dependencies = Dependencies()
) throws {
let isCurrentUser = (publicKey == getUserHexEncodedPublicKey(db, dependencies: dependencies))
let profile: Profile = Profile.fetchOrCreate(id: publicKey)
var profileChanges: [ColumnAssignment] = []
// Name
if let name: String = name, !name.isEmpty, name != profile.name {
let shouldUpdate: Bool
if isCurrentUser {
shouldUpdate = given(UserDefaults.standard[.lastDisplayNameUpdate]) {
sentTimestamp > $0.timeIntervalSince1970
}
.defaulting(to: true)
}
else {
shouldUpdate = true
}
if shouldUpdate {
if isCurrentUser {
UserDefaults.standard[.lastDisplayNameUpdate] = Date(timeIntervalSince1970: sentTimestamp)
}
profileChanges.append(Profile.Columns.name.set(to: name))
}
}
// Profile picture & profile key
var avatarNeedsDownload: Bool = false
let shouldUpdateAvatar: Bool = {
guard isCurrentUser else { return true }
return given(UserDefaults.standard[.lastProfilePictureUpdate]) {
sentTimestamp > $0.timeIntervalSince1970
}
.defaulting(to: true)
}()
if shouldUpdateAvatar {
switch avatarUpdate {
case .none: break
case .uploadImage, .uploadFilePath: preconditionFailure("Invalid options for this function")
case .remove:
if isCurrentUser {
UserDefaults.standard[.lastProfilePictureUpdate] = Date(timeIntervalSince1970: sentTimestamp)
}
profileChanges.append(Profile.Columns.profilePictureUrl.set(to: nil))
profileChanges.append(Profile.Columns.profileEncryptionKey.set(to: nil))
// Profile filename (this isn't synchronized between devices so can be immediately saved)
_ = try? Profile
.filter(id: publicKey)
.updateAll(db, Profile.Columns.profilePictureFileName.set(to: nil))
case .updateTo(let url, let key, let fileName):
if
(
url != profile.profilePictureUrl ||
key != profile.profileEncryptionKey
) &&
key.count == ProfileManager.avatarAES256KeyByteLength &&
key != profile.profileEncryptionKey
{
profileChanges.append(Profile.Columns.profilePictureUrl.set(to: url))
profileChanges.append(Profile.Columns.profileEncryptionKey.set(to: key))
avatarNeedsDownload = true
}
// Profile filename (this isn't synchronized between devices so can be immediately saved)
if let fileName: String = fileName {
_ = try? Profile
.filter(id: publicKey)
.updateAll(db, Profile.Columns.profilePictureFileName.set(to: fileName))
}
}
}
// Persist any changes
if !profileChanges.isEmpty {
try profile.save(db)
if calledFromConfigHandling {
try Profile
.filter(id: publicKey)
.updateAll( // Handling a config update so don't use `updateAllAndConfig`
db,
profileChanges
)
}
else {
try Profile
.filter(id: publicKey)
.updateAllAndConfig(db, profileChanges)
}
}
// Download the profile picture if needed
guard avatarNeedsDownload else { return }
db.afterNextTransaction { db in
// Need to refetch to ensure the db changes have occurred
ProfileManager.downloadAvatar(for: Profile.fetchOrCreate(id: publicKey))
}
}
}
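For context, a minimal usage sketch of the new `updateProfileIfNeeded` entry point as a caller handling an incoming profile update might invoke it. This is illustrative only and not part of the commit; `senderPublicKey`, `newName`, `newProfileUrl`, `newProfileKey` and `sentTimestampSeconds` stand in for values decoded from the incoming message.

    Storage.shared.writeAsync { db in
        // Hypothetical message-derived values; `.updateTo` is the case this function accepts
        // for remote updates (`.uploadImage` / `.uploadFilePath` are rejected above)
        try ProfileManager.updateProfileIfNeeded(
            db,
            publicKey: senderPublicKey,
            name: newName,
            avatarUpdate: .updateTo(url: newProfileUrl, key: newProfileKey, fileName: nil),
            sentTimestamp: sentTimestampSeconds
        )
    }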

View File

@ -8,6 +8,7 @@ public enum ProfileManagerError: LocalizedError {
    case avatarEncryptionFailed
    case avatarUploadFailed
    case avatarUploadMaxFileSizeExceeded
+   case invalidCall
    
    var localizedDescription: String {
        switch self {
@ -16,6 +17,7 @@ public enum ProfileManagerError: LocalizedError {
            case .avatarEncryptionFailed: return "Avatar encryption failed."
            case .avatarUploadFailed: return "Avatar upload failed."
            case .avatarUploadMaxFileSizeExceeded: return "Maximum file size exceeded."
+           case .invalidCall: return "Attempted to remove avatar using the wrong method."
        }
    }
}

View File

@ -143,7 +143,11 @@ public final class NotificationServiceExtension: UNNotificationServiceExtension
                        self.handleSuccessForIncomingCall(db, for: callMessage)
                    
                    case let sharedConfigMessage as SharedConfigMessage:
-                       try SessionUtil.handleConfigMessages(db, messages: [sharedConfigMessage])
+                       try SessionUtil.handleConfigMessages(
+                           db,
+                           messages: [sharedConfigMessage],
+                           publicKey: (processedMessage.threadId ?? "")
+                       )
                    
                    default: break
                }
@ -214,9 +218,7 @@ public final class NotificationServiceExtension: UNNotificationServiceExtension
        // If we need a config sync then trigger it now
        if needsConfigSync {
-           Storage.shared.write { db in
-               try MessageSender.syncConfiguration(db, forceSyncNow: true).sinkUntilComplete()
-           }
+           ConfigurationSyncJob.enqueue()
        }
        
        checkIsAppReady()

View File

@ -92,9 +92,7 @@ final class ShareNavController: UINavigationController, ShareViewDelegate {
        // If we need a config sync then trigger it now
        if needsConfigSync {
-           Storage.shared.write { db in
-               try? MessageSender.syncConfiguration(db, forceSyncNow: true).sinkUntilComplete()
-           }
+           ConfigurationSyncJob.enqueue()
        }
        
        checkIsAppReady()

View File

@ -18,13 +18,6 @@ public enum GetSnodePoolJob: JobExecutor {
        failure: @escaping (Job, Error?, Bool) -> (),
        deferred: @escaping (Job) -> ()
    ) {
-       // If the user doesn't exist then don't do anything (when the user registers we run this
-       // job directly)
-       guard Identity.userExists() else {
-           deferred(job)
-           return
-       }
-       
        // If we already have cached Snodes then we still want to trigger the 'SnodeAPI.getSnodePool'
        // but we want to succeed this job immediately (since it's marked as blocking), this allows us
        // to block if we have no Snode pool and prevent other jobs from failing but avoids having to
@ -35,7 +28,10 @@
            return
        }
        
+       // If we don't have the snode pool cached then we should also try to build the path (this will
+       // speed up the onboarding process for new users because it can run before the user is created)
        SnodeAPI.getSnodePool()
+           .flatMap { _ in OnionRequestAPI.getPath(excluding: nil) }
            .subscribe(on: queue)
            .receive(on: queue)
            .sinkUntilComplete(
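The intent of the updated chain above, sketched with plain Combine so it stands alone (`.sink`/`.store` substituted for the project's `sinkUntilComplete` helper; `queue` is assumed to be the job's background DispatchQueue):

    import Combine

    var cancellables: Set<AnyCancellable> = []

    SnodeAPI.getSnodePool()
        .flatMap { _ in OnionRequestAPI.getPath(excluding: nil) }  // pre-build an onion path as well
        .subscribe(on: queue)
        .receive(on: queue)
        .sink(
            receiveCompletion: { _ in /* mark the blocking job as succeeded or failed */ },
            receiveValue: { _ in }
        )
        .store(in: &cancellables)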

View File

@ -6,7 +6,7 @@ extension SnodeAPI {
    public class SendMessageRequest: SnodeAuthenticatedRequestBody {
        enum CodingKeys: String, CodingKey {
            case namespace
-           case signatureTimestamp = "sig_timestamp"
+           case signatureTimestamp = "timestamp"//"sig_timestamp" // TODO: Add this back once the snodes are fixed!!
        }
        
        let message: SnodeMessage

View File

@ -209,7 +209,7 @@ public enum OnionRequestAPI: OnionRequestAPIType {
    }
    
    /// Returns a `Path` to be used for building an onion request. Builds new paths as needed.
-   private static func getPath(excluding snode: Snode?) -> AnyPublisher<[Snode], Error> {
+   internal static func getPath(excluding snode: Snode?) -> AnyPublisher<[Snode], Error> {
        guard pathSize >= 1 else { preconditionFailure("Can't build path of size zero.") }
        let paths: [[Snode]] = OnionRequestAPI.paths

View File

@ -7,6 +7,8 @@ import GRDB
import SessionUtilitiesKit

public final class SnodeAPI {
+   public typealias TargetedMessage = (message: SnodeMessage, namespace: Namespace)
+   
    internal static let sodium: Atomic<Sodium> = Atomic(Sodium())
    
    private static var hasLoadedSnodePool: Atomic<Bool> = Atomic(false)
@ -47,7 +49,6 @@
        ]
    }()
    private static let snodeFailureThreshold: Int = 3
-   private static let targetSwarmSnodeCount: Int = 2
    private static let minSnodePoolCount: Int = 12
    
    private static func offsetTimestampMsNow() -> UInt64 {
@ -269,13 +270,6 @@
            .eraseToAnyPublisher()
    }
    
-   public static func getTargetSnodes(for publicKey: String) -> AnyPublisher<[Snode], Error> {
-       // shuffled() uses the system's default random generator, which is cryptographically secure
-       return getSwarm(for: publicKey)
-           .map { Array($0.shuffled().prefix(targetSwarmSnodeCount)) }
-           .eraseToAnyPublisher()
-   }
-   
    public static func getSwarm(
        for publicKey: String,
        using dependencies: SSKDependencies = SSKDependencies()
@ -422,19 +416,21 @@
            )
            .decoded(as: responseTypes, using: dependencies)
            .map { batchResponse -> [SnodeAPI.Namespace: (info: ResponseInfoType, data: (messages: [SnodeReceivedMessage], lastHash: String?)?)] in
-               zip(namespaces, batchResponse)
+               zip(namespaces, batchResponse.responses)
                    .reduce(into: [:]) { result, next in
-                       guard let messageResponse: GetMessagesResponse = (next.1.1 as? HTTP.BatchSubResponse<GetMessagesResponse>)?.body else {
+                       guard
+                           let subResponse: HTTP.BatchSubResponse<GetMessagesResponse> = (next.1 as? HTTP.BatchSubResponse<GetMessagesResponse>),
+                           let messageResponse: GetMessagesResponse = subResponse.body
+                       else {
                            return
                        }
                        
                        let namespace: SnodeAPI.Namespace = next.0
-                       let requestInfo: ResponseInfoType = next.1.0
                        
                        result[namespace] = (
-                           requestInfo,
-                           (
-                               messageResponse.messages
+                           info: subResponse.responseInfo,
+                           data: (
+                               messages: messageResponse.messages
                                    .compactMap { rawMessage -> SnodeReceivedMessage? in
                                        SnodeReceivedMessage(
                                            snode: snode,
@ -443,7 +439,7 @@
                                            rawMessage: rawMessage
                                        )
                                    },
-                               namespaceLastHash[namespace]
+                               lastHash: namespaceLastHash[namespace]
                            )
                        )
                    }
@ -453,13 +449,13 @@
            .eraseToAnyPublisher()
    }
    
-   // MARK: Store
+   // MARK: - Store
    
    public static func sendMessage(
        _ message: SnodeMessage,
        in namespace: Namespace,
        using dependencies: SSKDependencies = SSKDependencies()
-   ) -> AnyPublisher<(Result<(ResponseInfoType, SendMessagesResponse), Error>, Int), Error> {
+   ) -> AnyPublisher<(ResponseInfoType, SendMessagesResponse), Error> {
        let publicKey: String = (Features.useTestnet ?
            message.recipient.removingIdPrefixIfNeeded() :
            message.recipient
@ -511,27 +507,125 @@
                .eraseToAnyPublisher()
        }
        
-       return getTargetSnodes(for: publicKey)
+       return getSwarm(for: publicKey)
            .subscribe(on: Threading.workQueue)
-           .flatMap { targetSnodes -> AnyPublisher<(Result<(ResponseInfoType, SendMessagesResponse), Error>, Int), Error> in
-               Publishers
-                   .MergeMany(
-                       targetSnodes
-                           .map { targetSnode -> AnyPublisher<Result<(ResponseInfoType, SendMessagesResponse), Error>, Never> in
-                               return sendMessage(to: targetSnode)
-                                   .retry(maxRetryCount)
-                                   .eraseToAnyPublisher()
-                                   .asResult()
-                           }
-                   )
-                   .map { result in (result, targetSnodes.count) }
-                   .setFailureType(to: Error.self)
+           .flatMap { swarm -> AnyPublisher<(ResponseInfoType, SendMessagesResponse), Error> in
+               guard let snode: Snode = swarm.randomElement() else {
+                   return Fail(error: SnodeAPIError.generic)
+                       .eraseToAnyPublisher()
+               }
+               
+               return sendMessage(to: snode)
+                   .retry(maxRetryCount)
                    .eraseToAnyPublisher()
            }
+           .retry(maxRetryCount)
            .eraseToAnyPublisher()
    }
    
-   // MARK: Edit
    public static func sendConfigMessages(
_ targetedMessages: [TargetedMessage],
oldHashes: [String],
using dependencies: SSKDependencies = SSKDependencies()
) -> AnyPublisher<HTTP.BatchResponse, Error> {
guard
!targetedMessages.isEmpty,
let recipient: String = targetedMessages.first?.message.recipient
else {
return Fail(error: SnodeAPIError.generic)
.eraseToAnyPublisher()
}
// TODO: Need to get either the closed group subKey or the userEd25519 key for auth
guard let userED25519KeyPair = Identity.fetchUserEd25519KeyPair() else {
return Fail(error: SnodeAPIError.noKeyPair)
.eraseToAnyPublisher()
}
let userX25519PublicKey: String = getUserHexEncodedPublicKey()
let publicKey: String = (Features.useTestnet ?
recipient.removingIdPrefixIfNeeded() :
recipient
)
var requests: [SnodeAPI.BatchRequest.Info] = targetedMessages
.map { message, namespace in
// Check if this namespace requires authentication
guard namespace.requiresReadAuthentication else {
return BatchRequest.Info(
request: SnodeRequest(
endpoint: .sendMessage,
body: LegacySendMessagesRequest(
message: message,
namespace: namespace
)
),
responseType: SendMessagesResponse.self
)
}
return BatchRequest.Info(
request: SnodeRequest(
endpoint: .sendMessage,
body: SendMessageRequest(
message: message,
namespace: namespace,
subkey: nil, // TODO: Need to get this
timestampMs: SnodeAPI.offsetTimestampMsNow(),
ed25519PublicKey: userED25519KeyPair.publicKey,
ed25519SecretKey: userED25519KeyPair.secretKey
)
),
responseType: SendMessagesResponse.self
)
}
// If we had any previous config messages then we should delete them
if !oldHashes.isEmpty {
requests.append(
BatchRequest.Info(
request: SnodeRequest(
endpoint: .deleteMessages,
body: DeleteMessagesRequest(
messageHashes: oldHashes,
requireSuccessfulDeletion: false,
pubkey: userX25519PublicKey,
ed25519PublicKey: userED25519KeyPair.publicKey,
ed25519SecretKey: userED25519KeyPair.secretKey
)
),
responseType: DeleteMessagesResponse.self
)
)
}
let responseTypes = requests.map { $0.responseType }
return getSwarm(for: publicKey)
.subscribe(on: Threading.workQueue)
.flatMap { swarm -> AnyPublisher<HTTP.BatchResponse, Error> in
guard let snode: Snode = swarm.randomElement() else {
return Fail(error: SnodeAPIError.generic)
.eraseToAnyPublisher()
}
return SnodeAPI
.send(
request: SnodeRequest(
endpoint: .sequence,
body: BatchRequest(requests: requests)
),
to: snode,
associatedWith: publicKey,
using: dependencies
)
.eraseToAnyPublisher()
.decoded(as: responseTypes, using: dependencies)
.eraseToAnyPublisher()
}
.retry(maxRetryCount)
.eraseToAnyPublisher()
}
// MARK: - Edit
    public static func updateExpiry(
        publicKey: String,
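A hedged example of driving the new `sendConfigMessages` API (illustrative; `userProfileConfigMessage` is assumed to be an already-built `SnodeMessage` wrapping the serialised config, and `cancellables` an existing `Set<AnyCancellable>`):

    SnodeAPI
        .sendConfigMessages(
            [(message: userProfileConfigMessage, namespace: .configUserProfile)],
            oldHashes: ["hashOfPreviousConfigMessage"]
        )
        .sink(
            receiveCompletion: { _ in /* retry or surface the error */ },
            receiveValue: { batchResponse in
                // The sequence returns one sub-response per request: the store(s) first,
                // then the delete of `oldHashes` when it was non-empty
                print("Received \(batchResponse.responses.count) sub-responses")
            }
        )
        .store(in: &cancellables)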

View File

@ -3,10 +3,12 @@
import Foundation

public extension SnodeAPI {
-   enum Namespace: Int, Codable {
+   enum Namespace: Int, Codable, Hashable {
        case `default` = 0
        
-       case userProfileConfig = 2
+       case configUserProfile = 2
+       case configContacts = 3
+       case configClosedGroupInfo = 11
        
        case legacyClosedGroup = -10

View File

@ -65,14 +65,12 @@ public extension Identity {
        )
    }
    
-   static func store(seed: Data, ed25519KeyPair: KeyPair, x25519KeyPair: KeyPair) {
-       Storage.shared.write { db in
-           try Identity(variant: .seed, data: seed).save(db)
-           try Identity(variant: .ed25519SecretKey, data: Data(ed25519KeyPair.secretKey)).save(db)
-           try Identity(variant: .ed25519PublicKey, data: Data(ed25519KeyPair.publicKey)).save(db)
-           try Identity(variant: .x25519PrivateKey, data: Data(x25519KeyPair.secretKey)).save(db)
-           try Identity(variant: .x25519PublicKey, data: Data(x25519KeyPair.publicKey)).save(db)
-       }
+   static func store(_ db: Database, seed: Data, ed25519KeyPair: KeyPair, x25519KeyPair: KeyPair) throws {
+       try Identity(variant: .seed, data: seed).save(db)
+       try Identity(variant: .ed25519SecretKey, data: Data(ed25519KeyPair.secretKey)).save(db)
+       try Identity(variant: .ed25519PublicKey, data: Data(ed25519KeyPair.publicKey)).save(db)
+       try Identity(variant: .x25519PrivateKey, data: Data(x25519KeyPair.secretKey)).save(db)
+       try Identity(variant: .x25519PublicKey, data: Data(x25519KeyPair.publicKey)).save(db)
    }
    
    static func userExists(_ db: Database? = nil) -> Bool {
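Call sites move the database write outside accordingly; a sketch of the registration flow invoking the reworked `store` (the `seed` and key pairs are assumed to come from the onboarding code):

    Storage.shared.write { db in
        try Identity.store(
            db,
            seed: seed,
            ed25519KeyPair: ed25519KeyPair,
            x25519KeyPair: x25519KeyPair
        )
    }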

View File

@ -102,6 +102,10 @@ public struct Job: Codable, Equatable, Identifiable, FetchableRecord, MutablePer
        /// This is a job that runs once whenever an attachment is downloaded to attempt to decode and properly
        /// download the attachment
        case attachmentDownload
        
+       /// This is a job that runs once whenever the user config or a closed group config changes, it retrieves the
+       /// state of all config objects and syncs any that are flagged as needing to be synced
+       case configurationSync
    }
    
    public enum Behaviour: Int, Codable, DatabaseValueConvertible, CaseIterable {
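How the new variant is expected to be triggered, mirroring the `ConfigurationSyncJob.enqueue()` calls earlier in this diff (pattern only, not code from this file):

    // Whenever local config state changes and needs to be pushed to the swarm
    if needsConfigSync {
        ConfigurationSyncJob.enqueue()
    }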

View File

@ -1,6 +1,10 @@
-@objc(SNFeatures)
-public final class Features : NSObject {
-   public static let useOnionRequests = true
-   public static let useTestnet = false
+// Copyright © 2022 Rangeproof Pty Ltd. All rights reserved.
+
+import Foundation
+
+public final class Features {
+   public static let useOnionRequests: Bool = true
+   public static let useTestnet: Bool = false
+   public static let useSharedUtilForUserConfig: Bool = true
}

View File

@ -65,7 +65,8 @@ public final class JobRunner {
            jobVariants.remove(.attachmentUpload),
            jobVariants.remove(.messageSend),
            jobVariants.remove(.notifyPushServer),
-           jobVariants.remove(.sendReadReceipts)
+           jobVariants.remove(.sendReadReceipts),
+           jobVariants.remove(.configurationSync)
        ].compactMap { $0 }
    )
    
    let messageReceiveQueue: JobQueue = JobQueue(

View File

@ -1,121 +0,0 @@
// Copyright © 2022 Rangeproof Pty Ltd. All rights reserved.
import Foundation
public enum Updatable<Wrapped>: ExpressibleByNilLiteral {
/// A cleared value.
///
/// In code, the cleared of a value is typically written using the `nil`
/// literal rather than the explicit `.remove` enumeration case.
case remove
/// The existing value, this will leave whatever value is currently available.
case existing
/// An updated value, stored as `Wrapped`.
case update(Wrapped)
// MARK: - ExpressibleByNilLiteral
public init(nilLiteral: ()) {
self = .remove
}
public static func updateIf(_ maybeValue: Wrapped?) -> Updatable<Wrapped> {
switch maybeValue {
case .some(let value): return .update(value)
default: return .existing
}
}
public static func updateTo(_ maybeValue: Wrapped?) -> Updatable<Wrapped> {
switch maybeValue {
case .some(let value): return .update(value)
default: return .remove
}
}
// MARK: - Functions
public func value(existing: Wrapped) -> Wrapped? {
switch self {
case .remove: return nil
case .existing: return existing
case .update(let newValue): return newValue
}
}
public func value(existing: Wrapped) -> Wrapped {
switch self {
case .remove: fatalError("Attempted to assign a 'removed' value to a non-null")
case .existing: return existing
case .update(let newValue): return newValue
}
}
}
// MARK: - Coalesing-nil operator
public func ?? <T>(updatable: Updatable<T>, existingValue: @autoclosure () throws -> T) rethrows -> T {
switch updatable {
case .remove: fatalError("Attempted to assign a 'removed' value to a non-null")
case .existing: return try existingValue()
case .update(let newValue): return newValue
}
}
public func ?? <T>(updatable: Updatable<T>, existingValue: @autoclosure () throws -> T?) rethrows -> T? {
switch updatable {
case .remove: return nil
case .existing: return try existingValue()
case .update(let newValue): return newValue
}
}
public func ?? <T>(updatable: Updatable<Optional<T>>, existingValue: @autoclosure () throws -> T?) rethrows -> T? {
switch updatable {
case .remove: return nil
case .existing: return try existingValue()
case .update(let newValue): return newValue
}
}
// MARK: - ExpressibleBy Conformance
extension Updatable {
public init(_ value: Wrapped) {
self = .update(value)
}
}
extension Updatable: ExpressibleByUnicodeScalarLiteral, ExpressibleByExtendedGraphemeClusterLiteral, ExpressibleByStringLiteral where Wrapped == String {
public init(stringLiteral value: Wrapped) {
self = .update(value)
}
public init(extendedGraphemeClusterLiteral value: Wrapped) {
self = .update(value)
}
public init(unicodeScalarLiteral value: Wrapped) {
self = .update(value)
}
}
extension Updatable: ExpressibleByIntegerLiteral where Wrapped == Int {
public init(integerLiteral value: Int) {
self = .update(value)
}
}
extension Updatable: ExpressibleByFloatLiteral where Wrapped == Double {
public init(floatLiteral value: Double) {
self = .update(value)
}
}
extension Updatable: ExpressibleByBooleanLiteral where Wrapped == Bool {
public init(booleanLiteral value: Bool) {
self = .update(value)
}
}

View File

@ -4,16 +4,20 @@ import Foundation
import Combine

public extension HTTP {
    // MARK: - Convenience Aliases
    
-   typealias BatchResponse = [(ResponseInfoType, Codable?)]
    typealias BatchResponseTypes = [Codable.Type]
    
+   // MARK: - BatchResponse
+   
+   struct BatchResponse {
+       public let info: ResponseInfoType
+       public let responses: [Codable]
+   }
+   
    // MARK: - BatchSubResponse<T>
    
-   struct BatchSubResponse<T: Codable>: Codable {
+   struct BatchSubResponse<T: Codable>: BatchSubResponseType {
        /// The numeric http response code (e.g. 200 for success)
-       public let code: Int32
+       public let code: Int
        
        /// Any headers returned by the request
        public let headers: [String: String]
@ -25,7 +29,7 @@ public extension HTTP {
        public let failedToParseBody: Bool
        
        public init(
-           code: Int32,
+           code: Int,
            headers: [String: String] = [:],
            body: T? = nil,
            failedToParseBody: Bool = false
@ -38,13 +42,23 @@
        }
    }
}

+public protocol BatchSubResponseType: Codable {
+   var code: Int { get }
+   var headers: [String: String] { get }
+   var failedToParseBody: Bool { get }
+}
+
+extension BatchSubResponseType {
+   public var responseInfo: ResponseInfoType { HTTP.ResponseInfo(code: code, headers: headers) }
+}
+
public extension HTTP.BatchSubResponse {
    init(from decoder: Decoder) throws {
        let container: KeyedDecodingContainer<CodingKeys> = try decoder.container(keyedBy: CodingKeys.self)
        let body: T? = try? container.decode(T.self, forKey: .body)
        
        self = HTTP.BatchSubResponse(
-           code: try container.decode(Int32.self, forKey: .code),
+           code: try container.decode(Int.self, forKey: .code),
            headers: ((try? container.decode([String: String].self, forKey: .headers)) ?? [:]),
            body: body,
            failedToParseBody: (
@ -111,13 +125,15 @@ public extension AnyPublisher where Output == (ResponseInfoType, Data?), Failure
            do {
                // TODO: Remove the 'Swift.'
-               let result: HTTP.BatchResponse = try Swift.zip(dataArray, types)
-                   .map { data, type in try type.decoded(from: data, using: dependencies) }
-                   .map { data in (responseInfo, data) }
-               
-               return Just(result)
-                   .setFailureType(to: Error.self)
-                   .eraseToAnyPublisher()
+               return Just(
+                   HTTP.BatchResponse(
+                       info: responseInfo,
+                       responses: try Swift.zip(dataArray, types)
+                           .map { data, type in try type.decoded(from: data, using: dependencies) }
+                   )
+               )
+               .setFailureType(to: Error.self)
+               .eraseToAnyPublisher()
            }
            catch {
                return Fail(error: HTTPError.parsingFailed)
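A sketch of consuming the new `BatchResponse` shape, paralleling the polling change in `SnodeAPI` above; `batchResponse` is assumed to be the decoded result of a batch/sequence request and `handleStoreResult` a hypothetical handler:

    let storeResponse: HTTP.BatchSubResponse<SendMessagesResponse>? = batchResponse.responses
        .compactMap { $0 as? HTTP.BatchSubResponse<SendMessagesResponse> }
        .first

    if let response: HTTP.BatchSubResponse<SendMessagesResponse> = storeResponse, let body: SendMessagesResponse = response.body {
        // `responseInfo` is provided by the BatchSubResponseType extension above
        handleStoreResult(response.responseInfo, body)  // hypothetical handler
    }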

View File

@ -77,9 +77,12 @@ public enum AppSetup {
                // After the migrations have run but before the migration completion we load the
                // SessionUtil state and update the 'needsConfigSync' flag based on whether the
                // configs also need to be sync'ed
-               SessionUtil.loadState(
-                   ed25519SecretKey: Identity.fetchUserEd25519KeyPair()?.secretKey
-               )
+               if Identity.userExists() {
+                   SessionUtil.loadState(
+                       userPublicKey: getUserHexEncodedPublicKey(),
+                       ed25519SecretKey: Identity.fetchUserEd25519KeyPair()?.secretKey
+                   )
+               }
                
                DispatchQueue.main.async {
                    migrationsCompletion(result, (needsConfigSync || SessionUtil.needsSync))