From 1b52e978ea671d8ee082a0d7ccc913505561312e Mon Sep 17 00:00:00 2001
From: nielsandriesse
Date: Wed, 17 Feb 2021 08:01:54 +1100
Subject: [PATCH] Hook up voice message sending logic

---
 .../ConversationVC+Interaction.swift          | 67 +++++++++++++----
 .../VoiceMessageRecordingView.swift           | 75 ++++++++++++++++---
 2 files changed, 115 insertions(+), 27 deletions(-)

diff --git a/Session/Conversations V2/ConversationVC+Interaction.swift b/Session/Conversations V2/ConversationVC+Interaction.swift
index 95fa1c19c..305946443 100644
--- a/Session/Conversations V2/ConversationVC+Interaction.swift
+++ b/Session/Conversations V2/ConversationVC+Interaction.swift
@@ -23,17 +23,20 @@ extension ConversationVC : InputViewDelegate, MessageCellDelegate, ContextMenuAc
     func handleDocumentButtonTapped() {
         // TODO: Implement
     }
-    
+
+    private func showBlockedModalIfNeeded() -> Bool {
+        guard let thread = thread as? TSContactThread else { return false }
+        let publicKey = thread.contactIdentifier()
+        guard OWSBlockingManager.shared().isRecipientIdBlocked(publicKey) else { return false }
+        let blockedModal = BlockedModal(publicKey: publicKey)
+        blockedModal.modalPresentationStyle = .overFullScreen
+        blockedModal.modalTransitionStyle = .crossDissolve
+        present(blockedModal, animated: true, completion: nil)
+        return true
+    }
+
     func handleSendButtonTapped() {
-        if let thread = thread as? TSContactThread {
-            let publicKey = thread.contactIdentifier()
-            guard !OWSBlockingManager.shared().isRecipientIdBlocked(publicKey) else {
-                let blockedModal = BlockedModal(publicKey: publicKey)
-                blockedModal.modalPresentationStyle = .overFullScreen
-                blockedModal.modalTransitionStyle = .crossDissolve
-                return present(blockedModal, animated: true, completion: nil)
-            }
-        }
+        guard !showBlockedModalIfNeeded() else { return }
         // TODO: Attachments
         let text = snInputView.text.trimmingCharacters(in: .whitespacesAndNewlines)
         let thread = self.thread
@@ -55,14 +58,46 @@ extension ConversationVC : InputViewDelegate, MessageCellDelegate, ContextMenuAc
             Storage.shared.write { transaction in
                 MessageSender.send(message, with: [], in: thread, using: transaction as! YapDatabaseReadWriteTransaction)
             }
-            guard let self = self else { return }
-            self.snInputView.text = ""
-            self.snInputView.quoteDraftInfo = nil
-            self.markAllAsRead()
-            // TODO: Reset mentions
+            self?.handleMessageSent()
         })
     }
 
+    func sendAttachments(_ attachments: [SignalAttachment], with text: String) {
+        guard !showBlockedModalIfNeeded() else { return }
+        for attachment in attachments {
+            if attachment.hasError {
+                let alert = UIAlertController(title: "Session", message: "An error occurred.", preferredStyle: .alert)
+                alert.addAction(UIAlertAction(title: "OK", style: .default, handler: nil))
+                return present(alert, animated: true, completion: nil)
+            }
+        }
+        let thread = self.thread
+        let message = VisibleMessage()
+        message.sentTimestamp = NSDate.millisecondTimestamp()
+        message.text = text
+        let tsMessage = TSOutgoingMessage.from(message, associatedWith: thread)
+        Storage.write(with: { transaction in
+            tsMessage.save(with: transaction)
+        }, completion: { [weak self] in
+            Storage.write { transaction in
+                MessageSender.send(message, with: attachments, in: thread, using: transaction)
+            }
+            self?.handleMessageSent()
+        })
+    }
+
+    func handleMessageSent() {
+        // TODO: Reset mentions
+        self.snInputView.text = ""
+        self.snInputView.quoteDraftInfo = nil
+        self.markAllAsRead()
+        if Environment.shared.preferences.soundInForeground() {
+            let soundID = OWSSounds.systemSoundID(for: .messageSent, quiet: true)
+            AudioServicesPlaySystemSound(soundID)
+        }
+        SSKEnvironment.shared.typingIndicators.didSendOutgoingMessage(inThread: thread)
+    }
+
     func handleViewItemLongPressed(_ viewItem: ConversationViewItem) {
         guard let index = viewItems.firstIndex(where: { $0 === viewItem }),
             let cell = messagesTableView.cellForRow(at: IndexPath(row: index, section: 0)) as? VisibleMessageCell,
@@ -340,7 +375,7 @@ extension ConversationVC : InputViewDelegate, MessageCellDelegate, ContextMenuAc
             return present(alert, animated: true, completion: nil)
         }
         // Send attachment
-        // TODO: Send the attachment
+        sendAttachments([ attachment ], with: "")
     }
 
     func cancelVoiceMessageRecording() {
diff --git a/Session/Conversations V2/Input View/VoiceMessageRecordingView.swift b/Session/Conversations V2/Input View/VoiceMessageRecordingView.swift
index a56b3b27d..5c8aa87b7 100644
--- a/Session/Conversations V2/Input View/VoiceMessageRecordingView.swift
+++ b/Session/Conversations V2/Input View/VoiceMessageRecordingView.swift
@@ -250,13 +250,27 @@ final class VoiceMessageRecordingView : UIView {
             chevronImageView.transform = .identity
             slideToCancelLabel.transform = .identity
         }
+        if isValidLockViewLocation(location) {
+            if !lockView.isExpanded {
+                UIView.animate(withDuration: 0.25) {
+                    self.lockViewBottomConstraint.constant = -Values.mediumSpacing + LockView.expansionMargin
+                }
+            }
+            lockView.expandIfNeeded()
+        } else {
+            if lockView.isExpanded {
+                UIView.animate(withDuration: 0.25) {
+                    self.lockViewBottomConstraint.constant = -Values.mediumSpacing
+                }
+            }
+            lockView.collapseIfNeeded()
+        }
     }
 
     func handleLongPressEnded(at location: CGPoint) {
-        let lockViewHitMargin = VoiceMessageRecordingView.lockViewHitMargin
         if pulseView.frame.contains(location) {
             delegate.endVoiceMessageRecording()
-        } else if location.y < 0 && location.x > (lockView.frame.minX - lockViewHitMargin) && location.x < (lockView.frame.maxX + lockViewHitMargin) {
+        } else if isValidLockViewLocation(location) {
             let tapGestureRecognizer = UITapGestureRecognizer(target: self, action: #selector(handleCircleViewTap))
             circleView.addGestureRecognizer(tapGestureRecognizer)
             UIView.animate(withDuration: 0.25, delay: 0, options: .transitionCrossDissolve, animations: {
@@ -279,14 +293,33 @@ final class VoiceMessageRecordingView : UIView {
     @objc private func handleCancelButtonTapped() {
         delegate.cancelVoiceMessageRecording()
     }
+
+    // MARK: Convenience
+    private func isValidLockViewLocation(_ location: CGPoint) -> Bool {
+        let lockViewHitMargin = VoiceMessageRecordingView.lockViewHitMargin
+        return location.y < 0 && location.x > (lockView.frame.minX - lockViewHitMargin) && location.x < (lockView.frame.maxX + lockViewHitMargin)
+    }
 }
 
 // MARK: Lock View
 extension VoiceMessageRecordingView {
 
     fileprivate final class LockView : UIView {
+        private lazy var widthConstraint = set(.width, to: LockView.width)
+        private(set) var isExpanded = false
+
+        private lazy var stackView: UIStackView = {
+            let result = UIStackView()
+            result.axis = .vertical
+            result.spacing = Values.smallSpacing
+            result.alignment = .center
+            result.isLayoutMarginsRelativeArrangement = true
+            result.layoutMargins = UIEdgeInsets(top: 12, leading: 0, bottom: 8, trailing: 0)
+            return result
+        }()
 
         private static let width: CGFloat = 44
+        static let expansionMargin: CGFloat = 3
         private static let lockIconSize: CGFloat = 20
         private static let chevronIconSize: CGFloat = 20
 
@@ -312,9 +345,8 @@ extension VoiceMessageRecordingView {
             addSubview(blurView)
             blurView.pin(to: self)
             // Size & shape
-            let width = LockView.width
-            set(.width, to: width)
-            layer.cornerRadius = width / 2
+            widthConstraint.isActive = true
+            layer.cornerRadius = LockView.width / 2
             layer.masksToBounds = true
             // Border
             layer.borderWidth = 1
@@ -325,21 +357,42 @@ extension VoiceMessageRecordingView {
             let lockIconSize = LockView.lockIconSize
             lockIconImageView.set(.width, to: lockIconSize)
             lockIconImageView.set(.height, to: lockIconSize)
+            stackView.addArrangedSubview(lockIconImageView)
             // Chevron icon
             let chevronIconImageView = UIImageView(image: UIImage(named: "ic_chevron_up")!.withTint(iconTint))
            let chevronIconSize = LockView.chevronIconSize
             chevronIconImageView.set(.width, to: chevronIconSize)
             chevronIconImageView.set(.height, to: chevronIconSize)
+            stackView.addArrangedSubview(chevronIconImageView)
             // Stack view
-            let stackView = UIStackView(arrangedSubviews: [ lockIconImageView, chevronIconImageView ])
-            stackView.axis = .vertical
-            stackView.spacing = Values.smallSpacing
-            stackView.alignment = .center
-            stackView.isLayoutMarginsRelativeArrangement = true
-            stackView.layoutMargins = UIEdgeInsets(top: 12, leading: 0, bottom: 8, trailing: 0)
             addSubview(stackView)
             stackView.pin(to: self)
         }
+
+        func expandIfNeeded() {
+            guard !isExpanded else { return }
+            isExpanded = true
+            let expansionMargin = LockView.expansionMargin
+            let newWidth = LockView.width + 2 * expansionMargin
+            widthConstraint.constant = newWidth
+            UIView.animate(withDuration: 0.25) {
+                self.layer.cornerRadius = newWidth / 2
+                self.stackView.layoutMargins = UIEdgeInsets(top: 12 + expansionMargin, leading: 0, bottom: 8 + expansionMargin, trailing: 0)
+                self.layoutIfNeeded()
+            }
+        }
+
+        func collapseIfNeeded() {
+            guard isExpanded else { return }
+            isExpanded = false
+            let newWidth = LockView.width
+            widthConstraint.constant = newWidth
+            UIView.animate(withDuration: 0.25) {
+                self.layer.cornerRadius = newWidth / 2
+                self.stackView.layoutMargins = UIEdgeInsets(top: 12, leading: 0, bottom: 8, trailing: 0)
+                self.layoutIfNeeded()
+            }
+        }
     }
 }
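Note on the LockView changes above: expandIfNeeded() and collapseIfNeeded() grow and shrink the lock pill by mutating a stored width constraint and then animating the corner radius, layout margins, and resulting layout. The following standalone sketch shows the same constraint-animation pattern in isolation; the ExpandableCircleView type and setExpanded(_:) method are illustrative only and do not exist in the Session codebase, and plain NSLayoutConstraint anchors are used in place of Session's set(.width, to:) helper.

import UIKit

// Minimal sketch of the expand/collapse pattern: change the constraint's
// constant (and the matching corner radius) first, then flush layout inside
// the animation block so the size change is animated.
final class ExpandableCircleView: UIView {
    private static let baseWidth: CGFloat = 44
    private static let expansionMargin: CGFloat = 3

    private lazy var widthConstraint = widthAnchor.constraint(equalToConstant: Self.baseWidth)
    private(set) var isExpanded = false

    override init(frame: CGRect) {
        super.init(frame: frame)
        widthConstraint.isActive = true
        heightAnchor.constraint(equalTo: widthAnchor).isActive = true
        layer.cornerRadius = Self.baseWidth / 2
        layer.masksToBounds = true
    }

    required init?(coder: NSCoder) { fatalError("init(coder:) has not been implemented") }

    func setExpanded(_ expanded: Bool) {
        guard expanded != isExpanded else { return }
        isExpanded = expanded
        let newWidth = expanded ? Self.baseWidth + 2 * Self.expansionMargin : Self.baseWidth
        widthConstraint.constant = newWidth
        UIView.animate(withDuration: 0.25) {
            // The corner radius tracks the new width so the view stays circular.
            self.layer.cornerRadius = newWidth / 2
            // Flushing layout from the parent inside the animation block is what
            // makes the constraint-driven size change animate.
            self.superview?.layoutIfNeeded()
        }
    }
}

A caller would simply toggle the state from its gesture handler, e.g. setExpanded(true) while the touch is over the lock area and setExpanded(false) otherwise, which mirrors how handleLongPressMoved(to:) drives lockView.expandIfNeeded() / collapseIfNeeded() in the patch.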