Mostly implement voice message recording back-end

This commit is contained in:
nielsandriesse 2021-02-16 16:36:06 +11:00
parent c4a7c31672
commit 6504996c34
7 changed files with 265 additions and 23 deletions

View File

@ -729,6 +729,7 @@
C3A7222A2558C1E40043A11F /* DotNetAPI.swift in Sources */ = {isa = PBXBuildFile; fileRef = C3A722292558C1E40043A11F /* DotNetAPI.swift */; };
C3A7225E2558C38D0043A11F /* Promise+Retaining.swift in Sources */ = {isa = PBXBuildFile; fileRef = C3A7225D2558C38D0043A11F /* Promise+Retaining.swift */; };
C3A7229C2558E4310043A11F /* OpenGroupMessage+Conversion.swift in Sources */ = {isa = PBXBuildFile; fileRef = C3A7229B2558E4310043A11F /* OpenGroupMessage+Conversion.swift */; };
C3A76A8D25DB83F90074CB90 /* PermissionMissingModal.swift in Sources */ = {isa = PBXBuildFile; fileRef = C3A76A8C25DB83F90074CB90 /* PermissionMissingModal.swift */; };
C3AABDDF2553ECF00042FF4C /* Array+Description.swift in Sources */ = {isa = PBXBuildFile; fileRef = C3C2A5D12553860800C340D1 /* Array+Description.swift */; };
C3AAFFE825AE975D0089E6DD /* ConfigurationMessage+Convenience.swift in Sources */ = {isa = PBXBuildFile; fileRef = C3AAFFDE25AE96FF0089E6DD /* ConfigurationMessage+Convenience.swift */; };
C3AAFFF225AE99710089E6DD /* AppDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = C3AAFFF125AE99710089E6DD /* AppDelegate.swift */; };
@ -1789,6 +1790,7 @@
C3A722292558C1E40043A11F /* DotNetAPI.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = DotNetAPI.swift; sourceTree = "<group>"; };
C3A7225D2558C38D0043A11F /* Promise+Retaining.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "Promise+Retaining.swift"; sourceTree = "<group>"; };
C3A7229B2558E4310043A11F /* OpenGroupMessage+Conversion.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "OpenGroupMessage+Conversion.swift"; sourceTree = "<group>"; };
C3A76A8C25DB83F90074CB90 /* PermissionMissingModal.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PermissionMissingModal.swift; sourceTree = "<group>"; };
C3AA6BB824CE8F1B002358B6 /* Migrating Translations from Android.md */ = {isa = PBXFileReference; lastKnownFileType = net.daringfireball.markdown; name = "Migrating Translations from Android.md"; path = "Meta/Translations/Migrating Translations from Android.md"; sourceTree = "<group>"; };
C3AAFFCB25AE92150089E6DD /* OpenGroupManager.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = OpenGroupManager.swift; sourceTree = "<group>"; };
C3AAFFDE25AE96FF0089E6DD /* ConfigurationMessage+Convenience.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "ConfigurationMessage+Convenience.swift"; sourceTree = "<group>"; };
@ -2249,6 +2251,7 @@
isa = PBXGroup;
children = (
B897621B25D201F7004F83B2 /* ScrollToBottomButton.swift */,
C3A76A8C25DB83F90074CB90 /* PermissionMissingModal.swift */,
B821494525D4D6FF009C0F2A /* URLModal.swift */,
B821494E25D4E163009C0F2A /* BodyTextView.swift */,
B82149B725D60393009C0F2A /* BlockedModal.swift */,
@ -5033,6 +5036,7 @@
4CB5F26720F6E1E2004D1B42 /* MenuActionsViewController.swift in Sources */,
B85A68B12587141A008CC492 /* Storage+Resetting.swift in Sources */,
3496955E219B605E00DCFE74 /* PhotoLibrary.swift in Sources */,
C3A76A8D25DB83F90074CB90 /* PermissionMissingModal.swift in Sources */,
340FC8A9204DAC8D007AEB0F /* NotificationSettingsOptionsViewController.m in Sources */,
B849789625D4A2F500D0D0B3 /* LinkPreviewViewV2.swift in Sources */,
C3D0972B2510499C00F6E3E4 /* BackgroundPoller.swift in Sources */,

View File

@ -1,3 +1,4 @@
import CoreServices
extension ConversationVC : InputViewDelegate, MessageCellDelegate, ContextMenuActionDelegate, ScrollToBottomButtonDelegate {
@ -240,4 +241,116 @@ extension ConversationVC : InputViewDelegate, MessageCellDelegate, ContextMenuAc
OWSBlockingManager.shared().removeBlockedPhoneNumber(publicKey)
})
}
/// Checks the current microphone permission and, when it isn't granted, aborts any
/// in-progress voice message recording and surfaces the appropriate UI:
/// - `.denied`: shows a modal directing the user to the iOS settings.
/// - `.undetermined`: triggers the system permission prompt (the user then has to
///   long-press again to actually start recording once access is granted).
func requestMicrophonePermissionIfNeeded() {
    switch AVAudioSession.sharedInstance().recordPermission {
    case .granted: break
    case .denied:
        cancelVoiceMessageRecording()
        let modal = PermissionMissingModal(permission: "microphone") { [weak self] in
            self?.cancelVoiceMessageRecording()
        }
        modal.modalPresentationStyle = .overFullScreen
        modal.modalTransitionStyle = .crossDissolve
        present(modal, animated: true, completion: nil)
    case .undetermined:
        cancelVoiceMessageRecording()
        // The result is deliberately ignored: recording only starts on a
        // subsequent long-press once permission has been granted.
        AVAudioSession.sharedInstance().requestRecordPermission { _ in }
    @unknown default: break // Future permission states added by the OS
    }
}
/// Starts recording a voice message into a temporary `.m4a` file, after making sure
/// microphone access has been granted and any current audio playback has stopped.
/// Recording is automatically ended after 60 seconds.
func startVoiceMessageRecording() {
    // Request permission if needed
    requestMicrophonePermissionIfNeeded()
    // Bail out unless access has already been granted; in the denied/undetermined
    // cases the call above has cancelled the recording and shown the relevant UI,
    // so continuing here would only record silence / fail.
    guard AVAudioSession.sharedInstance().recordPermission == .granted else { return }
    // Cancel any current audio playback
    audioPlayer?.stop()
    audioPlayer = nil
    // Create a unique temporary file URL for the recording
    let directory = OWSTemporaryDirectory()
    let fileName = "\(NSDate.millisecondTimestamp()).m4a"
    let path = (directory as NSString).appendingPathComponent(fileName)
    let url = URL(fileURLWithPath: path)
    // Set up audio session
    let isConfigured = audioSession.startAudioActivity(recordVoiceMessageActivity)
    guard isConfigured else {
        return cancelVoiceMessageRecording()
    }
    // Set up audio recorder (AAC, 44.1 kHz, stereo, 128 kbps)
    let settings: [String:NSNumber] = [
        AVFormatIDKey : NSNumber(value: kAudioFormatMPEG4AAC),
        AVSampleRateKey : NSNumber(value: 44100),
        AVNumberOfChannelsKey : NSNumber(value: 2),
        AVEncoderBitRateKey : NSNumber(value: 128 * 1024)
    ]
    let audioRecorder: AVAudioRecorder
    do {
        audioRecorder = try AVAudioRecorder(url: url, settings: settings)
        audioRecorder.isMeteringEnabled = true
        self.audioRecorder = audioRecorder
    } catch {
        SNLog("Couldn't start audio recording due to error: \(error).")
        return cancelVoiceMessageRecording()
    }
    // Limit voice messages to a minute
    audioTimer = Timer.scheduledTimer(withTimeInterval: 60, repeats: false, block: { [weak self] _ in
        self?.snInputView.hideVoiceMessageUI()
        self?.endVoiceMessageRecording()
    })
    // Prepare audio recorder
    guard audioRecorder.prepareToRecord() else {
        SNLog("Couldn't prepare audio recorder.")
        return cancelVoiceMessageRecording()
    }
    // Start recording
    guard audioRecorder.record() else {
        SNLog("Couldn't record audio.")
        return cancelVoiceMessageRecording()
    }
}
/// Finishes an in-progress voice message recording: tears down the recording UI,
/// stops the recorder, and — provided the clip is longer than one second — wraps
/// the recorded file in a `SignalAttachment` ready for sending.
func endVoiceMessageRecording() {
// Hide the UI
snInputView.hideVoiceMessageUI()
// Cancel the timer
audioTimer?.invalidate()
// Check preconditions
guard let audioRecorder = audioRecorder else { return }
// Get duration — read before stopping; per AVAudioRecorder docs currentTime is only valid while recording
let duration = audioRecorder.currentTime
// Stop the recording
stopVoiceMessageRecording()
// Check for user misunderstanding (an accidental tap rather than a deliberate hold)
guard duration > 1 else {
self.audioRecorder = nil
// TODO: Show modal explaining what's up
return
}
// Get data; shouldDeleteOnDeallocation cleans up the temp file once the data source is released
let dataSourceOrNil = DataSourcePath.dataSource(with: audioRecorder.url, shouldDeleteOnDeallocation: true)
self.audioRecorder = nil
guard let dataSource = dataSourceOrNil else { return SNLog("Couldn't load recorded data.") }
// Create attachment with a localized display name (the on-disk file keeps its timestamp name)
let fileName = (NSLocalizedString("VOICE_MESSAGE_FILE_NAME", comment: "") as NSString).appendingPathExtension("m4a")
dataSource.sourceFilename = fileName
let attachment = SignalAttachment.voiceMessageAttachment(dataSource: dataSource, dataUTI: kUTTypeMPEG4Audio as String)
guard !attachment.hasError else {
// TODO: Show error UI
return
}
// Send attachment
// TODO: Send the attachment
}
/// Aborts an in-progress voice message recording, discarding the recorded audio.
/// Safe to call when nothing is being recorded (all steps are no-ops then).
func cancelVoiceMessageRecording() {
// Tear down the recording UI and the 60-second limit timer
snInputView.hideVoiceMessageUI()
audioTimer?.invalidate()
// Stop the recorder and end the audio activity, then drop the recorder reference
stopVoiceMessageRecording()
audioRecorder = nil
}
/// Stops the recorder (if any) and ends the recording audio activity so the shared
/// audio session can be reconfigured (e.g. for playback) again.
func stopVoiceMessageRecording() {
audioRecorder?.stop()
audioSession.endAudioActivity(recordVoiceMessageActivity)
}
}

View File

@ -12,8 +12,11 @@
final class ConversationVC : BaseVC, ConversationViewModelDelegate, UITableViewDataSource, UITableViewDelegate {
let thread: TSThread
private let focusedMessageID: String?
var audioPlayer: OWSAudioPlayer?
private var didConstrainScrollButton = false
// Audio playback & recording
var audioPlayer: OWSAudioPlayer?
var audioRecorder: AVAudioRecorder?
var audioTimer: Timer?
// Context menu
var contextMenuWindow: ContextMenuWindow?
var contextMenuVC: ContextMenuVC?
@ -22,7 +25,8 @@ final class ConversationVC : BaseVC, ConversationViewModelDelegate, UITableViewD
private var hasPerformedInitialScroll = false
private var isLoadingMore = false
private var scrollDistanceToBottomBeforeUpdate: CGFloat?
var audioSession: OWSAudioSession { Environment.shared.audioSession }
private var dbConnection: YapDatabaseConnection { OWSPrimaryStorage.shared().uiDatabaseConnection }
var viewItems: [ConversationViewItem] { viewModel.viewState.viewItems }
func conversationStyle() -> ConversationStyle { return ConversationStyle(thread: thread) }
@ -45,6 +49,8 @@ final class ConversationVC : BaseVC, ConversationViewModelDelegate, UITableViewD
result.countLimit = 40
return result
}()
lazy var recordVoiceMessageActivity = AudioActivity(audioDescription: "Voice message", behavior: .playAndRecord)
// MARK: UI Components
private lazy var titleView = ConversationTitleViewV2(thread: thread)

View File

@ -25,9 +25,10 @@ final class InputView : UIView, InputViewButtonDelegate, InputTextViewDelegate,
private lazy var voiceMessageButton = InputViewButton(icon: #imageLiteral(resourceName: "Microphone"), delegate: self)
private lazy var sendButton: InputViewButton = {
let result = InputViewButton(icon: #imageLiteral(resourceName: "ArrowUp"), isSendButton: true, delegate: self)
result.alpha = 0
result.isHidden = true
return result
}()
private lazy var voiceMessageButtonContainer = container(for: voiceMessageButton)
private lazy var inputTextView = InputTextView(delegate: self)
@ -73,14 +74,6 @@ final class InputView : UIView, InputViewButtonDelegate, InputTextViewDelegate,
addSubview(separator)
separator.pin([ UIView.HorizontalEdge.leading, UIView.VerticalEdge.top, UIView.HorizontalEdge.trailing ], to: self)
// Buttons
func container(for button: InputViewButton) -> UIView {
let result = UIView()
result.addSubview(button)
result.set(.width, to: InputViewButton.expandedSize)
result.set(.height, to: InputViewButton.expandedSize)
button.center(in: result)
return result
}
let (cameraButtonContainer, libraryButtonContainer, gifButtonContainer, documentButtonContainer) = (container(for: cameraButton), container(for: libraryButton), container(for: gifButton), container(for: documentButton))
let buttonStackView = UIStackView(arrangedSubviews: [ cameraButtonContainer, libraryButtonContainer, gifButtonContainer, documentButtonContainer, UIView.hStretchingSpacer() ])
buttonStackView.axis = .horizontal
@ -101,7 +94,6 @@ final class InputView : UIView, InputViewButtonDelegate, InputTextViewDelegate,
mainStackView.pin([ UIView.HorizontalEdge.leading, UIView.HorizontalEdge.trailing ], to: self)
mainStackView.pin(.bottom, to: .bottom, of: self, withInset: -2)
// Voice message button
let voiceMessageButtonContainer = container(for: voiceMessageButton)
addSubview(voiceMessageButtonContainer)
voiceMessageButtonContainer.center(in: sendButton)
}
@ -113,8 +105,8 @@ final class InputView : UIView, InputViewButtonDelegate, InputTextViewDelegate,
/// Called whenever the input text changes: toggles between the send button (when
/// there is text) and the voice message button (when there isn't), then kicks off
/// link preview generation if possible.
// NOTE(review): both alpha and isHidden are toggled here — the alpha lines look
// superseded by the isHidden ones; confirm only one mechanism is needed.
func inputTextViewDidChangeContent(_ inputTextView: InputTextView) {
let hasText = !text.isEmpty
sendButton.alpha = hasText ? 1 : 0
voiceMessageButton.alpha = hasText ? 0 : 1
sendButton.isHidden = !hasText
voiceMessageButtonContainer.isHidden = hasText
autoGenerateLinkPreviewIfPossible()
}
@ -190,8 +182,17 @@ final class InputView : UIView, InputViewButtonDelegate, InputTextViewDelegate,
if inputViewButton == sendButton { delegate.handleSendButtonTapped() }
}
func handleInputViewButtonLongPressed(_ inputViewButton: InputViewButton) {
if inputViewButton == voiceMessageButton { showVoiceMessageUI() }
/// A long press on the voice message button starts a recording (via the delegate)
/// and shows the voice message recording overlay. Long presses on other buttons
/// are ignored.
func handleInputViewButtonLongPressBegan(_ inputViewButton: InputViewButton) {
    guard inputViewButton == voiceMessageButton else { return }
    delegate.startVoiceMessageRecording()
    showVoiceMessageUI()
}
/// Forwards the end of a long press to the recording overlay (when visible) so it
/// can decide, based on the release location, what to do with the recording.
func handleInputViewButtonLongPressEnded(_ inputViewButton: InputViewButton, with touch: UITouch) {
    if let recordingView = voiceMessageRecordingView {
        recordingView.handleLongPressEnded(at: touch.location(in: recordingView))
    }
}
func handleQuoteViewCancelButtonTapped() {
@ -214,7 +215,7 @@ final class InputView : UIView, InputViewButtonDelegate, InputTextViewDelegate,
@objc private func showVoiceMessageUI() {
voiceMessageRecordingView?.removeFromSuperview()
let voiceMessageButtonFrame = voiceMessageButton.superview!.convert(voiceMessageButton.frame, to: self)
let voiceMessageRecordingView = VoiceMessageRecordingView(voiceMessageButtonFrame: voiceMessageButtonFrame)
let voiceMessageRecordingView = VoiceMessageRecordingView(voiceMessageButtonFrame: voiceMessageButtonFrame, delegate: delegate)
voiceMessageRecordingView.alpha = 0
addSubview(voiceMessageRecordingView)
voiceMessageRecordingView.pin(to: self)
@ -225,10 +226,31 @@ final class InputView : UIView, InputViewButtonDelegate, InputTextViewDelegate,
allOtherViews.forEach { $0.alpha = 0 }
}
}
/// Fades the regular input controls back in while fading out the voice message
/// recording overlay, removing the overlay from the view hierarchy once the
/// animation completes.
func hideVoiceMessageUI() {
    let viewsToRestore = [ cameraButton, libraryButton, gifButton, documentButton, sendButton, inputTextView, additionalContentContainer ]
    UIView.animate(withDuration: 0.25, animations: {
        for view in viewsToRestore { view.alpha = 1 }
        self.voiceMessageRecordingView?.alpha = 0
    }, completion: { _ in
        self.voiceMessageRecordingView?.removeFromSuperview()
        self.voiceMessageRecordingView = nil
    })
}
// MARK: Convenience
/// Wraps a button in a fixed-size (expandedSize × expandedSize) container view so
/// the button can grow/shrink on press without affecting the surrounding layout.
private func container(for button: InputViewButton) -> UIView {
    let wrapper = UIView()
    wrapper.set(.width, to: InputViewButton.expandedSize)
    wrapper.set(.height, to: InputViewButton.expandedSize)
    wrapper.addSubview(button)
    button.center(in: wrapper)
    return wrapper
}
}
// MARK: Delegate
protocol InputViewDelegate {
protocol InputViewDelegate : VoiceMessageRecordingViewDelegate {
func showLinkPreviewSuggestionModal()
func handleCameraButtonTapped()

View File

@ -79,14 +79,16 @@ final class InputViewButton : UIView {
longPressTimer = Timer.scheduledTimer(withTimeInterval: 0.5, repeats: false, block: { [weak self] _ in
guard let self = self else { return }
self.isLongPress = true
self.delegate.handleInputViewButtonLongPressed(self)
self.delegate.handleInputViewButtonLongPressBegan(self)
})
}
/// Collapses the button and dispatches either the end of a long press or a plain
/// tap, depending on whether the long-press timer already fired.
override func touchesEnded(_ touches: Set<UITouch>, with event: UIEvent?) {
    collapse()
    if isLongPress {
        delegate.handleInputViewButtonLongPressEnded(self, with: touches.first!)
    } else {
        delegate.handleInputViewButtonTapped(self)
    }
    invalidateLongPressIfNeeded()
}
@ -106,5 +108,6 @@ final class InputViewButton : UIView {
/// Implemented by the owner of the input view buttons to react to taps and to the
/// begin/end of long presses.
// NOTE(review): handleInputViewButtonLongPressed looks superseded by the
// Began/Ended pair below — confirm whether it still has conformers before removing.
protocol InputViewButtonDelegate {
func handleInputViewButtonTapped(_ inputViewButton: InputViewButton)
func handleInputViewButtonLongPressed(_ inputViewButton: InputViewButton)
func handleInputViewButtonLongPressBegan(_ inputViewButton: InputViewButton)
func handleInputViewButtonLongPressEnded(_ inputViewButton: InputViewButton, with touch: UITouch)
}

View File

@ -1,6 +1,7 @@
final class VoiceMessageRecordingView : UIView {
private let voiceMessageButtonFrame: CGRect
private let delegate: VoiceMessageRecordingViewDelegate
private lazy var slideToCancelStackViewRightConstraint = slideToCancelStackView.pin(.right, to: .right, of: self)
private lazy var slideToCancelLabelCenterHorizontalConstraint = slideToCancelLabel.center(.horizontal, in: self)
private lazy var pulseViewWidthConstraint = pulseView.set(.width, to: VoiceMessageRecordingView.circleSize)
@ -72,8 +73,9 @@ final class VoiceMessageRecordingView : UIView {
private static let dotSize: CGFloat = 16
// MARK: Lifecycle
init(voiceMessageButtonFrame: CGRect) {
init(voiceMessageButtonFrame: CGRect, delegate: VoiceMessageRecordingViewDelegate) {
self.voiceMessageButtonFrame = voiceMessageButtonFrame
self.delegate = delegate
super.init(frame: CGRect.zero)
setUpViewHierarchy()
recordingTimer = Timer.scheduledTimer(withTimeInterval: 1, repeats: true) { [weak self] _ in
@ -138,7 +140,7 @@ final class VoiceMessageRecordingView : UIView {
durationStackView.center(.vertical, in: iconImageView)
// Lock view
addSubview(lockView)
lockView.center(.horizontal, in: iconImageView)
lockView.centerXAnchor.constraint(equalTo: iconImageView.centerXAnchor, constant: 2).isActive = true
lockViewBottomConstraint.isActive = true
}
@ -204,6 +206,15 @@ final class VoiceMessageRecordingView : UIView {
self.pulse()
})
}
// MARK: Interaction
/// Decides what the release of the long press means based on where the finger was
/// lifted: over the pulse (record) circle ends the recording; over the lock view
/// the recording should be locked in place (not implemented yet). Releases
/// anywhere else are currently ignored — presumably cancellation is handled
/// elsewhere (TODO confirm).
func handleLongPressEnded(at location: CGPoint) {
    if pulseView.frame.contains(location) {
        delegate.endVoiceMessageRecording()
    } else if lockView.frame.contains(location) {
        // Removed leftover debug print; lock-in-place behavior still to be built.
        // TODO: Lock the recording UI in place
    }
}
}
// MARK: Lock View
@ -267,3 +278,11 @@ extension VoiceMessageRecordingView {
}
}
}
// MARK: Delegate
/// Implemented by the conversation screen to drive the actual audio recording in
/// response to gestures on the voice message recording overlay.
protocol VoiceMessageRecordingViewDelegate {
func startVoiceMessageRecording()
func endVoiceMessageRecording()
func cancelVoiceMessageRecording()
}

View File

@ -0,0 +1,75 @@
/// A modal explaining that a system permission (e.g. microphone access) is missing
/// and offering a shortcut to the iOS settings app. `onCancel` is invoked whenever
/// the modal is closed without visiting the settings.
final class PermissionMissingModal : Modal {
    private let permission: String
    private let onCancel: () -> Void

    // MARK: Lifecycle
    /// - Parameters:
    ///   - permission: Human-readable permission name; it is interpolated into the
    ///     message and rendered in bold.
    ///   - onCancel: Called when the modal is dismissed via its cancel button.
    init(permission: String, onCancel: @escaping () -> Void) {
        self.permission = permission
        self.onCancel = onCancel
        super.init(nibName: nil, bundle: nil)
    }

    override init(nibName: String?, bundle: Bundle?) {
        // Fixed message: the designated initializer is init(permission:onCancel:).
        preconditionFailure("Use init(permission:onCancel:) instead.")
    }

    required init?(coder: NSCoder) {
        preconditionFailure("Use init(permission:onCancel:) instead.")
    }

    override func populateContentView() {
        // Title
        let titleLabel = UILabel()
        titleLabel.textColor = Colors.text
        titleLabel.font = .boldSystemFont(ofSize: Values.largeFontSize)
        titleLabel.text = "Session" // TODO: localize
        titleLabel.textAlignment = .center
        // Message, with the permission name bolded
        let messageLabel = UILabel()
        messageLabel.textColor = Colors.text
        messageLabel.font = .systemFont(ofSize: Values.smallFontSize)
        let message = "Session needs \(permission) access to continue. You can enable access in the iOS settings." // TODO: localize
        let attributedMessage = NSMutableAttributedString(string: message)
        attributedMessage.addAttributes([ .font : UIFont.boldSystemFont(ofSize: Values.smallFontSize) ], range: (message as NSString).range(of: permission))
        messageLabel.attributedText = attributedMessage
        messageLabel.numberOfLines = 0
        messageLabel.lineBreakMode = .byWordWrapping
        messageLabel.textAlignment = .center
        // Settings button
        let settingsButton = UIButton()
        settingsButton.set(.height, to: Values.mediumButtonHeight)
        settingsButton.layer.cornerRadius = Values.modalButtonCornerRadius
        settingsButton.backgroundColor = Colors.buttonBackground
        settingsButton.titleLabel!.font = .systemFont(ofSize: Values.smallFontSize)
        settingsButton.setTitleColor(Colors.text, for: UIControl.State.normal)
        settingsButton.setTitle("Settings", for: UIControl.State.normal) // TODO: localize
        settingsButton.addTarget(self, action: #selector(goToSettings), for: UIControl.Event.touchUpInside)
        // Button stack view (cancelButton is provided by the Modal superclass)
        let buttonStackView = UIStackView(arrangedSubviews: [ cancelButton, settingsButton ])
        buttonStackView.axis = .horizontal
        buttonStackView.spacing = Values.mediumSpacing
        buttonStackView.distribution = .fillEqually
        // Main stack view
        let mainStackView = UIStackView(arrangedSubviews: [ titleLabel, messageLabel, buttonStackView ])
        mainStackView.axis = .vertical
        mainStackView.spacing = Values.largeSpacing
        contentView.addSubview(mainStackView)
        mainStackView.pin(.leading, to: .leading, of: contentView, withInset: Values.largeSpacing)
        mainStackView.pin(.top, to: .top, of: contentView, withInset: Values.largeSpacing)
        contentView.pin(.trailing, to: .trailing, of: mainStackView, withInset: Values.largeSpacing)
        contentView.pin(.bottom, to: .bottom, of: mainStackView, withInset: Values.largeSpacing)
    }

    // MARK: Interaction
    /// Dismisses the modal and deep-links into this app's page in the iOS settings.
    @objc private func goToSettings() {
        presentingViewController?.dismiss(animated: true, completion: {
            // openSettingsURLString is documented to always form a valid URL.
            UIApplication.shared.open(URL(string: UIApplication.openSettingsURLString)!)
        })
    }

    override func close() {
        super.close()
        onCancel()
    }
}