Gather audio concerns, clean up session when done

- Sync speakerphone state when it's manipulated from the system call screen
  - Revert the audio session after a call failure; this ensures media plays out of
    the speaker after placing a failed call.
  - Replace the notification with a delegate callback, since we're already using
    the delegate pattern here.
- Fix VoiceOver accessibility after recording a voice memo
- Avoid audio blip after pressing hangup
- Rename CallAudioSession -> OWSAudioSession
  We're going to start using it for other, non-call things, since we want to
  gather all our audio session concerns in one place (see the usage sketch below).
- Resume background audio when done playing video
  - Extract OWSVideoPlayer, which ensures the audio session is in the proper
    state before playback
  - Move recording session logic to the shared OWSAudioSession
  - Deactivate the audio session when complete

// FREEBIE
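A minimal sketch of the caller-side pattern described in the bullets above, using the OWSAudioSession API added in this commit (setPlaybackCategory, setRecordCategory, endAudioActivity). The class, method, and property names come from the new OWSAudioSession.swift below; the ExampleAudioClip wrapper itself is hypothetical:

import AVFoundation

// Illustrative only: a media-playing component routes all of its audio
// session concerns through the shared OWSAudioSession.
class ExampleAudioClip {
    private let player: AVAudioPlayer

    init(url: URL) throws {
        player = try AVAudioPlayer(contentsOf: url)
    }

    func play() {
        // The playback category ignores the hardware mute switch and plays
        // through the external speaker.
        OWSAudioSession.shared.setPlaybackCategory()
        player.play()
    }

    func stop() {
        player.stop()
        // Revert to the default category and deactivate the session with
        // .notifyOthersOnDeactivation so background audio (e.g. Music) resumes.
        OWSAudioSession.shared.endAudioActivity()
    }
}

The OWSAudioAttachmentPlayer and OWSVideoPlayer changes in the diff below follow this same pairing: set a category before the activity starts, end the audio activity when it finishes.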
Michael Kirk 2018-02-02 15:35:32 -08:00
parent fa9ac5aa46
commit c646f76335
18 changed files with 364 additions and 240 deletions

View file

@ -307,9 +307,10 @@
45DF5DF21DDB843F00C936C7 /* CompareSafetyNumbersActivity.swift in Sources */ = {isa = PBXBuildFile; fileRef = 45DF5DF11DDB843F00C936C7 /* CompareSafetyNumbersActivity.swift */; }; 45DF5DF21DDB843F00C936C7 /* CompareSafetyNumbersActivity.swift in Sources */ = {isa = PBXBuildFile; fileRef = 45DF5DF11DDB843F00C936C7 /* CompareSafetyNumbersActivity.swift */; };
45E5A6991F61E6DE001E4A8A /* MarqueeLabel.swift in Sources */ = {isa = PBXBuildFile; fileRef = 45E5A6981F61E6DD001E4A8A /* MarqueeLabel.swift */; }; 45E5A6991F61E6DE001E4A8A /* MarqueeLabel.swift in Sources */ = {isa = PBXBuildFile; fileRef = 45E5A6981F61E6DD001E4A8A /* MarqueeLabel.swift */; };
45E7A6A81E71CA7E00D44FB5 /* DisplayableTextFilterTest.swift in Sources */ = {isa = PBXBuildFile; fileRef = 45E7A6A61E71CA7E00D44FB5 /* DisplayableTextFilterTest.swift */; }; 45E7A6A81E71CA7E00D44FB5 /* DisplayableTextFilterTest.swift in Sources */ = {isa = PBXBuildFile; fileRef = 45E7A6A61E71CA7E00D44FB5 /* DisplayableTextFilterTest.swift */; };
45F170AC1E2F0351003FC1F2 /* CallAudioSession.swift in Sources */ = {isa = PBXBuildFile; fileRef = 45F170AB1E2F0351003FC1F2 /* CallAudioSession.swift */; };
45F170BB1E2FC5D3003FC1F2 /* CallAudioService.swift in Sources */ = {isa = PBXBuildFile; fileRef = 45F170BA1E2FC5D3003FC1F2 /* CallAudioService.swift */; }; 45F170BB1E2FC5D3003FC1F2 /* CallAudioService.swift in Sources */ = {isa = PBXBuildFile; fileRef = 45F170BA1E2FC5D3003FC1F2 /* CallAudioService.swift */; };
45F170D61E315310003FC1F2 /* Weak.swift in Sources */ = {isa = PBXBuildFile; fileRef = 45F170D51E315310003FC1F2 /* Weak.swift */; }; 45F170D61E315310003FC1F2 /* Weak.swift in Sources */ = {isa = PBXBuildFile; fileRef = 45F170D51E315310003FC1F2 /* Weak.swift */; };
45F59A082028E4FB00E8D2B0 /* OWSAudioSession.swift in Sources */ = {isa = PBXBuildFile; fileRef = 45F170AB1E2F0351003FC1F2 /* OWSAudioSession.swift */; };
45F59A0A2029140500E8D2B0 /* OWSVideoPlayer.swift in Sources */ = {isa = PBXBuildFile; fileRef = 45F59A092029140500E8D2B0 /* OWSVideoPlayer.swift */; };
45F659731E1BD99C00444429 /* CallKitCallUIAdaptee.swift in Sources */ = {isa = PBXBuildFile; fileRef = 45F659721E1BD99C00444429 /* CallKitCallUIAdaptee.swift */; }; 45F659731E1BD99C00444429 /* CallKitCallUIAdaptee.swift in Sources */ = {isa = PBXBuildFile; fileRef = 45F659721E1BD99C00444429 /* CallKitCallUIAdaptee.swift */; };
45F659821E1BE77000444429 /* NonCallKitCallUIAdaptee.swift in Sources */ = {isa = PBXBuildFile; fileRef = 45F659811E1BE77000444429 /* NonCallKitCallUIAdaptee.swift */; }; 45F659821E1BE77000444429 /* NonCallKitCallUIAdaptee.swift in Sources */ = {isa = PBXBuildFile; fileRef = 45F659811E1BE77000444429 /* NonCallKitCallUIAdaptee.swift */; };
45FBC5C81DF8575700E9B410 /* CallKitCallManager.swift in Sources */ = {isa = PBXBuildFile; fileRef = 45FBC59A1DF8575700E9B410 /* CallKitCallManager.swift */; }; 45FBC5C81DF8575700E9B410 /* CallKitCallManager.swift in Sources */ = {isa = PBXBuildFile; fileRef = 45FBC59A1DF8575700E9B410 /* CallKitCallManager.swift */; };
@ -855,11 +856,12 @@
45E2E91E1E13EE3500457AA0 /* OWSCallNotificationsAdaptee.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; lineEnding = 0; name = OWSCallNotificationsAdaptee.h; path = UserInterface/OWSCallNotificationsAdaptee.h; sourceTree = "<group>"; xcLanguageSpecificationIdentifier = xcode.lang.objcpp; }; 45E2E91E1E13EE3500457AA0 /* OWSCallNotificationsAdaptee.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; lineEnding = 0; name = OWSCallNotificationsAdaptee.h; path = UserInterface/OWSCallNotificationsAdaptee.h; sourceTree = "<group>"; xcLanguageSpecificationIdentifier = xcode.lang.objcpp; };
45E5A6981F61E6DD001E4A8A /* MarqueeLabel.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = MarqueeLabel.swift; sourceTree = "<group>"; }; 45E5A6981F61E6DD001E4A8A /* MarqueeLabel.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = MarqueeLabel.swift; sourceTree = "<group>"; };
45E7A6A61E71CA7E00D44FB5 /* DisplayableTextFilterTest.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = DisplayableTextFilterTest.swift; sourceTree = "<group>"; }; 45E7A6A61E71CA7E00D44FB5 /* DisplayableTextFilterTest.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = DisplayableTextFilterTest.swift; sourceTree = "<group>"; };
45F170AB1E2F0351003FC1F2 /* CallAudioSession.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = CallAudioSession.swift; sourceTree = "<group>"; }; 45F170AB1E2F0351003FC1F2 /* OWSAudioSession.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = OWSAudioSession.swift; sourceTree = "<group>"; };
45F170B31E2F0A6A003FC1F2 /* RTCAudioSession.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = RTCAudioSession.h; sourceTree = "<group>"; }; 45F170B31E2F0A6A003FC1F2 /* RTCAudioSession.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = RTCAudioSession.h; sourceTree = "<group>"; };
45F170BA1E2FC5D3003FC1F2 /* CallAudioService.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = CallAudioService.swift; sourceTree = "<group>"; }; 45F170BA1E2FC5D3003FC1F2 /* CallAudioService.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = CallAudioService.swift; sourceTree = "<group>"; };
45F170D51E315310003FC1F2 /* Weak.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = Weak.swift; sourceTree = "<group>"; }; 45F170D51E315310003FC1F2 /* Weak.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = Weak.swift; sourceTree = "<group>"; };
45F3AEB51DFDE7900080CE33 /* AvatarImageView.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = AvatarImageView.swift; sourceTree = "<group>"; }; 45F3AEB51DFDE7900080CE33 /* AvatarImageView.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = AvatarImageView.swift; sourceTree = "<group>"; };
45F59A092029140500E8D2B0 /* OWSVideoPlayer.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = OWSVideoPlayer.swift; sourceTree = "<group>"; };
45F659721E1BD99C00444429 /* CallKitCallUIAdaptee.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = CallKitCallUIAdaptee.swift; sourceTree = "<group>"; }; 45F659721E1BD99C00444429 /* CallKitCallUIAdaptee.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = CallKitCallUIAdaptee.swift; sourceTree = "<group>"; };
45F659811E1BE77000444429 /* NonCallKitCallUIAdaptee.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = NonCallKitCallUIAdaptee.swift; sourceTree = "<group>"; }; 45F659811E1BE77000444429 /* NonCallKitCallUIAdaptee.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = NonCallKitCallUIAdaptee.swift; sourceTree = "<group>"; };
45FBC59A1DF8575700E9B410 /* CallKitCallManager.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = CallKitCallManager.swift; sourceTree = "<group>"; }; 45FBC59A1DF8575700E9B410 /* CallKitCallManager.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = CallKitCallManager.swift; sourceTree = "<group>"; };
@ -1200,6 +1202,7 @@
346129921FD1E30000532771 /* migrations */, 346129921FD1E30000532771 /* migrations */,
347850671FD9B78A007B8332 /* NoopCallMessageHandler.swift */, 347850671FD9B78A007B8332 /* NoopCallMessageHandler.swift */,
347850681FD9B78A007B8332 /* NoopNotificationsManager.swift */, 347850681FD9B78A007B8332 /* NoopNotificationsManager.swift */,
45F170AB1E2F0351003FC1F2 /* OWSAudioSession.swift */,
346129561FD1D74B00532771 /* Release.h */, 346129561FD1D74B00532771 /* Release.h */,
346129571FD1D74B00532771 /* Release.m */, 346129571FD1D74B00532771 /* Release.m */,
346129581FD1D74B00532771 /* SignalKeyingStorage.h */, 346129581FD1D74B00532771 /* SignalKeyingStorage.h */,
@ -1587,6 +1590,7 @@
34CA1C281F7164F700E51C51 /* MediaMessageView.swift */, 34CA1C281F7164F700E51C51 /* MediaMessageView.swift */,
45BC829C1FD9C4B400011CF3 /* ShareViewDelegate.swift */, 45BC829C1FD9C4B400011CF3 /* ShareViewDelegate.swift */,
453034AA200289F50018945D /* VideoPlayerView.swift */, 453034AA200289F50018945D /* VideoPlayerView.swift */,
45F59A092029140500E8D2B0 /* OWSVideoPlayer.swift */,
); );
path = attachments; path = attachments;
sourceTree = "<group>"; sourceTree = "<group>";
@ -1699,7 +1703,6 @@
45FBC5D01DF8592E00E9B410 /* SignalCall.swift */, 45FBC5D01DF8592E00E9B410 /* SignalCall.swift */,
458DE9D51DEE3FD00071BB03 /* PeerConnectionClient.swift */, 458DE9D51DEE3FD00071BB03 /* PeerConnectionClient.swift */,
4574A5D51DD6704700C6B692 /* CallService.swift */, 4574A5D51DD6704700C6B692 /* CallService.swift */,
45F170AB1E2F0351003FC1F2 /* CallAudioSession.swift */,
45F170BA1E2FC5D3003FC1F2 /* CallAudioService.swift */, 45F170BA1E2FC5D3003FC1F2 /* CallAudioService.swift */,
452C468E1E427E200087B011 /* OutboundCallInitiator.swift */, 452C468E1E427E200087B011 /* OutboundCallInitiator.swift */,
); );
@ -2768,6 +2771,7 @@
isa = PBXSourcesBuildPhase; isa = PBXSourcesBuildPhase;
buildActionMask = 2147483647; buildActionMask = 2147483647;
files = ( files = (
45F59A0A2029140500E8D2B0 /* OWSVideoPlayer.swift in Sources */,
344F249B200FD03300CFB4F4 /* SharingThreadPickerViewController.m in Sources */, 344F249B200FD03300CFB4F4 /* SharingThreadPickerViewController.m in Sources */,
45194F951FD7216600333B2C /* TSUnreadIndicatorInteraction.m in Sources */, 45194F951FD7216600333B2C /* TSUnreadIndicatorInteraction.m in Sources */,
45BE4EA22012AD2000935E59 /* DisappearingTimerConfigurationView.swift in Sources */, 45BE4EA22012AD2000935E59 /* DisappearingTimerConfigurationView.swift in Sources */,
@ -2831,6 +2835,7 @@
451F8A481FD715BA005CB9DA /* OWSContactAvatarBuilder.m in Sources */, 451F8A481FD715BA005CB9DA /* OWSContactAvatarBuilder.m in Sources */,
346129A61FD1F09100532771 /* OWSContactsManager.m in Sources */, 346129A61FD1F09100532771 /* OWSContactsManager.m in Sources */,
346129D21FD2085A00532771 /* CommonStrings.swift in Sources */, 346129D21FD2085A00532771 /* CommonStrings.swift in Sources */,
45F59A082028E4FB00E8D2B0 /* OWSAudioSession.swift in Sources */,
34612A071FD7238600532771 /* OWSContactsSyncing.m in Sources */, 34612A071FD7238600532771 /* OWSContactsSyncing.m in Sources */,
346129DF1FD5C02A00532771 /* LockInteractionController.m in Sources */, 346129DF1FD5C02A00532771 /* LockInteractionController.m in Sources */,
451F8A471FD715BA005CB9DA /* OWSAvatarBuilder.m in Sources */, 451F8A471FD715BA005CB9DA /* OWSAvatarBuilder.m in Sources */,
@ -2926,7 +2931,6 @@
34330AA31E79686200DF2FB9 /* OWSProgressView.m in Sources */, 34330AA31E79686200DF2FB9 /* OWSProgressView.m in Sources */,
34CA1C271F7156F300E51C51 /* MessageDetailViewController.swift in Sources */, 34CA1C271F7156F300E51C51 /* MessageDetailViewController.swift in Sources */,
34D5CCA91EAE3D30005515DB /* AvatarViewHelper.m in Sources */, 34D5CCA91EAE3D30005515DB /* AvatarViewHelper.m in Sources */,
45F170AC1E2F0351003FC1F2 /* CallAudioSession.swift in Sources */,
34D1F0B71F87F8850066283D /* OWSGenericAttachmentView.m in Sources */, 34D1F0B71F87F8850066283D /* OWSGenericAttachmentView.m in Sources */,
34B3F8801E8DF1700035BE1A /* InviteFlow.swift in Sources */, 34B3F8801E8DF1700035BE1A /* InviteFlow.swift in Sources */,
34B3F8871E8DF1700035BE1A /* NotificationSettingsViewController.m in Sources */, 34B3F8871E8DF1700035BE1A /* NotificationSettingsViewController.m in Sources */,

View file

@ -10,7 +10,7 @@ import SignalMessaging
// TODO: Add category so that button handlers can be defined where button is created. // TODO: Add category so that button handlers can be defined where button is created.
// TODO: Ensure buttons enabled & disabled as necessary. // TODO: Ensure buttons enabled & disabled as necessary.
class CallViewController: OWSViewController, CallObserver, CallServiceObserver { class CallViewController: OWSViewController, CallObserver, CallServiceObserver, CallAudioServiceDelegate {
let TAG = "[CallViewController]" let TAG = "[CallViewController]"
@ -140,6 +140,9 @@ class CallViewController: OWSViewController, CallObserver, CallServiceObserver {
self.call = call self.call = call
self.thread = TSContactThread.getOrCreateThread(contactId: call.remotePhoneNumber) self.thread = TSContactThread.getOrCreateThread(contactId: call.remotePhoneNumber)
super.init(nibName: nil, bundle: nil) super.init(nibName: nil, bundle: nil)
assert(callUIAdapter.audioService.delegate == nil)
callUIAdapter.audioService.delegate = self
observeNotifications() observeNotifications()
} }
@ -148,10 +151,6 @@ class CallViewController: OWSViewController, CallObserver, CallServiceObserver {
selector:#selector(didBecomeActive), selector:#selector(didBecomeActive),
name:NSNotification.Name.OWSApplicationDidBecomeActive, name:NSNotification.Name.OWSApplicationDidBecomeActive,
object:nil) object:nil)
NotificationCenter.default.addObserver(forName: CallAudioServiceSessionChanged, object: nil, queue: nil) { [weak self] _ in
self?.didChangeAudioSession()
}
} }
deinit { deinit {
@ -379,21 +378,6 @@ class CallViewController: OWSViewController, CallObserver, CallServiceObserver {
]) ])
} }
func didChangeAudioSession() {
AssertIsOnMainThread()
// Which sources are available depends on the state of your Session.
// When the audio session is not yet in PlayAndRecord none are available
// Then if we're in speakerphone, bluetooth isn't available.
// So we acrew all possible audio sources in a set, and that list lives as longs as the CallViewController
// The downside of this is that if you e.g. unpair your bluetooth mid call, it will still appear as an option
// until your next call.
// FIXME: There's got to be a better way, but this is where I landed after a bit of work, and seems to work
// pretty well in practrice.
let availableInputs = callUIAdapter.audioService.availableInputs
self.allAudioSources.formUnion(availableInputs)
}
func presentAudioSourcePicker() { func presentAudioSourcePicker() {
AssertIsOnMainThread() AssertIsOnMainThread()
@ -724,7 +708,7 @@ class CallViewController: OWSViewController, CallObserver, CallServiceObserver {
return return
} }
// Marquee scrolling is distractingn during a video call, disable it. // Marquee scrolling is distracting during a video call, disable it.
contactNameLabel.labelize = call.hasLocalVideo contactNameLabel.labelize = call.hasLocalVideo
audioModeMuteButton.isSelected = call.isMuted audioModeMuteButton.isSelected = call.isMuted
@ -779,8 +763,6 @@ class CallViewController: OWSViewController, CallObserver, CallServiceObserver {
audioSourceButton.isHidden = false audioSourceButton.isHidden = false
} else { } else {
// No bluetooth audio detected // No bluetooth audio detected
audioSourceButton.isSelected = call.isSpeakerphoneEnabled
audioSourceButton.setImage(#imageLiteral(resourceName: "audio-call-speaker-inactive"), for: .normal) audioSourceButton.setImage(#imageLiteral(resourceName: "audio-call-speaker-inactive"), for: .normal)
audioSourceButton.setImage(#imageLiteral(resourceName: "audio-call-speaker-active"), for: .selected) audioSourceButton.setImage(#imageLiteral(resourceName: "audio-call-speaker-active"), for: .selected)
@ -820,6 +802,29 @@ class CallViewController: OWSViewController, CallObserver, CallServiceObserver {
updateCallStatusLabel(callState: call.state) updateCallStatusLabel(callState: call.state)
} }
// We update the audioSourceButton outside of the main `updateCallUI`
// because `updateCallUI` is intended to be idempotent, which isn't possible
// with external speaker state because:
// - the system API which enables the external speaker is a (somewhat slow) asynchronous
// operation
// - we want to give immediate UI feedback by marking the pressed button as selected
// before the operation completes.
func updateAudioSourceButtonIsSelected() {
guard callUIAdapter.audioService.isSpeakerphoneEnabled else {
self.audioSourceButton.isSelected = false
return
}
// VideoChat mode enables the output speaker, but we don't
// want to highlight the speaker button in that case.
guard !call.hasLocalVideo else {
self.audioSourceButton.isSelected = false
return
}
self.audioSourceButton.isSelected = true
}
// MARK: - Actions // MARK: - Actions
/** /**
@ -852,13 +857,9 @@ class CallViewController: OWSViewController, CallObserver, CallServiceObserver {
func didPressSpeakerphone(sender button: UIButton) { func didPressSpeakerphone(sender button: UIButton) {
Logger.info("\(TAG) called \(#function)") Logger.info("\(TAG) called \(#function)")
button.isSelected = !button.isSelected button.isSelected = !button.isSelected
if button.isSelected { callUIAdapter.audioService.requestSpeakerphone(isEnabled: button.isSelected)
callUIAdapter.setAudioSource(call: call, audioSource: AudioSource.builtInSpeaker)
} else {
// use default audio source
callUIAdapter.setAudioSource(call: call, audioSource: nil)
}
} }
func didPressTextMessage(sender button: UIButton) { func didPressTextMessage(sender button: UIButton) {
@ -961,6 +962,29 @@ class CallViewController: OWSViewController, CallObserver, CallServiceObserver {
self.updateCallUI(callState: call.state) self.updateCallUI(callState: call.state)
} }
// MARK: CallAudioServiceDelegate
func callAudioService(_ callAudioService: CallAudioService, didUpdateIsSpeakerphoneEnabled isSpeakerphoneEnabled: Bool) {
AssertIsOnMainThread()
updateAudioSourceButtonIsSelected()
}
func callAudioServiceDidChangeAudioSession(_ callAudioService: CallAudioService) {
AssertIsOnMainThread()
// Which sources are available depends on the state of your Session.
// When the audio session is not yet in PlayAndRecord none are available
// Then if we're in speakerphone, bluetooth isn't available.
// So we accrue all possible audio sources in a set, and that list lives as long as the CallViewController
// The downside of this is that if you e.g. unpair your bluetooth mid call, it will still appear as an option
// until your next call.
// FIXME: There's got to be a better way, but this is where I landed after a bit of work, and seems to work
// pretty well in practice.
let availableInputs = callAudioService.availableInputs
self.allAudioSources.formUnion(availableInputs)
}
// MARK: - Video // MARK: - Video
internal func updateLocalVideoTrack(localVideoTrack: RTCVideoTrack?) { internal func updateLocalVideoTrack(localVideoTrack: RTCVideoTrack?) {
@ -979,6 +1003,7 @@ class CallViewController: OWSViewController, CallObserver, CallServiceObserver {
localVideoView.isHidden = isHidden localVideoView.isHidden = isHidden
updateLocalVideoLayout() updateLocalVideoLayout()
updateAudioSourceButtonIsSelected()
} }
var hasRemoteVideoTrack: Bool { var hasRemoteVideoTrack: Bool {
@ -1002,6 +1027,8 @@ class CallViewController: OWSViewController, CallObserver, CallServiceObserver {
} }
internal func dismissIfPossible(shouldDelay: Bool, ignoreNag: Bool = false, completion: (() -> Swift.Void)? = nil) { internal func dismissIfPossible(shouldDelay: Bool, ignoreNag: Bool = false, completion: (() -> Swift.Void)? = nil) {
callUIAdapter.audioService.delegate = nil
if hasDismissed { if hasDismissed {
// Don't dismiss twice. // Don't dismiss twice.
return return

View file

@ -3174,17 +3174,14 @@ typedef NS_ENUM(NSInteger, MessagesRangeSizeMode) {
NSURL *fileURL = [NSURL fileURLWithPath:filepath]; NSURL *fileURL = [NSURL fileURLWithPath:filepath];
// Setup audio session // Setup audio session
AVAudioSession *session = [AVAudioSession sharedInstance]; BOOL configuredAudio = [OWSAudioSession.shared setRecordCategory];
OWSAssert(session.recordPermission == AVAudioSessionRecordPermissionGranted); if (!configuredAudio) {
OWSFail(@"%@ Couldn't configure audio session", self.logTag);
NSError *error;
[session setCategory:AVAudioSessionCategoryRecord error:&error];
if (error) {
OWSFail(@"%@ Couldn't configure audio session: %@", self.logTag, error);
[self cancelVoiceMemo]; [self cancelVoiceMemo];
return; return;
} }
NSError *error;
// Initiate and prepare the recorder // Initiate and prepare the recorder
self.audioRecorder = [[AVAudioRecorder alloc] initWithURL:fileURL self.audioRecorder = [[AVAudioRecorder alloc] initWithURL:fileURL
settings:@{ settings:@{
@ -3232,7 +3229,7 @@ typedef NS_ENUM(NSInteger, MessagesRangeSizeMode) {
NSTimeInterval durationSeconds = self.audioRecorder.currentTime; NSTimeInterval durationSeconds = self.audioRecorder.currentTime;
[self.audioRecorder stop]; [self stopRecording];
const NSTimeInterval kMinimumRecordingTimeSeconds = 1.f; const NSTimeInterval kMinimumRecordingTimeSeconds = 1.f;
if (durationSeconds < kMinimumRecordingTimeSeconds) { if (durationSeconds < kMinimumRecordingTimeSeconds) {
@ -3279,20 +3276,18 @@ typedef NS_ENUM(NSInteger, MessagesRangeSizeMode) {
} }
} }
- (void)stopRecording
{
[self.audioRecorder stop];
[OWSAudioSession.shared endAudioActivity];
}
- (void)cancelRecordingVoiceMemo - (void)cancelRecordingVoiceMemo
{ {
OWSAssertIsOnMainThread(); OWSAssertIsOnMainThread();
DDLogDebug(@"cancelRecordingVoiceMemo"); DDLogDebug(@"cancelRecordingVoiceMemo");
[self resetRecordingVoiceMemo]; [self stopRecording];
}
- (void)resetRecordingVoiceMemo
{
OWSAssertIsOnMainThread();
[self.audioRecorder stop];
self.audioRecorder = nil; self.audioRecorder = nil;
self.voiceMessageUUID = nil; self.voiceMessageUUID = nil;
} }

View file

@ -16,6 +16,7 @@
#import <AVKit/AVKit.h> #import <AVKit/AVKit.h>
#import <MediaPlayer/MPMoviePlayerViewController.h> #import <MediaPlayer/MPMoviePlayerViewController.h>
#import <MediaPlayer/MediaPlayer.h> #import <MediaPlayer/MediaPlayer.h>
#import <SignalMessaging/SignalMessaging-Swift.h>
#import <SignalServiceKit/NSData+Image.h> #import <SignalServiceKit/NSData+Image.h>
#import <YYImage/YYImage.h> #import <YYImage/YYImage.h>
@ -46,7 +47,10 @@ NS_ASSUME_NONNULL_BEGIN
#pragma mark - #pragma mark -
@interface MediaDetailViewController () <UIScrollViewDelegate, UIGestureRecognizerDelegate, PlayerProgressBarDelegate> @interface MediaDetailViewController () <UIScrollViewDelegate,
UIGestureRecognizerDelegate,
PlayerProgressBarDelegate,
OWSVideoPlayerDelegate>
@property (nonatomic) UIScrollView *scrollView; @property (nonatomic) UIScrollView *scrollView;
@property (nonatomic) UIView *mediaView; @property (nonatomic) UIView *mediaView;
@ -64,7 +68,7 @@ NS_ASSUME_NONNULL_BEGIN
@property (nonatomic) UIToolbar *footerBar; @property (nonatomic) UIToolbar *footerBar;
@property (nonatomic) BOOL areToolbarsHidden; @property (nonatomic) BOOL areToolbarsHidden;
@property (nonatomic, nullable) AVPlayer *videoPlayer; @property (nonatomic, nullable) OWSVideoPlayer *videoPlayer;
@property (nonatomic, nullable) UIButton *playVideoButton; @property (nonatomic, nullable) UIButton *playVideoButton;
@property (nonatomic, nullable) PlayerProgressBar *videoProgressBar; @property (nonatomic, nullable) PlayerProgressBar *videoProgressBar;
@property (nonatomic, nullable) UIBarButtonItem *videoPlayBarButton; @property (nonatomic, nullable) UIBarButtonItem *videoPlayBarButton;
@ -306,7 +310,7 @@ NS_ASSUME_NONNULL_BEGIN
if (@available(iOS 9, *)) { if (@available(iOS 9, *)) {
PlayerProgressBar *videoProgressBar = [PlayerProgressBar new]; PlayerProgressBar *videoProgressBar = [PlayerProgressBar new];
videoProgressBar.delegate = self; videoProgressBar.delegate = self;
videoProgressBar.player = self.videoPlayer; videoProgressBar.player = self.videoPlayer.avPlayer;
self.videoProgressBar = videoProgressBar; self.videoProgressBar = videoProgressBar;
[self.view addSubview:videoProgressBar]; [self.view addSubview:videoProgressBar];
@ -435,17 +439,13 @@ NS_ASSUME_NONNULL_BEGIN
} }
if (@available(iOS 9.0, *)) { if (@available(iOS 9.0, *)) {
AVPlayer *player = [[AVPlayer alloc] initWithURL:self.attachmentUrl]; OWSVideoPlayer *player = [[OWSVideoPlayer alloc] initWithUrl:self.attachmentUrl];
[player seekToTime:kCMTimeZero]; [player seekToTime:kCMTimeZero];
player.delegate = self;
self.videoPlayer = player; self.videoPlayer = player;
[[NSNotificationCenter defaultCenter] addObserver:self
selector:@selector(playerItemDidPlayToCompletion:)
name:AVPlayerItemDidPlayToEndTimeNotification
object:player.currentItem];
VideoPlayerView *playerView = [VideoPlayerView new]; VideoPlayerView *playerView = [VideoPlayerView new];
playerView.player = player; playerView.player = player.avPlayer;
[NSLayoutConstraint autoSetPriority:UILayoutPriorityDefaultLow [NSLayoutConstraint autoSetPriority:UILayoutPriorityDefaultLow
forConstraints:^{ forConstraints:^{
@ -892,20 +892,12 @@ NS_ASSUME_NONNULL_BEGIN
{ {
if (@available(iOS 9, *)) { if (@available(iOS 9, *)) {
OWSAssert(self.videoPlayer); OWSAssert(self.videoPlayer);
AVPlayer *player = self.videoPlayer;
[self updateFooterBarButtonItemsWithIsPlayingVideo:YES]; [self updateFooterBarButtonItemsWithIsPlayingVideo:YES];
self.playVideoButton.hidden = YES; self.playVideoButton.hidden = YES;
self.areToolbarsHidden = YES; self.areToolbarsHidden = YES;
OWSAssert(player.currentItem); [self.videoPlayer play];
AVPlayerItem *item = player.currentItem;
if (CMTIME_COMPARE_INLINE(item.currentTime, ==, item.duration)) {
// Rewind for repeated plays
[player seekToTime:kCMTimeZero];
}
[player play];
} else { } else {
[self legacyPlayVideo]; [self legacyPlayVideo];
return; return;
@ -921,7 +913,9 @@ NS_ASSUME_NONNULL_BEGIN
[self.videoPlayer pause]; [self.videoPlayer pause];
} }
- (void)playerItemDidPlayToCompletion:(NSNotification *)notification #pragma mark - OWSVideoPlayer
- (void)videoPlayerDidPlayToCompletion:(OWSVideoPlayer *)videoPlayer
{ {
OWSAssert(self.isVideo); OWSAssert(self.isVideo);
OWSAssert(self.videoPlayer); OWSAssert(self.videoPlayer);
@ -933,6 +927,8 @@ NS_ASSUME_NONNULL_BEGIN
[self updateFooterBarButtonItemsWithIsPlayingVideo:NO]; [self updateFooterBarButtonItemsWithIsPlayingVideo:NO];
} }
#pragma mark - PlayerProgressBarDelegate
- (void)playerProgressBarDidStartScrubbing:(PlayerProgressBar *)playerProgressBar - (void)playerProgressBarDidStartScrubbing:(PlayerProgressBar *)playerProgressBar
{ {
OWSAssert(self.videoPlayer); OWSAssert(self.videoPlayer);

View file

@ -88,8 +88,6 @@ class MessageDetailViewController: OWSViewController, UIScrollViewDelegate, Medi
override func viewWillAppear(_ animated: Bool) { override func viewWillAppear(_ animated: Bool) {
super.viewWillAppear(animated) super.viewWillAppear(animated)
mediaMessageView?.viewWillAppear(animated)
updateTextLayout() updateTextLayout()
if mode == .focusOnMetadata { if mode == .focusOnMetadata {
@ -118,12 +116,6 @@ class MessageDetailViewController: OWSViewController, UIScrollViewDelegate, Medi
} }
} }
override func viewWillDisappear(_ animated: Bool) {
super.viewWillDisappear(animated)
mediaMessageView?.viewWillDisappear(animated)
}
// MARK: - Create Views // MARK: - Create Views
private func createViews() { private func createViews() {

View file

@ -1,5 +1,5 @@
// //
// Copyright (c) 2017 Open Whisper Systems. All rights reserved. // Copyright (c) 2018 Open Whisper Systems. All rights reserved.
// //
import Foundation import Foundation
@ -7,8 +7,6 @@ import AVFoundation
import SignalServiceKit import SignalServiceKit
import SignalMessaging import SignalMessaging
public let CallAudioServiceSessionChanged = Notification.Name("CallAudioServiceSessionChanged")
struct AudioSource: Hashable { struct AudioSource: Hashable {
let image: UIImage let image: UIImage
@ -88,11 +86,21 @@ struct AudioSource: Hashable {
} }
} }
protocol CallAudioServiceDelegate: class {
func callAudioService(_ callAudioService: CallAudioService, didUpdateIsSpeakerphoneEnabled isEnabled: Bool)
func callAudioServiceDidChangeAudioSession(_ callAudioService: CallAudioService)
}
@objc class CallAudioService: NSObject, CallObserver { @objc class CallAudioService: NSObject, CallObserver {
private var vibrateTimer: Timer? private var vibrateTimer: Timer?
private let audioPlayer = AVAudioPlayer() private let audioPlayer = AVAudioPlayer()
private let handleRinging: Bool private let handleRinging: Bool
weak var delegate: CallAudioServiceDelegate? {
willSet {
assert(newValue == nil || delegate == nil)
}
}
class Sound: NSObject { class Sound: NSObject {
@ -137,8 +145,11 @@ struct AudioSource: Hashable {
// `pulseDuration` is the small pause between the two vibrations in the pair. // `pulseDuration` is the small pause between the two vibrations in the pair.
private let pulseDuration = 0.2 private let pulseDuration = 0.2
var audioSession: CallAudioSession { var audioSession: OWSAudioSession {
return CallAudioSession.shared return OWSAudioSession.shared
}
var avAudioSession: AVAudioSession {
return AVAudioSession.sharedInstance()
} }
// MARK: - Initializers // MARK: - Initializers
@ -151,7 +162,12 @@ struct AudioSource: Hashable {
SwiftSingletons.register(self) SwiftSingletons.register(self)
// Configure audio session so we don't prompt user with Record permission until call is connected. // Configure audio session so we don't prompt user with Record permission until call is connected.
audioSession.configure()
audioSession.configureRTCAudio()
NotificationCenter.default.addObserver(forName: .AVAudioSessionRouteChange, object: avAudioSession, queue: nil) { _ in
assert(!Thread.isMainThread)
self.updateIsSpeakerphoneEnabled()
}
} }
// MARK: - CallObserver // MARK: - CallObserver
@ -177,6 +193,12 @@ struct AudioSource: Hashable {
AssertIsOnMainThread() AssertIsOnMainThread()
ensureProperAudioSession(call: call) ensureProperAudioSession(call: call)
if let audioSource = audioSource, audioSource.isBuiltInSpeaker {
self.isSpeakerphoneEnabled = true
} else {
self.isSpeakerphoneEnabled = false
}
} }
internal func hasLocalVideoDidChange(call: SignalCall, hasLocalVideo: Bool) { internal func hasLocalVideoDidChange(call: SignalCall, hasLocalVideo: Bool) {
@ -185,11 +207,43 @@ struct AudioSource: Hashable {
ensureProperAudioSession(call: call) ensureProperAudioSession(call: call)
} }
// Speakerphone can be manipulated by the in-app callscreen or via the system callscreen (CallKit).
// Unlike other CallKit CallScreen buttons, enabling doesn't trigger a CXAction, so it's not as simple
// to track state changes. Instead we never store the state and directly access the ground-truth in the
// AVAudioSession.
private(set) var isSpeakerphoneEnabled: Bool = false {
didSet {
self.delegate?.callAudioService(self, didUpdateIsSpeakerphoneEnabled: isSpeakerphoneEnabled)
}
}
public func requestSpeakerphone(isEnabled: Bool) {
// This is a little too slow to execute on the main thread and the results are not immediately available after execution
// anyway, so we dispatch async. If you need to know the new value, you'll need to check isSpeakerphoneEnabled and take
// advantage of the CallAudioServiceDelegate.callAudioService(_:didUpdateIsSpeakerphoneEnabled:) callback.
DispatchQueue.global().async {
do {
try self.avAudioSession.overrideOutputAudioPort( isEnabled ? .speaker : .none )
} catch {
owsFail("\(self.logTag) failed to set \(#function) = \(isEnabled) with error: \(error)")
}
}
}
private func updateIsSpeakerphoneEnabled() {
let value = avAudioSession.currentRoute.outputs.contains { (portDescription: AVAudioSessionPortDescription) -> Bool in
return portDescription.portName == AVAudioSessionPortBuiltInSpeaker
}
DispatchQueue.main.async {
self.isSpeakerphoneEnabled = value
}
}
private func ensureProperAudioSession(call: SignalCall?) { private func ensureProperAudioSession(call: SignalCall?) {
AssertIsOnMainThread() AssertIsOnMainThread()
guard let call = call else { guard let call = call else {
setAudioSession(category: AVAudioSessionCategoryPlayback, setAudioSession(category: AVAudioSessionCategorySoloAmbient,
mode: AVAudioSessionModeDefault) mode: AVAudioSessionModeDefault)
return return
} }
@ -206,7 +260,7 @@ struct AudioSource: Hashable {
// SoloAmbient plays through speaker, but respects silent switch // SoloAmbient plays through speaker, but respects silent switch
setAudioSession(category: AVAudioSessionCategorySoloAmbient, setAudioSession(category: AVAudioSessionCategorySoloAmbient,
mode: AVAudioSessionModeDefault) mode: AVAudioSessionModeDefault)
} else if call.state == .connected, call.hasLocalVideo { } else if call.hasLocalVideo {
// Because ModeVideoChat affects gain, we don't want to apply it until the call is connected. // Because ModeVideoChat affects gain, we don't want to apply it until the call is connected.
// otherwise sounds like ringing will be extra loud for video vs. speakerphone // otherwise sounds like ringing will be extra loud for video vs. speakerphone
@ -227,26 +281,15 @@ struct AudioSource: Hashable {
options: options) options: options)
} }
let session = AVAudioSession.sharedInstance()
do { do {
// It's important to set preferred input *after* ensuring properAudioSession // It's important to set preferred input *after* ensuring properAudioSession
// because some sources are only valid for certain category/option combinations. // because some sources are only valid for certain category/option combinations.
let existingPreferredInput = session.preferredInput let existingPreferredInput = avAudioSession.preferredInput
if existingPreferredInput != call.audioSource?.portDescription { if existingPreferredInput != call.audioSource?.portDescription {
Logger.info("\(self.logTag) changing preferred input: \(String(describing: existingPreferredInput)) -> \(String(describing: call.audioSource?.portDescription))") Logger.info("\(self.logTag) changing preferred input: \(String(describing: existingPreferredInput)) -> \(String(describing: call.audioSource?.portDescription))")
try session.setPreferredInput(call.audioSource?.portDescription) try avAudioSession.setPreferredInput(call.audioSource?.portDescription)
} }
if call.isSpeakerphoneEnabled || (call.hasLocalVideo && call.state != .connected) {
// We want consistent ringer-volume between speaker-phone and video chat.
// But because using VideoChat mode has noticeably higher output gain, we treat
// video chat like speakerphone mode until the call is connected.
Logger.verbose("\(self.logTag) enabling speakerphone overrideOutputAudioPort(.speaker)")
try session.overrideOutputAudioPort(.speaker)
} else {
Logger.verbose("\(self.logTag) disabling spearkerphone overrideOutputAudioPort(.none) ")
try session.overrideOutputAudioPort(.none)
}
} catch { } catch {
owsFail("\(self.logTag) failed setting audio source with error: \(error) isSpeakerPhoneEnabled: \(call.isSpeakerphoneEnabled)") owsFail("\(self.logTag) failed setting audio source with error: \(error) isSpeakerPhoneEnabled: \(call.isSpeakerphoneEnabled)")
} }
@ -328,6 +371,7 @@ struct AudioSource: Hashable {
AssertIsOnMainThread() AssertIsOnMainThread()
play(sound: Sound.failure) play(sound: Sound.failure)
handleCallEnded(call: call)
} }
private func handleLocalHangup(call: SignalCall) { private func handleLocalHangup(call: SignalCall) {
@ -363,7 +407,8 @@ struct AudioSource: Hashable {
AssertIsOnMainThread() AssertIsOnMainThread()
// Stop solo audio, revert to default. // Stop solo audio, revert to default.
setAudioSession(category: AVAudioSessionCategoryAmbient) isSpeakerphoneEnabled = false
setAudioSession(category: AVAudioSessionCategorySoloAmbient)
} }
// MARK: Playing Sounds // MARK: Playing Sounds
@ -439,9 +484,7 @@ struct AudioSource: Hashable {
// Specifically if you call it while speakerphone is enabled you won't see // Specifically if you call it while speakerphone is enabled you won't see
// any connected bluetooth routes. // any connected bluetooth routes.
var availableInputs: [AudioSource] { var availableInputs: [AudioSource] {
let session = AVAudioSession.sharedInstance() guard let availableInputs = avAudioSession.availableInputs else {
guard let availableInputs = session.availableInputs else {
// I'm not sure why this would happen, but it may indicate an error. // I'm not sure why this would happen, but it may indicate an error.
// In practice, I haven't seen it on iOS9+. // In practice, I haven't seen it on iOS9+.
// //
@ -468,8 +511,7 @@ struct AudioSource: Hashable {
// system state to determine the current audio source. // system state to determine the current audio source.
// If a bluetooth is connected, this will be bluetooth, otherwise // If a bluetooth is connected, this will be bluetooth, otherwise
// this will be the receiver. // this will be the receiver.
let session = AVAudioSession.sharedInstance() guard let portDescription = avAudioSession.currentRoute.inputs.first else {
guard let portDescription = session.currentRoute.inputs.first else {
return nil return nil
} }
@ -482,13 +524,12 @@ struct AudioSource: Hashable {
AssertIsOnMainThread() AssertIsOnMainThread()
let session = AVAudioSession.sharedInstance()
var audioSessionChanged = false var audioSessionChanged = false
do { do {
if #available(iOS 10.0, *), let mode = mode { if #available(iOS 10.0, *), let mode = mode {
let oldCategory = session.category let oldCategory = avAudioSession.category
let oldMode = session.mode let oldMode = avAudioSession.mode
let oldOptions = session.categoryOptions let oldOptions = avAudioSession.categoryOptions
guard oldCategory != category || oldMode != mode || oldOptions != options else { guard oldCategory != category || oldMode != mode || oldOptions != options else {
return return
@ -505,13 +546,13 @@ struct AudioSource: Hashable {
if oldOptions != options { if oldOptions != options {
Logger.debug("\(self.logTag) audio session changed options: \(oldOptions) -> \(options) ") Logger.debug("\(self.logTag) audio session changed options: \(oldOptions) -> \(options) ")
} }
try session.setCategory(category, mode: mode, options: options) try avAudioSession.setCategory(category, mode: mode, options: options)
} else { } else {
let oldCategory = session.category let oldCategory = avAudioSession.category
let oldOptions = session.categoryOptions let oldOptions = avAudioSession.categoryOptions
guard session.category != category || session.categoryOptions != options else { guard avAudioSession.category != category || avAudioSession.categoryOptions != options else {
return return
} }
@ -523,7 +564,7 @@ struct AudioSource: Hashable {
if oldOptions != options { if oldOptions != options {
Logger.debug("\(self.logTag) audio session changed options: \(oldOptions) -> \(options) ") Logger.debug("\(self.logTag) audio session changed options: \(oldOptions) -> \(options) ")
} }
try session.setCategory(category, with: options) try avAudioSession.setCategory(category, with: options)
} }
} catch { } catch {
@ -533,8 +574,7 @@ struct AudioSource: Hashable {
if audioSessionChanged { if audioSessionChanged {
Logger.info("\(self.logTag) in \(#function)") Logger.info("\(self.logTag) in \(#function)")
// Update call view synchronously; already on main thread. self.delegate?.callAudioServiceDidChangeAudioSession(self)
NotificationCenter.default.post(name:CallAudioServiceSessionChanged, object: nil)
} }
} }
} }

View file

@ -1,51 +0,0 @@
//
// Copyright (c) 2017 Open Whisper Systems. All rights reserved.
//
import Foundation
import WebRTC
/**
* By default WebRTC starts the audio session (PlayAndRecord) immediately upon creating the peer connection
* but we want to create the peer connection and set up all the signaling channels before we prompt the user
* for an incoming call. Without manually handling the session, this would result in the user seeing a recording
* permission requested (and recording banner) before they even know they have an incoming call.
*
* By using the `useManualAudio` and `isAudioEnabled` attributes of the RTCAudioSession we can delay recording until
* it makes sense.
*/
class CallAudioSession {
let TAG = "[CallAudioSession]"
// Force singleton access
static let shared = CallAudioSession()
private init() {}
/**
* The private class that manages AVAudioSession for WebRTC
*/
private let rtcAudioSession = RTCAudioSession.sharedInstance()
/**
* This must be called before any audio tracks are added to the peerConnection, else we'll start recording before all
* our signaling is set up.
*/
func configure() {
Logger.info("\(TAG) in \(#function)")
rtcAudioSession.useManualAudio = true
}
/**
* Because we useManualAudio with our RTCAudioSession, we have to start/stop the recording audio session ourselves.
* See header for details on manual audio.
*/
var isRTCAudioEnabled: Bool {
get {
return rtcAudioSession.isAudioEnabled
}
set {
rtcAudioSession.isAudioEnabled = newValue
}
}
}

View file

@ -1024,6 +1024,9 @@ protocol CallServiceObserver: class {
// this.messageSender.cancelInFlightRequests(); // this.messageSender.cancelInFlightRequests();
if let peerConnectionClient = self.peerConnectionClient { if let peerConnectionClient = self.peerConnectionClient {
// Stop audio capture ASAP
ensureAudioState(call: call, peerConnectionClient: peerConnectionClient)
// If the call is connected, we can send the hangup via the data channel for faster hangup. // If the call is connected, we can send the hangup via the data channel for faster hangup.
let message = DataChannelMessage.forHangup(callId: call.signalingId) let message = DataChannelMessage.forHangup(callId: call.signalingId)
peerConnectionClient.sendDataChannelMessage(data: message.asData(), description: "hangup", isCritical: true) peerConnectionClient.sendDataChannelMessage(data: message.asData(), description: "hangup", isCritical: true)

View file

@ -1,5 +1,5 @@
// //
// Copyright (c) 2017 Open Whisper Systems. All rights reserved. // Copyright (c) 2018 Open Whisper Systems. All rights reserved.
// //
import Foundation import Foundation
@ -89,7 +89,7 @@ class NonCallKitCallUIAdaptee: NSObject, CallUIAdaptee {
return return
} }
CallAudioSession.shared.isRTCAudioEnabled = true OWSAudioSession.shared.isRTCAudioEnabled = true
self.callService.handleAnswerCall(call) self.callService.handleAnswerCall(call)
} }
@ -123,7 +123,7 @@ class NonCallKitCallUIAdaptee: NSObject, CallUIAdaptee {
func recipientAcceptedCall(_ call: SignalCall) { func recipientAcceptedCall(_ call: SignalCall) {
AssertIsOnMainThread() AssertIsOnMainThread()
CallAudioSession.shared.isRTCAudioEnabled = true OWSAudioSession.shared.isRTCAudioEnabled = true
} }
func localHangupCall(_ call: SignalCall) { func localHangupCall(_ call: SignalCall) {

View file

@ -1,5 +1,5 @@
// //
// Copyright (c) 2017 Open Whisper Systems. All rights reserved. // Copyright (c) 2018 Open Whisper Systems. All rights reserved.
// //
import Foundation import Foundation
@ -345,14 +345,14 @@ final class CallKitCallUIAdaptee: NSObject, CallUIAdaptee, CXProviderDelegate {
Logger.debug("\(TAG) Received \(#function)") Logger.debug("\(TAG) Received \(#function)")
CallAudioSession.shared.isRTCAudioEnabled = true OWSAudioSession.shared.isRTCAudioEnabled = true
} }
func provider(_ provider: CXProvider, didDeactivate audioSession: AVAudioSession) { func provider(_ provider: CXProvider, didDeactivate audioSession: AVAudioSession) {
AssertIsOnMainThread() AssertIsOnMainThread()
Logger.debug("\(TAG) Received \(#function)") Logger.debug("\(TAG) Received \(#function)")
CallAudioSession.shared.isRTCAudioEnabled = false OWSAudioSession.shared.isRTCAudioEnabled = false
} }
// MARK: - Util // MARK: - Util

View file

@ -13,7 +13,7 @@ public protocol AttachmentApprovalViewControllerDelegate: class {
} }
@objc @objc
public class AttachmentApprovalViewController: OWSViewController, CaptioningToolbarDelegate, PlayerProgressBarDelegate { public class AttachmentApprovalViewController: OWSViewController, CaptioningToolbarDelegate, PlayerProgressBarDelegate, OWSVideoPlayerDelegate {
let TAG = "[AttachmentApprovalViewController]" let TAG = "[AttachmentApprovalViewController]"
weak var delegate: AttachmentApprovalViewControllerDelegate? weak var delegate: AttachmentApprovalViewControllerDelegate?
@ -27,7 +27,7 @@ public class AttachmentApprovalViewController: OWSViewController, CaptioningTool
// MARK: Properties // MARK: Properties
let attachment: SignalAttachment let attachment: SignalAttachment
private var videoPlayer: AVPlayer? private var videoPlayer: OWSVideoPlayer?
private(set) var bottomToolbar: UIView! private(set) var bottomToolbar: UIView!
private(set) var mediaMessageView: MediaMessageView! private(set) var mediaMessageView: MediaMessageView!
@ -79,8 +79,6 @@ public class AttachmentApprovalViewController: OWSViewController, CaptioningTool
super.viewWillAppear(animated) super.viewWillAppear(animated)
CurrentAppContext().setStatusBarHidden(true, animated: animated) CurrentAppContext().setStatusBarHidden(true, animated: animated)
mediaMessageView.viewWillAppear(animated)
} }
override public func viewDidAppear(_ animated: Bool) { override public func viewDidAppear(_ animated: Bool) {
@ -92,8 +90,6 @@ public class AttachmentApprovalViewController: OWSViewController, CaptioningTool
Logger.debug("\(logTag) in \(#function)") Logger.debug("\(logTag) in \(#function)")
super.viewWillDisappear(animated) super.viewWillDisappear(animated)
mediaMessageView.viewWillDisappear(animated)
// Since this VC is being dismissed, the "show status bar" animation would feel like // Since this VC is being dismissed, the "show status bar" animation would feel like
// it's occuring on the presenting view controller - it's better not to animate at all. // it's occuring on the presenting view controller - it's better not to animate at all.
CurrentAppContext().setStatusBarHidden(false, animated: false) CurrentAppContext().setStatusBarHidden(false, animated: false)
@ -182,16 +178,12 @@ public class AttachmentApprovalViewController: OWSViewController, CaptioningTool
return return
} }
let player = AVPlayer(url: videoURL) let player = OWSVideoPlayer(url: videoURL)
self.videoPlayer = player self.videoPlayer = player
player.delegate = self
NotificationCenter.default.addObserver(self,
selector: #selector(playerItemDidPlayToCompletion(_:)),
name: NSNotification.Name.AVPlayerItemDidPlayToEndTime,
object: player.currentItem)
let playerView = VideoPlayerView() let playerView = VideoPlayerView()
playerView.player = player playerView.player = player.avPlayer
self.mediaMessageView.addSubview(playerView) self.mediaMessageView.addSubview(playerView)
playerView.autoPinEdgesToSuperviewEdges() playerView.autoPinEdgesToSuperviewEdges()
@ -199,7 +191,7 @@ public class AttachmentApprovalViewController: OWSViewController, CaptioningTool
playerView.addGestureRecognizer(pauseGesture) playerView.addGestureRecognizer(pauseGesture)
let progressBar = PlayerProgressBar() let progressBar = PlayerProgressBar()
progressBar.player = player progressBar.player = player.avPlayer
progressBar.delegate = self progressBar.delegate = self
// we don't want the progress bar to zoom during "pinch-to-zoom" // we don't want the progress bar to zoom during "pinch-to-zoom"
@ -300,17 +292,6 @@ public class AttachmentApprovalViewController: OWSViewController, CaptioningTool
UIView.animate(withDuration: 0.1) { UIView.animate(withDuration: 0.1) {
playVideoButton.alpha = 0.0 playVideoButton.alpha = 0.0
} }
guard let item = videoPlayer.currentItem else {
owsFail("\(TAG) video player item was unexpectedly nil")
return
}
if item.currentTime() == item.duration {
// Rewind for repeated plays, but only if it previously played to end.
videoPlayer.seek(to: kCMTimeZero)
}
videoPlayer.play() videoPlayer.play()
} else { } else {
self.playLegacyVideo() self.playLegacyVideo()
@ -353,11 +334,12 @@ public class AttachmentApprovalViewController: OWSViewController, CaptioningTool
} }
@objc @objc
private func playerItemDidPlayToCompletion(_ notification: Notification) { public func videoPlayerDidPlayToCompletion(_ videoPlayer: OWSVideoPlayer) {
guard let playVideoButton = self.playVideoButton else { guard let playVideoButton = self.playVideoButton else {
owsFail("\(TAG) playVideoButton was unexpectedly nil") owsFail("\(TAG) playVideoButton was unexpectedly nil")
return return
} }
UIView.animate(withDuration: 0.1) { UIView.animate(withDuration: 0.1) {
playVideoButton.alpha = 1.0 playVideoButton.alpha = 1.0
} }

View file

@ -87,18 +87,6 @@ public class MediaMessageView: UIView, OWSAudioAttachmentPlayerDelegate {
NotificationCenter.default.removeObserver(self) NotificationCenter.default.removeObserver(self)
} }
// MARK: View Lifecycle
@objc
public func viewWillAppear(_ animated: Bool) {
OWSAudioAttachmentPlayer.setAudioIgnoresHardwareMuteSwitch(true)
}
@objc
public func viewWillDisappear(_ animated: Bool) {
OWSAudioAttachmentPlayer.setAudioIgnoresHardwareMuteSwitch(false)
}
// MARK: - Create Views // MARK: - Create Views
private func createViews() { private func createViews() {

View file

@ -1,5 +1,5 @@
// //
// Copyright (c) 2017 Open Whisper Systems. All rights reserved. // Copyright (c) 2018 Open Whisper Systems. All rights reserved.
// //
NS_ASSUME_NONNULL_BEGIN NS_ASSUME_NONNULL_BEGIN
@ -25,8 +25,6 @@ typedef NS_ENUM(NSInteger, AudioPlaybackState) {
@interface OWSAudioAttachmentPlayer : NSObject @interface OWSAudioAttachmentPlayer : NSObject
+ (void)setAudioIgnoresHardwareMuteSwitch:(BOOL)shouldIgnore;
@property (nonatomic, readonly, weak) id<OWSAudioAttachmentPlayerDelegate> delegate; @property (nonatomic, readonly, weak) id<OWSAudioAttachmentPlayerDelegate> delegate;
// This property can be used to associate instances of the player with view // This property can be used to associate instances of the player with view

View file

@ -22,17 +22,6 @@ NS_ASSUME_NONNULL_BEGIN
@implementation OWSAudioAttachmentPlayer @implementation OWSAudioAttachmentPlayer
+ (void)setAudioIgnoresHardwareMuteSwitch:(BOOL)shouldIgnore
{
NSError *error = nil;
BOOL success = [[AVAudioSession sharedInstance]
setCategory:(shouldIgnore ? AVAudioSessionCategoryPlayback : AVAudioSessionCategoryPlayAndRecord)error:&error];
OWSAssert(!error);
if (!success || error) {
DDLogError(@"%@ Error in setAudioIgnoresHardwareMuteSwitch: %d", self.logTag, shouldIgnore);
}
}
- (instancetype)initWithMediaUrl:(NSURL *)mediaUrl delegate:(id<OWSAudioAttachmentPlayerDelegate>)delegate - (instancetype)initWithMediaUrl:(NSURL *)mediaUrl delegate:(id<OWSAudioAttachmentPlayerDelegate>)delegate
{ {
self = [super init]; self = [super init];
@ -76,7 +65,7 @@ NS_ASSUME_NONNULL_BEGIN
OWSAssert(self.mediaUrl); OWSAssert(self.mediaUrl);
OWSAssert([self.delegate audioPlaybackState] != AudioPlaybackState_Playing); OWSAssert([self.delegate audioPlaybackState] != AudioPlaybackState_Playing);
[[self class] setAudioIgnoresHardwareMuteSwitch:YES]; [OWSAudioSession.shared setPlaybackCategory];
[self.audioPlayerPoller invalidate]; [self.audioPlayerPoller invalidate];
@ -101,7 +90,6 @@ NS_ASSUME_NONNULL_BEGIN
self.audioPlayer.delegate = self; self.audioPlayer.delegate = self;
} }
[self.audioPlayer prepareToPlay];
[self.audioPlayer play]; [self.audioPlayer play];
[self.audioPlayerPoller invalidate]; [self.audioPlayerPoller invalidate];
self.audioPlayerPoller = [NSTimer weakScheduledTimerWithTimeInterval:.05f self.audioPlayerPoller = [NSTimer weakScheduledTimerWithTimeInterval:.05f
@ -123,6 +111,7 @@ NS_ASSUME_NONNULL_BEGIN
[self.audioPlayerPoller invalidate]; [self.audioPlayerPoller invalidate];
[self.delegate setAudioProgress:[self.audioPlayer currentTime] duration:[self.audioPlayer duration]]; [self.delegate setAudioProgress:[self.audioPlayer currentTime] duration:[self.audioPlayer duration]];
[OWSAudioSession.shared endAudioActivity];
[DeviceSleepManager.sharedInstance removeBlockWithBlockObject:self]; [DeviceSleepManager.sharedInstance removeBlockWithBlockObject:self];
} }
@ -135,6 +124,7 @@ NS_ASSUME_NONNULL_BEGIN
[self.audioPlayerPoller invalidate]; [self.audioPlayerPoller invalidate];
[self.delegate setAudioProgress:0 duration:0]; [self.delegate setAudioProgress:0 duration:0];
[OWSAudioSession.shared endAudioActivity];
[DeviceSleepManager.sharedInstance removeBlockWithBlockObject:self]; [DeviceSleepManager.sharedInstance removeBlockWithBlockObject:self];
} }

View file

@ -0,0 +1,72 @@
//
// Copyright (c) 2018 Open Whisper Systems. All rights reserved.
//
import Foundation
import AVFoundation
@objc
protocol OWSVideoPlayerDelegate: class {
@available(iOSApplicationExtension 9.0, *)
func videoPlayerDidPlayToCompletion(_ videoPlayer: OWSVideoPlayer)
}
@objc
public class OWSVideoPlayer: NSObject {
let avPlayer: AVPlayer
weak var delegate: OWSVideoPlayerDelegate?
@available(iOS 9.0, *)
init(url: URL) {
self.avPlayer = AVPlayer(url: url)
super.init()
NotificationCenter.default.addObserver(self,
selector: #selector(playerItemDidPlayToCompletion(_:)),
name: NSNotification.Name.AVPlayerItemDidPlayToEndTime,
object: avPlayer.currentItem)
}
// MARK: Playback Controls
@available(iOS 9.0, *)
public func pause() {
avPlayer.pause()
OWSAudioSession.shared.endAudioActivity()
}
@available(iOS 9.0, *)
public func play() {
OWSAudioSession.shared.setPlaybackCategory()
guard let item = avPlayer.currentItem else {
owsFail("\(logTag) video player item was unexpectedly nil")
return
}
if item.currentTime() == item.duration {
// Rewind for repeated plays, but only if it previously played to end.
avPlayer.seek(to: kCMTimeZero)
}
avPlayer.play()
}
@available(iOS 9.0, *)
@objc(seekToTime:)
public func seek(to time: CMTime) {
avPlayer.seek(to: time)
}
// MARK: private
@objc
@available(iOS 9.0, *)
private func playerItemDidPlayToCompletion(_ notification: Notification) {
self.delegate?.videoPlayerDidPlayToCompletion(self)
OWSAudioSession.shared.endAudioActivity()
}
}
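
For reference, a condensed, hypothetical caller for the player above; the real integrations are MediaDetailViewController and AttachmentApprovalViewController elsewhere in this diff, and VideoPlayerView already exists in the project:

import UIKit

// Illustrative only: hosting OWSVideoPlayer from a view controller.
class ExampleVideoViewController: UIViewController, OWSVideoPlayerDelegate {
    private var videoPlayer: OWSVideoPlayer?

    @available(iOS 9.0, *)
    func loadVideo(url: URL) {
        let player = OWSVideoPlayer(url: url)
        player.delegate = self
        self.videoPlayer = player

        // The view renders the player's underlying AVPlayer.
        let playerView = VideoPlayerView()
        playerView.player = player.avPlayer
        view.addSubview(playerView)
    }

    @available(iOS 9.0, *)
    func didTapPlay() {
        // play() sets the playback category before starting and rewinds
        // if the item previously played to the end.
        videoPlayer?.play()
    }

    // MARK: OWSVideoPlayerDelegate

    func videoPlayerDidPlayToCompletion(_ videoPlayer: OWSVideoPlayer) {
        // The player has already ended its audio activity, so background
        // audio (e.g. Music) resumes; only the UI needs updating here.
    }
}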

View file

@ -18,8 +18,6 @@ NS_ASSUME_NONNULL_BEGIN
replacementString:(NSString *)insertionText replacementString:(NSString *)insertionText
countryCode:(NSString *)countryCode; countryCode:(NSString *)countryCode;
+ (void)setAudioIgnoresHardwareMuteSwitch:(BOOL)shouldIgnore;
+ (NSString *)examplePhoneNumberForCountryCode:(NSString *)countryCode callingCode:(NSString *)callingCode; + (NSString *)examplePhoneNumberForCountryCode:(NSString *)countryCode callingCode:(NSString *)callingCode;
@end @end

View file

@ -74,17 +74,6 @@ NS_ASSUME_NONNULL_BEGIN
[textField setSelectedTextRange:[textField textRangeFromPosition:pos toPosition:pos]]; [textField setSelectedTextRange:[textField textRangeFromPosition:pos toPosition:pos]];
} }
+ (void)setAudioIgnoresHardwareMuteSwitch:(BOOL)shouldIgnore
{
NSError *error = nil;
BOOL success = [[AVAudioSession sharedInstance]
setCategory:(shouldIgnore ? AVAudioSessionCategoryPlayback : AVAudioSessionCategoryPlayAndRecord)error:&error];
OWSAssert(!error);
if (!success || error) {
DDLogError(@"%@ Error in setAudioIgnoresHardwareMuteSwitch: %d", self.logTag, shouldIgnore);
}
}
+ (NSString *)examplePhoneNumberForCountryCode:(NSString *)countryCode callingCode:(NSString *)callingCode + (NSString *)examplePhoneNumberForCountryCode:(NSString *)countryCode callingCode:(NSString *)callingCode
{ {
OWSAssert(countryCode.length > 0); OWSAssert(countryCode.length > 0);

View file

@ -0,0 +1,101 @@
//
// Copyright (c) 2018 Open Whisper Systems. All rights reserved.
//
import Foundation
import WebRTC
@objc
public class OWSAudioSession: NSObject {
// Force singleton access
public static let shared = OWSAudioSession()
private override init() {}
private let avAudioSession = AVAudioSession.sharedInstance()
// Ignores hardware mute switch, plays through external speaker
public func setPlaybackCategory() {
Logger.debug("\(logTag) in \(#function)")
// In general, we should have put the audio session back to its default
// category when we were done with whatever activity required it to be modified
assert(avAudioSession.category == AVAudioSessionCategorySoloAmbient)
do {
try avAudioSession.setCategory(AVAudioSessionCategoryPlayback)
} catch {
owsFail("\(logTag) in \(#function) failed with error: \(error)")
}
}
public func setRecordCategory() -> Bool {
Logger.debug("\(logTag) in \(#function)")
// In general, we should have put the audio session back to its default
// category when we were done with whatever activity required it to be modified
assert(avAudioSession.category == AVAudioSessionCategorySoloAmbient)
assert(avAudioSession.recordPermission() == .granted)
do {
try avAudioSession.setCategory(AVAudioSessionCategoryRecord)
return true
} catch {
owsFail("\(logTag) in \(#function) failed with error: \(error)")
return false
}
}
public func endAudioActivity() {
Logger.debug("\(logTag) in \(#function)")
do {
try avAudioSession.setCategory(AVAudioSessionCategorySoloAmbient)
// When playing audio in Signal, other apps' audio (e.g. Music) is paused.
// By notifying when we deactivate, the other app can resume playback.
try avAudioSession.setActive(false, with: [.notifyOthersOnDeactivation])
} catch {
owsFail("\(logTag) in \(#function) failed with error: \(error)")
}
}
// MARK: - WebRTC Audio
/**
* By default WebRTC starts the audio session (PlayAndRecord) immediately upon creating the peer connection
* but we want to create the peer connection and set up all the signaling channels before we prompt the user
* for an incoming call. Without manually handling the session, this would result in the user seeing a recording
* permission request (and a recording banner) before they even know they have an incoming call.
*
* By using the `useManualAudio` and `isAudioEnabled` attributes of the RTCAudioSession we can delay recording until
* it makes sense.
*/
/**
* The private class that manages AVAudioSession for WebRTC
*/
private let rtcAudioSession = RTCAudioSession.sharedInstance()
/**
* This must be called before any audio tracks are added to the peerConnection, else we'll start recording before all
* our signaling is set up.
*/
public func configureRTCAudio() {
Logger.info("\(logTag) in \(#function)")
rtcAudioSession.useManualAudio = true
}
/**
* Because we useManualAudio with our RTCAudioSession, we have to start/stop the recording audio session ourselves.
* See header for details on manual audio.
*/
public var isRTCAudioEnabled: Bool {
get {
return rtcAudioSession.isAudioEnabled
}
set {
rtcAudioSession.isAudioEnabled = newValue
}
}
}