Merge branch 'mkirk/call-inaudible'

Michael Kirk 2018-02-16 14:00:24 -08:00
commit 8fde4a3a6f
11 changed files with 107 additions and 24 deletions

@@ -248,6 +248,7 @@
4523149E1F7E916B003A428C /* SlideOffAnimatedTransition.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4523149D1F7E916B003A428C /* SlideOffAnimatedTransition.swift */; };
452314A01F7E9E18003A428C /* DirectionalPanGestureRecognizer.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4523149F1F7E9E18003A428C /* DirectionalPanGestureRecognizer.swift */; };
452C468F1E427E200087B011 /* OutboundCallInitiator.swift in Sources */ = {isa = PBXBuildFile; fileRef = 452C468E1E427E200087B011 /* OutboundCallInitiator.swift */; };
452C7CA72037628B003D51A5 /* Weak.swift in Sources */ = {isa = PBXBuildFile; fileRef = 45F170D51E315310003FC1F2 /* Weak.swift */; };
452D1EE81DCA90D100A57EC4 /* MesssagesBubblesSizeCalculatorTest.swift in Sources */ = {isa = PBXBuildFile; fileRef = 452D1EE71DCA90D100A57EC4 /* MesssagesBubblesSizeCalculatorTest.swift */; };
452EA09E1EA7ABE00078744B /* AttachmentPointerView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 452EA09D1EA7ABE00078744B /* AttachmentPointerView.swift */; };
452ECA4D1E087E7200E2F016 /* MessageFetcherJob.swift in Sources */ = {isa = PBXBuildFile; fileRef = 452ECA4C1E087E7200E2F016 /* MessageFetcherJob.swift */; };
@@ -308,7 +309,6 @@
45E5A6991F61E6DE001E4A8A /* MarqueeLabel.swift in Sources */ = {isa = PBXBuildFile; fileRef = 45E5A6981F61E6DD001E4A8A /* MarqueeLabel.swift */; };
45E7A6A81E71CA7E00D44FB5 /* DisplayableTextFilterTest.swift in Sources */ = {isa = PBXBuildFile; fileRef = 45E7A6A61E71CA7E00D44FB5 /* DisplayableTextFilterTest.swift */; };
45F170BB1E2FC5D3003FC1F2 /* CallAudioService.swift in Sources */ = {isa = PBXBuildFile; fileRef = 45F170BA1E2FC5D3003FC1F2 /* CallAudioService.swift */; };
45F170D61E315310003FC1F2 /* Weak.swift in Sources */ = {isa = PBXBuildFile; fileRef = 45F170D51E315310003FC1F2 /* Weak.swift */; };
45F59A082028E4FB00E8D2B0 /* OWSAudioSession.swift in Sources */ = {isa = PBXBuildFile; fileRef = 45F170AB1E2F0351003FC1F2 /* OWSAudioSession.swift */; };
45F59A0A2029140500E8D2B0 /* OWSVideoPlayer.swift in Sources */ = {isa = PBXBuildFile; fileRef = 45F59A092029140500E8D2B0 /* OWSVideoPlayer.swift */; };
45F659731E1BD99C00444429 /* CallKitCallUIAdaptee.swift in Sources */ = {isa = PBXBuildFile; fileRef = 45F659721E1BD99C00444429 /* CallKitCallUIAdaptee.swift */; };
@@ -1138,6 +1138,7 @@
346129BE1FD2068600532771 /* ThreadUtil.m */,
B97940251832BD2400BD66CB /* UIUtil.h */,
B97940261832BD2400BD66CB /* UIUtil.m */,
45F170D51E315310003FC1F2 /* Weak.swift */,
346129751FD1E0B500532771 /* WeakTimer.swift */,
);
path = utils;
@@ -1756,7 +1757,6 @@
FCFA64B11A24F29E0007FB87 /* UI Categories */,
FCC81A961A44558300DFEC7D /* UIDevice+TSHardwareVersion.h */,
FCC81A971A44558300DFEC7D /* UIDevice+TSHardwareVersion.m */,
45F170D51E315310003FC1F2 /* Weak.swift */,
);
path = util;
sourceTree = "<group>";
@@ -2803,6 +2803,7 @@
3478506B1FD9B78A007B8332 /* NoopCallMessageHandler.swift in Sources */,
451F8A3D1FD713CA005CB9DA /* ThreadViewHelper.m in Sources */,
346129AD1FD1F34E00532771 /* ImageCache.swift in Sources */,
452C7CA72037628B003D51A5 /* Weak.swift in Sources */,
451F8A341FD710C3005CB9DA /* ConversationSearcher.swift in Sources */,
346129341FD1A88700532771 /* OWSSwiftUtils.swift in Sources */,
346129FE1FD5F31400532771 /* OWS106EnsureProfileComplete.swift in Sources */,
@@ -2977,7 +2978,6 @@
4574A5D61DD6704700C6B692 /* CallService.swift in Sources */,
34B3F8721E8DF1700035BE1A /* AdvancedSettingsTableViewController.m in Sources */,
3461299C1FD1EA9E00532771 /* NotificationsManager.m in Sources */,
45F170D61E315310003FC1F2 /* Weak.swift in Sources */,
4521C3C01F59F3BA00B4C582 /* TextFieldHelper.swift in Sources */,
34B3F8891E8DF1700035BE1A /* OWSConversationSettingsViewController.m in Sources */,
34B3F87E1E8DF1700035BE1A /* InboxTableViewCell.m in Sources */,

@@ -151,6 +151,7 @@ typedef NS_ENUM(NSInteger, MessagesRangeSizeMode) {
@property (nonatomic) TSThread *thread;
@property (nonatomic) YapDatabaseConnection *editingDatabaseConnection;
@property (nonatomic, readonly) AudioActivity *voiceNoteAudioActivity;
// These two properties must be updated in lockstep.
//
@@ -277,6 +278,8 @@ typedef NS_ENUM(NSInteger, MessagesRangeSizeMode) {
_networkManager = [TSNetworkManager sharedManager];
_blockingManager = [OWSBlockingManager sharedManager];
_contactsViewHelper = [[ContactsViewHelper alloc] initWithDelegate:self];
NSString *audioActivityDescription = [NSString stringWithFormat:@"%@ voice note", self.logTag];
_voiceNoteAudioActivity = [[AudioActivity alloc] initWithAudioDescription:audioActivityDescription];
}
- (void)addNotificationListeners
@@ -3173,7 +3176,7 @@ typedef NS_ENUM(NSInteger, MessagesRangeSizeMode) {
NSURL *fileURL = [NSURL fileURLWithPath:filepath];
// Setup audio session
BOOL configuredAudio = [OWSAudioSession.shared setRecordCategory];
BOOL configuredAudio = [OWSAudioSession.shared setRecordCategoryWithAudioActivity:self.voiceNoteAudioActivity];
if (!configuredAudio) {
OWSFail(@"%@ Couldn't configure audio session", self.logTag);
[self cancelVoiceMemo];
@@ -3278,7 +3281,7 @@ typedef NS_ENUM(NSInteger, MessagesRangeSizeMode) {
- (void)stopRecording
{
[self.audioRecorder stop];
[OWSAudioSession.shared endAudioActivity];
[OWSAudioSession.shared endAudioActivity:self.voiceNoteAudioActivity];
}
- (void)cancelRecordingVoiceMemo
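
The voice-note flow above now threads a dedicated AudioActivity through the session: setRecordCategoryWithAudioActivity: claims it before recording starts, and stopRecording hands the same object back via endAudioActivity:. A minimal Swift sketch of that pairing, using the OWSAudioSession API introduced later in this diff (the function names and the voiceNoteActivity constant are placeholders, not the view controller's actual code):

```swift
import Foundation

// Stand-in for the view controller's voiceNoteAudioActivity property.
let voiceNoteActivity = AudioActivity(audioDescription: "ConversationView voice note")

func startRecordingVoiceMemo() {
    // Same call the Objective-C code makes via setRecordCategoryWithAudioActivity:.
    guard OWSAudioSession.shared.setRecordCategory(audioActivity: voiceNoteActivity) else {
        // Audio session couldn't be configured; the real code cancels the voice memo here.
        return
    }
    // ... configure and start the AVAudioRecorder ...
}

func stopRecordingVoiceMemo() {
    // ... stop the AVAudioRecorder ...
    // Ending the same activity lets OWSAudioSession decide whether the
    // session can now be deactivated.
    OWSAudioSession.shared.endAudioActivity(voiceNoteActivity)
}
```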

@@ -1500,7 +1500,9 @@ protocol CallServiceObserver: class {
self.peerConnectionClient = nil
self.call?.removeAllObservers()
self.callUIAdapter.didTerminateCall(self.call)
self.call = nil
self.sendIceUpdatesImmediately = true
Logger.info("\(self.logTag) clearing pendingIceUpdateMessages")
self.pendingIceUpdateMessages = []

@@ -35,6 +35,9 @@ class NonCallKitCallUIAdaptee: NSObject, CallUIAdaptee {
let call = SignalCall.outgoingCall(localId: UUID(), remotePhoneNumber: handle)
// make sure we don't terminate audio session during call
OWSAudioSession.shared.startAudioActivity(call.audioActivity)
self.callService.handleOutgoingCall(call).then {
Logger.debug("\(self.TAG) handleOutgoingCall succeeded")
}.catch { error in

@@ -1,5 +1,5 @@
//
// Copyright (c) 2017 Open Whisper Systems. All rights reserved.
// Copyright (c) 2018 Open Whisper Systems. All rights reserved.
//
import Foundation
@@ -106,6 +106,8 @@ protocol CallObserver: class {
}
}
let audioActivity: AudioActivity
var audioSource: AudioSource? = nil {
didSet {
AssertIsOnMainThread()
@@ -148,8 +150,8 @@ protocol CallObserver: class {
self.signalingId = signalingId
self.state = state
self.remotePhoneNumber = remotePhoneNumber
self.thread = TSContactThread.getOrCreateThread(contactId: remotePhoneNumber)
self.audioActivity = AudioActivity(audioDescription: "[SignalCall] with \(remotePhoneNumber)")
}
// A string containing the three identifiers for this call.

@@ -76,6 +76,9 @@ final class CallKitCallUIAdaptee: NSObject, CallUIAdaptee, CXProviderDelegate {
let call = SignalCall.outgoingCall(localId: UUID(), remotePhoneNumber: handle)
// make sure we don't terminate audio session during call
OWSAudioSession.shared.startAudioActivity(call.audioActivity)
// Add the new outgoing call to the app's list of calls.
// So we can find it in the provider delegate callbacks.
callManager.addCall(call)

@@ -1,5 +1,5 @@
//
// Copyright (c) 2017 Open Whisper Systems. All rights reserved.
// Copyright (c) 2018 Open Whisper Systems. All rights reserved.
//
import Foundation
@@ -111,6 +111,9 @@ extension CallUIAdaptee {
internal func reportIncomingCall(_ call: SignalCall, thread: TSContactThread) {
AssertIsOnMainThread()
// make sure we don't terminate audio session during call
OWSAudioSession.shared.startAudioActivity(call.audioActivity)
let callerName = self.contactsManager.displayName(forPhoneIdentifier: call.remotePhoneNumber)
adaptee.reportIncomingCall(call, callerName: callerName)
}
@@ -153,6 +156,14 @@ extension CallUIAdaptee {
adaptee.declineCall(call)
}
internal func didTerminateCall(_ call: SignalCall?) {
AssertIsOnMainThread()
if let call = call {
OWSAudioSession.shared.endAudioActivity(call.audioActivity)
}
}
internal func startAndShowOutgoingCall(recipientId: String) {
AssertIsOnMainThread()
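
Both adaptees start the call's audio activity as soon as a call is created or reported, and didTerminateCall(_:) above is the single place that balances it. A rough sketch of that lifecycle, assuming the SignalCall and OWSAudioSession APIs from this diff (the phone number is a placeholder):

```swift
import Foundation

// The adaptee claims the audio session up front so that, for example, a voice
// note finishing mid-call cannot deactivate the shared session.
let call = SignalCall.outgoingCall(localId: UUID(), remotePhoneNumber: "+15555550100")
OWSAudioSession.shared.startAudioActivity(call.audioActivity)

// ... the call connects and proceeds ...

// On teardown, CallService hands the call to didTerminateCall(_:), which ends
// the activity and lets OWSAudioSession restore the default category if no
// other activity (voice note, media playback, ...) is still running.
OWSAudioSession.shared.endAudioActivity(call.audioActivity)
```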

@@ -15,6 +15,7 @@ NS_ASSUME_NONNULL_BEGIN
@property (nonatomic, readonly) NSURL *mediaUrl;
@property (nonatomic, nullable) AVAudioPlayer *audioPlayer;
@property (nonatomic, nullable) NSTimer *audioPlayerPoller;
@property (nonatomic, readonly) AudioActivity *audioActivity;
@end
@@ -35,6 +36,9 @@ NS_ASSUME_NONNULL_BEGIN
_delegate = delegate;
_mediaUrl = mediaUrl;
NSString *audioActivityDescription = [NSString stringWithFormat:@"%@ %@", self.logTag, self.mediaUrl];
_audioActivity = [[AudioActivity alloc] initWithAudioDescription:audioActivityDescription];
[[NSNotificationCenter defaultCenter] addObserver:self
selector:@selector(applicationDidEnterBackground:)
name:OWSApplicationDidEnterBackgroundNotification
@@ -65,7 +69,7 @@ NS_ASSUME_NONNULL_BEGIN
OWSAssert(self.mediaUrl);
OWSAssert([self.delegate audioPlaybackState] != AudioPlaybackState_Playing);
[OWSAudioSession.shared setPlaybackCategory];
[OWSAudioSession.shared setPlaybackCategoryWithAudioActivity:self.audioActivity];
[self.audioPlayerPoller invalidate];
@@ -111,7 +115,7 @@ NS_ASSUME_NONNULL_BEGIN
[self.audioPlayerPoller invalidate];
[self.delegate setAudioProgress:[self.audioPlayer currentTime] duration:[self.audioPlayer duration]];
[OWSAudioSession.shared endAudioActivity];
[OWSAudioSession.shared endAudioActivity:self.audioActivity];
[DeviceSleepManager.sharedInstance removeBlockWithBlockObject:self];
}
@@ -124,7 +128,7 @@ NS_ASSUME_NONNULL_BEGIN
[self.audioPlayerPoller invalidate];
[self.delegate setAudioProgress:0 duration:0];
[OWSAudioSession.shared endAudioActivity];
[OWSAudioSession.shared endAudioActivity:self.audioActivity];
[DeviceSleepManager.sharedInstance removeBlockWithBlockObject:self];
}

@@ -15,12 +15,14 @@ protocol OWSVideoPlayerDelegate: class {
public class OWSVideoPlayer: NSObject {
let avPlayer: AVPlayer
let audioActivity: AudioActivity
weak var delegate: OWSVideoPlayerDelegate?
@available(iOS 9.0, *)
init(url: URL) {
self.avPlayer = AVPlayer(url: url)
self.audioActivity = AudioActivity(audioDescription: "[OWSVideoPlayer] url:\(url)")
super.init()
@@ -35,12 +37,12 @@ public class OWSVideoPlayer: NSObject {
@available(iOS 9.0, *)
public func pause() {
avPlayer.pause()
OWSAudioSession.shared.endAudioActivity()
OWSAudioSession.shared.endAudioActivity(self.audioActivity)
}
@available(iOS 9.0, *)
public func play() {
OWSAudioSession.shared.setPlaybackCategory()
OWSAudioSession.shared.setPlaybackCategory(audioActivity: self.audioActivity)
guard let item = avPlayer.currentItem else {
owsFail("\(logTag) video player item was unexpectedly nil")
@@ -67,6 +69,6 @@ public class OWSVideoPlayer: NSObject {
@available(iOS 9.0, *)
private func playerItemDidPlayToCompletion(_ notification: Notification) {
self.delegate?.videoPlayerDidPlayToCompletion(self)
OWSAudioSession.shared.endAudioActivity()
OWSAudioSession.shared.endAudioActivity(self.audioActivity)
}
}

@@ -5,6 +5,24 @@
import Foundation
import WebRTC
@objc
public class AudioActivity: NSObject {
let audioDescription: String
override public var description: String {
return "<\(self.logTag) audioDescription: \"\(audioDescription)\">"
}
public init(audioDescription: String) {
self.audioDescription = audioDescription
}
deinit {
OWSAudioSession.shared.ensureAudioSessionActivationState()
}
}
@objc
public class OWSAudioSession: NSObject {
@@ -13,14 +31,18 @@ public class OWSAudioSession: NSObject {
private override init() {}
private let avAudioSession = AVAudioSession.sharedInstance()
private var currentActivities: [Weak<AudioActivity>] = []
// Ignores hardware mute switch, plays through external speaker
public func setPlaybackCategory() {
public func setPlaybackCategory(audioActivity: AudioActivity) {
Logger.debug("\(logTag) in \(#function)")
// In general, we should have put the audio session back to its default
// category when we were done with whatever activity required it to be modified
assert(avAudioSession.category == AVAudioSessionCategorySoloAmbient)
startAudioActivity(audioActivity)
do {
try avAudioSession.setCategory(AVAudioSessionCategoryPlayback)
} catch {
@@ -28,7 +50,7 @@ public class OWSAudioSession: NSObject {
}
}
public func setRecordCategory() -> Bool {
public func setRecordCategory(audioActivity: AudioActivity) -> Bool {
Logger.debug("\(logTag) in \(#function)")
// In general, we should have put the audio session back to its default
@@ -37,6 +59,8 @@ public class OWSAudioSession: NSObject {
assert(avAudioSession.recordPermission() == .granted)
startAudioActivity(audioActivity)
do {
try avAudioSession.setCategory(AVAudioSessionCategoryRecord)
return true
@@ -46,8 +70,38 @@ public class OWSAudioSession: NSObject {
}
}
public func endAudioActivity() {
Logger.debug("\(logTag) in \(#function)")
public func startAudioActivity(_ audioActivity: AudioActivity) {
Logger.debug("\(logTag) in \(#function) with \(audioActivity)")
self.currentActivities.append(Weak(value: audioActivity))
}
public func endAudioActivity(_ audioActivity: AudioActivity) {
Logger.debug("\(logTag) in \(#function) with audioActivity: \(audioActivity)")
currentActivities = currentActivities.filter { return $0.value != audioActivity }
ensureAudioSessionActivationState()
}
fileprivate func ensureAudioSessionActivationState() {
// Cull any stale activities
currentActivities = currentActivities.flatMap { oldActivity in
guard oldActivity.value != nil else {
// Normally we should be explicitly stopping an audio activity, but this allows
// for recovery if the owner of the AudioActivity was GC'd without ending its
// audio activity
Logger.warn("\(logTag) an old activity has been gc'd")
return nil
}
// return any still-active activities
return oldActivity
}
guard currentActivities.count == 0 else {
Logger.debug("\(logTag) not deactivating due to currentActivities: \(currentActivities)")
return
}
do {
try avAudioSession.setCategory(AVAudioSessionCategorySoloAmbient)
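
The net effect is a small reference-counting scheme: every consumer registers an AudioActivity, and the session is only returned to the default solo-ambient category once no live activity remains (stale, deallocated activities are culled first). A hedged usage sketch, assuming the API above; the activity descriptions are purely illustrative:

```swift
import Foundation

let longLivedActivity = AudioActivity(audioDescription: "[Example] long-lived activity")
let playbackActivity = AudioActivity(audioDescription: "[Example] short playback")

// Two activities overlap.
OWSAudioSession.shared.startAudioActivity(longLivedActivity)
OWSAudioSession.shared.setPlaybackCategory(audioActivity: playbackActivity)

// Playback finishes first. The session stays configured because another
// activity is still present in currentActivities.
OWSAudioSession.shared.endAudioActivity(playbackActivity)

// Only when the last activity ends does ensureAudioSessionActivationState()
// find the list empty and hand the session back to the system.
OWSAudioSession.shared.endAudioActivity(longLivedActivity)
```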

@@ -1,20 +1,19 @@
//
// Copyright © 2017 Open Whisper Systems. All rights reserved.
// Copyright (c) 2018 Open Whisper Systems. All rights reserved.
//
/**
* Container for a weakly referenced object.
*
* Only use this for |T| with reference-semantic entities
* e.g. inheriting from AnyObject or Class-only protocols, but not structs or enums.
*
* That is - <T> should inherit from AnyObject or Class-only protocols, but not structs or enums.
*
* Based on https://devforums.apple.com/message/981472#981472, but also supports class-only protocols
*/
struct Weak<T> {
public struct Weak<T> {
private weak var _value: AnyObject?
var value: T? {
public var value: T? {
get {
return _value as? T
}
@@ -23,7 +22,7 @@ struct Weak<T> {
}
}
init(value: T) {
public init(value: T) {
self.value = value
}
}
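
Since every holder of a Weak wrapper must tolerate the referent disappearing, a short usage sketch may help. It assumes the Weak type above; Token is a made-up class for illustration:

```swift
// Token is a hypothetical reference type; per the doc comment above, Weak<T>
// should only be used with reference-semantic T.
final class Token {}

var strong: Token? = Token()
let slot = Weak(value: strong!)
assert(slot.value != nil)   // The wrapper alone does not keep the Token alive.

strong = nil                // Drop the last strong reference...
assert(slot.value == nil)   // ...and the wrapper now reads back as nil.

// The bookkeeping OWSAudioSession relies on: drop entries whose referents
// have already been deallocated.
var activities: [Weak<Token>] = [slot]
activities = activities.filter { $0.value != nil }
assert(activities.isEmpty)
```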