//
//  CallAudioService.swift
//
//
// Copyright (c) 2018 Open Whisper Systems. All rights reserved.
//
import Foundation
import AVFoundation
import SignalServiceKit
import SignalMessaging
/// A selectable audio route for a call (built-in speaker, earpiece, bluetooth, etc.).
///
/// The built-in loud speaker is modeled specially (no `portDescription`) because
/// it does not appear among `AVAudioSession.availableInputs`.
struct AudioSource: Hashable {

    let image: UIImage
    let localizedName: String
    let portDescription: AVAudioSessionPortDescription?

    /// The built-in loud speaker / aka speakerphone
    let isBuiltInSpeaker: Bool

    /// The built-in quiet speaker, aka the normal phone handset receiver earpiece
    let isBuiltInEarPiece: Bool

    init(localizedName: String, image: UIImage, isBuiltInSpeaker: Bool, isBuiltInEarPiece: Bool, portDescription: AVAudioSessionPortDescription? = nil) {
        self.localizedName = localizedName
        self.image = image
        self.isBuiltInSpeaker = isBuiltInSpeaker
        self.isBuiltInEarPiece = isBuiltInEarPiece
        self.portDescription = portDescription
    }

    init(portDescription: AVAudioSessionPortDescription) {
        // NOTE(review): the earpiece is detected by comparing against `.builtInMic`
        // (the earpiece route shares the built-in mic port) — confirm this matches
        // the intended devices.
        let isBuiltInEarPiece = convertFromAVAudioSessionPort(portDescription.portType) == convertFromAVAudioSessionPort(AVAudioSession.Port.builtInMic)

        // portDescription.portName works well for BT linked devices, but if we are using
        // the built in mic, we have "iPhone Microphone" which is a little awkward.
        // In that case, instead we prefer just the model name e.g. "iPhone" or "iPad"
        let localizedName = isBuiltInEarPiece ? UIDevice.current.localizedModel : portDescription.portName

        self.init(localizedName: localizedName,
                  image: #imageLiteral(resourceName: "button_phone_white"), // TODO
                  isBuiltInSpeaker: false,
                  isBuiltInEarPiece: isBuiltInEarPiece,
                  portDescription: portDescription)
    }

    // Speakerphone is handled separately from the other audio routes as it doesn't appear as an "input"
    static var builtInSpeaker: AudioSource {
        return self.init(localizedName: NSLocalizedString("AUDIO_ROUTE_BUILT_IN_SPEAKER", comment: "action sheet button title to enable built in speaker during a call"),
                         image: #imageLiteral(resourceName: "button_phone_white"), //TODO
                         isBuiltInSpeaker: true,
                         isBuiltInEarPiece: false)
    }

    // MARK: Hashable

    static func ==(lhs: AudioSource, rhs: AudioSource) -> Bool {
        // Simply comparing the `portDescription` vs the `portDescription.uid`
        // caused multiple instances of the built in mic to turn up in a set.
        if lhs.isBuiltInSpeaker && rhs.isBuiltInSpeaker {
            return true
        }
        if lhs.isBuiltInSpeaker || rhs.isBuiltInSpeaker {
            return false
        }
        guard let lhsPortDescription = lhs.portDescription else {
            owsFailDebug("only the built in speaker should lack a port description")
            return false
        }
        guard let rhsPortDescription = rhs.portDescription else {
            owsFailDebug("only the built in speaker should lack a port description")
            return false
        }
        return lhsPortDescription.uid == rhsPortDescription.uid
    }

    /// Hash mirrors `==`: the built-in speaker is its own bucket; everything else
    /// hashes on the port's unique identifier.
    /// (Replaces the deprecated `var hashValue` override; `hashValue` is
    /// synthesized from this since Swift 4.2.)
    func hash(into hasher: inout Hasher) {
        guard let portDescription = self.portDescription else {
            assert(self.isBuiltInSpeaker)
            hasher.combine("Built In Speaker")
            return
        }
        hasher.combine(portDescription.uid)
    }
}
/// Receives notifications about call-audio state changes.
/// Class-bound (`AnyObject` replaces the deprecated `class` constraint) so the
/// service can hold it weakly.
protocol CallAudioServiceDelegate: AnyObject {
    /// Fired when the speakerphone output route is enabled or disabled.
    func callAudioService(_ callAudioService: CallAudioService, didUpdateIsSpeakerphoneEnabled isEnabled: Bool)
    /// Fired when the audio session category/mode/options actually change.
    func callAudioServiceDidChangeAudioSession(_ callAudioService: CallAudioService)
}
/// Manages the audio side of a call: audio-session category/mode/options,
/// ring/busy/connecting sounds, vibration, and speakerphone state.
///
/// Speakerphone can be manipulated by the in-app callscreen or via the system
/// callscreen (CallKit). Enabling it doesn't trigger a CXAction, so rather than
/// tracking state we read the ground truth from the AVAudioSession route.
@objc class CallAudioService: NSObject, CallObserver {

    private var vibrateTimer: Timer?
    private let handleRinging: Bool

    /// Token for the block-based route-change observer. Block-based observers
    /// are NOT unregistered by `removeObserver(self)`; the returned token must
    /// be kept and removed explicitly in `deinit`.
    private var routeChangeObserverToken: NSObjectProtocol?

    weak var delegate: CallAudioServiceDelegate? {
        willSet {
            // The delegate is set once (or cleared); catching accidental re-assignment.
            assert(newValue == nil || delegate == nil)
        }
    }

    // MARK: Vibration config

    /// Interval between ring "buzz" pairs while ringing.
    private let vibrateRepeatDuration = 1.6

    // Our ring buzz is a pair of vibrations.
    // `pulseDuration` is the small pause between the two vibrations in the pair.
    private let pulseDuration = 0.2

    var audioSession: OWSAudioSession {
        return Environment.shared.audioSession
    }

    var avAudioSession: AVAudioSession {
        return AVAudioSession.sharedInstance()
    }

    // MARK: - Initializers

    init(handleRinging: Bool) {
        self.handleRinging = handleRinging

        super.init()

        // We cannot assert singleton here, because this class gets rebuilt when the user changes relevant call settings

        // Configure audio session so we don't prompt user with Record permission until call is connected.
        audioSession.configureRTCAudio()

        // Capture self weakly: NotificationCenter retains the block for the
        // lifetime of the registration, so a strong capture here would create
        // a retain cycle and prevent deinit from ever running.
        routeChangeObserverToken = NotificationCenter.default.addObserver(forName: AVAudioSession.routeChangeNotification, object: avAudioSession, queue: nil) { [weak self] _ in
            assert(!Thread.isMainThread)
            self?.updateIsSpeakerphoneEnabled()
        }
    }

    deinit {
        // Block-based observers must be removed via their token;
        // `removeObserver(self)` only removes selector-based registrations.
        if let token = routeChangeObserverToken {
            NotificationCenter.default.removeObserver(token)
        }
        NotificationCenter.default.removeObserver(self)
    }

    // MARK: - CallObserver

    internal func stateDidChange(call: SignalCall, state: CallState) {
        AssertIsOnMainThread()
        self.handleState(call: call)
    }

    internal func muteDidChange(call: SignalCall, isMuted: Bool) {
        AssertIsOnMainThread()
        ensureProperAudioSession(call: call)
    }

    internal func holdDidChange(call: SignalCall, isOnHold: Bool) {
        AssertIsOnMainThread()
        ensureProperAudioSession(call: call)
    }

    internal func audioSourceDidChange(call: SignalCall, audioSource: AudioSource?) {
        AssertIsOnMainThread()
        ensureProperAudioSession(call: call)

        // Mirror the chosen source into the speakerphone flag so the UI updates.
        if let audioSource = audioSource, audioSource.isBuiltInSpeaker {
            self.isSpeakerphoneEnabled = true
        } else {
            self.isSpeakerphoneEnabled = false
        }
    }

    internal func hasLocalVideoDidChange(call: SignalCall, hasLocalVideo: Bool) {
        AssertIsOnMainThread()
        ensureProperAudioSession(call: call)
    }

    // Speakerphone can be manipulated by the in-app callscreen or via the system callscreen (CallKit).
    // Unlike other CallKit CallScreen buttons, enabling doesn't trigger a CXAction, so it's not as simple
    // to track state changes. Instead we never store the state and directly access the ground-truth in the
    // AVAudioSession.
    private(set) var isSpeakerphoneEnabled: Bool = false {
        didSet {
            self.delegate?.callAudioService(self, didUpdateIsSpeakerphoneEnabled: isSpeakerphoneEnabled)
        }
    }

    public func requestSpeakerphone(isEnabled: Bool) {
        // This is a little too slow to execute on the main thread and the results are not immediately available after execution
        // anyway, so we dispatch async. If you need to know the new value, you'll need to check isSpeakerphoneEnabled and take
        // advantage of the CallAudioServiceDelegate.callAudioService(_:didUpdateIsSpeakerphoneEnabled:)
        DispatchQueue.global().async {
            do {
                try self.avAudioSession.overrideOutputAudioPort( isEnabled ? .speaker : .none )
            } catch {
                owsFailDebug("failed to set \(#function) = \(isEnabled) with error: \(error)")
            }
        }
    }

    /// Re-derives the speakerphone flag from the current output route and
    /// publishes it on the main queue (callers may be on the route-change thread).
    private func updateIsSpeakerphoneEnabled() {
        let value = avAudioSession.currentRoute.outputs.contains { (portDescription: AVAudioSessionPortDescription) -> Bool in
            return portDescription.portName == convertFromAVAudioSessionPort(AVAudioSession.Port.builtInSpeaker)
        }
        DispatchQueue.main.async {
            self.isSpeakerphoneEnabled = value
        }
    }

    /// Chooses the audio-session category/mode/options appropriate for the
    /// call's current state, then applies the call's preferred input.
    private func ensureProperAudioSession(call: SignalCall?) {
        AssertIsOnMainThread()

        guard let call = call, !call.isTerminated else {
            // Revert to default audio
            setAudioSession(category: convertFromAVAudioSessionCategory(AVAudioSession.Category.soloAmbient),
                            mode: convertFromAVAudioSessionMode(AVAudioSession.Mode.default))
            return
        }

        // Disallow bluetooth while (and only while) the user has explicitly chosen the built in receiver.
        //
        // NOTE: I'm actually not sure why this is required - it seems like we should just be able
        // to setPreferredInput to call.audioSource.portDescription in this case,
        // but in practice I'm seeing the call revert to the bluetooth headset.
        // Presumably something else (in WebRTC?) is touching our shared AudioSession. - mjk
        let options: AVAudioSession.CategoryOptions = call.audioSource?.isBuiltInEarPiece == true ? [] : [.allowBluetooth]

        if call.state == .localRinging {
            // SoloAmbient plays through speaker, but respects silent switch
            setAudioSession(category: convertFromAVAudioSessionCategory(AVAudioSession.Category.soloAmbient),
                            mode: convertFromAVAudioSessionMode(AVAudioSession.Mode.default))
        } else if call.hasLocalVideo {
            // Because ModeVideoChat affects gain, we don't want to apply it until the call is connected.
            // otherwise sounds like ringing will be extra loud for video vs. speakerphone

            // Apple Docs say that setting mode to AVAudioSessionModeVideoChat has the
            // side effect of setting options: .allowBluetooth, when I remove the (seemingly unnecessary)
            // option, and inspect AVAudioSession.sharedInstance.categoryOptions == 0. And availableInputs
            // does not include my linked bluetooth device
            setAudioSession(category: convertFromAVAudioSessionCategory(AVAudioSession.Category.playAndRecord),
                            mode: convertFromAVAudioSessionMode(AVAudioSession.Mode.videoChat),
                            options: options)
        } else {
            // Apple Docs say that setting mode to AVAudioSessionModeVoiceChat has the
            // side effect of setting options: .allowBluetooth, when I remove the (seemingly unnecessary)
            // option, and inspect AVAudioSession.sharedInstance.categoryOptions == 0. And availableInputs
            // does not include my linked bluetooth device
            setAudioSession(category: convertFromAVAudioSessionCategory(AVAudioSession.Category.playAndRecord),
                            mode: convertFromAVAudioSessionMode(AVAudioSession.Mode.voiceChat),
                            options: options)
        }

        do {
            // It's important to set preferred input *after* ensuring properAudioSession
            // because some sources are only valid for certain category/option combinations.
            let existingPreferredInput = avAudioSession.preferredInput
            if existingPreferredInput != call.audioSource?.portDescription {
                Logger.info("changing preferred input: \(String(describing: existingPreferredInput)) -> \(String(describing: call.audioSource?.portDescription))")
                try avAudioSession.setPreferredInput(call.audioSource?.portDescription)
            }
        } catch {
            owsFailDebug("failed setting audio source with error: \(error) isSpeakerPhoneEnabled: \(call.isSpeakerphoneEnabled)")
        }
    }

    // MARK: - Service action handlers

    public func didUpdateVideoTracks(call: SignalCall?) {
        Logger.verbose("")
        self.ensureProperAudioSession(call: call)
    }

    /// Dispatches on the call's new state: reconfigures the audio session and
    /// plays/stops the matching sound or vibration.
    public func handleState(call: SignalCall) {
        assert(Thread.isMainThread)

        Logger.verbose("new state: \(call.state)")

        // Stop playing sounds while switching audio session so we don't
        // get any blips across a temporary unintended route.
        stopPlayingAnySounds()
        self.ensureProperAudioSession(call: call)

        switch call.state {
        case .idle: handleIdle(call: call)
        case .dialing: handleDialing(call: call)
        case .answering: handleAnswering(call: call)
        case .remoteRinging: handleRemoteRinging(call: call)
        case .localRinging: handleLocalRinging(call: call)
        case .connected: handleConnected(call: call)
        case .reconnecting: handleReconnecting(call: call)
        case .localFailure: handleLocalFailure(call: call)
        case .localHangup: handleLocalHangup(call: call)
        case .remoteHangup: handleRemoteHangup(call: call)
        case .remoteBusy: handleBusy(call: call)
        }
    }

    private func handleIdle(call: SignalCall) {
        Logger.debug("")
    }

    private func handleDialing(call: SignalCall) {
        AssertIsOnMainThread()
        Logger.debug("")

        // HACK: Without this async, dialing sound only plays once. I don't really understand why. Does the audioSession
        // need some time to settle? Is somethign else interrupting our session?
        DispatchQueue.main.asyncAfter(deadline: DispatchTime.now() + 0.2) {
            self.play(sound: OWSSound.callConnecting)
        }
    }

    private func handleAnswering(call: SignalCall) {
        AssertIsOnMainThread()
        Logger.debug("")
    }

    private func handleRemoteRinging(call: SignalCall) {
        AssertIsOnMainThread()
        Logger.debug("")

        self.play(sound: OWSSound.callOutboundRinging)
    }

    private func handleLocalRinging(call: SignalCall) {
        AssertIsOnMainThread()
        Logger.debug("")

        startRinging(call: call)
    }

    private func handleConnected(call: SignalCall) {
        AssertIsOnMainThread()
        Logger.debug("")
    }

    private func handleReconnecting(call: SignalCall) {
        AssertIsOnMainThread()
        Logger.debug("")
    }

    private func handleLocalFailure(call: SignalCall) {
        AssertIsOnMainThread()
        Logger.debug("")

        play(sound: OWSSound.callFailure)
        handleCallEnded(call: call)
    }

    private func handleLocalHangup(call: SignalCall) {
        AssertIsOnMainThread()
        Logger.debug("")

        handleCallEnded(call: call)
    }

    private func handleRemoteHangup(call: SignalCall) {
        AssertIsOnMainThread()
        Logger.debug("")

        vibrate()
        handleCallEnded(call: call)
    }

    private func handleBusy(call: SignalCall) {
        AssertIsOnMainThread()
        Logger.debug("")

        play(sound: OWSSound.callBusy)

        // Let the busy sound play for 4 seconds. The full file is longer than necessary
        DispatchQueue.main.asyncAfter(deadline: DispatchTime.now() + 4.0) {
            self.handleCallEnded(call: call)
        }
    }

    private func handleCallEnded(call: SignalCall) {
        AssertIsOnMainThread()
        Logger.debug("")

        // Stop solo audio, revert to default.
        isSpeakerphoneEnabled = false
        setAudioSession(category: convertFromAVAudioSessionCategory(AVAudioSession.Category.soloAmbient))
    }

    // MARK: Playing Sounds

    var currentPlayer: OWSAudioPlayer?

    private func stopPlayingAnySounds() {
        currentPlayer?.stop()
        stopAnyRingingVibration()
    }

    private func play(sound: OWSSound) {
        guard let newPlayer = OWSSounds.audioPlayer(for: sound, audioBehavior: .call) else {
            owsFailDebug("unable to build player for sound: \(OWSSounds.displayName(for: sound))")
            return
        }
        Logger.info("playing sound: \(OWSSounds.displayName(for: sound))")

        // It's important to stop the current player **before** starting the new player. In the case that
        // we're playing the same sound, since the player is memoized on the sound instance, we'd otherwise
        // stop the sound we just started.
        self.currentPlayer?.stop()
        newPlayer.play()
        self.currentPlayer = newPlayer
    }

    // MARK: - Ringing

    private func startRinging(call: SignalCall) {
        guard handleRinging else {
            Logger.debug("ignoring \(#function) since CallKit handles it's own ringing state")
            return
        }

        vibrateTimer = WeakTimer.scheduledTimer(timeInterval: vibrateRepeatDuration, target: self, userInfo: nil, repeats: true) {[weak self] _ in
            self?.ringVibration()
        }
        vibrateTimer?.fire()
        play(sound: .defaultiOSIncomingRingtone)
    }

    private func stopAnyRingingVibration() {
        guard handleRinging else {
            Logger.debug("ignoring \(#function) since CallKit handles it's own ringing state")
            return
        }
        Logger.debug("")

        // Stop vibrating
        vibrateTimer?.invalidate()
        vibrateTimer = nil
    }

    // public so it can be called by timer via selector
    public func ringVibration() {
        // Since a call notification is more urgent than a message notifaction, we
        // vibrate twice, like a pulse, to differentiate from a normal notification vibration.
        vibrate()
        DispatchQueue.main.asyncAfter(deadline: DispatchTime.now() + pulseDuration) {
            self.vibrate()
        }
    }

    func vibrate() {
        // TODO implement HapticAdapter for iPhone7 and up
        AudioServicesPlaySystemSound(kSystemSoundID_Vibrate)
    }

    // MARK: - AudioSession MGMT

    // TODO move this to CallAudioSession?

    // Note this method is sensitive to the current audio session configuration.
    // Specifically if you call it while speakerphone is enabled you won't see
    // any connected bluetooth routes.
    var availableInputs: [AudioSource] {
        guard let availableInputs = avAudioSession.availableInputs else {
            // I'm not sure why this would happen, but it may indicate an error.
            owsFailDebug("No available inputs or inputs not ready")
            return [AudioSource.builtInSpeaker]
        }
        Logger.info("availableInputs: \(availableInputs)")

        return [AudioSource.builtInSpeaker] + availableInputs.map { portDescription in
            return AudioSource(portDescription: portDescription)
        }
    }

    /// Returns the call's explicit source if the user picked one, otherwise the
    /// first input of the current system route (e.g. bluetooth if connected,
    /// else the receiver).
    func currentAudioSource(call: SignalCall) -> AudioSource? {
        if let audioSource = call.audioSource {
            return audioSource
        }

        // Before the user has specified an audio source on the call, we rely on the existing
        // system state to determine the current audio source.
        // If a bluetooth is connected, this will be bluetooth, otherwise
        // this will be the receiver.
        guard let portDescription = avAudioSession.currentRoute.inputs.first else {
            return nil
        }
        return AudioSource(portDescription: portDescription)
    }

    /// Applies the given category/mode/options to the shared AVAudioSession,
    /// skipping the (expensive) call entirely when nothing would change, and
    /// notifying the delegate only when something did.
    private func setAudioSession(category: String,
                                 mode: String? = nil,
                                 options: AVAudioSession.CategoryOptions = AVAudioSession.CategoryOptions(rawValue: 0)) {
        AssertIsOnMainThread()

        var audioSessionChanged = false
        do {
            if #available(iOS 10.0, *), let mode = mode {
                let oldCategory = convertFromAVAudioSessionCategory(avAudioSession.category)
                let oldMode = convertFromAVAudioSessionMode(avAudioSession.mode)
                let oldOptions = avAudioSession.categoryOptions

                guard oldCategory != category || oldMode != mode || oldOptions != options else {
                    return
                }

                audioSessionChanged = true

                if oldCategory != category {
                    Logger.debug("audio session changed category: \(oldCategory) -> \(category) ")
                }
                if oldMode != mode {
                    Logger.debug("audio session changed mode: \(oldMode) -> \(mode) ")
                }
                if oldOptions != options {
                    Logger.debug("audio session changed options: \(oldOptions) -> \(options) ")
                }
                try avAudioSession.setCategory(convertToAVAudioSessionCategory(category), mode: AVAudioSession.Mode(rawValue: mode), options: options)
            } else {
                // Pre-iOS 10 (or no mode supplied): category+options only.
                let oldCategory = convertFromAVAudioSessionCategory(avAudioSession.category)
                let oldOptions = avAudioSession.categoryOptions

                guard oldCategory != category || oldOptions != options else {
                    return
                }

                audioSessionChanged = true

                if oldCategory != category {
                    Logger.debug("audio session changed category: \(oldCategory) -> \(category) ")
                }
                if oldOptions != options {
                    Logger.debug("audio session changed options: \(oldOptions) -> \(options) ")
                }
                try avAudioSession.setCategory(category, with: options)
            }
        } catch {
            let message = "failed to set category: \(category) mode: \(String(describing: mode)), options: \(options) with error: \(error)"
            owsFailDebug(message)
        }

        if audioSessionChanged {
            Logger.info("")
            self.delegate?.callAudioServiceDidChangeAudioSession(self)
        }
    }
}
// MARK: - Swift 4.2 migrator helpers
// Helper function inserted by Swift 4.2 migrator.
// Unwraps an `AVAudioSession.Port` to its raw String so call sites can keep
// comparing port types as plain strings (pre-4.2 API shape).
fileprivate func convertFromAVAudioSessionPort(_ input: AVAudioSession.Port) -> String {
return input.rawValue
}
// Helper function inserted by Swift 4.2 migrator.
// Unwraps an `AVAudioSession.Category` to its raw String (pre-4.2 API shape).
fileprivate func convertFromAVAudioSessionCategory(_ input: AVAudioSession.Category) -> String {
return input.rawValue
}
// Helper function inserted by Swift 4.2 migrator.
// Unwraps an `AVAudioSession.Mode` to its raw String (pre-4.2 API shape).
fileprivate func convertFromAVAudioSessionMode(_ input: AVAudioSession.Mode) -> String {
return input.rawValue
}
// Helper function inserted by Swift 4.2 migrator.
// Wraps a raw String back into an `AVAudioSession.Category` for the
// strongly-typed `setCategory(_:mode:options:)` overload.
fileprivate func convertToAVAudioSessionCategory(_ input: String) -> AVAudioSession.Category {
return AVAudioSession.Category(rawValue: input)
}