Add video-related views.

// FREEBIE
This commit is contained in:
Matthew Chen 2017-01-26 10:05:41 -05:00
parent e556a369bb
commit a65d3b7c4e
8 changed files with 512 additions and 72 deletions

View File

@ -72,7 +72,7 @@
<key>NSAppleMusicUsageDescription</key>
<string>Signal needs to use Apple Music to play media attachments.</string>
<key>NSCameraUsageDescription</key>
<string>Signal will let you take a photo to send to your contacts. You can review it before sending.</string>
<string>Signal uses your camera to take photos and for video calls.</string>
<key>NSContactsUsageDescription</key>
<string>Signal uses your contacts to find users you know. We do not store your contacts on the server.</string>
<key>NSMicrophoneUsageDescription</key>

View File

@ -5,11 +5,13 @@
#import <Foundation/Foundation.h>
#import "AppAudioManager.h"
#import "Asserts.h"
#import "Environment.h"
#import "NotificationsManager.h"
#import "OWSCallNotificationsAdaptee.h"
#import "OWSContactAvatarBuilder.h"
#import "OWSContactsManager.h"
#import "OWSDispatch.h"
#import "OWSLogger.h"
#import "OWSWebRTCDataProtos.pb.h"
#import "PhoneNumber.h"
@ -54,3 +56,5 @@
#import <SignalServiceKit/TSStorageManager+keyingMaterial.h>
#import <SignalServiceKit/TSThread.h>
#import <WebRTC/RTCAudioSession.h>
#import <WebRTC/RTCCameraPreviewView.h>
#import <WebRTC/RTCEAGLVideoView.h>

View File

@ -1,5 +1,5 @@
//
// Copyright © 2017 Open Whisper Systems. All rights reserved.
// Copyright (c) 2017 Open Whisper Systems. All rights reserved.
//
import Foundation
@ -31,16 +31,17 @@ import Foundation
// MARK: - CallObserver
internal func stateDidChange(call: SignalCall, state: CallState) {
DispatchQueue.main.async {
self.handleState(state)
}
AssertIsOnMainThread()
self.handleState(state)
}
internal func muteDidChange(call: SignalCall, isMuted: Bool) {
AssertIsOnMainThread()
Logger.verbose("\(TAG) in \(#function) is no-op")
}
internal func speakerphoneDidChange(call: SignalCall, isEnabled: Bool) {
AssertIsOnMainThread()
if isEnabled {
setAudioSession(category: AVAudioSessionCategoryPlayAndRecord, options: .defaultToSpeaker)
} else {
@ -49,6 +50,7 @@ import Foundation
}
internal func hasVideoDidChange(call: SignalCall, hasVideo: Bool) {
AssertIsOnMainThread()
// no-op
}
@ -131,7 +133,7 @@ import Foundation
return
}
vibrateTimer = WeakTimer.scheduledTimer(timeInterval: vibrateRepeatDuration, target: self, userInfo: nil, repeats: true) {[weak self] timer in
vibrateTimer = WeakTimer.scheduledTimer(timeInterval: vibrateRepeatDuration, target: self, userInfo: nil, repeats: true) {[weak self] _ in
self?.ringVibration()
}
vibrateTimer?.fire()

View File

@ -80,12 +80,25 @@ enum CallError: Error {
// FIXME TODO do we need to timeout?
fileprivate let timeoutSeconds = 60
@objc class CallService: NSObject, PeerConnectionClientDelegate {
// All Observer methods will be invoked from the main thread.
protocol CallServiceObserver: class {
    /**
     * Fired whenever the local or remote video track becomes active or inactive,
     * including when the call terminates and both tracks are cleared (nil).
     * Always invoked on the main thread.
     */
    func didUpdateVideoTracks(localVideoTrack: RTCVideoTrack?,
                              remoteVideoTrack: RTCVideoTrack?)
}
// This class' state should only be accessed on the signaling queue, _except_
// the observer-related state, which should only be accessed on the main thread.
@objc class CallService: NSObject, CallObserver, PeerConnectionClientDelegate {
// MARK: - Properties
let TAG = "[CallService]"
var observers = [Weak<CallServiceObserver>]()
// MARK: Dependencies
let accountManager: AccountManager
@ -104,7 +117,18 @@ fileprivate let timeoutSeconds = 60
var peerConnectionClient: PeerConnectionClient?
// TODO code cleanup: move thread into SignalCall? Or refactor messageSender to take SignalRecipient identifier.
var thread: TSContactThread?
var call: SignalCall?
// The current call, if any. Set and cleared only on the signaling queue.
var call: SignalCall? {
    didSet {
        assertOnSignalingQueue()

        // Follow the active call: stop observing the previous call and
        // begin observing (and syncing state with) the new one.
        oldValue?.removeObserver(self)
        call?.addObserverAndSyncState(observer: self)

        // Video enablement is re-evaluated on the main thread, where
        // updateIsVideoEnabled() asserts it runs.
        DispatchQueue.main.async { [weak self] in
            self?.updateIsVideoEnabled()
        }
    }
}
/**
* In the process of establishing a connection between the clients (ICE process) we must exchange ICE updates.
@ -123,6 +147,26 @@ fileprivate let timeoutSeconds = 60
// Used to coordinate promises across delegate methods
var fulfillCallConnectedPromise: (() -> Void)?
// Local camera track, mirrored here for observer notification.
// `weak` because PeerConnectionClient retains the track itself.
// Accessed only on the signaling queue; assignment notifies observers
// via the didSet below.
weak var localVideoTrack: RTCVideoTrack? {
    didSet {
        assertOnSignalingQueue()

        Logger.info("\(self.TAG) \(#function)")

        fireDidUpdateVideoTracks()
    }
}
// Remote peer's video track, mirrored here for observer notification.
// `weak` because PeerConnectionClient retains the track itself.
// Accessed only on the signaling queue; assignment notifies observers
// via the didSet below.
weak var remoteVideoTrack: RTCVideoTrack? {
    didSet {
        assertOnSignalingQueue()

        Logger.info("\(self.TAG) \(#function)")

        fireDidUpdateVideoTracks()
    }
}
required init(accountManager: AccountManager, contactsManager: OWSContactsManager, messageSender: MessageSender, notificationsAdapter: CallNotificationsAdapter) {
self.accountManager = accountManager
self.messageSender = messageSender
@ -591,7 +635,7 @@ fileprivate let timeoutSeconds = 60
// We don't risk transmitting any media until the remote client has admitted to being connected.
peerConnectionClient.setAudioEnabled(enabled: !call.isMuted)
peerConnectionClient.setVideoEnabled(enabled: call.hasVideo)
peerConnectionClient.setLocalVideoEnabled(enabled: shouldHaveLocalVideoTrack())
}
/**
@ -731,7 +775,7 @@ fileprivate let timeoutSeconds = 60
}
call.hasVideo = hasVideo
peerConnectionClient.setVideoEnabled(enabled: hasVideo)
peerConnectionClient.setLocalVideoEnabled(enabled: shouldHaveLocalVideoTrack())
}
func handleCallKitStartVideo() {
@ -804,7 +848,7 @@ fileprivate let timeoutSeconds = 60
/**
* The connection has been established. The clients can now communicate.
*/
func peerConnectionClientIceConnected(_ peerconnectionClient: PeerConnectionClient) {
internal func peerConnectionClientIceConnected(_ peerconnectionClient: PeerConnectionClient) {
CallService.signalingQueue.async {
self.handleIceConnected()
}
@ -813,7 +857,7 @@ fileprivate let timeoutSeconds = 60
/**
* The connection failed to establish. The clients will not be able to communicate.
*/
func peerConnectionClientIceFailed(_ peerconnectionClient: PeerConnectionClient) {
internal func peerConnectionClientIceFailed(_ peerconnectionClient: PeerConnectionClient) {
CallService.signalingQueue.async {
self.handleFailedCall(error: CallError.disconnected)
}
@ -824,7 +868,7 @@ fileprivate let timeoutSeconds = 60
* reach the local client via the internet. The delegate must shuttle these IceCandates to the other (remote) client
* out of band, as part of establishing a connection over WebRTC.
*/
func peerConnectionClient(_ peerconnectionClient: PeerConnectionClient, addedLocalIceCandidate iceCandidate: RTCIceCandidate) {
internal func peerConnectionClient(_ peerconnectionClient: PeerConnectionClient, addedLocalIceCandidate iceCandidate: RTCIceCandidate) {
CallService.signalingQueue.async {
self.handleLocalAddedIceCandidate(iceCandidate)
}
@ -833,17 +877,38 @@ fileprivate let timeoutSeconds = 60
/**
* Once the peerconnection is established, we can receive messages via the data channel, and notify the delegate.
*/
func peerConnectionClient(_ peerconnectionClient: PeerConnectionClient, received dataChannelMessage: OWSWebRTCProtosData) {
internal func peerConnectionClient(_ peerconnectionClient: PeerConnectionClient, received dataChannelMessage: OWSWebRTCProtosData) {
CallService.signalingQueue.async {
self.handleDataChannelMessage(dataChannelMessage)
}
}
/**
 * Delegate callback: the local video track was created or torn down.
 * Hops to the signaling queue, where all CallService state is mutated.
 */
internal func peerConnectionClient(_ peerconnectionClient: PeerConnectionClient, didUpdateLocal videoTrack: RTCVideoTrack?) {
    CallService.signalingQueue.async { [weak self] in
        guard let strongSelf = self else { return }
        // Assigning the property fires fireDidUpdateVideoTracks() from its
        // didSet observer. The original also called it explicitly, which
        // notified every observer twice per update.
        strongSelf.localVideoTrack = videoTrack
    }
}
/**
 * Delegate callback: the remote video track was added or removed.
 * Hops to the signaling queue, where all CallService state is mutated.
 */
internal func peerConnectionClient(_ peerconnectionClient: PeerConnectionClient, didUpdateRemote videoTrack: RTCVideoTrack?) {
    CallService.signalingQueue.async { [weak self] in
        guard let strongSelf = self else { return }
        // Assigning the property fires fireDidUpdateVideoTracks() from its
        // didSet observer. The original also called it explicitly, which
        // notified every observer twice per update.
        strongSelf.remoteVideoTrack = videoTrack
    }
}
// MARK: Helpers
/**
* Ensure that all `SignalCall` and `CallService` state is synchronized by only mutating signaling state in
* handleXXX methods, and putting those methods on the signaling queue.
*
* TODO: We might want to move this queue and method to OWSDispatch so that we can assert this in
* other classes like SignalCall as well.
*/
private func assertOnSignalingQueue() {
if #available(iOS 10.0, *) {
@ -902,6 +967,7 @@ fileprivate let timeoutSeconds = 60
*/
private func terminateCall() {
assertOnSignalingQueue()
Logger.debug("\(TAG) in \(#function)")
PeerConnectionClient.stopAudioSession()
@ -909,12 +975,121 @@ fileprivate let timeoutSeconds = 60
peerConnectionClient?.terminate()
peerConnectionClient = nil
localVideoTrack = nil
remoteVideoTrack = nil
call?.removeAllObservers()
call = nil
thread = nil
incomingCallPromise = nil
sendIceUpdatesImmediately = true
pendingIceUpdateMessages = []
fireDidUpdateVideoTracks()
}
// MARK: - CallObserver
internal func stateDidChange(call: SignalCall, state: CallState) {
    AssertIsOnMainThread()
    Logger.info("\(self.TAG) \(#function): \(state)")

    // Call state affects whether the local camera should be running
    // (see shouldHaveLocalVideoTrack()).
    self.updateIsVideoEnabled()
}
internal func hasVideoDidChange(call: SignalCall, hasVideo: Bool) {
    AssertIsOnMainThread()
    Logger.info("\(self.TAG) \(#function): \(hasVideo)")

    // Re-evaluate whether the local camera track should be enabled.
    self.updateIsVideoEnabled()
}
internal func muteDidChange(call: SignalCall, isMuted: Bool) {
    AssertIsOnMainThread()
    // Do nothing — audio mute state has no effect on the video tracks
    // this observer manages.
}
internal func speakerphoneDidChange(call: SignalCall, isEnabled: Bool) {
    AssertIsOnMainThread()
    // Do nothing — speaker routing has no effect on the video tracks
    // this observer manages.
}
// MARK: - Video
/**
 * Whether the local camera track should currently be enabled.
 * Must be called on the signaling queue, which owns `call`.
 */
private func shouldHaveLocalVideoTrack() -> Bool {
    assertOnSignalingQueue()

    // The iOS simulator doesn't provide any sort of camera capture
    // support or emulation (http://goo.gl/rHAnC1) so don't bother
    // trying to open a local stream.
    guard !Platform.isSimulator else {
        return false
    }
    // `guard let` replaces the original's `call != nil` check followed by
    // two force-unwraps of `call!`.
    guard let call = self.call else {
        return false
    }
    return call.state == .connected && call.hasVideo
}
/**
 * Recompute whether local video should flow and push the result to the
 * peer connection. Entry point is main thread; the work hops to the
 * signaling queue, which owns the class state it reads.
 */
private func updateIsVideoEnabled() {
    AssertIsOnMainThread()

    // Weak capture for consistency with fireDidUpdateVideoTracks(); avoids
    // extending the service's lifetime from an async block.
    CallService.signalingQueue.async { [weak self] in
        guard let strongSelf = self else { return }
        // Compute once (the original evaluated shouldHaveLocalVideoTrack()
        // separately for the log line and the setter).
        let enabled = strongSelf.shouldHaveLocalVideoTrack()
        Logger.info("\(strongSelf.TAG) \(#function): \(enabled)")
        strongSelf.peerConnectionClient?.setLocalVideoEnabled(enabled: enabled)
    }
}
// MARK: - Observers
// The observer-related methods should be invoked on the main thread.
func addObserverAndSyncState(observer: CallServiceObserver) {
    AssertIsOnMainThread()

    observers.append(Weak(value: observer))

    // Synchronize observer with current call state.
    // It's only safe to read the video track properties on the signaling
    // queue, so hop there to snapshot them...
    CallService.signalingQueue.async {
        let localVideoTrack = self.localVideoTrack
        let remoteVideoTrack = self.remoteVideoTrack

        // ...then deliver the snapshot back on the main thread, where
        // observer callbacks are expected. Note: the closure retains
        // `observer` strongly until delivery, by design.
        DispatchQueue.main.async {
            observer.didUpdateVideoTracks(localVideoTrack: localVideoTrack,
                                          remoteVideoTrack: remoteVideoTrack)
        }
    }
}
// The observer-related methods should be invoked on the main thread.
// Unregisters an observer. Main thread only.
func removeObserver(_ observer: CallServiceObserver) {
    AssertIsOnMainThread()

    // Drop every entry whose (still-alive) referent is this observer;
    // entries whose weak reference has lapsed are left untouched, exactly
    // as the original remove-by-index loop did.
    observers = observers.filter { $0.value !== observer }
}
// The observer-related methods should be invoked on the main thread.
// Unregisters every observer. Main thread only.
func removeAllObservers() {
    AssertIsOnMainThread()

    observers.removeAll()
}
// Notifies all observers of the current video-track state.
// Snapshots the tracks on the signaling queue (which owns them), then
// delivers the callbacks on the main thread (where observers live).
func fireDidUpdateVideoTracks() {
    assertOnSignalingQueue()

    let capturedLocalTrack = self.localVideoTrack
    let capturedRemoteTrack = self.remoteVideoTrack
    DispatchQueue.main.async { [weak self] in
        guard let strongSelf = self else { return }
        for observer in strongSelf.observers {
            observer.value?.didUpdateVideoTracks(localVideoTrack: capturedLocalTrack,
                                                 remoteVideoTrack: capturedRemoteTrack)
        }
    }
}
}

View File

@ -116,7 +116,9 @@ class NonCallKitCallUIAdaptee: CallUIAdaptee {
func localHangupCall(_ call: SignalCall) {
CallService.signalingQueue.async {
guard call.localId == self.callService.call?.localId else {
// If both parties hang up at the same moment,
// call might already be nil.
guard self.callService.call == nil || call.localId == self.callService.call?.localId else {
assertionFailure("\(self.TAG) in \(#function) localId does not match current call")
return
}

View File

@ -35,6 +35,16 @@ protocol PeerConnectionClientDelegate: class {
* Once the peerconnection is established, we can receive messages via the data channel, and notify the delegate.
*/
func peerConnectionClient(_ peerconnectionClient: PeerConnectionClient, received dataChannelMessage: OWSWebRTCProtosData)
/**
* Fired whenever the local video track becomes active or inactive.
*/
func peerConnectionClient(_ peerconnectionClient: PeerConnectionClient, didUpdateLocal videoTrack: RTCVideoTrack?)
/**
* Fired whenever the remote video track becomes active or inactive.
*/
func peerConnectionClient(_ peerconnectionClient: PeerConnectionClient, didUpdateRemote videoTrack: RTCVideoTrack?)
}
/**
@ -80,7 +90,8 @@ class PeerConnectionClient: NSObject, RTCPeerConnectionDelegate, RTCDataChannelD
// Video
private var videoSender: RTCRtpSender?
private var videoTrack: RTCVideoTrack?
private var localVideoTrack: RTCVideoTrack?
private var remoteVideoTrack: RTCVideoTrack?
private var cameraConstraints: RTCMediaConstraints
init(iceServers: [RTCIceServer], delegate: PeerConnectionClientDelegate) {
@ -92,7 +103,6 @@ class PeerConnectionClient: NSObject, RTCPeerConnectionDelegate, RTCDataChannelD
configuration.bundlePolicy = .maxBundle
configuration.rtcpMuxPolicy = .require
let connectionConstraintsDict = ["DtlsSrtpKeyAgreement": "true"]
connectionConstraints = RTCMediaConstraints(mandatoryConstraints: nil, optionalConstraints: connectionConstraintsDict)
@ -131,35 +141,45 @@ class PeerConnectionClient: NSObject, RTCPeerConnectionDelegate, RTCDataChannelD
return
}
// TODO: What are the best values to use here?
let mediaConstraintsDictionary = [
kRTCMediaConstraintsMinWidth: "240",
kRTCMediaConstraintsMinHeight: "320",
kRTCMediaConstraintsMaxWidth: "240",
kRTCMediaConstraintsMaxHeight: "320"
]
let cameraConstraints = RTCMediaConstraints(mandatoryConstraints:nil,
optionalConstraints:mediaConstraintsDictionary)
// TODO: Revisit the cameraConstraints.
let videoSource = factory.avFoundationVideoSource(with: cameraConstraints)
let videoTrack = factory.videoTrack(with: videoSource, trackId: Identifiers.videoTrack.rawValue)
self.videoTrack = videoTrack
videoSource.useBackCamera = false
let localVideoTrack = factory.videoTrack(with: videoSource, trackId: Identifiers.videoTrack.rawValue)
self.localVideoTrack = localVideoTrack
// Disable by default until call is connected.
// FIXME - do we require mic permissions at this point?
// if so maybe it would be better to not even add the track until the call is connected
// instead of creating it and disabling it.
videoTrack.isEnabled = false
// Occasionally seeing this crash on the next line, after a *second* call:
// -[__NSCFNumber length]: unrecognized selector sent to instance 0x1562c610
// Seems like either videoKind or videoStreamId (both of which are Strings) is being GC'd prematurely.
// Not sure why, but assigned the value to local vars above in hopes of avoiding it.
// let videoKind = kRTCMediaStreamTrackKindVideo
localVideoTrack.isEnabled = false
let videoSender = peerConnection.sender(withKind: kVideoTrackType, streamId: Identifiers.mediaStream.rawValue)
videoSender.track = videoTrack
videoSender.track = localVideoTrack
self.videoSender = videoSender
}
public func setVideoEnabled(enabled: Bool) {
guard let videoTrack = self.videoTrack else {
public func setLocalVideoEnabled(enabled: Bool) {
guard let localVideoTrack = self.localVideoTrack else {
let action = enabled ? "enable" : "disable"
Logger.error("\(TAG)) trying to \(action) videoTrack which doesn't exist")
return
}
videoTrack.isEnabled = enabled
localVideoTrack.isEnabled = enabled
if let delegate = delegate {
delegate.peerConnectionClient(self, didUpdateLocal: enabled ? localVideoTrack : nil)
}
}
// MARK: Audio
@ -294,7 +314,8 @@ class PeerConnectionClient: NSObject, RTCPeerConnectionDelegate, RTCDataChannelD
// we are likely to crash if we retain any peer connection properties when the peerconnection is released
Logger.debug("\(TAG) in \(#function)")
audioTrack = nil
videoTrack = nil
localVideoTrack = nil
remoteVideoTrack = nil
dataChannel = nil
audioSender = nil
videoSender = nil
@ -332,7 +353,9 @@ class PeerConnectionClient: NSObject, RTCPeerConnectionDelegate, RTCDataChannelD
return
}
delegate.peerConnectionClient(self, received: dataChannelMessage)
if let delegate = delegate {
delegate.peerConnectionClient(self, received: dataChannelMessage)
}
}
/** The data channel's |bufferedAmount| changed. */
@ -349,7 +372,14 @@ class PeerConnectionClient: NSObject, RTCPeerConnectionDelegate, RTCDataChannelD
/** Called when media is received on a new stream from remote peer. */
public func peerConnection(_ peerConnection: RTCPeerConnection, didAdd stream: RTCMediaStream) {
Logger.debug("\(TAG) didAdd stream:\(stream)")
Logger.debug("\(TAG) didAdd stream:\(stream) video tracks: \(stream.videoTracks.count) audio tracks: \(stream.audioTracks.count)")
if stream.videoTracks.count > 0 {
remoteVideoTrack = stream.videoTracks[0]
if let delegate = delegate {
delegate.peerConnectionClient(self, didUpdateRemote: remoteVideoTrack)
}
}
}
/** Called when a remote peer closes a stream. */
@ -367,10 +397,14 @@ class PeerConnectionClient: NSObject, RTCPeerConnectionDelegate, RTCDataChannelD
Logger.debug("\(TAG) didChange IceConnectionState:\(newState.debugDescription)")
switch newState {
case .connected, .completed:
self.delegate.peerConnectionClientIceConnected(self)
if let delegate = delegate {
delegate.peerConnectionClientIceConnected(self)
}
case .failed:
Logger.warn("\(self.TAG) RTCIceConnection failed.")
self.delegate.peerConnectionClientIceFailed(self)
if let delegate = delegate {
delegate.peerConnectionClientIceFailed(self)
}
case .disconnected:
Logger.warn("\(self.TAG) RTCIceConnection disconnected.")
default:
@ -386,7 +420,9 @@ class PeerConnectionClient: NSObject, RTCPeerConnectionDelegate, RTCDataChannelD
/** New ice candidate has been found. */
public func peerConnection(_ peerConnection: RTCPeerConnection, didGenerate candidate: RTCIceCandidate) {
Logger.debug("\(TAG) didGenerate IceCandidate:\(candidate.sdp)")
self.delegate.peerConnectionClient(self, addedLocalIceCandidate: candidate)
if let delegate = delegate {
delegate.peerConnectionClient(self, addedLocalIceCandidate: candidate)
}
}
/** Called when a group of local Ice candidates have been removed. */

View File

@ -17,6 +17,7 @@ enum CallState: String {
case remoteBusy // terminal
}
// All Observer methods will be invoked from the main thread.
protocol CallObserver: class {
func stateDidChange(call: SignalCall, state: CallState)
func hasVideoDidChange(call: SignalCall, hasVideo: Bool)
@ -26,6 +27,8 @@ protocol CallObserver: class {
/**
* Data model for a WebRTC backed voice/video call.
*
* This class' state should only be accessed on the signaling queue.
*/
@objc class SignalCall: NSObject {
@ -39,51 +42,97 @@ protocol CallObserver: class {
// Distinguishes between calls locally, e.g. in CallKit
let localId: UUID
var hasVideo = false {
didSet {
Logger.debug("\(TAG) hasVideo changed: \(oldValue) -> \(hasVideo)")
for observer in observers {
observer.value?.hasVideoDidChange(call: self, hasVideo: hasVideo)
// This should only occur on the signaling queue.
objc_sync_enter(self)
let observers = self.observers
let call = self
let hasVideo = self.hasVideo
objc_sync_exit(self)
DispatchQueue.main.async {
for observer in observers {
observer.value?.hasVideoDidChange(call: call, hasVideo: hasVideo)
}
}
}
}
var state: CallState {
didSet {
Logger.debug("\(TAG) state changed: \(oldValue) -> \(state)")
// This should only occur on the signaling queue.
objc_sync_enter(self)
Logger.debug("\(TAG) state changed: \(oldValue) -> \(self.state)")
// Update connectedDate
if state == .connected {
if self.state == .connected {
if connectedDate == nil {
connectedDate = NSDate()
}
} else {
connectedDate = nil
}
for observer in observers {
observer.value?.stateDidChange(call: self, state: state)
let observers = self.observers
let call = self
let state = self.state
objc_sync_exit(self)
DispatchQueue.main.async {
for observer in observers {
observer.value?.stateDidChange(call: call, state: state)
}
}
}
}
var isMuted = false {
didSet {
Logger.debug("\(TAG) muted changed: \(oldValue) -> \(isMuted)")
for observer in observers {
observer.value?.muteDidChange(call: self, isMuted: isMuted)
// This should only occur on the signaling queue.
objc_sync_enter(self)
Logger.debug("\(TAG) muted changed: \(oldValue) -> \(self.isMuted)")
let observers = self.observers
let call = self
let isMuted = self.isMuted
objc_sync_exit(self)
DispatchQueue.main.async {
for observer in observers {
observer.value?.muteDidChange(call: call, isMuted: isMuted)
}
}
}
}
var isSpeakerphoneEnabled = false {
didSet {
Logger.debug("\(TAG) isSpeakerphoneEnabled changed: \(oldValue) -> \(isSpeakerphoneEnabled)")
for observer in observers {
observer.value?.speakerphoneDidChange(call: self, isEnabled: isSpeakerphoneEnabled)
// This should only occur on the signaling queue.
objc_sync_enter(self)
Logger.debug("\(TAG) isSpeakerphoneEnabled changed: \(oldValue) -> \(self.isSpeakerphoneEnabled)")
let observers = self.observers
let call = self
let isSpeakerphoneEnabled = self.isSpeakerphoneEnabled
objc_sync_exit(self)
DispatchQueue.main.async {
for observer in observers {
observer.value?.speakerphoneDidChange(call: call, isEnabled: isSpeakerphoneEnabled)
}
}
}
}
var connectedDate: NSDate?
var error: CallError?
@ -108,26 +157,49 @@ protocol CallObserver: class {
// -
func addObserverAndSyncState(observer: CallObserver) {
objc_sync_enter(self)
observers.append(Weak(value: observer))
// Synchronize observer with current call state
observer.stateDidChange(call: self, state: self.state)
}
let call = self
let state = self.state
func removeObserver(_ observer: CallObserver) {
while let index = observers.index(where: { $0.value === observer }) {
observers.remove(at: index)
objc_sync_exit(self)
DispatchQueue.main.async {
// Synchronize observer with current call state
observer.stateDidChange(call: call, state: state)
}
}
// Unregisters an observer. Guarded by the same @synchronized-style lock
// as the rest of the observer list.
func removeObserver(_ observer: CallObserver) {
    objc_sync_enter(self)
    defer { objc_sync_exit(self) }

    // Drop every entry whose referent is this observer; equivalent to the
    // original remove-by-index loop.
    observers = observers.filter { $0.value !== observer }
}
// Unregisters every observer, under the observer-list lock.
func removeAllObservers() {
    objc_sync_enter(self)
    defer { objc_sync_exit(self) }

    observers.removeAll()
}
// MARK: Equatable
static func == (lhs: SignalCall, rhs: SignalCall) -> Bool {
    // `localId` is an immutable `let UUID`, so no synchronization is needed.
    // (The lock this replaces ran `objc_sync_enter(self)` inside a static
    // method, which locks the SignalCall *metatype* — not either instance —
    // and therefore protected nothing.)
    return lhs.localId == rhs.localId
}
static func newCallSignalingId() -> UInt64 {

View File

@ -7,10 +7,9 @@ import WebRTC
import PromiseKit
// TODO: Add category so that button handlers can be defined where button is created.
// TODO: Add logic to button handlers.
// TODO: Ensure buttons enabled & disabled as necessary.
@objc(OWSCallViewController)
class CallViewController: UIViewController, CallObserver {
class CallViewController: UIViewController, CallObserver, CallServiceObserver, RTCEAGLVideoViewDelegate {
enum CallDirection {
case unspecified, outgoing, incoming
@ -25,7 +24,6 @@ class CallViewController: UIViewController, CallObserver {
// MARK: Properties
var peerConnectionClient: PeerConnectionClient?
var callDirection: CallDirection = .unspecified
var thread: TSContactThread!
var call: SignalCall!
@ -60,6 +58,15 @@ class CallViewController: UIViewController, CallObserver {
var acceptIncomingButton: UIButton!
var declineIncomingButton: UIButton!
// MARK: Video Views
var remoteVideoView: RTCEAGLVideoView!
var localVideoView: RTCCameraPreviewView!
weak var localVideoTrack: RTCVideoTrack?
weak var remoteVideoTrack: RTCVideoTrack?
var remoteVideoSize: CGSize! = CGSize.zero
var videoViewConstraints: [NSLayoutConstraint] = []
// MARK: Control Groups
var allControls: [UIView] {
@ -132,19 +139,36 @@ class CallViewController: UIViewController, CallObserver {
// Subscribe for future call updates
call.addObserverAndSyncState(observer: self)
Environment.getCurrent().callService.addObserverAndSyncState(observer:self)
}
// MARK: - Create Views
// Builds the full view hierarchy for the call screen.
func createViews() {
    // Dark blurred backdrop behind everything else.
    blurView = UIVisualEffectView(effect: UIBlurEffect(style: .dark))
    self.view.addSubview(blurView)

    // Order matters: subviews added earlier sit beneath those added later,
    // so the video layers go in before contact info and call controls.
    createVideoViews()
    createContactViews()
    createOngoingCallControls()
    createIncomingCallControls()
}
// Creates the remote (full-screen) and local (preview) video views,
// both hidden until their tracks become active.
func createVideoViews() {
    remoteVideoView = RTCEAGLVideoView()
    remoteVideoView.delegate = self
    remoteVideoView.isHidden = true

    localVideoView = RTCCameraPreviewView()
    localVideoView.isHidden = true

    // Remote first so the local preview renders on top of it.
    self.view.addSubview(remoteVideoView)
    self.view.addSubview(localVideoView)
}
func createContactViews() {
contactNameLabel = UILabel()
contactNameLabel.font = UIFont.ows_lightFont(withSize:ScaleFromIPhone5To7Plus(32, 40))
@ -291,6 +315,8 @@ class CallViewController: UIViewController, CallObserver {
return row
}
// MARK: - Layout
override func updateViewConstraints() {
if !hasConstraints {
// We only want to create our constraints once.
@ -310,10 +336,20 @@ class CallViewController: UIViewController, CallObserver {
// The buttons have built-in 10% margins, so to appear centered
// the avatar's bottom spacing should be a bit less.
let avatarBottomSpacing = ScaleFromIPhone5To7Plus(18, 41)
// Layout of the local video view is a bit unusual because
// although the view is square, it will be used
let videoPreviewHMargin = CGFloat(0)
// Dark blurred background.
blurView.autoPinEdgesToSuperviewEdges()
// TODO: Prevent overlap of localVideoView and contact views.
localVideoView.autoPinEdge(toSuperviewEdge:.right, withInset:videoPreviewHMargin)
localVideoView.autoPinEdge(toSuperviewEdge:.top, withInset:topMargin)
let localVideoSize = ScaleFromIPhone5To7Plus(80, 100)
localVideoView.autoSetDimension(.width, toSize:localVideoSize)
localVideoView.autoSetDimension(.height, toSize:localVideoSize)
contactNameLabel.autoPinEdge(toSuperviewEdge:.top, withInset:topMargin)
contactNameLabel.autoPinWidthToSuperview(withMargin:contactHMargin)
contactNameLabel.setContentHuggingVerticalHigh()
@ -342,9 +378,60 @@ class CallViewController: UIViewController, CallObserver {
incomingCallView.setContentHuggingVerticalHigh()
}
updateVideoViewLayout()
super.updateViewConstraints()
}
internal func updateVideoViewLayout() {
    // Throw away the previous pass's sizing constraints before building new ones.
    NSLayoutConstraint.deactivate(self.videoViewConstraints)

    var constraints: [NSLayoutConstraint] = []

    // We fill the screen with the remote video. The remote video's
    // aspect ratio may not (and in fact will very rarely) match the
    // aspect ratio of the current device, so parts of the remote
    // video will be hidden offscreen.
    //
    // It's better to trim the remote video than to adopt a letterboxed
    // layout.
    if remoteVideoSize.width > 0 && remoteVideoSize.height > 0 &&
        self.view.bounds.size.width > 0 && self.view.bounds.size.height > 0 {

        // Aspect-fill: keep one dimension pinned to the screen and grow the
        // other so the video's own aspect ratio is preserved.
        var remoteVideoWidth = self.view.bounds.size.width
        var remoteVideoHeight = self.view.bounds.size.height
        if remoteVideoSize.width / self.view.bounds.size.width > remoteVideoSize.height / self.view.bounds.size.height {
            // Video is proportionally wider than the screen: overflow horizontally.
            remoteVideoWidth = round(self.view.bounds.size.height * remoteVideoSize.width / remoteVideoSize.height)
        } else {
            // Video is proportionally taller than the screen: overflow vertically.
            remoteVideoHeight = round(self.view.bounds.size.width * remoteVideoSize.height / remoteVideoSize.width)
        }
        constraints.append(remoteVideoView.autoSetDimension(.width, toSize:remoteVideoWidth))
        constraints.append(remoteVideoView.autoSetDimension(.height, toSize:remoteVideoHeight))
        constraints += remoteVideoView.autoCenterInSuperview()

        // The frame is also set directly — presumably to avoid a flash of
        // wrong geometry before the new constraints take effect. TODO confirm.
        remoteVideoView.frame = CGRect(origin:CGPoint.zero,
                                       size:CGSize(width:remoteVideoWidth,
                                                   height:remoteVideoHeight))
        remoteVideoView.isHidden = false
    } else {
        // No remote video size known yet: keep the view pinned edge-to-edge
        // but hidden.
        constraints += remoteVideoView.autoPinEdgesToSuperviewEdges()
        remoteVideoView.isHidden = true
    }

    self.videoViewConstraints = constraints
}
// Depth-first, pre-order walk of a view hierarchy: visits the node,
// then recurses into each subview.
func traverseViewHierarchy(view: UIView!, visitor: (UIView) -> Void) {
    visitor(view)
    view.subviews.forEach { child in
        traverseViewHierarchy(view: child, visitor: visitor)
    }
}
// MARK: - Methods
// objc accessible way to set our swift enum.
func setOutgoingCallDirection() {
callDirection = .outgoing
@ -360,6 +447,8 @@ class CallViewController: UIViewController, CallObserver {
Logger.error("\(TAG) call failed with error: \(error)")
}
// MARK: - View State
func localizedTextForCallState(_ callState: CallState) -> String {
assert(Thread.isMainThread)
@ -541,27 +630,87 @@ class CallViewController: UIViewController, CallObserver {
// MARK: - CallObserver
internal func stateDidChange(call: SignalCall, state: CallState) {
DispatchQueue.main.async {
Logger.info("\(self.TAG) new call status: \(state)")
self.updateCallUI(callState: state)
}
AssertIsOnMainThread()
Logger.info("\(self.TAG) new call status: \(state)")
self.updateCallUI(callState: state)
}
internal func hasVideoDidChange(call: SignalCall, hasVideo: Bool) {
DispatchQueue.main.async {
self.updateCallUI(callState: call.state)
}
AssertIsOnMainThread()
self.updateCallUI(callState: call.state)
}
internal func muteDidChange(call: SignalCall, isMuted: Bool) {
DispatchQueue.main.async {
self.updateCallUI(callState: call.state)
}
AssertIsOnMainThread()
self.updateCallUI(callState: call.state)
}
internal func speakerphoneDidChange(call: SignalCall, isEnabled: Bool) {
DispatchQueue.main.async {
self.updateCallUI(callState: call.state)
AssertIsOnMainThread()
self.updateCallUI(callState: call.state)
}
// MARK: - Video
/**
 * Wires the local video track's capture session into the preview view,
 * hiding the preview when no capturable source is available.
 */
internal func updateLocalVideoTrack(localVideoTrack: RTCVideoTrack?) {
    AssertIsOnMainThread()
    guard self.localVideoTrack != localVideoTrack else {
        // No change (NSObject equality) — nothing to do.
        return
    }

    self.localVideoTrack = localVideoTrack

    // Conditional cast replaces the original `is` check followed by a
    // force cast (`as!`); a non-AVFoundation source simply yields no
    // capture session.
    let source = localVideoTrack?.source as? RTCAVFoundationVideoSource
    localVideoView.captureSession = source?.captureSession

    // Reuse the value already computed for the log line (the original
    // recomputed `source == nil` for the isHidden assignment).
    let isHidden = source == nil
    Logger.info("\(TAG) \(#function) isHidden: \(isHidden)")
    localVideoView.isHidden = isHidden

    updateVideoViewLayout()
}
internal func updateRemoteVideoTrack(remoteVideoTrack: RTCVideoTrack?) {
    AssertIsOnMainThread()
    // No change (NSObject equality) — nothing to do.
    if self.remoteVideoTrack == remoteVideoTrack {
        return
    }

    // Detach the renderer from the old track and blank the view —
    // presumably renderFrame(nil) precedes add(_:) so a stale frame from
    // the old track isn't left on screen; confirm against RTCEAGLVideoView
    // semantics.
    self.remoteVideoTrack?.remove(remoteVideoView)
    self.remoteVideoTrack = nil
    remoteVideoView.renderFrame(nil)
    self.remoteVideoTrack = remoteVideoTrack
    self.remoteVideoTrack?.add(remoteVideoView)

    // TODO: We need to figure out how to observe start/stop of remote video.

    updateVideoViewLayout()
}
// MARK: - CallServiceObserver
// CallServiceObserver: fan the combined track update out to the
// per-track handlers.
internal func didUpdateVideoTracks(localVideoTrack: RTCVideoTrack?,
                                   remoteVideoTrack: RTCVideoTrack?) {
    AssertIsOnMainThread()

    updateLocalVideoTrack(localVideoTrack: localVideoTrack)
    updateRemoteVideoTrack(remoteVideoTrack: remoteVideoTrack)
}
// MARK: - RTCEAGLVideoViewDelegate
// RTCEAGLVideoViewDelegate: the rendered video's dimensions changed.
// Only the remote view's size drives our layout; anything else is ignored.
internal func videoView(_ videoView: RTCEAGLVideoView, didChangeVideoSize size: CGSize) {
    AssertIsOnMainThread()

    guard videoView == remoteVideoView else {
        return
    }

    Logger.info("\(TAG) \(#function): \(size)")

    remoteVideoSize = size
    updateVideoViewLayout()
}
}