Implement VideoCallVC & CameraManager
This commit is contained in:
parent
b513eeb898
commit
170da7a276
3
Podfile
3
Podfile
|
@ -26,12 +26,14 @@ target 'SessionShareExtension' do
|
|||
pod 'PromiseKit', :inhibit_warnings => true
|
||||
pod 'PureLayout', '~> 3.1.8', :inhibit_warnings => true
|
||||
pod 'SignalCoreKit', git: 'https://github.com/signalapp/SignalCoreKit.git', :inhibit_warnings => true
|
||||
pod 'WebRTC', '~> 63.11', :inhibit_warnings => true
|
||||
pod 'YapDatabase/SQLCipher', :git => 'https://github.com/loki-project/session-ios-yap-database.git', branch: 'signal-release', :inhibit_warnings => true
|
||||
end
|
||||
|
||||
target 'SessionNotificationServiceExtension' do
|
||||
pod 'Curve25519Kit', git: 'https://github.com/signalapp/Curve25519Kit.git', :inhibit_warnings => true
|
||||
pod 'SignalCoreKit', git: 'https://github.com/signalapp/SignalCoreKit.git', :inhibit_warnings => true
|
||||
pod 'WebRTC', '~> 63.11', :inhibit_warnings => true
|
||||
pod 'YapDatabase/SQLCipher', :git => 'https://github.com/loki-project/session-ios-yap-database.git', branch: 'signal-release', :inhibit_warnings => true
|
||||
end
|
||||
|
||||
|
@ -49,6 +51,7 @@ target 'SignalUtilitiesKit' do
|
|||
pod 'SAMKeychain', :inhibit_warnings => true
|
||||
pod 'SignalCoreKit', git: 'https://github.com/signalapp/SignalCoreKit.git', :inhibit_warnings => true
|
||||
pod 'SwiftProtobuf', '~> 1.5.0', :inhibit_warnings => true
|
||||
pod 'WebRTC', '~> 63.11', :inhibit_warnings => true
|
||||
pod 'YapDatabase/SQLCipher', :git => 'https://github.com/loki-project/session-ios-yap-database.git', branch: 'signal-release', :inhibit_warnings => true
|
||||
pod 'YYImage', git: 'https://github.com/signalapp/YYImage', :inhibit_warnings => true
|
||||
end
|
||||
|
|
|
@ -212,6 +212,6 @@ SPEC CHECKSUMS:
|
|||
YYImage: 6db68da66f20d9f169ceb94dfb9947c3867b9665
|
||||
ZXingObjC: fdbb269f25dd2032da343e06f10224d62f537bdb
|
||||
|
||||
PODFILE CHECKSUM: 15bcb2aeee31dc86a3a9febc85208ba890b0dddf
|
||||
PODFILE CHECKSUM: e94e0a63e3b5609dad5b74fbb8e1266ccce2f011
|
||||
|
||||
COCOAPODS: 1.10.1
|
||||
|
|
|
@ -247,6 +247,8 @@
|
|||
B8B32033258B235D0020074B /* Storage+Contacts.swift in Sources */ = {isa = PBXBuildFile; fileRef = B8B32032258B235D0020074B /* Storage+Contacts.swift */; };
|
||||
B8B3204E258C15C80020074B /* ContactsMigration.swift in Sources */ = {isa = PBXBuildFile; fileRef = B8B32044258C117C0020074B /* ContactsMigration.swift */; };
|
||||
B8B320B7258C30D70020074B /* HTMLMetadata.swift in Sources */ = {isa = PBXBuildFile; fileRef = B8B320B6258C30D70020074B /* HTMLMetadata.swift */; };
|
||||
B8B558EF26C4B56C00693325 /* VideoCallVC.swift in Sources */ = {isa = PBXBuildFile; fileRef = B8B558EE26C4B56C00693325 /* VideoCallVC.swift */; };
|
||||
B8B558F126C4BB0600693325 /* CameraManager.swift in Sources */ = {isa = PBXBuildFile; fileRef = B8B558F026C4BB0600693325 /* CameraManager.swift */; };
|
||||
B8BB82A5238F627000BA5194 /* HomeVC.swift in Sources */ = {isa = PBXBuildFile; fileRef = B8BB82A4238F627000BA5194 /* HomeVC.swift */; };
|
||||
B8BC00C0257D90E30032E807 /* General.swift in Sources */ = {isa = PBXBuildFile; fileRef = B8BC00BF257D90E30032E807 /* General.swift */; };
|
||||
B8C2B2C82563685C00551B4D /* CircleView.swift in Sources */ = {isa = PBXBuildFile; fileRef = B8C2B2C72563685C00551B4D /* CircleView.swift */; };
|
||||
|
@ -1215,6 +1217,8 @@
|
|||
B8B32032258B235D0020074B /* Storage+Contacts.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "Storage+Contacts.swift"; sourceTree = "<group>"; };
|
||||
B8B32044258C117C0020074B /* ContactsMigration.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ContactsMigration.swift; sourceTree = "<group>"; };
|
||||
B8B320B6258C30D70020074B /* HTMLMetadata.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = HTMLMetadata.swift; sourceTree = "<group>"; };
|
||||
B8B558EE26C4B56C00693325 /* VideoCallVC.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = VideoCallVC.swift; sourceTree = "<group>"; };
|
||||
B8B558F026C4BB0600693325 /* CameraManager.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CameraManager.swift; sourceTree = "<group>"; };
|
||||
B8B5BCEB2394D869003823C9 /* Button.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Button.swift; sourceTree = "<group>"; };
|
||||
B8BAC75B2695645400EA1759 /* hr */ = {isa = PBXFileReference; lastKnownFileType = text.plist.strings; name = hr; path = hr.lproj/Localizable.strings; sourceTree = "<group>"; };
|
||||
B8BAC75C2695648500EA1759 /* sv */ = {isa = PBXFileReference; lastKnownFileType = text.plist.strings; name = sv; path = sv.lproj/Localizable.strings; sourceTree = "<group>"; };
|
||||
|
@ -2312,6 +2316,15 @@
|
|||
path = Contacts;
|
||||
sourceTree = "<group>";
|
||||
};
|
||||
B8B558ED26C4B55F00693325 /* Calls */ = {
|
||||
isa = PBXGroup;
|
||||
children = (
|
||||
B8B558EE26C4B56C00693325 /* VideoCallVC.swift */,
|
||||
B8B558F026C4BB0600693325 /* CameraManager.swift */,
|
||||
);
|
||||
path = Calls;
|
||||
sourceTree = "<group>";
|
||||
};
|
||||
B8CCF63B239757C10091D419 /* Shared */ = {
|
||||
isa = PBXGroup;
|
||||
children = (
|
||||
|
@ -3533,6 +3546,7 @@
|
|||
children = (
|
||||
C3F0A58F255C8E3D007BE2A3 /* Meta */,
|
||||
C36096BC25AD1C3E008B62B2 /* Backups */,
|
||||
B8B558ED26C4B55F00693325 /* Calls */,
|
||||
C360969C25AD18BA008B62B2 /* Closed Groups */,
|
||||
B835246C25C38AA20089A44F /* Conversations */,
|
||||
C32B405424A961E1001117B5 /* Dependencies */,
|
||||
|
@ -4872,6 +4886,7 @@
|
|||
C328254925CA60E60062D0A7 /* ContextMenuVC+Action.swift in Sources */,
|
||||
4542DF54208D40AC007B4E76 /* LoadingViewController.swift in Sources */,
|
||||
34D5CCA91EAE3D30005515DB /* AvatarViewHelper.m in Sources */,
|
||||
B8B558F126C4BB0600693325 /* CameraManager.swift in Sources */,
|
||||
B8F5F71A25F1B35C003BF8D4 /* MediaPlaceholderView.swift in Sources */,
|
||||
341341EF2187467A00192D59 /* ConversationViewModel.m in Sources */,
|
||||
4C21D5D8223AC60F00EF8A77 /* PhotoCapture.swift in Sources */,
|
||||
|
@ -4922,6 +4937,7 @@
|
|||
B8269D3D25C7B34D00488AB4 /* InputTextView.swift in Sources */,
|
||||
76EB054018170B33006006FC /* AppDelegate.m in Sources */,
|
||||
340FC8B6204DAC8D007AEB0F /* OWSQRCodeScanningViewController.m in Sources */,
|
||||
B8B558EF26C4B56C00693325 /* VideoCallVC.swift in Sources */,
|
||||
C33100082558FF6D00070591 /* NewConversationButtonSet.swift in Sources */,
|
||||
C3AAFFF225AE99710089E6DD /* AppDelegate.swift in Sources */,
|
||||
B8BB82A5238F627000BA5194 /* HomeVC.swift in Sources */,
|
||||
|
|
|
@ -0,0 +1,69 @@
|
|||
import Foundation
|
||||
import AVFoundation
|
||||
|
||||
/// Receives raw video sample buffers captured by `CameraManager`.
/// Marked `@objc` so it can serve as a weak, Objective-C–compatible delegate.
@objc
protocol CameraCaptureDelegate : AnyObject {

    /// Called for every captured video frame.
    /// NOTE(review): invoked on the camera's private data-output queue, not the
    /// main thread — implementers must dispatch as needed.
    func captureVideoOutput(sampleBuffer: CMSampleBuffer)
}
|
||||
|
||||
/// Singleton that captures video from the front-facing camera and forwards each
/// frame to its `CameraCaptureDelegate`.
final class CameraManager : NSObject {
    private let captureSession = AVCaptureSession()
    private let videoDataOutput = AVCaptureVideoDataOutput()
    // NOTE(review): declared but never added to the capture session in this
    // file — audio capture appears unimplemented. TODO confirm before relying on it.
    private let audioDataOutput = AVCaptureAudioDataOutput()
    // Serial queue on which sample buffers are delivered (off the main thread).
    private let dataOutputQueue = DispatchQueue(label: "CameraManager.dataOutputQueue", qos: .userInitiated, attributes: [], autoreleaseFrequency: .workItem)
    private var isCapturing = false
    weak var delegate: CameraCaptureDelegate?

    /// Front-facing wide-angle camera, or `nil` when unavailable (e.g. simulator).
    private lazy var videoCaptureDevice: AVCaptureDevice? = {
        return AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .front)
    }()

    static let shared = CameraManager()

    private override init() { }

    /// Configures the capture session: attaches the front camera as input and a
    /// 32BGRA video data output whose buffers are delivered to `delegate` on
    /// `dataOutputQueue`. Call once before `start()`.
    func prepare() {
        // Fix: batch all session changes inside a begin/commitConfiguration pair.
        // The original called commitConfiguration() only on the failure path and
        // never called beginConfiguration() at all, leaving the pairing unbalanced.
        captureSession.beginConfiguration()
        defer { captureSession.commitConfiguration() }
        if let videoCaptureDevice = videoCaptureDevice,
            let videoInput = try? AVCaptureDeviceInput(device: videoCaptureDevice), captureSession.canAddInput(videoInput) {
            captureSession.addInput(videoInput)
        }
        if captureSession.canAddOutput(videoDataOutput) {
            captureSession.addOutput(videoDataOutput)
            videoDataOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey as String: Int(kCVPixelFormatType_32BGRA)]
            videoDataOutput.setSampleBufferDelegate(self, queue: dataOutputQueue)
            let connection = videoDataOutput.connection(with: .video)
            connection?.videoOrientation = .portrait
            // Mirror the local feed so users see themselves as in a mirror.
            connection?.automaticallyAdjustsVideoMirroring = false
            connection?.isVideoMirrored = true
        } else {
            SNLog("Couldn't add video data output to capture session.")
        }
    }

    /// Starts capturing. Idempotent — does nothing if already capturing.
    func start() {
        guard !isCapturing else { return }
        isCapturing = true
        // Presumably restricted to arm64 because the capture/WebRTC stack doesn't
        // run on the simulator — TODO confirm.
        #if arch(arm64)
        captureSession.startRunning()
        #endif
    }

    /// Stops capturing. Idempotent — does nothing if not capturing.
    func stop() {
        guard isCapturing else { return }
        isCapturing = false
        #if arch(arm64)
        captureSession.stopRunning()
        #endif
    }
}
|
||||
|
||||
// MARK: - Sample buffer delegates
extension CameraManager : AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate {

    /// Forwards video sample buffers to `delegate`; buffers from any other
    /// connection are ignored. Runs on `dataOutputQueue`.
    func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
        if connection == videoDataOutput.connection(with: .video) {
            delegate?.captureVideoOutput(sampleBuffer: sampleBuffer)
        }
    }

    /// Dropped frames are deliberately ignored.
    func captureOutput(_ output: AVCaptureOutput, didDrop sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) { }
}
|
|
@ -0,0 +1,58 @@
|
|||
import UIKit
|
||||
import AVFoundation
|
||||
import WebRTC
|
||||
|
||||
/// Full-screen video call UI: the local and remote feeds are stacked vertically,
/// each half filled by a WebRTC renderer.
class VideoCallVC : UIViewController {
    // Fix: the original declared these as implicitly-unwrapped `UIView!` and never
    // assigned them, so the first access in `setUpViewHierarchy()`
    // (`self.localVideoView.frame`) crashed on nil. Initialize them inline instead.
    private let localVideoView = UIView()
    private let remoteVideoView = UIView()

    // MARK: - Lifecycle
    override func viewDidLoad() {
        super.viewDidLoad()
        setUpViewHierarchy()
        CameraManager.shared.delegate = self
    }

    /// Builds the renderer + stack view hierarchy and attaches the renderers to
    /// the call's WebRTC video tracks.
    private func setUpViewHierarchy() {
        // Create video views
        #if arch(arm64)
        // Use Metal
        let localRenderer = RTCMTLVideoView(frame: localVideoView.frame)
        localRenderer.contentMode = .scaleAspectFill
        let remoteRenderer = RTCMTLVideoView(frame: remoteVideoView.frame)
        remoteRenderer.contentMode = .scaleAspectFill
        #else
        // Use OpenGLES (Metal renderer is unavailable off-device)
        let localRenderer = RTCEAGLVideoView(frame: localVideoView.frame)
        let remoteRenderer = RTCEAGLVideoView(frame: remoteVideoView.frame)
        #endif
        // Set up stack view: two equal halves filling the whole screen
        let stackView = UIStackView(arrangedSubviews: [ localVideoView, remoteVideoView ])
        stackView.axis = .vertical
        stackView.distribution = .fillEqually
        stackView.alignment = .fill
        view.addSubview(stackView)
        stackView.translatesAutoresizingMaskIntoConstraints = false
        stackView.pin(to: view)
        // Attach the renderers to the WebRTC tracks, then pin each renderer to
        // its container view
        CallManager.shared.attachLocalRenderer(localRenderer)
        CallManager.shared.attachRemoteRenderer(remoteRenderer)
        localVideoView.addSubview(localRenderer)
        localRenderer.translatesAutoresizingMaskIntoConstraints = false
        localRenderer.pin(to: localVideoView)
        remoteVideoView.addSubview(remoteRenderer)
        remoteRenderer.translatesAutoresizingMaskIntoConstraints = false
        remoteRenderer.pin(to: remoteVideoView)
    }
}
|
||||
|
||||
// MARK: - Camera
extension VideoCallVC : CameraCaptureDelegate {

    /// Wraps each captured camera frame in an `RTCVideoFrame` and hands it to
    /// the call manager. Invoked on the camera's data-output queue.
    func captureVideoOutput(sampleBuffer: CMSampleBuffer) {
        guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return }
        let rtcPixelBuffer = RTCCVPixelBuffer(pixelBuffer: pixelBuffer)
        // WebRTC expects the presentation timestamp in nanoseconds
        let seconds = CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer))
        let timeStampNs = Int64(seconds * 1_000_000_000)
        let videoFrame = RTCVideoFrame(buffer: rtcPixelBuffer, rotation: RTCVideoRotation._0, timeStampNs: timeStampNs)
        CallManager.shared.handleLocalFrameCaptured(videoFrame)
    }
}
|
|
@ -2,15 +2,15 @@ import WebRTC
|
|||
|
||||
extension CallManager {

    /// Attaches `renderer` to the local video track so the user's own camera
    /// feed is displayed.
    public func attachLocalRenderer(_ renderer: RTCVideoRenderer) {
        localVideoTrack.add(renderer)
    }

    /// Attaches `renderer` to the remote video track, if one exists yet.
    public func attachRemoteRenderer(_ renderer: RTCVideoRenderer) {
        remoteVideoTrack?.add(renderer)
    }

    /// Feeds a locally captured frame into the WebRTC video source.
    public func handleLocalFrameCaptured(_ videoFrame: RTCVideoFrame) {
        localVideoSource.capturer(videoCapturer, didCapture: videoFrame)
    }
}
|
||||
|
|
|
@ -1,7 +1,3 @@
|
|||
//
|
||||
// Copyright (c) 2018 Open Whisper Systems. All rights reserved.
|
||||
//
|
||||
|
||||
import PromiseKit
|
||||
|
||||
enum MockTURNSserver {
|
||||
|
|
Loading…
Reference in New Issue