// Mirror of https://github.com/oxen-io/session-ios.git
// Synced 2023-12-13 21:30:14 +01:00 — 97 lines, 3.9 KiB, Swift.
//
// Applied theming logic to the ConversationTitleView and blocked banner.
// Removed a few redundant modals (replaced them with the "Confirmation Modal").
// Removed some duplicate code.
// Fixed an issue where a synchronous start/stop behaviour was running on the
// main thread, causing some UI blocking.
// Fixed an issue where the minimised call view could be covered by presenting
// view controllers.
import Foundation
import AVFoundation
import SessionUtilitiesKit
@objc
|
|
protocol CameraManagerDelegate : AnyObject {
|
|
|
|
func handleVideoOutputCaptured(sampleBuffer: CMSampleBuffer)
|
|
}
|
|
|
|
final class CameraManager : NSObject {
|
|
private let captureSession = AVCaptureSession()
|
|
private let videoDataOutput = AVCaptureVideoDataOutput()
|
|
private let videoDataOutputQueue
|
|
= DispatchQueue(label: "CameraManager.videoDataOutputQueue", qos: .userInitiated, attributes: [], autoreleaseFrequency: .workItem)
|
|
private let audioDataOutput = AVCaptureAudioDataOutput()
|
|
private var isCapturing = false
|
|
weak var delegate: CameraManagerDelegate?
|
|
|
|
private var videoCaptureDevice: AVCaptureDevice?
|
|
private var videoInput: AVCaptureDeviceInput?
|
|
|
|
func prepare() {
|
|
print("[Calls] Preparing camera.")
|
|
addNewVideoIO(position: .front)
|
|
}
|
|
|
|
private func addNewVideoIO(position: AVCaptureDevice.Position) {
|
|
if let videoCaptureDevice = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: position),
|
|
let videoInput = try? AVCaptureDeviceInput(device: videoCaptureDevice), captureSession.canAddInput(videoInput) {
|
|
captureSession.addInput(videoInput)
|
|
self.videoCaptureDevice = videoCaptureDevice
|
|
self.videoInput = videoInput
|
|
}
|
|
if captureSession.canAddOutput(videoDataOutput) {
|
|
captureSession.addOutput(videoDataOutput)
|
|
videoDataOutput.videoSettings = [ kCVPixelBufferPixelFormatTypeKey as String : Int(kCVPixelFormatType_32BGRA) ]
|
|
videoDataOutput.setSampleBufferDelegate(self, queue: videoDataOutputQueue)
|
|
guard let connection = videoDataOutput.connection(with: AVMediaType.video) else { return }
|
|
connection.videoOrientation = .portrait
|
|
connection.automaticallyAdjustsVideoMirroring = false
|
|
connection.isVideoMirrored = (position == .front)
|
|
} else {
|
|
SNLog("Couldn't add video data output to capture session.")
|
|
}
|
|
}
|
|
|
|
func start() {
|
|
guard !isCapturing else { return }
|
|
|
|
// Note: The 'startRunning' task is blocking so we want to do it on a non-main thread
|
|
DispatchQueue.global(qos: .userInitiated).async { [weak self] in
|
|
print("[Calls] Starting camera.")
|
|
self?.isCapturing = true
|
|
self?.captureSession.startRunning()
|
|
}
|
|
}
|
|
|
|
func stop() {
|
|
guard isCapturing else { return }
|
|
|
|
// Note: The 'stopRunning' task is blocking so we want to do it on a non-main thread
|
|
DispatchQueue.global(qos: .userInitiated).async { [weak self] in
|
|
print("[Calls] Stopping camera.")
|
|
self?.isCapturing = false
|
|
self?.captureSession.stopRunning()
|
|
}
|
|
}
|
|
|
|
func switchCamera() {
|
|
guard let videoCaptureDevice = videoCaptureDevice, let videoInput = videoInput else { return }
|
|
stop()
|
|
if videoCaptureDevice.position == .front {
|
|
captureSession.removeInput(videoInput)
|
|
captureSession.removeOutput(videoDataOutput)
|
|
addNewVideoIO(position: .back)
|
|
} else {
|
|
captureSession.removeInput(videoInput)
|
|
captureSession.removeOutput(videoDataOutput)
|
|
addNewVideoIO(position: .front)
|
|
}
|
|
start()
|
|
}
|
|
}
|
|
|
|
extension CameraManager : AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate {
|
|
|
|
func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
|
|
guard connection == videoDataOutput.connection(with: .video) else { return }
|
|
delegate?.handleVideoOutputCaptured(sampleBuffer: sampleBuffer)
|
|
}
|
|
|
|
func captureOutput(_ output: AVCaptureOutput, didDrop sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
|
|
print("[Calls] Frame dropped.")
|
|
}
|
|
}
|