Photo/Movie Capture

This commit is contained in:
Michael Kirk 2019-03-07 21:05:58 -08:00
parent 95b11ddf8c
commit 284357137f
31 changed files with 1536 additions and 18 deletions

View File

@ -476,6 +476,7 @@
4C20B2B720CA0034001BAC90 /* ThreadViewModel.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4542DF51208B82E9007B4E76 /* ThreadViewModel.swift */; };
4C20B2B920CA10DE001BAC90 /* ConversationSearchViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4C20B2B820CA10DE001BAC90 /* ConversationSearchViewController.swift */; };
4C21D5D6223A9DC500EF8A77 /* UIAlerts+iOS9.m in Sources */ = {isa = PBXBuildFile; fileRef = 4C21D5D5223A9DC500EF8A77 /* UIAlerts+iOS9.m */; };
4C21D5D8223AC60F00EF8A77 /* PhotoCapture.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4C21D5D7223AC60F00EF8A77 /* PhotoCapture.swift */; };
4C23A5F2215C4ADE00534937 /* SheetViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4C23A5F1215C4ADE00534937 /* SheetViewController.swift */; };
4C2F454F214C00E1004871FF /* AvatarTableViewCell.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4C2F454E214C00E1004871FF /* AvatarTableViewCell.swift */; };
4C3E245C21F29FCE000AE092 /* Toast.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4CA5F792211E1F06008C2708 /* Toast.swift */; };
@ -496,6 +497,7 @@
4C9CA25D217E676900607C63 /* ZXingObjC.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 4C9CA25C217E676900607C63 /* ZXingObjC.framework */; };
4CA46F4C219CCC630038ABDE /* CaptionView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4CA46F4B219CCC630038ABDE /* CaptionView.swift */; };
4CA46F4D219CFDAA0038ABDE /* GalleryRailView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4CA46F49219C78050038ABDE /* GalleryRailView.swift */; };
4CA485BB2232339F004B9E7D /* PhotoCaptureViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4CA485BA2232339F004B9E7D /* PhotoCaptureViewController.swift */; };
4CB5F26720F6E1E2004D1B42 /* MenuActionsViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4CFF4C0920F55BBA005DA313 /* MenuActionsViewController.swift */; };
4CB5F26920F7D060004D1B42 /* MessageActions.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4CB5F26820F7D060004D1B42 /* MessageActions.swift */; };
4CB93DC22180FF07004B9764 /* ProximityMonitoringManager.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4CB93DC12180FF07004B9764 /* ProximityMonitoringManager.swift */; };
@ -1225,6 +1227,7 @@
4C1D233B218B6D3100A0598F /* tr */ = {isa = PBXFileReference; lastKnownFileType = text.plist.strings; name = tr; path = translations/tr.lproj/Localizable.strings; sourceTree = "<group>"; };
4C20B2B820CA10DE001BAC90 /* ConversationSearchViewController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ConversationSearchViewController.swift; sourceTree = "<group>"; };
4C21D5D5223A9DC500EF8A77 /* UIAlerts+iOS9.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = "UIAlerts+iOS9.m"; sourceTree = "<group>"; };
4C21D5D7223AC60F00EF8A77 /* PhotoCapture.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PhotoCapture.swift; sourceTree = "<group>"; };
4C23A5F1215C4ADE00534937 /* SheetViewController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SheetViewController.swift; sourceTree = "<group>"; };
4C2F454E214C00E1004871FF /* AvatarTableViewCell.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AvatarTableViewCell.swift; sourceTree = "<group>"; };
4C3EF7FC2107DDEE0007EBF7 /* ParamParserTest.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ParamParserTest.swift; sourceTree = "<group>"; };
@ -1243,6 +1246,7 @@
4C9CA25C217E676900607C63 /* ZXingObjC.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = ZXingObjC.framework; path = ThirdParty/Carthage/Build/iOS/ZXingObjC.framework; sourceTree = "<group>"; };
4CA46F49219C78050038ABDE /* GalleryRailView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = GalleryRailView.swift; sourceTree = "<group>"; };
4CA46F4B219CCC630038ABDE /* CaptionView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CaptionView.swift; sourceTree = "<group>"; };
4CA485BA2232339F004B9E7D /* PhotoCaptureViewController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PhotoCaptureViewController.swift; sourceTree = "<group>"; };
4CA5F792211E1F06008C2708 /* Toast.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Toast.swift; sourceTree = "<group>"; };
4CB5F26820F7D060004D1B42 /* MessageActions.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MessageActions.swift; sourceTree = "<group>"; };
4CB93DC12180FF07004B9764 /* ProximityMonitoringManager.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ProximityMonitoringManager.swift; sourceTree = "<group>"; };
@ -1847,14 +1851,16 @@
path = mocks;
sourceTree = "<group>";
};
34969558219B605E00DCFE74 /* PhotoLibrary */ = {
34969558219B605E00DCFE74 /* Photos */ = {
isa = PBXGroup;
children = (
34969559219B605E00DCFE74 /* ImagePickerController.swift */,
3496955A219B605E00DCFE74 /* PhotoCollectionPickerController.swift */,
3496955B219B605E00DCFE74 /* PhotoLibrary.swift */,
4CA485BA2232339F004B9E7D /* PhotoCaptureViewController.swift */,
4C21D5D7223AC60F00EF8A77 /* PhotoCapture.swift */,
);
path = PhotoLibrary;
path = Photos;
sourceTree = "<group>";
};
3496956121A301A100DCFE74 /* Backup */ = {
@ -1912,7 +1918,7 @@
345BC30A2047030600257B7C /* OWS2FASettingsViewController.h */,
345BC30B2047030600257B7C /* OWS2FASettingsViewController.m */,
34A6C27F21E503E600B5B12E /* OWSImagePickerController.swift */,
34969558219B605E00DCFE74 /* PhotoLibrary */,
34969558219B605E00DCFE74 /* Photos */,
34CE88E51F2FB9A10098030F /* ProfileViewController.h */,
34CE88E61F2FB9A10098030F /* ProfileViewController.m */,
340FC875204DAC8C007AEB0F /* Registration */,
@ -3615,6 +3621,7 @@
4556FA681F54AA9500AF40DD /* DebugUIProfile.swift in Sources */,
45A6DAD61EBBF85500893231 /* ReminderView.swift in Sources */,
34D1F0881F8678AA0066283D /* ConversationViewLayout.m in Sources */,
4CA485BB2232339F004B9E7D /* PhotoCaptureViewController.swift in Sources */,
3448E16422135FFA004B052E /* OnboardingPhoneNumberViewController.swift in Sources */,
34330AA31E79686200DF2FB9 /* OWSProgressView.m in Sources */,
344825C6211390C800DB4BD8 /* OWSOrphanDataCleaner.m in Sources */,
@ -3628,6 +3635,7 @@
348BB25D20A0C5530047AEC2 /* ContactShareViewHelper.swift in Sources */,
34B3F8801E8DF1700035BE1A /* InviteFlow.swift in Sources */,
457C87B82032645C008D52D6 /* DebugUINotifications.swift in Sources */,
4C21D5D8223AC60F00EF8A77 /* PhotoCapture.swift in Sources */,
4C13C9F620E57BA30089A98B /* ColorPickerViewController.swift in Sources */,
4CC1ECFB211A553000CC13BE /* AppUpdateNag.swift in Sources */,
3448E16022134C89004B052E /* OnboardingSplashViewController.swift in Sources */,

View File

@ -0,0 +1,23 @@
{
"images" : [
{
"idiom" : "universal",
"filename" : "flash-auto-32@1x.png",
"scale" : "1x"
},
{
"idiom" : "universal",
"filename" : "flash-auto-32@2x.png",
"scale" : "2x"
},
{
"idiom" : "universal",
"filename" : "flash-auto-32@3x.png",
"scale" : "3x"
}
],
"info" : {
"version" : 1,
"author" : "xcode"
}
}

Binary file not shown.

After

Width:  |  Height:  |  Size: 585 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.1 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.7 KiB

View File

@ -0,0 +1,23 @@
{
"images" : [
{
"idiom" : "universal",
"filename" : "flash-off-32@1x.png",
"scale" : "1x"
},
{
"idiom" : "universal",
"filename" : "flash-off-32@2x.png",
"scale" : "2x"
},
{
"idiom" : "universal",
"filename" : "flash-off-32@3x.png",
"scale" : "3x"
}
],
"info" : {
"version" : 1,
"author" : "xcode"
}
}

Binary file not shown.

After

Width:  |  Height:  |  Size: 537 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.0 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.5 KiB

View File

@ -0,0 +1,23 @@
{
"images" : [
{
"idiom" : "universal",
"filename" : "flash-on-32@1x.png",
"scale" : "1x"
},
{
"idiom" : "universal",
"filename" : "flash-on-32@2x.png",
"scale" : "2x"
},
{
"idiom" : "universal",
"filename" : "flash-on-32@3x.png",
"scale" : "3x"
}
],
"info" : {
"version" : 1,
"author" : "xcode"
}
}

Binary file not shown.

After

Width:  |  Height:  |  Size: 445 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 840 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.1 KiB

View File

@ -0,0 +1,23 @@
{
"images" : [
{
"idiom" : "universal",
"filename" : "switch-camera-32@1x.png",
"scale" : "1x"
},
{
"idiom" : "universal",
"filename" : "switch-camera-32@2x.png",
"scale" : "2x"
},
{
"idiom" : "universal",
"filename" : "switch-camera-32@3x.png",
"scale" : "3x"
}
],
"info" : {
"version" : 1,
"author" : "xcode"
}
}

Binary file not shown.

After

Width:  |  Height:  |  Size: 500 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 954 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.5 KiB

View File

@ -0,0 +1,23 @@
{
"images" : [
{
"idiom" : "universal",
"filename" : "x-24@1x.png",
"scale" : "1x"
},
{
"idiom" : "universal",
"filename" : "x-24@2x.png",
"scale" : "2x"
},
{
"idiom" : "universal",
"filename" : "x-24@3x.png",
"scale" : "3x"
}
],
"info" : {
"version" : 1,
"author" : "xcode"
}
}

Binary file not shown.

After

Width:  |  Height:  |  Size: 243 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 398 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 573 B

View File

@ -153,6 +153,7 @@ typedef enum : NSUInteger {
UIDocumentPickerDelegate,
UIImagePickerControllerDelegate,
OWSImagePickerControllerDelegate,
OWSPhotoCaptureViewControllerDelegate,
UINavigationControllerDelegate,
UITextViewDelegate,
ConversationCollectionViewDelegate,
@ -2781,6 +2782,24 @@ typedef enum : NSUInteger {
[self showApprovalDialogForAttachment:attachment];
}
#pragma mark - OWSPhotoCaptureViewControllerDelegate
- (void)photoCaptureViewController:(OWSPhotoCaptureViewController *)photoCaptureViewController
didFinishProcessingAttachment:(SignalAttachment *)attachment
{
OWSLogDebug(@"");
[self dismissViewControllerAnimated:YES
completion:^{
[self showApprovalDialogForAttachment:attachment];
}];
}
- (void)photoCaptureViewControllerDidCancel:(OWSPhotoCaptureViewController *)photoCaptureViewController
{
OWSLogDebug(@"");
[self dismissViewControllerAnimated:YES completion:nil];
}
#pragma mark - UIImagePickerController
/*
@ -2788,20 +2807,48 @@ typedef enum : NSUInteger {
*/
- (void)takePictureOrVideo
{
[self ows_askForCameraPermissions:^(BOOL granted) {
if (!granted) {
[self ows_askForCameraPermissions:^(BOOL cameraGranted) {
if (!cameraGranted) {
OWSLogWarn(@"camera permission denied.");
return;
}
[self ows_askForMicrophonePermissions:^(BOOL micGranted) {
if (!micGranted) {
OWSLogWarn(@"proceeding, though mic permission denied.");
// We can still continue without mic permissions, but any captured video will
// be silent.
}
UIImagePickerController *picker = [OWSImagePickerController new];
picker.sourceType = UIImagePickerControllerSourceTypeCamera;
picker.mediaTypes = @[ (__bridge NSString *)kUTTypeImage, (__bridge NSString *)kUTTypeMovie ];
picker.allowsEditing = NO;
picker.delegate = self;
[self dismissKeyBoard];
[self presentViewController:picker animated:YES completion:nil];
UIViewController *pickerModal;
if (SSKFeatureFlags.useCustomPhotoCapture) {
OWSPhotoCaptureViewController *captureVC = [OWSPhotoCaptureViewController new];
captureVC.delegate = self;
OWSNavigationController *navController =
[[OWSNavigationController alloc] initWithRootViewController:captureVC];
UINavigationBar *navigationBar = navController.navigationBar;
if (![navigationBar isKindOfClass:[OWSNavigationBar class]]) {
OWSFailDebug(@"navigationBar was nil or unexpected class");
} else {
OWSNavigationBar *owsNavigationBar = (OWSNavigationBar *)navigationBar;
[owsNavigationBar overrideThemeWithType:NavigationBarThemeOverrideClear];
}
navController.ows_prefersStatusBarHidden = @(YES);
pickerModal = navController;
} else {
UIImagePickerController *picker = [OWSImagePickerController new];
pickerModal = picker;
picker.sourceType = UIImagePickerControllerSourceTypeCamera;
picker.mediaTypes = @[ (__bridge NSString *)kUTTypeImage, (__bridge NSString *)kUTTypeMovie ];
picker.allowsEditing = NO;
picker.delegate = self;
}
OWSAssertDebug(pickerModal);
[self dismissKeyBoard];
[self presentViewController:pickerModal animated:YES completion:nil];
}];
}];
}

View File

@ -0,0 +1,673 @@
//
// Copyright (c) 2019 Open Whisper Systems. All rights reserved.
//
import Foundation
import PromiseKit
/// Callbacks from the capture pipeline (all delivered on the main thread by the
/// conforming implementations below), plus two UI-supplied values the pipeline
/// reads back from its delegate.
protocol PhotoCaptureDelegate: AnyObject {
    // A still photo or movie finished processing and is ready to present/approve.
    func photoCapture(_ photoCapture: PhotoCapture, didFinishProcessingAttachment attachment: SignalAttachment)
    func photoCapture(_ photoCapture: PhotoCapture, processingDidError error: Error)

    // Movie-recording lifecycle (begin is fired once recording has actually
    // started on the session queue; complete/cancel fire immediately on main).
    func photoCaptureDidBeginVideo(_ photoCapture: PhotoCapture)
    func photoCaptureDidCompleteVideo(_ photoCapture: PhotoCapture)
    func photoCaptureDidCancelVideo(_ photoCapture: PhotoCapture)

    // Values the pipeline pulls from the UI: the height used to scale zoom
    // gestures, and the orientation to stamp on captured media.
    var zoomScaleReferenceHeight: CGFloat? { get }
    var captureOrientation: AVCaptureVideoOrientation { get }
}
/// Owns the AVCaptureSession and coordinates camera configuration (inputs,
/// outputs, flash, focus, zoom). All session mutation happens on `sessionQueue`.
class PhotoCapture: NSObject {

    weak var delegate: PhotoCaptureDelegate?

    /// Flash mode currently configured on the image output.
    var flashMode: AVCaptureDevice.FlashMode {
        return captureOutput.flashMode
    }

    let session: AVCaptureSession

    /// Serial queue guarding all AVCaptureSession configuration and use.
    let sessionQueue = DispatchQueue(label: "PhotoCapture.sessionQueue")

    private var currentCaptureInput: AVCaptureDeviceInput?
    private let captureOutput: CaptureOutput

    /// The device backing the current video input, if any.
    var captureDevice: AVCaptureDevice? {
        return currentCaptureInput?.device
    }

    /// The camera position most recently requested by the user. The actual
    /// input is swapped asynchronously on `sessionQueue`.
    private(set) var desiredPosition: AVCaptureDevice.Position = .back

    override init() {
        self.session = AVCaptureSession()
        self.captureOutput = CaptureOutput()
    }

    /// Configures inputs/outputs and starts the session running.
    /// Rejects with `PhotoCaptureError.initializationFailed` if the photo
    /// output cannot be attached.
    func startCapture() -> Promise<Void> {
        return sessionQueue.async(.promise) { [weak self] in
            guard let self = self else { return }

            self.session.beginConfiguration()
            defer { self.session.commitConfiguration() }

            try self.updateCurrentInput(position: .back)

            // Audio is best-effort: capture can proceed without it (any recorded
            // video will simply be silent), so degrade gracefully rather than
            // crashing (the previous force-unwrap) or aborting setup if the
            // audio device or input is unavailable.
            if let audioDevice = AVCaptureDevice.default(for: .audio) {
                do {
                    let audioDeviceInput = try AVCaptureDeviceInput(device: audioDevice)
                    if self.session.canAddInput(audioDeviceInput) {
                        self.session.addInput(audioDeviceInput)
                    } else {
                        owsFailDebug("Could not add audio device input to the session")
                    }
                } catch {
                    owsFailDebug("Could not create audio device input: \(error)")
                }
            } else {
                owsFailDebug("audio capture device was unexpectedly nil")
            }

            guard let photoOutput = self.captureOutput.photoOutput else {
                throw PhotoCaptureError.initializationFailed
            }
            guard self.session.canAddOutput(photoOutput) else {
                throw PhotoCaptureError.initializationFailed
            }
            self.session.addOutput(photoOutput)

            // An output's connections only exist after it has been added to the
            // session, so stabilization must be configured after addOutput
            // (previously this ran before addOutput and silently did nothing).
            if let connection = photoOutput.connection(with: .video),
                connection.isVideoStabilizationSupported {
                connection.preferredVideoStabilizationMode = .auto
            }

            let movieOutput = self.captureOutput.movieOutput
            if self.session.canAddOutput(movieOutput) {
                self.session.addOutput(movieOutput)
                self.session.sessionPreset = .medium
                if let connection = movieOutput.connection(with: .video),
                    connection.isVideoStabilizationSupported {
                    connection.preferredVideoStabilizationMode = .auto
                }
            }
        }.done(on: sessionQueue) {
            self.session.startRunning()
        }
    }

    /// Stops the session. Always succeeds; work happens on `sessionQueue`.
    func stopCapture() -> Guarantee<Void> {
        return sessionQueue.async(.promise) {
            self.session.stopRunning()
        }
    }

    func assertIsOnSessionQueue() {
        assertOnQueue(sessionQueue)
    }

    /// Toggles between the front and back camera. `desiredPosition` is updated
    /// synchronously on main so the UI can reflect the change immediately.
    func switchCamera() -> Promise<Void> {
        AssertIsOnMainThread()

        let newPosition: AVCaptureDevice.Position
        switch desiredPosition {
        case .front:
            newPosition = .back
        case .back:
            newPosition = .front
        case .unspecified:
            newPosition = .front
        }
        desiredPosition = newPosition

        return sessionQueue.async(.promise) { [weak self] in
            guard let self = self else { return }
            self.session.beginConfiguration()
            defer { self.session.commitConfiguration() }
            try self.updateCurrentInput(position: newPosition)
        }
    }

    // This method should be called on the serial queue,
    // and between calls to session.beginConfiguration/commitConfiguration
    func updateCurrentInput(position: AVCaptureDevice.Position) throws {
        assertIsOnSessionQueue()

        guard let device = captureOutput.videoDevice(position: position) else {
            // Previously this passed NSObject's `description`, which is not a
            // useful diagnostic; use an explicit message instead.
            throw PhotoCaptureError.assertionError(description: "Missing video capture device")
        }

        let newInput = try AVCaptureDeviceInput(device: device)

        if let oldInput = self.currentCaptureInput {
            session.removeInput(oldInput)
            NotificationCenter.default.removeObserver(self, name: .AVCaptureDeviceSubjectAreaDidChange, object: oldInput.device)
        }
        session.addInput(newInput)
        NotificationCenter.default.addObserver(self, selector: #selector(subjectAreaDidChange), name: .AVCaptureDeviceSubjectAreaDidChange, object: newInput.device)

        currentCaptureInput = newInput

        resetFocusAndExposure()
    }

    /// Cycles flash: auto -> on -> off -> auto.
    func switchFlashMode() -> Guarantee<Void> {
        return sessionQueue.async(.promise) {
            switch self.captureOutput.flashMode {
            case .auto:
                Logger.debug("new flashMode: on")
                self.captureOutput.flashMode = .on
            case .on:
                Logger.debug("new flashMode: off")
                self.captureOutput.flashMode = .off
            case .off:
                Logger.debug("new flashMode: auto")
                self.captureOutput.flashMode = .auto
            }
        }
    }

    /// Applies focus/exposure at a point of interest on the current device.
    /// Setting the point-of-interest alone does nothing; the corresponding mode
    /// must be (re)set to trigger the operation.
    func focus(with focusMode: AVCaptureDevice.FocusMode,
               exposureMode: AVCaptureDevice.ExposureMode,
               at devicePoint: CGPoint,
               monitorSubjectAreaChange: Bool) {
        sessionQueue.async {
            guard let device = self.captureDevice else {
                owsFailDebug("device was unexpectedly nil")
                return
            }
            do {
                try device.lockForConfiguration()

                // Setting (focus/exposure)PointOfInterest alone does not initiate a (focus/exposure) operation.
                // Call set(Focus/Exposure)Mode() to apply the new point of interest.
                if device.isFocusPointOfInterestSupported && device.isFocusModeSupported(focusMode) {
                    device.focusPointOfInterest = devicePoint
                    device.focusMode = focusMode
                }

                if device.isExposurePointOfInterestSupported && device.isExposureModeSupported(exposureMode) {
                    device.exposurePointOfInterest = devicePoint
                    device.exposureMode = exposureMode
                }

                device.isSubjectAreaChangeMonitoringEnabled = monitorSubjectAreaChange
                device.unlockForConfiguration()
            } catch {
                owsFailDebug("error: \(error)")
            }
        }
    }

    /// Returns focus/exposure to continuous auto at the frame center.
    func resetFocusAndExposure() {
        let devicePoint = CGPoint(x: 0.5, y: 0.5)
        focus(with: .continuousAutoFocus, exposureMode: .continuousAutoExposure, at: devicePoint, monitorSubjectAreaChange: false)
    }

    @objc
    func subjectAreaDidChange(notification: NSNotification) {
        resetFocusAndExposure()
    }

    // MARK: - Zoom

    let minimumZoom: CGFloat = 1.0
    let maximumZoom: CGFloat = 3.0
    // Zoom factor committed by the last completed pinch; new pinches scale from here.
    var previousZoomFactor: CGFloat = 1.0

    /// Zoom driven by the capture-button drag; alpha in [0, 1] is mapped
    /// linearly onto [minimumZoom, maximumZoom].
    func updateZoom(alpha: CGFloat) {
        assert(alpha >= 0 && alpha <= 1)
        sessionQueue.async {
            guard let captureDevice = self.captureDevice else {
                owsFailDebug("captureDevice was unexpectedly nil")
                return
            }

            // we might want this to be non-linear
            let scale = CGFloatLerp(self.minimumZoom, self.maximumZoom, alpha)
            let zoomFactor = self.clampZoom(scale, device: captureDevice)
            self.updateZoom(factor: zoomFactor)
        }
    }

    /// Zoom driven by an in-progress pinch, relative to the last committed factor.
    func updateZoom(scaleFromPreviousZoomFactor scale: CGFloat) {
        sessionQueue.async {
            guard let captureDevice = self.captureDevice else {
                owsFailDebug("captureDevice was unexpectedly nil")
                return
            }

            let zoomFactor = self.clampZoom(scale * self.previousZoomFactor, device: captureDevice)
            self.updateZoom(factor: zoomFactor)
        }
    }

    /// Pinch ended: apply and commit the final zoom factor.
    func completeZoom(scaleFromPreviousZoomFactor scale: CGFloat) {
        sessionQueue.async {
            guard let captureDevice = self.captureDevice else {
                owsFailDebug("captureDevice was unexpectedly nil")
                return
            }

            let zoomFactor = self.clampZoom(scale * self.previousZoomFactor, device: captureDevice)

            Logger.debug("ended with scaleFactor: \(zoomFactor)")

            self.previousZoomFactor = zoomFactor
            self.updateZoom(factor: zoomFactor)
        }
    }

    private func updateZoom(factor: CGFloat) {
        assertIsOnSessionQueue()

        guard let captureDevice = self.captureDevice else {
            owsFailDebug("captureDevice was unexpectedly nil")
            return
        }

        do {
            try captureDevice.lockForConfiguration()
            captureDevice.videoZoomFactor = factor
            captureDevice.unlockForConfiguration()
        } catch {
            owsFailDebug("error: \(error)")
        }
    }

    /// Clamps to our UI range and to what the device's active format allows.
    private func clampZoom(_ factor: CGFloat, device: AVCaptureDevice) -> CGFloat {
        return min(factor.clamp(minimumZoom, maximumZoom), device.activeFormat.videoMaxZoomFactor)
    }
}
/// Translates capture-button gestures into session-queue work.
extension PhotoCapture: CaptureButtonDelegate {

    // MARK: - Photo

    /// Tap: take a single still photo on the session queue.
    func didTapCaptureButton(_ captureButton: CaptureButton) {
        Logger.verbose("")
        sessionQueue.async {
            self.captureOutput.takePhoto(delegate: self)
        }
    }

    // MARK: - Video

    /// Long-press began: start movie recording on the session queue, then
    /// notify the delegate on main only after recording has been kicked off.
    func didBeginLongPressCaptureButton(_ captureButton: CaptureButton) {
        AssertIsOnMainThread()
        Logger.verbose("")
        sessionQueue.async {
            self.captureOutput.beginVideo(delegate: self)

            DispatchQueue.main.async {
                self.delegate?.photoCaptureDidBeginVideo(self)
            }
        }
    }

    /// Long-press ended: stop recording asynchronously, but tell the UI right
    /// away (the finished-file callback arrives later via the movie output).
    func didCompleteLongPressCaptureButton(_ captureButton: CaptureButton) {
        Logger.verbose("")
        sessionQueue.async {
            self.captureOutput.completeVideo(delegate: self)
        }
        AssertIsOnMainThread()

        // immediately inform UI that capture is stopping
        delegate?.photoCaptureDidCompleteVideo(self)
    }

    func didCancelLongPressCaptureButton(_ captureButton: CaptureButton) {
        Logger.verbose("")
        AssertIsOnMainThread()
        delegate?.photoCaptureDidCancelVideo(self)
    }

    // Forwarded from the delegate (the previewing UI supplies the reference height).
    var zoomScaleReferenceHeight: CGFloat? {
        return delegate?.zoomScaleReferenceHeight
    }

    /// Long-press drag: map the normalized drag distance onto the zoom range.
    func longPressCaptureButton(_ captureButton: CaptureButton, didUpdateZoomAlpha zoomAlpha: CGFloat) {
        Logger.verbose("zoomAlpha: \(zoomAlpha)")
        updateZoom(alpha: zoomAlpha)
    }
}
/// Receives raw capture results from the output adaptors and converts them into
/// SignalAttachments for the UI-facing delegate.
extension PhotoCapture: CaptureOutputDelegate {

    /// Orientation the outputs should stamp on captured media; falls back to
    /// portrait when no delegate is attached.
    var captureOrientation: AVCaptureVideoOrientation {
        guard let delegate = delegate else { return .portrait }
        return delegate.captureOrientation
    }

    // MARK: - Photo

    /// Still-photo processing finished (called on main by the adaptees).
    /// Wraps the JPEG data in an attachment, or forwards the failure.
    func captureOutputDidFinishProcessing(photoData: Data?, error: Error?) {
        Logger.verbose("")
        AssertIsOnMainThread()

        if let error = error {
            delegate?.photoCapture(self, processingDidError: error)
            return
        }

        guard let photoData = photoData else {
            owsFailDebug("photoData was unexpectedly nil")
            delegate?.photoCapture(self, processingDidError: PhotoCaptureError.captureFailed)
            return
        }

        let dataSource = DataSourceValue.dataSource(with: photoData, utiType: kUTTypeJPEG as String)

        let attachment = SignalAttachment.attachment(dataSource: dataSource, dataUTI: kUTTypeJPEG as String, imageQuality: .medium)
        delegate?.photoCapture(self, didFinishProcessingAttachment: attachment)
    }

    // MARK: - Movie

    func fileOutput(_ output: AVCaptureFileOutput, didStartRecordingTo fileURL: URL, from connections: [AVCaptureConnection]) {
        Logger.verbose("")
        AssertIsOnMainThread()
    }

    /// Movie recording finished. Wraps the temp mp4 in an attachment; the file
    /// is deleted when the data source deallocates.
    func fileOutput(_ output: AVCaptureFileOutput, didFinishRecordingTo outputFileURL: URL, from connections: [AVCaptureConnection], error: Error?) {
        Logger.verbose("")
        AssertIsOnMainThread()

        if let error = error {
            delegate?.photoCapture(self, processingDidError: error)
            return
        }

        let dataSource = DataSourcePath.dataSource(with: outputFileURL, shouldDeleteOnDeallocation: true)
        let attachment = SignalAttachment.attachment(dataSource: dataSource, dataUTI: kUTTypeMPEG4 as String)
        delegate?.photoCapture(self, didFinishProcessingAttachment: attachment)
    }
}
// MARK: - Capture Adapter

/// What the output adaptors require from their owner (PhotoCapture): session
/// access, queue assertions, the desired media orientation, and a completion
/// callback for stills. Inherits the movie-recording delegate for file output.
protocol CaptureOutputDelegate: AVCaptureFileOutputRecordingDelegate {
    var session: AVCaptureSession { get }
    func assertIsOnSessionQueue()
    // Invoked on the main thread with the captured image data, or nil + error
    // on failure.
    func captureOutputDidFinishProcessing(photoData: Data?, error: Error?)
    var captureOrientation: AVCaptureVideoOrientation { get }
}
/// Abstraction over the iOS-version-specific still-photo APIs
/// (AVCapturePhotoOutput on iOS 10+, AVCaptureStillImageOutput before that).
protocol ImageCaptureOutput: AnyObject {
    // The underlying AVCaptureOutput to attach to the session.
    var avOutput: AVCaptureOutput { get }
    var flashMode: AVCaptureDevice.FlashMode { get set }
    func videoDevice(position: AVCaptureDevice.Position) -> AVCaptureDevice?
    func takePhoto(delegate: CaptureOutputDelegate)
}
/// Bundles the version-appropriate still-image pipeline together with the movie
/// output, and routes capture requests to whichever applies.
class CaptureOutput {

    let imageOutput: ImageCaptureOutput
    let movieOutput: AVCaptureMovieFileOutput

    init() {
        movieOutput = AVCaptureMovieFileOutput()

        // Pick the modern photo API when available, the legacy one otherwise.
        if #available(iOS 10.0, *) {
            imageOutput = PhotoCaptureOutputAdaptee()
        } else {
            imageOutput = StillImageCaptureOutput()
        }
    }

    var photoOutput: AVCaptureOutput? {
        return imageOutput.avOutput
    }

    // Flash state lives on the image output; expose it directly.
    var flashMode: AVCaptureDevice.FlashMode {
        get { return imageOutput.flashMode }
        set { imageOutput.flashMode = newValue }
    }

    func videoDevice(position: AVCaptureDevice.Position) -> AVCaptureDevice? {
        return imageOutput.videoDevice(position: position)
    }

    /// Orients the photo connection to match the UI, then hands off to the
    /// image output. Must run on the session queue.
    func takePhoto(delegate: CaptureOutputDelegate) {
        delegate.assertIsOnSessionQueue()

        guard let output = photoOutput else {
            owsFailDebug("photoOutput was unexpectedly nil")
            return
        }
        guard let connection = output.connection(with: .video) else {
            owsFailDebug("photoVideoConnection was unexpectedly nil")
            return
        }

        let orientation = delegate.captureOrientation
        connection.videoOrientation = orientation
        Logger.verbose("videoOrientation: \(orientation)")

        return imageOutput.takePhoto(delegate: delegate)
    }

    // MARK: - Movie Output

    /// Starts recording to a fresh temp mp4, oriented to match the UI.
    /// Must run on the session queue.
    func beginVideo(delegate: CaptureOutputDelegate) {
        delegate.assertIsOnSessionQueue()

        guard let connection = movieOutput.connection(with: .video) else {
            owsFailDebug("movieOutputConnection was unexpectedly nil")
            return
        }
        connection.videoOrientation = delegate.captureOrientation

        let path = OWSFileSystem.temporaryFilePath(withFileExtension: "mp4")
        movieOutput.startRecording(to: URL(fileURLWithPath: path), recordingDelegate: delegate)
    }

    /// Stops recording; the delegate receives the finished-file callback.
    func completeVideo(delegate: CaptureOutputDelegate) {
        delegate.assertIsOnSessionQueue()
        movieOutput.stopRecording()
    }

    func cancelVideo(delegate: CaptureOutputDelegate) {
        delegate.assertIsOnSessionQueue()
        // There's currently no user-visible way to cancel, if so, we may need to do some cleanup here.
        owsFailDebug("video was unexpectedly canceled.")
    }
}
/// Modern (iOS 10+) still-photo pipeline built on AVCapturePhotoOutput.
@available(iOS 10.0, *)
class PhotoCaptureOutputAdaptee: NSObject, ImageCaptureOutput {

    let photoOutput = AVCapturePhotoOutput()
    var avOutput: AVCaptureOutput {
        return photoOutput
    }

    var flashMode: AVCaptureDevice.FlashMode = .off

    override init() {
        photoOutput.isLivePhotoCaptureEnabled = false
        photoOutput.isHighResolutionCaptureEnabled = true
    }

    // Keeps each in-flight capture's processor (and thereby its delegate
    // reference) alive until processing completes, keyed by settings uniqueID.
    private var photoProcessors: [Int64: PhotoProcessor] = [:]

    func takePhoto(delegate: CaptureOutputDelegate) {
        delegate.assertIsOnSessionQueue()

        let settings = buildCaptureSettings()

        let photoProcessor = PhotoProcessor(delegate: delegate, completion: { [weak self] in
            self?.photoProcessors[settings.uniqueID] = nil
        })
        photoProcessors[settings.uniqueID] = photoProcessor

        photoOutput.capturePhoto(with: settings, delegate: photoProcessor)
    }

    func videoDevice(position: AVCaptureDevice.Position) -> AVCaptureDevice? {
        // NOTE(review): the original comment said "use dual camera where
        // available", but this explicitly requests the wide-angle camera;
        // confirm whether .builtInDuoCamera was intended.
        return AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: position)
    }

    // MARK: -

    /// Per-shot settings: flash as configured, stabilization when supported.
    private func buildCaptureSettings() -> AVCapturePhotoSettings {
        let photoSettings = AVCapturePhotoSettings()
        photoSettings.flashMode = flashMode
        photoSettings.isAutoStillImageStabilizationEnabled =
            photoOutput.isStillImageStabilizationSupported
        return photoSettings
    }

    private class PhotoProcessor: NSObject, AVCapturePhotoCaptureDelegate {
        weak var delegate: CaptureOutputDelegate?
        let completion: () -> Void

        init(delegate: CaptureOutputDelegate, completion: @escaping () -> Void) {
            self.delegate = delegate
            self.completion = completion
        }

        @available(iOS 11.0, *)
        func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
            // fileDataRepresentation() is optional; forward nil rather than
            // force-unwrapping (a crash) — the downstream delegate already
            // treats nil photoData as a capture failure.
            let data = photo.fileDataRepresentation()
            DispatchQueue.main.async {
                self.delegate?.captureOutputDidFinishProcessing(photoData: data, error: error)
            }
            completion()
        }

        // for legacy (iOS10) devices
        func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photoSampleBuffer: CMSampleBuffer?, previewPhoto previewPhotoSampleBuffer: CMSampleBuffer?, resolvedSettings: AVCaptureResolvedPhotoSettings, bracketSettings: AVCaptureBracketedStillImageSettings?, error: Error?) {
            if #available(iOS 11, *) {
                owsFailDebug("unexpectedly calling legacy method.")
            }

            guard let photoSampleBuffer = photoSampleBuffer else {
                owsFailDebug("sampleBuffer was unexpectedly nil")
                // Previously this returned without calling completion(), leaking
                // the processor and silently swallowing the failure; surface the
                // error and clean up instead.
                DispatchQueue.main.async {
                    self.delegate?.captureOutputDidFinishProcessing(photoData: nil, error: error)
                }
                completion()
                return
            }

            let data = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(photoSampleBuffer)
            DispatchQueue.main.async {
                self.delegate?.captureOutputDidFinishProcessing(photoData: data, error: error)
            }
            completion()
        }
    }
}
/// Legacy (pre-iOS 10) still-photo pipeline built on AVCaptureStillImageOutput.
class StillImageCaptureOutput: ImageCaptureOutput {

    // NOTE(review): flashMode is stored to satisfy ImageCaptureOutput but is
    // never applied on this legacy path — confirm whether flash should be
    // configured on the capture device here.
    var flashMode: AVCaptureDevice.FlashMode = .off

    let stillImageOutput = AVCaptureStillImageOutput()
    var avOutput: AVCaptureOutput {
        return stillImageOutput
    }

    init() {
        stillImageOutput.isHighResolutionStillImageOutputEnabled = true
    }

    // MARK: -

    func takePhoto(delegate: CaptureOutputDelegate) {
        guard let videoConnection = stillImageOutput.connection(with: .video) else {
            owsFailDebug("videoConnection was unexpectedly nil")
            return
        }

        stillImageOutput.captureStillImageAsynchronously(from: videoConnection) { [weak delegate] (sampleBuffer, error) in
            guard let sampleBuffer = sampleBuffer else {
                owsFailDebug("sampleBuffer was unexpectedly nil")
                // Previously the failure was swallowed here, leaving the UI
                // waiting forever; forward it so the delegate can surface an error.
                DispatchQueue.main.async {
                    delegate?.captureOutputDidFinishProcessing(photoData: nil, error: error)
                }
                return
            }

            let data = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(sampleBuffer)
            DispatchQueue.main.async {
                delegate?.captureOutputDidFinishProcessing(photoData: data, error: error)
            }
        }
    }

    func videoDevice(position: AVCaptureDevice.Position) -> AVCaptureDevice? {
        let captureDevices = AVCaptureDevice.devices()
        guard let device = (captureDevices.first { $0.hasMediaType(.video) && $0.position == position }) else {
            Logger.debug("unable to find desired position: \(position)")
            return captureDevices.first
        }
        return device
    }
}
extension AVCaptureVideoOrientation {
    /// Maps a physical device orientation to the corresponding video orientation.
    ///
    /// The left/right swap is deliberate, not a bug: device orientation and
    /// video orientation are defined from opposite frames of reference, so a
    /// device held landscapeLeft yields landscapeRight video (this matches
    /// Apple's AVCam sample mapping). Returns nil for faceUp/faceDown/unknown,
    /// where no capture rotation applies.
    init?(deviceOrientation: UIDeviceOrientation) {
        switch deviceOrientation {
        case .portrait: self = .portrait
        case .portraitUpsideDown: self = .portraitUpsideDown
        case .landscapeLeft: self = .landscapeRight
        case .landscapeRight: self = .landscapeLeft
        default: return nil
        }
    }
}
extension AVCaptureVideoOrientation: CustomStringConvertible {
    /// Human-readable name for logging (e.g. the orientation logs in takePhoto).
    // NOTE: exhaustive without a default clause (pre-`@unknown default` style);
    // new SDK cases would require updating this switch.
    public var description: String {
        switch self {
        case .portrait:
            return "AVCaptureVideoOrientation.portrait"
        case .portraitUpsideDown:
            return "AVCaptureVideoOrientation.portraitUpsideDown"
        case .landscapeRight:
            return "AVCaptureVideoOrientation.landscapeRight"
        case .landscapeLeft:
            return "AVCaptureVideoOrientation.landscapeLeft"
        }
    }
}
extension UIDeviceOrientation: CustomStringConvertible {
    /// Human-readable name for logging.
    public var description: String {
        switch self {
        case .unknown:
            return "UIDeviceOrientation.unknown"
        case .portrait:
            return "UIDeviceOrientation.portrait"
        case .portraitUpsideDown:
            return "UIDeviceOrientation.portraitUpsideDown"
        case .landscapeLeft:
            return "UIDeviceOrientation.landscapeLeft"
        case .landscapeRight:
            return "UIDeviceOrientation.landscapeRight"
        case .faceUp:
            return "UIDeviceOrientation.faceUp"
        case .faceDown:
            return "UIDeviceOrientation.faceDown"
        }
    }
}
extension UIImageOrientation: CustomStringConvertible {
    /// Human-readable name for logging.
    public var description: String {
        switch self {
        case .up:
            return "UIImageOrientation.up"
        case .down:
            return "UIImageOrientation.down"
        case .left:
            return "UIImageOrientation.left"
        case .right:
            return "UIImageOrientation.right"
        case .upMirrored:
            return "UIImageOrientation.upMirrored"
        case .downMirrored:
            return "UIImageOrientation.downMirrored"
        case .leftMirrored:
            return "UIImageOrientation.leftMirrored"
        case .rightMirrored:
            return "UIImageOrientation.rightMirrored"
        }
    }
}

View File

@ -0,0 +1,663 @@
//
// Copyright (c) 2019 Open Whisper Systems. All rights reserved.
//
import Foundation
import AVFoundation
import PromiseKit
/// Exposed to Objective-C (ConversationViewController conforms) under the OWS-
/// prefixed name. The capture flow reports a finished attachment or a cancel.
@objc(OWSPhotoCaptureViewControllerDelegate)
protocol PhotoCaptureViewControllerDelegate: AnyObject {
    func photoCaptureViewController(_ photoCaptureViewController: PhotoCaptureViewController, didFinishProcessingAttachment attachment: SignalAttachment)
    func photoCaptureViewControllerDidCancel(_ photoCaptureViewController: PhotoCaptureViewController)
}
/// Failures surfaced by the capture pipeline.
enum PhotoCaptureError: Error {
    // Unexpected internal state; the description is for debugging only (the
    // user-facing text is the generic string in the LocalizedError extension).
    case assertionError(description: String)
    // The AVCaptureSession could not be configured.
    case initializationFailed
    // The session ran but producing the photo failed (e.g. nil photo data).
    case captureFailed
}
extension PhotoCaptureError: LocalizedError {
    /// User-facing message for each failure.
    ///
    /// Note: `LocalizedError`'s customization point is `errorDescription`;
    /// the previous implementation declared `localizedDescription` directly,
    /// which is bypassed when the error is accessed through an `Error`
    /// existential or bridged to NSError, so users saw a generic message.
    /// `error.localizedDescription` now correctly derives from this property.
    var errorDescription: String? {
        switch self {
        case .initializationFailed:
            return NSLocalizedString("PHOTO_CAPTURE_UNABLE_TO_INITIALIZE_CAMERA", comment: "alert title")
        case .captureFailed:
            return NSLocalizedString("PHOTO_CAPTURE_UNABLE_TO_CAPTURE_IMAGE", comment: "alert title")
        case .assertionError:
            return NSLocalizedString("PHOTO_CAPTURE_GENERIC_ERROR", comment: "alert title, generic error preventing user from capturing a photo")
        }
    }
}
@objc(OWSPhotoCaptureViewController)
class PhotoCaptureViewController: OWSViewController {

    @objc
    weak var delegate: PhotoCaptureViewControllerDelegate?

    // Session controller for the camera. Created in setupPhotoCapture() during
    // viewDidLoad; implicitly unwrapped because it is non-nil for the
    // controller's usable lifetime.
    private var photoCapture: PhotoCapture!

    deinit {
        // Balances beginGeneratingDeviceOrientationNotifications() in
        // setupOrientationMonitoring().
        UIDevice.current.endGeneratingDeviceOrientationNotifications()
        if let photoCapture = photoCapture {
            photoCapture.stopCapture().done {
                Logger.debug("stopCapture completed")
            }.retainUntilComplete()
        }
    }

    // MARK: - Dependencies

    var audioActivity: AudioActivity?
    var audioSession: OWSAudioSession {
        return Environment.shared.audioSession
    }

    // MARK: - Overrides

    override func loadView() {
        self.view = UIView()
        self.view.backgroundColor = Theme.darkThemeBackgroundColor

        // Claim the audio session with record capability so movie capture has
        // microphone access.
        // NOTE(review): this activity is started but never explicitly ended in
        // this class — confirm whether AudioActivity cleans up on dealloc or
        // whether an endAudioActivity call is missing.
        let audioActivity = AudioActivity(audioDescription: "PhotoCaptureViewController", behavior: .playAndRecord)
        self.audioActivity = audioActivity
        if !self.audioSession.startAudioActivity(audioActivity) {
            owsFailDebug("unexpectedly unable to start audio activity")
        }
    }

    override func viewDidLoad() {
        super.viewDidLoad()
        setupPhotoCapture()
        setupOrientationMonitoring()
        updateNavigationItems()
        updateFlashModeControl()

        // Seed icon rotation from the current device orientation; orientations
        // with no capture equivalent (faceUp/faceDown/unknown) fall back to portrait.
        let initialCaptureOrientation = AVCaptureVideoOrientation(deviceOrientation: UIDevice.current.orientation) ?? .portrait
        updateIconOrientations(isAnimated: false, captureOrientation: initialCaptureOrientation)

        view.addGestureRecognizer(pinchZoomGesture)
        view.addGestureRecognizer(focusGesture)
        view.addGestureRecognizer(doubleTapToSwitchCameraGesture)
    }

    override var prefersStatusBarHidden: Bool {
        return true
    }

    // MARK: -

    var isRecordingMovie: Bool = false
    let recordingTimerView = RecordingTimerView()

    /// Swaps the nav bar between the capture chrome (close / flash /
    /// switch-camera) and the recording timer, based on isRecordingMovie.
    func updateNavigationItems() {
        if isRecordingMovie {
            navigationItem.leftBarButtonItem = nil
            navigationItem.rightBarButtonItems = nil
            navigationItem.titleView = recordingTimerView
            recordingTimerView.sizeToFit()
        } else {
            navigationItem.titleView = nil
            navigationItem.leftBarButtonItem = dismissControl.barButtonItem
            let fixedSpace = UIBarButtonItem(barButtonSystemItem: .fixedSpace, target: nil, action: nil)
            fixedSpace.width = 16
            navigationItem.rightBarButtonItems = [flashModeControl.barButtonItem, fixedSpace, switchCameraControl.barButtonItem]
        }
    }

    // HACK: Though we don't have an input accessory view, the VC we are presented above (ConversationVC) does.
    // If the app is backgrounded and then foregrounded, when OWSWindowManager calls mainWindow.makeKeyAndVisible
    // the ConversationVC's inputAccessoryView will appear *above* us unless we'd previously become first responder.
    override public var canBecomeFirstResponder: Bool {
        Logger.debug("")
        return true
    }

    override var supportedInterfaceOrientations: UIInterfaceOrientationMask {
        // Capture UI is portrait-only; rotation is conveyed by rotating the
        // icons instead (see updateIconOrientations).
        return .portrait
    }

    // MARK: - Views

    let captureButton = CaptureButton()
    // Created in setupPhotoCapture() once the session exists.
    var previewView: CapturePreviewView!

    /// Small helper pairing an OWSButton with a UIBarButtonItem wrapper so the
    /// same control can live in the navigation bar.
    class PhotoControl {
        let button: OWSButton
        let barButtonItem: UIBarButtonItem

        init(imageName: String, block: @escaping () -> Void) {
            self.button = OWSButton(imageName: imageName, tintColor: .ows_white, block: block)
            if #available(iOS 10, *) {
                button.autoPinToSquareAspectRatio()
            } else {
                // iOS 9: autolayout sizing in the nav bar is unreliable; size manually.
                button.sizeToFit()
            }

            // Drop shadow so the white glyph stays legible over a bright preview.
            button.layer.shadowOffset = CGSize.zero
            button.layer.shadowOpacity = 0.35
            button.layer.shadowRadius = 4

            self.barButtonItem = UIBarButtonItem(customView: button)
        }

        func setImage(imageName: String) {
            button.setImage(imageName: imageName)
        }
    }

    private lazy var dismissControl: PhotoControl = {
        return PhotoControl(imageName: "ic_x_with_shadow") { [weak self] in
            self?.didTapClose()
        }
    }()

    private lazy var switchCameraControl: PhotoControl = {
        return PhotoControl(imageName: "ic_switch_camera") { [weak self] in
            self?.didTapSwitchCamera()
        }
    }()

    private lazy var flashModeControl: PhotoControl = {
        return PhotoControl(imageName: "ic_flash_mode_auto") { [weak self] in
            self?.didTapFlashMode()
        }
    }()

    lazy var pinchZoomGesture: UIPinchGestureRecognizer = {
        return UIPinchGestureRecognizer(target: self, action: #selector(didPinchZoom(pinchGesture:)))
    }()

    lazy var focusGesture: UITapGestureRecognizer = {
        return UITapGestureRecognizer(target: self, action: #selector(didTapFocusExpose(tapGesture:)))
    }()

    lazy var doubleTapToSwitchCameraGesture: UITapGestureRecognizer = {
        let tapGesture = UITapGestureRecognizer(target: self, action: #selector(didDoubleTapToSwitchCamera(tapGesture:)))
        tapGesture.numberOfTapsRequired = 2
        return tapGesture
    }()

    // MARK: - Events

    @objc
    func didTapClose() {
        self.delegate?.photoCaptureViewControllerDidCancel(self)
    }

    @objc
    func didTapSwitchCamera() {
        Logger.debug("")
        switchCamera()
    }

    @objc
    func didDoubleTapToSwitchCamera(tapGesture: UITapGestureRecognizer) {
        Logger.debug("")
        switchCamera()
    }

    private func switchCamera() {
        UIView.animate(withDuration: 0.2) {
            // A rotation of exactly .pi is direction-ambiguous; nudge it so
            // UIKit animates counter-clockwise consistently.
            let epsilonToForceCounterClockwiseRotation: CGFloat = 0.00001
            self.switchCameraControl.button.transform = self.switchCameraControl.button.transform.rotate(.pi + epsilonToForceCounterClockwiseRotation)
        }
        photoCapture.switchCamera().catch { error in
            self.showFailureUI(error: error)
        }.retainUntilComplete()
    }

    @objc
    func didTapFlashMode() {
        Logger.debug("")
        photoCapture.switchFlashMode().done {
            self.updateFlashModeControl()
        }.retainUntilComplete()
    }

    @objc
    func didPinchZoom(pinchGesture: UIPinchGestureRecognizer) {
        switch pinchGesture.state {
        case .began: fallthrough
        case .changed:
            // Scale is relative to the zoom factor at gesture start; only
            // committed on .ended via completeZoom.
            photoCapture.updateZoom(scaleFromPreviousZoomFactor: pinchGesture.scale)
        case .ended:
            photoCapture.completeZoom(scaleFromPreviousZoomFactor: pinchGesture.scale)
        default:
            break
        }
    }

    @objc
    func didTapFocusExpose(tapGesture: UITapGestureRecognizer) {
        let viewLocation = tapGesture.location(in: view)
        // Convert from view coordinates to the capture device's normalized space.
        let devicePoint = previewView.previewLayer.captureDevicePointConverted(fromLayerPoint: viewLocation)
        photoCapture.focus(with: .autoFocus, exposureMode: .autoExpose, at: devicePoint, monitorSubjectAreaChange: true)
    }

    // MARK: - Orientation

    private func setupOrientationMonitoring() {
        // Ended in deinit.
        UIDevice.current.beginGeneratingDeviceOrientationNotifications()
        NotificationCenter.default.addObserver(self,
                                               selector: #selector(didChangeDeviceOrientation),
                                               name: .UIDeviceOrientationDidChange,
                                               object: UIDevice.current)
    }

    var lastKnownCaptureOrientation: AVCaptureVideoOrientation = .portrait

    @objc
    func didChangeDeviceOrientation(notification: Notification) {
        let currentOrientation = UIDevice.current.orientation

        if let captureOrientation = AVCaptureVideoOrientation(deviceOrientation: currentOrientation) {
            // since the "face up" and "face down" orientations aren't reflected in the photo output,
            // we need to capture the last known _other_ orientation so we can reflect the appropriate
            // portrait/landscape in our captured photos.
            Logger.verbose("lastKnownCaptureOrientation: \(lastKnownCaptureOrientation)->\(captureOrientation)")
            lastKnownCaptureOrientation = captureOrientation
            updateIconOrientations(isAnimated: true, captureOrientation: captureOrientation)
        }
    }

    // MARK: -

    /// Counter-rotates the flash and switch-camera icons so their glyphs stay
    /// upright while the (portrait-locked) UI is physically rotated.
    private func updateIconOrientations(isAnimated: Bool, captureOrientation: AVCaptureVideoOrientation) {
        Logger.verbose("captureOrientation: \(captureOrientation)")

        let transformFromOrientation: CGAffineTransform
        switch captureOrientation {
        case .portrait:
            transformFromOrientation = .identity
        case .portraitUpsideDown:
            transformFromOrientation = CGAffineTransform(rotationAngle: .pi)
        case .landscapeLeft:
            transformFromOrientation = CGAffineTransform(rotationAngle: .halfPi)
        case .landscapeRight:
            transformFromOrientation = CGAffineTransform(rotationAngle: -1 * .halfPi)
        }

        // Don't "unrotate" the switch camera icon if the front facing camera had been selected.
        let tranformFromCameraType: CGAffineTransform = photoCapture.desiredPosition == .front ? CGAffineTransform(rotationAngle: -.pi) : .identity

        let updateOrientation = {
            self.flashModeControl.button.transform = transformFromOrientation
            self.switchCameraControl.button.transform = transformFromOrientation.concatenating(tranformFromCameraType)
        }

        if isAnimated {
            UIView.animate(withDuration: 0.3, animations: updateOrientation)
        } else {
            updateOrientation()
        }
    }

    private func setupPhotoCapture() {
        photoCapture = PhotoCapture()
        photoCapture.delegate = self
        captureButton.delegate = photoCapture
        previewView = CapturePreviewView(session: photoCapture.session)

        // Only show capture chrome once the session actually started; otherwise
        // surface the failure and let the user dismiss.
        photoCapture.startCapture().done { [weak self] in
            guard let self = self else { return }
            self.showCaptureUI()
        }.catch { [weak self] error in
            guard let self = self else { return }
            self.showFailureUI(error: error)
        }.retainUntilComplete()
    }

    private func showCaptureUI() {
        Logger.debug("")
        view.addSubview(previewView)
        if UIDevice.current.hasIPhoneXNotch {
            previewView.autoPinEdgesToSuperviewEdges()
        } else {
            previewView.autoPinEdgesToSuperviewEdges(with: UIEdgeInsets(top: 0, leading: 0, bottom: 40, trailing: 0))
        }

        view.addSubview(captureButton)
        captureButton.autoHCenterInSuperview()
        let captureButtonDiameter: CGFloat = 80
        captureButton.autoSetDimensions(to: CGSize(width: captureButtonDiameter, height: captureButtonDiameter))
        // on iPhoneX 12.1
        captureButton.autoPinEdge(toSuperviewMargin: .bottom, withInset: 10)
    }

    private func showFailureUI(error: Error) {
        Logger.error("error: \(error)")

        OWSAlerts.showAlert(title: nil,
                            message: error.localizedDescription,
                            buttonTitle: CommonStrings.dismissButton,
                            buttonAction: { [weak self] _ in self?.dismiss(animated: true) })
    }

    /// Syncs the flash icon with the capture session's current flash mode.
    private func updateFlashModeControl() {
        let imageName: String
        switch photoCapture.flashMode {
        case .auto:
            imageName = "ic_flash_mode_auto"
        case .on:
            imageName = "ic_flash_mode_on"
        case .off:
            imageName = "ic_flash_mode_off"
        }
        self.flashModeControl.setImage(imageName: imageName)
    }
}
extension PhotoCaptureViewController: PhotoCaptureDelegate {

    // MARK: - Photo

    /// Forwards the finished attachment to our own delegate.
    func photoCapture(_ photoCapture: PhotoCapture, didFinishProcessingAttachment attachment: SignalAttachment) {
        delegate?.photoCaptureViewController(self, didFinishProcessingAttachment: attachment)
    }

    func photoCapture(_ photoCapture: PhotoCapture, processingDidError error: Error) {
        showFailureUI(error: error)
    }

    // MARK: - Video

    func photoCaptureDidBeginVideo(_ photoCapture: PhotoCapture) {
        isRecordingMovie = true
        updateNavigationItems()
        recordingTimerView.startCounting()
    }

    func photoCaptureDidCompleteVideo(_ photoCapture: PhotoCapture) {
        // Stop counting, but keep visible
        // (isRecordingMovie intentionally stays true while the movie is processed).
        recordingTimerView.stopCounting()
    }

    func photoCaptureDidCancelVideo(_ photoCapture: PhotoCapture) {
        owsFailDebug("If we ever allow this, we should test.")
        isRecordingMovie = false
        recordingTimerView.stopCounting()
        updateNavigationItems()
    }

    // MARK: -

    // Used by CaptureButton's long-press zoom to normalize vertical pan distance.
    var zoomScaleReferenceHeight: CGFloat? {
        return view.bounds.height
    }

    // Last portrait/landscape orientation observed; see didChangeDeviceOrientation.
    var captureOrientation: AVCaptureVideoOrientation {
        return lastKnownCaptureOrientation
    }
}
// MARK: - Views
protocol CaptureButtonDelegate: AnyObject {
// MARK: Photo
func didTapCaptureButton(_ captureButton: CaptureButton)
// MARK: Video
func didBeginLongPressCaptureButton(_ captureButton: CaptureButton)
func didCompleteLongPressCaptureButton(_ captureButton: CaptureButton)
func didCancelLongPressCaptureButton(_ captureButton: CaptureButton)
var zoomScaleReferenceHeight: CGFloat? { get }
func longPressCaptureButton(_ captureButton: CaptureButton, didUpdateZoomAlpha zoomAlpha: CGFloat)
}
/// Shutter control: tap to take a photo, long-press to record video, and drag
/// up while long-pressing to zoom (visualized by a shrinking ring).
class CaptureButton: UIView {

    let innerButton = CircleView()

    var tapGesture: UITapGestureRecognizer!

    var longPressGesture: UILongPressGestureRecognizer!
    // Seconds of press before recording starts.
    let longPressDuration = 0.5

    // Ring that scales down as the user drags upward to zoom.
    let zoomIndicator = CircleView()

    weak var delegate: CaptureButtonDelegate?

    override init(frame: CGRect) {
        super.init(frame: frame)

        tapGesture = UITapGestureRecognizer(target: self, action: #selector(didTap))
        innerButton.addGestureRecognizer(tapGesture)

        longPressGesture = UILongPressGestureRecognizer(target: self, action: #selector(didLongPress))
        longPressGesture.minimumPressDuration = longPressDuration
        innerButton.addGestureRecognizer(longPressGesture)

        addSubview(innerButton)
        innerButton.backgroundColor = UIColor.ows_white.withAlphaComponent(0.33)
        innerButton.layer.shadowOffset = .zero
        innerButton.layer.shadowOpacity = 0.33
        innerButton.layer.shadowRadius = 2
        innerButton.autoPinEdgesToSuperviewEdges()

        zoomIndicator.isUserInteractionEnabled = false
        addSubview(zoomIndicator)
        zoomIndicator.layer.borderColor = UIColor.ows_white.cgColor
        zoomIndicator.layer.borderWidth = 1.5
        zoomIndicator.autoPin(toEdgesOf: innerButton)
    }

    required init?(coder aDecoder: NSCoder) {
        fatalError("init(coder:) has not been implemented")
    }

    // MARK: - Gestures

    @objc
    func didTap(_ gesture: UITapGestureRecognizer) {
        delegate?.didTapCaptureButton(self)
    }

    // Touch location at long-press start; zoom is measured relative to it.
    var initialTouchLocation: CGPoint?

    @objc
    func didLongPress(_ gesture: UILongPressGestureRecognizer) {
        Logger.verbose("")

        guard let gestureView = gesture.view else {
            owsFailDebug("gestureView was unexpectedly nil")
            return
        }

        switch gesture.state {
        case .possible: break
        case .began:
            initialTouchLocation = gesture.location(in: gesture.view)
            zoomIndicator.transform = .identity
            delegate?.didBeginLongPressCaptureButton(self)
        case .changed:
            guard let referenceHeight = delegate?.zoomScaleReferenceHeight else {
                owsFailDebug("referenceHeight was unexpectedly nil")
                return
            }

            guard referenceHeight > 0 else {
                owsFailDebug("referenceHeight was unexpectedly <= 0")
                return
            }

            guard let initialTouchLocation = initialTouchLocation else {
                owsFailDebug("initialTouchLocation was unexpectedly nil")
                return
            }

            let currentLocation = gesture.location(in: gestureView)

            // Ignore the first few points of travel so a slightly wobbly press
            // doesn't start zooming.
            let minDistanceBeforeActivatingZoom: CGFloat = 30
            let distance = initialTouchLocation.y - currentLocation.y - minDistanceBeforeActivatingZoom

            // Dragging a quarter of the reference height reaches full zoom.
            let distanceForFullZoom = referenceHeight / 4
            let ratio = distance / distanceForFullZoom
            let alpha = ratio.clamp(0, 1)

            Logger.verbose("distance: \(distance), alpha: \(alpha)")

            let transformScale = CGFloatLerp(1, 0.1, alpha)
            zoomIndicator.transform = CGAffineTransform(scaleX: transformScale, y: transformScale)
            zoomIndicator.superview?.layoutIfNeeded()

            delegate?.longPressCaptureButton(self, didUpdateZoomAlpha: alpha)
        case .ended:
            zoomIndicator.transform = .identity
            delegate?.didCompleteLongPressCaptureButton(self)
        case .cancelled, .failed:
            zoomIndicator.transform = .identity
            delegate?.didCancelLongPressCaptureButton(self)
        }
    }
}
/// Hosts an AVCaptureVideoPreviewLayer and keeps it sized to this view.
class CapturePreviewView: UIView {

    let previewLayer: AVCaptureVideoPreviewLayer

    // Keep the preview layer in sync with autolayout-driven size changes.
    // NOTE(review): only `bounds` is observed, not `frame` — presumably fine
    // because this view is laid out via autolayout, but worth confirming.
    override var bounds: CGRect {
        didSet {
            previewLayer.frame = bounds
        }
    }

    init(session: AVCaptureSession) {
        previewLayer = AVCaptureVideoPreviewLayer(session: session)
        super.init(frame: .zero)
        self.contentMode = .scaleAspectFill
        previewLayer.frame = bounds
        layer.addSublayer(previewLayer)
    }

    required init?(coder aDecoder: NSCoder) {
        fatalError("init(coder:) has not been implemented")
    }
}
/// Nav-bar title view shown while recording video: a pulsing red dot plus an
/// "mm:ss" elapsed-time label, updated on a 0.1s timer.
class RecordingTimerView: UIView {

    let stackViewSpacing: CGFloat = 4

    override init(frame: CGRect) {
        super.init(frame: frame)

        let stackView = UIStackView(arrangedSubviews: [icon, label])
        stackView.axis = .horizontal
        stackView.alignment = .center
        stackView.spacing = stackViewSpacing

        addSubview(stackView)
        stackView.autoPinEdgesToSuperviewMargins()

        updateView()
    }

    required init?(coder aDecoder: NSCoder) {
        fatalError("init(coder:) has not been implemented")
    }

    // MARK: - Subviews

    private lazy var label: UILabel = {
        let label = UILabel()
        // Monospaced digits keep the label width stable as the time ticks.
        label.font = UIFont.ows_monospacedDigitFont(withSize: 20)
        label.textAlignment = .center
        label.textColor = UIColor.white
        label.layer.shadowOffset = CGSize.zero
        label.layer.shadowOpacity = 0.35
        label.layer.shadowRadius = 4

        return label
    }()

    static let iconWidth: CGFloat = 6

    private let icon: UIView = {
        let icon = CircleView()
        icon.layer.shadowOffset = CGSize.zero
        icon.layer.shadowOpacity = 0.35
        icon.layer.shadowRadius = 4

        icon.backgroundColor = .red
        icon.autoSetDimensions(to: CGSize(width: iconWidth, height: iconWidth))
        // Hidden until startCounting() fades it in.
        icon.alpha = 0

        return icon
    }()

    // MARK: - Overrides

    override func sizeThatFits(_ size: CGSize) -> CGSize {
        if #available(iOS 10, *) {
            return super.sizeThatFits(size)
        } else {
            // iOS9 manual layout sizing required for items in the navigation bar
            var baseSize = label.frame.size
            baseSize.width = baseSize.width + stackViewSpacing + RecordingTimerView.iconWidth + layoutMargins.left + layoutMargins.right
            baseSize.height = baseSize.height + layoutMargins.top + layoutMargins.bottom
            return baseSize
        }
    }

    // MARK: -

    var recordingStartTime: TimeInterval?

    /// Starts the elapsed-time ticker and the pulsing-dot animation.
    func startCounting() {
        recordingStartTime = CACurrentMediaTime()
        timer = Timer.weakScheduledTimer(withTimeInterval: 0.1, target: self, selector: #selector(updateView), userInfo: nil, repeats: true)
        UIView.animate(withDuration: 0.5,
                       delay: 0,
                       options: [.autoreverse, .repeat],
                       animations: { self.icon.alpha = 1 })
    }

    /// Freezes the displayed time and fades out the pulsing dot.
    func stopCounting() {
        timer?.invalidate()
        timer = nil
        icon.layer.removeAllAnimations()
        UIView.animate(withDuration: 0.4) {
            self.icon.alpha = 0
        }
    }

    // MARK: -

    private var timer: Timer?

    private lazy var timeFormatter: DateFormatter = {
        let formatter = DateFormatter()
        formatter.dateFormat = "mm:ss"
        // UTC so the duration-as-Date trick below isn't shifted by local time zone.
        formatter.timeZone = TimeZone(identifier: "UTC")!

        return formatter
    }()

    // Elapsed seconds since startCounting(), or 0 if recording never started.
    var recordingDuration: TimeInterval {
        guard let recordingStartTime = recordingStartTime else {
            return 0
        }

        return CACurrentMediaTime() - recordingStartTime
    }

    @objc
    private func updateView() {
        let recordingDuration = self.recordingDuration
        Logger.verbose("recordingDuration: \(recordingDuration)")
        // Format the duration by treating it as an offset from the reference
        // date and printing just mm:ss (valid for durations under one hour).
        let durationDate = Date(timeIntervalSinceReferenceDate: recordingDuration)
        label.text = timeFormatter.string(from: durationDate)
        if #available(iOS 10, *) {
            // do nothing
        } else {
            // iOS9: nav bar items don't auto-resize; resize the label manually.
            label.sizeToFit()
        }
    }
}

View File

@ -1,5 +1,5 @@
//
// Copyright (c) 2018 Open Whisper Systems. All rights reserved.
// Copyright (c) 2019 Open Whisper Systems. All rights reserved.
//
import Foundation

View File

@ -13,7 +13,7 @@ public class OWSButton: UIButton {
// MARK: -
@objc
init(block: @escaping () -> Void = { }) {
public init(block: @escaping () -> Void = { }) {
super.init(frame: .zero)
self.block = block
@ -21,7 +21,7 @@ public class OWSButton: UIButton {
}
@objc
init(title: String, block: @escaping () -> Void = { }) {
public init(title: String, block: @escaping () -> Void = { }) {
super.init(frame: .zero)
self.block = block
@ -30,8 +30,8 @@ public class OWSButton: UIButton {
}
@objc
init(imageName: String,
tintColor: UIColor,
public init(imageName: String,
tintColor: UIColor?,
block: @escaping () -> Void = { }) {
super.init(frame: .zero)

View File

@ -18,6 +18,8 @@ NS_ASSUME_NONNULL_BEGIN
+ (UIFont *)ows_boldFontWithSize:(CGFloat)size;
+ (UIFont *)ows_monospacedDigitFontWithSize:(CGFloat)size;
#pragma mark - Icon Fonts
+ (UIFont *)ows_fontAwesomeFont:(CGFloat)size;

View File

@ -34,6 +34,11 @@ NS_ASSUME_NONNULL_BEGIN
return [UIFont boldSystemFontOfSize:size];
}
+ (UIFont *)ows_monospacedDigitFontWithSize:(CGFloat)size;
{
return [self monospacedDigitSystemFontOfSize:size weight:UIFontWeightRegular];
}
#pragma mark - Icon Fonts
+ (UIFont *)ows_fontAwesomeFont:(CGFloat)size

View File

@ -19,4 +19,9 @@ public class FeatureFlags: NSObject {
public static var sendingMediaWithOversizeText: Bool {
return false
}
@objc
public static var useCustomPhotoCapture: Bool {
return true
}
}