Build SignalRingRTC & add in call related code

Niels Andriesse 2021-07-26 16:03:35 +10:00
parent c90009d5fd
commit d403b275cc
48 changed files with 12613 additions and 5 deletions

SignalRingRTC-Swift.h

@@ -0,0 +1,212 @@
// Generated by Apple Swift version 5.4 (swiftlang-1205.0.26.9 clang-1205.0.19.55)
#ifndef SIGNALRINGRTC_SWIFT_H
#define SIGNALRINGRTC_SWIFT_H
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wgcc-compat"
#if !defined(__has_include)
# define __has_include(x) 0
#endif
#if !defined(__has_attribute)
# define __has_attribute(x) 0
#endif
#if !defined(__has_feature)
# define __has_feature(x) 0
#endif
#if !defined(__has_warning)
# define __has_warning(x) 0
#endif
#if __has_include(<swift/objc-prologue.h>)
# include <swift/objc-prologue.h>
#endif
#pragma clang diagnostic ignored "-Wauto-import"
#include <Foundation/Foundation.h>
#include <stdint.h>
#include <stddef.h>
#include <stdbool.h>
#if !defined(SWIFT_TYPEDEFS)
# define SWIFT_TYPEDEFS 1
# if __has_include(<uchar.h>)
# include <uchar.h>
# elif !defined(__cplusplus)
typedef uint_least16_t char16_t;
typedef uint_least32_t char32_t;
# endif
typedef float swift_float2 __attribute__((__ext_vector_type__(2)));
typedef float swift_float3 __attribute__((__ext_vector_type__(3)));
typedef float swift_float4 __attribute__((__ext_vector_type__(4)));
typedef double swift_double2 __attribute__((__ext_vector_type__(2)));
typedef double swift_double3 __attribute__((__ext_vector_type__(3)));
typedef double swift_double4 __attribute__((__ext_vector_type__(4)));
typedef int swift_int2 __attribute__((__ext_vector_type__(2)));
typedef int swift_int3 __attribute__((__ext_vector_type__(3)));
typedef int swift_int4 __attribute__((__ext_vector_type__(4)));
typedef unsigned int swift_uint2 __attribute__((__ext_vector_type__(2)));
typedef unsigned int swift_uint3 __attribute__((__ext_vector_type__(3)));
typedef unsigned int swift_uint4 __attribute__((__ext_vector_type__(4)));
#endif
#if !defined(SWIFT_PASTE)
# define SWIFT_PASTE_HELPER(x, y) x##y
# define SWIFT_PASTE(x, y) SWIFT_PASTE_HELPER(x, y)
#endif
#if !defined(SWIFT_METATYPE)
# define SWIFT_METATYPE(X) Class
#endif
#if !defined(SWIFT_CLASS_PROPERTY)
# if __has_feature(objc_class_property)
# define SWIFT_CLASS_PROPERTY(...) __VA_ARGS__
# else
# define SWIFT_CLASS_PROPERTY(...)
# endif
#endif
#if __has_attribute(objc_runtime_name)
# define SWIFT_RUNTIME_NAME(X) __attribute__((objc_runtime_name(X)))
#else
# define SWIFT_RUNTIME_NAME(X)
#endif
#if __has_attribute(swift_name)
# define SWIFT_COMPILE_NAME(X) __attribute__((swift_name(X)))
#else
# define SWIFT_COMPILE_NAME(X)
#endif
#if __has_attribute(objc_method_family)
# define SWIFT_METHOD_FAMILY(X) __attribute__((objc_method_family(X)))
#else
# define SWIFT_METHOD_FAMILY(X)
#endif
#if __has_attribute(noescape)
# define SWIFT_NOESCAPE __attribute__((noescape))
#else
# define SWIFT_NOESCAPE
#endif
#if __has_attribute(ns_consumed)
# define SWIFT_RELEASES_ARGUMENT __attribute__((ns_consumed))
#else
# define SWIFT_RELEASES_ARGUMENT
#endif
#if __has_attribute(warn_unused_result)
# define SWIFT_WARN_UNUSED_RESULT __attribute__((warn_unused_result))
#else
# define SWIFT_WARN_UNUSED_RESULT
#endif
#if __has_attribute(noreturn)
# define SWIFT_NORETURN __attribute__((noreturn))
#else
# define SWIFT_NORETURN
#endif
#if !defined(SWIFT_CLASS_EXTRA)
# define SWIFT_CLASS_EXTRA
#endif
#if !defined(SWIFT_PROTOCOL_EXTRA)
# define SWIFT_PROTOCOL_EXTRA
#endif
#if !defined(SWIFT_ENUM_EXTRA)
# define SWIFT_ENUM_EXTRA
#endif
#if !defined(SWIFT_CLASS)
# if __has_attribute(objc_subclassing_restricted)
# define SWIFT_CLASS(SWIFT_NAME) SWIFT_RUNTIME_NAME(SWIFT_NAME) __attribute__((objc_subclassing_restricted)) SWIFT_CLASS_EXTRA
# define SWIFT_CLASS_NAMED(SWIFT_NAME) __attribute__((objc_subclassing_restricted)) SWIFT_COMPILE_NAME(SWIFT_NAME) SWIFT_CLASS_EXTRA
# else
# define SWIFT_CLASS(SWIFT_NAME) SWIFT_RUNTIME_NAME(SWIFT_NAME) SWIFT_CLASS_EXTRA
# define SWIFT_CLASS_NAMED(SWIFT_NAME) SWIFT_COMPILE_NAME(SWIFT_NAME) SWIFT_CLASS_EXTRA
# endif
#endif
#if !defined(SWIFT_RESILIENT_CLASS)
# if __has_attribute(objc_class_stub)
# define SWIFT_RESILIENT_CLASS(SWIFT_NAME) SWIFT_CLASS(SWIFT_NAME) __attribute__((objc_class_stub))
# define SWIFT_RESILIENT_CLASS_NAMED(SWIFT_NAME) __attribute__((objc_class_stub)) SWIFT_CLASS_NAMED(SWIFT_NAME)
# else
# define SWIFT_RESILIENT_CLASS(SWIFT_NAME) SWIFT_CLASS(SWIFT_NAME)
# define SWIFT_RESILIENT_CLASS_NAMED(SWIFT_NAME) SWIFT_CLASS_NAMED(SWIFT_NAME)
# endif
#endif
#if !defined(SWIFT_PROTOCOL)
# define SWIFT_PROTOCOL(SWIFT_NAME) SWIFT_RUNTIME_NAME(SWIFT_NAME) SWIFT_PROTOCOL_EXTRA
# define SWIFT_PROTOCOL_NAMED(SWIFT_NAME) SWIFT_COMPILE_NAME(SWIFT_NAME) SWIFT_PROTOCOL_EXTRA
#endif
#if !defined(SWIFT_EXTENSION)
# define SWIFT_EXTENSION(M) SWIFT_PASTE(M##_Swift_, __LINE__)
#endif
#if !defined(OBJC_DESIGNATED_INITIALIZER)
# if __has_attribute(objc_designated_initializer)
# define OBJC_DESIGNATED_INITIALIZER __attribute__((objc_designated_initializer))
# else
# define OBJC_DESIGNATED_INITIALIZER
# endif
#endif
#if !defined(SWIFT_ENUM_ATTR)
# if defined(__has_attribute) && __has_attribute(enum_extensibility)
# define SWIFT_ENUM_ATTR(_extensibility) __attribute__((enum_extensibility(_extensibility)))
# else
# define SWIFT_ENUM_ATTR(_extensibility)
# endif
#endif
#if !defined(SWIFT_ENUM)
# define SWIFT_ENUM(_type, _name, _extensibility) enum _name : _type _name; enum SWIFT_ENUM_ATTR(_extensibility) SWIFT_ENUM_EXTRA _name : _type
# if __has_feature(generalized_swift_name)
# define SWIFT_ENUM_NAMED(_type, _name, SWIFT_NAME, _extensibility) enum _name : _type _name SWIFT_COMPILE_NAME(SWIFT_NAME); enum SWIFT_COMPILE_NAME(SWIFT_NAME) SWIFT_ENUM_ATTR(_extensibility) SWIFT_ENUM_EXTRA _name : _type
# else
# define SWIFT_ENUM_NAMED(_type, _name, SWIFT_NAME, _extensibility) SWIFT_ENUM(_type, _name, _extensibility)
# endif
#endif
#if !defined(SWIFT_UNAVAILABLE)
# define SWIFT_UNAVAILABLE __attribute__((unavailable))
#endif
#if !defined(SWIFT_UNAVAILABLE_MSG)
# define SWIFT_UNAVAILABLE_MSG(msg) __attribute__((unavailable(msg)))
#endif
#if !defined(SWIFT_AVAILABILITY)
# define SWIFT_AVAILABILITY(plat, ...) __attribute__((availability(plat, __VA_ARGS__)))
#endif
#if !defined(SWIFT_WEAK_IMPORT)
# define SWIFT_WEAK_IMPORT __attribute__((weak_import))
#endif
#if !defined(SWIFT_DEPRECATED)
# define SWIFT_DEPRECATED __attribute__((deprecated))
#endif
#if !defined(SWIFT_DEPRECATED_MSG)
# define SWIFT_DEPRECATED_MSG(...) __attribute__((deprecated(__VA_ARGS__)))
#endif
#if __has_feature(attribute_diagnose_if_objc)
# define SWIFT_DEPRECATED_OBJC(Msg) __attribute__((diagnose_if(1, Msg, "warning")))
#else
# define SWIFT_DEPRECATED_OBJC(Msg) SWIFT_DEPRECATED_MSG(Msg)
#endif
#if !defined(IBSegueAction)
# define IBSegueAction
#endif
#if __has_feature(modules)
#if __has_warning("-Watimport-in-framework-header")
#pragma clang diagnostic ignored "-Watimport-in-framework-header"
#endif
#endif
#pragma clang diagnostic ignored "-Wproperty-attribute-mismatch"
#pragma clang diagnostic ignored "-Wduplicate-method-arg"
#if __has_warning("-Wpragma-clang-attribute")
# pragma clang diagnostic ignored "-Wpragma-clang-attribute"
#endif
#pragma clang diagnostic ignored "-Wunknown-pragmas"
#pragma clang diagnostic ignored "-Wnullability"
#if __has_attribute(external_source_symbol)
# pragma push_macro("any")
# undef any
# pragma clang attribute push(__attribute__((external_source_symbol(language="Swift", defined_in="SignalRingRTC",generated_declaration))), apply_to=any(function,enum,objc_interface,objc_category,objc_protocol))
# pragma pop_macro("any")
#endif
#if __has_attribute(external_source_symbol)
# pragma clang attribute pop
#endif
#pragma clang diagnostic pop
#endif

SignalRingRTC.h

@@ -0,0 +1,14 @@
//
// Copyright 2019-2021 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
//
#import <Foundation/Foundation.h>
//! Project version number for SignalRingRTC.
FOUNDATION_EXPORT double SignalRingRTCVersionNumber;
//! Project version string for SignalRingRTC.
FOUNDATION_EXPORT const unsigned char SignalRingRTCVersionString[];
// In this header, you should import all the public headers of your framework using statements like #import <SignalRingRTC/PublicHeader.h>
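
The boilerplate comment above describes Xcode's standard umbrella-header pattern; in this project the public surface is consumed from Swift, so app code simply imports the framework as a module once it is linked. A minimal, purely illustrative sketch (not part of this commit) that exercises the version symbol exported just above:

import SignalRingRTC

// The FOUNDATION_EXPORT double from the umbrella header surfaces in Swift
// as a Double, so printing it is a cheap check that the framework loads.
print("SignalRingRTC version: \(SignalRingRTCVersionNumber)")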

Binary file not shown.

module.modulemap

@@ -0,0 +1,18 @@
framework module SignalRingRTC {
    umbrella header "SignalRingRTC.h"

    export *
    module * { export * }

    explicit module RingRTC {
        header "ringrtc.h"
        link "ringrtc"
        export *
    }
}

module SignalRingRTC.Swift {
    header "SignalRingRTC-Swift.h"
    requires objc
}
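
The module map keeps the raw C interface (ringrtc.h) behind an explicit RingRTC submodule, so a plain framework import only brings in the umbrella header and the compiled Swift API; code that wants the C layer has to opt in, and the separate SignalRingRTC.Swift module exposes the generated header to Objective-C clients. A hedged sketch of the two Swift import forms this layout allows (illustrative only, assuming ringrtc.h imports cleanly through the Clang importer):

import SignalRingRTC            // framework module: umbrella header + Swift API
import SignalRingRTC.RingRTC    // explicit submodule: the C API declared in ringrtc.h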

File diff suppressed because it is too large.

Binary file not shown.

project.pbxproj

@@ -200,6 +200,39 @@
B879D449247E1BE300DB3608 /* PathVC.swift in Sources */ = {isa = PBXBuildFile; fileRef = B879D448247E1BE300DB3608 /* PathVC.swift */; };
B87EF17126367CF800124B3C /* FileServerAPIV2.swift in Sources */ = {isa = PBXBuildFile; fileRef = B87EF17026367CF800124B3C /* FileServerAPIV2.swift */; };
B87EF18126377A1D00124B3C /* Features.swift in Sources */ = {isa = PBXBuildFile; fileRef = B87EF18026377A1D00124B3C /* Features.swift */; };
B882A74E26AE86E500B5AB69 /* SignalRingRTC.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = B882A74D26AE86E500B5AB69 /* SignalRingRTC.framework */; };
B882A77826AE878300B5AB69 /* CallService.swift in Sources */ = {isa = PBXBuildFile; fileRef = B882A75026AE878300B5AB69 /* CallService.swift */; };
B882A77926AE878300B5AB69 /* GroupCallUpdateMessageHandler.swift in Sources */ = {isa = PBXBuildFile; fileRef = B882A75226AE878300B5AB69 /* GroupCallUpdateMessageHandler.swift */; };
B882A77A26AE878300B5AB69 /* GroupCallRemoteVideoManager.swift in Sources */ = {isa = PBXBuildFile; fileRef = B882A75326AE878300B5AB69 /* GroupCallRemoteVideoManager.swift */; };
B882A77B26AE878300B5AB69 /* CallAudioService.swift in Sources */ = {isa = PBXBuildFile; fileRef = B882A75426AE878300B5AB69 /* CallAudioService.swift */; };
B882A77C26AE878300B5AB69 /* OWSAudioSession+WebRTC.swift in Sources */ = {isa = PBXBuildFile; fileRef = B882A75526AE878300B5AB69 /* OWSAudioSession+WebRTC.swift */; };
B882A77D26AE878300B5AB69 /* SignalCall.swift in Sources */ = {isa = PBXBuildFile; fileRef = B882A75626AE878300B5AB69 /* SignalCall.swift */; };
B882A77E26AE878300B5AB69 /* GroupCallNotificationView.swift in Sources */ = {isa = PBXBuildFile; fileRef = B882A75926AE878300B5AB69 /* GroupCallNotificationView.swift */; };
B882A77F26AE878300B5AB69 /* GroupCallVideoOverflow.swift in Sources */ = {isa = PBXBuildFile; fileRef = B882A75A26AE878300B5AB69 /* GroupCallVideoOverflow.swift */; };
B882A78026AE878300B5AB69 /* GroupCallMemberSheet.swift in Sources */ = {isa = PBXBuildFile; fileRef = B882A75B26AE878300B5AB69 /* GroupCallMemberSheet.swift */; };
B882A78126AE878300B5AB69 /* GroupCallViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = B882A75C26AE878300B5AB69 /* GroupCallViewController.swift */; };
B882A78226AE878300B5AB69 /* GroupCallVideoGridLayout.swift in Sources */ = {isa = PBXBuildFile; fileRef = B882A75D26AE878300B5AB69 /* GroupCallVideoGridLayout.swift */; };
B882A78326AE878300B5AB69 /* GroupCallVideoGrid.swift in Sources */ = {isa = PBXBuildFile; fileRef = B882A75E26AE878300B5AB69 /* GroupCallVideoGrid.swift */; };
B882A78426AE878300B5AB69 /* GroupCallSwipeToastView.swift in Sources */ = {isa = PBXBuildFile; fileRef = B882A75F26AE878300B5AB69 /* GroupCallSwipeToastView.swift */; };
B882A78526AE878300B5AB69 /* GroupCallErrorView.swift in Sources */ = {isa = PBXBuildFile; fileRef = B882A76026AE878300B5AB69 /* GroupCallErrorView.swift */; };
B882A78626AE878300B5AB69 /* GroupCallTooltip.swift in Sources */ = {isa = PBXBuildFile; fileRef = B882A76126AE878300B5AB69 /* GroupCallTooltip.swift */; };
B882A78726AE878300B5AB69 /* GroupCallMemberView.swift in Sources */ = {isa = PBXBuildFile; fileRef = B882A76226AE878300B5AB69 /* GroupCallMemberView.swift */; };
B882A78826AE878300B5AB69 /* RemoteVideoView.m in Sources */ = {isa = PBXBuildFile; fileRef = B882A76326AE878300B5AB69 /* RemoteVideoView.m */; };
B882A78926AE878300B5AB69 /* LocalVideoView.swift in Sources */ = {isa = PBXBuildFile; fileRef = B882A76426AE878300B5AB69 /* LocalVideoView.swift */; };
B882A78A26AE878300B5AB69 /* CallHeader.swift in Sources */ = {isa = PBXBuildFile; fileRef = B882A76526AE878300B5AB69 /* CallHeader.swift */; };
B882A78B26AE878300B5AB69 /* CallButton.swift in Sources */ = {isa = PBXBuildFile; fileRef = B882A76626AE878300B5AB69 /* CallButton.swift */; };
B882A78C26AE878300B5AB69 /* CallControls.swift in Sources */ = {isa = PBXBuildFile; fileRef = B882A76726AE878300B5AB69 /* CallControls.swift */; };
B882A78D26AE878300B5AB69 /* NonCallKitCallUIAdaptee.swift in Sources */ = {isa = PBXBuildFile; fileRef = B882A76A26AE878300B5AB69 /* NonCallKitCallUIAdaptee.swift */; };
B882A78E26AE878300B5AB69 /* IndividualCallViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = B882A76B26AE878300B5AB69 /* IndividualCallViewController.swift */; };
B882A78F26AE878300B5AB69 /* CallKitCallManager.swift in Sources */ = {isa = PBXBuildFile; fileRef = B882A76D26AE878300B5AB69 /* CallKitCallManager.swift */; };
B882A79026AE878300B5AB69 /* CallKitCallUIAdaptee.swift in Sources */ = {isa = PBXBuildFile; fileRef = B882A76E26AE878300B5AB69 /* CallKitCallUIAdaptee.swift */; };
B882A79126AE878300B5AB69 /* CallUIAdapter.swift in Sources */ = {isa = PBXBuildFile; fileRef = B882A76F26AE878300B5AB69 /* CallUIAdapter.swift */; };
B882A79226AE878300B5AB69 /* WebRTCCallMessageHandler.swift in Sources */ = {isa = PBXBuildFile; fileRef = B882A77126AE878300B5AB69 /* WebRTCCallMessageHandler.swift */; };
B882A79326AE878300B5AB69 /* TurnServerInfo.swift in Sources */ = {isa = PBXBuildFile; fileRef = B882A77226AE878300B5AB69 /* TurnServerInfo.swift */; };
B882A79426AE878300B5AB69 /* AudioSource.swift in Sources */ = {isa = PBXBuildFile; fileRef = B882A77326AE878300B5AB69 /* AudioSource.swift */; };
B882A79526AE878300B5AB69 /* IndividualCallService.swift in Sources */ = {isa = PBXBuildFile; fileRef = B882A77526AE878300B5AB69 /* IndividualCallService.swift */; };
B882A79626AE878300B5AB69 /* OutboundIndividualCallInitiator.swift in Sources */ = {isa = PBXBuildFile; fileRef = B882A77626AE878300B5AB69 /* OutboundIndividualCallInitiator.swift */; };
B882A79726AE878300B5AB69 /* IndividualCall.swift in Sources */ = {isa = PBXBuildFile; fileRef = B882A77726AE878300B5AB69 /* IndividualCall.swift */; };
B8856CA8256F0F42001CE70E /* OWSBackupFragment.m in Sources */ = {isa = PBXBuildFile; fileRef = C33FDB07255A580700E217F9 /* OWSBackupFragment.m */; };
B8856CB1256F0F47001CE70E /* OWSBackupFragment.h in Headers */ = {isa = PBXBuildFile; fileRef = C33FDAEA255A580500E217F9 /* OWSBackupFragment.h */; settings = {ATTRIBUTES = (Public, ); }; };
B8856CEE256F1054001CE70E /* OWSAudioPlayer.m in Sources */ = {isa = PBXBuildFile; fileRef = C38EF2F7255B6DBC007E1867 /* OWSAudioPlayer.m */; };
@@ -1185,6 +1218,40 @@
B879D44A247E1D9200DB3608 /* PathStatusView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PathStatusView.swift; sourceTree = "<group>"; };
B87EF17026367CF800124B3C /* FileServerAPIV2.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FileServerAPIV2.swift; sourceTree = "<group>"; };
B87EF18026377A1D00124B3C /* Features.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Features.swift; sourceTree = "<group>"; };
B882A74D26AE86E500B5AB69 /* SignalRingRTC.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = SignalRingRTC.framework; path = Dependencies/SignalRingRTC.framework; sourceTree = "<group>"; };
B882A75026AE878300B5AB69 /* CallService.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = CallService.swift; sourceTree = "<group>"; };
B882A75226AE878300B5AB69 /* GroupCallUpdateMessageHandler.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = GroupCallUpdateMessageHandler.swift; sourceTree = "<group>"; };
B882A75326AE878300B5AB69 /* GroupCallRemoteVideoManager.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = GroupCallRemoteVideoManager.swift; sourceTree = "<group>"; };
B882A75426AE878300B5AB69 /* CallAudioService.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = CallAudioService.swift; sourceTree = "<group>"; };
B882A75526AE878300B5AB69 /* OWSAudioSession+WebRTC.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "OWSAudioSession+WebRTC.swift"; sourceTree = "<group>"; };
B882A75626AE878300B5AB69 /* SignalCall.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = SignalCall.swift; sourceTree = "<group>"; };
B882A75926AE878300B5AB69 /* GroupCallNotificationView.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = GroupCallNotificationView.swift; sourceTree = "<group>"; };
B882A75A26AE878300B5AB69 /* GroupCallVideoOverflow.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = GroupCallVideoOverflow.swift; sourceTree = "<group>"; };
B882A75B26AE878300B5AB69 /* GroupCallMemberSheet.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = GroupCallMemberSheet.swift; sourceTree = "<group>"; };
B882A75C26AE878300B5AB69 /* GroupCallViewController.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = GroupCallViewController.swift; sourceTree = "<group>"; };
B882A75D26AE878300B5AB69 /* GroupCallVideoGridLayout.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = GroupCallVideoGridLayout.swift; sourceTree = "<group>"; };
B882A75E26AE878300B5AB69 /* GroupCallVideoGrid.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = GroupCallVideoGrid.swift; sourceTree = "<group>"; };
B882A75F26AE878300B5AB69 /* GroupCallSwipeToastView.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = GroupCallSwipeToastView.swift; sourceTree = "<group>"; };
B882A76026AE878300B5AB69 /* GroupCallErrorView.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = GroupCallErrorView.swift; sourceTree = "<group>"; };
B882A76126AE878300B5AB69 /* GroupCallTooltip.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = GroupCallTooltip.swift; sourceTree = "<group>"; };
B882A76226AE878300B5AB69 /* GroupCallMemberView.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = GroupCallMemberView.swift; sourceTree = "<group>"; };
B882A76326AE878300B5AB69 /* RemoteVideoView.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = RemoteVideoView.m; sourceTree = "<group>"; };
B882A76426AE878300B5AB69 /* LocalVideoView.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = LocalVideoView.swift; sourceTree = "<group>"; };
B882A76526AE878300B5AB69 /* CallHeader.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = CallHeader.swift; sourceTree = "<group>"; };
B882A76626AE878300B5AB69 /* CallButton.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = CallButton.swift; sourceTree = "<group>"; };
B882A76726AE878300B5AB69 /* CallControls.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = CallControls.swift; sourceTree = "<group>"; };
B882A76826AE878300B5AB69 /* RemoteVideoView.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = RemoteVideoView.h; sourceTree = "<group>"; };
B882A76A26AE878300B5AB69 /* NonCallKitCallUIAdaptee.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = NonCallKitCallUIAdaptee.swift; sourceTree = "<group>"; };
B882A76B26AE878300B5AB69 /* IndividualCallViewController.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = IndividualCallViewController.swift; sourceTree = "<group>"; };
B882A76D26AE878300B5AB69 /* CallKitCallManager.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = CallKitCallManager.swift; sourceTree = "<group>"; };
B882A76E26AE878300B5AB69 /* CallKitCallUIAdaptee.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = CallKitCallUIAdaptee.swift; sourceTree = "<group>"; };
B882A76F26AE878300B5AB69 /* CallUIAdapter.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = CallUIAdapter.swift; sourceTree = "<group>"; };
B882A77126AE878300B5AB69 /* WebRTCCallMessageHandler.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = WebRTCCallMessageHandler.swift; sourceTree = "<group>"; };
B882A77226AE878300B5AB69 /* TurnServerInfo.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = TurnServerInfo.swift; sourceTree = "<group>"; };
B882A77326AE878300B5AB69 /* AudioSource.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = AudioSource.swift; sourceTree = "<group>"; };
B882A77526AE878300B5AB69 /* IndividualCallService.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = IndividualCallService.swift; sourceTree = "<group>"; };
B882A77626AE878300B5AB69 /* OutboundIndividualCallInitiator.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = OutboundIndividualCallInitiator.swift; sourceTree = "<group>"; };
B882A77726AE878300B5AB69 /* IndividualCall.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = IndividualCall.swift; sourceTree = "<group>"; };
B8856D5F256F129B001CE70E /* OWSAlerts.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = OWSAlerts.swift; sourceTree = "<group>"; };
B885D5F52334A32100EE0D8E /* UIView+Constraints.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "UIView+Constraints.swift"; sourceTree = "<group>"; };
B886B4A62398B23E00211ABE /* QRCodeVC.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = QRCodeVC.swift; sourceTree = "<group>"; };
@@ -1848,6 +1915,7 @@
isa = PBXFrameworksBuildPhase;
buildActionMask = 2147483647;
files = (
B882A74E26AE86E500B5AB69 /* SignalRingRTC.framework in Frameworks */,
B8FF8DAE25C0D00F004D1F22 /* SessionMessagingKit.framework in Frameworks */,
B8FF8DAF25C0D00F004D1F22 /* SessionUtilitiesKit.framework in Frameworks */,
C37F54DC255BB84A002AEA92 /* SessionSnodeKit.framework in Frameworks */,
@@ -2158,6 +2226,102 @@
path = "Message Cells";
sourceTree = "<group>";
};
B882A74F26AE878300B5AB69 /* Calls */ = {
isa = PBXGroup;
children = (
B882A75026AE878300B5AB69 /* CallService.swift */,
B882A75126AE878300B5AB69 /* Group */,
B882A75426AE878300B5AB69 /* CallAudioService.swift */,
B882A75526AE878300B5AB69 /* OWSAudioSession+WebRTC.swift */,
B882A75626AE878300B5AB69 /* SignalCall.swift */,
B882A75726AE878300B5AB69 /* UserInterface */,
B882A77026AE878300B5AB69 /* Signaling */,
B882A77326AE878300B5AB69 /* AudioSource.swift */,
B882A77426AE878300B5AB69 /* Individual */,
);
path = Calls;
sourceTree = "<group>";
};
B882A75126AE878300B5AB69 /* Group */ = {
isa = PBXGroup;
children = (
B882A75226AE878300B5AB69 /* GroupCallUpdateMessageHandler.swift */,
B882A75326AE878300B5AB69 /* GroupCallRemoteVideoManager.swift */,
);
path = Group;
sourceTree = "<group>";
};
B882A75726AE878300B5AB69 /* UserInterface */ = {
isa = PBXGroup;
children = (
B882A75826AE878300B5AB69 /* Group */,
B882A76326AE878300B5AB69 /* RemoteVideoView.m */,
B882A76426AE878300B5AB69 /* LocalVideoView.swift */,
B882A76526AE878300B5AB69 /* CallHeader.swift */,
B882A76626AE878300B5AB69 /* CallButton.swift */,
B882A76726AE878300B5AB69 /* CallControls.swift */,
B882A76826AE878300B5AB69 /* RemoteVideoView.h */,
B882A76926AE878300B5AB69 /* Individual */,
);
path = UserInterface;
sourceTree = "<group>";
};
B882A75826AE878300B5AB69 /* Group */ = {
isa = PBXGroup;
children = (
B882A75926AE878300B5AB69 /* GroupCallNotificationView.swift */,
B882A75A26AE878300B5AB69 /* GroupCallVideoOverflow.swift */,
B882A75B26AE878300B5AB69 /* GroupCallMemberSheet.swift */,
B882A75C26AE878300B5AB69 /* GroupCallViewController.swift */,
B882A75D26AE878300B5AB69 /* GroupCallVideoGridLayout.swift */,
B882A75E26AE878300B5AB69 /* GroupCallVideoGrid.swift */,
B882A75F26AE878300B5AB69 /* GroupCallSwipeToastView.swift */,
B882A76026AE878300B5AB69 /* GroupCallErrorView.swift */,
B882A76126AE878300B5AB69 /* GroupCallTooltip.swift */,
B882A76226AE878300B5AB69 /* GroupCallMemberView.swift */,
);
path = Group;
sourceTree = "<group>";
};
B882A76926AE878300B5AB69 /* Individual */ = {
isa = PBXGroup;
children = (
B882A76A26AE878300B5AB69 /* NonCallKitCallUIAdaptee.swift */,
B882A76B26AE878300B5AB69 /* IndividualCallViewController.swift */,
B882A76C26AE878300B5AB69 /* CallKit */,
B882A76F26AE878300B5AB69 /* CallUIAdapter.swift */,
);
path = Individual;
sourceTree = "<group>";
};
B882A76C26AE878300B5AB69 /* CallKit */ = {
isa = PBXGroup;
children = (
B882A76D26AE878300B5AB69 /* CallKitCallManager.swift */,
B882A76E26AE878300B5AB69 /* CallKitCallUIAdaptee.swift */,
);
path = CallKit;
sourceTree = "<group>";
};
B882A77026AE878300B5AB69 /* Signaling */ = {
isa = PBXGroup;
children = (
B882A77126AE878300B5AB69 /* WebRTCCallMessageHandler.swift */,
B882A77226AE878300B5AB69 /* TurnServerInfo.swift */,
);
path = Signaling;
sourceTree = "<group>";
};
B882A77426AE878300B5AB69 /* Individual */ = {
isa = PBXGroup;
children = (
B882A77526AE878300B5AB69 /* IndividualCallService.swift */,
B882A77626AE878300B5AB69 /* OutboundIndividualCallInitiator.swift */,
B882A77726AE878300B5AB69 /* IndividualCall.swift */,
);
path = Individual;
sourceTree = "<group>";
};
B887C38125C7C79700E11DAE /* Input View */ = {
isa = PBXGroup;
children = (
@@ -3448,6 +3612,7 @@
D221A08C169C9E5E00537ABF /* Frameworks */ = {
isa = PBXGroup;
children = (
B882A74D26AE86E500B5AB69 /* SignalRingRTC.framework */,
C35E8AA22485C72300ACB629 /* SwiftCSV.framework */,
B847570023D568EB00759540 /* SignalServiceKit.framework */,
3496955F21A2FC8100DCFE74 /* CloudKit.framework */,
@@ -3502,6 +3667,7 @@
children = (
C3F0A58F255C8E3D007BE2A3 /* Meta */,
C36096BC25AD1C3E008B62B2 /* Backups */,
B882A74F26AE878300B5AB69 /* Calls */,
C360969C25AD18BA008B62B2 /* Closed Groups */,
B835246C25C38AA20089A44F /* Conversations */,
C32B405424A961E1001117B5 /* Dependencies */,
@@ -4759,6 +4925,8 @@
3496956E21A301A100DCFE74 /* OWSBackupExportJob.m in Sources */,
4C1885D2218F8E1C00B67051 /* PhotoGridViewCell.swift in Sources */,
34D1F0501F7D45A60066283D /* GifPickerCell.swift in Sources */,
B882A79226AE878300B5AB69 /* WebRTCCallMessageHandler.swift in Sources */,
B882A79326AE878300B5AB69 /* TurnServerInfo.swift in Sources */,
3496957421A301A100DCFE74 /* OWSBackupAPI.swift in Sources */,
C3E5C2FA251DBABB0040DFFC /* EditClosedGroupVC.swift in Sources */,
B8783E9E23EB948D00404FB8 /* UILabel+Interaction.swift in Sources */,
@@ -4779,23 +4947,31 @@
B83524A525C3BA4B0089A44F /* InfoMessageCell.swift in Sources */,
B84A89BC25DE328A0040017D /* ProfilePictureVC.swift in Sources */,
34386A54207D271D009F5D9C /* NeverClearView.swift in Sources */,
B882A77E26AE878300B5AB69 /* GroupCallNotificationView.swift in Sources */,
451166C01FD86B98000739BA /* AccountManager.swift in Sources */,
C374EEF425DB31D40073A857 /* VoiceMessageRecordingView.swift in Sources */,
B83F2B88240CB75A000A54AB /* UIImage+Scaling.swift in Sources */,
3430FE181F7751D4000EC51B /* GiphyAPI.swift in Sources */,
340FC8AA204DAC8D007AEB0F /* NotificationSettingsViewController.m in Sources */,
4C090A1B210FD9C7001FD7F9 /* HapticFeedback.swift in Sources */,
B882A79126AE878300B5AB69 /* CallUIAdapter.swift in Sources */,
B882A78A26AE878300B5AB69 /* CallHeader.swift in Sources */,
3496744F2076ACD000080B5F /* LongTextViewController.swift in Sources */,
B882A78026AE878300B5AB69 /* GroupCallMemberSheet.swift in Sources */,
34F308A21ECB469700BB7697 /* OWSBezierPathView.m in Sources */,
B882A77F26AE878300B5AB69 /* GroupCallVideoOverflow.swift in Sources */,
B882A79426AE878300B5AB69 /* AudioSource.swift in Sources */,
B886B4A92398BA1500211ABE /* QRCode.swift in Sources */,
3496955D219B605E00DCFE74 /* PhotoCollectionPickerController.swift in Sources */,
34B0796D1FCF46B100E248C2 /* MainAppContext.m in Sources */,
34A8B3512190A40E00218A25 /* MediaAlbumView.swift in Sources */,
B882A78226AE878300B5AB69 /* GroupCallVideoGridLayout.swift in Sources */,
4C4AEC4520EC343B0020E72B /* DismissableTextField.swift in Sources */,
3496955E219B605E00DCFE74 /* PhotoLibrary.swift in Sources */,
C3A76A8D25DB83F90074CB90 /* PermissionMissingModal.swift in Sources */,
340FC8A9204DAC8D007AEB0F /* NotificationSettingsOptionsViewController.m in Sources */,
B849789625D4A2F500D0D0B3 /* LinkPreviewView.swift in Sources */,
B882A79726AE878300B5AB69 /* IndividualCall.swift in Sources */,
C3D0972B2510499C00F6E3E4 /* BackgroundPoller.swift in Sources */,
C3548F0624456447009433A8 /* PNModeVC.swift in Sources */,
B80A579F23DFF1F300876683 /* NewClosedGroupVC.swift in Sources */,
@@ -4803,21 +4979,30 @@
3496957221A301A100DCFE74 /* OWSBackup.m in Sources */,
B835247925C38D880089A44F /* MessageCell.swift in Sources */,
B86BD08623399CEF000F5AE3 /* SeedModal.swift in Sources */,
B882A77826AE878300B5AB69 /* CallService.swift in Sources */,
34E3E5681EC4B19400495BAC /* AudioProgressView.swift in Sources */,
B8D0A26925E4A2C200C1835E /* Onboarding.swift in Sources */,
B882A79526AE878300B5AB69 /* IndividualCallService.swift in Sources */,
B882A78326AE878300B5AB69 /* GroupCallVideoGrid.swift in Sources */,
34D1F0521F7E8EA30066283D /* GiphyDownloader.swift in Sources */,
B882A78826AE878300B5AB69 /* RemoteVideoView.m in Sources */,
450DF2051E0D74AC003D14BE /* Platform.swift in Sources */,
4CC613362227A00400E21A3A /* ConversationSearch.swift in Sources */,
B882A77B26AE878300B5AB69 /* CallAudioService.swift in Sources */,
B882A79026AE878300B5AB69 /* CallKitCallUIAdaptee.swift in Sources */,
B82149B825D60393009C0F2A /* BlockedModal.swift in Sources */,
B82B408C239A068800A248E7 /* RegisterVC.swift in Sources */,
346129991FD1E4DA00532771 /* SignalApp.m in Sources */,
3496957121A301A100DCFE74 /* OWSBackupImportJob.m in Sources */,
34BECE301F7ABCF800D7438D /* GifPickerLayout.swift in Sources */,
C331FFFE2558FF3B00070591 /* ConversationCell.swift in Sources */,
B882A77D26AE878300B5AB69 /* SignalCall.swift in Sources */,
B8F5F72325F1B4CA003BF8D4 /* DownloadAttachmentModal.swift in Sources */,
C3DFFAC623E96F0D0058DAF8 /* Sheet.swift in Sources */,
B882A78426AE878300B5AB69 /* GroupCallSwipeToastView.swift in Sources */,
C31FFE57254A5FFE00F19441 /* KeyPairUtilities.swift in Sources */,
B8D84EA325DF745A005A043E /* LinkPreviewState.swift in Sources */,
B882A78E26AE878300B5AB69 /* IndividualCallViewController.swift in Sources */,
45C0DC1E1E69011F00E04C47 /* UIStoryboard+OWS.swift in Sources */,
45A6DAD61EBBF85500893231 /* ReminderView.swift in Sources */,
B82B408E239DC00D00A248E7 /* DisplayNameVC.swift in Sources */,
@@ -4826,10 +5011,12 @@
B8AF4BB426A5204600583500 /* SendSeedModal.swift in Sources */,
B821494625D4D6FF009C0F2A /* URLModal.swift in Sources */,
C374EEEB25DA3CA70073A857 /* ConversationTitleView.swift in Sources */,
B882A77A26AE878300B5AB69 /* GroupCallRemoteVideoManager.swift in Sources */,
B88FA7F2260C3EB10049422F /* OpenGroupSuggestionGrid.swift in Sources */,
4CA485BB2232339F004B9E7D /* PhotoCaptureViewController.swift in Sources */,
34330AA31E79686200DF2FB9 /* OWSProgressView.m in Sources */,
344825C6211390C800DB4BD8 /* OWSOrphanDataCleaner.m in Sources */,
B882A78926AE878300B5AB69 /* LocalVideoView.swift in Sources */,
C328254925CA60E60062D0A7 /* ContextMenuVC+Action.swift in Sources */,
4542DF54208D40AC007B4E76 /* LoadingViewController.swift in Sources */,
34D5CCA91EAE3D30005515DB /* AvatarViewHelper.m in Sources */,
@@ -4845,17 +5032,22 @@
B8544E3323D50E4900299F14 /* SNAppearance.swift in Sources */,
4C586926224FAB83003FD070 /* AVAudioSession+OWS.m in Sources */,
C331FFF42558FF0300070591 /* PNOptionView.swift in Sources */,
B882A79626AE878300B5AB69 /* OutboundIndividualCallInitiator.swift in Sources */,
4C4AE6A1224AF35700D4AF6F /* SendMediaNavigationController.swift in Sources */,
45F32C222057297A00A300D5 /* MediaDetailViewController.m in Sources */,
B82149C125D605C6009C0F2A /* InfoBanner.swift in Sources */,
C3DAB3242480CB2B00725F25 /* SRCopyableLabel.swift in Sources */,
B882A77C26AE878300B5AB69 /* OWSAudioSession+WebRTC.swift in Sources */,
B882A78526AE878300B5AB69 /* GroupCallErrorView.swift in Sources */,
B8CCF63F23975CFB0091D419 /* JoinOpenGroupVC.swift in Sources */,
34ABC0E421DD20C500ED9469 /* ConversationMessageMapping.swift in Sources */,
B882A77926AE878300B5AB69 /* GroupCallUpdateMessageHandler.swift in Sources */,
B85357C323A1BD1200AAF6CD /* SeedVC.swift in Sources */,
45B5360E206DD8BB00D61655 /* UIResponder+OWS.swift in Sources */,
B8D84ECF25E3108A005A043E /* ExpandingAttachmentsButton.swift in Sources */,
B875885A264503A6000E60D0 /* JoinOpenGroupModal.swift in Sources */,
B8CCF6432397711F0091D419 /* SettingsVC.swift in Sources */,
B882A78D26AE878300B5AB69 /* NonCallKitCallUIAdaptee.swift in Sources */,
C354E75A23FE2A7600CE22E3 /* BaseVC.swift in Sources */,
3441FD9F21A3604F00BB9542 /* BackupRestoreViewController.swift in Sources */,
45C0DC1B1E68FE9000E04C47 /* UIApplication+OWS.swift in Sources */,
@@ -4868,9 +5060,12 @@
340FC8B7204DAC8D007AEB0F /* OWSConversationSettingsViewController.m in Sources */,
34BECE2E1F7ABCE000D7438D /* GifPickerViewController.swift in Sources */,
B84664F5235022F30083A1CD /* MentionUtilities.swift in Sources */,
B882A78726AE878300B5AB69 /* GroupCallMemberView.swift in Sources */,
34D1F0C01F8EC1760066283D /* MessageRecipientStatusUtils.swift in Sources */,
C328250F25CA06020062D0A7 /* VoiceMessageView.swift in Sources */,
B82B4090239DD75000A248E7 /* RestoreVC.swift in Sources */,
B882A78626AE878300B5AB69 /* GroupCallTooltip.swift in Sources */,
B882A78C26AE878300B5AB69 /* CallControls.swift in Sources */,
3488F9362191CC4000E524CC /* MediaView.swift in Sources */,
B8569AC325CB5D2900DBA3DB /* ConversationVC+Interaction.swift in Sources */,
3496955C219B605E00DCFE74 /* ImagePickerController.swift in Sources */,
@@ -4884,6 +5079,7 @@
76EB054018170B33006006FC /* AppDelegate.m in Sources */,
340FC8B6204DAC8D007AEB0F /* OWSQRCodeScanningViewController.m in Sources */,
C33100082558FF6D00070591 /* NewConversationButtonSet.swift in Sources */,
B882A78126AE878300B5AB69 /* GroupCallViewController.swift in Sources */,
C3AAFFF225AE99710089E6DD /* AppDelegate.swift in Sources */,
B8BB82A5238F627000BA5194 /* HomeVC.swift in Sources */,
C31A6C5A247F214E001123EF /* UIView+Glow.swift in Sources */,
@@ -4893,6 +5089,7 @@
B8569AE325CBB19A00DBA3DB /* DocumentView.swift in Sources */,
B85357BF23A1AE0800AAF6CD /* SeedReminderView.swift in Sources */,
B821494F25D4E163009C0F2A /* BodyTextView.swift in Sources */,
B882A78B26AE878300B5AB69 /* CallButton.swift in Sources */,
C35E8AAE2485E51D00ACB629 /* IP2Country.swift in Sources */,
B835249B25C3AB650089A44F /* VisibleMessageCell.swift in Sources */,
340FC8AE204DAC8D007AEB0F /* OWSSoundSettingsViewController.m in Sources */,
@@ -4904,6 +5101,7 @@
45E5A6991F61E6DE001E4A8A /* MarqueeLabel.swift in Sources */,
C302093E25DCBF08001F572D /* MentionSelectionView.swift in Sources */,
C328251F25CA3A900062D0A7 /* QuoteView.swift in Sources */,
B882A78F26AE878300B5AB69 /* CallKitCallManager.swift in Sources */,
B8EB20F02640F7F000773E52 /* OpenGroupInvitationView.swift in Sources */,
B86BD08423399ACF000F5AE3 /* Modal.swift in Sources */,
C328254025CA55880062D0A7 /* ContextMenuVC.swift in Sources */,
@@ -6153,6 +6351,7 @@
FRAMEWORK_SEARCH_PATHS = (
"$(inherited)",
"$(SRCROOT)",
"$(PROJECT_DIR)/Dependencies",
);
GCC_OPTIMIZATION_LEVEL = 0;
GCC_PRECOMPILE_PREFIX_HEADER = YES;
@@ -6221,6 +6420,7 @@
FRAMEWORK_SEARCH_PATHS = (
"$(inherited)",
"$(SRCROOT)",
"$(PROJECT_DIR)/Dependencies",
);
GCC_OPTIMIZATION_LEVEL = 3;
GCC_PRECOMPILE_PREFIX_HEADER = YES;

AudioSource.swift

@@ -0,0 +1,84 @@
//
// Copyright (c) 2020 Open Whisper Systems. All rights reserved.
//
import Foundation
import AVFoundation
import SignalServiceKit
public struct AudioSource: Hashable {
public let localizedName: String
public let portDescription: AVAudioSessionPortDescription?
// The built-in loud speaker / aka speakerphone
public let isBuiltInSpeaker: Bool
// The built-in quiet speaker, aka the normal phone handset receiver earpiece
public let isBuiltInEarPiece: Bool
public init(localizedName: String, isBuiltInSpeaker: Bool, isBuiltInEarPiece: Bool, portDescription: AVAudioSessionPortDescription? = nil) {
self.localizedName = localizedName
self.isBuiltInSpeaker = isBuiltInSpeaker
self.isBuiltInEarPiece = isBuiltInEarPiece
self.portDescription = portDescription
}
public init(portDescription: AVAudioSessionPortDescription) {
let isBuiltInEarPiece = portDescription.portType == AVAudioSession.Port.builtInMic
// portDescription.portName works well for BT linked devices, but if we are using
// the built in mic, we have "iPhone Microphone" which is a little awkward.
// In that case, instead we prefer just the model name e.g. "iPhone" or "iPad"
let localizedName = isBuiltInEarPiece ? UIDevice.current.localizedModel : portDescription.portName
self.init(localizedName: localizedName,
isBuiltInSpeaker: false,
isBuiltInEarPiece: isBuiltInEarPiece,
portDescription: portDescription)
}
// Speakerphone is handled separately from the other audio routes as it doesn't appear as an "input"
public static var builtInSpeaker: AudioSource {
return self.init(localizedName: NSLocalizedString("AUDIO_ROUTE_BUILT_IN_SPEAKER", comment: "action sheet button title to enable built in speaker during a call"),
isBuiltInSpeaker: true,
isBuiltInEarPiece: false)
}
// MARK: Hashable
public static func ==(lhs: AudioSource, rhs: AudioSource) -> Bool {
// Simply comparing the `portDescription` vs the `portDescription.uid`
// caused multiple instances of the built in mic to turn up in a set.
if lhs.isBuiltInSpeaker && rhs.isBuiltInSpeaker {
return true
}
if lhs.isBuiltInSpeaker || rhs.isBuiltInSpeaker {
return false
}
guard let lhsPortDescription = lhs.portDescription else {
owsFailDebug("only the built in speaker should lack a port description")
return false
}
guard let rhsPortDescription = rhs.portDescription else {
owsFailDebug("only the built in speaker should lack a port description")
return false
}
return lhsPortDescription.uid == rhsPortDescription.uid
}
public func hash(into hasher: inout Hasher) {
guard let portDescription = self.portDescription else {
assert(self.isBuiltInSpeaker)
hasher.combine("Built In Speaker")
return
}
hasher.combine(portDescription.uid)
}
}
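
A minimal sketch (not part of this commit) of how the Hashable semantics above behave in practice: the built-in speaker is its own identity, while every other source is keyed by its port UID, so duplicate port descriptions for the same physical input collapse when collected into a Set:

import AVFoundation

// The speakerphone is added explicitly because it never appears as an input;
// every other selectable route comes from the session's available inputs.
let session = AVAudioSession.sharedInstance()
var sources: Set<AudioSource> = [AudioSource.builtInSpeaker]
for port in session.availableInputs ?? [] {
    sources.insert(AudioSource(portDescription: port))
}
print("selectable audio routes: \(sources.map { $0.localizedName })")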

CallAudioService.swift

@@ -0,0 +1,604 @@
//
// Copyright (c) 2021 Open Whisper Systems. All rights reserved.
//
import Foundation
import AVFoundation
import SignalServiceKit
import SignalMessaging
import AVKit
import SignalRingRTC
protocol CallAudioServiceDelegate: AnyObject {
func callAudioServiceDidChangeAudioSession(_ callAudioService: CallAudioService)
func callAudioServiceDidChangeAudioSource(_ callAudioService: CallAudioService, audioSource: AudioSource?)
}
@objc class CallAudioService: NSObject, CallObserver {
private var vibrateTimer: Timer?
var handleRinging = false
weak var delegate: CallAudioServiceDelegate? {
willSet {
assert(newValue == nil || delegate == nil)
}
}
// MARK: Vibration config
private let vibrateRepeatDuration = 1.6
// Our ring buzz is a pair of vibrations.
// `pulseDuration` is the small pause between the two vibrations in the pair.
private let pulseDuration = 0.2
var avAudioSession: AVAudioSession {
return AVAudioSession.sharedInstance()
}
// MARK: - Initializers
override init() {
super.init()
// We cannot assert singleton here, because this class gets rebuilt when the user changes relevant call settings
// Configure audio session so we don't prompt user with Record permission until call is connected.
audioSession.configureRTCAudio()
NotificationCenter.default.addObserver(forName: AVAudioSession.routeChangeNotification, object: avAudioSession, queue: OperationQueue()) { _ in
assert(!Thread.isMainThread)
self.audioRouteDidChange()
}
Self.callService.addObserverAndSyncState(observer: self)
}
deinit {
NotificationCenter.default.removeObserver(self)
}
// MARK: - CallObserver
internal func individualCallStateDidChange(_ call: SignalCall, state: CallState) {
AssertIsOnMainThread()
handleState(call: call.individualCall)
}
internal func individualCallLocalAudioMuteDidChange(_ call: SignalCall, isAudioMuted: Bool) {
AssertIsOnMainThread()
ensureProperAudioSession(call: call)
}
internal func individualCallHoldDidChange(_ call: SignalCall, isOnHold: Bool) {
AssertIsOnMainThread()
ensureProperAudioSession(call: call)
}
internal func individualCallLocalVideoMuteDidChange(_ call: SignalCall, isVideoMuted: Bool) {
AssertIsOnMainThread()
ensureProperAudioSession(call: call)
}
func groupCallLocalDeviceStateChanged(_ call: SignalCall) {
ensureProperAudioSession(call: call)
}
func groupCallRemoteDeviceStatesChanged(_ call: SignalCall) {
// This should not be required, but for some reason setting the mode
// to "videoChat" prior to a remote device being connected gets changed
// to "voiceChat" by iOS. This results in the audio coming out of the
// earpiece instead of the speaker. It may be a result of us not actually
// playing any audio until the remote device connects, or something
// going on with the underlying RTCAudioSession that's not directly
// in our control.
ensureProperAudioSession(call: call)
}
func groupCallEnded(_ call: SignalCall, reason: GroupCallEndReason) {
ensureProperAudioSession(call: call)
}
private let routePicker = AVRoutePickerView()
@discardableResult
public func presentRoutePicker() -> Bool {
guard let routeButton = routePicker.subviews.first(where: { $0 is UIButton }) as? UIButton else {
owsFailDebug("Failed to find subview to present route picker, falling back to old system")
return false
}
routeButton.sendActions(for: .touchUpInside)
return true
}
public func requestSpeakerphone(isEnabled: Bool) {
// This is a little too slow to execute on the main thread and the results are not immediately available after execution
// anyway, so we dispatch async. If you need to know the new value, you'll need to check isSpeakerphoneEnabled and take
// advantage of the CallAudioServiceDelegate.callAudioService(_:didUpdateIsSpeakerphoneEnabled:)
DispatchQueue.global().async {
do {
try self.avAudioSession.overrideOutputAudioPort( isEnabled ? .speaker : .none )
} catch {
Logger.warn("failed to set \(#function) = \(isEnabled) with error: \(error)")
}
}
}
private func audioRouteDidChange() {
guard let currentAudioSource = currentAudioSource else {
Logger.warn("Switched to route without audio source")
return
}
DispatchQueue.main.async { [weak self] in
guard let self = self else { return }
self.delegate?.callAudioServiceDidChangeAudioSource(self, audioSource: currentAudioSource)
}
}
private func ensureProperAudioSession(call: SignalCall?) {
switch call?.mode {
case .individual(let call):
ensureProperAudioSession(call: call)
case .group(let call):
ensureProperAudioSession(call: call)
default:
// Revert to ambient audio
setAudioSession(category: .ambient, mode: .default)
}
}
private func ensureProperAudioSession(call: GroupCall?) {
guard let call = call, call.localDeviceState.joinState != .notJoined else {
// Revert to ambient audio
setAudioSession(category: .ambient, mode: .default)
return
}
if call.isOutgoingVideoMuted {
setAudioSession(category: .playAndRecord, mode: .voiceChat, options: .allowBluetooth)
} else {
setAudioSession(category: .playAndRecord, mode: .videoChat, options: .allowBluetooth)
}
}
private func ensureProperAudioSession(call: IndividualCall?) {
AssertIsOnMainThread()
guard let call = call, !call.isEnded else {
// Revert to ambient audio
setAudioSession(category: .ambient,
mode: .default)
return
}
if call.state == .localRinging {
setAudioSession(category: .playback, mode: .default)
} else if call.hasLocalVideo {
// Because ModeVideoChat affects gain, we don't want to apply it until the call is connected.
// Otherwise, sounds like ringing will be extra loud for video vs. speakerphone.
// Apple Docs say that setting mode to AVAudioSessionModeVideoChat has the
// side effect of setting options: .allowBluetooth, when I remove the (seemingly unnecessary)
// option, and inspect AVAudioSession.shared.categoryOptions == 0. And availableInputs
// does not include my linked bluetooth device
setAudioSession(category: .playAndRecord,
mode: .videoChat,
options: .allowBluetooth)
} else {
// Apple Docs say that setting mode to AVAudioSessionModeVoiceChat has the
// side effect of setting options: .allowBluetooth, when I remove the (seemingly unnecessary)
// option, and inspect AVAudioSession.shared.categoryOptions == 0. And availableInputs
// does not include my linked bluetooth device
setAudioSession(category: .playAndRecord,
mode: .voiceChat,
options: .allowBluetooth)
}
}
// MARK: - Service action handlers
public func handleState(call: IndividualCall) {
assert(Thread.isMainThread)
Logger.verbose("new state: \(call.state)")
// Stop playing sounds while switching audio session so we don't
// get any blips across a temporary unintended route.
stopPlayingAnySounds()
self.ensureProperAudioSession(call: call)
switch call.state {
case .idle: handleIdle(call: call)
case .dialing: handleDialing(call: call)
case .answering: handleAnswering(call: call)
case .remoteRinging: handleRemoteRinging(call: call)
case .localRinging: handleLocalRinging(call: call)
case .connected: handleConnected(call: call)
case .reconnecting: handleReconnecting(call: call)
case .localFailure: handleLocalFailure(call: call)
case .localHangup: handleLocalHangup(call: call)
case .remoteHangup: handleRemoteHangup(call: call)
case .remoteHangupNeedPermission: handleRemoteHangup(call: call)
case .remoteBusy: handleBusy(call: call)
case .answeredElsewhere: handleAnsweredElsewhere(call: call)
case .declinedElsewhere: handleAnsweredElsewhere(call: call)
case .busyElsewhere: handleAnsweredElsewhere(call: call)
}
}
private func handleIdle(call: IndividualCall) {
Logger.debug("")
}
private func handleDialing(call: IndividualCall) {
AssertIsOnMainThread()
Logger.debug("")
// HACK: Without this async, dialing sound only plays once. I don't really understand why. Does the audioSession
// need some time to settle? Is something else interrupting our session?
DispatchQueue.main.asyncAfter(deadline: DispatchTime.now() + 0.2) {
self.play(sound: .callConnecting)
}
}
private func handleAnswering(call: IndividualCall) {
AssertIsOnMainThread()
Logger.debug("")
}
private func handleRemoteRinging(call: IndividualCall) {
AssertIsOnMainThread()
Logger.debug("")
self.play(sound: .callOutboundRinging)
}
private func handleLocalRinging(call: IndividualCall) {
AssertIsOnMainThread()
Logger.debug("")
startRinging(call: call)
}
private func handleConnected(call: IndividualCall) {
AssertIsOnMainThread()
Logger.debug("")
}
private func handleReconnecting(call: IndividualCall) {
AssertIsOnMainThread()
Logger.debug("")
}
private func handleLocalFailure(call: IndividualCall) {
AssertIsOnMainThread()
Logger.debug("")
play(sound: .callEnded)
handleCallEnded(call: call)
}
private func handleLocalHangup(call: IndividualCall) {
AssertIsOnMainThread()
Logger.debug("")
play(sound: .callEnded)
handleCallEnded(call: call)
}
private func handleRemoteHangup(call: IndividualCall) {
AssertIsOnMainThread()
Logger.debug("")
vibrate()
play(sound: .callEnded)
handleCallEnded(call: call)
}
private func handleBusy(call: IndividualCall) {
AssertIsOnMainThread()
Logger.debug("")
play(sound: .callBusy)
// Let the busy sound play for 4 seconds. The full file is longer than necessary.
DispatchQueue.main.asyncAfter(deadline: DispatchTime.now() + 4.0) {
self.handleCallEnded(call: call)
}
}
private func handleAnsweredElsewhere(call: IndividualCall) {
AssertIsOnMainThread()
Logger.debug("")
play(sound: .callEnded)
handleCallEnded(call: call)
}
private func handleCallEnded(call: IndividualCall) {
AssertIsOnMainThread()
Logger.debug("")
// Sometimes (usually but not always) upon ending a call, the currentPlayer does not get
// played to completion. This is necessary in order for the player's
// audioActivity to remove itself from OWSAudioSession. Otherwise future AudioActivities,
// like recording a voice note, will be prevented from having their needs met.
//
// Furthermore, no interruption delegate is called nor AVAudioSessionInterruptionNotification
// is posted. I'm not sure why we have to do this.
if let audioPlayer = currentPlayer {
audioPlayer.stop()
}
// Stop solo audio, revert to ambient.
setAudioSession(category: .ambient)
}
// MARK: Playing Sounds
var currentPlayer: OWSAudioPlayer?
private func stopPlayingAnySounds() {
currentPlayer?.stop()
stopRinging()
}
private func prepareToPlay(sound: OWSStandardSound) -> OWSAudioPlayer? {
guard let newPlayer = OWSSounds.audioPlayer(forSound: sound.rawValue, audioBehavior: .call) else {
owsFailDebug("unable to build player for sound: \(OWSSounds.displayName(forSound: sound.rawValue))")
return nil
}
Logger.info("playing sound: \(OWSSounds.displayName(forSound: sound.rawValue))")
// It's important to stop the current player **before** starting the new player. In the case that
// we're playing the same sound, since the player is memoized on the sound instance, we'd otherwise
// stop the sound we just started.
self.currentPlayer?.stop()
self.currentPlayer = newPlayer
return newPlayer
}
private func play(sound: OWSStandardSound) {
guard let newPlayer = prepareToPlay(sound: sound) else { return }
newPlayer.play()
}
// MARK: - Ringing
private func startRinging(call: IndividualCall) {
guard handleRinging else {
Logger.debug("ignoring \(#function) since CallKit handles it's own ringing state")
return
}
vibrateTimer?.invalidate()
vibrateTimer = .scheduledTimer(withTimeInterval: vibrateRepeatDuration, repeats: true) { [weak self] _ in
self?.ringVibration()
}
guard let player = prepareToPlay(sound: .defaultiOSIncomingRingtone) else {
return owsFailDebug("Failed to prepare player for ringing")
}
startObservingRingerState { [weak self] isDeviceSilenced in
AssertIsOnMainThread()
// We must ensure the proper audio session before
// each time we play / pause, otherwise the category
// may have changed and no playback would occur.
self?.ensureProperAudioSession(call: call)
if isDeviceSilenced {
player.pause()
} else {
player.play()
}
}
}
private func stopRinging() {
guard handleRinging else {
Logger.debug("ignoring \(#function) since CallKit handles it's own ringing state")
return
}
Logger.debug("")
// Stop vibrating
vibrateTimer?.invalidate()
vibrateTimer = nil
stopObservingRingerState()
currentPlayer?.stop()
}
// public so it can be called by timer via selector
public func ringVibration() {
// Since a call notification is more urgent than a message notification, we
// vibrate twice, like a pulse, to differentiate from a normal notification vibration.
vibrate()
DispatchQueue.main.asyncAfter(deadline: DispatchTime.now() + pulseDuration) {
self.vibrate()
}
}
func vibrate() {
// TODO implement HapticAdapter for iPhone7 and up
AudioServicesPlaySystemSound(kSystemSoundID_Vibrate)
}
// MARK: - AudioSession MGMT
// TODO move this to CallAudioSession?
// Note this method is sensitive to the current audio session configuration.
// Specifically if you call it while speakerphone is enabled you won't see
// any connected bluetooth routes.
var availableInputs: [AudioSource] {
guard let availableInputs = avAudioSession.availableInputs else {
// I'm not sure why this would happen, but it may indicate an error.
owsFailDebug("No available inputs or inputs not ready")
return [AudioSource.builtInSpeaker]
}
Logger.info("availableInputs: \(availableInputs)")
return [AudioSource.builtInSpeaker] + availableInputs.map { portDescription in
return AudioSource(portDescription: portDescription)
}
}
var hasExternalInputs: Bool { return availableInputs.count > 2 }
var currentAudioSource: AudioSource? {
get {
let outputsByType = avAudioSession.currentRoute.outputs.reduce(
into: [AVAudioSession.Port: AVAudioSessionPortDescription]()
) { result, portDescription in
result[portDescription.portType] = portDescription
}
let inputsByType = avAudioSession.currentRoute.inputs.reduce(
into: [AVAudioSession.Port: AVAudioSessionPortDescription]()
) { result, portDescription in
result[portDescription.portType] = portDescription
}
if let builtInMic = inputsByType[.builtInMic], inputsByType[.builtInReceiver] != nil {
return AudioSource(portDescription: builtInMic)
} else if outputsByType[.builtInSpeaker] != nil {
return AudioSource.builtInSpeaker
} else if let firstRemaining = inputsByType.values.first {
return AudioSource(portDescription: firstRemaining)
} else {
return nil
}
}
set {
guard currentAudioSource != newValue else { return }
Logger.info("changing preferred input: \(String(describing: currentAudioSource)) -> \(String(describing: newValue))")
if let portDescription = newValue?.portDescription {
do {
try avAudioSession.setPreferredInput(portDescription)
} catch {
owsFailDebug("failed setting audio source with error: \(error)")
}
} else if newValue == AudioSource.builtInSpeaker {
requestSpeakerphone(isEnabled: true)
} else {
owsFailDebug("Tried to set unexpected audio source")
}
delegate?.callAudioServiceDidChangeAudioSource(self, audioSource: newValue)
}
}
private func setAudioSession(category: AVAudioSession.Category,
mode: AVAudioSession.Mode? = nil,
options: AVAudioSession.CategoryOptions = AVAudioSession.CategoryOptions(rawValue: 0)) {
AssertIsOnMainThread()
var audioSessionChanged = false
do {
if let mode = mode {
let oldCategory = avAudioSession.category
let oldMode = avAudioSession.mode
let oldOptions = avAudioSession.categoryOptions
guard oldCategory != category || oldMode != mode || oldOptions != options else {
return
}
audioSessionChanged = true
if oldCategory != category {
Logger.debug("audio session changed category: \(oldCategory) -> \(category) ")
}
if oldMode != mode {
Logger.debug("audio session changed mode: \(oldMode) -> \(mode) ")
}
if oldOptions != options {
Logger.debug("audio session changed options: \(oldOptions) -> \(options) ")
}
try avAudioSession.setCategory(category, mode: mode, options: options)
} else {
let oldCategory = avAudioSession.category
let oldOptions = avAudioSession.categoryOptions
guard avAudioSession.category != category || avAudioSession.categoryOptions != options else {
return
}
audioSessionChanged = true
if oldCategory != category {
Logger.debug("audio session changed category: \(oldCategory) -> \(category) ")
}
if oldOptions != options {
Logger.debug("audio session changed options: \(oldOptions) -> \(options) ")
}
try avAudioSession.ows_setCategory(category, with: options)
}
} catch {
let message = "failed to set category: \(category) mode: \(String(describing: mode)), options: \(options) with error: \(error)"
owsFailDebug(message)
}
if audioSessionChanged {
Logger.info("")
self.delegate?.callAudioServiceDidChangeAudioSession(self)
}
}
// MARK: - Ringer State
// let encodedDarwinNotificationName = "com.apple.springboard.ringerstate".encodedForSelector
private static let ringerStateNotificationName = DarwinNotificationName("dAF+P3ICAn12PwUCBHoAeHMBcgR1PwR6AHh2BAUGcgZ2".decodedForSelector!)
private var ringerStateToken: Int32?
private func startObservingRingerState(stateChanged: @escaping (_ isDeviceSilenced: Bool) -> Void) {
func isRingerStateSilenced(token: Int32) -> Bool {
return DarwinNotificationCenter.getStateForObserver(token) > 0 ? false : true
}
let token = DarwinNotificationCenter.addObserver(
for: Self.ringerStateNotificationName,
queue: .main
) { stateChanged(isRingerStateSilenced(token: $0)) }
ringerStateToken = token
stateChanged(isRingerStateSilenced(token: token))
}
private func stopObservingRingerState() {
guard let ringerStateToken = ringerStateToken else { return }
DarwinNotificationCenter.removeObserver(ringerStateToken)
self.ringerStateToken = nil
}
// MARK: - Join / Leave sound
func playJoinSound() {
play(sound: .groupCallJoin)
}
func playLeaveSound() {
play(sound: .groupCallLeave)
}
}
extension CallAudioService: CallServiceObserver {
func didUpdateCall(from oldValue: SignalCall?, to newValue: SignalCall?) {
oldValue?.removeObserver(self)
newValue?.addObserverAndSyncState(observer: self)
}
}

File diff suppressed because it is too large

View File

@ -0,0 +1,189 @@
//
// Copyright (c) 2021 Open Whisper Systems. All rights reserved.
//
import Foundation
import SignalRingRTC
class GroupCallRemoteVideoManager: Dependencies {
private var currentGroupCall: GroupCall? {
guard let call = Self.callService.currentCall, call.isGroupCall else { return nil }
return call.groupCall
}
// MARK: - Remote Video Views
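// One view per (remote demuxId, display mode): the same member can be rendered in more
// than one place at once (e.g. the speaker view), so views are tracked per mode.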
private var videoViews = [UInt32: [GroupCallRemoteMemberView.Mode: GroupCallRemoteVideoView]]()
func remoteVideoView(for device: RemoteDeviceState, mode: GroupCallRemoteMemberView.Mode) -> GroupCallRemoteVideoView {
AssertIsOnMainThread()
var currentVideoViewsDevice = videoViews[device.demuxId] ?? [:]
if let current = currentVideoViewsDevice[mode] { return current }
let videoView = GroupCallRemoteVideoView(demuxId: device.demuxId)
videoView.sizeDelegate = self
videoView.isGroupCall = true
if mode == .speaker { videoView.isFullScreen = true }
currentVideoViewsDevice[mode] = videoView
videoViews[device.demuxId] = currentVideoViewsDevice
return videoView
}
private func destroyRemoteVideoView(for demuxId: UInt32) {
AssertIsOnMainThread()
videoViews[demuxId]?.forEach { $0.value.removeFromSuperview() }
videoViews[demuxId] = nil
}
private var updateVideoRequestsDebounceTimer: Timer?
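// Coalesces rapid view-size changes: wait 200ms after the last change, then send one
// batch of requests. Members with no view currently rendering video are requested at
// 0x0; everyone else is requested at the largest size any of their views is rendering,
// at 15fps for small (overflow-sized) tiles and 30fps otherwise.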
private func updateVideoRequests() {
updateVideoRequestsDebounceTimer?.invalidate()
updateVideoRequestsDebounceTimer = Timer.scheduledTimer(withTimeInterval: 0.2, repeats: false, block: { [weak self] _ in
AssertIsOnMainThread()
guard let self = self else { return }
guard let groupCall = self.currentGroupCall else { return }
let videoRequests: [VideoRequest] = groupCall.remoteDeviceStates.map { demuxId, _ in
guard let renderingVideoViews = self.videoViews[demuxId]?.values.filter({ $0.isRenderingVideo }),
!renderingVideoViews.isEmpty else {
return VideoRequest(demuxId: demuxId, width: 0, height: 0, framerate: nil)
}
let width = renderingVideoViews.reduce(into: 0, { $0 = max($0, $1.currentSize.width) })
let height = renderingVideoViews.reduce(into: 0, { $0 = max($0, $1.currentSize.height) })
return VideoRequest(
demuxId: demuxId,
width: UInt16(width),
height: UInt16(height),
framerate: height <= GroupCallVideoOverflow.itemHeight ? 15 : 30
)
}
groupCall.updateVideoRequests(resolutions: videoRequests)
})
}
}
extension GroupCallRemoteVideoManager: GroupCallRemoteVideoViewSizeDelegate {
func groupCallRemoteVideoViewDidChangeSize(remoteVideoView: GroupCallRemoteVideoView) {
AssertIsOnMainThread()
updateVideoRequests()
}
func groupCallRemoteVideoViewDidChangeSuperview(remoteVideoView: GroupCallRemoteVideoView) {
AssertIsOnMainThread()
guard let device = currentGroupCall?.remoteDeviceStates[remoteVideoView.demuxId] else { return }
remoteVideoView.configure(for: device)
updateVideoRequests()
}
}
extension GroupCallRemoteVideoManager: CallServiceObserver {
func didUpdateCall(from oldValue: SignalCall?, to newValue: SignalCall?) {
guard oldValue != newValue else { return }
videoViews.forEach { self.destroyRemoteVideoView(for: $0.key) }
oldValue?.removeObserver(self)
newValue?.addObserverAndSyncState(observer: self)
}
}
extension GroupCallRemoteVideoManager: CallObserver {
func groupCallRemoteDeviceStatesChanged(_ call: SignalCall) {
for (demuxId, videoViews) in videoViews {
guard let device = call.groupCall.remoteDeviceStates[demuxId] else {
destroyRemoteVideoView(for: demuxId)
continue
}
videoViews.values.forEach { $0.configure(for: device) }
}
}
func groupCallEnded(_ call: SignalCall, reason: GroupCallEndReason) {
videoViews.keys.forEach { destroyRemoteVideoView(for: $0) }
}
}
private protocol GroupCallRemoteVideoViewSizeDelegate: AnyObject {
func groupCallRemoteVideoViewDidChangeSize(remoteVideoView: GroupCallRemoteVideoView)
func groupCallRemoteVideoViewDidChangeSuperview(remoteVideoView: GroupCallRemoteVideoView)
}
class GroupCallRemoteVideoView: UIView {
fileprivate weak var sizeDelegate: GroupCallRemoteVideoViewSizeDelegate?
fileprivate private(set) var currentSize: CGSize = .zero {
didSet {
guard oldValue != currentSize else { return }
remoteVideoView.frame = CGRect(origin: .zero, size: currentSize)
sizeDelegate?.groupCallRemoteVideoViewDidChangeSize(remoteVideoView: self)
}
}
// We cannot subclass this; for some unknown reason WebRTC
// will not render frames properly if we try to.
private let remoteVideoView = RemoteVideoView()
private weak var videoTrack: RTCVideoTrack? {
didSet {
guard oldValue != videoTrack else { return }
oldValue?.remove(remoteVideoView)
videoTrack?.add(remoteVideoView)
}
}
override var frame: CGRect {
didSet { currentSize = frame.size }
}
override var bounds: CGRect {
didSet { currentSize = bounds.size }
}
override func didMoveToSuperview() {
sizeDelegate?.groupCallRemoteVideoViewDidChangeSuperview(remoteVideoView: self)
}
var isGroupCall: Bool {
get { remoteVideoView.isGroupCall }
set { remoteVideoView.isGroupCall = newValue }
}
var isFullScreen: Bool {
get { remoteVideoView.isFullScreen }
set { remoteVideoView.isFullScreen = newValue }
}
var isScreenShare: Bool {
get { remoteVideoView.isScreenShare }
set { remoteVideoView.isScreenShare = newValue }
}
var isRenderingVideo: Bool { videoTrack != nil }
fileprivate let demuxId: UInt32
fileprivate init(demuxId: UInt32) {
self.demuxId = demuxId
super.init(frame: .zero)
addSubview(remoteVideoView)
}
required init?(coder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
deinit { videoTrack = nil }
func configure(for device: RemoteDeviceState) {
guard device.demuxId == demuxId else {
return owsFailDebug("Tried to configure with incorrect device")
}
videoTrack = superview == nil ? nil : device.videoTrack
}
}

View File

@ -0,0 +1,85 @@
//
// Copyright (c) 2021 Open Whisper Systems. All rights reserved.
//
import Foundation
import SignalServiceKit
import SignalRingRTC
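// Sends a "group call update" data message to the group when the local user joins or
// leaves a group call, and routes incoming update messages to the call service so it
// can peek the call and refresh the corresponding thread.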
class GroupCallUpdateMessageHandler: CallServiceObserver, CallObserver, Dependencies {
var didSendJoinMessage: Bool = false
func sendJoinMessageForCallIfNecessary(_ signalCall: SignalCall) {
guard !didSendJoinMessage else { return }
guard signalCall.isGroupCall, let groupCall = signalCall.groupCall else { return }
guard let eraId = groupCall.peekInfo?.eraId else { return }
guard let groupThread = signalCall.thread as? TSGroupThread else { return }
guard groupCall.localDeviceState.joinState == .joined else { return }
sendUpdateMessageForThread(groupThread, eraId: eraId)
didSendJoinMessage = true
}
func sendLeaveMessageForCallIfNecessary(_ signalCall: SignalCall) {
guard didSendJoinMessage else { return }
guard signalCall.isGroupCall, let groupCall = signalCall.groupCall else { return }
guard let groupThread = signalCall.thread as? TSGroupThread else { return }
sendUpdateMessageForThread(groupThread, eraId: groupCall.peekInfo?.eraId)
didSendJoinMessage = false
}
func sendUpdateMessageForThread(_ thread: TSGroupThread, eraId: String?) {
Logger.info("Sending call update message for thread \(thread.uniqueId)")
let updateMessage = OWSOutgoingGroupCallMessage(thread: thread, eraId: eraId)
let messagePreparer = updateMessage.asPreparer
SDSDatabaseStorage.shared.asyncWrite { writeTx in
Self.messageSenderJobQueue.add(message: messagePreparer, transaction: writeTx)
}
}
func handleUpdateMessage(_ message: SSKProtoDataMessageGroupCallUpdate, for thread: TSGroupThread, serverReceivedTimestamp: UInt64) {
Logger.info("Received group call update message for thread: \(thread.uniqueId) eraId: \(String(describing: message.eraID))")
DispatchQueue.main.async {
Self.callService.peekCallAndUpdateThread(
thread,
expectedEraId: message.eraID,
triggerEventTimestamp: serverReceivedTimestamp)
}
}
// MARK: - CallServiceObserver
func didUpdateCall(from oldValue: SignalCall?, to newValue: SignalCall?) {
if let oldValue = oldValue {
sendLeaveMessageForCallIfNecessary(oldValue)
}
didSendJoinMessage = false
oldValue?.removeObserver(self)
newValue?.addObserverAndSyncState(observer: self)
}
// MARK: - CallObserver
func groupCallLocalDeviceStateChanged(_ call: SignalCall) {
owsAssertDebug(call == Self.callService.currentCall)
guard call.isGroupCall, let groupCall = call.groupCall else { return owsFailDebug("Expected a group call") }
let isJoined = (groupCall.localDeviceState.joinState == .joined)
if isJoined {
sendJoinMessageForCallIfNecessary(call)
} else {
sendLeaveMessageForCallIfNecessary(call)
}
}
func groupCallPeekChanged(_ call: SignalCall) {
sendJoinMessageForCallIfNecessary(call)
}
func groupCallEnded(_ call: SignalCall, reason: GroupCallEndReason) {
didSendJoinMessage = false
}
}

View File

@ -0,0 +1,236 @@
//
// Copyright (c) 2021 Open Whisper Systems. All rights reserved.
//
import Foundation
import SignalServiceKit
import SignalRingRTC
public enum CallState: String {
case idle
case dialing
case answering
case remoteRinging
case localRinging
case connected
case reconnecting
case localFailure // terminal
case localHangup // terminal
case remoteHangup // terminal
case remoteHangupNeedPermission // terminal
case remoteBusy // terminal
case answeredElsewhere // terminal
case declinedElsewhere // terminal
case busyElsewhere // terminal
}
public enum CallAdapterType {
case `default`, nonCallKit
}
public enum CallDirection {
case outgoing, incoming
}
public protocol IndividualCallDelegate: AnyObject {
func individualCallStateDidChange(_ call: IndividualCall, state: CallState)
func individualCallLocalVideoMuteDidChange(_ call: IndividualCall, isVideoMuted: Bool)
func individualCallLocalAudioMuteDidChange(_ call: IndividualCall, isAudioMuted: Bool)
func individualCallHoldDidChange(_ call: IndividualCall, isOnHold: Bool)
func individualCallRemoteVideoMuteDidChange(_ call: IndividualCall, isVideoMuted: Bool)
func individualCallRemoteSharingScreenDidChange(_ call: IndividualCall, isRemoteSharingScreen: Bool)
}
/**
* Data model for a WebRTC backed voice/video call.
*
* This class' state should only be accessed on the main queue.
*/
@objc
public class IndividualCall: NSObject, IndividualCallNotificationInfo {
// MARK: -
var backgroundTask: OWSBackgroundTask? {
didSet {
AssertIsOnMainThread()
Logger.info("")
}
}
var callId: UInt64? {
didSet {
AssertIsOnMainThread()
Logger.info("")
}
}
let callAdapterType: CallAdapterType
weak var remoteVideoTrack: RTCVideoTrack? {
didSet {
AssertIsOnMainThread()
Logger.info("")
delegate?.individualCallRemoteVideoMuteDidChange(self, isVideoMuted: !isRemoteVideoEnabled)
}
}
var isRemoteVideoEnabled = false {
didSet {
AssertIsOnMainThread()
Logger.info("\(isRemoteVideoEnabled)")
delegate?.individualCallRemoteVideoMuteDidChange(self, isVideoMuted: !isRemoteVideoEnabled)
}
}
var isRemoteSharingScreen = false {
didSet {
AssertIsOnMainThread()
Logger.info("\(isRemoteSharingScreen)")
delegate?.individualCallRemoteSharingScreenDidChange(self, isRemoteSharingScreen: isRemoteSharingScreen)
}
}
// MARK: -
// tracking cleanup
var wasReportedToSystem = false
var wasRemovedFromSystem = false
@objc
public let remoteAddress: SignalServiceAddress
public var isEnded: Bool {
switch state {
case .localFailure, .localHangup, .remoteHangup, .remoteHangupNeedPermission, .remoteBusy, .answeredElsewhere, .declinedElsewhere, .busyElsewhere:
return true
case .idle, .dialing, .answering, .remoteRinging, .localRinging, .connected, .reconnecting:
return false
}
}
public let direction: CallDirection
// Distinguishes between calls locally, e.g. in CallKit
@objc
public let localId: UUID
public let thread: TSContactThread
public let sentAtTimestamp: UInt64
public var callRecord: TSCall? {
didSet {
AssertIsOnMainThread()
assert(oldValue == nil)
updateCallRecordType()
}
}
public lazy var hasLocalVideo = offerMediaType == .video {
didSet {
AssertIsOnMainThread()
delegate?.individualCallLocalVideoMuteDidChange(self, isVideoMuted: !hasLocalVideo)
}
}
public var state: CallState {
didSet {
AssertIsOnMainThread()
Logger.debug("state changed: \(oldValue) -> \(self.state) for call: \(self)")
updateCallRecordType()
delegate?.individualCallStateDidChange(self, state: state)
}
}
public var offerMediaType: TSRecentCallOfferType = .audio
// We start out muted if the record permission isn't granted. This should generally
// only happen for incoming calls, because we proactively ask about it before you
// can make an outgoing call.
public var isMuted = AVAudioSession.sharedInstance().recordPermission != .granted {
didSet {
AssertIsOnMainThread()
Logger.debug("muted changed: \(oldValue) -> \(self.isMuted)")
delegate?.individualCallLocalAudioMuteDidChange(self, isAudioMuted: isMuted)
}
}
public var isOnHold = false {
didSet {
AssertIsOnMainThread()
Logger.debug("isOnHold changed: \(oldValue) -> \(self.isOnHold)")
delegate?.individualCallHoldDidChange(self, isOnHold: isOnHold)
}
}
public weak var delegate: IndividualCallDelegate?
// MARK: Initializers and Factory Methods
init(direction: CallDirection, localId: UUID, state: CallState, remoteAddress: SignalServiceAddress, sentAtTimestamp: UInt64, callAdapterType: CallAdapterType) {
self.direction = direction
self.localId = localId
self.state = state
self.remoteAddress = remoteAddress
self.thread = TSContactThread.getOrCreateThread(contactAddress: remoteAddress)
self.sentAtTimestamp = sentAtTimestamp
self.callAdapterType = callAdapterType
}
deinit {
Logger.debug("")
if !isEnded {
owsFailDebug("isEnded was unexpectedly false")
}
if wasReportedToSystem {
if !wasRemovedFromSystem {
owsFailDebug("wasRemovedFromSystem was unexpectedly false")
}
} else {
if wasRemovedFromSystem {
owsFailDebug("wasRemovedFromSystem was unexpectedly true")
}
}
}
override public var description: String {
return "IndividualCall: {\(remoteAddress), localId: \(localId), signalingId: \(callId as Optional)))}"
}
private func updateCallRecordType() {
AssertIsOnMainThread()
guard let callRecord = self.callRecord else {
return
}
// Mark incomplete calls as completed if call has connected.
if state == .connected &&
callRecord.callType == .outgoingIncomplete {
callRecord.updateCallType(.outgoing)
}
if state == .connected &&
callRecord.callType == .incomingIncomplete {
callRecord.updateCallType(.incoming)
}
}
// MARK: Equatable
static func == (lhs: IndividualCall, rhs: IndividualCall) -> Bool {
return lhs.localId == rhs.localId
}
}

File diff suppressed because it is too large

View File

@ -0,0 +1,89 @@
//
// Copyright (c) 2021 Open Whisper Systems. All rights reserved.
//
import Foundation
import SignalServiceKit
import SignalMessaging
/**
* Creates an outbound call via WebRTC.
*/
@objc public class OutboundIndividualCallInitiator: NSObject {
@objc public override init() {
super.init()
SwiftSingletons.register(self)
}
/**
* |address| is a SignalServiceAddress
*/
@discardableResult
@objc
public func initiateCall(address: SignalServiceAddress) -> Bool {
Logger.info("with address: \(address)")
guard address.isValid else { return false }
return initiateCall(address: address, isVideo: false)
}
/**
* |address| is a SignalServiceAddress.
*/
@discardableResult
@objc
public func initiateCall(address: SignalServiceAddress, isVideo: Bool) -> Bool {
guard tsAccountManager.isOnboarded() else {
Logger.warn("aborting due to user not being onboarded.")
OWSActionSheets.showActionSheet(title: NSLocalizedString("YOU_MUST_COMPLETE_ONBOARDING_BEFORE_PROCEEDING",
comment: "alert body shown when trying to use features in the app before completing registration-related setup."))
return false
}
guard let callUIAdapter = Self.callService.individualCallService.callUIAdapter else {
owsFailDebug("missing callUIAdapter")
return false
}
guard let frontmostViewController = UIApplication.shared.frontmostViewController else {
owsFailDebug("could not identify frontmostViewController")
return false
}
let showedAlert = SafetyNumberConfirmationSheet.presentIfNecessary(
address: address,
confirmationText: CallStrings.confirmAndCallButtonTitle
) { didConfirmIdentity in
guard didConfirmIdentity else { return }
_ = self.initiateCall(address: address, isVideo: isVideo)
}
guard !showedAlert else {
return false
}
frontmostViewController.ows_askForMicrophonePermissions { granted in
guard granted == true else {
Logger.warn("aborting due to missing microphone permissions.")
frontmostViewController.ows_showNoMicrophonePermissionActionSheet()
return
}
if isVideo {
frontmostViewController.ows_askForCameraPermissions { granted in
guard granted else {
Logger.warn("aborting due to missing camera permissions.")
return
}
callUIAdapter.startAndShowOutgoingCall(address: address, hasLocalVideo: true)
}
} else {
callUIAdapter.startAndShowOutgoingCall(address: address, hasLocalVideo: false)
}
}
return true
}
}
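An illustrative call site (editorial sketch, not part of this commit), assuming `address` is a valid SignalServiceAddress already in hand; the onboarding check, safety-number confirmation, and permission prompts above all run before the call UI appears:
let callInitiator = OutboundIndividualCallInitiator()
// Returns false when onboarding is incomplete or a safety-number confirmation sheet had
// to be shown first; otherwise the call UI adapter starts the outgoing call once
// microphone (and, for video, camera) permissions are granted.
let started = callInitiator.initiateCall(address: address, isVideo: true)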

View File

@ -0,0 +1,49 @@
//
// Copyright (c) 2021 Open Whisper Systems. All rights reserved.
//
import Foundation
import SignalMessaging
import WebRTC
/**
* By default WebRTC starts the audio session (PlayAndRecord) immediately upon creating the peer connection
* but we want to create the peer connection and set up all the signaling channels before we prompt the user
* for an incoming call. Without manually handling the session, this would result in the user seeing a recording
* permission requested (and recording banner) before they even know they have an incoming call.
*
* By using the `useManualAudio` and `isAudioEnabled` attributes of the RTCAudioSession we can delay recording until
* it makes sense.
*/
extension OWSAudioSession {
/**
* The private class that manages AVAudioSession for WebRTC
*/
private var rtcAudioSession: RTCAudioSession {
return .sharedInstance()
}
/**
* This must be called before any audio tracks are added to the peerConnection, else we'll start recording before all
* our signaling is set up.
*/
@objc
public func configureRTCAudio() {
Logger.info("")
rtcAudioSession.useManualAudio = true
}
/**
* Because we useManualAudio with our RTCAudioSession, we have to start/stop the recording audio session ourselves.
* See header for details on manual audio.
*/
@objc
public var isRTCAudioEnabled: Bool {
get {
return rtcAudioSession.isAudioEnabled
}
set {
rtcAudioSession.isAudioEnabled = newValue
}
}
}
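A minimal usage sketch (editorial illustration, not part of this commit), assuming the app's shared `OWSAudioSession` is in hand and that `userDidAcceptCall()` is a hypothetical point at which an incoming call has been answered:
import SignalMessaging

func prepareCallingAudio(audioSession: OWSAudioSession) {
    // Before any peer connection exists: defer the PlayAndRecord session (and the
    // record-permission prompt) by putting WebRTC into manual-audio mode.
    audioSession.configureRTCAudio()
}

func userDidAcceptCall(audioSession: OWSAudioSession) {
    // Hypothetical acceptance point: only now let WebRTC start capturing and playing audio.
    audioSession.isRTCAudioEnabled = true
}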

View File

@ -0,0 +1,325 @@
//
// Copyright (c) 2021 Open Whisper Systems. All rights reserved.
//
import Foundation
import SignalRingRTC
// All Observer methods will be invoked from the main thread.
public protocol CallObserver: AnyObject {
func individualCallStateDidChange(_ call: SignalCall, state: CallState)
func individualCallLocalVideoMuteDidChange(_ call: SignalCall, isVideoMuted: Bool)
func individualCallLocalAudioMuteDidChange(_ call: SignalCall, isAudioMuted: Bool)
func individualCallRemoteVideoMuteDidChange(_ call: SignalCall, isVideoMuted: Bool)
func individualCallRemoteSharingScreenDidChange(_ call: SignalCall, isRemoteSharingScreen: Bool)
func individualCallHoldDidChange(_ call: SignalCall, isOnHold: Bool)
func groupCallLocalDeviceStateChanged(_ call: SignalCall)
func groupCallRemoteDeviceStatesChanged(_ call: SignalCall)
func groupCallPeekChanged(_ call: SignalCall)
func groupCallRequestMembershipProof(_ call: SignalCall)
func groupCallRequestGroupMembers(_ call: SignalCall)
func groupCallEnded(_ call: SignalCall, reason: GroupCallEndReason)
/// Invoked if a call message failed to send because of a safety number change
/// UI observing call state may choose to alert the user (e.g. presenting a SafetyNumberConfirmationSheet)
func callMessageSendFailedUntrustedIdentity(_ call: SignalCall)
}
public extension CallObserver {
func individualCallStateDidChange(_ call: SignalCall, state: CallState) {}
func individualCallLocalVideoMuteDidChange(_ call: SignalCall, isVideoMuted: Bool) {}
func individualCallLocalAudioMuteDidChange(_ call: SignalCall, isAudioMuted: Bool) {}
func individualCallRemoteVideoMuteDidChange(_ call: SignalCall, isVideoMuted: Bool) {}
func individualCallRemoteSharingScreenDidChange(_ call: SignalCall, isRemoteSharingScreen: Bool) {}
func individualCallHoldDidChange(_ call: SignalCall, isOnHold: Bool) {}
func groupCallLocalDeviceStateChanged(_ call: SignalCall) {}
func groupCallRemoteDeviceStatesChanged(_ call: SignalCall) {}
func groupCallPeekChanged(_ call: SignalCall) {}
func groupCallRequestMembershipProof(_ call: SignalCall) {}
func groupCallRequestGroupMembers(_ call: SignalCall) {}
func groupCallEnded(_ call: SignalCall, reason: GroupCallEndReason) {}
func callMessageSendFailedUntrustedIdentity(_ call: SignalCall) {}
}
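// Editorial sketch (not part of this commit): because every requirement above has an
// empty default implementation, an observer only implements the callbacks it needs.
// Note that SignalCall stores observers weakly, so keep a strong reference elsewhere.
class CallBannerController: CallObserver {
    func individualCallStateDidChange(_ call: SignalCall, state: CallState) {
        Logger.info("1:1 call state is now \(state)")
    }

    func groupCallEnded(_ call: SignalCall, reason: GroupCallEndReason) {
        Logger.info("group call ended: \(String(describing: reason))")
    }
}
// Registration replays the current state to the new observer (main thread only):
//   let banner = CallBannerController()   // keep this alive for as long as you observe
//   call.addObserverAndSyncState(observer: banner)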
@objc
public class SignalCall: NSObject, CallManagerCallReference {
public let mode: Mode
public enum Mode {
case individual(IndividualCall)
case group(GroupCall)
}
public let audioActivity: AudioActivity
@objc
var isGroupCall: Bool {
switch mode {
case .group: return true
case .individual: return false
}
}
var groupCall: GroupCall! {
owsAssertDebug(isGroupCall)
guard case .group(let call) = mode else {
owsFailDebug("Missing group call")
return nil
}
return call
}
@objc
var isIndividualCall: Bool {
switch mode {
case .group: return false
case .individual: return true
}
}
@objc
var individualCall: IndividualCall! {
owsAssertDebug(isIndividualCall)
guard case .individual(let call) = mode else {
owsFailDebug("Missing individual call")
return nil
}
return call
}
private(set) lazy var videoCaptureController = VideoCaptureController()
// Should be used only on the main thread
public var connectedDate: Date? {
didSet { AssertIsOnMainThread() }
}
@objc
public let thread: TSThread
public var error: CallError?
public enum CallError: Error {
case providerReset
case disconnected
case externalError(underlyingError: Error)
case timeout(description: String)
case messageSendFailure(underlyingError: Error)
}
var participantAddresses: [SignalServiceAddress] {
switch mode {
case .group(let call):
return call.remoteDeviceStates.values.map { $0.address }
case .individual(let call):
return [call.remoteAddress]
}
}
init(groupCall: GroupCall, groupThread: TSGroupThread) {
mode = .group(groupCall)
audioActivity = AudioActivity(
audioDescription: "[SignalCall] with group \(groupThread.groupModel.groupId)",
behavior: .call
)
thread = groupThread
super.init()
groupCall.delegate = self
}
init(individualCall: IndividualCall) {
mode = .individual(individualCall)
audioActivity = AudioActivity(
audioDescription: "[SignalCall] with individual \(individualCall.remoteAddress)",
behavior: .call
)
thread = individualCall.thread
super.init()
individualCall.delegate = self
}
public class func groupCall(thread: TSGroupThread) -> SignalCall? {
owsAssertDebug(thread.groupModel.groupsVersion == .V2)
let videoCaptureController = VideoCaptureController()
let sfuURL = DebugFlags.callingUseTestSFU.get() ? TSConstants.sfuTestURL : TSConstants.sfuURL
guard let groupCall = Self.callService.callManager.createGroupCall(
groupId: thread.groupModel.groupId,
sfuUrl: sfuURL,
videoCaptureController: videoCaptureController
) else {
owsFailDebug("Failed to create group call")
return nil
}
let call = SignalCall(groupCall: groupCall, groupThread: thread)
call.videoCaptureController = videoCaptureController
return call
}
public class func outgoingIndividualCall(localId: UUID, remoteAddress: SignalServiceAddress) -> SignalCall {
let individualCall = IndividualCall(
direction: .outgoing,
localId: localId,
state: .dialing,
remoteAddress: remoteAddress,
sentAtTimestamp: Date.ows_millisecondTimestamp(),
callAdapterType: .default
)
return SignalCall(individualCall: individualCall)
}
public class func incomingIndividualCall(
localId: UUID,
remoteAddress: SignalServiceAddress,
sentAtTimestamp: UInt64,
offerMediaType: TSRecentCallOfferType
) -> SignalCall {
// If this is a video call, we want to use the in-app call screen
// because CallKit has poor support for video calls. On iOS 14+ we
// always use CallKit, because as of iOS 14 AVAudioPlayer is no longer
// able to start playing sounds in the background.
let callAdapterType: CallAdapterType
if #available(iOS 14, *) {
callAdapterType = .default
} else if offerMediaType == .video {
callAdapterType = .nonCallKit
} else {
callAdapterType = .default
}
let individualCall = IndividualCall(
direction: .incoming,
localId: localId,
state: .answering,
remoteAddress: remoteAddress,
sentAtTimestamp: sentAtTimestamp,
callAdapterType: callAdapterType
)
individualCall.offerMediaType = offerMediaType
return SignalCall(individualCall: individualCall)
}
// MARK: -
private var observers: WeakArray<CallObserver> = []
public func addObserverAndSyncState(observer: CallObserver) {
AssertIsOnMainThread()
observers.append(observer)
// Synchronize observer with current call state
switch mode {
case .individual(let individualCall):
observer.individualCallStateDidChange(self, state: individualCall.state)
case .group:
observer.groupCallLocalDeviceStateChanged(self)
observer.groupCallRemoteDeviceStatesChanged(self)
}
}
public func removeObserver(_ observer: CallObserver) {
AssertIsOnMainThread()
observers.removeAll { $0 === observer }
}
public func removeAllObservers() {
AssertIsOnMainThread()
observers = []
}
public func publishSendFailureUntrustedParticipantIdentity() {
observers.elements.forEach { $0.callMessageSendFailedUntrustedIdentity(self) }
}
// MARK: -
// This method should only be called when the call state is "connected".
public func connectionDuration() -> TimeInterval {
guard let connectedDate = connectedDate else {
owsFailDebug("Called connectionDuration before connected.")
return 0
}
return -connectedDate.timeIntervalSinceNow
}
}
extension SignalCall: GroupCallDelegate {
public func groupCall(onLocalDeviceStateChanged groupCall: GroupCall) {
if groupCall.localDeviceState.joinState == .joined, connectedDate == nil {
connectedDate = Date()
// make sure we don't terminate audio session during call
audioSession.isRTCAudioEnabled = true
owsAssertDebug(audioSession.startAudioActivity(audioActivity))
}
observers.elements.forEach { $0.groupCallLocalDeviceStateChanged(self) }
}
public func groupCall(onRemoteDeviceStatesChanged groupCall: GroupCall) {
observers.elements.forEach { $0.groupCallRemoteDeviceStatesChanged(self) }
}
public func groupCall(onPeekChanged groupCall: GroupCall) {
observers.elements.forEach { $0.groupCallPeekChanged(self) }
}
public func groupCall(requestMembershipProof groupCall: GroupCall) {
observers.elements.forEach { $0.groupCallRequestMembershipProof(self) }
}
public func groupCall(requestGroupMembers groupCall: GroupCall) {
observers.elements.forEach { $0.groupCallRequestGroupMembers(self) }
}
public func groupCall(onEnded groupCall: GroupCall, reason: GroupCallEndReason) {
observers.elements.forEach { $0.groupCallEnded(self, reason: reason) }
}
}
extension SignalCall: IndividualCallDelegate {
public func individualCallStateDidChange(_ call: IndividualCall, state: CallState) {
if case .connected = state, connectedDate == nil {
connectedDate = Date()
}
observers.elements.forEach { $0.individualCallStateDidChange(self, state: state) }
}
public func individualCallLocalVideoMuteDidChange(_ call: IndividualCall, isVideoMuted: Bool) {
observers.elements.forEach { $0.individualCallLocalVideoMuteDidChange(self, isVideoMuted: isVideoMuted) }
}
public func individualCallLocalAudioMuteDidChange(_ call: IndividualCall, isAudioMuted: Bool) {
observers.elements.forEach { $0.individualCallLocalAudioMuteDidChange(self, isAudioMuted: isAudioMuted) }
}
public func individualCallHoldDidChange(_ call: IndividualCall, isOnHold: Bool) {
observers.elements.forEach { $0.individualCallHoldDidChange(self, isOnHold: isOnHold) }
}
public func individualCallRemoteVideoMuteDidChange(_ call: IndividualCall, isVideoMuted: Bool) {
observers.elements.forEach { $0.individualCallRemoteVideoMuteDidChange(self, isVideoMuted: isVideoMuted) }
}
public func individualCallRemoteSharingScreenDidChange(_ call: IndividualCall, isRemoteSharingScreen: Bool) {
observers.elements.forEach { $0.individualCallRemoteSharingScreenDidChange(self, isRemoteSharingScreen: isRemoteSharingScreen) }
}
}
extension GroupCall {
public var isFull: Bool {
guard let peekInfo = peekInfo, let maxDevices = peekInfo.maxDevices else { return false }
return peekInfo.deviceCount >= maxDevices
}
public var maxDevices: UInt32? {
guard let peekInfo = peekInfo, let maxDevices = peekInfo.maxDevices else { return nil }
return maxDevices
}
}
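An illustrative sketch (not part of this commit) of branching on `SignalCall.mode` instead of force-unwrapping the `groupCall` / `individualCall` accessors:
func describe(_ call: SignalCall) -> String {
    switch call.mode {
    case .individual(let individualCall):
        return "1:1 call with \(individualCall.remoteAddress), state: \(individualCall.state)"
    case .group(let groupCall):
        return "group call with \(groupCall.remoteDeviceStates.count) remote device(s)"
    }
}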

View File

@ -0,0 +1,32 @@
//
// Copyright (c) 2020 Open Whisper Systems. All rights reserved.
//
import Foundation
struct TurnServerInfo {
let password: String
let username: String
let urls: [String]
init?(attributes: [String: AnyObject]) {
if let passwordAttribute = (attributes["password"] as? String) {
password = passwordAttribute
} else {
return nil
}
if let usernameAttribute = attributes["username"] as? String {
username = usernameAttribute
} else {
return nil
}
if let urlsAttribute = attributes["urls"] as? [String] {
urls = urlsAttribute
} else {
return nil
}
}
}
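A small illustrative parse (not part of this commit); the credentials and TURN URL below are placeholders:
let attributes: [String: AnyObject] = [
    "username": "example-user" as AnyObject,
    "password": "example-password" as AnyObject,
    "urls": ["turn:turn.example.org"] as AnyObject
]

if let turnServer = TurnServerInfo(attributes: attributes) {
    // All three fields parsed; a missing or mistyped key makes the initializer return nil.
    print("parsed \(turnServer.urls.count) TURN URL(s)")
}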

View File

@ -0,0 +1,166 @@
//
// Copyright (c) 2021 Open Whisper Systems. All rights reserved.
//
import Foundation
import SignalServiceKit
import SignalMessaging
@objc(OWSWebRTCCallMessageHandler)
public class WebRTCCallMessageHandler: NSObject, OWSCallMessageHandler {
// MARK: Initializers
@objc
public override init() {
super.init()
SwiftSingletons.register(self)
}
// MARK: - Call Handlers
public func receivedOffer(
_ offer: SSKProtoCallMessageOffer,
from caller: SignalServiceAddress,
sourceDevice: UInt32,
sentAtTimestamp: UInt64,
serverReceivedTimestamp: UInt64,
serverDeliveryTimestamp: UInt64,
supportsMultiRing: Bool
) {
AssertIsOnMainThread()
let callType: SSKProtoCallMessageOfferType
if offer.hasType {
callType = offer.unwrappedType
} else {
// The type is not defined so assume the default, audio.
callType = .offerAudioCall
}
let thread = TSContactThread.getOrCreateThread(contactAddress: caller)
self.callService.individualCallService.handleReceivedOffer(
thread: thread,
callId: offer.id,
sourceDevice: sourceDevice,
sdp: offer.sdp,
opaque: offer.opaque,
sentAtTimestamp: sentAtTimestamp,
serverReceivedTimestamp: serverReceivedTimestamp,
serverDeliveryTimestamp: serverDeliveryTimestamp,
callType: callType,
supportsMultiRing: supportsMultiRing
)
}
public func receivedAnswer(_ answer: SSKProtoCallMessageAnswer, from caller: SignalServiceAddress, sourceDevice: UInt32, supportsMultiRing: Bool) {
AssertIsOnMainThread()
let thread = TSContactThread.getOrCreateThread(contactAddress: caller)
self.callService.individualCallService.handleReceivedAnswer(
thread: thread,
callId: answer.id,
sourceDevice: sourceDevice,
sdp: answer.sdp,
opaque: answer.opaque,
supportsMultiRing: supportsMultiRing
)
}
public func receivedIceUpdate(_ iceUpdate: [SSKProtoCallMessageIceUpdate], from caller: SignalServiceAddress, sourceDevice: UInt32) {
AssertIsOnMainThread()
let thread = TSContactThread.getOrCreateThread(contactAddress: caller)
self.callService.individualCallService.handleReceivedIceCandidates(
thread: thread,
callId: iceUpdate[0].id,
sourceDevice: sourceDevice,
candidates: iceUpdate
)
}
public func receivedHangup(_ hangup: SSKProtoCallMessageHangup, from caller: SignalServiceAddress, sourceDevice: UInt32) {
AssertIsOnMainThread()
// deviceId is optional and defaults to 0.
var deviceId: UInt32 = 0
let type: SSKProtoCallMessageHangupType
if hangup.hasType {
type = hangup.unwrappedType
if hangup.hasDeviceID {
deviceId = hangup.deviceID
}
} else {
// The type is not defined so assume the default, normal.
type = .hangupNormal
}
let thread = TSContactThread.getOrCreateThread(contactAddress: caller)
self.callService.individualCallService.handleReceivedHangup(
thread: thread,
callId: hangup.id,
sourceDevice: sourceDevice,
type: type,
deviceId: deviceId
)
}
public func receivedBusy(_ busy: SSKProtoCallMessageBusy, from caller: SignalServiceAddress, sourceDevice: UInt32) {
AssertIsOnMainThread()
let thread = TSContactThread.getOrCreateThread(contactAddress: caller)
self.callService.individualCallService.handleReceivedBusy(
thread: thread,
callId: busy.id,
sourceDevice: sourceDevice
)
}
public func receivedOpaque(
_ opaque: SSKProtoCallMessageOpaque,
from caller: SignalServiceAddress,
sourceDevice: UInt32,
serverReceivedTimestamp: UInt64,
serverDeliveryTimestamp: UInt64
) {
AssertIsOnMainThread()
Logger.info("Received opaque call message from \(caller) on device \(sourceDevice)")
guard let message = opaque.data else {
return owsFailDebug("Received opaque call message without data")
}
guard let senderUuid = caller.uuid else {
return owsFailDebug("Received opaque call message from sender without UUID")
}
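// Server timestamps are in milliseconds; report the message age to RingRTC in whole
// seconds, falling back to 0 when either timestamp is missing or inconsistent.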
var messageAgeSec: UInt64 = 0
if serverReceivedTimestamp > 0 && serverDeliveryTimestamp >= serverReceivedTimestamp {
messageAgeSec = (serverDeliveryTimestamp - serverReceivedTimestamp) / 1000
}
self.callService.callManager.receivedCallMessage(
senderUuid: senderUuid,
senderDeviceId: sourceDevice,
localDeviceId: TSAccountManager.shared.storedDeviceId(),
message: message,
messageAgeSec: messageAgeSec
)
}
public func receivedGroupCallUpdateMessage(
_ update: SSKProtoDataMessageGroupCallUpdate,
for groupThread: TSGroupThread,
serverReceivedTimestamp: UInt64) {
Logger.info("Received group call update for thread \(groupThread.uniqueId)")
callService.groupCallMessageHandler.handleUpdateMessage(update, for: groupThread, serverReceivedTimestamp: serverReceivedTimestamp)
}
public func externallyHandleCallMessage(envelope: SSKProtoEnvelope, plaintextData: Data, wasReceivedByUD: Bool, serverDeliveryTimestamp: UInt64, transaction: SDSAnyWriteTransaction) -> Bool {
return false
}
}

View File

@ -0,0 +1,153 @@
//
// Copyright (c) 2021 Open Whisper Systems. All rights reserved.
//
import Foundation
class CallButton: UIButton {
var iconName: String { didSet { updateAppearance() } }
var selectedIconName: String? { didSet { updateAppearance() } }
var currentIconName: String {
if isSelected, let selectedImageName = selectedIconName {
return selectedImageName
}
return iconName
}
var iconColor: UIColor = .ows_white { didSet { updateAppearance() } }
var selectedIconColor: UIColor = .ows_gray75 { didSet { updateAppearance() } }
var currentIconColor: UIColor { isSelected ? selectedIconColor : iconColor }
var unselectedBackgroundColor = UIColor.ows_whiteAlpha40 { didSet { updateAppearance() } }
var selectedBackgroundColor = UIColor.ows_white { didSet { updateAppearance() } }
var currentBackgroundColor: UIColor {
return isSelected ? selectedBackgroundColor : unselectedBackgroundColor
}
var text: String? { didSet { updateAppearance() } }
override var isSelected: Bool { didSet { updateAppearance() } }
override var isHighlighted: Bool { didSet { updateAppearance() } }
var showDropdownArrow = false { didSet { updateDropdownArrow() } }
var isSmall = false { didSet { updateSizing() } }
private var currentConstraints = [NSLayoutConstraint]()
private var currentIconSize: CGFloat { isSmall ? 48 : 56 }
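// Icon insets leave a 14pt (10pt when small) border inside the circle; when a dropdown
// arrow is shown, the icon shifts 3pt toward the leading edge (mirrored for RTL) so the
// icon-plus-arrow pair stays visually centered.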
private var currentIconInsets: UIEdgeInsets {
var insets: UIEdgeInsets
if isSmall {
insets = UIEdgeInsets(top: 10, left: 10, bottom: 10, right: 10)
} else {
insets = UIEdgeInsets(top: 14, left: 14, bottom: 14, right: 14)
}
if showDropdownArrow {
if CurrentAppContext().isRTL {
insets.left += 3
insets.right -= 3
} else {
insets.left -= 3
insets.right += 3
}
}
return insets
}
private lazy var iconView = UIImageView()
private var dropdownIconView: UIImageView?
private lazy var circleView = CircleView()
private lazy var label = UILabel()
init(iconName: String) {
self.iconName = iconName
super.init(frame: .zero)
let circleViewContainer = UIView.container()
circleViewContainer.addSubview(circleView)
circleView.autoPinHeightToSuperview()
circleView.autoPinEdge(toSuperviewEdge: .leading, withInset: 0, relation: .greaterThanOrEqual)
circleView.autoPinEdge(toSuperviewEdge: .trailing, withInset: 0, relation: .greaterThanOrEqual)
circleView.autoHCenterInSuperview()
circleView.layer.shadowOffset = .zero
circleView.layer.shadowOpacity = 0.25
circleView.layer.shadowRadius = 4
let stackView = UIStackView(arrangedSubviews: [circleViewContainer, label])
stackView.axis = .vertical
stackView.spacing = 8
stackView.isUserInteractionEnabled = false
addSubview(stackView)
stackView.autoPinEdgesToSuperviewEdges()
label.font = .ows_dynamicTypeSubheadline
label.textColor = Theme.darkThemePrimaryColor
label.textAlignment = .center
label.layer.shadowOffset = .zero
label.layer.shadowOpacity = 0.25
label.layer.shadowRadius = 4
circleView.addSubview(iconView)
updateAppearance()
updateSizing()
}
private func updateAppearance() {
circleView.backgroundColor = currentBackgroundColor
iconView.setTemplateImageName(currentIconName, tintColor: currentIconColor)
dropdownIconView?.setTemplateImageName("arrow-down-12", tintColor: currentIconColor)
if let text = text {
label.isHidden = false
label.text = text
} else {
label.isHidden = true
}
alpha = isHighlighted ? 0.6 : 1
}
private func updateSizing() {
NSLayoutConstraint.deactivate(currentConstraints)
currentConstraints.removeAll()
currentConstraints += circleView.autoSetDimensions(to: CGSize(square: currentIconSize))
circleView.layer.shadowPath = UIBezierPath(
ovalIn: CGRect(origin: .zero, size: .square(currentIconSize))
).cgPath
currentConstraints += iconView.autoPinEdgesToSuperviewEdges(with: currentIconInsets)
if let dropdownIconView = dropdownIconView {
currentConstraints.append(dropdownIconView.autoPinEdge(.leading, to: .trailing, of: iconView, withOffset: isSmall ? 0 : 2))
}
}
private func updateDropdownArrow() {
if showDropdownArrow {
if dropdownIconView?.superview != nil { return }
let dropdownIconView = UIImageView()
self.dropdownIconView = dropdownIconView
circleView.addSubview(dropdownIconView)
dropdownIconView.autoSetDimensions(to: CGSize(square: 12))
dropdownIconView.autoVCenterInSuperview()
updateSizing()
updateAppearance()
} else {
dropdownIconView?.removeFromSuperview()
dropdownIconView = nil
}
}
required init?(coder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
}

View File

@ -0,0 +1,291 @@
//
// Copyright (c) 2020 Open Whisper Systems. All rights reserved.
//
import Foundation
import SignalRingRTC
@objc
protocol CallControlsDelegate: AnyObject {
func didPressHangup(sender: UIButton)
func didPressAudioSource(sender: UIButton)
func didPressMute(sender: UIButton)
func didPressVideo(sender: UIButton)
func didPressFlipCamera(sender: UIButton)
func didPressCancel(sender: UIButton)
func didPressJoin(sender: UIButton)
}
class CallControls: UIView {
private lazy var hangUpButton: CallButton = {
let button = createButton(
iconName: "phone-down-solid-28",
action: #selector(CallControlsDelegate.didPressHangup)
)
button.unselectedBackgroundColor = .ows_accentRed
return button
}()
private(set) lazy var audioSourceButton = createButton(
iconName: "speaker-solid-28",
action: #selector(CallControlsDelegate.didPressAudioSource)
)
private lazy var muteButton = createButton(
iconName: "mic-off-solid-28",
action: #selector(CallControlsDelegate.didPressMute)
)
private lazy var videoButton = createButton(
iconName: "video-solid-28",
action: #selector(CallControlsDelegate.didPressVideo)
)
private lazy var flipCameraButton: CallButton = {
let button = createButton(
iconName: "switch-camera-28",
action: #selector(CallControlsDelegate.didPressFlipCamera)
)
button.selectedIconColor = button.iconColor
button.selectedBackgroundColor = button.unselectedBackgroundColor
return button
}()
private lazy var cancelButton: UIButton = {
let button = OWSButton()
button.setTitle(CommonStrings.cancelButton, for: .normal)
button.setTitleColor(.ows_white, for: .normal)
button.setBackgroundImage(UIImage(color: .ows_whiteAlpha40), for: .normal)
button.titleLabel?.font = UIFont.ows_dynamicTypeBodyClamped.ows_semibold
button.clipsToBounds = true
button.layer.cornerRadius = 8
button.block = { [weak self] in
self?.delegate.didPressCancel(sender: button)
}
button.contentEdgeInsets = UIEdgeInsets(top: 11, leading: 11, bottom: 11, trailing: 11)
return button
}()
private lazy var joinButtonActivityIndicator = UIActivityIndicatorView(style: .white)
private lazy var joinButton: UIButton = {
let button = OWSButton()
button.setTitleColor(.ows_white, for: .normal)
button.setBackgroundImage(UIImage(color: .ows_accentGreen), for: .normal)
button.titleLabel?.font = UIFont.ows_dynamicTypeBodyClamped.ows_semibold
button.clipsToBounds = true
button.layer.cornerRadius = 8
button.block = { [weak self] in
self?.delegate.didPressJoin(sender: button)
}
button.contentEdgeInsets = UIEdgeInsets(top: 11, leading: 11, bottom: 11, trailing: 11)
button.addSubview(joinButtonActivityIndicator)
button.setTitle(
NSLocalizedString(
"GROUP_CALL_IS_FULL",
comment: "Text explaining the group call is full"
),
for: .disabled
)
button.setTitleColor(.ows_whiteAlpha40, for: .disabled)
joinButtonActivityIndicator.autoCenterInSuperview()
return button
}()
private lazy var gradientView: UIView = {
let gradientLayer = CAGradientLayer()
gradientLayer.colors = [
UIColor.black.withAlphaComponent(0).cgColor,
UIColor.ows_blackAlpha60.cgColor
]
let view = OWSLayerView(frame: .zero) { view in
gradientLayer.frame = view.bounds
}
view.layer.addSublayer(gradientLayer)
return view
}()
private lazy var topStackView = createTopStackView()
private lazy var bottomStackView = createBottomStackView()
private weak var delegate: CallControlsDelegate!
private let call: SignalCall
init(call: SignalCall, delegate: CallControlsDelegate) {
self.call = call
self.delegate = delegate
super.init(frame: .zero)
call.addObserverAndSyncState(observer: self)
callService.audioService.delegate = self
addSubview(gradientView)
gradientView.autoPinEdgesToSuperviewEdges()
let controlsStack = UIStackView(arrangedSubviews: [topStackView, bottomStackView])
controlsStack.axis = .vertical
controlsStack.spacing = 40
addSubview(controlsStack)
controlsStack.autoPinWidthToSuperview()
controlsStack.autoPinEdge(toSuperviewSafeArea: .bottom, withInset: 24)
controlsStack.autoPinEdge(toSuperviewEdge: .top, withInset: 22)
updateControls()
}
deinit {
call.removeObserver(self)
callService.audioService.delegate = nil
}
func createTopStackView() -> UIStackView {
let stackView = UIStackView()
stackView.axis = .horizontal
stackView.spacing = 16
let leadingSpacer = UIView.hStretchingSpacer()
let trailingSpacer = UIView.hStretchingSpacer()
stackView.addArrangedSubview(leadingSpacer)
stackView.addArrangedSubview(audioSourceButton)
stackView.addArrangedSubview(flipCameraButton)
stackView.addArrangedSubview(muteButton)
stackView.addArrangedSubview(videoButton)
stackView.addArrangedSubview(hangUpButton)
stackView.addArrangedSubview(trailingSpacer)
leadingSpacer.autoMatch(.width, to: .width, of: trailingSpacer)
return stackView
}
func createBottomStackView() -> UIStackView {
let stackView = UIStackView()
stackView.axis = .horizontal
stackView.spacing = 8
let leadingSpacer = UIView.hStretchingSpacer()
let trailingSpacer = UIView.hStretchingSpacer()
stackView.addArrangedSubview(leadingSpacer)
stackView.addArrangedSubview(cancelButton)
stackView.addArrangedSubview(joinButton)
stackView.addArrangedSubview(trailingSpacer)
// Prefer to be big.
NSLayoutConstraint.autoSetPriority(.defaultHigh) {
cancelButton.autoSetDimension(.width, toSize: 170)
}
cancelButton.autoMatch(.width, to: .width, of: joinButton)
leadingSpacer.autoMatch(.width, to: .width, of: trailingSpacer)
leadingSpacer.autoSetDimension(.width, toSize: 16, relation: .greaterThanOrEqual)
return stackView
}
private func updateControls() {
let hasExternalAudioInputs = callService.audioService.hasExternalInputs
let isLocalVideoMuted = call.groupCall.isOutgoingVideoMuted
flipCameraButton.isHidden = isLocalVideoMuted
videoButton.isSelected = !isLocalVideoMuted
muteButton.isSelected = call.groupCall.isOutgoingAudioMuted
hangUpButton.isHidden = call.groupCall.localDeviceState.joinState != .joined
// Use small controls if video is enabled and we have external
// audio inputs, because we have five buttons now.
[audioSourceButton, flipCameraButton, videoButton, muteButton, hangUpButton].forEach {
$0.isSmall = hasExternalAudioInputs && !isLocalVideoMuted
}
// Audio Source Handling
if hasExternalAudioInputs, let audioSource = callService.audioService.currentAudioSource {
audioSourceButton.showDropdownArrow = true
audioSourceButton.isHidden = false
if audioSource.isBuiltInEarPiece {
audioSourceButton.iconName = "phone-solid-28"
} else if audioSource.isBuiltInSpeaker {
audioSourceButton.iconName = "speaker-solid-28"
} else {
audioSourceButton.iconName = "speaker-bt-solid-28"
}
} else if UIDevice.current.isIPad {
// iPad *only* supports speaker mode, if there are no external
// devices connected, so we don't need to show the button unless
// we have alternate audio sources.
audioSourceButton.isHidden = true
} else {
// If there are no external audio sources, and video is enabled,
// speaker mode is always enabled so we don't need to show the button.
audioSourceButton.isHidden = !isLocalVideoMuted
// No bluetooth audio detected
audioSourceButton.iconName = "speaker-solid-28"
audioSourceButton.showDropdownArrow = false
}
bottomStackView.isHidden = call.groupCall.localDeviceState.joinState == .joined
let startCallText = NSLocalizedString("GROUP_CALL_START_BUTTON", comment: "Button to start a group call")
let joinCallText = NSLocalizedString("GROUP_CALL_JOIN_BUTTON", comment: "Button to join an ongoing group call")
if call.groupCall.isFull {
joinButton.isEnabled = false
} else if call.groupCall.localDeviceState.joinState == .joining {
joinButton.isEnabled = true
joinButton.isUserInteractionEnabled = false
joinButtonActivityIndicator.startAnimating()
joinButton.setTitle("", for: .normal)
} else {
joinButton.isEnabled = true
joinButton.isUserInteractionEnabled = true
joinButtonActivityIndicator.stopAnimating()
let deviceCount = call.groupCall.peekInfo?.deviceCount ?? 0
joinButton.setTitle(deviceCount == 0 ? startCallText : joinCallText, for: .normal)
}
}
required init(coder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
private func createButton(iconName: String, action: Selector) -> CallButton {
let button = CallButton(iconName: iconName)
button.addTarget(delegate, action: action, for: .touchUpInside)
button.setContentHuggingHorizontalHigh()
button.setCompressionResistanceHorizontalLow()
button.alpha = 0.9
return button
}
}
extension CallControls: CallObserver {
func groupCallLocalDeviceStateChanged(_ call: SignalCall) {
owsAssertDebug(call.isGroupCall)
updateControls()
}
func groupCallPeekChanged(_ call: SignalCall) {
updateControls()
}
func groupCallRemoteDeviceStatesChanged(_ call: SignalCall) {
updateControls()
}
func groupCallEnded(_ call: SignalCall, reason: GroupCallEndReason) {
updateControls()
}
}
extension CallControls: CallAudioServiceDelegate {
func callAudioServiceDidChangeAudioSession(_ callAudioService: CallAudioService) {
updateControls()
}
func callAudioServiceDidChangeAudioSource(_ callAudioService: CallAudioService, audioSource: AudioSource?) {
updateControls()
}
}

View File

@ -0,0 +1,343 @@
//
// Copyright (c) 2021 Open Whisper Systems. All rights reserved.
//
import Foundation
import SignalRingRTC
@objc
protocol CallHeaderDelegate: AnyObject {
func didTapBackButton()
func didTapMembersButton()
}
class CallHeader: UIView {
// MARK: - Views
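// Formats the elapsed call duration. The formatter is pinned to UTC/en_US so HH:mm:ss is
// stable regardless of locale; the leading "00:" hours are stripped below for calls
// shorter than one hour.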
private lazy var dateFormatter: DateFormatter = {
let dateFormatter = DateFormatter()
dateFormatter.dateFormat = "HH:mm:ss"
dateFormatter.timeZone = TimeZone(identifier: "UTC")!
dateFormatter.locale = Locale(identifier: "en_US")
return dateFormatter
}()
private var callDurationTimer: Timer?
private let callTitleLabel = MarqueeLabel()
private let callStatusLabel = UILabel()
private let groupMembersButton = GroupMembersButton()
private let call: SignalCall
private weak var delegate: CallHeaderDelegate!
init(call: SignalCall, delegate: CallHeaderDelegate) {
self.call = call
self.delegate = delegate
super.init(frame: .zero)
call.addObserverAndSyncState(observer: self)
let gradientLayer = CAGradientLayer()
gradientLayer.colors = [
UIColor.ows_blackAlpha60.cgColor,
UIColor.black.withAlphaComponent(0).cgColor
]
let gradientView = OWSLayerView(frame: .zero) { view in
gradientLayer.frame = view.bounds
}
gradientView.layer.addSublayer(gradientLayer)
addSubview(gradientView)
gradientView.autoPinEdgesToSuperviewEdges()
let hStack = UIStackView()
hStack.axis = .horizontal
hStack.spacing = 13
hStack.layoutMargins = UIEdgeInsets(top: 0, left: 8, bottom: 0, right: 8)
hStack.isLayoutMarginsRelativeArrangement = true
addSubview(hStack)
hStack.autoPinWidthToSuperview()
hStack.autoPinEdge(toSuperviewMargin: .top)
hStack.autoPinEdge(toSuperviewEdge: .bottom, withInset: 46)
// Back button
let backButton = UIButton()
let backButtonImage = CurrentAppContext().isRTL ? #imageLiteral(resourceName: "NavBarBackRTL") : #imageLiteral(resourceName: "NavBarBack")
backButton.setTemplateImage(backButtonImage, tintColor: .ows_white)
backButton.autoSetDimensions(to: CGSize(square: 40))
backButton.imageEdgeInsets = UIEdgeInsets(top: -12, leading: -18, bottom: 0, trailing: 0)
backButton.addTarget(delegate, action: #selector(CallHeaderDelegate.didTapBackButton), for: .touchUpInside)
addShadow(to: backButton)
hStack.addArrangedSubview(backButton)
// vStack
let vStack = UIStackView()
vStack.axis = .vertical
vStack.spacing = 4
hStack.addArrangedSubview(vStack)
// Name Label
callTitleLabel.type = .continuous
// This feels pretty slow when you're initially waiting for it, but when you're overlaying video calls, anything faster is distracting.
callTitleLabel.speed = .duration(30.0)
callTitleLabel.animationCurve = .linear
callTitleLabel.fadeLength = 10.0
callTitleLabel.animationDelay = 5
// Add trailing space after the name scrolls before it wraps around and scrolls back in.
callTitleLabel.trailingBuffer = ScaleFromIPhone5(80.0)
callTitleLabel.font = UIFont.ows_dynamicTypeHeadlineClamped.ows_semibold
callTitleLabel.textAlignment = .center
callTitleLabel.textColor = UIColor.white
addShadow(to: callTitleLabel)
vStack.addArrangedSubview(callTitleLabel)
// Status label
callStatusLabel.font = UIFont.ows_dynamicTypeFootnoteClamped
callStatusLabel.textAlignment = .center
callStatusLabel.textColor = UIColor.white
addShadow(to: callStatusLabel)
vStack.addArrangedSubview(callStatusLabel)
// Group members button
groupMembersButton.addTarget(
delegate,
action: #selector(CallHeaderDelegate.didTapMembersButton),
for: .touchUpInside
)
addShadow(to: groupMembersButton)
hStack.addArrangedSubview(groupMembersButton)
updateCallTitleLabel()
updateCallStatusLabel()
updateGroupMembersButton()
}
deinit { call.removeObserver(self) }
private func addShadow(to view: UIView) {
view.layer.shadowOffset = .zero
view.layer.shadowOpacity = 0.25
view.layer.shadowRadius = 4
}
private func updateCallStatusLabel() {
let callStatusText: String
switch call.groupCall.localDeviceState.joinState {
case .notJoined, .joining:
callStatusText = ""
case .joined:
let callDuration = call.connectionDuration()
let callDurationDate = Date(timeIntervalSinceReferenceDate: callDuration)
var formattedDate = dateFormatter.string(from: callDurationDate)
if formattedDate.hasPrefix("00:") {
// Don't show the "hours" portion of the date format unless the
// call duration is at least 1 hour.
formattedDate = String(formattedDate[formattedDate.index(formattedDate.startIndex, offsetBy: 3)...])
} else {
// If showing the "hours" portion of the date format, strip any leading
// zeroes.
if formattedDate.hasPrefix("0") {
formattedDate = String(formattedDate[formattedDate.index(formattedDate.startIndex, offsetBy: 1)...])
}
}
callStatusText = String(format: CallStrings.callStatusFormat, formattedDate)
}
callStatusLabel.text = callStatusText
callStatusLabel.isHidden = call.groupCall.localDeviceState.joinState != .joined || call.groupCall.remoteDeviceStates.count > 1
}
func updateCallTitleLabel() {
let callTitleText: String
if call.groupCall.localDeviceState.connectionState == .reconnecting {
callTitleText = NSLocalizedString(
"GROUP_CALL_RECONNECTING",
comment: "Text indicating that the user has lost their connection to the call and we are reconnecting."
)
} else {
var isFirstMemberPresenting = false
let memberNames: [String] = databaseStorage.read { transaction in
if self.call.groupCall.localDeviceState.joinState == .joined {
let sortedDeviceStates = self.call.groupCall.remoteDeviceStates.sortedByAddedTime
isFirstMemberPresenting = sortedDeviceStates.first?.presenting == true
return sortedDeviceStates.map { self.contactsManager.displayName(for: $0.address, transaction: transaction) }
} else {
return self.call.groupCall.peekInfo?.joinedMembers
.map { self.contactsManager.displayName(for: SignalServiceAddress(uuid: $0), transaction: transaction) } ?? []
}
}
switch call.groupCall.localDeviceState.joinState {
case .joined:
switch memberNames.count {
case 0:
callTitleText = NSLocalizedString(
"GROUP_CALL_NO_ONE_HERE",
comment: "Text explaining that you are the only person currently in the group call"
)
case 1:
if isFirstMemberPresenting {
let formatString = NSLocalizedString(
"GROUP_CALL_PRESENTING_FORMAT",
comment: "Text explaining that a member is presenting. Embeds {member name}"
)
callTitleText = String(format: formatString, memberNames[0])
} else {
callTitleText = memberNames[0]
}
default:
if isFirstMemberPresenting {
let formatString = NSLocalizedString(
"GROUP_CALL_PRESENTING_FORMAT",
comment: "Text explaining that a member is presenting. Embeds {member name}"
)
callTitleText = String(format: formatString, memberNames[0])
} else {
callTitleText = ""
}
}
default:
switch memberNames.count {
case 0:
callTitleText = ""
case 1:
let formatString = NSLocalizedString(
"GROUP_CALL_ONE_PERSON_HERE_FORMAT",
comment: "Text explaining that there is one person in the group call. Embeds {member name}"
)
callTitleText = String(format: formatString, memberNames[0])
case 2:
let formatString = NSLocalizedString(
"GROUP_CALL_TWO_PEOPLE_HERE_FORMAT",
comment: "Text explaining that there are two people in the group call. Embeds {{ %1$@ participant1, %2$@ participant2 }}"
)
callTitleText = String(format: formatString, memberNames[0], memberNames[1])
case 3:
let formatString = NSLocalizedString(
"GROUP_CALL_THREE_PEOPLE_HERE_FORMAT",
comment: "Text explaining that there are three people in the group call. Embeds {{ %1$@ participant1, %2$@ participant2 }}"
)
callTitleText = String(format: formatString, memberNames[0], memberNames[1])
default:
let formatString = NSLocalizedString(
"GROUP_CALL_MANY_PEOPLE_HERE_FORMAT",
comment: "Text explaining that there are more than three people in the group call. Embeds {{ %1$@ participant1, %2$@ participant2, %3$@ participantCount-2 }}"
)
callTitleText = String(format: formatString, memberNames[0], memberNames[1], OWSFormat.formatInt(memberNames.count - 2))
}
}
}
callTitleLabel.text = callTitleText
callTitleLabel.isHidden = callTitleText.isEmpty
}
func updateGroupMembersButton() {
let isJoined = call.groupCall.localDeviceState.joinState == .joined
let remoteMemberCount = isJoined ? call.groupCall.remoteDeviceStates.count : Int(call.groupCall.peekInfo?.deviceCount ?? 0)
groupMembersButton.updateMemberCount(remoteMemberCount + (isJoined ? 1 : 0))
}
required init?(coder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
}
extension CallHeader: CallObserver {
func groupCallLocalDeviceStateChanged(_ call: SignalCall) {
owsAssertDebug(call.isGroupCall)
if call.groupCall.localDeviceState.joinState == .joined {
if callDurationTimer == nil {
let kDurationUpdateFrequencySeconds = 1 / 20.0
callDurationTimer = WeakTimer.scheduledTimer(
timeInterval: TimeInterval(kDurationUpdateFrequencySeconds),
target: self,
userInfo: nil,
repeats: true
) {[weak self] _ in
self?.updateCallStatusLabel()
}
}
} else {
callDurationTimer?.invalidate()
callDurationTimer = nil
}
updateCallTitleLabel()
updateCallStatusLabel()
updateGroupMembersButton()
}
func groupCallPeekChanged(_ call: SignalCall) {
updateCallTitleLabel()
updateGroupMembersButton()
}
func groupCallRemoteDeviceStatesChanged(_ call: SignalCall) {
updateCallTitleLabel()
updateGroupMembersButton()
}
func groupCallEnded(_ call: SignalCall, reason: GroupCallEndReason) {
callDurationTimer?.invalidate()
callDurationTimer = nil
updateCallTitleLabel()
updateCallStatusLabel()
updateGroupMembersButton()
}
}
private class GroupMembersButton: UIButton {
private let iconImageView = UIImageView()
private let countLabel = UILabel()
override init(frame: CGRect) {
super.init(frame: frame)
autoSetDimension(.height, toSize: 40)
iconImageView.contentMode = .scaleAspectFit
iconImageView.setTemplateImage(#imageLiteral(resourceName: "group-solid-24"), tintColor: .ows_white)
addSubview(iconImageView)
iconImageView.autoPinEdge(toSuperviewEdge: .leading)
iconImageView.autoSetDimensions(to: CGSize(square: 22))
iconImageView.autoPinEdge(toSuperviewEdge: .top, withInset: 2)
countLabel.font = UIFont.ows_dynamicTypeFootnoteClamped.ows_monospaced
countLabel.textColor = .ows_white
addSubview(countLabel)
countLabel.autoPinEdge(.leading, to: .trailing, of: iconImageView, withOffset: 5)
countLabel.autoPinEdge(toSuperviewEdge: .trailing, withInset: 5)
countLabel.autoAlignAxis(.horizontal, toSameAxisOf: iconImageView)
countLabel.setContentHuggingHorizontalHigh()
countLabel.setCompressionResistanceHorizontalHigh()
}
func updateMemberCount(_ count: Int) {
countLabel.text = String(OWSFormat.formatInt(count))
}
required init?(coder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
override var isHighlighted: Bool {
didSet {
alpha = isHighlighted ? 0.5 : 1
}
}
}

View File

@ -0,0 +1,146 @@
//
// Copyright (c) 2020 Open Whisper Systems. All rights reserved.
//
import Foundation
class GroupCallErrorView: UIView {
var forceCompactAppearance: Bool = false {
didSet { configure() }
}
var iconImage: UIImage? {
didSet {
if let iconImage = iconImage {
iconView.setTemplateImage(iconImage, tintColor: .ows_white)
miniButton.setTemplateImage(iconImage, tintColor: .ows_white)
} else {
iconView.image = nil
miniButton.setImage(nil, for: .normal)
}
}
}
var labelText: String? {
didSet {
label.text = labelText
configure()
}
}
var userTapAction: ((GroupCallErrorView) -> Void)?
// MARK: - Views
private let iconView: UIImageView = UIImageView()
private let label: UILabel = {
let label = UILabel()
label.font = UIFont.ows_dynamicTypeSubheadline
label.adjustsFontForContentSizeCategory = true
label.textAlignment = .center
label.textColor = .ows_white
label.numberOfLines = 0
return label
}()
private lazy var button: UIButton = {
let buttonLabel = NSLocalizedString(
"GROUP_CALL_ERROR_DETAILS",
comment: "A button to receive more info about not seeing a participant in group call grid")
let button = UIButton()
button.backgroundColor = .ows_gray75
button.contentEdgeInsets = UIEdgeInsets(top: 3, leading: 12, bottom: 3, trailing: 12)
button.layer.cornerRadius = 12
button.clipsToBounds = true
button.titleLabel?.textAlignment = .center
button.titleLabel?.font = UIFont.ows_dynamicTypeSubheadline.ows_semibold
button.setTitle(buttonLabel, for: .normal)
button.addTarget(self, action: #selector(didTapButton), for: .touchUpInside)
return button
}()
private lazy var miniButton: UIButton = {
let button = UIButton()
button.contentVerticalAlignment = .fill
button.contentHorizontalAlignment = .fill
button.addTarget(self, action: #selector(didTapButton), for: .touchUpInside)
return button
}()
override init(frame: CGRect) {
super.init(frame: frame)
let stackView = UIStackView(arrangedSubviews: [
iconView,
label,
button,
])
stackView.axis = .vertical
stackView.alignment = .center
stackView.distribution = .fill
stackView.setCustomSpacing(12, after: iconView)
stackView.setCustomSpacing(16, after: label)
insetsLayoutMarginsFromSafeArea = false
addSubview(miniButton)
addSubview(stackView)
stackView.autoPinWidthToSuperviewMargins()
stackView.autoVCenterInSuperview()
stackView.autoPinEdge(toSuperviewMargin: .top, relation: .greaterThanOrEqual)
stackView.autoPinEdge(toSuperviewMargin: .bottom, relation: .greaterThanOrEqual)
miniButton.autoCenterInSuperview()
iconView.setCompressionResistanceHigh()
button.setCompressionResistanceHigh()
iconView.autoSetDimensions(to: CGSize(width: 24, height: 24))
button.autoSetDimension(.height, toSize: 24, relation: .greaterThanOrEqual)
miniButton.autoSetDimensions(to: CGSize(width: 24, height: 24))
configure()
}
override var bounds: CGRect {
didSet { configure() }
}
override var frame: CGRect {
didSet { configure() }
}
private func configure() {
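// Cells narrower or shorter than 100pt (or ones forced compact) show only the mini icon button; larger cells show the full icon/label/button stack.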
let isCompact = (bounds.width < 100) || (bounds.height < 100) || forceCompactAppearance
iconView.isHidden = isCompact
label.isHidden = isCompact
button.isHidden = isCompact
miniButton.isHidden = !isCompact
layoutIfNeeded()
// The error text is easily truncated in small cells with large dynamic type.
// If the label gets truncated, just hide it.
if !label.isHidden {
let widthBox = CGSize(width: label.bounds.width, height: .greatestFiniteMagnitude)
let labelDesiredHeight = label.sizeThatFits(widthBox).height
label.isHidden = (labelDesiredHeight > label.bounds.height)
}
}
@objc
private func didTapButton() {
userTapAction?(self)
}
required init?(coder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
}

View File

@ -0,0 +1,350 @@
//
// Copyright (c) 2021 Open Whisper Systems. All rights reserved.
//
import Foundation
import SignalRingRTC
@objc
class GroupCallMemberSheet: InteractiveSheetViewController {
override var interactiveScrollViews: [UIScrollView] { [tableView] }
let tableView = UITableView(frame: .zero, style: .grouped)
let call: SignalCall
init(call: SignalCall) {
self.call = call
super.init()
call.addObserverAndSyncState(observer: self)
}
public required init() {
fatalError("init() has not been implemented")
}
deinit { call.removeObserver(self) }
// MARK: -
override public func viewDidLoad() {
super.viewDidLoad()
if UIAccessibility.isReduceTransparencyEnabled {
contentView.backgroundColor = .ows_blackAlpha80
} else {
let blurEffectView = UIVisualEffectView(effect: UIBlurEffect(style: .dark))
contentView.addSubview(blurEffectView)
blurEffectView.autoPinEdgesToSuperviewEdges()
contentView.backgroundColor = .ows_blackAlpha40
}
tableView.dataSource = self
tableView.delegate = self
tableView.backgroundColor = .clear
tableView.separatorStyle = .none
tableView.tableHeaderView = UIView(frame: CGRect(origin: .zero, size: CGSize(width: 0, height: CGFloat.leastNormalMagnitude)))
contentView.addSubview(tableView)
tableView.autoPinEdgesToSuperviewEdges()
tableView.register(GroupCallMemberCell.self, forCellReuseIdentifier: GroupCallMemberCell.reuseIdentifier)
tableView.register(GroupCallEmptyCell.self, forCellReuseIdentifier: GroupCallEmptyCell.reuseIdentifier)
updateMembers()
}
// MARK: -
struct JoinedMember {
let address: SignalServiceAddress
let displayName: String
let comparableName: String
let isAudioMuted: Bool?
let isVideoMuted: Bool?
let isPresenting: Bool?
}
private var sortedMembers = [JoinedMember]()
func updateMembers() {
let unsortedMembers: [JoinedMember] = databaseStorage.read { transaction in
var members = [JoinedMember]()
if self.call.groupCall.localDeviceState.joinState == .joined {
members += self.call.groupCall.remoteDeviceStates.values.map { member in
let displayName: String
let comparableName: String
if member.address.isLocalAddress {
displayName = NSLocalizedString(
"GROUP_CALL_YOU_ON_ANOTHER_DEVICE",
comment: "Text describing the local user in the group call members sheet when connected from another device."
)
comparableName = displayName
} else {
displayName = self.contactsManager.displayName(for: member.address, transaction: transaction)
comparableName = self.contactsManager.comparableName(for: member.address, transaction: transaction)
}
return JoinedMember(
address: member.address,
displayName: displayName,
comparableName: comparableName,
isAudioMuted: member.audioMuted,
isVideoMuted: member.videoMuted,
isPresenting: member.presenting
)
}
guard let localAddress = self.tsAccountManager.localAddress else { return members }
let displayName = NSLocalizedString(
"GROUP_CALL_YOU",
comment: "Text describing the local user as a participant in a group call."
)
let comparableName = displayName
members.append(JoinedMember(
address: localAddress,
displayName: displayName,
comparableName: comparableName,
isAudioMuted: self.call.groupCall.isOutgoingAudioMuted,
isVideoMuted: self.call.groupCall.isOutgoingVideoMuted,
isPresenting: false
))
} else {
// If we're not yet in the call, `remoteDeviceStates` will not exist.
// We can still get the list of joined members from the peek info, provided we are connected.
members += self.call.groupCall.peekInfo?.joinedMembers.map { uuid in
let address = SignalServiceAddress(uuid: uuid)
let displayName = self.contactsManager.displayName(for: address, transaction: transaction)
let comparableName = self.contactsManager.comparableName(for: address, transaction: transaction)
return JoinedMember(
address: address,
displayName: displayName,
comparableName: comparableName,
isAudioMuted: nil,
isVideoMuted: nil,
isPresenting: nil
)
} ?? []
}
return members
}
sortedMembers = unsortedMembers.sorted { $0.comparableName.caseInsensitiveCompare($1.comparableName) == .orderedAscending }
tableView.reloadData()
}
}
extension GroupCallMemberSheet: UITableViewDataSource, UITableViewDelegate {
func tableView(_ tableView: UITableView, numberOfRowsInSection section: Int) -> Int {
return sortedMembers.count > 0 ? sortedMembers.count : 1
}
func tableView(_ tableView: UITableView, cellForRowAt indexPath: IndexPath) -> UITableViewCell {
guard !sortedMembers.isEmpty else {
return tableView.dequeueReusableCell(withIdentifier: GroupCallEmptyCell.reuseIdentifier, for: indexPath)
}
let cell = tableView.dequeueReusableCell(withIdentifier: GroupCallMemberCell.reuseIdentifier, for: indexPath)
guard let memberCell = cell as? GroupCallMemberCell else {
owsFailDebug("unexpected cell type")
return cell
}
guard let member = sortedMembers[safe: indexPath.row] else {
owsFailDebug("missing member")
return cell
}
memberCell.configure(item: member)
return memberCell
}
func tableView(_ tableView: UITableView, viewForHeaderInSection section: Int) -> UIView? {
let label = UILabel()
label.font = UIFont.ows_dynamicTypeSubheadlineClamped.ows_semibold
label.textColor = Theme.darkThemePrimaryColor
if sortedMembers.count > 1 {
let formatString = NSLocalizedString(
"GROUP_CALL_MANY_IN_THIS_CALL_FORMAT",
comment: "String indicating how many people are current in the call"
)
label.text = String(format: formatString, sortedMembers.count)
} else if sortedMembers.count > 0 {
label.text = NSLocalizedString(
"GROUP_CALL_ONE_IN_THIS_CALL",
comment: "String indicating one person is currently in the call"
)
} else {
label.text = nil
}
let labelContainer = UIView()
labelContainer.layoutMargins = UIEdgeInsets(top: 13, left: 16, bottom: 13, right: 16)
labelContainer.addSubview(label)
label.autoPinEdgesToSuperviewMargins()
return labelContainer
}
func tableView(_ tableView: UITableView, heightForHeaderInSection section: Int) -> CGFloat {
return UITableView.automaticDimension
}
func tableView(_ tableView: UITableView, heightForFooterInSection section: Int) -> CGFloat {
return .leastNormalMagnitude
}
}
// MARK: -
extension GroupCallMemberSheet: CallObserver {
func groupCallLocalDeviceStateChanged(_ call: SignalCall) {
AssertIsOnMainThread()
owsAssertDebug(call.isGroupCall)
updateMembers()
}
func groupCallRemoteDeviceStatesChanged(_ call: SignalCall) {
AssertIsOnMainThread()
owsAssertDebug(call.isGroupCall)
updateMembers()
}
func groupCallPeekChanged(_ call: SignalCall) {
AssertIsOnMainThread()
owsAssertDebug(call.isGroupCall)
updateMembers()
}
func groupCallEnded(_ call: SignalCall, reason: GroupCallEndReason) {
AssertIsOnMainThread()
owsAssertDebug(call.isGroupCall)
updateMembers()
}
}
private class GroupCallMemberCell: UITableViewCell {
static let reuseIdentifier = "GroupCallMemberCell"
let avatarView = ConversationAvatarView(diameterPoints: 36,
localUserDisplayMode: .asUser)
let nameLabel = UILabel()
let videoMutedIndicator = UIImageView()
let audioMutedIndicator = UIImageView()
let presentingIndicator = UIImageView()
override init(style: UITableViewCell.CellStyle, reuseIdentifier: String?) {
super.init(style: style, reuseIdentifier: reuseIdentifier)
backgroundColor = .clear
selectionStyle = .none
layoutMargins = UIEdgeInsets(top: 8, leading: 16, bottom: 8, trailing: 16)
avatarView.autoSetDimensions(to: CGSize(square: 36))
nameLabel.font = .ows_dynamicTypeBody
audioMutedIndicator.contentMode = .scaleAspectFit
audioMutedIndicator.setTemplateImage(#imageLiteral(resourceName: "mic-off-solid-28"), tintColor: .ows_white)
audioMutedIndicator.autoSetDimensions(to: CGSize(square: 16))
audioMutedIndicator.setContentHuggingHorizontalHigh()
let audioMutedWrapper = UIView()
audioMutedWrapper.addSubview(audioMutedIndicator)
audioMutedIndicator.autoPinEdgesToSuperviewEdges()
videoMutedIndicator.contentMode = .scaleAspectFit
videoMutedIndicator.setTemplateImage(#imageLiteral(resourceName: "video-off-solid-28"), tintColor: .ows_white)
videoMutedIndicator.autoSetDimensions(to: CGSize(square: 16))
videoMutedIndicator.setContentHuggingHorizontalHigh()
presentingIndicator.contentMode = .scaleAspectFit
presentingIndicator.setTemplateImage(#imageLiteral(resourceName: "share-screen-solid-28"), tintColor: .ows_white)
presentingIndicator.autoSetDimensions(to: CGSize(square: 16))
presentingIndicator.setContentHuggingHorizontalHigh()
// We share a wrapper for video muted and presenting states
// as they render in the same column.
let videoMutedAndPresentingWrapper = UIView()
videoMutedAndPresentingWrapper.addSubview(videoMutedIndicator)
videoMutedIndicator.autoPinEdgesToSuperviewEdges()
videoMutedAndPresentingWrapper.addSubview(presentingIndicator)
presentingIndicator.autoPinEdgesToSuperviewEdges()
let stackView = UIStackView(arrangedSubviews: [
avatarView,
UIView.spacer(withWidth: 8),
nameLabel,
UIView.spacer(withWidth: 16),
videoMutedAndPresentingWrapper,
UIView.spacer(withWidth: 16),
audioMutedWrapper
])
stackView.axis = .horizontal
stackView.alignment = .center
contentView.addSubview(stackView)
stackView.autoPinEdgesToSuperviewMargins()
}
required init?(coder aDecoder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
func configure(item: GroupCallMemberSheet.JoinedMember) {
nameLabel.textColor = Theme.darkThemePrimaryColor
videoMutedIndicator.isHidden = item.isVideoMuted != true || item.isPresenting == true
audioMutedIndicator.isHidden = item.isAudioMuted != true
presentingIndicator.isHidden = item.isPresenting != true
nameLabel.text = item.displayName
avatarView.configureWithSneakyTransaction(address: item.address)
}
}
private class GroupCallEmptyCell: UITableViewCell {
static let reuseIdentifier = "GroupCallEmptyCell"
override init(style: UITableViewCell.CellStyle, reuseIdentifier: String?) {
super.init(style: style, reuseIdentifier: reuseIdentifier)
backgroundColor = .clear
selectionStyle = .none
layoutMargins = UIEdgeInsets(top: 8, leading: 16, bottom: 8, trailing: 16)
let imageView = UIImageView(image: #imageLiteral(resourceName: "sad-cat"))
imageView.contentMode = .scaleAspectFit
contentView.addSubview(imageView)
imageView.autoSetDimensions(to: CGSize(square: 160))
imageView.autoHCenterInSuperview()
imageView.autoPinTopToSuperviewMargin(withInset: 32)
let label = UILabel()
label.font = .ows_dynamicTypeSubheadlineClamped
label.textColor = Theme.darkThemePrimaryColor
label.text = NSLocalizedString("GROUP_CALL_NOBODY_IS_IN_YET",
comment: "Text explaining to the user that nobody has joined this call yet.")
label.numberOfLines = 0
label.lineBreakMode = .byWordWrapping
label.textAlignment = .center
contentView.addSubview(label)
label.autoPinWidthToSuperviewMargins()
label.autoPinBottomToSuperviewMargin()
label.autoPinEdge(.top, to: .bottom, of: imageView, withOffset: 16)
}
required init?(coder aDecoder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
}

View File

@ -0,0 +1,449 @@
//
// Copyright (c) 2021 Open Whisper Systems. All rights reserved.
//
import Foundation
import SignalRingRTC
protocol GroupCallMemberViewDelegate: AnyObject {
func memberView(_: GroupCallMemberView, userRequestedInfoAboutError: GroupCallMemberView.ErrorState)
}
class GroupCallMemberView: UIView {
weak var delegate: GroupCallMemberViewDelegate?
let noVideoView = UIView()
let backgroundAvatarView = UIImageView()
let blurView = UIVisualEffectView(effect: UIBlurEffect(style: .dark))
let muteIndicatorImage = UIImageView()
lazy var muteLeadingConstraint = muteIndicatorImage.autoPinEdge(toSuperviewEdge: .leading, withInset: muteInsets)
lazy var muteBottomConstraint = muteIndicatorImage.autoPinEdge(toSuperviewEdge: .bottom, withInset: muteInsets)
lazy var muteHeightConstraint = muteIndicatorImage.autoSetDimension(.height, toSize: muteHeight)
var muteInsets: CGFloat {
layoutIfNeeded()
if width > 102 {
return 9
} else {
return 4
}
}
var muteHeight: CGFloat {
layoutIfNeeded()
if width > 200 && UIDevice.current.isIPad {
return 20
} else {
return 16
}
}
init() {
super.init(frame: .zero)
backgroundColor = .ows_gray90
clipsToBounds = true
addSubview(noVideoView)
noVideoView.autoPinEdgesToSuperviewEdges()
let overlayView = UIView()
overlayView.backgroundColor = .ows_blackAlpha40
noVideoView.addSubview(overlayView)
overlayView.autoPinEdgesToSuperviewEdges()
backgroundAvatarView.contentMode = .scaleAspectFill
noVideoView.addSubview(backgroundAvatarView)
backgroundAvatarView.autoPinEdgesToSuperviewEdges()
noVideoView.addSubview(blurView)
blurView.autoPinEdgesToSuperviewEdges()
muteIndicatorImage.contentMode = .scaleAspectFit
muteIndicatorImage.setTemplateImage(#imageLiteral(resourceName: "mic-off-solid-28"), tintColor: .ows_white)
addSubview(muteIndicatorImage)
muteIndicatorImage.autoMatch(.width, to: .height, of: muteIndicatorImage)
}
required init?(coder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
enum ErrorState {
case blocked(SignalServiceAddress)
case noMediaKeys(SignalServiceAddress)
}
}
class GroupCallLocalMemberView: GroupCallMemberView {
let videoView = LocalVideoView()
let videoOffIndicatorImage = UIImageView()
let videoOffLabel = UILabel()
var videoOffIndicatorWidth: CGFloat {
if width > 102 {
return 28
} else {
return 16
}
}
override var bounds: CGRect {
didSet { updateDimensions() }
}
override var frame: CGRect {
didSet { updateDimensions() }
}
lazy var videoOffIndicatorWidthConstraint = videoOffIndicatorImage.autoSetDimension(.width, toSize: videoOffIndicatorWidth)
lazy var callFullLabel: UILabel = {
let label = UILabel()
label.numberOfLines = 0
label.lineBreakMode = .byWordWrapping
label.font = .ows_dynamicTypeSubheadline
label.textAlignment = .center
label.textColor = Theme.darkThemePrimaryColor
return label
}()
lazy var callFullStack: UIStackView = {
let callFullStack = UIStackView()
callFullStack.axis = .vertical
callFullStack.spacing = 8
let imageView = UIImageView(image: #imageLiteral(resourceName: "sad-cat"))
imageView.contentMode = .scaleAspectFit
imageView.autoSetDimensions(to: CGSize(square: 200))
callFullStack.addArrangedSubview(imageView)
let titleLabel = UILabel()
titleLabel.text = NSLocalizedString(
"GROUP_CALL_IS_FULL",
comment: "Text explaining the group call is full"
)
titleLabel.font = UIFont.ows_dynamicTypeSubheadline.ows_semibold
titleLabel.textAlignment = .center
titleLabel.textColor = Theme.darkThemePrimaryColor
callFullStack.addArrangedSubview(titleLabel)
callFullStack.addArrangedSubview(callFullLabel)
return callFullStack
}()
override init() {
super.init()
videoOffIndicatorImage.contentMode = .scaleAspectFit
videoOffIndicatorImage.setTemplateImage(#imageLiteral(resourceName: "video-off-solid-28"), tintColor: .ows_white)
noVideoView.addSubview(videoOffIndicatorImage)
videoOffIndicatorImage.autoMatch(.height, to: .width, of: videoOffIndicatorImage)
videoOffIndicatorImage.autoCenterInSuperview()
videoOffLabel.font = .ows_dynamicTypeSubheadline
videoOffLabel.text = NSLocalizedString("CALLING_MEMBER_VIEW_YOUR_CAMERA_IS_OFF",
comment: "Indicates to the user that their camera is currently off.")
videoOffLabel.textAlignment = .center
videoOffLabel.textColor = Theme.darkThemePrimaryColor
noVideoView.addSubview(videoOffLabel)
videoOffLabel.autoPinWidthToSuperview()
videoOffLabel.autoPinEdge(.top, to: .bottom, of: videoOffIndicatorImage, withOffset: 10)
videoView.contentMode = .scaleAspectFill
insertSubview(videoView, belowSubview: muteIndicatorImage)
videoView.frame = bounds
addSubview(callFullStack)
callFullStack.autoAlignAxis(.horizontal, toSameAxisOf: self, withOffset: -30)
callFullStack.autoPinWidthToSuperview(withMargin: 16)
layer.shadowOffset = .zero
layer.shadowOpacity = 0.25
layer.shadowRadius = 4
}
required init?(coder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
private var hasBeenConfigured = false
func configure(call: SignalCall, isFullScreen: Bool = false) {
hasBeenConfigured = true
videoView.isHidden = call.groupCall.isOutgoingVideoMuted
videoView.captureSession = call.videoCaptureController.captureSession
noVideoView.isHidden = !videoView.isHidden
if isFullScreen,
call.groupCall.isFull,
case .notJoined = call.groupCall.localDeviceState.joinState {
let text: String
if let maxDevices = call.groupCall.maxDevices {
let formatString = NSLocalizedString(
"GROUP_CALL_HAS_MAX_DEVICES_FORMAT",
comment: "An error displayed to the user when the group call ends because it has exceeded the max devices. Embeds {{max device count}}."
)
text = String(format: formatString, maxDevices)
} else {
text = NSLocalizedString(
"GROUP_CALL_HAS_MAX_DEVICES_UNKNOWN_COUNT",
comment: "An error displayed to the user when the group call ends because it has exceeded the max devices."
)
}
callFullLabel.text = text
callFullStack.isHidden = false
videoOffLabel.isHidden = true
videoOffIndicatorImage.isHidden = true
} else {
callFullStack.isHidden = true
videoOffLabel.isHidden = !videoView.isHidden || !isFullScreen
videoOffIndicatorImage.isHidden = !videoView.isHidden
}
guard let localAddress = tsAccountManager.localAddress else {
return owsFailDebug("missing local address")
}
backgroundAvatarView.image = profileManager.localProfileAvatarImage()
muteIndicatorImage.isHidden = isFullScreen || !call.groupCall.isOutgoingAudioMuted
muteLeadingConstraint.constant = muteInsets
muteBottomConstraint.constant = -muteInsets
muteHeightConstraint.constant = muteHeight
videoOffIndicatorWidthConstraint.constant = videoOffIndicatorWidth
noVideoView.backgroundColor = ChatColors.avatarColor(forAddress: localAddress)
layer.cornerRadius = isFullScreen ? 0 : 10
clipsToBounds = true
}
private func updateDimensions() {
guard hasBeenConfigured else { return }
videoView.frame = bounds
muteLeadingConstraint.constant = muteInsets
muteBottomConstraint.constant = -muteInsets
muteHeightConstraint.constant = muteHeight
videoOffIndicatorWidthConstraint.constant = videoOffIndicatorWidth
}
}
class GroupCallRemoteMemberView: GroupCallMemberView {
private weak var videoView: GroupCallRemoteVideoView?
var deferredReconfigTimer: Timer?
let errorView = GroupCallErrorView()
let avatarView = ConversationAvatarView(diameterPoints: 0,
localUserDisplayMode: .asUser)
let spinner = UIActivityIndicatorView(style: .whiteLarge)
lazy var avatarWidthConstraint = avatarView.autoSetDimension(.width, toSize: CGFloat(avatarDiameter))
var isCallMinimized: Bool = false {
didSet {
// Currently only updated for the speaker view, since that's the only visible cell
// while minimized.
errorView.forceCompactAppearance = isCallMinimized
errorView.isUserInteractionEnabled = !isCallMinimized
}
}
override var bounds: CGRect {
didSet { updateDimensions() }
}
override var frame: CGRect {
didSet { updateDimensions() }
}
var avatarDiameter: UInt {
layoutIfNeeded()
if width > 180 {
return 112
} else if width > 102 {
return 96
} else if width > 36 {
return UInt(width) - 36
} else {
return 16
}
}
let mode: Mode
enum Mode: Equatable {
case videoGrid, videoOverflow, speaker
}
init(mode: Mode) {
self.mode = mode
super.init()
noVideoView.insertSubview(avatarView, belowSubview: muteIndicatorImage)
noVideoView.insertSubview(errorView, belowSubview: muteIndicatorImage)
noVideoView.insertSubview(spinner, belowSubview: muteIndicatorImage)
avatarView.autoCenterInSuperview()
errorView.autoPinEdgesToSuperviewEdges()
spinner.autoCenterInSuperview()
}
required init?(coder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
private var hasBeenConfigured = false
func configure(call: SignalCall, device: RemoteDeviceState) {
hasBeenConfigured = true
deferredReconfigTimer?.invalidate()
let profileImage = databaseStorage.read { transaction -> UIImage? in
avatarView.configure(address: device.address,
diameterPoints: avatarDiameter,
localUserDisplayMode: .asUser,
transaction: transaction)
avatarWidthConstraint.constant = CGFloat(avatarDiameter)
return self.contactsManagerImpl.avatarImage(forAddress: device.address,
shouldValidate: true,
transaction: transaction)
}
backgroundAvatarView.image = profileImage
muteIndicatorImage.isHidden = mode == .speaker || device.audioMuted != true
muteLeadingConstraint.constant = muteInsets
muteBottomConstraint.constant = -muteInsets
muteHeightConstraint.constant = muteHeight
noVideoView.backgroundColor = ChatColors.avatarColor(forAddress: device.address)
configureRemoteVideo(device: device)
let isRemoteDeviceBlocked = blockingManager.isAddressBlocked(device.address)
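// Tolerate missing media keys for a few seconds after a device joins before surfacing an error.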
let errorDeferralInterval: TimeInterval = 5.0
let addedDate = Date(millisecondsSince1970: device.addedTime)
let connectionDuration = -addedDate.timeIntervalSinceNow
// Hide these views. They'll be unhidden below.
[errorView, avatarView, videoView, spinner].forEach { $0?.isHidden = true }
if !device.mediaKeysReceived, !isRemoteDeviceBlocked, connectionDuration < errorDeferralInterval {
// No media keys, but that's expected since we just joined the call.
// Schedule a timer to re-check and show a spinner in the meantime
spinner.isHidden = false
if !spinner.isAnimating { spinner.startAnimating() }
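// Capture the demux ID so the deferred re-check targets this same device even if the state objects are replaced.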
let configuredDemuxId = device.demuxId
let scheduledInterval = errorDeferralInterval - connectionDuration
deferredReconfigTimer = Timer.scheduledTimer(
withTimeInterval: scheduledInterval,
repeats: false,
block: { [weak self] _ in
guard let self = self else { return }
guard call.isGroupCall, let groupCall = call.groupCall else { return }
guard let updatedState = groupCall.remoteDeviceStates.values
.first(where: { $0.demuxId == configuredDemuxId }) else { return }
self.configure(call: call, device: updatedState)
})
} else if !device.mediaKeysReceived {
// No media keys. Display error view
errorView.isHidden = false
configureErrorView(for: device.address, isBlocked: isRemoteDeviceBlocked)
} else if let videoView = videoView, device.videoTrack != nil {
// We have a video track! If we don't know the mute state, show both.
// Otherwise, show one or the other.
videoView.isHidden = (device.videoMuted == true)
avatarView.isHidden = (device.videoMuted == false)
} else {
// No video. Display avatar
avatarView.isHidden = false
}
}
func clearConfiguration() {
deferredReconfigTimer?.invalidate()
cleanupVideoViews()
noVideoView.backgroundColor = .ows_black
backgroundAvatarView.image = nil
avatarView.image = nil
[errorView, spinner, muteIndicatorImage].forEach { $0.isHidden = true }
}
private func updateDimensions() {
guard hasBeenConfigured else { return }
videoView?.frame = bounds
muteLeadingConstraint.constant = muteInsets
muteBottomConstraint.constant = -muteInsets
muteHeightConstraint.constant = muteHeight
avatarWidthConstraint.constant = CGFloat(avatarDiameter)
}
func cleanupVideoViews() {
if videoView?.superview == self { videoView?.removeFromSuperview() }
videoView = nil
}
func configureRemoteVideo(device: RemoteDeviceState) {
if videoView?.superview == self { videoView?.removeFromSuperview() }
let newVideoView = callService.groupCallRemoteVideoManager.remoteVideoView(for: device, mode: mode)
insertSubview(newVideoView, belowSubview: muteIndicatorImage)
newVideoView.frame = bounds
newVideoView.isScreenShare = device.sharingScreen == true
videoView = newVideoView
owsAssertDebug(videoView != nil, "Missing remote video view")
}
func configureErrorView(for address: SignalServiceAddress, isBlocked: Bool) {
let displayName: String
if address.isLocalAddress {
displayName = NSLocalizedString(
"GROUP_CALL_YOU_ON_ANOTHER_DEVICE",
comment: "Text describing the local user in the group call members sheet when connected from another device.")
} else {
displayName = self.contactsManager.displayName(for: address)
}
let blockFormat = NSLocalizedString(
"GROUP_CALL_BLOCKED_USER_FORMAT",
comment: "String displayed in group call grid cell when a user is blocked. Embeds {user's name}")
let missingKeyFormat = NSLocalizedString(
"GROUP_CALL_MISSING_MEDIA_KEYS_FORMAT",
comment: "String displayed in cell when media from a user can't be displayed in group call grid. Embeds {user's name}")
let labelFormat = isBlocked ? blockFormat : missingKeyFormat
let label = String(format: labelFormat, arguments: [displayName])
let image = isBlocked ? UIImage(named: "block-24") : UIImage(named: "error-solid-24")
errorView.iconImage = image
errorView.labelText = label
errorView.userTapAction = { [weak self] _ in
guard let self = self else { return }
if isBlocked {
self.delegate?.memberView(self, userRequestedInfoAboutError: .blocked(address))
} else {
self.delegate?.memberView(self, userRequestedInfoAboutError: .noMediaKeys(address))
}
}
}
}
extension RemoteDeviceState {
var address: SignalServiceAddress {
return SignalServiceAddress(uuid: userId)
}
}

View File

@ -0,0 +1,268 @@
//
// Copyright (c) 2021 Open Whisper Systems. All rights reserved.
//
import Foundation
import SignalRingRTC
class GroupCallNotificationView: UIView {
private let call: SignalCall
private struct ActiveMember: Hashable {
let demuxId: UInt32
let uuid: UUID
var address: SignalServiceAddress { return SignalServiceAddress(uuid: uuid) }
}
private var activeMembers = Set<ActiveMember>()
private var membersPendingJoinNotification = Set<ActiveMember>()
private var membersPendingLeaveNotification = Set<ActiveMember>()
init(call: SignalCall) {
self.call = call
super.init(frame: .zero)
call.addObserverAndSyncState(observer: self)
isUserInteractionEnabled = false
}
deinit { call.removeObserver(self) }
required init?(coder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
private var hasJoined = false
private func updateActiveMembers() {
let newActiveMembers = Set(call.groupCall.remoteDeviceStates.values.map {
ActiveMember(demuxId: $0.demuxId, uuid: $0.userId)
})
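// Only queue join/leave banners once the local user has joined; before that we just record the current membership.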
if hasJoined {
let joinedMembers = newActiveMembers.subtracting(activeMembers)
let leftMembers = activeMembers.subtracting(newActiveMembers)
membersPendingJoinNotification.subtract(leftMembers)
membersPendingJoinNotification.formUnion(joinedMembers)
membersPendingLeaveNotification.subtract(joinedMembers)
membersPendingLeaveNotification.formUnion(leftMembers)
} else {
hasJoined = call.groupCall.localDeviceState.joinState == .joined
}
activeMembers = newActiveMembers
presentNextNotificationIfNecessary()
}
private var isPresentingNotification = false
private func presentNextNotificationIfNecessary() {
guard !isPresentingNotification else { return }
guard let bannerView: BannerView = {
if membersPendingJoinNotification.count > 0 {
callService.audioService.playJoinSound()
let addresses = membersPendingJoinNotification.map { $0.address }
membersPendingJoinNotification.removeAll()
return BannerView(addresses: addresses, action: .join)
} else if membersPendingLeaveNotification.count > 0 {
callService.audioService.playLeaveSound()
let addresses = membersPendingLeaveNotification.map { $0.address }
membersPendingLeaveNotification.removeAll()
return BannerView(addresses: addresses, action: .leave)
} else {
return nil
}
}() else { return }
isPresentingNotification = true
addSubview(bannerView)
bannerView.autoHCenterInSuperview()
// Prefer to be full width, but don't exceed the maximum width
bannerView.autoSetDimension(.width, toSize: 512, relation: .lessThanOrEqual)
bannerView.autoMatch(
.width,
to: .width,
of: self,
withOffset: -(layoutMargins.left + layoutMargins.right),
relation: .lessThanOrEqual
)
NSLayoutConstraint.autoSetPriority(.defaultHigh) {
bannerView.autoPinWidthToSuperviewMargins()
}
let onScreenConstraint = bannerView.autoPinEdge(toSuperviewMargin: .top)
onScreenConstraint.isActive = false
let offScreenConstraint = bannerView.autoPinEdge(.bottom, to: .top, of: self)
layoutIfNeeded()
firstly(on: .main) {
UIView.animate(.promise, duration: 0.35) {
offScreenConstraint.isActive = false
onScreenConstraint.isActive = true
self.layoutIfNeeded()
}
}.then(on: .main) { _ in
UIView.animate(.promise, duration: 0.35, delay: 2, options: .curveEaseInOut) {
onScreenConstraint.isActive = false
offScreenConstraint.isActive = true
self.layoutIfNeeded()
}
}.done(on: .main) { _ in
bannerView.removeFromSuperview()
self.isPresentingNotification = false
self.presentNextNotificationIfNecessary()
}
}
}
extension GroupCallNotificationView: CallObserver {
func groupCallRemoteDeviceStatesChanged(_ call: SignalCall) {
AssertIsOnMainThread()
owsAssertDebug(call.isGroupCall)
updateActiveMembers()
}
func groupCallPeekChanged(_ call: SignalCall) {
AssertIsOnMainThread()
owsAssertDebug(call.isGroupCall)
updateActiveMembers()
}
func groupCallEnded(_ call: SignalCall, reason: GroupCallEndReason) {
AssertIsOnMainThread()
owsAssertDebug(call.isGroupCall)
hasJoined = false
activeMembers.removeAll()
membersPendingJoinNotification.removeAll()
membersPendingLeaveNotification.removeAll()
updateActiveMembers()
}
}
private class BannerView: UIView {
enum Action: Equatable { case join, leave }
init(addresses: [SignalServiceAddress], action: Action) {
super.init(frame: .zero)
owsAssertDebug(!addresses.isEmpty)
autoSetDimension(.height, toSize: 64, relation: .greaterThanOrEqual)
layer.cornerRadius = 8
clipsToBounds = true
if UIAccessibility.isReduceTransparencyEnabled {
backgroundColor = .ows_blackAlpha80
} else {
let blurEffectView = UIVisualEffectView(effect: UIBlurEffect(style: .dark))
addSubview(blurEffectView)
blurEffectView.autoPinEdgesToSuperviewEdges()
backgroundColor = .ows_blackAlpha40
}
let displayNames = databaseStorage.read { transaction in
return addresses.map { address in
return (
displayName: self.contactsManager.displayName(for: address, transaction: transaction),
comparableName: self.contactsManager.comparableName(for: address, transaction: transaction)
)
}
}.sorted { $0.comparableName.caseInsensitiveCompare($1.comparableName) == .orderedAscending }
.map { $0.displayName }
let actionText: String
if displayNames.count > 2 {
let formatText = action == .join
? NSLocalizedString(
"GROUP_CALL_NOTIFICATION_MANY_JOINED_FORMAT",
comment: "Copy explaining that many new users have joined the group call. Embeds {first member name}, {second member name}, {number of additional members}"
)
: NSLocalizedString(
"GROUP_CALL_NOTIFICATION_MANY_LEFT_FORMAT",
comment: "Copy explaining that many users have left the group call. Embeds {first member name}, {second member name}, {number of additional members}"
)
actionText = String(format: formatText, displayNames[0], displayNames[1], displayNames.count - 2)
} else if displayNames.count > 1 {
let formatText = action == .join
? NSLocalizedString(
"GROUP_CALL_NOTIFICATION_TWO_JOINED_FORMAT",
comment: "Copy explaining that two users have joined the group call. Embeds {first member name}, {second member name}"
)
: NSLocalizedString(
"GROUP_CALL_NOTIFICATION_TWO_LEFT_FORMAT",
comment: "Copy explaining that two users have left the group call. Embeds {first member name}, {second member name}"
)
actionText = String(format: formatText, displayNames[0], displayNames[1])
} else {
let formatText = action == .join
? NSLocalizedString(
"GROUP_CALL_NOTIFICATION_ONE_JOINED_FORMAT",
comment: "Copy explaining that a user has joined the group call. Embeds {member name}"
)
: NSLocalizedString(
"GROUP_CALL_NOTIFICATION_ONE_LEFT_FORMAT",
comment: "Copy explaining that a user has left the group call. Embeds {member name}"
)
actionText = String(format: formatText, displayNames[0])
}
let hStack = UIStackView()
hStack.spacing = 12
hStack.axis = .horizontal
hStack.isLayoutMarginsRelativeArrangement = true
hStack.layoutMargins = UIEdgeInsets(top: 12, leading: 12, bottom: 12, trailing: 12)
addSubview(hStack)
hStack.autoPinEdgesToSuperviewEdges()
if addresses.count == 1, let address = addresses.first {
let avatarContainer = UIView()
hStack.addArrangedSubview(avatarContainer)
avatarContainer.autoSetDimension(.width, toSize: 40)
let avatarView = UIImageView()
avatarView.layer.cornerRadius = 20
avatarView.clipsToBounds = true
avatarContainer.addSubview(avatarView)
avatarView.autoPinWidthToSuperview()
avatarView.autoVCenterInSuperview()
avatarView.autoMatch(.height, to: .width, of: avatarView)
if address.isLocalAddress,
let avatarImage = profileManager.localProfileAvatarImage() {
avatarView.image = avatarImage
} else {
let avatar = Self.avatarBuilder.avatarImageWithSneakyTransaction(forAddress: address,
diameterPoints: 40,
localUserDisplayMode: .asUser)
avatarView.image = avatar
}
}
let label = UILabel()
hStack.addArrangedSubview(label)
label.setCompressionResistanceHorizontalHigh()
label.numberOfLines = 0
label.font = UIFont.ows_dynamicTypeSubheadlineClamped.ows_semibold
label.textColor = .ows_white
label.text = actionText
hStack.addArrangedSubview(.hStretchingSpacer())
}
required init?(coder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
}

View File

@ -0,0 +1,54 @@
//
// Copyright (c) 2021 Open Whisper Systems. All rights reserved.
//
import Foundation
class GroupCallSwipeToastView: UIView {
private let imageView: UIImageView = {
let view = UIImageView()
view.setTemplateImageName("arrow-up-20", tintColor: .ows_white)
return view
}()
private let label: UILabel = {
let label = UILabel()
label.font = UIFont.ows_dynamicTypeBody2
label.textColor = .ows_gray05
label.numberOfLines = 0
label.lineBreakMode = .byWordWrapping
return label
}()
var text: String? {
get { label.text }
set { label.text = newValue }
}
override init(frame: CGRect) {
super.init(frame: frame)
layer.cornerRadius = 8
clipsToBounds = true
isUserInteractionEnabled = false
let blurView = UIVisualEffectView(effect: UIBlurEffect(style: .dark))
addSubview(blurView)
let stackView = UIStackView(arrangedSubviews: [
imageView,
label
])
stackView.axis = .horizontal
stackView.alignment = .center
stackView.spacing = 8
addSubview(stackView)
blurView.autoPinEdgesToSuperviewEdges()
stackView.autoPinEdgesToSuperviewEdges(with: UIEdgeInsets(top: 12, left: 12, bottom: 12, right: 12))
}
required init?(coder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
}

View File

@ -0,0 +1,32 @@
//
// Copyright (c) 2020 Open Whisper Systems. All rights reserved.
//
import Foundation
@objc
public class GroupCallTooltip: TooltipView {
@objc
public class func present(fromView: UIView,
widthReferenceView: UIView,
tailReferenceView: UIView,
wasTappedBlock: (() -> Void)?) -> GroupCallTooltip {
return GroupCallTooltip(fromView: fromView, widthReferenceView: widthReferenceView, tailReferenceView: tailReferenceView, wasTappedBlock: wasTappedBlock)
}
public override func bubbleContentView() -> UIView {
let label = UILabel()
label.text = NSLocalizedString(
"GROUP_CALL_START_TOOLTIP",
comment: "Tooltip highlighting group calls."
)
label.font = UIFont.ows_dynamicTypeSubheadline
label.textColor = UIColor.ows_white
return horizontalStack(forSubviews: [label])
}
public override var bubbleColor: UIColor { .ows_accentGreen }
public override var tailDirection: TooltipView.TailDirection { .up }
}

View File

@ -0,0 +1,203 @@
//
// Copyright (c) 2021 Open Whisper Systems. All rights reserved.
//
import Foundation
import SignalRingRTC
class GroupCallVideoGrid: UICollectionView {
weak var memberViewDelegate: GroupCallMemberViewDelegate?
let layout: GroupCallVideoGridLayout
let call: SignalCall
init(call: SignalCall) {
self.call = call
self.layout = GroupCallVideoGridLayout()
super.init(frame: .zero, collectionViewLayout: layout)
call.addObserverAndSyncState(observer: self)
layout.delegate = self
register(GroupCallVideoGridCell.self, forCellWithReuseIdentifier: GroupCallVideoGridCell.reuseIdentifier)
dataSource = self
delegate = self
}
required init?(coder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
deinit { call.removeObserver(self) }
}
extension GroupCallVideoGrid: UICollectionViewDelegate {
func collectionView(_ collectionView: UICollectionView, didEndDisplaying cell: UICollectionViewCell, forItemAt indexPath: IndexPath) {
guard let cell = cell as? GroupCallVideoGridCell else { return }
cell.cleanupVideoViews()
}
func collectionView(_ collectionView: UICollectionView, willDisplay cell: UICollectionViewCell, forItemAt indexPath: IndexPath) {
guard let cell = cell as? GroupCallVideoGridCell else { return }
guard let remoteDevice = gridRemoteDeviceStates[safe: indexPath.row] else {
return owsFailDebug("missing member address")
}
cell.configureRemoteVideo(device: remoteDevice)
}
}
extension GroupCallVideoGrid: UICollectionViewDataSource {
var gridRemoteDeviceStates: [RemoteDeviceState] {
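// Pick the most recent speakers (capped at maxItems), then order them by join time so grid positions stay stable while people talk.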
let remoteDeviceStates = call.groupCall.remoteDeviceStates.sortedBySpeakerTime
return Array(remoteDeviceStates[0..<min(maxItems, call.groupCall.remoteDeviceStates.count)]).sortedByAddedTime
}
func collectionView(_ collectionView: UICollectionView, numberOfItemsInSection section: Int) -> Int {
return gridRemoteDeviceStates.count
}
func collectionView(_ collectionView: UICollectionView, cellForItemAt indexPath: IndexPath) -> UICollectionViewCell {
let cell = collectionView.dequeueReusableCell(
withReuseIdentifier: GroupCallVideoGridCell.reuseIdentifier,
for: indexPath
) as! GroupCallVideoGridCell
guard let remoteDevice = gridRemoteDeviceStates[safe: indexPath.row] else {
owsFailDebug("missing member address")
return cell
}
cell.setMemberViewDelegate(memberViewDelegate)
cell.configure(call: call, device: remoteDevice)
return cell
}
}
extension GroupCallVideoGrid: CallObserver {
func groupCallRemoteDeviceStatesChanged(_ call: SignalCall) {
AssertIsOnMainThread()
owsAssertDebug(call.isGroupCall)
reloadData()
}
func groupCallPeekChanged(_ call: SignalCall) {
AssertIsOnMainThread()
owsAssertDebug(call.isGroupCall)
reloadData()
}
func groupCallEnded(_ call: SignalCall, reason: GroupCallEndReason) {
AssertIsOnMainThread()
owsAssertDebug(call.isGroupCall)
reloadData()
}
}
extension GroupCallVideoGrid: GroupCallVideoGridLayoutDelegate {
var maxColumns: Int {
if CurrentAppContext().frame.width > 1080 {
return 4
} else if CurrentAppContext().frame.width > 768 {
return 3
} else {
return 2
}
}
var maxRows: Int {
if CurrentAppContext().frame.height > 1024 {
return 4
} else {
return 3
}
}
var maxItems: Int { maxColumns * maxRows }
func deviceState(for index: Int) -> RemoteDeviceState? {
return gridRemoteDeviceStates[safe: index]
}
}
class GroupCallVideoGridCell: UICollectionViewCell {
static let reuseIdentifier = "GroupCallVideoGridCell"
private let memberView = GroupCallRemoteMemberView(mode: .videoGrid)
override init(frame: CGRect) {
super.init(frame: frame)
contentView.addSubview(memberView)
memberView.autoPinEdgesToSuperviewEdges()
contentView.layer.cornerRadius = 10
contentView.clipsToBounds = true
}
func configure(call: SignalCall, device: RemoteDeviceState) {
memberView.configure(call: call, device: device)
}
required init?(coder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
func cleanupVideoViews() {
memberView.cleanupVideoViews()
}
func configureRemoteVideo(device: RemoteDeviceState) {
memberView.configureRemoteVideo(device: device)
}
func setMemberViewDelegate(_ delegate: GroupCallMemberViewDelegate?) {
memberView.delegate = delegate
}
}
extension Sequence where Element: RemoteDeviceState {
/// The first person to join the call is the first item in the list.
/// Members that are presenting are always put at the top of the list.
var sortedByAddedTime: [RemoteDeviceState] {
return sorted { lhs, rhs in
if lhs.presenting != rhs.presenting {
return lhs.presenting ?? false
} else if lhs.mediaKeysReceived != rhs.mediaKeysReceived {
return lhs.mediaKeysReceived
} else if lhs.addedTime != rhs.addedTime {
return lhs.addedTime < rhs.addedTime
} else {
return lhs.demuxId < rhs.demuxId
}
}
}
/// The most recent speaker is the first item in the list.
/// Members that are presenting are always put at the top of the list.
var sortedBySpeakerTime: [RemoteDeviceState] {
return sorted { lhs, rhs in
if lhs.presenting != rhs.presenting {
return lhs.presenting ?? false
} else if lhs.mediaKeysReceived != rhs.mediaKeysReceived {
return lhs.mediaKeysReceived
} else if lhs.speakerTime != rhs.speakerTime {
return lhs.speakerTime > rhs.speakerTime
} else {
return lhs.demuxId < rhs.demuxId
}
}
}
}
extension Dictionary where Value: RemoteDeviceState {
/// The first person to join the call is the first item in the list.
var sortedByAddedTime: [RemoteDeviceState] {
return values.sortedByAddedTime
}
/// The most recent speaker is the first item in the list.
var sortedBySpeakerTime: [RemoteDeviceState] {
return values.sortedBySpeakerTime
}
}

View File

@ -0,0 +1,158 @@
//
// Copyright (c) 2021 Open Whisper Systems. All rights reserved.
//
import Foundation
import SignalRingRTC
protocol GroupCallVideoGridLayoutDelegate: AnyObject {
var maxColumns: Int { get }
var maxRows: Int { get }
var maxItems: Int { get }
func deviceState(for index: Int) -> RemoteDeviceState?
}
class GroupCallVideoGridLayout: UICollectionViewLayout {
public weak var delegate: GroupCallVideoGridLayoutDelegate?
private var itemAttributesMap = [Int: UICollectionViewLayoutAttributes]()
private var contentSize = CGSize.zero
// MARK: Initializers and Factory Methods
@available(*, unavailable, message: "use other constructor instead.")
required init?(coder aDecoder: NSCoder) {
notImplemented()
}
override init() {
super.init()
}
// MARK: Methods
override func invalidateLayout() {
super.invalidateLayout()
itemAttributesMap.removeAll()
}
override func invalidateLayout(with context: UICollectionViewLayoutInvalidationContext) {
super.invalidateLayout(with: context)
itemAttributesMap.removeAll()
}
override func prepare() {
super.prepare()
guard let collectionView = collectionView else { return }
guard let delegate = delegate else { return }
let vInset: CGFloat = 6
let hInset: CGFloat = 6
let vSpacing: CGFloat = 6
let hSpacing: CGFloat = 6
let maxColumns = delegate.maxColumns
let maxRows = delegate.maxRows
let numberOfItems = min(collectionView.numberOfItems(inSection: 0), delegate.maxItems)
guard numberOfItems > 0 else { return }
// We evenly distribute items across rows, up to the max
// column count. If an item is alone on a row, it should
// expand across all columns.
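// For example, 5 items with maxColumns = 3 and maxRows = 3 produces candidates (5x1), (3x2) and (2x3);
// (5x1) is filtered out by maxRows and the sort below prefers (3x2), leaving the lone item on the
// last row to span the full width.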
let possibleGrids = (1...maxColumns).reduce(
into: [(rows: Int, columns: Int)]()
) { result, columns in
let rows = Int(ceil(CGFloat(numberOfItems) / CGFloat(columns)))
if let previousRows = result.last?.rows, previousRows == rows { return }
result.append((rows, columns))
}.filter { $0.columns <= maxColumns && $0.rows <= maxRows }
.sorted { lhs, rhs in
// We prefer to render square grids (e.g. 2x2, 3x3, etc.) but it's
// not always possible depending on how many items we have available.
// If a square aspect ratio is not possible, we'll defer to having more
// rows than columns.
let lhsDistanceFromSquare = CGFloat(lhs.rows) / CGFloat(lhs.columns) - 1
let rhsDistanceFromSquare = CGFloat(rhs.rows) / CGFloat(rhs.columns) - 1
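// A non-negative distance means at least as many rows as columns; a negative one means the grid is wider than it is tall.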
if lhsDistanceFromSquare >= 0 && rhsDistanceFromSquare >= 0 {
return lhsDistanceFromSquare < rhsDistanceFromSquare
} else {
return lhsDistanceFromSquare > rhsDistanceFromSquare
}
}
guard let (numberOfRows, numberOfColumns) = possibleGrids.first else { return owsFailDebug("missing grid") }
let totalViewWidth = collectionView.width
let totalViewHeight = collectionView.height
let verticalSpacersWidth = (2 * vInset) + (vSpacing * (CGFloat(numberOfRows) - 1))
let verticalCellSpace = totalViewHeight - verticalSpacersWidth
let rowHeight = verticalCellSpace / CGFloat(numberOfRows)
// The last row may have less columns than the previous rows,
// if there is an odd number of videos. Each row should always
// expand the full width of the collection view.
var columnWidthPerRow = [CGFloat]()
for row in 1...numberOfRows {
let numberOfColumnsForRow: Int
if row == numberOfRows {
numberOfColumnsForRow = numberOfItems - ((row - 1) * numberOfColumns)
} else {
numberOfColumnsForRow = numberOfColumns
}
let horizontalSpacersWidth = (2 * hInset) + (hSpacing * (CGFloat(numberOfColumnsForRow) - 1))
let horizontalCellSpace = totalViewWidth - horizontalSpacersWidth
let columnWidth = horizontalCellSpace / CGFloat(numberOfColumnsForRow)
columnWidthPerRow.append(columnWidth)
}
for index in 0..<numberOfItems {
let indexPath = NSIndexPath(item: index, section: 0)
let itemAttributes = UICollectionViewLayoutAttributes(forCellWith: indexPath as IndexPath)
let row = ceil(CGFloat(index + 1) / CGFloat(numberOfColumns)) - 1
let yPosition = (row * rowHeight) + vInset + (CGFloat(row) * vSpacing)
let columnWidth = columnWidthPerRow[Int(row)]
let column = CGFloat(index % numberOfColumns)
let xPosition = (column * columnWidth) + hInset + (CGFloat(column) * hSpacing)
itemAttributes.frame = CGRect(x: xPosition, y: yPosition, width: columnWidth, height: rowHeight)
itemAttributesMap[index] = itemAttributes
}
contentSize = collectionView.frame.size
}
override func layoutAttributesForElements(in rect: CGRect) -> [UICollectionViewLayoutAttributes]? {
return itemAttributesMap.values.filter { itemAttributes in
return itemAttributes.frame.intersects(rect)
}
}
override func layoutAttributesForItem(at indexPath: IndexPath) -> UICollectionViewLayoutAttributes? {
return itemAttributesMap[indexPath.row]
}
override var collectionViewContentSize: CGSize {
return contentSize
}
override func shouldInvalidateLayout(forBoundsChange newBounds: CGRect) -> Bool {
return true
}
}

View File

@ -0,0 +1,258 @@
//
// Copyright (c) 2021 Open Whisper Systems. All rights reserved.
//
import Foundation
import SignalRingRTC
protocol GroupCallVideoOverflowDelegate: AnyObject {
var firstOverflowMemberIndex: Int { get }
func updateVideoOverflowTrailingConstraint()
}
class GroupCallVideoOverflow: UICollectionView {
weak var memberViewDelegate: GroupCallMemberViewDelegate?
weak var overflowDelegate: GroupCallVideoOverflowDelegate?
let call: SignalCall
class var itemHeight: CGFloat {
return UIDevice.current.isIPad ? 96 : 72
}
private var hasInitialized = false
private var isAnyRemoteDeviceScreenSharing = false {
didSet {
guard oldValue != isAnyRemoteDeviceScreenSharing else { return }
updateOrientationOverride()
}
}
init(call: SignalCall, delegate: GroupCallVideoOverflowDelegate) {
self.call = call
self.overflowDelegate = delegate
let layout = UICollectionViewFlowLayout()
layout.itemSize = CGSize(square: Self.itemHeight)
layout.minimumLineSpacing = 4
layout.scrollDirection = .horizontal
super.init(frame: .zero, collectionViewLayout: layout)
backgroundColor = .clear
alpha = 0
showsHorizontalScrollIndicator = false
contentInset = UIEdgeInsets(top: 0, leading: 16, bottom: 0, trailing: 16)
// We want the collection view contents to render in the
// inverse of the type direction.
semanticContentAttribute = CurrentAppContext().isRTL ? .forceLeftToRight : .forceRightToLeft
autoSetDimension(.height, toSize: Self.itemHeight)
register(GroupCallVideoOverflowCell.self, forCellWithReuseIdentifier: GroupCallVideoOverflowCell.reuseIdentifier)
dataSource = self
self.delegate = self
call.addObserverAndSyncState(observer: self)
hasInitialized = true
NotificationCenter.default.addObserver(
self,
selector: #selector(updateOrientationOverride),
name: UIDevice.orientationDidChangeNotification,
object: nil
)
}
required init?(coder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
deinit { call.removeObserver(self) }
private enum OrientationOverride {
case landscapeLeft
case landscapeRight
}
private var orientationOverride: OrientationOverride? {
didSet {
guard orientationOverride != oldValue else { return }
reloadData()
}
}
@objc
func updateOrientationOverride() {
// If we're on iPhone and screen sharing, we want to allow
// the user to change the orientation. We fake this by
// manually transforming the cells.
guard !UIDevice.current.isIPad && isAnyRemoteDeviceScreenSharing else {
orientationOverride = nil
return
}
switch UIDevice.current.orientation {
case .faceDown, .faceUp, .unknown:
// Do nothing, assume the last orientation was already applied.
break
case .portrait, .portraitUpsideDown:
// Clear any override
orientationOverride = nil
case .landscapeLeft:
orientationOverride = .landscapeLeft
case .landscapeRight:
orientationOverride = .landscapeRight
@unknown default:
break
}
}
private var isAnimating = false
private var hadVisibleCells = false
override func reloadData() {
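// When the overflow strip transitions between empty and non-empty, fade it in or out and defer the
// real reload until the animation completes so cells don't pop mid-fade.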
guard !isAnimating else { return }
defer {
if hasInitialized { overflowDelegate?.updateVideoOverflowTrailingConstraint() }
}
let hasVisibleCells = overflowedRemoteDeviceStates.count > 0
if hasVisibleCells != hadVisibleCells {
hadVisibleCells = hasVisibleCells
isAnimating = true
if hasVisibleCells { super.reloadData() }
UIView.animate(
withDuration: 0.15,
animations: { self.alpha = hasVisibleCells ? 1 : 0 }
) { _ in
self.isAnimating = false
self.reloadData()
}
} else {
super.reloadData()
}
}
}
extension GroupCallVideoOverflow: UICollectionViewDelegate {
func collectionView(_ collectionView: UICollectionView, didEndDisplaying cell: UICollectionViewCell, forItemAt indexPath: IndexPath) {
guard let cell = cell as? GroupCallVideoOverflowCell else { return }
cell.cleanupVideoViews()
}
func collectionView(_ collectionView: UICollectionView, willDisplay cell: UICollectionViewCell, forItemAt indexPath: IndexPath) {
guard let cell = cell as? GroupCallVideoOverflowCell else { return }
guard let remoteDevice = overflowedRemoteDeviceStates[safe: indexPath.row] else {
return owsFailDebug("missing member address")
}
cell.configureRemoteVideo(device: remoteDevice)
if let orientationOverride = orientationOverride {
switch orientationOverride {
case .landscapeRight:
cell.transform = .init(rotationAngle: -.halfPi)
case .landscapeLeft:
cell.transform = .init(rotationAngle: .halfPi)
}
} else {
cell.transform = .identity
}
}
}
extension GroupCallVideoOverflow: UICollectionViewDataSource {
var overflowedRemoteDeviceStates: [RemoteDeviceState] {
guard let firstOverflowMemberIndex = overflowDelegate?.firstOverflowMemberIndex else { return [] }
let joinedRemoteDeviceStates = call.groupCall.remoteDeviceStates.sortedBySpeakerTime
guard joinedRemoteDeviceStates.count > firstOverflowMemberIndex else { return [] }
// We reverse this as we're rendering in the inverted direction.
return Array(joinedRemoteDeviceStates[firstOverflowMemberIndex..<joinedRemoteDeviceStates.count]).sortedByAddedTime.reversed()
}
func collectionView(_ collectionView: UICollectionView, numberOfItemsInSection section: Int) -> Int {
return overflowedRemoteDeviceStates.count
}
func collectionView(_ collectionView: UICollectionView, cellForItemAt indexPath: IndexPath) -> UICollectionViewCell {
let cell = collectionView.dequeueReusableCell(
withReuseIdentifier: GroupCallVideoOverflowCell.reuseIdentifier,
for: indexPath
) as! GroupCallVideoOverflowCell
guard let remoteDevice = overflowedRemoteDeviceStates[safe: indexPath.row] else {
owsFailDebug("missing member address")
return cell
}
cell.setMemberViewDelegate(memberViewDelegate)
cell.configure(call: call, device: remoteDevice)
return cell
}
}
extension GroupCallVideoOverflow: CallObserver {
func groupCallRemoteDeviceStatesChanged(_ call: SignalCall) {
AssertIsOnMainThread()
owsAssertDebug(call.isGroupCall)
isAnyRemoteDeviceScreenSharing = call.groupCall.remoteDeviceStates.values.first { $0.sharingScreen == true } != nil
reloadData()
}
func groupCallPeekChanged(_ call: SignalCall) {
AssertIsOnMainThread()
owsAssertDebug(call.isGroupCall)
reloadData()
}
func groupCallEnded(_ call: SignalCall, reason: GroupCallEndReason) {
AssertIsOnMainThread()
owsAssertDebug(call.isGroupCall)
reloadData()
}
}
class GroupCallVideoOverflowCell: UICollectionViewCell {
static let reuseIdentifier = "GroupCallVideoOverflowCell"
private let memberView = GroupCallRemoteMemberView(mode: .videoOverflow)
override init(frame: CGRect) {
super.init(frame: frame)
contentView.addSubview(memberView)
memberView.autoPinEdgesToSuperviewEdges()
contentView.layer.cornerRadius = 10
contentView.clipsToBounds = true
}
func configure(call: SignalCall, device: RemoteDeviceState) {
memberView.configure(call: call, device: device)
}
required init?(coder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
func cleanupVideoViews() {
memberView.cleanupVideoViews()
}
func configureRemoteVideo(device: RemoteDeviceState) {
memberView.configureRemoteVideo(device: device)
}
func setMemberViewDelegate(_ delegate: GroupCallMemberViewDelegate?) {
memberView.delegate = delegate
}
}

View File

@ -0,0 +1,859 @@
//
// Copyright (c) 2021 Open Whisper Systems. All rights reserved.
//
import Foundation
import SignalRingRTC
// TODO: Eventually add 1:1 call support to this view
// and replace CallViewController
class GroupCallViewController: UIViewController {
private let thread: TSGroupThread?
private let call: SignalCall
private var groupCall: GroupCall { call.groupCall }
private lazy var callControls = CallControls(call: call, delegate: self)
private lazy var callHeader = CallHeader(call: call, delegate: self)
private lazy var notificationView = GroupCallNotificationView(call: call)
private lazy var videoGrid = GroupCallVideoGrid(call: call)
private lazy var videoOverflow = GroupCallVideoOverflow(call: call, delegate: self)
private let localMemberView = GroupCallLocalMemberView()
private let speakerView = GroupCallRemoteMemberView(mode: .speaker)
private var didUserEverSwipeToSpeakerView = true
private var didUserEverSwipeToScreenShare = true
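// Default to true so the swipe hint toasts stay hidden until the persisted values are loaded in init below.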
private let swipeToastView = GroupCallSwipeToastView()
private var speakerPage = UIView()
private let scrollView = UIScrollView()
private var isCallMinimized = false {
didSet { speakerView.isCallMinimized = isCallMinimized }
}
private var isAutoScrollingToScreenShare = false
private var isAnyRemoteDeviceScreenSharing = false {
didSet {
guard oldValue != isAnyRemoteDeviceScreenSharing else { return }
// Scroll to speaker view when presenting begins.
if isAnyRemoteDeviceScreenSharing {
isAutoScrollingToScreenShare = true
scrollView.setContentOffset(CGPoint(x: 0, y: speakerPage.frame.origin.y), animated: true)
}
}
}
lazy var tapGesture = UITapGestureRecognizer(target: self, action: #selector(didTouchRootView))
lazy var videoOverflowTopConstraint = videoOverflow.autoPinEdge(toSuperviewEdge: .top)
lazy var videoOverflowTrailingConstraint = videoOverflow.autoPinEdge(toSuperviewEdge: .trailing)
var shouldRemoteVideoControlsBeHidden = false {
didSet { updateCallUI() }
}
var hasUnresolvedSafetyNumberMismatch = false
private static let keyValueStore = SDSKeyValueStore(collection: "GroupCallViewController")
private static let didUserSwipeToSpeakerViewKey = "didUserSwipeToSpeakerView"
private static let didUserSwipeToScreenShareKey = "didUserSwipeToScreenShare"
init(call: SignalCall) {
// TODO: Eventually unify UI for group and individual calls
owsAssertDebug(call.isGroupCall)
self.call = call
self.thread = call.thread as? TSGroupThread
super.init(nibName: nil, bundle: nil)
call.addObserverAndSyncState(observer: self)
videoGrid.memberViewDelegate = self
videoOverflow.memberViewDelegate = self
speakerView.delegate = self
localMemberView.delegate = self
SDSDatabaseStorage.shared.asyncRead { readTx in
self.didUserEverSwipeToSpeakerView = Self.keyValueStore.getBool(
Self.didUserSwipeToSpeakerViewKey,
defaultValue: false,
transaction: readTx
)
self.didUserEverSwipeToScreenShare = Self.keyValueStore.getBool(
Self.didUserSwipeToScreenShareKey,
defaultValue: false,
transaction: readTx
)
} completion: {
self.updateSwipeToastView()
}
}
@discardableResult
@objc(presentLobbyForThread:)
class func presentLobby(thread: TSGroupThread) -> Bool {
guard tsAccountManager.isOnboarded() else {
Logger.warn("aborting due to user not being onboarded.")
OWSActionSheets.showActionSheet(title: NSLocalizedString(
"YOU_MUST_COMPLETE_ONBOARDING_BEFORE_PROCEEDING",
comment: "alert body shown when trying to use features in the app before completing registration-related setup."
))
return false
}
guard let frontmostViewController = UIApplication.shared.frontmostViewController else {
owsFailDebug("could not identify frontmostViewController")
return false
}
frontmostViewController.ows_askForMicrophonePermissions { granted in
guard granted == true else {
Logger.warn("aborting due to missing microphone permissions.")
frontmostViewController.ows_showNoMicrophonePermissionActionSheet()
return
}
frontmostViewController.ows_askForCameraPermissions { granted in
guard granted else {
Logger.warn("aborting due to missing camera permissions.")
return
}
guard let groupCall = Self.callService.buildAndConnectGroupCallIfPossible(
thread: thread
) else {
return owsFailDebug("Failed to build group call")
}
// Dismiss the group calling megaphone once someone opens the lobby.
ExperienceUpgradeManager.clearExperienceUpgradeWithSneakyTransaction(.groupCallsMegaphone)
// Dismiss the group call tooltip
self.preferences.setWasGroupCallTooltipShown()
let vc = GroupCallViewController(call: groupCall)
vc.modalTransitionStyle = .crossDissolve
OWSWindowManager.shared.startCall(vc)
}
}
return true
}
required init?(coder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
override func loadView() {
view = UIView()
view.clipsToBounds = true
view.backgroundColor = .ows_black
scrollView.delegate = self
view.addSubview(scrollView)
scrollView.isPagingEnabled = true
scrollView.showsVerticalScrollIndicator = false
scrollView.contentInsetAdjustmentBehavior = .never
scrollView.alwaysBounceVertical = false
scrollView.autoPinEdgesToSuperviewEdges()
view.addSubview(callHeader)
callHeader.autoPinWidthToSuperview()
callHeader.autoPinEdge(toSuperviewEdge: .top)
view.addSubview(notificationView)
notificationView.autoPinEdgesToSuperviewEdges()
view.addSubview(callControls)
callControls.autoPinWidthToSuperview()
callControls.autoPinEdge(toSuperviewEdge: .bottom)
view.addSubview(videoOverflow)
videoOverflow.autoPinEdge(toSuperviewEdge: .leading)
scrollView.addSubview(videoGrid)
scrollView.addSubview(speakerPage)
scrollView.addSubview(swipeToastView)
swipeToastView.autoPinEdge(.bottom, to: .bottom, of: videoGrid, withOffset: -22)
swipeToastView.autoHCenterInSuperview()
swipeToastView.autoPinEdge(toSuperviewMargin: .leading, relation: .greaterThanOrEqual)
swipeToastView.autoPinEdge(toSuperviewMargin: .trailing, relation: .greaterThanOrEqual)
view.addGestureRecognizer(tapGesture)
updateCallUI()
}
override func viewWillTransition(to size: CGSize, with coordinator: UIViewControllerTransitionCoordinator) {
super.viewWillTransition(to: size, with: coordinator)
let wasOnSpeakerPage = scrollView.contentOffset.y >= view.height
coordinator.animate(alongsideTransition: { _ in
self.updateCallUI(size: size)
self.videoGrid.reloadData()
self.videoOverflow.reloadData()
self.scrollView.contentOffset = wasOnSpeakerPage ? CGPoint(x: 0, y: size.height) : .zero
}, completion: nil)
}
private var hasAppeared = false
override func viewWillAppear(_ animated: Bool) {
super.viewWillAppear(animated)
guard !hasAppeared else { return }
hasAppeared = true
guard let splitViewSnapshot = SignalApp.shared().snapshotSplitViewController(afterScreenUpdates: false) else {
return owsFailDebug("failed to snapshot rootViewController")
}
view.superview?.insertSubview(splitViewSnapshot, belowSubview: view)
splitViewSnapshot.autoPinEdgesToSuperviewEdges()
view.transform = .scale(1.5)
view.alpha = 0
UIView.animate(withDuration: 0.2, animations: {
self.view.alpha = 1
self.view.transform = .identity
}) { _ in
splitViewSnapshot.removeFromSuperview()
}
}
override func viewDidAppear(_ animated: Bool) {
super.viewDidAppear(animated)
if hasUnresolvedSafetyNumberMismatch {
resolveSafetyNumberMismatch()
}
}
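// True when there are more remote participants than the video grid can display;
// the extra members are rendered in the horizontal overflow strip.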
private var hasOverflowMembers: Bool { videoGrid.maxItems < groupCall.remoteDeviceStates.count }
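// Lays out the two vertically paged views: the video grid on page one and the speaker view on page two.
// With fewer than two remote participants, or before joining, only the speaker page is used and scrolling is disabled.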
private func updateScrollViewFrames(size: CGSize? = nil, controlsAreHidden: Bool) {
view.layoutIfNeeded()
let size = size ?? view.frame.size
if groupCall.remoteDeviceStates.count < 2 || groupCall.localDeviceState.joinState != .joined {
videoGrid.frame = .zero
videoGrid.isHidden = true
speakerPage.frame = CGRect(
x: 0,
y: 0,
width: size.width,
height: size.height
)
scrollView.contentSize = size
scrollView.contentOffset = .zero
scrollView.isScrollEnabled = false
} else {
let wasVideoGridHidden = videoGrid.isHidden
scrollView.isScrollEnabled = true
videoGrid.isHidden = false
videoGrid.frame = CGRect(
x: 0,
y: view.safeAreaInsets.top,
width: size.width,
height: size.height - view.safeAreaInsets.top - (controlsAreHidden ? 16 : callControls.height) - (hasOverflowMembers ? videoOverflow.height + 32 : 0)
)
speakerPage.frame = CGRect(
x: 0,
y: size.height,
width: size.width,
height: size.height
)
scrollView.contentSize = CGSize(width: size.width, height: size.height * 2)
if wasVideoGridHidden {
scrollView.contentOffset = .zero
}
}
}
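// Inset the overflow strip's trailing edge so it doesn't run underneath the local member PIP,
// relaxing the inset slightly when the strip's content already fits.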
func updateVideoOverflowTrailingConstraint() {
var trailingConstraintConstant = -(GroupCallVideoOverflow.itemHeight * ReturnToCallViewController.pipSize.aspectRatio + 4)
if view.width + trailingConstraintConstant > videoOverflow.contentSize.width {
trailingConstraintConstant += 16
}
videoOverflowTrailingConstraint.constant = trailingConstraintConstant
view.layoutIfNeeded()
}
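// Positions the speaker view and the local member view (full screen, standard PIP, or overflow-sized PIP)
// based on the local join state and the number of remote participants.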
private func updateMemberViewFrames(size: CGSize? = nil, controlsAreHidden: Bool) {
view.layoutIfNeeded()
let size = size ?? view.frame.size
let yMax = (controlsAreHidden ? size.height - 16 : callControls.frame.minY) - 16
videoOverflowTopConstraint.constant = yMax - videoOverflow.height
updateVideoOverflowTrailingConstraint()
localMemberView.removeFromSuperview()
speakerView.removeFromSuperview()
switch groupCall.localDeviceState.joinState {
case .joined:
if groupCall.remoteDeviceStates.count > 0 {
speakerPage.addSubview(speakerView)
speakerView.autoPinEdgesToSuperviewEdges()
view.addSubview(localMemberView)
if groupCall.remoteDeviceStates.count > 1 {
let pipWidth = GroupCallVideoOverflow.itemHeight * ReturnToCallViewController.pipSize.aspectRatio
let pipHeight = GroupCallVideoOverflow.itemHeight
localMemberView.frame = CGRect(
x: size.width - pipWidth - 16,
y: videoOverflow.frame.origin.y,
width: pipWidth,
height: pipHeight
)
} else {
let pipWidth = ReturnToCallViewController.pipSize.width
let pipHeight = ReturnToCallViewController.pipSize.height
localMemberView.frame = CGRect(
x: size.width - pipWidth - 16,
y: yMax - pipHeight,
width: pipWidth,
height: pipHeight
)
}
} else {
speakerPage.addSubview(localMemberView)
localMemberView.frame = CGRect(origin: .zero, size: size)
}
case .notJoined, .joining:
speakerPage.addSubview(localMemberView)
localMemberView.frame = CGRect(origin: .zero, size: size)
}
}
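// Shows a one-time hint that the user can swipe up to the speaker / screen share page;
// hidden once the user has swiped there before or whenever the speaker page isn't available.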
func updateSwipeToastView() {
let isSpeakerViewAvailable = groupCall.remoteDeviceStates.count >= 2 && groupCall.localDeviceState.joinState == .joined
guard isSpeakerViewAvailable else {
swipeToastView.isHidden = true
return
}
if isAnyRemoteDeviceScreenSharing {
if didUserEverSwipeToScreenShare {
swipeToastView.isHidden = true
return
}
} else if didUserEverSwipeToSpeakerView {
swipeToastView.isHidden = true
return
}
swipeToastView.alpha = 1.0 - (scrollView.contentOffset.y / view.height)
swipeToastView.text = isAnyRemoteDeviceScreenSharing
? NSLocalizedString(
"GROUP_CALL_SCREEN_SHARE_TOAST",
comment: "Toast view text informing user about swiping to screen share"
)
: NSLocalizedString(
"GROUP_CALL_SPEAKER_VIEW_TOAST",
comment: "Toast view text informing user about swiping to speaker view"
)
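// Once the speaker page is reached, remember that the user has seen it so the hint isn't shown again.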
if scrollView.contentOffset.y >= view.height {
swipeToastView.isHidden = true
if isAnyRemoteDeviceScreenSharing {
if !isAutoScrollingToScreenShare {
didUserEverSwipeToScreenShare = true
SDSDatabaseStorage.shared.asyncWrite { writeTx in
Self.keyValueStore.setBool(true, key: Self.didUserSwipeToScreenShareKey, transaction: writeTx)
}
}
} else {
didUserEverSwipeToSpeakerView = true
SDSDatabaseStorage.shared.asyncWrite { writeTx in
Self.keyValueStore.setBool(true, key: Self.didUserSwipeToSpeakerViewKey, transaction: writeTx)
}
}
} else if swipeToastView.isHidden {
swipeToastView.alpha = 0
swipeToastView.isHidden = false
UIView.animate(withDuration: 0.2, delay: 3.0, options: []) {
self.swipeToastView.alpha = 1
}
}
}
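// Refreshes the full call UI: member view configuration, speakerphone state, control visibility,
// layout frames, and the swipe hint.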
func updateCallUI(size: CGSize? = nil) {
let localDevice = groupCall.localDeviceState
localMemberView.configure(
call: call,
isFullScreen: localDevice.joinState != .joined || groupCall.remoteDeviceStates.isEmpty
)
if let speakerState = groupCall.remoteDeviceStates.sortedBySpeakerTime.first {
speakerView.configure(
call: call,
device: speakerState
)
} else {
speakerView.clearConfiguration()
}
// Setting the speakerphone before we join the call will fail,
// but we can re-apply the setting here in case it did not work.
if groupCall.isOutgoingVideoMuted && !callService.audioService.hasExternalInputs {
callService.audioService.requestSpeakerphone(isEnabled: callControls.audioSourceButton.isSelected)
}
guard !isCallMinimized else { return }
let hideRemoteControls = shouldRemoteVideoControlsBeHidden && !groupCall.remoteDeviceStates.isEmpty
let remoteControlsAreHidden = callControls.isHidden && callHeader.isHidden
if hideRemoteControls != remoteControlsAreHidden {
callControls.isHidden = false
callHeader.isHidden = false
UIView.animate(withDuration: 0.15, animations: {
self.callControls.alpha = hideRemoteControls ? 0 : 1
self.callHeader.alpha = hideRemoteControls ? 0 : 1
self.updateMemberViewFrames(size: size, controlsAreHidden: hideRemoteControls)
self.updateScrollViewFrames(size: size, controlsAreHidden: hideRemoteControls)
self.view.layoutIfNeeded()
}) { _ in
self.callControls.isHidden = hideRemoteControls
self.callHeader.isHidden = hideRemoteControls
}
} else {
updateMemberViewFrames(size: size, controlsAreHidden: hideRemoteControls)
updateScrollViewFrames(size: size, controlsAreHidden: hideRemoteControls)
}
scheduleControlTimeoutIfNecessary()
updateSwipeToastView()
}
func dismissCall() {
callService.terminate(call: call)
guard let splitViewSnapshot = SignalApp.shared().snapshotSplitViewController(afterScreenUpdates: false) else {
OWSWindowManager.shared.endCall(self)
return owsFailDebug("failed to snapshot rootViewController")
}
view.superview?.insertSubview(splitViewSnapshot, belowSubview: view)
splitViewSnapshot.autoPinEdgesToSuperviewEdges()
UIView.animate(withDuration: 0.2, animations: {
self.view.alpha = 0
}) { _ in
splitViewSnapshot.removeFromSuperview()
OWSWindowManager.shared.endCall(self)
}
}
override var preferredStatusBarStyle: UIStatusBarStyle {
return .lightContent
}
override var prefersHomeIndicatorAutoHidden: Bool {
return true
}
// MARK: - Video control timeout
@objc func didTouchRootView(sender: UIGestureRecognizer) {
shouldRemoteVideoControlsBeHidden = !shouldRemoteVideoControlsBeHidden
}
private var controlTimeoutTimer: Timer?
private func scheduleControlTimeoutIfNecessary() {
if groupCall.remoteDeviceStates.isEmpty || shouldRemoteVideoControlsBeHidden {
controlTimeoutTimer?.invalidate()
controlTimeoutTimer = nil
}
guard controlTimeoutTimer == nil else { return }
controlTimeoutTimer = .weakScheduledTimer(
withTimeInterval: 5,
target: self,
selector: #selector(timeoutControls),
userInfo: nil,
repeats: false
)
}
@objc
private func timeoutControls() {
controlTimeoutTimer?.invalidate()
controlTimeoutTimer = nil
guard !isCallMinimized && !groupCall.remoteDeviceStates.isEmpty && !shouldRemoteVideoControlsBeHidden else { return }
shouldRemoteVideoControlsBeHidden = true
}
}
extension GroupCallViewController: CallViewControllerWindowReference {
var localVideoViewReference: UIView { localMemberView }
var remoteVideoViewReference: UIView { speakerView }
var remoteVideoAddress: SignalServiceAddress {
guard let firstMember = groupCall.remoteDeviceStates.sortedByAddedTime.first else {
return tsAccountManager.localAddress!
}
return firstMember.address
}
@objc
public func returnFromPip(pipWindow: UIWindow) {
// The call "pip" uses our remote and local video views since only
// one `AVCaptureVideoPreviewLayer` per capture session is supported.
// We need to re-add them when we return to this view.
guard speakerView.superview != speakerPage && localMemberView.superview != view else {
return owsFailDebug("unexpectedly returned to call while we own the video views")
}
guard let splitViewSnapshot = SignalApp.shared().snapshotSplitViewController(afterScreenUpdates: false) else {
return owsFailDebug("failed to snapshot rootViewController")
}
guard let pipSnapshot = pipWindow.snapshotView(afterScreenUpdates: false) else {
return owsFailDebug("failed to snapshot pip")
}
isCallMinimized = false
shouldRemoteVideoControlsBeHidden = false
animateReturnFromPip(pipSnapshot: pipSnapshot, pipFrame: pipWindow.frame, splitViewSnapshot: splitViewSnapshot)
}
private func animateReturnFromPip(pipSnapshot: UIView, pipFrame: CGRect, splitViewSnapshot: UIView) {
guard let window = view.window else { return owsFailDebug("missing window") }
view.superview?.insertSubview(splitViewSnapshot, belowSubview: view)
splitViewSnapshot.autoPinEdgesToSuperviewEdges()
let originalContentOffset = scrollView.contentOffset
view.frame = pipFrame
view.addSubview(pipSnapshot)
pipSnapshot.autoPinEdgesToSuperviewEdges()
view.layoutIfNeeded()
UIView.animate(withDuration: 0.2, animations: {
pipSnapshot.alpha = 0
self.view.frame = window.frame
self.updateCallUI()
self.scrollView.contentOffset = originalContentOffset
self.view.layoutIfNeeded()
}) { _ in
splitViewSnapshot.removeFromSuperview()
pipSnapshot.removeFromSuperview()
if self.hasUnresolvedSafetyNumberMismatch {
self.resolveSafetyNumberMismatch()
}
}
}
func resolveSafetyNumberMismatch() {
if !isCallMinimized, CurrentAppContext().isAppForegroundAndActive() {
presentSafetyNumberChangeSheetIfNecessary { [weak self] success in
guard let self = self else { return }
if success {
self.groupCall.resendMediaKeys()
self.hasUnresolvedSafetyNumberMismatch = false
} else {
self.dismissCall()
}
}
} else {
Self.notificationPresenter.notifyForGroupCallSafetyNumberChange(inThread: call.thread)
}
}
func presentSafetyNumberChangeSheetIfNecessary(completion: @escaping (Bool) -> Void) {
let localDeviceHasNotJoined = groupCall.localDeviceState.joinState == .notJoined
let currentParticipantAddresses = groupCall.remoteDeviceStates.map { $0.value.address }
// If we haven't joined the call yet, we want to alert for all members of the group
// If we are in the call, we only care about safety numbers for the active call participants
let addressesToAlert = call.thread.recipientAddresses.filter { memberAddress in
let isUntrusted = Self.identityManager.untrustedIdentityForSending(to: memberAddress) != nil
let isMemberInCall = currentParticipantAddresses.contains(memberAddress)
// We want to alert for safety number changes of all members if we haven't joined yet
// If we're already in the call, we only care about active call participants
return isUntrusted && (isMemberInCall || localDeviceHasNotJoined)
}
// There are no unverified addresses that we're currently concerned about. No need to show a sheet
guard addressesToAlert.count > 0 else { return completion(true) }
let startCallString = NSLocalizedString("GROUP_CALL_START_BUTTON", comment: "Button to start a group call")
let joinCallString = NSLocalizedString("GROUP_CALL_JOIN_BUTTON", comment: "Button to join an ongoing group call")
let continueCallString = NSLocalizedString("GROUP_CALL_CONTINUE_BUTTON", comment: "Button to continue an ongoing group call")
let leaveCallString = NSLocalizedString("GROUP_CALL_LEAVE_BUTTON", comment: "Button to leave a group call")
let cancelString = CommonStrings.cancelButton
let approveText: String
let denyText: String
if localDeviceHasNotJoined {
let deviceCount = call.groupCall.peekInfo?.deviceCount ?? 0
approveText = deviceCount > 0 ? joinCallString : startCallString
denyText = cancelString
} else {
approveText = continueCallString
denyText = leaveCallString
}
let sheet = SafetyNumberConfirmationSheet(
addressesToConfirm: addressesToAlert,
confirmationText: approveText,
cancelText: denyText,
theme: .translucentDark) { didApprove in
if didApprove {
SDSDatabaseStorage.shared.asyncWrite { writeTx in
let identityManager = Self.identityManager
for address in addressesToAlert {
guard let identityKey = identityManager.identityKey(for: address, transaction: writeTx) else { return }
let currentState = identityManager.verificationState(for: address, transaction: writeTx)
let newState = (currentState == .noLongerVerified) ? .default : currentState
identityManager.setVerificationState(newState,
identityKey: identityKey,
address: address,
isUserInitiatedChange: true,
transaction: writeTx)
}
} completion: {
completion(true)
}
} else {
completion(false)
}
}
sheet.allowsDismissal = localDeviceHasNotJoined
present(sheet, animated: true, completion: nil)
}
}
extension GroupCallViewController: CallObserver {
func groupCallLocalDeviceStateChanged(_ call: SignalCall) {
AssertIsOnMainThread()
owsAssertDebug(call.isGroupCall)
updateCallUI()
}
func groupCallRemoteDeviceStatesChanged(_ call: SignalCall) {
AssertIsOnMainThread()
owsAssertDebug(call.isGroupCall)
isAnyRemoteDeviceScreenSharing = call.groupCall.remoteDeviceStates.values.first { $0.sharingScreen == true } != nil
updateCallUI()
}
func groupCallPeekChanged(_ call: SignalCall) {
AssertIsOnMainThread()
owsAssertDebug(call.isGroupCall)
updateCallUI()
}
func groupCallEnded(_ call: SignalCall, reason: GroupCallEndReason) {
AssertIsOnMainThread()
owsAssertDebug(call.isGroupCall)
defer { updateCallUI() }
guard reason != .deviceExplicitlyDisconnected else { return }
let title: String
if reason == .hasMaxDevices {
if let maxDevices = groupCall.maxDevices {
let formatString = NSLocalizedString(
"GROUP_CALL_HAS_MAX_DEVICES_FORMAT",
comment: "An error displayed to the user when the group call ends because it has exceeded the max devices. Embeds {{max device count}}."
)
title = String(format: formatString, maxDevices)
} else {
title = NSLocalizedString(
"GROUP_CALL_HAS_MAX_DEVICES_UNKNOWN_COUNT",
comment: "An error displayed to the user when the group call ends because it has exceeded the max devices."
)
}
} else {
owsFailDebug("Group call ended with reason \(reason)")
title = NSLocalizedString(
"GROUP_CALL_UNEXPECTEDLY_ENDED",
comment: "An error displayed to the user when the group call unexpectedly ends."
)
}
let actionSheet = ActionSheetController(title: title)
actionSheet.addAction(ActionSheetAction(
title: CommonStrings.okButton,
style: .default,
handler: { [weak self] _ in
guard reason == .hasMaxDevices else { return }
self?.dismissCall()
}
))
presentActionSheet(actionSheet)
}
func callMessageSendFailedUntrustedIdentity(_ call: SignalCall) {
AssertIsOnMainThread()
guard call == self.call else { return owsFailDebug("Unexpected call \(call)") }
if !hasUnresolvedSafetyNumberMismatch {
hasUnresolvedSafetyNumberMismatch = true
resolveSafetyNumberMismatch()
}
}
}
extension GroupCallViewController: CallControlsDelegate {
func didPressHangup(sender: UIButton) {
dismissCall()
}
func didPressAudioSource(sender: UIButton) {
if callService.audioService.hasExternalInputs {
callService.audioService.presentRoutePicker()
} else {
sender.isSelected = !sender.isSelected
callService.audioService.requestSpeakerphone(isEnabled: sender.isSelected)
}
}
func didPressMute(sender: UIButton) {
sender.isSelected = !sender.isSelected
callService.updateIsLocalAudioMuted(isLocalAudioMuted: sender.isSelected)
}
func didPressVideo(sender: UIButton) {
sender.isSelected = !sender.isSelected
callService.updateIsLocalVideoMuted(isLocalVideoMuted: !sender.isSelected)
// When turning off video, default speakerphone to on.
if !sender.isSelected && !callService.audioService.hasExternalInputs {
callControls.audioSourceButton.isSelected = true
callService.audioService.requestSpeakerphone(isEnabled: true)
}
}
func didPressFlipCamera(sender: UIButton) {
sender.isSelected = !sender.isSelected
callService.updateCameraSource(call: call, isUsingFrontCamera: !sender.isSelected)
}
func didPressCancel(sender: UIButton) {
dismissCall()
}
func didPressJoin(sender: UIButton) {
presentSafetyNumberChangeSheetIfNecessary { [weak self] success in
guard let self = self else { return }
if success {
self.callService.joinGroupCallIfNecessary(self.call)
}
}
}
}
extension GroupCallViewController: CallHeaderDelegate {
func didTapBackButton() {
if groupCall.localDeviceState.joinState == .joined {
isCallMinimized = true
OWSWindowManager.shared.leaveCallView()
} else {
dismissCall()
}
}
func didTapMembersButton() {
let sheet = GroupCallMemberSheet(call: call)
present(sheet, animated: true)
}
}
extension GroupCallViewController: GroupCallVideoOverflowDelegate {
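// When the scroll view is on the speaker page, only the active speaker is rendered full screen, so the
// overflow starts at index 1; on the grid page it starts after the last cell the grid can display.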
var firstOverflowMemberIndex: Int {
if scrollView.contentOffset.y >= view.height {
return 1
} else {
return videoGrid.maxItems
}
}
}
extension GroupCallViewController: UIScrollViewDelegate {
func scrollViewDidScroll(_ scrollView: UIScrollView) {
// If we changed pages, update the overflow view.
if scrollView.contentOffset.y == 0 || scrollView.contentOffset.y == view.height {
videoOverflow.reloadData()
updateCallUI()
}
if isAutoScrollingToScreenShare {
isAutoScrollingToScreenShare = scrollView.contentOffset.y != speakerView.frame.origin.y
}
updateSwipeToastView()
}
}
extension GroupCallViewController: GroupCallMemberViewDelegate {
func memberView(_ view: GroupCallMemberView, userRequestedInfoAboutError error: GroupCallMemberView.ErrorState) {
let title: String
let message: String
switch error {
case let .blocked(address):
message = NSLocalizedString(
"GROUP_CALL_BLOCKED_ALERT_MESSAGE",
comment: "Message body for alert explaining that a group call participant is blocked")
let titleFormat = NSLocalizedString(
"GROUP_CALL_BLOCKED_ALERT_TITLE_FORMAT",
comment: "Title for alert explaining that a group call participant is blocked. Embeds {{ user's name }}")
let displayName = contactsManager.displayName(for: address)
title = String(format: titleFormat, displayName)
case let .noMediaKeys(address):
message = NSLocalizedString(
"GROUP_CALL_NO_KEYS_ALERT_MESSAGE",
comment: "Message body for alert explaining that a group call participant cannot be displayed because of missing keys")
let titleFormat = NSLocalizedString(
"GROUP_CALL_NO_KEYS_ALERT_TITLE_FORMAT",
comment: "Title for alert explaining that a group call participant cannot be displayed because of missing keys. Embeds {{ user's name }}")
let displayName = contactsManager.displayName(for: address)
title = String(format: titleFormat, displayName)
}
let actionSheet = ActionSheetController(title: title, message: message, theme: .translucentDark)
actionSheet.addAction(ActionSheetAction(title: CommonStrings.okButton))
presentActionSheet(actionSheet)
}
}

View File

@ -0,0 +1,152 @@
//
// Copyright (c) 2020 Open Whisper Systems. All rights reserved.
//
import UIKit
import CallKit
import SignalServiceKit
/**
* Requests actions from CallKit
*
* @Discussion:
* Based on SpeakerboxCallManager, from the Apple CallKit Example app. Its responsibilities are mostly
* mirrored in (and delegated from) CallKitCallUIAdaptee.
* TODO: Would it simplify things to merge this into CallKitCallUIAdaptee?
*/
final class CallKitCallManager: NSObject {
let callController = CXCallController()
let showNamesOnCallScreen: Bool
@objc
static let kAnonymousCallHandlePrefix = "Signal:"
required init(showNamesOnCallScreen: Bool) {
AssertIsOnMainThread()
self.showNamesOnCallScreen = showNamesOnCallScreen
super.init()
// We cannot assert singleton here, because this class gets rebuilt when the user changes relevant call settings
}
// MARK: Actions
func startCall(_ call: SignalCall) {
let handle: CXHandle
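// When names are shown on the call screen, hand CallKit a real handle; otherwise use an anonymous
// identifier and keep the mapping back to the address in CallKitIdStore.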
if showNamesOnCallScreen {
let type: CXHandle.HandleType
let value: String
if let phoneNumber = call.individualCall.remoteAddress.phoneNumber {
type = .phoneNumber
value = phoneNumber
} else {
type = .generic
value = call.individualCall.remoteAddress.stringForDisplay
}
handle = CXHandle(type: type, value: value)
} else {
let callKitId = CallKitCallManager.kAnonymousCallHandlePrefix + call.individualCall.localId.uuidString
handle = CXHandle(type: .generic, value: callKitId)
CallKitIdStore.setAddress(call.individualCall.remoteAddress, forCallKitId: callKitId)
}
let startCallAction = CXStartCallAction(call: call.individualCall.localId, handle: handle)
startCallAction.isVideo = call.individualCall.hasLocalVideo
let transaction = CXTransaction()
transaction.addAction(startCallAction)
requestTransaction(transaction)
}
func localHangup(call: SignalCall) {
let endCallAction = CXEndCallAction(call: call.individualCall.localId)
let transaction = CXTransaction()
transaction.addAction(endCallAction)
requestTransaction(transaction)
}
func setHeld(call: SignalCall, onHold: Bool) {
let setHeldCallAction = CXSetHeldCallAction(call: call.individualCall.localId, onHold: onHold)
let transaction = CXTransaction()
transaction.addAction(setHeldCallAction)
requestTransaction(transaction)
}
func setIsMuted(call: SignalCall, isMuted: Bool) {
let muteCallAction = CXSetMutedCallAction(call: call.individualCall.localId, muted: isMuted)
let transaction = CXTransaction()
transaction.addAction(muteCallAction)
requestTransaction(transaction)
}
func answer(call: SignalCall) {
let answerCallAction = CXAnswerCallAction(call: call.individualCall.localId)
let transaction = CXTransaction()
transaction.addAction(answerCallAction)
requestTransaction(transaction)
}
private func requestTransaction(_ transaction: CXTransaction) {
callController.request(transaction) { error in
if let error = error {
Logger.error("Error requesting transaction: \(error)")
} else {
Logger.debug("Requested transaction successfully")
}
}
}
// MARK: Call Management
private(set) var calls = [SignalCall]()
func callWithLocalId(_ localId: UUID) -> SignalCall? {
guard let index = calls.firstIndex(where: { $0.individualCall.localId == localId }) else {
return nil
}
return calls[index]
}
func addCall(_ call: SignalCall) {
Logger.verbose("call: \(call)")
owsAssertDebug(call.isIndividualCall)
call.individualCall.wasReportedToSystem = true
calls.append(call)
}
func removeCall(_ call: SignalCall) {
Logger.verbose("call: \(call)")
owsAssertDebug(call.isIndividualCall)
call.individualCall.wasRemovedFromSystem = true
guard calls.removeFirst(where: { $0 === call }) != nil else {
Logger.warn("no call matching: \(call) to remove")
return
}
}
func removeAllCalls() {
Logger.verbose("")
calls.forEach { $0.individualCall.wasRemovedFromSystem = true }
calls.removeAll()
}
}
fileprivate extension Array {
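/// Removes and returns the first element matching `predicate`, or nil if no element matches.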
mutating func removeFirst(where predicate: (Element) throws -> Bool) rethrows -> Element? {
guard let index = try firstIndex(where: predicate) else {
return nil
}
return remove(at: index)
}
}

View File

@ -0,0 +1,457 @@
//
// Copyright (c) 2021 Open Whisper Systems. All rights reserved.
//
import Foundation
import UIKit
import CallKit
import AVFoundation
import SignalServiceKit
import SignalMessaging
/**
* Connects user interface to the CallService using CallKit.
*
* User interface is routed to the CallManager which requests CXCallActions, and if the CXProvider accepts them,
* their corresponding consequences are implemented in the CXProviderDelegate methods, e.g. using the CallService
*/
final class CallKitCallUIAdaptee: NSObject, CallUIAdaptee, CXProviderDelegate {
private let callManager: CallKitCallManager
private let showNamesOnCallScreen: Bool
private let provider: CXProvider
private let audioActivity: AudioActivity
// CallKit handles incoming ringer stop/start for us. Yay!
let hasManualRinger = false
// Instantiating more than one CXProvider can cause us to miss call transactions, so
// we maintain the provider across Adaptees using a singleton pattern
private static var _sharedProvider: CXProvider?
class func sharedProvider(useSystemCallLog: Bool) -> CXProvider {
let configuration = buildProviderConfiguration(useSystemCallLog: useSystemCallLog)
if let sharedProvider = self._sharedProvider {
sharedProvider.configuration = configuration
return sharedProvider
} else {
SwiftSingletons.register(self)
let provider = CXProvider(configuration: configuration)
_sharedProvider = provider
return provider
}
}
// The app's provider configuration, representing its CallKit capabilities
class func buildProviderConfiguration(useSystemCallLog: Bool) -> CXProviderConfiguration {
let localizedName = NSLocalizedString("APPLICATION_NAME", comment: "Name of application")
let providerConfiguration = CXProviderConfiguration(localizedName: localizedName)
providerConfiguration.supportsVideo = true
// Default maximumCallGroups is 2. We previously overrode this value to be 1.
//
// The terminology can be confusing. Even though we don't currently support "group calls"
// *every* call is in a call group. Our call groups all just happen to be "groups" with 1
// call in them.
//
// maximumCallGroups limits how many different calls CallKit can know about at one time.
// Exceeding this limit will cause CallKit to error when reporting an additional call.
//
// Generally for us, the number of call groups is 1 or 0, *however* when handling a rapid
// sequence of offers and hangups, due to the async nature of CXTransactions, there can
// be a brief moment where the old limit of 1 caused CallKit to fail the newly reported
// call, even though we were properly requesting hangup of the old call before reporting the
// new incoming call.
//
// Specifically after 10 or so rapid fire call/hangup/call/hangup, eventually an incoming
// call would fail to report due to CXErrorCodeRequestTransactionErrorMaximumCallGroupsReached
//
// ...so that's why we no longer use the non-default value of 1, which I assume was only ever
// set to 1 out of confusion.
// providerConfiguration.maximumCallGroups = 1
providerConfiguration.maximumCallsPerCallGroup = 1
providerConfiguration.supportedHandleTypes = [.phoneNumber, .generic]
let iconMaskImage = #imageLiteral(resourceName: "signal-logo-128")
providerConfiguration.iconTemplateImageData = iconMaskImage.pngData()
// We don't set the ringtoneSound property, so that we use either the
// default iOS ringtone OR the custom ringtone associated with this user's
// system contact.
providerConfiguration.includesCallsInRecents = useSystemCallLog
return providerConfiguration
}
init(showNamesOnCallScreen: Bool, useSystemCallLog: Bool) {
AssertIsOnMainThread()
Logger.debug("")
self.callManager = CallKitCallManager(showNamesOnCallScreen: showNamesOnCallScreen)
self.provider = type(of: self).sharedProvider(useSystemCallLog: useSystemCallLog)
self.audioActivity = AudioActivity(audioDescription: "[CallKitCallUIAdaptee]", behavior: .call)
self.showNamesOnCallScreen = showNamesOnCallScreen
super.init()
// We cannot assert singleton here, because this class gets rebuilt when the user changes relevant call settings
self.provider.setDelegate(self, queue: nil)
}
// MARK: CallUIAdaptee
func startOutgoingCall(call: SignalCall) {
AssertIsOnMainThread()
Logger.info("")
// make sure we don't terminate audio session during call
_ = self.audioSession.startAudioActivity(call.audioActivity)
// Add the new outgoing call to the app's list of calls.
// So we can find it in the provider delegate callbacks.
callManager.addCall(call)
callManager.startCall(call)
}
// Called from CallService after call has ended to clean up any remaining CallKit call state.
func failCall(_ call: SignalCall, error: SignalCall.CallError) {
AssertIsOnMainThread()
Logger.info("")
switch error {
case .timeout(description: _):
provider.reportCall(with: call.individualCall.localId, endedAt: Date(), reason: CXCallEndedReason.unanswered)
default:
provider.reportCall(with: call.individualCall.localId, endedAt: Date(), reason: CXCallEndedReason.failed)
}
callManager.removeCall(call)
}
func reportIncomingCall(_ call: SignalCall, callerName: String, completion: @escaping (Error?) -> Void) {
AssertIsOnMainThread()
Logger.info("")
// Construct a CXCallUpdate describing the incoming call, including the caller.
let update = CXCallUpdate()
if showNamesOnCallScreen {
update.localizedCallerName = contactsManager.displayName(for: call.individualCall.remoteAddress)
if let phoneNumber = call.individualCall.remoteAddress.phoneNumber {
update.remoteHandle = CXHandle(type: .phoneNumber, value: phoneNumber)
}
} else {
let callKitId = CallKitCallManager.kAnonymousCallHandlePrefix + call.individualCall.localId.uuidString
update.remoteHandle = CXHandle(type: .generic, value: callKitId)
CallKitIdStore.setAddress(call.individualCall.remoteAddress, forCallKitId: callKitId)
update.localizedCallerName = NSLocalizedString("CALLKIT_ANONYMOUS_CONTACT_NAME", comment: "The generic name used for calls if CallKit privacy is enabled")
}
update.hasVideo = call.individualCall.offerMediaType == .video
disableUnsupportedFeatures(callUpdate: update)
// Report the incoming call to the system
provider.reportNewIncomingCall(with: call.individualCall.localId, update: update) { error in
/*
Only add incoming call to the app's list of calls if the call was allowed (i.e. there was no error)
since calls may be "denied" for various legitimate reasons. See CXErrorCodeIncomingCallError.
*/
guard error == nil else {
completion(error)
Logger.error("failed to report new incoming call, error: \(error!)")
return
}
completion(nil)
self.showCall(call)
self.callManager.addCall(call)
}
}
func answerCall(localId: UUID) {
AssertIsOnMainThread()
Logger.info("")
owsFailDebug("CallKit should answer calls via system call screen, not via notifications.")
}
func answerCall(_ call: SignalCall) {
AssertIsOnMainThread()
Logger.info("")
callManager.answer(call: call)
}
private var ignoreFirstUnmuteAfterRemoteAnswer = false
func recipientAcceptedCall(_ call: SignalCall) {
AssertIsOnMainThread()
Logger.info("")
self.provider.reportOutgoingCall(with: call.individualCall.localId, connectedAt: nil)
let update = CXCallUpdate()
disableUnsupportedFeatures(callUpdate: update)
provider.reportCall(with: call.individualCall.localId, updated: update)
// When we tell CallKit about the call, it tries
// to unmute the call. We can work around this
// by ignoring the next "unmute" request from
// CallKit after the call is answered.
ignoreFirstUnmuteAfterRemoteAnswer = call.individualCall.isMuted
}
func localHangupCall(localId: UUID) {
AssertIsOnMainThread()
owsFailDebug("CallKit should decline calls via system call screen, not via notifications.")
}
func localHangupCall(_ call: SignalCall) {
AssertIsOnMainThread()
Logger.info("")
callManager.localHangup(call: call)
}
func remoteDidHangupCall(_ call: SignalCall) {
AssertIsOnMainThread()
Logger.info("")
provider.reportCall(with: call.individualCall.localId, endedAt: nil, reason: CXCallEndedReason.remoteEnded)
callManager.removeCall(call)
}
func remoteBusy(_ call: SignalCall) {
AssertIsOnMainThread()
Logger.info("")
provider.reportCall(with: call.individualCall.localId, endedAt: nil, reason: CXCallEndedReason.unanswered)
callManager.removeCall(call)
}
func didAnswerElsewhere(call: SignalCall) {
AssertIsOnMainThread()
Logger.info("")
provider.reportCall(with: call.individualCall.localId, endedAt: nil, reason: .answeredElsewhere)
callManager.removeCall(call)
}
func didDeclineElsewhere(call: SignalCall) {
AssertIsOnMainThread()
Logger.info("")
provider.reportCall(with: call.individualCall.localId, endedAt: nil, reason: .declinedElsewhere)
callManager.removeCall(call)
}
func setIsMuted(call: SignalCall, isMuted: Bool) {
AssertIsOnMainThread()
Logger.info("")
callManager.setIsMuted(call: call, isMuted: isMuted)
}
func setHasLocalVideo(call: SignalCall, hasLocalVideo: Bool) {
AssertIsOnMainThread()
Logger.debug("")
let update = CXCallUpdate()
update.hasVideo = hasLocalVideo
// Update the CallKit UI.
provider.reportCall(with: call.individualCall.localId, updated: update)
self.callService.updateIsLocalVideoMuted(isLocalVideoMuted: !hasLocalVideo)
}
// MARK: CXProviderDelegate
func providerDidReset(_ provider: CXProvider) {
AssertIsOnMainThread()
Logger.info("")
// End any ongoing calls if the provider resets, and remove them from the app's list of calls,
// since they are no longer valid.
callService.individualCallService.handleCallKitProviderReset()
// Remove all calls from the app's list of calls.
callManager.removeAllCalls()
}
func provider(_ provider: CXProvider, perform action: CXStartCallAction) {
AssertIsOnMainThread()
Logger.info("CXStartCallAction")
guard let call = callManager.callWithLocalId(action.callUUID) else {
Logger.error("unable to find call")
return
}
// We can't wait for long before fulfilling the CXAction, else CallKit will show a "Failed Call". We don't
// actually need to wait for the outcome of the handleOutgoingCall promise, because it handles any errors by
// manually failing the call.
self.callService.individualCallService.handleOutgoingCall(call)
action.fulfill()
self.provider.reportOutgoingCall(with: call.individualCall.localId, startedConnectingAt: nil)
// Update the name used in the CallKit UI for outgoing calls when the user prefers not to show names
// in their notifications
if !showNamesOnCallScreen {
let update = CXCallUpdate()
update.localizedCallerName = NSLocalizedString("CALLKIT_ANONYMOUS_CONTACT_NAME",
comment: "The generic name used for calls if CallKit privacy is enabled")
provider.reportCall(with: call.individualCall.localId, updated: update)
}
}
func provider(_ provider: CXProvider, perform action: CXAnswerCallAction) {
AssertIsOnMainThread()
Logger.info("Received \(#function) CXAnswerCallAction")
// Retrieve the instance corresponding to the action's call UUID
guard let call = callManager.callWithLocalId(action.callUUID) else {
owsFailDebug("call as unexpectedly nil")
action.fail()
return
}
self.callService.individualCallService.handleAcceptCall(call)
action.fulfill()
}
public func provider(_ provider: CXProvider, perform action: CXEndCallAction) {
AssertIsOnMainThread()
Logger.info("Received \(#function) CXEndCallAction")
guard let call = callManager.callWithLocalId(action.callUUID) else {
Logger.error("trying to end unknown call with localId: \(action.callUUID)")
action.fail()
return
}
self.callService.individualCallService.handleLocalHangupCall(call)
// Signal to the system that the action has been successfully performed.
action.fulfill()
// Remove the ended call from the app's list of calls.
self.callManager.removeCall(call)
}
public func provider(_ provider: CXProvider, perform action: CXSetHeldCallAction) {
AssertIsOnMainThread()
Logger.info("Received \(#function) CXSetHeldCallAction")
guard let call = callManager.callWithLocalId(action.callUUID) else {
action.fail()
return
}
// Update the IndividualCall's underlying hold state.
self.callService.individualCallService.setIsOnHold(call: call, isOnHold: action.isOnHold)
// Signal to the system that the action has been successfully performed.
action.fulfill()
}
public func provider(_ provider: CXProvider, perform action: CXSetMutedCallAction) {
AssertIsOnMainThread()
Logger.info("Received \(#function) CXSetMutedCallAction")
guard nil != callManager.callWithLocalId(action.callUUID) else {
Logger.info("Failing CXSetMutedCallAction for unknown (ended?) call: \(action.callUUID)")
action.fail()
return
}
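// Skip the automatic unmute CallKit issues right after the remote party answers (see recipientAcceptedCall).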
defer { ignoreFirstUnmuteAfterRemoteAnswer = false }
guard !ignoreFirstUnmuteAfterRemoteAnswer || action.isMuted else { return }
self.callService.updateIsLocalAudioMuted(isLocalAudioMuted: action.isMuted)
action.fulfill()
}
public func provider(_ provider: CXProvider, perform action: CXSetGroupCallAction) {
AssertIsOnMainThread()
Logger.warn("unimplemented \(#function) for CXSetGroupCallAction")
}
public func provider(_ provider: CXProvider, perform action: CXPlayDTMFCallAction) {
AssertIsOnMainThread()
Logger.warn("unimplemented \(#function) for CXPlayDTMFCallAction")
}
func provider(_ provider: CXProvider, timedOutPerforming action: CXAction) {
AssertIsOnMainThread()
if #available(iOS 13, *), let muteAction = action as? CXSetMutedCallAction {
guard callManager.callWithLocalId(muteAction.callUUID) != nil else {
// When a call is over, if it was muted, CallKit "helpfully" attempts to unmute the
// call with "CXSetMutedCallAction", presumably to help us clean up state.
//
// That is, it calls func provider(_ provider: CXProvider, perform action: CXSetMutedCallAction)
//
// We don't need this - we have our own mechanism for coalescing audio state, so
// we acknowledge the action, but perform a no-op.
//
// However, regardless of fulfilling or failing the action, the action "times out"
// on iOS13. CallKit similarly "auto unmutes" ended calls on iOS12, but on iOS12
// it doesn't timeout.
//
// Presumably this is a regression in iOS13 - so we ignore it.
// #RADAR FB7568405
Logger.info("ignoring timeout for CXSetMutedCallAction for ended call: \(muteAction.callUUID)")
return
}
}
owsFailDebug("Timed out while performing \(action)")
}
func provider(_ provider: CXProvider, didActivate audioSession: AVAudioSession) {
AssertIsOnMainThread()
Logger.debug("Received")
_ = self.audioSession.startAudioActivity(self.audioActivity)
self.audioSession.isRTCAudioEnabled = true
}
func provider(_ provider: CXProvider, didDeactivate audioSession: AVAudioSession) {
AssertIsOnMainThread()
Logger.debug("Received")
self.audioSession.isRTCAudioEnabled = false
self.audioSession.endAudioActivity(self.audioActivity)
}
// MARK: - Util
private func disableUnsupportedFeatures(callUpdate: CXCallUpdate) {
// Call Holding is failing to restart audio when "swapping" calls on the CallKit screen
// until user returns to in-app call screen.
callUpdate.supportsHolding = false
// Not yet supported
callUpdate.supportsGrouping = false
callUpdate.supportsUngrouping = false
// Is there any reason to support this?
callUpdate.supportsDTMF = false
}
}

View File

@ -0,0 +1,312 @@
//
// Copyright (c) 2021 Open Whisper Systems. All rights reserved.
//
import Foundation
import PromiseKit
import CallKit
import SignalServiceKit
import SignalMessaging
import WebRTC
protocol CallUIAdaptee {
var notificationPresenter: NotificationPresenter { get }
var callService: CallService { get }
var hasManualRinger: Bool { get }
func startOutgoingCall(call: SignalCall)
func reportIncomingCall(_ call: SignalCall, callerName: String, completion: @escaping (Error?) -> Void)
func reportMissedCall(_ call: SignalCall, callerName: String)
func answerCall(localId: UUID)
func answerCall(_ call: SignalCall)
func recipientAcceptedCall(_ call: SignalCall)
func localHangupCall(localId: UUID)
func localHangupCall(_ call: SignalCall)
func remoteDidHangupCall(_ call: SignalCall)
func remoteBusy(_ call: SignalCall)
func didAnswerElsewhere(call: SignalCall)
func didDeclineElsewhere(call: SignalCall)
func failCall(_ call: SignalCall, error: SignalCall.CallError)
func setIsMuted(call: SignalCall, isMuted: Bool)
func setHasLocalVideo(call: SignalCall, hasLocalVideo: Bool)
func startAndShowOutgoingCall(address: SignalServiceAddress, hasLocalVideo: Bool)
}
// Shared default implementations
extension CallUIAdaptee {
internal func showCall(_ call: SignalCall) {
AssertIsOnMainThread()
let callViewController = IndividualCallViewController(call: call)
callViewController.modalTransitionStyle = .crossDissolve
OWSWindowManager.shared.startCall(callViewController)
}
internal func reportMissedCall(_ call: SignalCall, callerName: String) {
AssertIsOnMainThread()
notificationPresenter.presentMissedCall(call.individualCall, callerName: callerName)
}
internal func startAndShowOutgoingCall(address: SignalServiceAddress, hasLocalVideo: Bool) {
AssertIsOnMainThread()
guard let call = self.callService.buildOutgoingIndividualCallIfPossible(
address: address,
hasVideo: hasLocalVideo
) else {
// @integration This is not unexpected; it could happen if Bob tries
// to start an outgoing call at the same moment Alice has already
// sent him an Offer that is being processed.
Logger.info("found an existing call when trying to start outgoing call: \(address)")
return
}
Logger.debug("")
startOutgoingCall(call: call)
call.individualCall.hasLocalVideo = hasLocalVideo
self.showCall(call)
}
}
/**
* Notify the user of call related activities.
* Driven by either a CallKit or System notifications adaptee
*/
@objc
public class CallUIAdapter: NSObject, CallServiceObserver {
lazy var nonCallKitAdaptee = NonCallKitCallUIAdaptee()
lazy var callKitAdaptee: CallKitCallUIAdaptee? = {
if Platform.isSimulator {
// CallKit doesn't seem entirely supported in simulator.
// e.g. you can't receive calls in the call screen.
// So we use the non-CallKit call UI.
Logger.info("not using callkit adaptee for simulator.")
return nil
} else if CallUIAdapter.isCallkitDisabledForLocale {
Logger.info("not using callkit adaptee due to locale.")
return nil
} else {
Logger.info("using callkit adaptee for iOS11+")
let showNames = preferences.notificationPreviewType() != .noNameNoPreview
let useSystemCallLog = preferences.isSystemCallLogEnabled()
return CallKitCallUIAdaptee(showNamesOnCallScreen: showNames,
useSystemCallLog: useSystemCallLog)
}
}()
var defaultAdaptee: CallUIAdaptee { callKitAdaptee ?? nonCallKitAdaptee }
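// Calls explicitly flagged as non-CallKit use the fallback UI; everything else uses the default adaptee
// (CallKit when available, the non-CallKit UI otherwise).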
func adaptee(for call: SignalCall) -> CallUIAdaptee {
switch call.individualCall.callAdapterType {
case .nonCallKit: return nonCallKitAdaptee
case .default: return defaultAdaptee
}
}
public required override init() {
AssertIsOnMainThread()
super.init()
// We cannot assert singleton here, because this class gets rebuilt when the user changes relevant call settings
AppReadiness.runNowOrWhenAppDidBecomeReadySync {
self.callService.addObserverAndSyncState(observer: self)
}
}
@objc
public static var isCallkitDisabledForLocale: Bool {
let locale = Locale.current
guard let regionCode = locale.regionCode else {
if !Platform.isSimulator { owsFailDebug("Missing region code.") }
return false
}
// Apple has stopped approving apps that use CallKit functionality in mainland China.
// When the "CN" region is enabled, this check simply switches to the same pre-CallKit
// interface that is still used by everyone on iOS 9.
//
// For further reference: https://forums.developer.apple.com/thread/103083
return regionCode == "CN"
}
// MARK:
internal func reportIncomingCall(_ call: SignalCall, thread: TSContactThread) {
AssertIsOnMainThread()
Logger.info("remoteAddress: \(call.individualCall.remoteAddress)")
// make sure we don't terminate audio session during call
_ = audioSession.startAudioActivity(call.audioActivity)
let callerName = self.contactsManager.displayName(for: call.individualCall.remoteAddress)
Logger.verbose("callerName: \(callerName)")
adaptee(for: call).reportIncomingCall(call, callerName: callerName) { error in
AssertIsOnMainThread()
guard let error = error else { return }
owsFailDebug("Failed to report incoming call with error \(error)")
let nsError = error as NSError
Logger.warn("nsError: \(nsError.domain), \(nsError.code)")
if nsError.domain == CXErrorCodeIncomingCallError.errorDomain {
switch nsError.code {
case CXErrorCodeIncomingCallError.unknown.rawValue:
Logger.warn("unknown")
case CXErrorCodeIncomingCallError.unentitled.rawValue:
Logger.warn("unentitled")
case CXErrorCodeIncomingCallError.callUUIDAlreadyExists.rawValue:
Logger.warn("callUUIDAlreadyExists")
case CXErrorCodeIncomingCallError.filteredByDoNotDisturb.rawValue:
Logger.warn("filteredByDoNotDisturb")
case CXErrorCodeIncomingCallError.filteredByBlockList.rawValue:
Logger.warn("filteredByBlockList")
default:
Logger.warn("Unknown CXErrorCodeIncomingCallError")
}
}
self.callService.handleFailedCall(failedCall: call, error: error)
}
}
internal func reportMissedCall(_ call: SignalCall) {
AssertIsOnMainThread()
let callerName = self.contactsManager.displayName(for: call.individualCall.remoteAddress)
adaptee(for: call).reportMissedCall(call, callerName: callerName)
}
internal func startOutgoingCall(call: SignalCall) {
AssertIsOnMainThread()
adaptee(for: call).startOutgoingCall(call: call)
}
@objc public func answerCall(localId: UUID) {
AssertIsOnMainThread()
guard let call = self.callService.currentCall else {
owsFailDebug("No current call.")
return
}
guard call.individualCall.localId == localId else {
owsFailDebug("localId does not match current call")
return
}
adaptee(for: call).answerCall(localId: localId)
}
internal func answerCall(_ call: SignalCall) {
AssertIsOnMainThread()
adaptee(for: call).answerCall(call)
}
@objc public func startAndShowOutgoingCall(address: SignalServiceAddress, hasLocalVideo: Bool) {
AssertIsOnMainThread()
defaultAdaptee.startAndShowOutgoingCall(address: address, hasLocalVideo: hasLocalVideo)
}
internal func recipientAcceptedCall(_ call: SignalCall) {
AssertIsOnMainThread()
adaptee(for: call).recipientAcceptedCall(call)
}
internal func remoteDidHangupCall(_ call: SignalCall) {
AssertIsOnMainThread()
adaptee(for: call).remoteDidHangupCall(call)
}
internal func remoteBusy(_ call: SignalCall) {
AssertIsOnMainThread()
adaptee(for: call).remoteBusy(call)
}
internal func didAnswerElsewhere(call: SignalCall) {
adaptee(for: call).didAnswerElsewhere(call: call)
}
internal func didDeclineElsewhere(call: SignalCall) {
adaptee(for: call).didDeclineElsewhere(call: call)
}
internal func localHangupCall(localId: UUID) {
AssertIsOnMainThread()
guard let call = self.callService.currentCall else {
owsFailDebug("No current call.")
return
}
guard call.individualCall.localId == localId else {
owsFailDebug("localId does not match current call")
return
}
adaptee(for: call).localHangupCall(localId: localId)
}
internal func localHangupCall(_ call: SignalCall) {
AssertIsOnMainThread()
adaptee(for: call).localHangupCall(call)
}
internal func failCall(_ call: SignalCall, error: SignalCall.CallError) {
AssertIsOnMainThread()
adaptee(for: call).failCall(call, error: error)
}
internal func showCall(_ call: SignalCall) {
AssertIsOnMainThread()
adaptee(for: call).showCall(call)
}
internal func setIsMuted(call: SignalCall, isMuted: Bool) {
AssertIsOnMainThread()
// With CallKit, muting is handled by a CXAction, so it must go through the adaptee
adaptee(for: call).setIsMuted(call: call, isMuted: isMuted)
}
internal func setHasLocalVideo(call: SignalCall, hasLocalVideo: Bool) {
AssertIsOnMainThread()
adaptee(for: call).setHasLocalVideo(call: call, hasLocalVideo: hasLocalVideo)
}
internal func setCameraSource(call: SignalCall, isUsingFrontCamera: Bool) {
AssertIsOnMainThread()
callService.updateCameraSource(call: call, isUsingFrontCamera: isUsingFrontCamera)
}
// MARK: - CallServiceObserver
internal func didUpdateCall(from oldValue: SignalCall?, to newValue: SignalCall?) {
AssertIsOnMainThread()
guard let call = newValue, call.isIndividualCall else { return }
callService.audioService.handleRinging = adaptee(for: call).hasManualRinger
}
}

File diff suppressed because it is too large

View File

@ -0,0 +1,183 @@
//
// Copyright (c) 2021 Open Whisper Systems. All rights reserved.
//
import Foundation
import SignalServiceKit
import SignalMessaging
/**
* Manage call related UI in a pre-CallKit world.
*/
class NonCallKitCallUIAdaptee: NSObject, CallUIAdaptee {
// Starting/stopping incoming call ringing is our app's responsibility for the non-CallKit interface.
let hasManualRinger = true
required override init() {
AssertIsOnMainThread()
super.init()
}
// MARK:
func startOutgoingCall(call: SignalCall) {
AssertIsOnMainThread()
// make sure we don't terminate audio session during call
let success = self.audioSession.startAudioActivity(call.audioActivity)
assert(success)
self.callService.individualCallService.handleOutgoingCall(call)
}
func reportIncomingCall(_ call: SignalCall, callerName: String, completion: @escaping (Error?) -> Void) {
AssertIsOnMainThread()
Logger.debug("")
self.showCall(call)
startNotifyingForIncomingCall(call, callerName: callerName)
completion(nil)
}
private var incomingCallNotificationTimer: Timer?
private func startNotifyingForIncomingCall(_ call: SignalCall, callerName: String) {
incomingCallNotificationTimer?.invalidate()
incomingCallNotificationTimer = nil
// Present a lock screen notification if we're in the background.
// We re-present the notification every 3 seconds to make sure
// the user sees that their phone is ringing.
incomingCallNotificationTimer = Timer.scheduledTimer(withTimeInterval: 3, repeats: true) { [weak self] _ in
guard call.individualCall.state == .localRinging else {
self?.incomingCallNotificationTimer?.invalidate()
self?.incomingCallNotificationTimer = nil
return
}
if UIApplication.shared.applicationState == .active {
Logger.debug("skipping notification since app is already active.")
} else {
self?.notificationPresenter.presentIncomingCall(call.individualCall, callerName: callerName)
}
}
}
func answerCall(localId: UUID) {
AssertIsOnMainThread()
guard let call = self.callService.currentCall else {
owsFailDebug("No current call.")
return
}
guard call.individualCall.localId == localId else {
owsFailDebug("localId does not match current call")
return
}
self.answerCall(call)
}
func answerCall(_ call: SignalCall) {
AssertIsOnMainThread()
guard call.individualCall.localId == self.callService.currentCall?.individualCall.localId else {
owsFailDebug("localId does not match current call")
return
}
self.audioSession.isRTCAudioEnabled = true
self.callService.individualCallService.handleAcceptCall(call)
}
func recipientAcceptedCall(_ call: SignalCall) {
AssertIsOnMainThread()
self.audioSession.isRTCAudioEnabled = true
}
func localHangupCall(localId: UUID) {
AssertIsOnMainThread()
guard let call = self.callService.currentCall else {
owsFailDebug("No current call.")
return
}
guard call.individualCall.localId == localId else {
owsFailDebug("localId does not match current call")
return
}
self.localHangupCall(call)
}
func localHangupCall(_ call: SignalCall) {
AssertIsOnMainThread()
// If both parties hang up at the same moment,
// call might already be nil.
guard self.callService.currentCall == nil || call.individualCall.localId == self.callService.currentCall?.individualCall.localId else {
owsFailDebug("localId does not match current call")
return
}
self.callService.individualCallService.handleLocalHangupCall(call)
}
internal func remoteDidHangupCall(_ call: SignalCall) {
AssertIsOnMainThread()
Logger.debug("is no-op")
}
internal func remoteBusy(_ call: SignalCall) {
AssertIsOnMainThread()
Logger.debug("is no-op")
}
internal func didAnswerElsewhere(call: SignalCall) {
AssertIsOnMainThread()
Logger.debug("is no-op")
}
internal func didDeclineElsewhere(call: SignalCall) {
AssertIsOnMainThread()
Logger.debug("is no-op")
}
internal func failCall(_ call: SignalCall, error: SignalCall.CallError) {
AssertIsOnMainThread()
Logger.debug("is no-op")
}
func setIsMuted(call: SignalCall, isMuted: Bool) {
AssertIsOnMainThread()
guard call.individualCall.localId == self.callService.currentCall?.individualCall.localId else {
owsFailDebug("localId does not match current call")
return
}
self.callService.updateIsLocalAudioMuted(isLocalAudioMuted: isMuted)
}
func setHasLocalVideo(call: SignalCall, hasLocalVideo: Bool) {
AssertIsOnMainThread()
guard call.individualCall.localId == self.callService.currentCall?.individualCall.localId else {
owsFailDebug("localId does not match current call")
return
}
self.callService.updateIsLocalVideoMuted(isLocalVideoMuted: !hasLocalVideo)
}
}
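For illustration, a minimal sketch of how the adapter above might be driven from a notification action handler; `callUIAdapter`, `handleCallNotificationAction`, and the action identifiers are hypothetical names and not part of this commit:

// Minimal sketch: routing a notification action to the adapter above.
// `callUIAdapter` is assumed to be an instance of this adapter held by the app.
func handleCallNotificationAction(_ actionIdentifier: String, localId: UUID) {
    AssertIsOnMainThread()
    switch actionIdentifier {
    case "answer-call":
        callUIAdapter.answerCall(localId: localId)
    case "decline-call":
        callUIAdapter.localHangupCall(localId: localId)
    default:
        Logger.debug("Unhandled call notification action: \(actionIdentifier)")
    }
}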

View File

@ -0,0 +1,117 @@
//
// Copyright (c) 2020 Open Whisper Systems. All rights reserved.
//
import Foundation
class LocalVideoView: UIView {
private let localVideoCapturePreview = RTCCameraPreviewView()
var captureSession: AVCaptureSession? {
set { localVideoCapturePreview.captureSession = newValue }
get { localVideoCapturePreview.captureSession }
}
override var contentMode: UIView.ContentMode {
didSet { localVideoCapturePreview.contentMode = contentMode }
}
override init(frame: CGRect) {
super.init(frame: .zero)
addSubview(localVideoCapturePreview)
if Platform.isSimulator {
backgroundColor = .green
}
NotificationCenter.default.addObserver(
self,
selector: #selector(updateLocalVideoOrientation),
name: UIDevice.orientationDidChangeNotification,
object: nil
)
}
required init?(coder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
override var frame: CGRect {
didSet { updateLocalVideoOrientation() }
}
@objc
func updateLocalVideoOrientation() {
defer { localVideoCapturePreview.frame = bounds }
// iPad supports rotating this view controller directly,
// so we don't need to do anything here.
guard !UIDevice.current.isIPad else { return }
// We lock this view to portrait only on phones, but the
// local video capture will rotate with the device's
// orientation (so the remote party will render your video
// in the correct orientation). As such, we need to rotate
// the local video preview layer so it *looks* like we're
// also always capturing in portrait.
switch UIDevice.current.orientation {
case .portrait:
localVideoCapturePreview.transform = .identity
case .portraitUpsideDown:
localVideoCapturePreview.transform = .init(rotationAngle: .pi)
case .landscapeLeft:
localVideoCapturePreview.transform = .init(rotationAngle: .halfPi)
case .landscapeRight:
localVideoCapturePreview.transform = .init(rotationAngle: .pi + .halfPi)
case .faceUp, .faceDown, .unknown:
break
@unknown default:
break
}
}
}
extension RTCCameraPreviewView {
var previewLayer: AVCaptureVideoPreviewLayer? {
return layer as? AVCaptureVideoPreviewLayer
}
open override var contentMode: UIView.ContentMode {
set {
guard let previewLayer = previewLayer else {
return owsFailDebug("missing preview layer")
}
switch newValue {
case .scaleAspectFill:
previewLayer.videoGravity = .resizeAspectFill
case .scaleAspectFit:
previewLayer.videoGravity = .resizeAspect
case .scaleToFill:
previewLayer.videoGravity = .resize
default:
owsFailDebug("Unexpected contentMode")
}
}
get {
guard let previewLayer = previewLayer else {
owsFailDebug("missing preview layer")
return .scaleToFill
}
switch previewLayer.videoGravity {
case .resizeAspectFill:
return .scaleAspectFill
case .resizeAspect:
return .scaleAspectFit
case .resize:
return .scaleToFill
default:
owsFailDebug("Unexpected contentMode")
return .scaleToFill
}
}
}
}
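For illustration, a small sketch (not part of this commit) of how this view is typically fed: the capture session comes from an `RTCCameraVideoCapturer`, with `videoSource` assumed to be an `RTCVideoSource` created elsewhere:

import WebRTC

// Sketch: wiring LocalVideoView to a WebRTC camera capturer.
// Camera selection, capture formats, and start/stop of capture are omitted.
let capturer = RTCCameraVideoCapturer(delegate: videoSource)
let localVideoView = LocalVideoView()
localVideoView.contentMode = .scaleAspectFill
localVideoView.captureSession = capturer.captureSession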

View File

@ -0,0 +1,21 @@
//
// Copyright (c) 2021 Open Whisper Systems. All rights reserved.
//
#import <WebRTC/RTCVideoRenderer.h>
NS_ASSUME_NONNULL_BEGIN
/**
* Drives the full-screen remote video. This is *not* a Swift class
* so we can take advantage of some compile-time constants from WebRTC.
*/
@interface RemoteVideoView : UIView <RTCVideoRenderer>
@property (nonatomic) BOOL isGroupCall;
@property (nonatomic) BOOL isScreenShare;
@property (nonatomic) BOOL isFullScreen;
@end
NS_ASSUME_NONNULL_END
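As a hedged usage sketch (not code from this commit), a remote `RTCVideoTrack` obtained from the peer connection could be rendered into this view from Swift; `remoteVideoTrack` is an assumed name:

// Sketch: RemoteVideoView conforms to RTCVideoRenderer, so it can be
// attached to a remote track directly.
let remoteVideoView = RemoteVideoView()
remoteVideoView.isGroupCall = false
remoteVideoView.isFullScreen = true
remoteVideoTrack.add(remoteVideoView)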

View File

@ -0,0 +1,289 @@
//
// Copyright (c) 2021 Open Whisper Systems. All rights reserved.
//
#import "RemoteVideoView.h"
#import "UIFont+OWS.h"
#import "UIView+OWS.h"
#import <MetalKit/MetalKit.h>
#import <PureLayout/PureLayout.h>
#import <SignalCoreKit/Threading.h>
#import <SignalMessaging/SignalMessaging-Swift.h>
#import <SignalServiceKit/SignalServiceKit-Swift.h>
#import <WebRTC/RTCEAGLVideoView.h>
#import <WebRTC/RTCMTLVideoView.h>
#import <WebRTC/RTCVideoFrame.h>
#import <WebRTC/RTCVideoRenderer.h>
#import <objc/runtime.h>
NS_ASSUME_NONNULL_BEGIN
#if defined(__arm64__)
#define DEVICE_SUPPORTS_METAL 1
#else
#define DEVICE_SUPPORTS_METAL 0
#endif
#pragma mark -
@interface RemoteVideoView ()
@property (nonatomic, nullable) __kindof UIView<RTCVideoRenderer> *videoRenderer;
@property (nonatomic) BOOL applyDefaultRendererConfigurationOnNextFrame;
@end
#if DEVICE_SUPPORTS_METAL
@interface RemoteVideoView (Metal)
@property (nonatomic, readonly, nullable) RTCMTLVideoView *metalRenderer;
- (void)setupMetalRenderer;
@end
@implementation RemoteVideoView (Metal)
- (void)setupMetalRenderer
{
RTCMTLVideoView *rtcMetalView = [[RTCMTLVideoView alloc] initWithFrame:CGRectZero];
self.videoRenderer = rtcMetalView;
[self addSubview:rtcMetalView];
[rtcMetalView autoPinEdgesToSuperviewEdges];
// We want the rendered video to go edge-to-edge.
rtcMetalView.layoutMargins = UIEdgeInsetsZero;
// HACK: Although the RTCMTLVideoView is positioned at the top edge of the screen,
// its inner (private) MTKView is below the status bar.
for (UIView *subview in [rtcMetalView subviews]) {
if ([subview isKindOfClass:[MTKView class]]) {
[subview autoPinEdgesToSuperviewEdges];
} else {
OWSFailDebug(@"New subviews added to MTLVideoView. Reconsider this hack.");
}
}
}
- (nullable RTCMTLVideoView *)metalRenderer
{
return (RTCMTLVideoView *)self.videoRenderer;
}
@end
#endif
#pragma mark -
@implementation RemoteVideoView
- (instancetype)init
{
self = [super init];
if (!self) {
return self;
}
#if DEVICE_SUPPORTS_METAL
[self setupMetalRenderer];
#endif
[self applyDefaultRendererConfiguration];
// Metal is not supported on the simulator, so we just set a
// background color for debugging purposes.
if (Platform.isSimulator) {
// For simulators just set a solid background color.
self.backgroundColor = [UIColor.blueColor colorWithAlphaComponent:0.4];
} else {
OWSAssertDebug(self.videoRenderer);
}
return self;
}
#pragma mark - RTCVideoRenderer
/** The size of the frame. */
- (void)setSize:(CGSize)size
{
[self.videoRenderer setSize:size];
}
/** The frame to be displayed. */
- (void)renderFrame:(nullable RTCVideoFrame *)frame
{
[self.videoRenderer renderFrame:frame];
#if DEVICE_SUPPORTS_METAL
DispatchMainThreadSafe(^{
if (self.applyDefaultRendererConfigurationOnNextFrame) {
self.applyDefaultRendererConfigurationOnNextFrame = NO;
[self applyDefaultRendererConfiguration];
}
if (self.isScreenShare) {
self.metalRenderer.videoContentMode = UIViewContentModeScaleAspectFit;
// Rotate the video so it's always right side up in landscape. We only
// allow portrait orientation in the calling views on iPhone so we don't
// get this for free. iPad allows all orientations so we can skip this.
if (self.isFullScreen && !UIDevice.currentDevice.isIPad) {
switch (UIDevice.currentDevice.orientation) {
case UIDeviceOrientationPortrait:
case UIDeviceOrientationPortraitUpsideDown:
// We don't have to do anything; the renderer will automatically
// make sure it's right-side-up.
self.metalRenderer.rotationOverride = nil;
break;
case UIDeviceOrientationLandscapeLeft:
switch (frame.rotation) {
// Portrait upside-down
case RTCVideoRotation_270:
self.metalRenderer.rotationOverride = @(RTCVideoRotation_0);
break;
// Portrait
case RTCVideoRotation_90:
self.metalRenderer.rotationOverride = @(RTCVideoRotation_180);
break;
// Landscape right
case RTCVideoRotation_180:
self.metalRenderer.rotationOverride = @(RTCVideoRotation_270);
break;
// Landscape left
case RTCVideoRotation_0:
self.metalRenderer.rotationOverride = @(RTCVideoRotation_90);
break;
}
break;
case UIDeviceOrientationLandscapeRight:
switch (frame.rotation) {
// Portrait upside-down
case RTCVideoRotation_270:
self.metalRenderer.rotationOverride = @(RTCVideoRotation_180);
break;
// Portrait
case RTCVideoRotation_90:
self.metalRenderer.rotationOverride = @(RTCVideoRotation_0);
break;
// Landscape right
case RTCVideoRotation_180:
self.metalRenderer.rotationOverride = @(RTCVideoRotation_90);
break;
// Landscape left
case RTCVideoRotation_0:
self.metalRenderer.rotationOverride = @(RTCVideoRotation_270);
break;
}
break;
default:
// Do nothing if we're face down, up, etc.
// Assume we're already set up for the correct orientation.
break;
}
} else {
self.metalRenderer.rotationOverride = nil;
}
} else if (UIDevice.currentDevice.isIPad || self.isGroupCall) {
BOOL isLandscape = self.width > self.height;
BOOL remoteIsLandscape = frame.rotation == RTCVideoRotation_180 || frame.rotation == RTCVideoRotation_0;
BOOL isSquarish = (MAX(self.width, self.height) / MIN(self.width, self.height)) <= 1.2;
self.metalRenderer.rotationOverride = nil;
// If we're both in the same orientation, let the video fill the screen.
// Otherwise, fit the video to the screen size respecting the aspect ratio.
if (isLandscape == remoteIsLandscape || isSquarish) {
self.metalRenderer.videoContentMode = UIViewContentModeScaleAspectFill;
} else {
self.metalRenderer.videoContentMode = UIViewContentModeScaleAspectFit;
}
} else {
// iPhones are locked to portrait mode. However, we want both
// portrait and portrait upside-down to be right side up in portrait.
// We want both landscape left and landscape right to be right side
// up in landscape. This means we sometimes force the rotation to
// portrait and sometimes to portrait upside-down, depending on
// the orientation of the incoming frames AND
// the device's current orientation, so that from the user's perspective
// everything always looks right-side-up.
switch (frame.rotation) {
// Portrait upside-down
case RTCVideoRotation_270:
// Portrait upside down renders in portrait
self.metalRenderer.rotationOverride = @(RTCVideoRotation_270);
break;
// Portrait
case RTCVideoRotation_90:
// Portrait renders in portrait
self.metalRenderer.rotationOverride = @(RTCVideoRotation_90);
break;
// Landscape right
case RTCVideoRotation_180:
// If the device is in landscape left, flip upside down
if (UIDevice.currentDevice.orientation == UIDeviceOrientationLandscapeLeft) {
self.metalRenderer.rotationOverride = @(RTCVideoRotation_270);
} else if (UIDevice.currentDevice.orientation == UIDeviceOrientationLandscapeRight) {
self.metalRenderer.rotationOverride = @(RTCVideoRotation_90);
}
break;
// Landscape left
case RTCVideoRotation_0:
// If the device is in landscape right, flip upside down
if (UIDevice.currentDevice.orientation == UIDeviceOrientationLandscapeRight) {
self.metalRenderer.rotationOverride = @(RTCVideoRotation_270);
} else if (UIDevice.currentDevice.orientation == UIDeviceOrientationLandscapeLeft) {
self.metalRenderer.rotationOverride = @(RTCVideoRotation_90);
}
break;
}
}
});
#endif
}
- (void)setIsScreenShare:(BOOL)isScreenShare
{
if (isScreenShare != _isScreenShare) {
self.applyDefaultRendererConfigurationOnNextFrame = YES;
}
_isScreenShare = isScreenShare;
}
- (void)setIsGroupCall:(BOOL)isGroupCall
{
if (isGroupCall != _isGroupCall) {
self.applyDefaultRendererConfigurationOnNextFrame = YES;
}
_isGroupCall = isGroupCall;
}
- (void)setIsFullScreen:(BOOL)isFullScreen
{
if (isFullScreen != _isFullScreen) {
self.applyDefaultRendererConfigurationOnNextFrame = YES;
}
_isFullScreen = isFullScreen;
}
- (void)applyDefaultRendererConfiguration
{
#if DEVICE_SUPPORTS_METAL
if (UIDevice.currentDevice.isIPad) {
self.metalRenderer.videoContentMode = UIViewContentModeScaleAspectFit;
self.metalRenderer.rotationOverride = nil;
} else {
self.metalRenderer.videoContentMode = UIViewContentModeScaleAspectFill;
self.metalRenderer.rotationOverride = nil;
}
#endif
}
@end
NS_ASSUME_NONNULL_END
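For readability, the iPad/group-call content-mode decision above can be restated as a small Swift helper (a sketch for illustration only; `remoteIsLandscape` corresponds to a frame rotation of 0 or 180 degrees in the code above):

import UIKit

// Sketch: fill the screen when the view and the remote frame share an
// orientation, or when the view is roughly square (ratio <= 1.2);
// otherwise letterbox the frame with aspect-fit.
func remoteVideoContentMode(viewSize: CGSize, remoteIsLandscape: Bool) -> UIView.ContentMode {
    let viewIsLandscape = viewSize.width > viewSize.height
    let isSquarish = max(viewSize.width, viewSize.height) / min(viewSize.width, viewSize.height) <= 1.2
    return (viewIsLandscape == remoteIsLandscape || isSquarish) ? .scaleAspectFill : .scaleAspectFit
}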

View File

@ -456,9 +456,6 @@ public enum OnionRequestAPI {
}
} else if let message = json?["result"] as? String, message == "Loki Server error" {
// Do nothing
} else if case .server(let host, _, _, _, _) = destination, host == "116.203.70.33" && statusCode == 0 {
// FIXME: Temporary thing to kick out nodes that can't talk to the V2 OGS yet
handleUnspecificError()
} else if statusCode == 0 { // Timeout
// Do nothing
} else {

View File

@ -8,8 +8,6 @@ public final class SnodeMessage : NSObject, NSCoding { // NSObject/NSCoding conf
public let data: LosslessStringConvertible
/// The time to live for the message in milliseconds.
public let ttl: UInt64
/// When the proof of work was calculated.
///
/// - Note: Expressed as milliseconds since 00:00:00 UTC on 1 January 1970.
public let timestamp: UInt64