Rework appearance of voice messages and audio attachments.

// FREEBIE
Author: Matthew Chen, 2017-05-11 15:26:37 -04:00
parent 9f2414b371
commit 96e155c75e
18 changed files with 404 additions and 165 deletions

View File

@@ -3,10 +3,10 @@ source 'https://github.com/CocoaPods/Specs.git'
target 'Signal' do
pod 'SocketRocket', :git => 'https://github.com/facebook/SocketRocket.git'
pod 'AxolotlKit', git: 'https://github.com/WhisperSystems/SignalProtocolKit.git'
#pod 'AxolotlKit', path: '../SignalProtocolKit'
pod 'SignalServiceKit', git: 'https://github.com/WhisperSystems/SignalServiceKit.git'
#pod 'SignalServiceKit', path: '../SignalServiceKit'
#pod 'AxolotlKit', git: 'https://github.com/WhisperSystems/SignalProtocolKit.git'
pod 'AxolotlKit', path: '../SignalProtocolKit'
#pod 'SignalServiceKit', git: 'https://github.com/WhisperSystems/SignalServiceKit.git'
pod 'SignalServiceKit', path: '../SignalServiceKit'
pod 'OpenSSL'
pod 'JSQMessagesViewController', git: 'https://github.com/WhisperSystems/JSQMessagesViewController.git', branch: 'mkirk/position-edit-menu'
#pod 'JSQMessagesViewController' path: '../JSQMessagesViewController'

View File

@@ -110,34 +110,28 @@ PODS:
- YapDatabase/SQLCipher/Core
DEPENDENCIES:
- AxolotlKit (from `https://github.com/WhisperSystems/SignalProtocolKit.git`)
- AxolotlKit (from `../SignalProtocolKit`)
- JSQMessagesViewController (from `https://github.com/WhisperSystems/JSQMessagesViewController.git`, branch `mkirk/position-edit-menu`)
- OpenSSL
- PureLayout
- SignalServiceKit (from `https://github.com/WhisperSystems/SignalServiceKit.git`)
- SignalServiceKit (from `../SignalServiceKit`)
- SocketRocket (from `https://github.com/facebook/SocketRocket.git`)
EXTERNAL SOURCES:
AxolotlKit:
:git: https://github.com/WhisperSystems/SignalProtocolKit.git
:path: ../SignalProtocolKit
JSQMessagesViewController:
:branch: mkirk/position-edit-menu
:git: https://github.com/WhisperSystems/JSQMessagesViewController.git
SignalServiceKit:
:git: https://github.com/WhisperSystems/SignalServiceKit.git
:path: ../SignalServiceKit
SocketRocket:
:git: https://github.com/facebook/SocketRocket.git
CHECKOUT OPTIONS:
AxolotlKit:
:commit: bce663486ac34c70594deae8260b3cd29dd086e9
:git: https://github.com/WhisperSystems/SignalProtocolKit.git
JSQMessagesViewController:
:commit: 7054e4b13ee5bcd6d524adb6dc9a726e8c466308
:git: https://github.com/WhisperSystems/JSQMessagesViewController.git
SignalServiceKit:
:commit: 2dc7c7cf292a3b97b356a67149c9d17684968f22
:git: https://github.com/WhisperSystems/SignalServiceKit.git
SocketRocket:
:commit: 877ac7438be3ad0b45ef5ca3969574e4b97112bf
:git: https://github.com/facebook/SocketRocket.git
@@ -164,6 +158,6 @@ SPEC CHECKSUMS:
UnionFind: c33be5adb12983981d6e827ea94fc7f9e370f52d
YapDatabase: b1e43555a34a5298e23a045be96817a5ef0da58f
PODFILE CHECKSUM: 549de6756fe8eab98647be8561b3988361f62e85
PODFILE CHECKSUM: aa4b7d6fd28dbc083b0342a47c1c96ca54a2899e
COCOAPODS: 1.2.1

View File

@@ -7,6 +7,7 @@
objects = {
/* Begin PBXBuildFile section */
34009B671EC4CB11001D95D1 /* OWSMath.m in Sources */ = {isa = PBXBuildFile; fileRef = 34009B661EC4CB11001D95D1 /* OWSMath.m */; };
3400C7931EAF89CD008A8584 /* SendExternalFileViewController.m in Sources */ = {isa = PBXBuildFile; fileRef = 3400C7911EAF89CD008A8584 /* SendExternalFileViewController.m */; };
3400C7961EAF99F4008A8584 /* SelectThreadViewController.m in Sources */ = {isa = PBXBuildFile; fileRef = 3400C7951EAF99F4008A8584 /* SelectThreadViewController.m */; };
3400C7991EAFB772008A8584 /* ThreadViewHelper.m in Sources */ = {isa = PBXBuildFile; fileRef = 3400C7981EAFB772008A8584 /* ThreadViewHelper.m */; };
@@ -71,6 +72,7 @@
34D5CCA91EAE3D30005515DB /* GroupViewHelper.m in Sources */ = {isa = PBXBuildFile; fileRef = 34D5CCA81EAE3D30005515DB /* GroupViewHelper.m */; };
34D5CCB11EAE7E7F005515DB /* SelectRecipientViewController.m in Sources */ = {isa = PBXBuildFile; fileRef = 34D5CCB01EAE7E7F005515DB /* SelectRecipientViewController.m */; };
34DFCB851E8E04B500053165 /* AddToBlockListViewController.m in Sources */ = {isa = PBXBuildFile; fileRef = 34DFCB841E8E04B500053165 /* AddToBlockListViewController.m */; };
34E3E5681EC4B19400495BAC /* AudioProgressView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 34E3E5671EC4B19400495BAC /* AudioProgressView.swift */; };
34FD93701E3BD43A00109093 /* OWSAnyTouchGestureRecognizer.m in Sources */ = {isa = PBXBuildFile; fileRef = 34FD936F1E3BD43A00109093 /* OWSAnyTouchGestureRecognizer.m */; };
450573FE1E78A06D00615BB4 /* OWS103EnableVideoCalling.m in Sources */ = {isa = PBXBuildFile; fileRef = 450573FD1E78A06D00615BB4 /* OWS103EnableVideoCalling.m */; };
4505C2BF1E648EA300CEBF41 /* ExperienceUpgrade.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4505C2BE1E648EA300CEBF41 /* ExperienceUpgrade.swift */; };
@@ -354,6 +356,8 @@
/* Begin PBXFileReference section */
1B5E7D6C9007F5E5761D79DD /* libPods-SignalTests.a */ = {isa = PBXFileReference; explicitFileType = archive.ar; includeInIndex = 0; path = "libPods-SignalTests.a"; sourceTree = BUILT_PRODUCTS_DIR; };
34009B651EC4CB11001D95D1 /* OWSMath.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = OWSMath.h; sourceTree = "<group>"; };
34009B661EC4CB11001D95D1 /* OWSMath.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = OWSMath.m; sourceTree = "<group>"; };
3400C7901EAF89CD008A8584 /* SendExternalFileViewController.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = SendExternalFileViewController.h; sourceTree = "<group>"; };
3400C7911EAF89CD008A8584 /* SendExternalFileViewController.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = SendExternalFileViewController.m; sourceTree = "<group>"; };
3400C7941EAF99F4008A8584 /* SelectThreadViewController.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = SelectThreadViewController.h; sourceTree = "<group>"; };
@@ -469,6 +473,7 @@
34D5CCB01EAE7E7F005515DB /* SelectRecipientViewController.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = SelectRecipientViewController.m; sourceTree = "<group>"; };
34DFCB831E8E04B400053165 /* AddToBlockListViewController.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = AddToBlockListViewController.h; sourceTree = "<group>"; };
34DFCB841E8E04B500053165 /* AddToBlockListViewController.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = AddToBlockListViewController.m; sourceTree = "<group>"; };
34E3E5671EC4B19400495BAC /* AudioProgressView.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = AudioProgressView.swift; sourceTree = "<group>"; };
34FD936E1E3BD43A00109093 /* OWSAnyTouchGestureRecognizer.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = OWSAnyTouchGestureRecognizer.h; path = views/OWSAnyTouchGestureRecognizer.h; sourceTree = "<group>"; };
34FD936F1E3BD43A00109093 /* OWSAnyTouchGestureRecognizer.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = OWSAnyTouchGestureRecognizer.m; path = views/OWSAnyTouchGestureRecognizer.m; sourceTree = "<group>"; };
450573FC1E78A06D00615BB4 /* OWS103EnableVideoCalling.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = OWS103EnableVideoCalling.h; path = Migrations/OWS103EnableVideoCalling.h; sourceTree = "<group>"; };
@@ -1256,11 +1261,14 @@
34D5CC951EA6AFAD005515DB /* OWSContactsSyncing.m */,
45CD81F01DC03A22004C9430 /* OWSLogger.h */,
45CD81F11DC03A22004C9430 /* OWSLogger.m */,
34009B651EC4CB11001D95D1 /* OWSMath.h */,
34009B661EC4CB11001D95D1 /* OWSMath.m */,
45666F541D9B2827008FE134 /* OWSScrubbingLogFormatter.h */,
45666F551D9B2827008FE134 /* OWSScrubbingLogFormatter.m */,
4579431C1E7C8CE9008ED0C0 /* Pastelog.h */,
4579431D1E7C8CE9008ED0C0 /* Pastelog.m */,
450DF2041E0D74AC003D14BE /* Platform.swift */,
4542F0951EBB9E9A00C7EE92 /* Promise+retainUntilComplete.swift */,
76EB04F518170B33006006FC /* StringUtil.h */,
76EB04F618170B33006006FC /* StringUtil.m */,
345670FF1E89A5F1006EE662 /* ThreadUtil.h */,
@@ -1273,7 +1281,6 @@
76EB04FB18170B33006006FC /* Util.h */,
45F170D51E315310003FC1F2 /* Weak.swift */,
45F170CB1E310E22003FC1F2 /* WeakTimer.swift */,
4542F0951EBB9E9A00C7EE92 /* Promise+retainUntilComplete.swift */,
);
path = util;
sourceTree = "<group>";
@@ -1291,6 +1298,7 @@
isa = PBXGroup;
children = (
452EA09D1EA7ABE00078744B /* AttachmentPointerView.swift */,
34E3E5671EC4B19400495BAC /* AudioProgressView.swift */,
45F3AEB51DFDE7900080CE33 /* AvatarImageView.swift */,
451764291DE939FD00EDB8B9 /* ContactCell.swift */,
451764281DE939FD00EDB8B9 /* ContactCell.xib */,
@@ -2063,6 +2071,7 @@
76EB058218170B33006006FC /* Environment.m in Sources */,
34B3F8921E8DF1710035BE1A /* SignalAttachment.swift in Sources */,
45464DBC1DFA041F001D3FD6 /* DataChannelMessage.swift in Sources */,
34E3E5681EC4B19400495BAC /* AudioProgressView.swift in Sources */,
450DF2051E0D74AC003D14BE /* Platform.swift in Sources */,
3472229F1EB22FFE00E53955 /* AddToGroupViewController.m in Sources */,
45666F561D9B2827008FE134 /* OWSScrubbingLogFormatter.m in Sources */,
@@ -2086,6 +2095,7 @@
45F170AC1E2F0351003FC1F2 /* CallAudioSession.swift in Sources */,
34B3F8801E8DF1700035BE1A /* InviteFlow.swift in Sources */,
B68EF9BB1C0B1EBD009C3DCD /* FLAnimatedImageView.m in Sources */,
34009B671EC4CB11001D95D1 /* OWSMath.m in Sources */,
45F170CC1E310E22003FC1F2 /* WeakTimer.swift in Sources */,
34B3F8871E8DF1700035BE1A /* NotificationSettingsViewController.m in Sources */,
A5E9D4BB1A65FAD800E4481C /* TSVideoAttachmentAdapter.m in Sources */,

View File

@@ -4,36 +4,43 @@
},
"DVTSourceControlWorkspaceBlueprintWorkingCopyStatesKey" : {
"5D79A077E31B3FE97A3C6613CBFFDD71C314D14C+++ED4C31A" : 0,
"37054CE35CE656680D6FFFA9EE19249E0D149C5E+++901E7D4" : 0,
"8176314449001F06FB0E5B588C62133EAA2FE911+++31C7255" : 9223372036854775807,
"8176314449001F06FB0E5B588C62133EAA2FE911+++72E8629" : 9223372036854775807,
"D74FB800F048CB516BB4BC70047F7CC676D291B9+++375B249" : 0,
"5D79A077E31B3FE97A3C6613CBFFDD71C314D14C+++0BB03DB" : 0,
"01DE8628B025BC69C8C7D8B4612D57BE2C08B62C+++6A1C9FC" : 0,
"ABB939127996C66F7E852A780552ADEEF03C6B13+++69179A3" : 0,
"5D79A077E31B3FE97A3C6613CBFFDD71C314D14C+++ED4C31A" : 0,
"90530B99EB0008E7A50951FDFBE02169118FA649+++EF2C0B3" : 0,
"D74FB800F048CB516BB4BC70047F7CC676D291B9+++375B249" : 0,
"37054CE35CE656680D6FFFA9EE19249E0D149C5E+++901E7D4" : 0,
"8176314449001F06FB0E5B588C62133EAA2FE911+++E19D6E3" : 9223372036854775807,
"5D79A077E31B3FE97A3C6613CBFFDD71C314D14C+++03D0758" : 0,
"37054CE35CE656680D6FFFA9EE19249E0D149C5E+++E57A04A" : 0,
"8176314449001F06FB0E5B588C62133EAA2FE911+++E19D6E3" : 9223372036854775807,
"90530B99EB0008E7A50951FDFBE02169118FA649+++EF2C0B3" : 0
"8176314449001F06FB0E5B588C62133EAA2FE911+++31C7255" : 9223372036854775807
},
"DVTSourceControlWorkspaceBlueprintIdentifierKey" : "D0F297E7-A82D-4657-A941-96B268F80ABC",
"DVTSourceControlWorkspaceBlueprintWorkingCopyPathsKey" : {
"5D79A077E31B3FE97A3C6613CBFFDD71C314D14C+++ED4C31A" : "Signal-iOS\/",
"37054CE35CE656680D6FFFA9EE19249E0D149C5E+++901E7D4" : "SignalServiceKit\/",
"8176314449001F06FB0E5B588C62133EAA2FE911+++31C7255" : "Signal-iOS-5\/Carthage\/",
"8176314449001F06FB0E5B588C62133EAA2FE911+++72E8629" : "Signal-iOS-2\/Carthage\/",
"D74FB800F048CB516BB4BC70047F7CC676D291B9+++375B249" : "Signal-iOS\/Pods\/",
"5D79A077E31B3FE97A3C6613CBFFDD71C314D14C+++0BB03DB" : "Signal-iOS-2\/",
"01DE8628B025BC69C8C7D8B4612D57BE2C08B62C+++6A1C9FC" : "SignalProtocolKit\/",
"ABB939127996C66F7E852A780552ADEEF03C6B13+++69179A3" : "SocketRocket\/",
"5D79A077E31B3FE97A3C6613CBFFDD71C314D14C+++ED4C31A" : "Signal-iOS\/",
"90530B99EB0008E7A50951FDFBE02169118FA649+++EF2C0B3" : "JSQMessagesViewController\/",
"D74FB800F048CB516BB4BC70047F7CC676D291B9+++375B249" : "Signal-iOS\/Pods\/",
"37054CE35CE656680D6FFFA9EE19249E0D149C5E+++901E7D4" : "SignalServiceKit\/",
"8176314449001F06FB0E5B588C62133EAA2FE911+++E19D6E3" : "Signal-iOS\/Carthage\/",
"5D79A077E31B3FE97A3C6613CBFFDD71C314D14C+++03D0758" : "Signal-iOS-5\/",
"37054CE35CE656680D6FFFA9EE19249E0D149C5E+++E57A04A" : "SignalServiceKit\/",
"8176314449001F06FB0E5B588C62133EAA2FE911+++E19D6E3" : "Signal-iOS\/Carthage\/",
"90530B99EB0008E7A50951FDFBE02169118FA649+++EF2C0B3" : "JSQMessagesViewController\/"
"8176314449001F06FB0E5B588C62133EAA2FE911+++31C7255" : "Signal-iOS-5\/Carthage\/"
},
"DVTSourceControlWorkspaceBlueprintNameKey" : "Signal",
"DVTSourceControlWorkspaceBlueprintVersion" : 204,
"DVTSourceControlWorkspaceBlueprintRelativePathToProjectKey" : "Signal.xcworkspace",
"DVTSourceControlWorkspaceBlueprintRemoteRepositoriesKey" : [
{
"DVTSourceControlWorkspaceBlueprintRemoteRepositoryURLKey" : "github.com:WhisperSystems\/SignalProtocolKit.git",
"DVTSourceControlWorkspaceBlueprintRemoteRepositorySystemKey" : "com.apple.dt.Xcode.sourcecontrol.Git",
"DVTSourceControlWorkspaceBlueprintRemoteRepositoryIdentifierKey" : "01DE8628B025BC69C8C7D8B4612D57BE2C08B62C+++6A1C9FC"
},
{
"DVTSourceControlWorkspaceBlueprintRemoteRepositoryURLKey" : "github.com:WhisperSystems\/SignalProtocolKit.git",
"DVTSourceControlWorkspaceBlueprintRemoteRepositorySystemKey" : "com.apple.dt.Xcode.sourcecontrol.Git",

Binary file not shown. Before: 1.8 KiB | After: 1.4 KiB

Binary file not shown. Before: 2.8 KiB | After: 1.6 KiB

Binary file not shown. Before: 3.9 KiB | After: 1.9 KiB

Binary file not shown. Before: 1.8 KiB | After: 1.4 KiB

Binary file not shown. Before: 2.8 KiB | After: 1.7 KiB

Binary file not shown. Before: 3.9 KiB | After: 2.1 KiB

View File

@@ -107,7 +107,7 @@ NS_ASSUME_NONNULL_BEGIN
const CGFloat kBubbleTailWidth = 6.f;
CGRect contentFrame = CGRectMake(self.incoming ? kBubbleTailWidth : 0.f,
self.vMargin,
viewSize.width - kBubbleTailWidth - (self.incoming ? 10 : 15),
viewSize.width - kBubbleTailWidth - 15,
viewSize.height - self.vMargin * 2);
UIImage *image = [UIImage imageNamed:(self.incoming ? @"file-black-40" : @"file-white-40")];
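
As a rough illustration of the layout change above, here is a minimal Swift sketch of the content-frame math inside a message bubble: the tail inset applies on the leading edge for incoming messages, and a fixed 15pt trailing inset is now used in both directions. The helper name is hypothetical, not part of the commit.

    import CoreGraphics

    // Hypothetical helper mirroring the contentFrame computation above.
    func attachmentContentFrame(viewSize: CGSize, incoming: Bool, vMargin: CGFloat) -> CGRect {
        let bubbleTailWidth: CGFloat = 6
        return CGRect(x: incoming ? bubbleTailWidth : 0,
                      y: vMargin,
                      width: viewSize.width - bubbleTailWidth - 15,
                      height: viewSize.height - vMargin * 2)
    }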

View File

@@ -6,6 +6,7 @@
#import "AttachmentUploadView.h"
#import "JSQMediaItem+OWS.h"
#import "MIMETypeUtil.h"
#import "Signal-Swift.h"
#import "TSAttachmentStream.h"
#import "TSMessagesManager.h"
#import "TSStorageManager+keyingMaterial.h"
@@ -20,13 +21,14 @@
NS_ASSUME_NONNULL_BEGIN
@interface TSVideoAttachmentAdapter ()
@interface TSVideoAttachmentAdapter () <AudioProgressViewDelegate>
@property (nonatomic) UIImage *image;
@property (nonatomic, nullable) UIView *cachedMediaView;
@property (nonatomic) TSAttachmentStream *attachment;
@property (nonatomic, nullable) UIButton *audioPlayPauseButton;
@property (nonatomic, nullable) UILabel *audioBottomLabel;
@property (nonatomic, nullable) AudioProgressView *audioProgressView;
@property (nonatomic) BOOL incoming;
@property (nonatomic, nullable) AttachmentUploadView *attachmentUploadView;
@property (nonatomic) BOOL isAudioPlaying;
@@ -59,9 +61,10 @@ NS_ASSUME_NONNULL_BEGIN
- (void)clearAllViews
{
[_cachedMediaView removeFromSuperview];
_cachedMediaView = nil;
_attachmentUploadView = nil;
[self.cachedMediaView removeFromSuperview];
self.cachedMediaView = nil;
self.attachmentUploadView = nil;
self.audioProgressView = nil;
}
- (void)clearCachedMediaViews
@@ -89,7 +92,11 @@ NS_ASSUME_NONNULL_BEGIN
OWSAssert([NSThread isMainThread]);
self.audioProgressSeconds = progress;
self.audioDurationSeconds = duration;
if (duration > 0) {
self.audioDurationSeconds = duration;
}
[self updateAudioProgressView];
[self updateAudioBottomLabel];
}
@@ -102,35 +109,52 @@ NS_ASSUME_NONNULL_BEGIN
[ViewControllerUtils formatDurationSeconds:(long)round(self.audioProgressSeconds)],
[ViewControllerUtils formatDurationSeconds:(long)round(self.audioDurationSeconds)]];
} else {
NSError *error;
unsigned long long fileSize =
[[NSFileManager defaultManager] attributesOfItemAtPath:self.attachment.filePath error:&error].fileSize;
OWSAssert(!error);
NSString *bottomText = [ViewControllerUtils formatFileSize:fileSize];
self.audioBottomLabel.text = bottomText;
self.audioBottomLabel.text = [NSString
stringWithFormat:@"%@", [ViewControllerUtils formatDurationSeconds:(long)round(self.audioDurationSeconds)]];
}
}
- (void)setAudioIcon:(UIImage *)image
- (void)setAudioIcon:(UIImage *)icon iconColor:(UIColor *)iconColor
{
[_audioPlayPauseButton setImage:image forState:UIControlStateNormal];
[_audioPlayPauseButton setImage:image forState:UIControlStateDisabled];
_audioPlayPauseButton.layer.opacity = 0.8f;
icon = [icon imageWithRenderingMode:UIImageRenderingModeAlwaysTemplate];
[_audioPlayPauseButton setImage:icon forState:UIControlStateNormal];
[_audioPlayPauseButton setImage:icon forState:UIControlStateDisabled];
_audioPlayPauseButton.imageView.tintColor = iconColor;
}
- (void)setAudioIconToPlay {
[self setAudioIcon:[UIImage imageNamed:(self.incoming ? @"audio_play_black_40" : @"audio_play_white_40")]];
[self setAudioIcon:[UIImage imageNamed:@"audio_play_black_40"]
iconColor:[self audioColorWithOpacity:self.incoming ? 0.2f : 0.1f]];
}
- (void)setAudioIconToPause {
[self setAudioIcon:[UIImage imageNamed:(self.incoming ? @"audio_pause_black_40" : @"audio_pause_white_40")]];
[self setAudioIcon:[UIImage imageNamed:@"audio_pause_black_40"]
iconColor:[self audioColorWithOpacity:self.incoming ? 0.2f : 0.1f]];
}
- (void)setIsAudioPlaying:(BOOL)isAudioPlaying
{
_isAudioPlaying = isAudioPlaying;
[self updateAudioProgressView];
}
- (void)updateAudioProgressView
{
[self.audioProgressView
setProgress:(self.audioDurationSeconds > 0 ? self.audioProgressSeconds / self.audioDurationSeconds : 0.f)];
self.audioProgressView.horizontalBarColor = [self audioColorWithOpacity:0.75f];
self.audioProgressView.progressColor
= (self.isAudioPlaying ? [self audioColorWithOpacity:self.incoming ? 0.2f : 0.1f]
: [self audioColorWithOpacity:0.4f]);
}
#pragma mark - JSQMessageMediaData protocol
- (CGFloat)bubbleHeight
- (CGFloat)audioBubbleHeight
{
return 35.f;
return 45.f;
}
- (CGFloat)iconSize
@@ -140,111 +164,32 @@ NS_ASSUME_NONNULL_BEGIN
- (CGFloat)vMargin
{
return 10.f;
return 5.f;
}
- (UIColor *)audioTextColor
{
return (self.incoming ? [UIColor colorWithWhite:0.2 alpha:1.f] : [UIColor whiteColor]);
return (self.incoming ? [UIColor colorWithWhite:0.2f alpha:1.f] : [UIColor whiteColor]);
}
- (UIColor *)audioColorWithOpacity:(CGFloat)alpha
{
return [self.audioTextColor blendWithColor:self.bubbleBackgroundColor alpha:alpha];
}
- (UIColor *)bubbleBackgroundColor
{
return self.incoming ? [UIColor jsq_messageBubbleLightGrayColor] : [UIColor ows_materialBlueColor];
}
- (UIView *)mediaView {
CGSize size = [self mediaViewDisplaySize];
if ([self isVideo]) {
if (self.cachedMediaView == nil) {
UIImageView *imageView = [[UIImageView alloc] initWithImage:self.image];
imageView.contentMode = UIViewContentModeScaleAspectFill;
imageView.frame = CGRectMake(0.0f, 0.0f, size.width, size.height);
imageView.clipsToBounds = YES;
[JSQMessagesMediaViewBubbleImageMasker applyBubbleImageMaskToMediaView:imageView
isOutgoing:self.appliesMediaViewMaskAsOutgoing];
self.cachedMediaView = imageView;
UIImage *img = [UIImage imageNamed:@"play_button"];
UIImageView *videoPlayButton = [[UIImageView alloc] initWithImage:img];
videoPlayButton.frame = CGRectMake((size.width / 2) - 18, (size.height / 2) - 18, 37, 37);
[self.cachedMediaView addSubview:videoPlayButton];
if (!_incoming) {
self.attachmentUploadView = [[AttachmentUploadView alloc] initWithAttachment:self.attachment
superview:imageView
attachmentStateCallback:^(BOOL isAttachmentReady) {
videoPlayButton.hidden = !isAttachmentReady;
}];
}
self.cachedMediaView = [self createVideoMediaView];
}
} else if ([self isAudio]) {
if (self.cachedMediaView == nil) {
CGSize viewSize = [self mediaViewDisplaySize];
UIColor *textColor = [self audioTextColor];
_cachedMediaView = [[UIView alloc] initWithFrame:CGRectMake(0.f, 0.f, viewSize.width, viewSize.height)];
_cachedMediaView.backgroundColor
= self.incoming ? [UIColor jsq_messageBubbleLightGrayColor] : [UIColor ows_materialBlueColor];
[JSQMessagesMediaViewBubbleImageMasker applyBubbleImageMaskToMediaView:_cachedMediaView
isOutgoing:!self.incoming];
const CGFloat kBubbleTailWidth = 6.f;
CGRect contentFrame = CGRectMake(self.incoming ? kBubbleTailWidth : 0.f,
self.vMargin,
viewSize.width - kBubbleTailWidth - (self.incoming ? 10 : 15),
viewSize.height - self.vMargin * 2);
CGRect iconFrame = CGRectMake(round(contentFrame.origin.x + 10.f),
round(contentFrame.origin.y + (contentFrame.size.height - self.iconSize) * 0.5f),
self.iconSize,
self.iconSize);
_audioPlayPauseButton = [[UIButton alloc] initWithFrame:iconFrame];
_audioPlayPauseButton.enabled = NO;
[_cachedMediaView addSubview:_audioPlayPauseButton];
const CGFloat kLabelHSpacing = 3;
const CGFloat kLabelVSpacing = 2;
NSString *topText =
[self.attachment.filename stringByTrimmingCharactersInSet:[NSCharacterSet whitespaceCharacterSet]];
if (topText.length < 1) {
topText = [MIMETypeUtil fileExtensionForMIMEType:self.attachment.contentType].uppercaseString;
}
if (topText.length < 1) {
topText = NSLocalizedString(@"GENERIC_ATTACHMENT_LABEL", @"A label for generic attachments.");
}
UILabel *topLabel = [UILabel new];
topLabel.text = topText;
topLabel.textColor = textColor;
topLabel.lineBreakMode = NSLineBreakByTruncatingMiddle;
topLabel.font = [UIFont ows_regularFontWithSize:ScaleFromIPhone5To7Plus(13.f, 15.f)];
[topLabel sizeToFit];
[_cachedMediaView addSubview:topLabel];
UILabel *audioBottomLabel = [UILabel new];
self.audioBottomLabel = audioBottomLabel;
[self updateAudioBottomLabel];
audioBottomLabel.textColor = [textColor colorWithAlphaComponent:0.85f];
audioBottomLabel.lineBreakMode = NSLineBreakByTruncatingMiddle;
audioBottomLabel.font = [UIFont ows_regularFontWithSize:ScaleFromIPhone5To7Plus(11.f, 13.f)];
[audioBottomLabel sizeToFit];
[_cachedMediaView addSubview:audioBottomLabel];
CGRect topLabelFrame = CGRectZero;
topLabelFrame.size = topLabel.bounds.size;
topLabelFrame.origin.x = round(iconFrame.origin.x + iconFrame.size.width + kLabelHSpacing);
topLabelFrame.origin.y = round(contentFrame.origin.y
+ (contentFrame.size.height
- (topLabel.frame.size.height + audioBottomLabel.frame.size.height + kLabelVSpacing))
* 0.5f);
topLabelFrame.size.width
= round((contentFrame.origin.x + contentFrame.size.width) - topLabelFrame.origin.x);
topLabel.frame = topLabelFrame;
CGRect audioBottomLabelFrame = topLabelFrame;
audioBottomLabelFrame.origin.y += topLabelFrame.size.height + kLabelVSpacing;
audioBottomLabel.frame = audioBottomLabelFrame;
if (!self.incoming) {
self.attachmentUploadView = [[AttachmentUploadView alloc] initWithAttachment:self.attachment
superview:_cachedMediaView
attachmentStateCallback:nil];
}
self.cachedMediaView = [self createAudioMediaView];
}
if (self.isAudioPlaying) {
@@ -259,10 +204,150 @@ NS_ASSUME_NONNULL_BEGIN
return self.cachedMediaView;
}
- (UIView *)createVideoMediaView
{
OWSAssert([self isVideo]);
CGSize size = [self mediaViewDisplaySize];
UIImageView *imageView = [[UIImageView alloc] initWithImage:self.image];
imageView.contentMode = UIViewContentModeScaleAspectFill;
imageView.frame = CGRectMake(0.0f, 0.0f, size.width, size.height);
imageView.clipsToBounds = YES;
[JSQMessagesMediaViewBubbleImageMasker applyBubbleImageMaskToMediaView:imageView
isOutgoing:self.appliesMediaViewMaskAsOutgoing];
UIImage *img = [UIImage imageNamed:@"play_button"];
UIImageView *videoPlayButton = [[UIImageView alloc] initWithImage:img];
videoPlayButton.frame = CGRectMake((size.width / 2) - 18, (size.height / 2) - 18, 37, 37);
[imageView addSubview:videoPlayButton];
if (!_incoming) {
self.attachmentUploadView = [[AttachmentUploadView alloc] initWithAttachment:self.attachment
superview:imageView
attachmentStateCallback:^(BOOL isAttachmentReady) {
videoPlayButton.hidden = !isAttachmentReady;
}];
}
return imageView;
}
- (BOOL)isVoiceMessage
{
OWSAssert([self isAudio]);
return (self.attachment.isVoiceMessage || self.attachment.filename.length < 1);
}
- (UIView *)createAudioMediaView
{
OWSAssert([self isAudio]);
[self ensureAudioDurationSeconds];
CGSize viewSize = [self mediaViewDisplaySize];
UIColor *textColor = [self audioTextColor];
UIView *mediaView = [[UIView alloc] initWithFrame:CGRectMake(0.f, 0.f, viewSize.width, viewSize.height)];
mediaView.backgroundColor = self.bubbleBackgroundColor;
[JSQMessagesMediaViewBubbleImageMasker applyBubbleImageMaskToMediaView:mediaView isOutgoing:!self.incoming];
const CGFloat kBubbleTailWidth = 6.f;
CGRect contentFrame = CGRectMake(self.incoming ? kBubbleTailWidth : 0.f,
self.vMargin,
viewSize.width - kBubbleTailWidth - 15,
viewSize.height - self.vMargin * 2);
CGRect iconFrame = CGRectMake((CGFloat)round(contentFrame.origin.x + 5.f),
(CGFloat)round(contentFrame.origin.y + (contentFrame.size.height - self.iconSize) * 0.5f),
self.iconSize,
self.iconSize);
_audioPlayPauseButton = [[UIButton alloc] initWithFrame:iconFrame];
_audioPlayPauseButton.enabled = NO;
[mediaView addSubview:_audioPlayPauseButton];
const CGFloat kLabelHSpacing = 3;
const CGFloat kLabelVSpacing = 2;
NSString *topText = [[self.attachment.filename stringByDeletingPathExtension]
stringByTrimmingCharactersInSet:[NSCharacterSet whitespaceCharacterSet]];
if (topText.length < 1) {
topText = [MIMETypeUtil fileExtensionForMIMEType:self.attachment.contentType].uppercaseString;
}
if (topText.length < 1) {
topText = NSLocalizedString(@"GENERIC_ATTACHMENT_LABEL", @"A label for generic attachments.");
}
if (self.isVoiceMessage) {
topText = nil;
}
UILabel *topLabel = [UILabel new];
topLabel.text = topText;
topLabel.textColor = [textColor colorWithAlphaComponent:0.85f];
topLabel.lineBreakMode = NSLineBreakByTruncatingMiddle;
topLabel.font = [UIFont ows_regularFontWithSize:ScaleFromIPhone5To7Plus(11.f, 13.f)];
[topLabel sizeToFit];
[mediaView addSubview:topLabel];
AudioProgressView *audioProgressView = [AudioProgressView new];
self.audioProgressView = audioProgressView;
audioProgressView.delegate = self;
[self updateAudioProgressView];
[mediaView addSubview:audioProgressView];
UILabel *bottomLabel = [UILabel new];
self.audioBottomLabel = bottomLabel;
[self updateAudioBottomLabel];
bottomLabel.textColor = [textColor colorWithAlphaComponent:0.85f];
bottomLabel.lineBreakMode = NSLineBreakByTruncatingMiddle;
bottomLabel.font = [UIFont ows_regularFontWithSize:ScaleFromIPhone5To7Plus(11.f, 13.f)];
[bottomLabel sizeToFit];
[mediaView addSubview:bottomLabel];
const CGFloat topLabelHeight = ceil(topLabel.font.lineHeight);
const CGFloat kAudioProgressViewHeight = 12.f;
const CGFloat bottomLabelHeight = ceil(bottomLabel.font.lineHeight);
CGRect labelsBounds = CGRectZero;
labelsBounds.origin.x = (CGFloat)round(iconFrame.origin.x + iconFrame.size.width + kLabelHSpacing);
labelsBounds.size.width = contentFrame.origin.x + contentFrame.size.width - labelsBounds.origin.x;
labelsBounds.size.height = topLabelHeight + kAudioProgressViewHeight + bottomLabelHeight + kLabelVSpacing * 2;
labelsBounds.origin.y
= (CGFloat)round(contentFrame.origin.y + (contentFrame.size.height - labelsBounds.size.height) * 0.5f);
topLabel.frame = CGRectMake(labelsBounds.origin.x, labelsBounds.origin.y, labelsBounds.size.width, topLabelHeight);
audioProgressView.frame = CGRectMake(labelsBounds.origin.x,
labelsBounds.origin.y + topLabelHeight + kLabelVSpacing,
labelsBounds.size.width,
kAudioProgressViewHeight);
bottomLabel.frame = CGRectMake(labelsBounds.origin.x,
labelsBounds.origin.y + topLabelHeight + kAudioProgressViewHeight + kLabelVSpacing * 2,
labelsBounds.size.width,
bottomLabelHeight);
if (!self.incoming) {
self.attachmentUploadView = [[AttachmentUploadView alloc] initWithAttachment:self.attachment
superview:mediaView
attachmentStateCallback:nil];
}
return mediaView;
}
- (void)ensureAudioDurationSeconds
{
if (self.audioDurationSeconds == 0.f) {
NSError *error;
AVAudioPlayer *audioPlayer = [[AVAudioPlayer alloc] initWithContentsOfURL:self.fileURL error:&error];
OWSAssert(!error);
if (!error) {
self.audioDurationSeconds = (CGFloat)[audioPlayer duration];
}
}
}
- (CGSize)mediaViewDisplaySize {
CGSize size = [super mediaViewDisplaySize];
if ([self isAudio]) {
size.height = ceil(self.bubbleHeight + self.vMargin * 2);
size.height = (CGFloat)ceil(self.audioBubbleHeight + self.vMargin * 2);
} else if ([self isVideo]) {
return [self ows_adjustBubbleSize:size forImage:self.image];
}
@@ -333,6 +418,13 @@ NS_ASSUME_NONNULL_BEGIN
}
}
#pragma mark - AudioProgressViewDelegate
- (void)AudioProgressViewWasScrubbedWithProgress:(CGFloat)progress
{
// TODO:
}
#pragma mark - OWSMessageMediaAdapter
- (void)setCellVisible:(BOOL)isVisible
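
To illustrate the audio handling introduced in this adapter, a hedged Swift sketch follows: it reads a clip's duration with AVAudioPlayer (as ensureAudioDurationSeconds above does) and converts elapsed time into the 0–1 fraction that updateAudioProgressView feeds to AudioProgressView. The function names here are illustrative, not part of the commit.

    import AVFoundation

    // Illustrative only: read a clip's duration the way ensureAudioDurationSeconds does.
    func audioDurationSeconds(at fileURL: URL) -> CGFloat {
        guard let player = try? AVAudioPlayer(contentsOf: fileURL) else { return 0 }
        return CGFloat(player.duration)
    }

    // Convert elapsed time into the 0...1 progress used by AudioProgressView,
    // guarding against a zero duration exactly as updateAudioProgressView does.
    func progressFraction(elapsed: CGFloat, duration: CGFloat) -> CGFloat {
        return duration > 0 ? elapsed / duration : 0
    }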

View File

@@ -22,4 +22,6 @@
+ (UIColor *)backgroundColorForContact:(NSString *)contactIdentifier;
+ (UIColor *)colorWithRGBHex:(unsigned long)value;
- (UIColor *)blendWithColor:(UIColor *)otherColor alpha:(CGFloat)alpha;
@end

View File

@@ -3,6 +3,7 @@
//
#import "Cryptography.h"
#import "OWSMath.h"
#import "UIColor+OWS.h"
@implementation UIColor (OWS)
@@ -125,4 +126,17 @@
return [UIColor colorWithRed:red green:green blue:blue alpha:1.f];
}
- (UIColor *)blendWithColor:(UIColor *)otherColor alpha:(CGFloat)alpha
{
CGFloat r0, g0, b0, a0;
[self getRed:&r0 green:&g0 blue:&b0 alpha:&a0];
CGFloat r1, g1, b1, a1;
[otherColor getRed:&r1 green:&g1 blue:&b1 alpha:&a1];
return [UIColor colorWithRed:CGFloatLerp(r0, r1, alpha)
green:CGFloatLerp(g0, g1, alpha)
blue:CGFloatLerp(b0, b1, alpha)
alpha:CGFloatLerp(a0, a1, alpha)];
}
@end
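
For reference, a minimal Swift equivalent of -blendWithColor:alpha: above, shown as a sketch rather than part of the commit: each RGBA channel is linearly interpolated toward the other color, which is how audioColorWithOpacity derives icon and bar tints from the text and bubble colors.

    import UIKit

    // Per-channel linear interpolation between two colors; alpha == 0 returns the
    // first color, alpha == 1 returns the second. Mirrors blendWithColor:alpha:.
    func blend(_ color: UIColor, with other: UIColor, alpha: CGFloat) -> UIColor {
        let t = max(0, min(1, alpha))
        var r0: CGFloat = 0, g0: CGFloat = 0, b0: CGFloat = 0, a0: CGFloat = 0
        var r1: CGFloat = 0, g1: CGFloat = 0, b1: CGFloat = 0, a1: CGFloat = 0
        _ = color.getRed(&r0, green: &g0, blue: &b0, alpha: &a0)
        _ = other.getRed(&r1, green: &g1, blue: &b1, alpha: &a1)
        return UIColor(red: r0 + (r1 - r0) * t,
                       green: g0 + (g1 - g0) * t,
                       blue: b0 + (b1 - b0) * t,
                       alpha: a0 + (a1 - a0) * t)
    }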

View File

@@ -2,31 +2,9 @@
// Copyright (c) 2017 Open Whisper Systems. All rights reserved.
//
#import "OWSMath.h"
#import "UIView+OWS.h"
// TODO: We'll eventually want to promote these into an OWSMath.h header.
static inline CGFloat Clamp(CGFloat value, CGFloat minValue, CGFloat maxValue)
{
return MAX(minValue, MIN(maxValue, value));
}
static inline CGFloat Clamp01(CGFloat value)
{
return Clamp(value, 0.f, 1.f);
}
static inline CGFloat CGFloatLerp(CGFloat left, CGFloat right, CGFloat alpha)
{
alpha = Clamp01(alpha);
return (left * (1.f - alpha)) + (right * alpha);
}
static inline CGFloat CGFloatInverseLerp(CGFloat value, CGFloat minValue, CGFloat maxValue)
{
return (value - minValue) / (maxValue - minValue);
}
static inline CGFloat ScreenShortDimension()
{
return MIN([UIScreen mainScreen].bounds.size.width, [UIScreen mainScreen].bounds.size.height);

Signal/src/util/OWSMath.h (new file, 30 lines)
View File

@@ -0,0 +1,30 @@
//
// Copyright (c) 2017 Open Whisper Systems. All rights reserved.
//
NS_ASSUME_NONNULL_BEGIN
// TODO: We'll eventually want to promote these into an OWSMath.h header.
static inline CGFloat Clamp(CGFloat value, CGFloat minValue, CGFloat maxValue)
{
return MAX(minValue, MIN(maxValue, value));
}
static inline CGFloat Clamp01(CGFloat value)
{
return Clamp(value, 0.f, 1.f);
}
static inline CGFloat CGFloatLerp(CGFloat left, CGFloat right, CGFloat alpha)
{
alpha = Clamp01(alpha);
return (left * (1.f - alpha)) + (right * alpha);
}
static inline CGFloat CGFloatInverseLerp(CGFloat value, CGFloat minValue, CGFloat maxValue)
{
return (value - minValue) / (maxValue - minValue);
}
NS_ASSUME_NONNULL_END
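
These helpers map naturally onto audio scrubbing: an inverse lerp converts a touch location inside the progress bar back into a 0–1 progress value, clamped like Clamp01 above. A hedged Swift sketch; the handler is hypothetical (the commit leaves scrubbing as a TODO).

    import CoreGraphics

    // Hypothetical scrub handler: maps the touch's x position within the bar
    // back to a 0...1 progress value (CGFloatInverseLerp), then clamps (Clamp01).
    func scrubProgress(touchX: CGFloat, barBounds: CGRect) -> CGFloat {
        let raw = (touchX - barBounds.minX) / barBounds.width
        return max(0, min(1, raw))
    }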

View File

@@ -0,0 +1,9 @@
//
// Copyright (c) 2017 Open Whisper Systems. All rights reserved.
//
#import "OWSMath.h"
NS_ASSUME_NONNULL_BEGIN
NS_ASSUME_NONNULL_END

View File

@@ -0,0 +1,103 @@
//
// Copyright (c) 2017 Open Whisper Systems. All rights reserved.
//
import UIKit
@objc protocol AudioProgressViewDelegate: class {
func AudioProgressViewWasScrubbed(progress: CGFloat)
}
@objc class AudioProgressView: UIView {
public weak var delegate: AudioProgressViewDelegate?
override var bounds: CGRect {
didSet {
if oldValue != bounds {
updateSubviews()
}
}
}
override var frame: CGRect {
didSet {
if oldValue != frame {
updateSubviews()
}
}
}
var horizontalBarColor = UIColor.black {
didSet {
updateContent()
}
}
var progressColor = UIColor.blue {
didSet {
updateContent()
}
}
private let horizontalBarLayer: CAShapeLayer
private let progressLayer: CAShapeLayer
var progress: CGFloat = 0 {
didSet {
if oldValue != progress {
updateContent()
}
}
}
@available(*, unavailable, message:"use delegate: constructor instead.")
required init?(coder aDecoder: NSCoder) {
self.horizontalBarLayer = CAShapeLayer()
self.progressLayer = CAShapeLayer()
super.init(coder: aDecoder)
assertionFailure()
}
public required init() {
self.horizontalBarLayer = CAShapeLayer()
self.progressLayer = CAShapeLayer()
super.init(frame:CGRect.zero)
self.layer.addSublayer(self.horizontalBarLayer)
self.layer.addSublayer(self.progressLayer)
}
internal func updateSubviews() {
AssertIsOnMainThread()
self.horizontalBarLayer.frame = self.bounds
self.progressLayer.frame = self.bounds
updateContent()
}
internal func updateContent() {
AssertIsOnMainThread()
let horizontalBarPath = UIBezierPath()
let horizontalBarHeightFraction = CGFloat(0.25)
let horizontalBarHeight = bounds.size.height * horizontalBarHeightFraction
horizontalBarPath.append(UIBezierPath(rect: CGRect(x: 0, y:(bounds.size.height - horizontalBarHeight) * 0.5, width:bounds.size.width, height:horizontalBarHeight)))
horizontalBarLayer.path = horizontalBarPath.cgPath
horizontalBarLayer.fillColor = horizontalBarColor.cgColor
let progressHeight = bounds.self.height
let progressWidth = progressHeight * 0.15
let progressX = (bounds.self.width - progressWidth) * max(0.0, min(1.0, progress))
let progressBounds = CGRect(x:progressX, y:0, width:progressWidth, height:progressHeight)
let progressCornerRadius = progressWidth * 0.5
let progressPath = UIBezierPath()
progressPath.append(UIBezierPath(roundedRect: progressBounds, cornerRadius: progressCornerRadius))
progressLayer.path = progressPath.cgPath
progressLayer.fillColor = progressColor.cgColor
}
}
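
A hedged usage sketch for the new view: a cell or adapter creates it, sets the bar and thumb colors (in the commit these come from audioColorWithOpacity:), and pushes playback progress as a 0–1 fraction. The concrete color and size values below are illustrative only.

    import UIKit

    // Illustrative wiring; run on the main thread, in the same module as AudioProgressView.
    let progressView = AudioProgressView()
    progressView.horizontalBarColor = UIColor(white: 0.2, alpha: 1).withAlphaComponent(0.75)
    progressView.progressColor = UIColor(white: 0.2, alpha: 1)
    progressView.frame = CGRect(x: 0, y: 0, width: 200, height: 12)
    progressView.progress = 0.35   // 35% of the clip has played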