Sketch out message metadata view.

* Show message metadata view from conversation view.
* Pull out MediaMessageView class.
* Track recipient read timestamps.
* Add per-recipient status to message metadata view.
* Add share button to message metadata view.

// FREEBIE
Matthew Chen 2017-09-19 10:36:23 -04:00
parent 3bb8f4aad5
commit 9f9ac746d1
19 changed files with 985 additions and 358 deletions
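The gist of the change, before the file-by-file diff: the conversation view registers a new "Info" edit-menu item whose action is +[TSMessageAdapter messageMetadataSelector], and when that action fires on an incoming or outgoing message it pushes the new Swift MessageMetadataViewController. The following is a minimal Swift sketch of that flow; the showMetadata(for:from:) helper is hypothetical, and in the commit the equivalent logic lives in Objective-C in the conversation view controller's diff below.

import UIKit

// Hypothetical helper mirroring the handler added to the conversation view.
// Only incoming and outgoing messages support the metadata view; other
// interaction types (info messages, calls, etc.) are rejected.
func showMetadata(for interaction: TSInteraction, from viewController: UIViewController) {
    guard let message = interaction as? TSMessage,
        interaction is TSIncomingMessage || interaction is TSOutgoingMessage else {
            owsFail("Can't show message metadata for interaction of type: \(type(of: interaction))")
            return
    }
    let metadataView = MessageMetadataViewController(message: message)
    viewController.navigationController?.pushViewController(metadataView, animated: true)
}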

View File

@ -81,6 +81,8 @@
34C42D661F4734ED0072EC04 /* OWSContactOffersInteraction.m in Sources */ = {isa = PBXBuildFile; fileRef = 34C42D631F4734ED0072EC04 /* OWSContactOffersInteraction.m */; };
34C42D671F4734ED0072EC04 /* TSUnreadIndicatorInteraction.m in Sources */ = {isa = PBXBuildFile; fileRef = 34C42D651F4734ED0072EC04 /* TSUnreadIndicatorInteraction.m */; };
34CA1C251F706B5400E51C51 /* NSAttributedString+OWS.m in Sources */ = {isa = PBXBuildFile; fileRef = 34CA1C241F706B5400E51C51 /* NSAttributedString+OWS.m */; };
34CA1C271F7156F300E51C51 /* MessageMetadataViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 34CA1C261F7156F300E51C51 /* MessageMetadataViewController.swift */; };
34CA1C291F7164F700E51C51 /* MediaMessageView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 34CA1C281F7164F700E51C51 /* MediaMessageView.swift */; };
34CCAF381F0C0599004084F4 /* AppUpdateNag.m in Sources */ = {isa = PBXBuildFile; fileRef = 34CCAF371F0C0599004084F4 /* AppUpdateNag.m */; };
34CCAF3B1F0C2748004084F4 /* OWSAddToContactViewController.m in Sources */ = {isa = PBXBuildFile; fileRef = 34CCAF3A1F0C2748004084F4 /* OWSAddToContactViewController.m */; };
34CE88E71F2FB9A10098030F /* ProfileViewController.m in Sources */ = {isa = PBXBuildFile; fileRef = 34CE88E61F2FB9A10098030F /* ProfileViewController.m */; };
@ -529,6 +531,8 @@
34C42D651F4734ED0072EC04 /* TSUnreadIndicatorInteraction.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = TSUnreadIndicatorInteraction.m; sourceTree = "<group>"; };
34CA1C231F706B5400E51C51 /* NSAttributedString+OWS.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = "NSAttributedString+OWS.h"; sourceTree = "<group>"; };
34CA1C241F706B5400E51C51 /* NSAttributedString+OWS.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = "NSAttributedString+OWS.m"; sourceTree = "<group>"; };
34CA1C261F7156F300E51C51 /* MessageMetadataViewController.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = MessageMetadataViewController.swift; sourceTree = "<group>"; };
34CA1C281F7164F700E51C51 /* MediaMessageView.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = MediaMessageView.swift; sourceTree = "<group>"; };
34CCAF361F0C0599004084F4 /* AppUpdateNag.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = AppUpdateNag.h; sourceTree = "<group>"; };
34CCAF371F0C0599004084F4 /* AppUpdateNag.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = AppUpdateNag.m; sourceTree = "<group>"; };
34CCAF391F0C2748004084F4 /* OWSAddToContactViewController.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = OWSAddToContactViewController.h; sourceTree = "<group>"; };
@ -1029,6 +1033,8 @@
34B3F84C1E8DF1700035BE1A /* InviteFlow.swift */,
34B3F84D1E8DF1700035BE1A /* LockInteractionController.h */,
34B3F84E1E8DF1700035BE1A /* LockInteractionController.m */,
34CA1C281F7164F700E51C51 /* MediaMessageView.swift */,
34CA1C261F7156F300E51C51 /* MessageMetadataViewController.swift */,
34D9134C1F66DB7C00722898 /* ModalActivityIndicatorViewController.swift */,
34B3F84F1E8DF1700035BE1A /* NewContactThreadViewController.h */,
34B3F8501E8DF1700035BE1A /* NewContactThreadViewController.m */,
@ -2233,6 +2239,7 @@
458DE9D91DEE7B360071BB03 /* OWSWebRTCDataProtos.pb.m in Sources */,
B62D53F71A23CCAD009AAF82 /* TSMessageAdapter.m in Sources */,
76EB063C18170B33006006FC /* NumberUtil.m in Sources */,
34CA1C291F7164F700E51C51 /* MediaMessageView.swift in Sources */,
B6A3EB4B1A423B3800B2236B /* TSPhotoAdapter.m in Sources */,
3400C7961EAF99F4008A8584 /* SelectThreadViewController.m in Sources */,
34D5CCB11EAE7E7F005515DB /* SelectRecipientViewController.m in Sources */,
@ -2297,6 +2304,7 @@
34B3F8881E8DF1700035BE1A /* OversizeTextMessageViewController.swift in Sources */,
34330AA31E79686200DF2FB9 /* OWSProgressView.m in Sources */,
34B3F8A21E8EA6040035BE1A /* ViewControllerUtils.m in Sources */,
34CA1C271F7156F300E51C51 /* MessageMetadataViewController.swift in Sources */,
34D5CCA91EAE3D30005515DB /* AvatarViewHelper.m in Sources */,
453D28BA1D332DB100D523F0 /* OWSMessagesBubblesSizeCalculator.m in Sources */,
45F170AC1E2F0351003FC1F2 /* CallAudioSession.swift in Sources */,

View File

@ -2,10 +2,10 @@
// Copyright (c) 2017 Open Whisper Systems. All rights reserved.
//
#import "ContactsManagerProtocol.h"
#import "OWSMessageData.h"
#import "OWSMessageEditing.h"
#import "TSInfoMessage.h"
#import "ContactsManagerProtocol.h"
NS_ASSUME_NONNULL_BEGIN
@ -23,6 +23,8 @@ NS_ASSUME_NONNULL_BEGIN
@property (nonatomic, readonly) CGFloat mediaViewAlpha;
@property (nonatomic, readonly) BOOL isMediaBeingSent;
+ (SEL)messageMetadataSelector;
@end
NS_ASSUME_NONNULL_END

View File

@ -260,6 +260,14 @@ NS_ASSUME_NONNULL_BEGIN
#pragma mark - OWSMessageEditing Protocol
+ (SEL)messageMetadataSelector
{
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wundeclared-selector"
return @selector(showMessageMetadata:);
#pragma clang diagnostic pop
}
- (BOOL)canPerformEditingAction:(SEL)action
{
if ([self attachmentStream] && ![self attachmentStream].isUploaded) {
@ -269,6 +277,9 @@ NS_ASSUME_NONNULL_BEGIN
// Deletes are always handled by TSMessageAdapter
if (action == @selector(delete:)) {
return YES;
} else if (action == [TSMessageAdapter messageMetadataSelector]) {
return ([self.interaction isKindOfClass:[TSIncomingMessage class]] ||
[self.interaction isKindOfClass:[TSOutgoingMessage class]]);
}
// Delegate other actions for media items
@ -301,9 +312,11 @@ NS_ASSUME_NONNULL_BEGIN
[AttachmentSharing showShareUIForAttachment:stream];
}
return;
} else if (action == [TSMessageAdapter messageMetadataSelector]) {
OWSFail(@"Conversation view should handle message metadata events.");
return;
}
// Delegate other actions for media items
if (self.isMediaMessage) {
[self.mediaItem performEditingAction:action];

View File

@ -92,8 +92,10 @@
#import <SignalServiceKit/TSContactThread.h>
#import <SignalServiceKit/TSErrorMessage.h>
#import <SignalServiceKit/TSGroupThread.h>
#import <SignalServiceKit/TSIncomingMessage.h>
#import <SignalServiceKit/TSInfoMessage.h>
#import <SignalServiceKit/TSNetworkManager.h>
#import <SignalServiceKit/TSOutgoingMessage.h>
#import <SignalServiceKit/TSPreKeyManager.h>
#import <SignalServiceKit/TSSocketManager.h>
#import <SignalServiceKit/TSStorageManager+Calling.h>

View File

@ -5,6 +5,8 @@
#import <PureLayout/PureLayout.h>
#import <UIKit/UIKit.h>
NS_ASSUME_NONNULL_BEGIN
// A convenience method for doing responsive layout. Scales between two
// reference values (for iPhone 5 and iPhone 7 Plus) to the current device
// based on screen width, linearly interpolating.
@ -100,3 +102,5 @@ CGFloat ScaleFromIPhone5(CGFloat iPhone5Value);
- (void)addRedBorderRecursively;
@end
NS_ASSUME_NONNULL_END

View File

@ -5,6 +5,8 @@
#import "OWSMath.h"
#import "UIView+OWS.h"
NS_ASSUME_NONNULL_BEGIN
static inline CGFloat ScreenShortDimension()
{
return MIN([UIScreen mainScreen].bounds.size.width, [UIScreen mainScreen].bounds.size.height);
@ -372,3 +374,5 @@ CGFloat ScaleFromIPhone5(CGFloat iPhone5Value)
}
@end
NS_ASSUME_NONNULL_END

View File

@ -5,7 +5,7 @@
import Foundation
import MediaPlayer
class AttachmentApprovalViewController: OWSViewController, OWSAudioAttachmentPlayerDelegate {
class AttachmentApprovalViewController: OWSViewController {
let TAG = "[AttachmentApprovalViewController]"
@ -15,21 +15,14 @@ class AttachmentApprovalViewController: OWSViewController, OWSAudioAttachmentPla
var successCompletion : (() -> Void)?
var videoPlayer: MPMoviePlayerController?
var audioPlayer: OWSAudioAttachmentPlayer?
var audioStatusLabel: UILabel?
var audioPlayButton: UIButton?
var isAudioPlayingFlag = false
var isAudioPaused = false
var audioProgressSeconds: CGFloat = 0
var audioDurationSeconds: CGFloat = 0
let mediaMessageView: MediaMessageView
// MARK: Initializers
@available(*, unavailable, message:"use attachment: constructor instead.")
required init?(coder aDecoder: NSCoder) {
self.attachment = SignalAttachment.empty()
mediaMessageView = MediaMessageView(attachment:attachment)
super.init(coder: aDecoder)
owsFail("\(self.TAG) invalid constructor")
}
@ -38,8 +31,9 @@ class AttachmentApprovalViewController: OWSViewController, OWSAudioAttachmentPla
assert(!attachment.hasError)
self.attachment = attachment
self.successCompletion = successCompletion
mediaMessageView = MediaMessageView(attachment:attachment)
super.init(nibName: nil, bundle: nil)
}
}
// MARK: View Lifecycle
@ -51,13 +45,13 @@ class AttachmentApprovalViewController: OWSViewController, OWSAudioAttachmentPla
self.navigationItem.leftBarButtonItem = UIBarButtonItem(barButtonSystemItem:.stop,
target:self,
action:#selector(donePressed))
self.navigationItem.title = dialogTitle()
createViews()
self.navigationItem.title = dialogTitle()
}
private func dialogTitle() -> String {
guard let filename = formattedFileName() else {
guard let filename = mediaMessageView.formattedFileName() else {
return NSLocalizedString("ATTACHMENT_APPROVAL_DIALOG_TITLE",
comment: "Title for the 'attachment approval' dialog.")
}
@ -67,13 +61,13 @@ class AttachmentApprovalViewController: OWSViewController, OWSAudioAttachmentPla
override func viewWillAppear(_ animated: Bool) {
super.viewWillAppear(animated)
ViewControllerUtils.setAudioIgnoresHardwareMuteSwitch(true)
mediaMessageView.viewWillAppear(animated)
}
override func viewWillDisappear(_ animated: Bool) {
super.viewWillDisappear(animated)
ViewControllerUtils.setAudioIgnoresHardwareMuteSwitch(false)
mediaMessageView.viewWillDisappear(animated)
}
// MARK: - Create Views
@ -82,24 +76,11 @@ class AttachmentApprovalViewController: OWSViewController, OWSAudioAttachmentPla
let previewTopMargin: CGFloat = 30
let previewHMargin: CGFloat = 20
let attachmentPreviewView = UIView()
self.view.addSubview(attachmentPreviewView)
attachmentPreviewView.autoPinWidthToSuperview(withMargin:previewHMargin)
attachmentPreviewView.autoPin(toTopLayoutGuideOf: self, withInset:previewTopMargin)
self.view.addSubview(mediaMessageView)
mediaMessageView.autoPinWidthToSuperview(withMargin:previewHMargin)
mediaMessageView.autoPin(toTopLayoutGuideOf: self, withInset:previewTopMargin)
createButtonRow(attachmentPreviewView:attachmentPreviewView)
if attachment.isAnimatedImage {
createAnimatedPreview(attachmentPreviewView:attachmentPreviewView)
} else if attachment.isImage {
createImagePreview(attachmentPreviewView:attachmentPreviewView)
} else if attachment.isVideo {
createVideoPreview(attachmentPreviewView:attachmentPreviewView)
} else if attachment.isAudio {
createAudioPreview(attachmentPreviewView:attachmentPreviewView)
} else {
createGenericPreview(attachmentPreviewView:attachmentPreviewView)
}
createButtonRow(mediaMessageView:mediaMessageView)
}
private func wrapViewsInVerticalStack(subviews: [UIView]) -> UIView {
@ -127,212 +108,7 @@ class AttachmentApprovalViewController: OWSViewController, OWSAudioAttachmentPla
return stackView
}
private func createAudioPreview(attachmentPreviewView: UIView) {
guard let dataUrl = attachment.dataUrl else {
createGenericPreview(attachmentPreviewView:attachmentPreviewView)
return
}
audioPlayer = OWSAudioAttachmentPlayer(mediaUrl: dataUrl, delegate: self)
var subviews = [UIView]()
let audioPlayButton = UIButton()
self.audioPlayButton = audioPlayButton
setAudioIconToPlay()
audioPlayButton.imageView?.layer.minificationFilter = kCAFilterTrilinear
audioPlayButton.imageView?.layer.magnificationFilter = kCAFilterTrilinear
audioPlayButton.addTarget(self, action:#selector(audioPlayButtonPressed), for:.touchUpInside)
let buttonSize = createHeroViewSize()
audioPlayButton.autoSetDimension(.width, toSize:buttonSize)
audioPlayButton.autoSetDimension(.height, toSize:buttonSize)
subviews.append(audioPlayButton)
let fileNameLabel = createFileNameLabel()
if let fileNameLabel = fileNameLabel {
subviews.append(fileNameLabel)
}
let fileSizeLabel = createFileSizeLabel()
subviews.append(fileSizeLabel)
let audioStatusLabel = createAudioStatusLabel()
self.audioStatusLabel = audioStatusLabel
updateAudioStatusLabel()
subviews.append(audioStatusLabel)
let stackView = wrapViewsInVerticalStack(subviews:subviews)
attachmentPreviewView.addSubview(stackView)
fileNameLabel?.autoPinWidthToSuperview(withMargin: 32)
stackView.autoPinWidthToSuperview()
stackView.autoVCenterInSuperview()
}
private func createAnimatedPreview(attachmentPreviewView: UIView) {
guard attachment.isValidImage else {
return
}
let data = attachment.data
// Use Flipboard FLAnimatedImage library to display gifs
guard let animatedImage = FLAnimatedImage(gifData:data) else {
createGenericPreview(attachmentPreviewView:attachmentPreviewView)
return
}
let animatedImageView = FLAnimatedImageView()
animatedImageView.animatedImage = animatedImage
animatedImageView.contentMode = .scaleAspectFit
attachmentPreviewView.addSubview(animatedImageView)
animatedImageView.autoPinWidthToSuperview()
animatedImageView.autoPinHeightToSuperview()
}
private func createImagePreview(attachmentPreviewView: UIView) {
var image = attachment.image
if image == nil {
image = UIImage(data:attachment.data)
}
guard image != nil else {
createGenericPreview(attachmentPreviewView:attachmentPreviewView)
return
}
let imageView = UIImageView(image:image)
imageView.layer.minificationFilter = kCAFilterTrilinear
imageView.layer.magnificationFilter = kCAFilterTrilinear
imageView.contentMode = .scaleAspectFit
attachmentPreviewView.addSubview(imageView)
imageView.autoPinWidthToSuperview()
imageView.autoPinHeightToSuperview()
}
private func createVideoPreview(attachmentPreviewView: UIView) {
guard let dataUrl = attachment.dataUrl else {
createGenericPreview(attachmentPreviewView:attachmentPreviewView)
return
}
guard let videoPlayer = MPMoviePlayerController(contentURL:dataUrl) else {
createGenericPreview(attachmentPreviewView:attachmentPreviewView)
return
}
videoPlayer.prepareToPlay()
videoPlayer.controlStyle = .default
videoPlayer.shouldAutoplay = false
attachmentPreviewView.addSubview(videoPlayer.view)
self.videoPlayer = videoPlayer
videoPlayer.view.autoPinWidthToSuperview()
videoPlayer.view.autoPinHeightToSuperview()
}
private func createGenericPreview(attachmentPreviewView: UIView) {
var subviews = [UIView]()
let imageView = createHeroImageView(imageName: "file-thin-black-filled-large")
subviews.append(imageView)
let fileNameLabel = createFileNameLabel()
if let fileNameLabel = fileNameLabel {
subviews.append(fileNameLabel)
}
let fileSizeLabel = createFileSizeLabel()
subviews.append(fileSizeLabel)
let stackView = wrapViewsInVerticalStack(subviews:subviews)
attachmentPreviewView.addSubview(stackView)
fileNameLabel?.autoPinWidthToSuperview(withMargin: 32)
stackView.autoPinWidthToSuperview()
stackView.autoVCenterInSuperview()
}
private func createHeroViewSize() -> CGFloat {
return ScaleFromIPhone5To7Plus(175, 225)
}
private func createHeroImageView(imageName: String) -> UIView {
let imageSize = createHeroViewSize()
let image = UIImage(named:imageName)
assert(image != nil)
let imageView = UIImageView(image:image)
imageView.layer.minificationFilter = kCAFilterTrilinear
imageView.layer.magnificationFilter = kCAFilterTrilinear
imageView.layer.shadowColor = UIColor.black.cgColor
let shadowScaling = 5.0
imageView.layer.shadowRadius = CGFloat(2.0 * shadowScaling)
imageView.layer.shadowOpacity = 0.25
imageView.layer.shadowOffset = CGSize(width: 0.75 * shadowScaling, height: 0.75 * shadowScaling)
imageView.autoSetDimension(.width, toSize:imageSize)
imageView.autoSetDimension(.height, toSize:imageSize)
return imageView
}
private func labelFont() -> UIFont {
return UIFont.ows_regularFont(withSize:ScaleFromIPhone5To7Plus(18, 24))
}
private func formattedFileExtension() -> String? {
guard let fileExtension = attachment.fileExtension else {
return nil
}
return String(format:NSLocalizedString("ATTACHMENT_APPROVAL_FILE_EXTENSION_FORMAT",
comment: "Format string for file extension label in call interstitial view"),
fileExtension.uppercased())
}
private func formattedFileName() -> String? {
guard let sourceFilename = attachment.sourceFilename else {
return nil
}
let filename = sourceFilename.trimmingCharacters(in: CharacterSet.whitespacesAndNewlines)
guard filename.characters.count > 0 else {
return nil
}
return filename
}
private func createFileNameLabel() -> UIView? {
let filename = formattedFileName() ?? formattedFileExtension()
guard filename != nil else {
return nil
}
let label = UILabel()
label.text = filename
label.textColor = UIColor.ows_materialBlue()
label.font = labelFont()
label.textAlignment = .center
label.lineBreakMode = .byTruncatingMiddle
return label
}
private func createFileSizeLabel() -> UIView {
let label = UILabel()
let fileSize = attachment.dataLength
label.text = String(format:NSLocalizedString("ATTACHMENT_APPROVAL_FILE_SIZE_FORMAT",
comment: "Format string for file size label in call interstitial view. Embeds: {{file size as 'N mb' or 'N kb'}}."),
ViewControllerUtils.formatFileSize(UInt(fileSize)))
label.textColor = UIColor.ows_materialBlue()
label.font = labelFont()
label.textAlignment = .center
return label
}
private func createAudioStatusLabel() -> UILabel {
let label = UILabel()
label.textColor = UIColor.ows_materialBlue()
label.font = labelFont()
label.textAlignment = .center
return label
}
private func createButtonRow(attachmentPreviewView: UIView) {
private func createButtonRow(mediaMessageView: UIView) {
let buttonTopMargin = ScaleFromIPhone5To7Plus(30, 40)
let buttonBottomMargin = ScaleFromIPhone5To7Plus(25, 40)
let buttonHSpacing = ScaleFromIPhone5To7Plus(20, 30)
@ -341,7 +117,7 @@ class AttachmentApprovalViewController: OWSViewController, OWSAudioAttachmentPla
self.view.addSubview(buttonRow)
buttonRow.autoPinWidthToSuperview()
buttonRow.autoPinEdge(toSuperviewEdge:.bottom, withInset:buttonBottomMargin)
buttonRow.autoPinEdge(.top, to:.bottom, of:attachmentPreviewView, withOffset:buttonTopMargin)
buttonRow.autoPinEdge(.top, to:.bottom, of:mediaMessageView, withOffset:buttonTopMargin)
// We use this invisible subview to ensure that the buttons are centered
// horizontally.
@ -399,64 +175,4 @@ class AttachmentApprovalViewController: OWSViewController, OWSAudioAttachmentPla
successCompletion?()
})
}
func audioPlayButtonPressed(sender: UIButton) {
audioPlayer?.togglePlayState()
}
// MARK: - OWSAudioAttachmentPlayerDelegate
public func isAudioPlaying() -> Bool {
return isAudioPlayingFlag
}
public func setIsAudioPlaying(_ isAudioPlaying: Bool) {
isAudioPlayingFlag = isAudioPlaying
updateAudioStatusLabel()
}
public func isPaused() -> Bool {
return isAudioPaused
}
public func setIsPaused(_ isPaused: Bool) {
isAudioPaused = isPaused
}
public func setAudioProgress(_ progress: CGFloat, duration: CGFloat) {
audioProgressSeconds = progress
audioDurationSeconds = duration
updateAudioStatusLabel()
}
private func updateAudioStatusLabel() {
guard let audioStatusLabel = self.audioStatusLabel else {
owsFail("Missing audio status label")
return
}
if isAudioPlayingFlag && audioProgressSeconds > 0 && audioDurationSeconds > 0 {
audioStatusLabel.text = String(format:"%@ / %@",
ViewControllerUtils.formatDurationSeconds(Int(round(self.audioProgressSeconds))),
ViewControllerUtils.formatDurationSeconds(Int(round(self.audioDurationSeconds))))
} else {
audioStatusLabel.text = " "
}
}
public func setAudioIconToPlay() {
let image = UIImage(named:"audio_play_black_large")?.withRenderingMode(.alwaysTemplate)
assert(image != nil)
audioPlayButton?.setImage(image, for:.normal)
audioPlayButton?.imageView?.tintColor = UIColor.ows_materialBlue()
}
public func setAudioIconToPause() {
let image = UIImage(named:"audio_pause_black_large")?.withRenderingMode(.alwaysTemplate)
assert(image != nil)
audioPlayButton?.setImage(image, for:.normal)
audioPlayButton?.imageView?.tintColor = UIColor.ows_materialBlue()
}
}

View File

@ -637,6 +637,7 @@ typedef NS_ENUM(NSInteger, MessagesRangeSizeMode) {
[JSQMessagesCollectionViewCell registerMenuAction:saveSelector];
SEL shareSelector = NSSelectorFromString(@"share:");
[JSQMessagesCollectionViewCell registerMenuAction:shareSelector];
[JSQMessagesCollectionViewCell registerMenuAction:[TSMessageAdapter messageMetadataSelector]];
[self initializeCollectionViewLayout];
[self registerCustomMessageNibs];
@ -757,9 +758,11 @@ typedef NS_ENUM(NSInteger, MessagesRangeSizeMode) {
[[UIMenuItem alloc] initWithTitle:NSLocalizedString(@"EDIT_ITEM_SHARE_ACTION",
@"Short name for edit menu item to share contents of media message.")
action:shareSelector],
[[UIMenuItem alloc] initWithTitle:NSLocalizedString(@"EDIT_ITEM_MESSAGE_METADATA_ACTION",
@"Short name for edit menu item to show message metadata.")
action:[TSMessageAdapter messageMetadataSelector]],
];
[((OWSMessagesToolbarContentView *)self.inputToolbar.contentView) ensureSubviews];
[self.view layoutSubviews];
@ -1756,6 +1759,8 @@ typedef NS_ENUM(NSInteger, MessagesRangeSizeMode) {
forItemAtIndexPath:(NSIndexPath *)indexPath
withSender:(id)sender
{
OWSAssert(indexPath);
id<OWSMessageData> messageData = [self messageAtIndexPath:indexPath];
return [messageData canPerformEditingAction:action];
}
@ -1766,7 +1771,19 @@ typedef NS_ENUM(NSInteger, MessagesRangeSizeMode) {
withSender:(id)sender
{
id<OWSMessageData> messageData = [self messageAtIndexPath:indexPath];
[messageData performEditingAction:action];
if (action == [TSMessageAdapter messageMetadataSelector]) {
TSInteraction *interaction = messageData.interaction;
if ([interaction isKindOfClass:[TSIncomingMessage class]] ||
[interaction isKindOfClass:[TSOutgoingMessage class]]) {
TSMessage *message = (TSMessage *)interaction;
MessageMetadataViewController *view = [[MessageMetadataViewController alloc] initWithMessage:message];
[self.navigationController pushViewController:view animated:YES];
} else {
OWSFail(@"%@ Can't show message metadata for message of type: %@", self.tag, [interaction class]);
}
} else {
[messageData performEditingAction:action];
}
}
- (void)collectionView:(UICollectionView *)collectionView
@ -2135,7 +2152,7 @@ typedef NS_ENUM(NSInteger, MessagesRangeSizeMode) {
: NSLocalizedString(@"MESSAGE_STATUS_SENT", @"message footer for sent messages"));
NSAttributedString *result = [[NSAttributedString alloc] initWithString:text];
if ([OWSReadReceiptManager.sharedManager areReadReceiptsEnabled] && outgoingMessage.wasDelivered
&& outgoingMessage.readRecipientIds.count > 0) {
&& outgoingMessage.recipientReadMap.count > 0) {
NSAttributedString *checkmark = [[NSAttributedString alloc]
initWithString:@"\uf00c "
attributes:@{
@ -2177,7 +2194,7 @@ typedef NS_ENUM(NSInteger, MessagesRangeSizeMode) {
}
} else if (message.messageType == TSIncomingMessageAdapter && [self.thread isKindOfClass:[TSGroupThread class]]) {
TSIncomingMessage *incomingMessage = (TSIncomingMessage *)message.interaction;
return [self.contactsManager attributedStringForMessageFooterWithPhoneIdentifier:incomingMessage.authorId];
return [self.contactsManager attributedContactOrProfileNameForPhoneIdentifier:incomingMessage.authorId];
}
return nil;

View File

@ -0,0 +1,364 @@
//
// Copyright (c) 2017 Open Whisper Systems. All rights reserved.
//
import Foundation
import MediaPlayer
class MediaMessageView: UIView, OWSAudioAttachmentPlayerDelegate {
let TAG = "[MediaMessageView]"
// MARK: Properties
let attachment: SignalAttachment
var videoPlayer: MPMoviePlayerController?
var audioPlayer: OWSAudioAttachmentPlayer?
var audioStatusLabel: UILabel?
var audioPlayButton: UIButton?
var isAudioPlayingFlag = false
var isAudioPaused = false
var audioProgressSeconds: CGFloat = 0
var audioDurationSeconds: CGFloat = 0
// MARK: Initializers
@available(*, unavailable, message:"use attachment: constructor instead.")
required init?(coder aDecoder: NSCoder) {
self.attachment = SignalAttachment.empty()
super.init(coder: aDecoder)
owsFail("\(self.TAG) invalid constructor")
createViews()
}
required init(attachment: SignalAttachment) {
assert(!attachment.hasError)
self.attachment = attachment
super.init(frame: CGRect.zero)
createViews()
}
// MARK: View Lifecycle
func viewWillAppear(_ animated: Bool) {
ViewControllerUtils.setAudioIgnoresHardwareMuteSwitch(true)
}
func viewWillDisappear(_ animated: Bool) {
ViewControllerUtils.setAudioIgnoresHardwareMuteSwitch(false)
}
// MARK: - Create Views
private func createViews() {
self.backgroundColor = UIColor.white
if attachment.isAnimatedImage {
createAnimatedPreview()
} else if attachment.isImage {
createImagePreview()
} else if attachment.isVideo {
createVideoPreview()
} else if attachment.isAudio {
createAudioPreview()
} else {
createGenericPreview()
}
}
private func wrapViewsInVerticalStack(subviews: [UIView]) -> UIView {
assert(subviews.count > 0)
let stackView = UIView()
var lastView: UIView?
for subview in subviews {
stackView.addSubview(subview)
subview.autoHCenterInSuperview()
if lastView == nil {
subview.autoPinEdge(toSuperviewEdge:.top)
} else {
subview.autoPinEdge(.top, to:.bottom, of:lastView!, withOffset:10)
}
lastView = subview
}
lastView?.autoPinEdge(toSuperviewEdge:.bottom)
return stackView
}
private func createAudioPreview() {
guard let dataUrl = attachment.dataUrl else {
createGenericPreview()
return
}
audioPlayer = OWSAudioAttachmentPlayer(mediaUrl: dataUrl, delegate: self)
var subviews = [UIView]()
let audioPlayButton = UIButton()
self.audioPlayButton = audioPlayButton
setAudioIconToPlay()
audioPlayButton.imageView?.layer.minificationFilter = kCAFilterTrilinear
audioPlayButton.imageView?.layer.magnificationFilter = kCAFilterTrilinear
audioPlayButton.addTarget(self, action:#selector(audioPlayButtonPressed), for:.touchUpInside)
let buttonSize = createHeroViewSize()
audioPlayButton.autoSetDimension(.width, toSize:buttonSize)
audioPlayButton.autoSetDimension(.height, toSize:buttonSize)
subviews.append(audioPlayButton)
let fileNameLabel = createFileNameLabel()
if let fileNameLabel = fileNameLabel {
subviews.append(fileNameLabel)
}
let fileSizeLabel = createFileSizeLabel()
subviews.append(fileSizeLabel)
let audioStatusLabel = createAudioStatusLabel()
self.audioStatusLabel = audioStatusLabel
updateAudioStatusLabel()
subviews.append(audioStatusLabel)
let stackView = wrapViewsInVerticalStack(subviews:subviews)
self.addSubview(stackView)
fileNameLabel?.autoPinWidthToSuperview(withMargin: 32)
stackView.autoPinWidthToSuperview()
stackView.autoVCenterInSuperview()
}
private func createAnimatedPreview() {
guard attachment.isValidImage else {
return
}
let data = attachment.data
// Use Flipboard FLAnimatedImage library to display gifs
guard let animatedImage = FLAnimatedImage(gifData:data) else {
createGenericPreview()
return
}
let animatedImageView = FLAnimatedImageView()
animatedImageView.animatedImage = animatedImage
animatedImageView.contentMode = .scaleAspectFit
self.addSubview(animatedImageView)
animatedImageView.autoPinWidthToSuperview()
animatedImageView.autoPinHeightToSuperview()
}
private func createImagePreview() {
var image = attachment.image
if image == nil {
image = UIImage(data:attachment.data)
}
guard image != nil else {
createGenericPreview()
return
}
let imageView = UIImageView(image:image)
imageView.layer.minificationFilter = kCAFilterTrilinear
imageView.layer.magnificationFilter = kCAFilterTrilinear
imageView.contentMode = .scaleAspectFit
self.addSubview(imageView)
imageView.autoPinWidthToSuperview()
imageView.autoPinHeightToSuperview()
}
private func createVideoPreview() {
guard let dataUrl = attachment.dataUrl else {
createGenericPreview()
return
}
guard let videoPlayer = MPMoviePlayerController(contentURL:dataUrl) else {
createGenericPreview()
return
}
videoPlayer.prepareToPlay()
videoPlayer.controlStyle = .default
videoPlayer.shouldAutoplay = false
self.addSubview(videoPlayer.view)
self.videoPlayer = videoPlayer
videoPlayer.view.autoPinWidthToSuperview()
videoPlayer.view.autoPinHeightToSuperview()
}
private func createGenericPreview() {
var subviews = [UIView]()
let imageView = createHeroImageView(imageName: "file-thin-black-filled-large")
subviews.append(imageView)
let fileNameLabel = createFileNameLabel()
if let fileNameLabel = fileNameLabel {
subviews.append(fileNameLabel)
}
let fileSizeLabel = createFileSizeLabel()
subviews.append(fileSizeLabel)
let stackView = wrapViewsInVerticalStack(subviews:subviews)
self.addSubview(stackView)
fileNameLabel?.autoPinWidthToSuperview(withMargin: 32)
stackView.autoPinWidthToSuperview()
stackView.autoVCenterInSuperview()
}
private func createHeroViewSize() -> CGFloat {
return ScaleFromIPhone5To7Plus(175, 225)
}
private func createHeroImageView(imageName: String) -> UIView {
let imageSize = createHeroViewSize()
let image = UIImage(named:imageName)
assert(image != nil)
let imageView = UIImageView(image:image)
imageView.layer.minificationFilter = kCAFilterTrilinear
imageView.layer.magnificationFilter = kCAFilterTrilinear
imageView.layer.shadowColor = UIColor.black.cgColor
let shadowScaling = 5.0
imageView.layer.shadowRadius = CGFloat(2.0 * shadowScaling)
imageView.layer.shadowOpacity = 0.25
imageView.layer.shadowOffset = CGSize(width: 0.75 * shadowScaling, height: 0.75 * shadowScaling)
imageView.autoSetDimension(.width, toSize:imageSize)
imageView.autoSetDimension(.height, toSize:imageSize)
return imageView
}
private func labelFont() -> UIFont {
return UIFont.ows_regularFont(withSize:ScaleFromIPhone5To7Plus(18, 24))
}
private func formattedFileExtension() -> String? {
guard let fileExtension = attachment.fileExtension else {
return nil
}
return String(format:NSLocalizedString("ATTACHMENT_APPROVAL_FILE_EXTENSION_FORMAT",
comment: "Format string for file extension label in call interstitial view"),
fileExtension.uppercased())
}
public func formattedFileName() -> String? {
guard let sourceFilename = attachment.sourceFilename else {
return nil
}
let filename = sourceFilename.trimmingCharacters(in: CharacterSet.whitespacesAndNewlines)
guard filename.characters.count > 0 else {
return nil
}
return filename
}
private func createFileNameLabel() -> UIView? {
let filename = formattedFileName() ?? formattedFileExtension()
guard filename != nil else {
return nil
}
let label = UILabel()
label.text = filename
label.textColor = UIColor.ows_materialBlue()
label.font = labelFont()
label.textAlignment = .center
label.lineBreakMode = .byTruncatingMiddle
return label
}
private func createFileSizeLabel() -> UIView {
let label = UILabel()
let fileSize = attachment.dataLength
label.text = String(format:NSLocalizedString("ATTACHMENT_APPROVAL_FILE_SIZE_FORMAT",
comment: "Format string for file size label in call interstitial view. Embeds: {{file size as 'N mb' or 'N kb'}}."),
ViewControllerUtils.formatFileSize(UInt(fileSize)))
label.textColor = UIColor.ows_materialBlue()
label.font = labelFont()
label.textAlignment = .center
return label
}
private func createAudioStatusLabel() -> UILabel {
let label = UILabel()
label.textColor = UIColor.ows_materialBlue()
label.font = labelFont()
label.textAlignment = .center
return label
}
// MARK: - Event Handlers
func audioPlayButtonPressed(sender: UIButton) {
audioPlayer?.togglePlayState()
}
// MARK: - OWSAudioAttachmentPlayerDelegate
public func isAudioPlaying() -> Bool {
return isAudioPlayingFlag
}
public func setIsAudioPlaying(_ isAudioPlaying: Bool) {
isAudioPlayingFlag = isAudioPlaying
updateAudioStatusLabel()
}
public func isPaused() -> Bool {
return isAudioPaused
}
public func setIsPaused(_ isPaused: Bool) {
isAudioPaused = isPaused
}
public func setAudioProgress(_ progress: CGFloat, duration: CGFloat) {
audioProgressSeconds = progress
audioDurationSeconds = duration
updateAudioStatusLabel()
}
private func updateAudioStatusLabel() {
guard let audioStatusLabel = self.audioStatusLabel else {
owsFail("Missing audio status label")
return
}
if isAudioPlayingFlag && audioProgressSeconds > 0 && audioDurationSeconds > 0 {
audioStatusLabel.text = String(format:"%@ / %@",
ViewControllerUtils.formatDurationSeconds(Int(round(self.audioProgressSeconds))),
ViewControllerUtils.formatDurationSeconds(Int(round(self.audioDurationSeconds))))
} else {
audioStatusLabel.text = " "
}
}
public func setAudioIconToPlay() {
let image = UIImage(named:"audio_play_black_large")?.withRenderingMode(.alwaysTemplate)
assert(image != nil)
audioPlayButton?.setImage(image, for:.normal)
audioPlayButton?.imageView?.tintColor = UIColor.ows_materialBlue()
}
public func setAudioIconToPause() {
let image = UIImage(named:"audio_pause_black_large")?.withRenderingMode(.alwaysTemplate)
assert(image != nil)
audioPlayButton?.setImage(image, for:.normal)
audioPlayButton?.imageView?.tintColor = UIColor.ows_materialBlue()
}
}
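MediaMessageView packages the attachment-preview logic that previously lived inline in AttachmentApprovalViewController: it builds an image, animated-image, video, audio, or generic preview from the SignalAttachment, and it toggles the hardware-mute-switch override in viewWillAppear/viewWillDisappear. A host view controller embeds it, pins its edges, and forwards the appearance callbacks. The sketch below shows that pattern, assuming a valid attachment; AttachmentPreviewViewController is a hypothetical host used only for illustration (AttachmentApprovalViewController and MessageMetadataViewController are the real hosts in this commit).

import UIKit

class AttachmentPreviewViewController: UIViewController {
    let mediaMessageView: MediaMessageView

    init(attachment: SignalAttachment) {
        assert(!attachment.hasError)
        mediaMessageView = MediaMessageView(attachment: attachment)
        super.init(nibName: nil, bundle: nil)
    }

    required init?(coder aDecoder: NSCoder) {
        fatalError("init(coder:) has not been implemented")
    }

    override func viewDidLoad() {
        super.viewDidLoad()
        // Embed the preview and pin it with PureLayout, as the approval view does.
        view.addSubview(mediaMessageView)
        mediaMessageView.autoPinWidthToSuperview(withMargin: 20)
        mediaMessageView.autoPin(toTopLayoutGuideOf: self, withInset: 30)
        mediaMessageView.autoPinEdge(toSuperviewEdge: .bottom)
    }

    // Forward appearance callbacks so MediaMessageView can toggle the
    // hardware-mute-switch override around audio playback.
    override func viewWillAppear(_ animated: Bool) {
        super.viewWillAppear(animated)
        mediaMessageView.viewWillAppear(animated)
    }

    override func viewWillDisappear(_ animated: Bool) {
        super.viewWillDisappear(animated)
        mediaMessageView.viewWillDisappear(animated)
    }
}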

View File

@ -0,0 +1,393 @@
//
// Copyright (c) 2017 Open Whisper Systems. All rights reserved.
//
import Foundation
class MessageMetadataViewController: OWSViewController {
let TAG = "[MessageMetadataViewController]"
// MARK: Properties
let message: TSMessage
var mediaMessageView: MediaMessageView?
var scrollView: UIScrollView?
var contentView: UIView?
var dataSource: DataSource?
var attachmentStream: TSAttachmentStream?
var messageBody: String?
// MARK: Initializers
@available(*, unavailable, message:"use message: constructor instead.")
required init?(coder aDecoder: NSCoder) {
self.message = TSMessage()
super.init(coder: aDecoder)
owsFail("\(self.TAG) invalid constructor")
}
required init(message: TSMessage) {
self.message = message
super.init(nibName: nil, bundle: nil)
}
// MARK: View Lifecycle
override func viewDidLoad() {
super.viewDidLoad()
self.navigationItem.title = NSLocalizedString("MESSAGE_METADATA_VIEW_TITLE",
comment: "Title for the 'message metadata' view.")
createViews()
}
override func viewWillAppear(_ animated: Bool) {
super.viewWillAppear(animated)
mediaMessageView?.viewWillAppear(animated)
}
override func viewWillDisappear(_ animated: Bool) {
super.viewWillDisappear(animated)
mediaMessageView?.viewWillDisappear(animated)
}
// MARK: - Create Views
private func createViews() {
view.backgroundColor = UIColor.white
let scrollView = UIScrollView()
self.scrollView = scrollView
view.addSubview(scrollView)
scrollView.autoPinWidthToSuperview(withMargin:0)
scrollView.autoPin(toTopLayoutGuideOf: self, withInset:0)
let footer = UIToolbar()
footer.barTintColor = UIColor.ows_materialBlue()
view.addSubview(footer)
footer.autoPinWidthToSuperview(withMargin:0)
footer.autoPinEdge(.top, to:.bottom, of:scrollView)
footer.autoPin(toBottomLayoutGuideOf: self, withInset:0)
footer.items = [
UIBarButtonItem(barButtonSystemItem: .flexibleSpace, target: nil, action: nil),
UIBarButtonItem(barButtonSystemItem: .action, target: self, action: #selector(shareButtonPressed)),
UIBarButtonItem(barButtonSystemItem: .flexibleSpace, target: nil, action: nil)
]
// See notes on how to use UIScrollView with iOS Auto Layout:
//
// https://developer.apple.com/library/content/releasenotes/General/RN-iOSSDK-6_0/
let contentView = UIView.container()
self.contentView = contentView
scrollView.addSubview(contentView)
contentView.autoPinLeadingToSuperView()
contentView.autoPinTrailingToSuperView()
contentView.autoPinEdge(toSuperviewEdge:.top)
contentView.autoPinEdge(toSuperviewEdge:.bottom)
var rows = [UIView]()
let contactsManager = Environment.getCurrent().contactsManager!
// Group?
let thread = message.thread
if let groupThread = thread as? TSGroupThread {
var groupName = groupThread.name()
if groupName.characters.count < 1 {
groupName = NSLocalizedString("NEW_GROUP_DEFAULT_TITLE", comment: "")
}
rows.append(valueRow(name: NSLocalizedString("MESSAGE_METADATA_VIEW_GROUP_NAME",
comment: "Label for the 'group name' field of the 'message metadata' view."),
value:groupName))
}
// Sender?
if let incomingMessage = message as? TSIncomingMessage {
let senderId = incomingMessage.authorId
let senderName = contactsManager.contactOrProfileName(forPhoneIdentifier:senderId)
rows.append(valueRow(name: NSLocalizedString("MESSAGE_METADATA_VIEW_SENDER",
comment: "Label for the 'sender' field of the 'message metadata' view."),
value:senderName))
}
// Recipient(s)
if let outgoingMessage = message as? TSOutgoingMessage {
for recipientId in thread.recipientIdentifiers {
let recipientName = contactsManager.contactOrProfileName(forPhoneIdentifier:recipientId)
let recipientStatus = self.recipientStatus(forOutgoingMessage: outgoingMessage, recipientId: recipientId)
rows.append(valueRow(name: NSLocalizedString("MESSAGE_METADATA_VIEW_RECIPIENT",
comment: "Label for the 'recipient' field of the 'message metadata' view."),
value:recipientName,
subtitle:recipientStatus))
}
}
let dateFormatter = DateFormatter()
dateFormatter.dateStyle = .short
dateFormatter.timeStyle = .long
let sentDate = NSDate.ows_date(withMillisecondsSince1970:message.timestamp)
rows.append(valueRow(name: NSLocalizedString("MESSAGE_METADATA_VIEW_SENT_DATE_TIME",
comment: "Label for the 'sent date & time' field of the 'message metadata' view."),
value:dateFormatter.string(from:sentDate)))
if let _ = message as? TSIncomingMessage {
let receivedDate = message.dateForSorting()
rows.append(valueRow(name: NSLocalizedString("MESSAGE_METADATA_VIEW_RECEIVED_DATE_TIME",
comment: "Label for the 'received date & time' field of the 'message metadata' view."),
value:dateFormatter.string(from:receivedDate)))
}
// TODO: We could include the "disappearing messages" state here.
if message.attachmentIds.count > 0 {
rows += addAttachmentRows()
} else if let messageBody = message.body {
// TODO: We should also display "oversize text messages" in a
// similar way.
if messageBody.characters.count > 0 {
self.messageBody = messageBody
rows.append(valueRow(name: NSLocalizedString("MESSAGE_METADATA_VIEW_BODY_LABEL",
comment: "Label for the message body in the 'message metadata' view."),
value:""))
let bodyLabel = UILabel()
bodyLabel.textColor = UIColor.black
bodyLabel.font = UIFont.ows_regularFont(withSize:14)
bodyLabel.text = messageBody
bodyLabel.numberOfLines = 0
bodyLabel.lineBreakMode = .byWordWrapping
rows.append(bodyLabel)
} else {
// Neither attachment nor body.
rows.append(valueRow(name: NSLocalizedString("MESSAGE_METADATA_VIEW_NO_ATTACHMENT_OR_BODY",
comment: "Label for messages without a body or attachment in the 'message metadata' view."),
value:""))
}
}
var lastRow: UIView?
for row in rows {
contentView.addSubview(row)
row.autoPinLeadingToSuperView()
row.autoPinTrailingToSuperView()
if let lastRow = lastRow {
row.autoPinEdge(.top, to:.bottom, of:lastRow, withOffset:5)
} else {
row.autoPinEdge(toSuperviewEdge:.top, withInset:20)
}
lastRow = row
}
if let lastRow = lastRow {
lastRow.autoPinEdge(toSuperviewEdge:.bottom, withInset:20)
}
if let mediaMessageView = mediaMessageView {
mediaMessageView.autoPinToSquareAspectRatio()
}
// TODO: We might want to add a footer with share/save/copy/etc.
}
private func addAttachmentRows() -> [UIView] {
var rows = [UIView]()
guard let attachmentId = message.attachmentIds[0] as? String else {
owsFail("Invalid attachment")
return rows
}
guard let attachment = TSAttachment.fetch(uniqueId:attachmentId) else {
owsFail("Missing attachment")
return rows
}
let contentType = attachment.contentType
rows.append(valueRow(name: NSLocalizedString("MESSAGE_METADATA_VIEW_ATTACHMENT_MIME_TYPE",
comment: "Label for the MIME type of attachments in the 'message metadata' view."),
value:contentType))
if let sourceFilename = attachment.sourceFilename {
rows.append(valueRow(name: NSLocalizedString("MESSAGE_METADATA_VIEW_SOURCE_FILENAME",
comment: "Label for the original filename of any attachment in the 'message metadata' view."),
value:sourceFilename))
}
guard let attachmentStream = attachment as? TSAttachmentStream else {
rows.append(valueRow(name: NSLocalizedString("MESSAGE_METADATA_VIEW_ATTACHMENT_NOT_YET_DOWNLOADED",
comment: "Label for 'not yet downloaded' attachments in the 'message metadata' view."),
value:""))
return rows
}
self.attachmentStream = attachmentStream
if let filePath = attachmentStream.filePath() {
dataSource = DataSourcePath.dataSource(withFilePath:filePath)
}
guard let dataSource = dataSource else {
rows.append(valueRow(name: NSLocalizedString("MESSAGE_METADATA_VIEW_ATTACHMENT_MISSING_FILE",
comment: "Label for 'missing' attachments in the 'message metadata' view."),
value:""))
return rows
}
let fileSize = dataSource.dataLength()
rows.append(valueRow(name: NSLocalizedString("MESSAGE_METADATA_VIEW_ATTACHMENT_FILE_SIZE",
comment: "Label for file size of attachments in the 'message metadata' view."),
value:ViewControllerUtils.formatFileSize(UInt(fileSize))))
if let dataUTI = MIMETypeUtil.utiType(forMIMEType:contentType) {
if attachment.isVoiceMessage() {
rows.append(valueRow(name: NSLocalizedString("MESSAGE_METADATA_VIEW_VOICE_MESSAGE",
comment: "Label for voice messages of the 'message metadata' view."),
value:""))
} else {
rows.append(valueRow(name: NSLocalizedString("MESSAGE_METADATA_VIEW_MEDIA",
comment: "Label for media messages of the 'message metadata' view."),
value:""))
}
let attachment = SignalAttachment(dataSource : dataSource, dataUTI: dataUTI)
let mediaMessageView = MediaMessageView(attachment:attachment)
self.mediaMessageView = mediaMessageView
rows.append(mediaMessageView)
}
return rows
}
private func recipientStatus(forOutgoingMessage message: TSOutgoingMessage, recipientId: String) -> String {
let dateFormatter = DateFormatter()
dateFormatter.dateStyle = .short
dateFormatter.timeStyle = .long
let recipientReadMap = message.recipientReadMap
if let readTimestamp = recipientReadMap[recipientId] {
assert(message.messageState == .sentToService)
let readDate = NSDate.ows_date(withMillisecondsSince1970:readTimestamp.uint64Value)
return String(format:NSLocalizedString("MESSAGE_STATUS_READ_WITH_TIMESTAMP_FORMAT",
comment: "message status for messages read by the recipient. Embeds: {{the date and time the message was read}}."),
dateFormatter.string(from:readDate))
}
// TODO: We don't currently track delivery state on a per-recipient basis.
// We should.
if message.wasDelivered {
return NSLocalizedString("MESSAGE_STATUS_DELIVERED",
comment:"message status for message delivered to their recipient.")
}
if message.messageState == .unsent {
return NSLocalizedString("MESSAGE_STATUS_FAILED", comment:"message footer for failed messages")
} else if (message.messageState == .sentToService ||
message.wasSent(toRecipient:recipientId)) {
return
NSLocalizedString("MESSAGE_STATUS_SENT",
comment:"message footer for sent messages")
} else if message.hasAttachments() {
return NSLocalizedString("MESSAGE_STATUS_UPLOADING",
comment:"message footer while attachment is uploading")
} else {
assert(message.messageState == .attemptingOut)
return NSLocalizedString("MESSAGE_STATUS_SENDING",
comment:"message status while message is sending.")
}
}
private func nameLabel(text: String) -> UILabel {
let label = UILabel()
label.textColor = UIColor.black
label.font = UIFont.ows_mediumFont(withSize:14)
label.text = text
label.setContentHuggingHorizontalHigh()
return label
}
private func valueLabel(text: String) -> UILabel {
let label = UILabel()
label.textColor = UIColor.black
label.font = UIFont.ows_regularFont(withSize:14)
label.text = text
label.setContentHuggingHorizontalLow()
return label
}
private func valueRow(name: String, value: String, subtitle: String = "") -> UIView {
let row = UIView.container()
let nameLabel = self.nameLabel(text:name)
let valueLabel = self.valueLabel(text:value)
row.addSubview(nameLabel)
row.addSubview(valueLabel)
nameLabel.autoPinLeadingToSuperView()
valueLabel.autoPinTrailingToSuperView()
valueLabel.autoPinLeading(toTrailingOf:nameLabel, margin: 10)
nameLabel.autoPinEdge(toSuperviewEdge:.top)
valueLabel.autoPinEdge(toSuperviewEdge:.top)
if subtitle.characters.count > 0 {
let subtitleLabel = self.valueLabel(text:subtitle)
subtitleLabel.textColor = UIColor.ows_darkGray()
row.addSubview(subtitleLabel)
subtitleLabel.autoPinTrailingToSuperView()
subtitleLabel.autoPinLeading(toTrailingOf:nameLabel, margin: 10)
subtitleLabel.autoPinEdge(.top, to:.bottom, of:valueLabel, withOffset:1)
subtitleLabel.autoPinEdge(toSuperviewEdge:.bottom)
} else if value.characters.count > 0 {
valueLabel.autoPinEdge(toSuperviewEdge:.bottom)
} else {
nameLabel.autoPinEdge(toSuperviewEdge:.bottom)
}
return row
}
// MARK: - Actions
func shareButtonPressed() {
if let messageBody = messageBody {
UIPasteboard.general.string = messageBody
return
}
guard let attachmentStream = attachmentStream else {
Logger.error("\(TAG) Message has neither attachment nor message body.")
return
}
AttachmentSharing.showShareUI(forAttachment:attachmentStream)
}
func copyToPasteboard() {
if let messageBody = messageBody {
UIPasteboard.general.string = messageBody
return
}
guard let attachmentStream = attachmentStream else {
Logger.error("\(TAG) Message has neither attachment nor message body.")
return
}
guard let utiType = MIMETypeUtil.utiType(forMIMEType:attachmentStream.contentType) else {
Logger.error("\(TAG) Attachment has invalid MIME type: \(attachmentStream.contentType).")
return
}
guard let dataSource = dataSource else {
Logger.error("\(TAG) Attachment missing data source.")
return
}
let data = dataSource.data()
UIPasteboard.general.setData(data, forPasteboardType:utiType)
}
}

View File

@ -9,9 +9,9 @@ NS_ASSUME_NONNULL_BEGIN
extern NSString *const OWSContactsManagerSignalAccountsDidChangeNotification;
@class UIFont;
@class SignalAccount;
@class ImageCache;
@class SignalAccount;
@class UIFont;
/**
* Get latest Signal contacts, and be notified when they change.
@ -74,7 +74,8 @@ extern NSString *const OWSContactsManagerSignalAccountsDidChangeNotification;
- (nullable UIImage *)imageForPhoneIdentifier:(nullable NSString *)identifier;
- (NSAttributedString *)formattedDisplayNameForSignalAccount:(SignalAccount *)signalAccount font:(UIFont *_Nonnull)font;
- (NSAttributedString *)formattedFullNameForRecipientId:(NSString *)recipientId font:(UIFont *)font;
- (NSAttributedString *)attributedStringForMessageFooterWithPhoneIdentifier:(NSString *)recipientId;
- (NSString *)contactOrProfileNameForPhoneIdentifier:(NSString *)recipientId;
- (NSAttributedString *)attributedContactOrProfileNameForPhoneIdentifier:(NSString *)recipientId;
- (NSAttributedString *)attributedStringForConversationTitleWithPhoneIdentifier:(NSString *)recipientId
primaryFont:(UIFont *)primaryFont
secondaryFont:(UIFont *)secondaryFont;

View File

@ -560,12 +560,12 @@ NSString *const kTSStorageManager_AccountLastNames = @"kTSStorageManager_Account
return formattedName;
}
- (NSAttributedString *)attributedStringForMessageFooterWithPhoneIdentifier:(NSString *)recipientId
- (NSString *)contactOrProfileNameForPhoneIdentifier:(NSString *)recipientId
{
// Prefer a saved name from system contacts, if available
NSString *_Nullable savedContactName = [self cachedDisplayNameForRecipientId:recipientId];
if (savedContactName.length > 0) {
return [[NSAttributedString alloc] initWithString:savedContactName];
return savedContactName;
}
NSString *_Nullable profileName = [self.profileManager profileNameForRecipientId:recipientId];
@ -577,11 +577,16 @@ NSString *const kTSStorageManager_AccountLastNames = @"kTSStorageManager_Account
NSString *numberAndProfileName =
[NSString stringWithFormat:numberAndProfileNameFormat, recipientId, profileName];
return [[NSAttributedString alloc] initWithString:numberAndProfileName];
return numberAndProfileName;
}
// else fall back to recipient id
return [[NSAttributedString alloc] initWithString:recipientId];
return recipientId;
}
- (NSAttributedString *)attributedContactOrProfileNameForPhoneIdentifier:(NSString *)recipientId
{
return [[NSAttributedString alloc] initWithString:[self contactOrProfileNameForPhoneIdentifier:recipientId]];
}
- (NSAttributedString *)attributedStringForConversationTitleWithPhoneIdentifier:(NSString *)recipientId

View File

@ -64,6 +64,7 @@ class ReminderView: UIView {
label.font = UIFont.ows_regularFont(withSize: 14)
container.addSubview(label)
label.numberOfLines = 0
label.lineBreakMode = .byWordWrapping
label.autoPinEdge(toSuperviewEdge: .top)
label.autoPinEdge(toSuperviewEdge: .left)
label.autoPinEdge(toSuperviewEdge: .bottom)

View File

@ -454,6 +454,9 @@
/* Short name for edit menu item to copy contents of media message. */
"EDIT_ITEM_COPY_ACTION" = "Copy";
/* Short name for edit menu item to show message metadata. */
"EDIT_ITEM_MESSAGE_METADATA_ACTION" = "Info";
/* Short name for edit menu item to save contents of media message. */
"EDIT_ITEM_SAVE_ACTION" = "Save";
@ -781,12 +784,64 @@
/* No comment provided by engineer. */
"MESSAGE_COMPOSEVIEW_TITLE" = "New Message";
/* message footer for delivered messages */
/* Label for file size of attachments in the 'message metadata' view. */
"MESSAGE_METADATA_VIEW_ATTACHMENT_FILE_SIZE" = "File Size";
/* Label for the MIME type of attachments in the 'message metadata' view. */
"MESSAGE_METADATA_VIEW_ATTACHMENT_MIME_TYPE" = "MIME type";
/* Label for 'missing' attachments in the 'message metadata' view. */
"MESSAGE_METADATA_VIEW_ATTACHMENT_MISSING_FILE" = "Missing Attachment";
/* Label for 'not yet downloaded' attachments in the 'message metadata' view. */
"MESSAGE_METADATA_VIEW_ATTACHMENT_NOT_YET_DOWNLOADED" = "Not yet downloaded";
/* Label for the message body in the 'message metadata' view. */
"MESSAGE_METADATA_VIEW_BODY_LABEL" = "Message";
/* Label for the 'group name' field of the 'message metadata' view. */
"MESSAGE_METADATA_VIEW_GROUP_NAME" = "Group";
/* Label for media messages of the 'message metadata' view. */
"MESSAGE_METADATA_VIEW_MEDIA" = "Media";
/* Label for messages without a body or attachment in the 'message metadata' view. */
"MESSAGE_METADATA_VIEW_NO_ATTACHMENT_OR_BODY" = "Message has no content or attachment.";
/* Label for the 'received date & time' field of the 'message metadata' view. */
"MESSAGE_METADATA_VIEW_RECEIVED_DATE_TIME" = "Received";
/* Label for the 'recipient' field of the 'message metadata' view. */
"MESSAGE_METADATA_VIEW_RECIPIENT" = "Recipient";
/* Label for the 'sender' field of the 'message metadata' view. */
"MESSAGE_METADATA_VIEW_SENDER" = "Sender";
/* Label for the 'sent date & time' field of the 'message metadata' view. */
"MESSAGE_METADATA_VIEW_SENT_DATE_TIME" = "Sent";
/* Label for the original filename of any attachment in the 'message metadata' view. */
"MESSAGE_METADATA_VIEW_SOURCE_FILENAME" = "Filename";
/* Title for the 'message metadata' view. */
"MESSAGE_METADATA_VIEW_TITLE" = "Message";
/* Label for voice messages of the 'message metadata' view. */
"MESSAGE_METADATA_VIEW_VOICE_MESSAGE" = "Voice Note";
/* message footer for delivered messages
message status for message delivered to their recipient. */
"MESSAGE_STATUS_DELIVERED" = "Delivered";
/* message footer for failed messages */
"MESSAGE_STATUS_FAILED" = "Sending failed. Tap for info.";
/* message status for messages read by the recipient. Embeds: {{the date and time the message was read}}. */
"MESSAGE_STATUS_READ_WITH_TIMESTAMP_FORMAT" = "Read %@";
/* message status while message is sending. */
"MESSAGE_STATUS_SENDING" = "Sending...";
/* message footer for sent messages */
"MESSAGE_STATUS_SENT" = "Sent";

View File

@ -104,8 +104,8 @@ typedef NS_ENUM(NSInteger, TSGroupMetaMessage) {
// This property won't be accurate for legacy messages.
@property (atomic, readonly) BOOL isFromLinkedDevice;
// The recipient ids of the recipients who have read the message.
@property (atomic, readonly) NSSet<NSString *> *readRecipientIds;
// Map of "recipient id"-to-"read time" of the recipients who have read the message.
@property (atomic, readonly) NSDictionary<NSString *, NSNumber *> *recipientReadMap;
/**
* Signal Identifier (e.g. e164 number) or nil if in a group thread.
@ -176,7 +176,9 @@ typedef NS_ENUM(NSInteger, TSGroupMetaMessage) {
- (void)updateWithWasSentFromLinkedDeviceWithTransaction:(YapDatabaseReadWriteTransaction *)transaction;
- (void)updateWithSingleGroupRecipient:(NSString *)singleGroupRecipient
transaction:(YapDatabaseReadWriteTransaction *)transaction;
- (void)updateWithReadRecipientId:(NSString *)recipientId transaction:(YapDatabaseReadWriteTransaction *)transaction;
- (void)updateWithReadRecipientId:(NSString *)recipientId
readTimestamp:(uint64_t)readTimestamp
transaction:(YapDatabaseReadWriteTransaction *)transaction;
#pragma mark - Sent Recipients

View File

@ -39,7 +39,7 @@ NSString *const kTSOutgoingMessageSentRecipientAll = @"kTSOutgoingMessageSentRec
@property (atomic) TSGroupMetaMessage groupMetaMessage;
@property (atomic) NSSet<NSString *> *readRecipientIds;
@property (atomic) NSDictionary<NSString *, NSNumber *> *recipientReadMap;
@end
@ -411,18 +411,20 @@ NSString *const kTSOutgoingMessageSentRecipientAll = @"kTSOutgoingMessageSentRec
}];
}
- (void)updateWithReadRecipientId:(NSString *)recipientId transaction:(YapDatabaseReadWriteTransaction *)transaction
- (void)updateWithReadRecipientId:(NSString *)recipientId
readTimestamp:(uint64_t)readTimestamp
transaction:(YapDatabaseReadWriteTransaction *)transaction
{
OWSAssert(recipientId.length > 0);
OWSAssert(transaction);
[self applyChangeToSelfAndLatestOutgoingMessage:transaction
changeBlock:^(TSOutgoingMessage *message) {
NSMutableSet<NSString *> *readRecipientIds
= (message.readRecipientIds ? [message.readRecipientIds mutableCopy]
: [NSMutableSet new]);
[readRecipientIds addObject:recipientId];
message.readRecipientIds = readRecipientIds;
NSMutableDictionary<NSString *, NSNumber *> *recipientReadMap
= (message.recipientReadMap ? [message.recipientReadMap mutableCopy]
: [NSMutableDictionary new]);
recipientReadMap[recipientId] = @(readTimestamp);
message.recipientReadMap = recipientReadMap;
}];
}
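With this change, read-receipt bookkeeping on TSOutgoingMessage moves from a flat set of recipient ids (readRecipientIds) to a map of recipient id to read timestamp in milliseconds (recipientReadMap), which is what lets the metadata view show a per-recipient "Read <time>" line. Below is a small Swift sketch of consuming that map, closely following MessageMetadataViewController.recipientStatus(forOutgoingMessage:recipientId:) above; the readStatus(for:recipientId:) helper is illustrative only, and the real method also falls back to delivered/sent/failed/sending states when the recipient has not read the message.

import Foundation

// Illustrative consumer of recipientReadMap; values are millisecond timestamps,
// matching NSDate+millisecondTimeStamp.
func readStatus(for message: TSOutgoingMessage, recipientId: String) -> String? {
    guard let readTimestamp = message.recipientReadMap[recipientId] else {
        return nil // not (yet) read by this recipient
    }
    let readDate = NSDate.ows_date(withMillisecondsSince1970: readTimestamp.uint64Value)
    let dateFormatter = DateFormatter()
    dateFormatter.dateStyle = .short
    dateFormatter.timeStyle = .long
    return String(format: NSLocalizedString("MESSAGE_STATUS_READ_WITH_TIMESTAMP_FORMAT",
                                            comment: "message status for messages read by the recipient. Embeds: {{the date and time the message was read}}."),
                  dateFormatter.string(from: readDate))
}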

View File

@ -19,8 +19,9 @@ NS_ASSUME_NONNULL_BEGIN
@interface TSRecipientReadReceipt : TSYapDatabaseObject
@property (nonatomic, readonly) uint64_t timestamp;
@property (nonatomic, readonly) NSSet<NSString *> *recipientIds;
@property (nonatomic, readonly) uint64_t sentTimestamp;
// Map of "recipient id"-to-"read timestamp".
@property (nonatomic, readonly) NSDictionary<NSString *, NSNumber *> *recipientMap;
@end
@ -28,62 +29,70 @@ NS_ASSUME_NONNULL_BEGIN
@implementation TSRecipientReadReceipt
- (instancetype)initWithTimestamp:(uint64_t)timestamp
+ (NSString *)collection
{
OWSAssert(timestamp > 0);
return @"TSRecipientReadReceipt2";
}
self = [super initWithUniqueId:[TSRecipientReadReceipt uniqueIdForTimestamp:timestamp]];
- (instancetype)initWithSentTimestamp:(uint64_t)sentTimestamp
{
OWSAssert(sentTimestamp > 0);
self = [super initWithUniqueId:[TSRecipientReadReceipt uniqueIdForSentTimestamp:sentTimestamp]];
if (self) {
_timestamp = timestamp;
_recipientIds = [NSSet set];
_sentTimestamp = sentTimestamp;
_recipientMap = [NSDictionary new];
}
return self;
}
+ (NSString *)uniqueIdForTimestamp:(uint64_t)timestamp
+ (NSString *)uniqueIdForSentTimestamp:(uint64_t)timestamp
{
return [NSString stringWithFormat:@"%llu", timestamp];
}
- (void)addRecipientId:(NSString *)recipientId
- (void)addRecipientId:(NSString *)recipientId timestamp:(uint64_t)timestamp
{
NSMutableSet<NSString *> *recipientIdsCopy = [self.recipientIds mutableCopy];
[recipientIdsCopy addObject:recipientId];
_recipientIds = [recipientIdsCopy copy];
NSMutableDictionary<NSString *, NSNumber *> *recipientMapCopy = [self.recipientMap mutableCopy];
recipientMapCopy[recipientId] = @(timestamp);
_recipientMap = [recipientMapCopy copy];
}
+ (void)addRecipientId:(NSString *)recipientId
timestamp:(uint64_t)timestamp
sentTimestamp:(uint64_t)sentTimestamp
readTimestamp:(uint64_t)readTimestamp
transaction:(YapDatabaseReadWriteTransaction *)transaction
{
OWSAssert(transaction);
TSRecipientReadReceipt *_Nullable recipientReadReceipt =
[transaction objectForKey:[self uniqueIdForTimestamp:timestamp] inCollection:[self collection]];
[transaction objectForKey:[self uniqueIdForSentTimestamp:sentTimestamp] inCollection:[self collection]];
if (!recipientReadReceipt) {
recipientReadReceipt = [[TSRecipientReadReceipt alloc] initWithTimestamp:timestamp];
recipientReadReceipt = [[TSRecipientReadReceipt alloc] initWithSentTimestamp:sentTimestamp];
}
[recipientReadReceipt addRecipientId:recipientId];
[recipientReadReceipt addRecipientId:recipientId timestamp:readTimestamp];
[recipientReadReceipt saveWithTransaction:transaction];
}
+ (nullable NSSet<NSString *> *)recipientIdsForTimestamp:(uint64_t)timestamp
transaction:(YapDatabaseReadWriteTransaction *)transaction
+ (nullable NSDictionary<NSString *, NSNumber *> *)recipientMapForSentTimestamp:(uint64_t)sentTimestamp
transaction:
(YapDatabaseReadWriteTransaction *)transaction
{
OWSAssert(transaction);
TSRecipientReadReceipt *_Nullable recipientReadReceipt =
[transaction objectForKey:[self uniqueIdForTimestamp:timestamp] inCollection:[self collection]];
return recipientReadReceipt.recipientIds;
[transaction objectForKey:[self uniqueIdForSentTimestamp:sentTimestamp] inCollection:[self collection]];
return recipientReadReceipt.recipientMap;
}
+ (void)removeRecipientIdsForTimestamp:(uint64_t)timestamp transaction:(YapDatabaseReadWriteTransaction *)transaction
+ (void)removeRecipientIdsForTimestamp:(uint64_t)sentTimestamp
transaction:(YapDatabaseReadWriteTransaction *)transaction
{
OWSAssert(transaction);
[transaction removeObjectForKey:[self uniqueIdForTimestamp:timestamp] inCollection:[self collection]];
[transaction removeObjectForKey:[self uniqueIdForSentTimestamp:sentTimestamp] inCollection:[self collection]];
}
@end
@ -376,15 +385,16 @@ NSString *const OWSReadReceiptManagerAreReadReceiptsEnabled = @"areReadReceiptsE
NSString *recipientId = envelope.source;
OWSAssert(recipientId.length > 0);
PBArray *timestamps = receiptMessage.timestamp;
PBArray *sentTimestamps = receiptMessage.timestamp;
UInt64 readTimestamp = envelope.timestamp;
dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
[self.dbConnection readWriteWithBlock:^(YapDatabaseReadWriteTransaction *transaction) {
for (int i = 0; i < timestamps.count; i++) {
UInt64 timestamp = [timestamps uint64AtIndex:i];
for (int i = 0; i < sentTimestamps.count; i++) {
UInt64 sentTimestamp = [sentTimestamps uint64AtIndex:i];
NSArray<TSOutgoingMessage *> *messages
= (NSArray<TSOutgoingMessage *> *)[TSInteraction interactionsWithTimestamp:timestamp
= (NSArray<TSOutgoingMessage *> *)[TSInteraction interactionsWithTimestamp:sentTimestamp
ofClass:[TSOutgoingMessage class]
withTransaction:transaction];
OWSAssert(messages.count <= 1);
@ -392,12 +402,17 @@ NSString *const OWSReadReceiptManagerAreReadReceiptsEnabled = @"areReadReceiptsE
// TODO: We might also need to "mark as read by recipient" any older messages
// from us in that thread. Or maybe this state should hang on the thread?
for (TSOutgoingMessage *message in messages) {
[message updateWithReadRecipientId:recipientId transaction:transaction];
[message updateWithReadRecipientId:recipientId
readTimestamp:readTimestamp
transaction:transaction];
}
} else {
// Persist the read receipts so that we can apply them to outgoing messages
// that we learn about later through sync messages.
[TSRecipientReadReceipt addRecipientId:recipientId timestamp:timestamp transaction:transaction];
[TSRecipientReadReceipt addRecipientId:recipientId
sentTimestamp:sentTimestamp
readTimestamp:readTimestamp
transaction:transaction];
}
}
}];
@ -410,14 +425,18 @@ NSString *const OWSReadReceiptManagerAreReadReceiptsEnabled = @"areReadReceiptsE
OWSAssert(message);
OWSAssert(transaction);
NSSet<NSString *> *_Nullable recipientIds =
[TSRecipientReadReceipt recipientIdsForTimestamp:message.timestamp transaction:transaction];
if (!recipientIds) {
uint64_t sentTimestamp = message.timestamp;
NSDictionary<NSString *, NSNumber *> *recipientMap =
[TSRecipientReadReceipt recipientMapForSentTimestamp:sentTimestamp transaction:transaction];
if (!recipientMap) {
return;
}
OWSAssert(recipientIds.count > 0);
for (NSString *recipientId in recipientIds) {
[message updateWithReadRecipientId:recipientId transaction:transaction];
OWSAssert(recipientMap.count > 0);
for (NSString *recipientId in recipientMap) {
NSNumber *nsReadTimestamp = recipientMap[recipientId];
uint64_t readTimestamp = [nsReadTimestamp unsignedLongLongValue];
[message updateWithReadRecipientId:recipientId readTimestamp:readTimestamp transaction:transaction];
}
[TSRecipientReadReceipt removeRecipientIdsForTimestamp:message.timestamp transaction:transaction];
}

View File

@ -5,6 +5,8 @@
#import "NSDate+OWS.h"
#import <chrono>
NS_ASSUME_NONNULL_BEGIN
@implementation NSDate (millisecondTimeStamp)
+ (uint64_t)ows_millisecondTimeStamp
@ -25,3 +27,5 @@
}
@end
NS_ASSUME_NONNULL_END

View File

@ -0,0 +1,15 @@
//
// Copyright (c) 2017 Open Whisper Systems. All rights reserved.
//
NS_ASSUME_NONNULL_BEGIN
@interface NSDate (millisecondTimeStamp)
+ (uint64_t)ows_millisecondTimeStamp;
+ (NSDate *)ows_dateWithMillisecondsSince1970:(uint64_t)milliseconds;
+ (uint64_t)ows_millisecondsSince1970ForDate:(NSDate *)date;
@end
NS_ASSUME_NONNULL_END