Use ConversationMediaView to simplify media rendering in conversation view cells.

This commit is contained in:
Matthew Chen 2018-11-06 09:20:22 -05:00
parent f2c0a6f7dd
commit 0c76e1c02d
4 changed files with 311 additions and 527 deletions

View File

@ -149,6 +149,7 @@
34843B26214327C9004DED45 /* OWSOrphanDataCleanerTest.m in Sources */ = {isa = PBXBuildFile; fileRef = 34843B25214327C9004DED45 /* OWSOrphanDataCleanerTest.m */; };
34843B2C214FE296004DED45 /* MockEnvironment.m in Sources */ = {isa = PBXBuildFile; fileRef = 34843B2A214FE295004DED45 /* MockEnvironment.m */; };
348570A820F67575004FF32B /* OWSMessageHeaderView.m in Sources */ = {isa = PBXBuildFile; fileRef = 348570A620F67574004FF32B /* OWSMessageHeaderView.m */; };
3488F9362191CC4000E524CC /* ConversationMediaView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 3488F9352191CC4000E524CC /* ConversationMediaView.swift */; };
348BB25D20A0C5530047AEC2 /* ContactShareViewHelper.swift in Sources */ = {isa = PBXBuildFile; fileRef = 348BB25C20A0C5530047AEC2 /* ContactShareViewHelper.swift */; };
3491D9A121022DB7001EF5A1 /* CDSSigningCertificateTest.m in Sources */ = {isa = PBXBuildFile; fileRef = 3491D9A021022DB7001EF5A1 /* CDSSigningCertificateTest.m */; };
3496744D2076768700080B5F /* OWSMessageBubbleView.m in Sources */ = {isa = PBXBuildFile; fileRef = 3496744C2076768700080B5F /* OWSMessageBubbleView.m */; };
@ -793,6 +794,7 @@
34843B2B214FE295004DED45 /* MockEnvironment.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = MockEnvironment.h; sourceTree = "<group>"; };
348570A620F67574004FF32B /* OWSMessageHeaderView.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = OWSMessageHeaderView.m; sourceTree = "<group>"; };
348570A720F67574004FF32B /* OWSMessageHeaderView.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = OWSMessageHeaderView.h; sourceTree = "<group>"; };
3488F9352191CC4000E524CC /* ConversationMediaView.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = ConversationMediaView.swift; sourceTree = "<group>"; };
348BB25C20A0C5530047AEC2 /* ContactShareViewHelper.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = ContactShareViewHelper.swift; sourceTree = "<group>"; };
348F2EAD1F0D21BC00D4ECE0 /* DeviceSleepManager.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = DeviceSleepManager.swift; sourceTree = "<group>"; };
3491D9A021022DB7001EF5A1 /* CDSSigningCertificateTest.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = CDSSigningCertificateTest.m; sourceTree = "<group>"; };
@ -1825,6 +1827,7 @@
children = (
34D1F0BB1F8D108C0066283D /* AttachmentUploadView.h */,
34D1F0BC1F8D108C0066283D /* AttachmentUploadView.m */,
3488F9352191CC4000E524CC /* ConversationMediaView.swift */,
34D1F0961F867BFC0066283D /* ConversationViewCell.h */,
34D1F0971F867BFC0066283D /* ConversationViewCell.m */,
34A8B3502190A40E00218A25 /* MediaGalleryCellView.swift */,
@ -3450,6 +3453,7 @@
458DE9D61DEE3FD00071BB03 /* PeerConnectionClient.swift in Sources */,
45DDA6242090CEB500DE97F8 /* ConversationHeaderView.swift in Sources */,
4CA5F793211E1F06008C2708 /* Toast.swift in Sources */,
3488F9362191CC4000E524CC /* ConversationMediaView.swift in Sources */,
45F32C242057297A00A300D5 /* MessageDetailViewController.swift in Sources */,
34D1F0841F8678AA0066283D /* ConversationInputToolbar.m in Sources */,
457F671B20746193000EABCD /* QuotedReplyPreview.swift in Sources */,

View File

@ -0,0 +1,240 @@
//
// Copyright (c) 2018 Open Whisper Systems. All rights reserved.
//
import Foundation
@objc
public class ConversationMediaView: UIView {

    /// Renders a single media attachment (still image, animated image, or
    /// video thumbnail) inside a conversation cell.
    ///
    /// Media is loaded lazily via `loadMedia()` / `unloadMedia()` so that
    /// cells can be built eagerly but only pay for decoding while visible.
    /// Decoded media is shared across cells through `mediaCache`.

    // Cell-level media cache, keyed by attachment uniqueId.
    private let mediaCache: NSCache<NSString, AnyObject>
    private let attachment: TSAttachment
    // Configured once in createContents(); invoked by loadMedia()/unloadMedia().
    private var loadBlock: (() -> Void)?
    private var unloadBlock: (() -> Void)?
    // Latched on the first synchronous load failure so we don't retry
    // (and re-log) on every cell reuse pass.
    private var didFailToLoad = false

    @objc
    public required init(mediaCache: NSCache<NSString, AnyObject>,
                         attachment: TSAttachment) {
        self.mediaCache = mediaCache
        self.attachment = attachment

        super.init(frame: .zero)

        self.backgroundColor = .white

        createContents()
    }

    @available(*, unavailable, message: "use other init() instead.")
    required public init?(coder aDecoder: NSCoder) {
        notImplemented()
    }

    private func createContents() {
        AssertIsOnMainThread()

        guard let attachmentStream = attachment as? TSAttachmentStream else {
            // TODO: Handle the not-yet-downloaded attachment state.
            owsFailDebug("Missing attachment stream.")
            return
        }
        if attachmentStream.isAnimated {
            configureForAnimatedImage(attachmentStream: attachmentStream)
        } else if attachmentStream.isImage {
            configureForStillImage(attachmentStream: attachmentStream)
        } else if attachmentStream.isVideo {
            configureForVideo(attachmentStream: attachmentStream)
        } else {
            // TODO: Handle generic/audio attachment types.
            owsFailDebug("Attachment has unexpected type.")
        }
    }

    // MARK: - Shared configuration helpers

    /// Applies the styling common to every media image view and pins it to
    /// fill this view. Works for both UIImageView and YYAnimatedImageView
    /// (a UIImageView subclass).
    private func applyMediaViewStyle(_ imageView: UIImageView) {
        // We need to specify a contentMode since the size of the image
        // might not match the aspect ratio of the view.
        imageView.contentMode = .scaleAspectFill
        // Use trilinear filters for better scaling quality at
        // some performance cost.
        imageView.layer.minificationFilter = kCAFilterTrilinear
        imageView.layer.magnificationFilter = kCAFilterTrilinear
        imageView.backgroundColor = .white
        addSubview(imageView)
        imageView.autoPinEdgesToSuperviewEdges()
        // [self addAttachmentUploadViewIfNecessary];
    }

    /// Installs load/unload blocks that populate `imageView` with the
    /// attachment's medium thumbnail, going through the media cache.
    /// Shared by the still-image and video configurations.
    private func configureThumbnailLoading(imageView: UIImageView,
                                           attachmentStream: TSAttachmentStream,
                                           cacheKey: String) {
        loadBlock = { [weak self] in
            guard let strongSelf = self else {
                return
            }
            // Already loaded (or async load already completed); nothing to do.
            if imageView.image != nil {
                return
            }
            let cachedValue = strongSelf.tryToLoadMedia(loadMediaBlock: { () -> AnyObject? in
                // The thumbnail may be produced asynchronously, in which case
                // the success block assigns it directly to the image view.
                return attachmentStream.thumbnailImageMedium(success: { (image) in
                    imageView.image = image
                }, failure: {
                    Logger.error("Could not load thumbnail")
                })
            },
                                                        cacheKey: cacheKey,
                                                        canLoadAsync: true)
            guard let image = cachedValue as? UIImage else {
                return
            }
            imageView.image = image
        }
        unloadBlock = {
            imageView.image = nil
        }
    }

    // MARK: - Per-type configuration

    private func configureForAnimatedImage(attachmentStream: TSAttachmentStream) {
        guard let cacheKey = attachmentStream.uniqueId else {
            owsFailDebug("Attachment stream missing unique ID.")
            return
        }
        let animatedImageView = YYAnimatedImageView()
        applyMediaViewStyle(animatedImageView)
        loadBlock = { [weak self] in
            guard let strongSelf = self else {
                return
            }
            if animatedImageView.image != nil {
                return
            }
            let cachedValue = strongSelf.tryToLoadMedia(loadMediaBlock: { () -> AnyObject? in
                guard let filePath = attachmentStream.originalFilePath else {
                    owsFailDebug("Attachment stream missing original file path.")
                    return nil
                }
                let animatedImage = YYImage(contentsOfFile: filePath)
                return animatedImage
            },
                                                        cacheKey: cacheKey,
                                                        canLoadAsync: true)
            guard let image = cachedValue as? YYImage else {
                return
            }
            animatedImageView.image = image
        }
        unloadBlock = {
            animatedImageView.image = nil
        }
    }

    private func configureForStillImage(attachmentStream: TSAttachmentStream) {
        guard let cacheKey = attachmentStream.uniqueId else {
            owsFailDebug("Attachment stream missing unique ID.")
            return
        }
        let stillImageView = UIImageView()
        applyMediaViewStyle(stillImageView)
        configureThumbnailLoading(imageView: stillImageView,
                                  attachmentStream: attachmentStream,
                                  cacheKey: cacheKey)
    }

    private func configureForVideo(attachmentStream: TSAttachmentStream) {
        guard let cacheKey = attachmentStream.uniqueId else {
            owsFailDebug("Attachment stream missing unique ID.")
            return
        }
        let stillImageView = UIImageView()
        applyMediaViewStyle(stillImageView)

        // TODO: Hide during upload/download.
        let videoPlayIcon = UIImage(named: "play_button")
        let videoPlayButton = UIImageView(image: videoPlayIcon)
        stillImageView.addSubview(videoPlayButton)
        videoPlayButton.autoCenterInSuperview()

        configureThumbnailLoading(imageView: stillImageView,
                                  attachmentStream: attachmentStream,
                                  cacheKey: cacheKey)
    }

    // MARK: - Cache-aware loading

    /// Returns the media for `cacheKey`, consulting the cache first and
    /// falling back to `loadMediaBlock`. Returns nil when the media is being
    /// produced asynchronously (canLoadAsync) or when loading has failed.
    private func tryToLoadMedia(loadMediaBlock: @escaping () -> AnyObject?,
                                cacheKey: String,
                                canLoadAsync: Bool) -> AnyObject? {
        AssertIsOnMainThread()

        guard !didFailToLoad else {
            return nil
        }

        if let media = mediaCache.object(forKey: cacheKey as NSString) {
            Logger.verbose("media cache hit")
            return media
        }
        if let media = loadMediaBlock() {
            Logger.verbose("media cache miss")
            mediaCache.setObject(media, forKey: cacheKey as NSString)
            return media
        }
        guard canLoadAsync else {
            Logger.error("Failed to load media.")
            didFailToLoad = true
            // TODO:
            // [self showAttachmentErrorViewWithMediaView:mediaView];
            return nil
        }
        return nil
    }

    // MARK: - Public API

    /// Loads (or re-loads) the media. Called when the cell becomes visible.
    @objc
    public func loadMedia() {
        AssertIsOnMainThread()

        guard let loadBlock = loadBlock else {
            owsFailDebug("Missing loadBlock")
            return
        }
        loadBlock()
    }

    /// Releases the decoded media. Called when the cell is recycled.
    @objc
    public func unloadMedia() {
        AssertIsOnMainThread()

        guard let unloadBlock = unloadBlock else {
            owsFailDebug("Missing unloadBlock")
            return
        }
        unloadBlock()
    }
}

View File

@ -4,33 +4,22 @@
import Foundation
@objc(OWSMediaGalleryCellViewDelegate)
public protocol MediaGalleryCellViewDelegate: class {
@objc(tryToLoadCellMedia:mediaView:cacheKey:canLoadAsync:)
func tryToLoadCellMedia(loadCellMediaBlock: @escaping () -> Any?,
mediaView: UIView,
cacheKey: String,
canLoadAsync: Bool) -> Any?
}
@objc(OWSMediaGalleryCellView)
public class MediaGalleryCellView: UIView {
private weak var delegate: MediaGalleryCellViewDelegate?
public class MediaGalleryCellView: UIStackView {
private let items: [ConversationMediaGalleryItem]
private let itemViews: [MediaItemView]
private let itemViews: [ConversationMediaView]
private static let kSpacingPts: CGFloat = 2
private static let kMaxItems = 5
@objc
public required init(delegate: MediaGalleryCellViewDelegate,
public required init(mediaCache: NSCache<NSString, AnyObject>,
items: [ConversationMediaGalleryItem],
maxMessageWidth: CGFloat) {
self.delegate = delegate
self.items = items
self.itemViews = MediaGalleryCellView.itemsToDisplay(forItems: items).map {
MediaItemView(delegate: delegate,
item: $0)
ConversationMediaView(mediaCache: mediaCache,
attachment: $0.attachment)
}
super.init(frame: .zero)
@ -54,39 +43,29 @@ public class MediaGalleryCellView: UIView {
case 4:
// Square
let imageSize = (maxMessageWidth - MediaGalleryCellView.kSpacingPts) / 2
for itemView in itemViews {
itemView.autoSetDimensions(to: CGSize(width: imageSize, height: imageSize))
}
let topViews = Array(itemViews[0..<2])
let topStack = UIStackView(arrangedSubviews: topViews)
topStack.axis = .horizontal
topStack.spacing = MediaGalleryCellView.kSpacingPts
addArrangedSubview(newRow(rowViews: topViews,
axis: .horizontal,
viewSize: imageSize))
let bottomViews = Array(itemViews[2..<4])
let bottomStack = UIStackView(arrangedSubviews: bottomViews)
bottomStack.axis = .horizontal
bottomStack.spacing = MediaGalleryCellView.kSpacingPts
addArrangedSubview(newRow(rowViews: bottomViews,
axis: .horizontal,
viewSize: imageSize))
let vStackView = UIStackView(arrangedSubviews: [topStack, bottomStack])
vStackView.axis = .vertical
vStackView.spacing = MediaGalleryCellView.kSpacingPts
addSubview(vStackView)
vStackView.autoPinEdgesToSuperviewEdges()
self.axis = .vertical
self.spacing = MediaGalleryCellView.kSpacingPts
case 2:
// X X
// side-by-side.
let imageSize = (maxMessageWidth - MediaGalleryCellView.kSpacingPts) / 2
autoSet(viewSize: imageSize, ofViews: itemViews)
for itemView in itemViews {
itemView.autoSetDimensions(to: CGSize(width: imageSize, height: imageSize))
addArrangedSubview(itemView)
}
let views = Array(itemViews[0..<2])
let hStackView = UIStackView(arrangedSubviews: views)
hStackView.axis = .horizontal
hStackView.spacing = MediaGalleryCellView.kSpacingPts
addSubview(hStackView)
hStackView.autoPinEdgesToSuperviewEdges()
self.axis = .horizontal
self.spacing = MediaGalleryCellView.kSpacingPts
case 3:
// x
// X
@ -99,21 +78,15 @@ public class MediaGalleryCellView: UIView {
owsFailDebug("Missing view")
return
}
leftItemView.autoSetDimensions(to: CGSize(width: bigImageSize, height: bigImageSize))
autoSet(viewSize: bigImageSize, ofViews: [leftItemView])
addArrangedSubview(leftItemView)
let rightViews = Array(itemViews[1..<3])
for itemView in rightViews {
itemView.autoSetDimensions(to: CGSize(width: smallImageSize, height: smallImageSize))
}
let rightStack = UIStackView(arrangedSubviews: rightViews)
rightStack.axis = .vertical
rightStack.spacing = MediaGalleryCellView.kSpacingPts
let hStackView = UIStackView(arrangedSubviews: [leftItemView, rightStack])
hStackView.axis = .horizontal
hStackView.spacing = MediaGalleryCellView.kSpacingPts
addSubview(hStackView)
hStackView.autoPinEdgesToSuperviewEdges()
addArrangedSubview(newRow(rowViews: rightViews,
axis: .vertical,
viewSize: smallImageSize))
self.axis = .horizontal
self.spacing = MediaGalleryCellView.kSpacingPts
default:
// X X
// xxx
@ -122,29 +95,43 @@ public class MediaGalleryCellView: UIView {
let smallImageSize = (maxMessageWidth - MediaGalleryCellView.kSpacingPts * 2) / 3
let topViews = Array(itemViews[0..<2])
for itemView in topViews {
itemView.autoSetDimensions(to: CGSize(width: bigImageSize, height: bigImageSize))
}
let topStack = UIStackView(arrangedSubviews: topViews)
topStack.axis = .horizontal
topStack.spacing = MediaGalleryCellView.kSpacingPts
addArrangedSubview(newRow(rowViews: topViews,
axis: .horizontal,
viewSize: bigImageSize))
let bottomViews = Array(itemViews[2..<5])
for itemView in bottomViews {
itemView.autoSetDimensions(to: CGSize(width: smallImageSize, height: smallImageSize))
}
let bottomStack = UIStackView(arrangedSubviews: bottomViews)
bottomStack.axis = .horizontal
bottomStack.spacing = MediaGalleryCellView.kSpacingPts
addArrangedSubview(newRow(rowViews: bottomViews,
axis: .horizontal,
viewSize: smallImageSize))
let vStackView = UIStackView(arrangedSubviews: [topStack, bottomStack])
vStackView.axis = .vertical
vStackView.spacing = MediaGalleryCellView.kSpacingPts
addSubview(vStackView)
vStackView.autoPinEdgesToSuperviewEdges()
self.axis = .vertical
self.spacing = MediaGalleryCellView.kSpacingPts
}
}
private func autoSet(viewSize: CGFloat,
ofViews views: [ConversationMediaView]
) {
for itemView in views {
itemView.autoSetDimensions(to: CGSize(width: viewSize, height: viewSize))
}
}
private func newRow(rowViews: [ConversationMediaView],
axis: NSLayoutConstraint.Axis,
viewSize: CGFloat) -> UIStackView {
autoSet(viewSize: viewSize, ofViews: rowViews)
return newRow(rowViews: rowViews, axis: axis)
}
private func newRow(rowViews: [ConversationMediaView],
axis: NSLayoutConstraint.Axis) -> UIStackView {
let stackView = UIStackView(arrangedSubviews: rowViews)
stackView.axis = axis
stackView.spacing = MediaGalleryCellView.kSpacingPts
return stackView
}
@objc
public func loadMedia() {
for itemView in itemViews {
@ -160,7 +147,7 @@ public class MediaGalleryCellView: UIView {
}
@available(*, unavailable, message: "use other init() instead.")
required public init?(coder aDecoder: NSCoder) {
required public init(coder aDecoder: NSCoder) {
notImplemented()
}
@ -205,250 +192,4 @@ public class MediaGalleryCellView: UIView {
return CGSize(width: maxMessageWidth, height: bigImageSize + smallImageSize + kSpacingPts)
}
}
private class MediaItemView: UIView {
private weak var delegate: MediaGalleryCellViewDelegate?
private let item: ConversationMediaGalleryItem
private var loadBlock : (() -> Void)?
private var unloadBlock : (() -> Void)?
required init(delegate: MediaGalleryCellViewDelegate,
item: ConversationMediaGalleryItem) {
self.delegate = delegate
self.item = item
super.init(frame: .zero)
// TODO:
self.backgroundColor = .white
self.backgroundColor = .red
createContents()
}
@available(*, unavailable, message: "use other init() instead.")
required public init?(coder aDecoder: NSCoder) {
notImplemented()
}
private func createContents() {
guard let attachmentStream = item.attachmentStream else {
// TODO: Handle this case.
owsFailDebug("Missing attachment stream.")
return
}
if attachmentStream.isAnimated {
configureForAnimatedImage(attachmentStream: attachmentStream)
} else if attachmentStream.isImage {
configureForStillImage(attachmentStream: attachmentStream)
} else if attachmentStream.isVideo {
configureForVideo(attachmentStream: attachmentStream)
}
}
private func configureForAnimatedImage(attachmentStream: TSAttachmentStream) {
guard let cacheKey = attachmentStream.uniqueId else {
owsFailDebug("Attachment stream missing unique ID.")
return
}
let animatedImageView = YYAnimatedImageView()
// We need to specify a contentMode since the size of the image
// might not match the aspect ratio of the view.
animatedImageView.contentMode = .scaleAspectFill
// Use trilinear filters for better scaling quality at
// some performance cost.
animatedImageView.layer.minificationFilter = kCAFilterTrilinear
animatedImageView.layer.magnificationFilter = kCAFilterTrilinear
animatedImageView.backgroundColor = .white
addSubview(animatedImageView)
animatedImageView.autoPinEdgesToSuperviewEdges()
// [self addAttachmentUploadViewIfNecessary];
loadBlock = { [weak self] in
guard let strongSelf = self else {
return
}
guard let strongDelegate = strongSelf.delegate else {
return
}
if animatedImageView.image != nil {
return
}
let cachedValue = strongDelegate.tryToLoadCellMedia(loadCellMediaBlock: { () -> Any? in
guard let filePath = attachmentStream.originalFilePath else {
owsFailDebug("Attachment stream missing original file path.")
return nil
}
let animatedImage = YYImage(contentsOfFile: filePath)
return animatedImage
},
mediaView: animatedImageView,
cacheKey: cacheKey,
canLoadAsync: true)
guard let image = cachedValue as? YYImage else {
return
}
animatedImageView.image = image
}
unloadBlock = {
animatedImageView.image = nil
}
}
private func configureForStillImage(attachmentStream: TSAttachmentStream) {
guard let cacheKey = attachmentStream.uniqueId else {
owsFailDebug("Attachment stream missing unique ID.")
return
}
let stillImageView = UIImageView()
// We need to specify a contentMode since the size of the image
// might not match the aspect ratio of the view.
stillImageView.contentMode = .scaleAspectFill
// Use trilinear filters for better scaling quality at
// some performance cost.
stillImageView.layer.minificationFilter = kCAFilterTrilinear
stillImageView.layer.magnificationFilter = kCAFilterTrilinear
stillImageView.backgroundColor = .white
addSubview(stillImageView)
stillImageView.autoPinEdgesToSuperviewEdges()
// [self addAttachmentUploadViewIfNecessary];
loadBlock = { [weak self] in
guard let strongSelf = self else {
return
}
guard let strongDelegate = strongSelf.delegate else {
return
}
if stillImageView.image != nil {
return
}
let cachedValue = strongDelegate.tryToLoadCellMedia(loadCellMediaBlock: { () -> Any? in
return attachmentStream.thumbnailImageMedium(success: { (image) in
stillImageView.image = image
}, failure: {
Logger.error("Could not load thumbnail")
})
},
mediaView: stillImageView,
cacheKey: cacheKey,
canLoadAsync: true)
guard let image = cachedValue as? UIImage else {
return
}
stillImageView.image = image
}
unloadBlock = {
stillImageView.image = nil
}
}
private func configureForVideo(attachmentStream: TSAttachmentStream) {
guard let cacheKey = attachmentStream.uniqueId else {
owsFailDebug("Attachment stream missing unique ID.")
return
}
let stillImageView = UIImageView()
// We need to specify a contentMode since the size of the image
// might not match the aspect ratio of the view.
stillImageView.contentMode = .scaleAspectFill
// Use trilinear filters for better scaling quality at
// some performance cost.
stillImageView.layer.minificationFilter = kCAFilterTrilinear
stillImageView.layer.magnificationFilter = kCAFilterTrilinear
stillImageView.backgroundColor = .white
addSubview(stillImageView)
stillImageView.autoPinEdgesToSuperviewEdges()
// TODO: Hide during upload/download.
let videoPlayIcon = UIImage(named: "play_button")
let videoPlayButton = UIImageView(image: videoPlayIcon)
stillImageView.addSubview(videoPlayButton)
videoPlayButton.autoCenterInSuperview()
// [self addAttachmentUploadViewIfNecessary];
loadBlock = { [weak self] in
guard let strongSelf = self else {
return
}
guard let strongDelegate = strongSelf.delegate else {
return
}
if stillImageView.image != nil {
return
}
let cachedValue = strongDelegate.tryToLoadCellMedia(loadCellMediaBlock: { () -> Any? in
return attachmentStream.thumbnailImageMedium(success: { (image) in
stillImageView.image = image
}, failure: {
Logger.error("Could not load thumbnail")
})
},
mediaView: stillImageView,
cacheKey: cacheKey,
canLoadAsync: true)
guard let image = cachedValue as? UIImage else {
return
}
stillImageView.image = image
}
unloadBlock = {
stillImageView.image = nil
}
}
func loadMedia() {
guard let loadBlock = loadBlock else {
owsFailDebug("Missing loadBlock")
return
}
loadBlock()
}
func unloadMedia() {
guard let unloadBlock = unloadBlock else {
owsFailDebug("Missing unloadBlock")
return
}
unloadBlock()
}
private class func itemsToDisplay(forItems items: [ConversationMediaGalleryItem]) -> Int {
let validItemCount = items.filter {
$0.attachmentStream != nil
}.count
return max(1, min(5, validItemCount))
}
@objc
public class func layoutSize(forMaxMessageWidth maxMessageWidth: CGFloat,
items: [ConversationMediaGalleryItem]) -> CGSize {
let itemCount = itemsToDisplay(forItems: items)
switch itemCount {
case 0, 1, 4:
// Square
//
// TODO: What's the correct size here?
return CGSize(width: maxMessageWidth, height: maxMessageWidth)
case 2:
// X X
// side-by-side.
let imageSize = (maxMessageWidth - kSpacingPts) / 2
return CGSize(width: maxMessageWidth, height: imageSize)
case 3:
// x
// X
// x
// Big on left, 2 small on right.
let smallImageSize = (maxMessageWidth - kSpacingPts * 2) / 3
let bigImageSize = smallImageSize * 2 + kSpacingPts
return CGSize(width: maxMessageWidth, height: bigImageSize)
default:
// X X
// xxx
// 2 big on top, 3 small on bottom.
let bigImageSize = (maxMessageWidth - kSpacingPts) / 2
let smallImageSize = (maxMessageWidth - kSpacingPts * 2) / 3
return CGSize(width: maxMessageWidth, height: bigImageSize + smallImageSize + kSpacingPts)
}
}
}
}

View File

@ -24,11 +24,7 @@ NS_ASSUME_NONNULL_BEGIN
const UIDataDetectorTypes kOWSAllowedDataDetectorTypes
= UIDataDetectorTypeLink | UIDataDetectorTypeAddress | UIDataDetectorTypeCalendarEvent;
typedef _Nullable id (^LoadCellMediaBlock)(void);
@interface OWSMessageBubbleView () <OWSQuotedMessageViewDelegate,
OWSContactShareButtonsViewDelegate,
OWSMediaGalleryCellViewDelegate>
@interface OWSMessageBubbleView () <OWSQuotedMessageViewDelegate, OWSContactShareButtonsViewDelegate>
@property (nonatomic) OWSBubbleView *bubbleView;
@ -310,16 +306,10 @@ typedef _Nullable id (^LoadCellMediaBlock)(void);
case OWSMessageCellType_OversizeTextMessage:
break;
case OWSMessageCellType_StillImage:
OWSAssertDebug(self.viewItem.attachmentStream);
bodyMediaView = [self loadViewForStillImage];
break;
case OWSMessageCellType_AnimatedImage:
OWSAssertDebug(self.viewItem.attachmentStream);
bodyMediaView = [self loadViewForAnimatedImage];
break;
case OWSMessageCellType_Video:
OWSAssertDebug(self.viewItem.attachmentStream);
bodyMediaView = [self loadViewForVideo];
bodyMediaView = [self loadViewForMedia];
break;
case OWSMessageCellType_Audio:
OWSAssertDebug(self.viewItem.attachmentStream);
@ -662,46 +652,6 @@ typedef _Nullable id (^LoadCellMediaBlock)(void);
return YES;
}
// We now eagerly create our view hierarchy (to do this exactly once per cell usage)
// but lazy-load any expensive media (photo, gif, etc.) used in those views. Note that
// this lazy-load can fail, in which case we modify the view hierarchy to use an "error"
// state. The didCellMediaFailToLoad reflects media load fails.
- (nullable id)tryToLoadCellMedia:(LoadCellMediaBlock)loadCellMediaBlock
mediaView:(UIView *)mediaView
cacheKey:(NSString *)cacheKey
canLoadAsync:(BOOL)canLoadAsync
{
OWSAssertIsOnMainThread();
if (self.cellType == OWSMessageCellType_MediaGallery) {
OWSAssertDebug(self.viewItem.mediaGalleryItems);
} else {
OWSAssertDebug(self.attachmentStream);
}
OWSAssertDebug(mediaView);
OWSAssertDebug(cacheKey);
OWSAssertDebug(self.cellMediaCache);
if (self.viewItem.didCellMediaFailToLoad) {
return nil;
}
id _Nullable cellMedia = [self.cellMediaCache objectForKey:cacheKey];
if (cellMedia) {
OWSLogVerbose(@"cell media cache hit");
return cellMedia;
}
cellMedia = loadCellMediaBlock();
if (cellMedia) {
OWSLogVerbose(@"cell media cache miss");
[self.cellMediaCache setObject:cellMedia forKey:cacheKey];
} else if (!canLoadAsync) {
OWSLogError(@"Failed to load cell media: %@", self.attachmentStream.originalMediaURL);
self.viewItem.didCellMediaFailToLoad = YES;
[self showAttachmentErrorViewWithMediaView:mediaView];
}
return cellMedia;
}
- (CGFloat)textViewVSpacing
{
return 2.f;
@ -842,11 +792,10 @@ typedef _Nullable id (^LoadCellMediaBlock)(void);
{
OWSAssertDebug(self.viewItem.mediaGalleryItems);
OWSLogVerbose(@"self.viewItem.mediaGalleryItems: %lu", (unsigned long)self.viewItem.mediaGalleryItems.count);
OWSMediaGalleryCellView *galleryView =
[[OWSMediaGalleryCellView alloc] initWithDelegate:self
items:self.viewItem.mediaGalleryItems
maxMessageWidth:self.conversationStyle.maxMessageWidth];
[[OWSMediaGalleryCellView alloc] initWithMediaCache:self.cellMediaCache
items:self.viewItem.mediaGalleryItems
maxMessageWidth:self.conversationStyle.maxMessageWidth];
self.loadCellContentBlock = ^{
[galleryView loadMedia];
};
@ -857,110 +806,22 @@ typedef _Nullable id (^LoadCellMediaBlock)(void);
return galleryView;
}
- (UIView *)loadViewForStillImage
- (UIView *)loadViewForMedia
{
OWSAssertDebug(self.attachmentStream);
OWSAssertDebug([self.attachmentStream isImage]);
UIImageView *stillImageView = [UIImageView new];
// We need to specify a contentMode since the size of the image
// might not match the aspect ratio of the view.
stillImageView.contentMode = UIViewContentModeScaleAspectFill;
// Use trilinear filters for better scaling quality at
// some performance cost.
stillImageView.layer.minificationFilter = kCAFilterTrilinear;
stillImageView.layer.magnificationFilter = kCAFilterTrilinear;
stillImageView.backgroundColor = [UIColor whiteColor];
[self addAttachmentUploadViewIfNecessary];
__weak OWSMessageBubbleView *weakSelf = self;
__weak UIImageView *weakImageView = stillImageView;
ConversationMediaView *mediaView =
[[ConversationMediaView alloc] initWithMediaCache:self.cellMediaCache attachment:self.attachmentStream];
self.loadCellContentBlock = ^{
OWSMessageBubbleView *strongSelf = weakSelf;
if (!strongSelf) {
return;
}
OWSCAssertDebug(strongSelf.bodyMediaView == stillImageView);
if (stillImageView.image) {
return;
}
stillImageView.image = [strongSelf
tryToLoadCellMedia:^{
OWSCAssertDebug([strongSelf.attachmentStream isImage]);
OWSCAssertDebug([strongSelf.attachmentStream isValidImage]);
return [strongSelf.attachmentStream
thumbnailImageMediumWithSuccess:^(UIImage *image) {
weakImageView.image = image;
}
failure:^{
OWSLogError(@"Could not load thumbnail.");
}];
}
mediaView:stillImageView
cacheKey:strongSelf.attachmentStream.uniqueId
canLoadAsync:YES];
[mediaView loadMedia];
};
self.unloadCellContentBlock = ^{
OWSMessageBubbleView *strongSelf = weakSelf;
if (!strongSelf) {
return;
}
OWSCAssertDebug(strongSelf.bodyMediaView == stillImageView);
stillImageView.image = nil;
[mediaView unloadMedia];
};
return stillImageView;
}
- (UIView *)loadViewForAnimatedImage
{
OWSAssertDebug(self.attachmentStream);
OWSAssertDebug([self.attachmentStream isAnimated]);
YYAnimatedImageView *animatedImageView = [[YYAnimatedImageView alloc] init];
// We need to specify a contentMode since the size of the image
// might not match the aspect ratio of the view.
animatedImageView.contentMode = UIViewContentModeScaleAspectFill;
animatedImageView.backgroundColor = [UIColor whiteColor];
[self addAttachmentUploadViewIfNecessary];
__weak OWSMessageBubbleView *weakSelf = self;
self.loadCellContentBlock = ^{
OWSMessageBubbleView *strongSelf = weakSelf;
if (!strongSelf) {
return;
}
OWSCAssertDebug(strongSelf.bodyMediaView == animatedImageView);
if (animatedImageView.image) {
return;
}
animatedImageView.image = [strongSelf
tryToLoadCellMedia:^{
OWSCAssertDebug([strongSelf.attachmentStream isAnimated]);
OWSCAssertDebug([strongSelf.attachmentStream isValidImage]);
NSString *_Nullable filePath = [strongSelf.attachmentStream originalFilePath];
YYImage *_Nullable animatedImage = nil;
if (strongSelf.attachmentStream.isValidImage && filePath) {
animatedImage = [YYImage imageWithContentsOfFile:filePath];
}
return animatedImage;
}
mediaView:animatedImageView
cacheKey:strongSelf.attachmentStream.uniqueId
canLoadAsync:NO];
};
self.unloadCellContentBlock = ^{
OWSMessageBubbleView *strongSelf = weakSelf;
if (!strongSelf) {
return;
}
OWSCAssertDebug(strongSelf.bodyMediaView == animatedImageView);
animatedImageView.image = nil;
};
return animatedImageView;
return mediaView;
}
- (UIView *)loadViewForAudio
@ -986,68 +847,6 @@ typedef _Nullable id (^LoadCellMediaBlock)(void);
return audioMessageView;
}
- (UIView *)loadViewForVideo
{
OWSAssertDebug(self.attachmentStream);
OWSAssertDebug([self.attachmentStream isVideo]);
UIImageView *stillImageView = [UIImageView new];
// We need to specify a contentMode since the size of the image
// might not match the aspect ratio of the view.
stillImageView.contentMode = UIViewContentModeScaleAspectFill;
// Use trilinear filters for better scaling quality at
// some performance cost.
stillImageView.layer.minificationFilter = kCAFilterTrilinear;
stillImageView.layer.magnificationFilter = kCAFilterTrilinear;
UIImage *videoPlayIcon = [UIImage imageNamed:@"play_button"];
UIImageView *videoPlayButton = [[UIImageView alloc] initWithImage:videoPlayIcon];
[stillImageView addSubview:videoPlayButton];
[videoPlayButton autoCenterInSuperview];
[self addAttachmentUploadViewIfNecessaryWithAttachmentStateCallback:^(BOOL isAttachmentReady) {
videoPlayButton.hidden = !isAttachmentReady;
}];
__weak OWSMessageBubbleView *weakSelf = self;
__weak UIImageView *weakImageView = stillImageView;
self.loadCellContentBlock = ^{
OWSMessageBubbleView *strongSelf = weakSelf;
if (!strongSelf) {
return;
}
OWSCAssertDebug(strongSelf.bodyMediaView == stillImageView);
if (stillImageView.image) {
return;
}
stillImageView.image = [strongSelf
tryToLoadCellMedia:^{
OWSCAssertDebug([strongSelf.attachmentStream isVideo]);
OWSCAssertDebug([strongSelf.attachmentStream isValidVideo]);
return [strongSelf.attachmentStream
thumbnailImageMediumWithSuccess:^(UIImage *image) {
weakImageView.image = image;
}
failure:^{
OWSLogError(@"Could not load thumbnail.");
}];
}
mediaView:stillImageView
cacheKey:strongSelf.attachmentStream.uniqueId
canLoadAsync:YES];
};
self.unloadCellContentBlock = ^{
OWSMessageBubbleView *strongSelf = weakSelf;
if (!strongSelf) {
return;
}
OWSCAssertDebug(strongSelf.bodyMediaView == stillImageView);
stillImageView.image = nil;
};
return stillImageView;
}
- (UIView *)loadViewForGenericAttachment
{
OWSAssertDebug(self.viewItem.attachmentStream);