Fixed a few bugs and crashes around media interactions

Fixed a crash when trying to grant permission to access additional photos
Fixed a bug where audio files would incorrectly get recognised as voice messages
Replaced our custom video/audio players with the native ones (which have additional built-in controls)
Updated the errors from SSKKeychainStorage to include useful information
Updated layout for audio attachments
This commit is contained in:
Morgan Pretty 2023-10-12 16:03:00 +11:00
parent 6d57523ede
commit bd98db2612
21 changed files with 269 additions and 881 deletions

View File

@ -337,7 +337,6 @@
C38D5E8D2575011E00B6A65C /* MessageSender+ClosedGroups.swift in Sources */ = {isa = PBXBuildFile; fileRef = C38D5E8C2575011E00B6A65C /* MessageSender+ClosedGroups.swift */; };
C38EF00C255B61CC007E1867 /* SignalUtilitiesKit.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = C33FD9AB255A548A00E217F9 /* SignalUtilitiesKit.framework */; };
C38EF22B255B6D5D007E1867 /* ShareViewDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = C38EF226255B6D5D007E1867 /* ShareViewDelegate.swift */; };
C38EF22C255B6D5D007E1867 /* OWSVideoPlayer.swift in Sources */ = {isa = PBXBuildFile; fileRef = C38EF227255B6D5D007E1867 /* OWSVideoPlayer.swift */; };
C38EF24D255B6D67007E1867 /* UIView+OWS.swift in Sources */ = {isa = PBXBuildFile; fileRef = C38EF240255B6D67007E1867 /* UIView+OWS.swift */; };
C38EF24E255B6D67007E1867 /* Collection+OWS.swift in Sources */ = {isa = PBXBuildFile; fileRef = C38EF241255B6D67007E1867 /* Collection+OWS.swift */; };
C38EF2B3255B6D9C007E1867 /* UIViewController+Utilities.swift in Sources */ = {isa = PBXBuildFile; fileRef = C38EF2B1255B6D9C007E1867 /* UIViewController+Utilities.swift */; };
@ -379,7 +378,6 @@
C38EF3FB255B6DF7007E1867 /* UIAlertController+OWS.swift in Sources */ = {isa = PBXBuildFile; fileRef = C38EF3DD255B6DF1007E1867 /* UIAlertController+OWS.swift */; };
C38EF3FF255B6DF7007E1867 /* TappableView.swift in Sources */ = {isa = PBXBuildFile; fileRef = C38EF3E1255B6DF3007E1867 /* TappableView.swift */; };
C38EF400255B6DF7007E1867 /* GalleryRailView.swift in Sources */ = {isa = PBXBuildFile; fileRef = C38EF3E2255B6DF3007E1867 /* GalleryRailView.swift */; };
C38EF401255B6DF7007E1867 /* VideoPlayerView.swift in Sources */ = {isa = PBXBuildFile; fileRef = C38EF3E3255B6DF4007E1867 /* VideoPlayerView.swift */; };
C38EF402255B6DF7007E1867 /* CommonStrings.swift in Sources */ = {isa = PBXBuildFile; fileRef = C38EF3E4255B6DF4007E1867 /* CommonStrings.swift */; };
C38EF405255B6DF7007E1867 /* OWSButton.swift in Sources */ = {isa = PBXBuildFile; fileRef = C38EF3E7255B6DF5007E1867 /* OWSButton.swift */; };
C38EF407255B6DF7007E1867 /* Toast.swift in Sources */ = {isa = PBXBuildFile; fileRef = C38EF3E9255B6DF6007E1867 /* Toast.swift */; };
@ -753,6 +751,7 @@
FDA8EB10280F8238002B68E5 /* Codable+Utilities.swift in Sources */ = {isa = PBXBuildFile; fileRef = FDA8EB0F280F8238002B68E5 /* Codable+Utilities.swift */; };
FDB4BBC72838B91E00B7C95D /* LinkPreviewError.swift in Sources */ = {isa = PBXBuildFile; fileRef = FDB4BBC62838B91E00B7C95D /* LinkPreviewError.swift */; };
FDB4BBC92839BEF000B7C95D /* ProfileManagerError.swift in Sources */ = {isa = PBXBuildFile; fileRef = FDB4BBC82839BEF000B7C95D /* ProfileManagerError.swift */; };
FDB6A87C2AD75B7F002D4F96 /* PhotosUI.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = FDB6A87B2AD75B7F002D4F96 /* PhotosUI.framework */; };
FDB7400B28EB99A70094D718 /* TimeInterval+Utilities.swift in Sources */ = {isa = PBXBuildFile; fileRef = FDB7400A28EB99A70094D718 /* TimeInterval+Utilities.swift */; };
FDB7400D28EBEC240094D718 /* DateHeaderCell.swift in Sources */ = {isa = PBXBuildFile; fileRef = FDB7400C28EBEC240094D718 /* DateHeaderCell.swift */; };
FDBB25E32988B13800F1508E /* _004_AddJobPriority.swift in Sources */ = {isa = PBXBuildFile; fileRef = FDBB25E22988B13800F1508E /* _004_AddJobPriority.swift */; };
@ -1474,7 +1473,6 @@
C38EEF09255B49A8007E1867 /* SNProtoEnvelope+Conversion.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "SNProtoEnvelope+Conversion.swift"; sourceTree = "<group>"; };
C38EF224255B6D5D007E1867 /* SignalAttachment.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; name = SignalAttachment.swift; path = "SessionMessagingKit/Sending & Receiving/Attachments/SignalAttachment.swift"; sourceTree = SOURCE_ROOT; };
C38EF226255B6D5D007E1867 /* ShareViewDelegate.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; name = ShareViewDelegate.swift; path = SignalUtilitiesKit/Utilities/ShareViewDelegate.swift; sourceTree = SOURCE_ROOT; };
C38EF227255B6D5D007E1867 /* OWSVideoPlayer.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; name = OWSVideoPlayer.swift; path = "SignalUtilitiesKit/Media Viewing & Editing/OWSVideoPlayer.swift"; sourceTree = SOURCE_ROOT; };
C38EF237255B6D65007E1867 /* UIDevice+featureSupport.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; name = "UIDevice+featureSupport.swift"; path = "SessionUtilitiesKit/General/UIDevice+featureSupport.swift"; sourceTree = SOURCE_ROOT; };
C38EF23D255B6D66007E1867 /* UIView+OWS.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = "UIView+OWS.h"; path = "SessionUtilitiesKit/General/UIView+OWS.h"; sourceTree = SOURCE_ROOT; };
C38EF23E255B6D66007E1867 /* UIView+OWS.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = "UIView+OWS.m"; path = "SessionUtilitiesKit/General/UIView+OWS.m"; sourceTree = SOURCE_ROOT; };
@ -1529,7 +1527,6 @@
C38EF3DD255B6DF1007E1867 /* UIAlertController+OWS.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; name = "UIAlertController+OWS.swift"; path = "SignalUtilitiesKit/Utilities/UIAlertController+OWS.swift"; sourceTree = SOURCE_ROOT; };
C38EF3E1255B6DF3007E1867 /* TappableView.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; name = TappableView.swift; path = "SignalUtilitiesKit/Shared Views/TappableView.swift"; sourceTree = SOURCE_ROOT; };
C38EF3E2255B6DF3007E1867 /* GalleryRailView.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; name = GalleryRailView.swift; path = "SignalUtilitiesKit/Shared Views/GalleryRailView.swift"; sourceTree = SOURCE_ROOT; };
C38EF3E3255B6DF4007E1867 /* VideoPlayerView.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; name = VideoPlayerView.swift; path = "SignalUtilitiesKit/Media Viewing & Editing/VideoPlayerView.swift"; sourceTree = SOURCE_ROOT; };
C38EF3E4255B6DF4007E1867 /* CommonStrings.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; name = CommonStrings.swift; path = SignalUtilitiesKit/Utilities/CommonStrings.swift; sourceTree = SOURCE_ROOT; };
C38EF3E7255B6DF5007E1867 /* OWSButton.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; name = OWSButton.swift; path = "SignalUtilitiesKit/Shared Views/OWSButton.swift"; sourceTree = SOURCE_ROOT; };
C38EF3E9255B6DF6007E1867 /* Toast.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; name = Toast.swift; path = "SignalUtilitiesKit/Shared Views/Toast.swift"; sourceTree = SOURCE_ROOT; };
@ -1897,6 +1894,7 @@
FDA8EB0F280F8238002B68E5 /* Codable+Utilities.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "Codable+Utilities.swift"; sourceTree = "<group>"; };
FDB4BBC62838B91E00B7C95D /* LinkPreviewError.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = LinkPreviewError.swift; sourceTree = "<group>"; };
FDB4BBC82839BEF000B7C95D /* ProfileManagerError.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ProfileManagerError.swift; sourceTree = "<group>"; };
FDB6A87B2AD75B7F002D4F96 /* PhotosUI.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = PhotosUI.framework; path = System/Library/Frameworks/PhotosUI.framework; sourceTree = SDKROOT; };
FDB7400A28EB99A70094D718 /* TimeInterval+Utilities.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "TimeInterval+Utilities.swift"; sourceTree = "<group>"; };
FDB7400C28EBEC240094D718 /* DateHeaderCell.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = DateHeaderCell.swift; sourceTree = "<group>"; };
FDBB25E22988B13800F1508E /* _004_AddJobPriority.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = _004_AddJobPriority.swift; sourceTree = "<group>"; };
@ -2154,6 +2152,7 @@
files = (
B8FF8DAE25C0D00F004D1F22 /* SessionMessagingKit.framework in Frameworks */,
B8FF8DAF25C0D00F004D1F22 /* SessionUtilitiesKit.framework in Frameworks */,
FDB6A87C2AD75B7F002D4F96 /* PhotosUI.framework in Frameworks */,
C37F54DC255BB84A002AEA92 /* SessionSnodeKit.framework in Frameworks */,
C37F5414255BAFA7002AEA92 /* SignalUtilitiesKit.framework in Frameworks */,
455A16DD1F1FEA0000F86704 /* Metal.framework in Frameworks */,
@ -3129,9 +3128,7 @@
C379DCEA2567334F0002D4EB /* Attachment Approval */,
C379DCE9256733390002D4EB /* Image Editing */,
C38EF358255B6DCC007E1867 /* MediaMessageView.swift */,
C38EF227255B6D5D007E1867 /* OWSVideoPlayer.swift */,
C38EF3B5255B6DE6007E1867 /* OWSViewController+ImageEditor.swift */,
C38EF3E3255B6DF4007E1867 /* VideoPlayerView.swift */,
);
path = "Media Viewing & Editing";
sourceTree = "<group>";
@ -3503,6 +3500,7 @@
D221A08C169C9E5E00537ABF /* Frameworks */ = {
isa = PBXGroup;
children = (
FDB6A87B2AD75B7F002D4F96 /* PhotosUI.framework */,
3496955F21A2FC8100DCFE74 /* CloudKit.framework */,
455A16DB1F1FEA0000F86704 /* Metal.framework */,
455A16DC1F1FEA0000F86704 /* MetalKit.framework */,
@ -5665,7 +5663,6 @@
C3F0A530255C80BC007BE2A3 /* NoopNotificationsManager.swift in Sources */,
C33FDD8D255A582000E217F9 /* OWSSignalAddress.swift in Sources */,
C38EF388255B6DD2007E1867 /* AttachmentApprovalViewController.swift in Sources */,
C38EF22C255B6D5D007E1867 /* OWSVideoPlayer.swift in Sources */,
C33FDC29255A581F00E217F9 /* ReachabilityManager.swift in Sources */,
C38EF407255B6DF7007E1867 /* Toast.swift in Sources */,
C38EF38C255B6DD2007E1867 /* ApprovalRailCellView.swift in Sources */,
@ -5690,7 +5687,6 @@
C38EF3BB255B6DE7007E1867 /* ImageEditorStrokeItem.swift in Sources */,
C38EF3C0255B6DE7007E1867 /* ImageEditorCropViewController.swift in Sources */,
FD52090B28B59BB4006098F6 /* ScreenLockViewController.swift in Sources */,
C38EF401255B6DF7007E1867 /* VideoPlayerView.swift in Sources */,
C38EF3BD255B6DE7007E1867 /* ImageEditorTransform.swift in Sources */,
C33FDC58255A582000E217F9 /* ReverseDispatchQueue.swift in Sources */,
C38EF324255B6DBF007E1867 /* Bench.swift in Sources */,

View File

@ -1,6 +1,8 @@
// Copyright © 2022 Rangeproof Pty Ltd. All rights reserved.
import UIKit
import AVKit
import AVFoundation
import Combine
import CoreServices
import Photos
@ -895,7 +897,7 @@ extension ConversationVC:
}
switch cellViewModel.cellType {
case .audio: viewModel.playOrPauseAudio(for: cellViewModel)
case .voiceMessage: viewModel.playOrPauseAudio(for: cellViewModel)
case .mediaMessage:
guard
@ -945,6 +947,18 @@ extension ConversationVC:
// Ignore invalid media
guard mediaView.attachment.isValid else { return }
guard albumView.numItems > 1 || !mediaView.attachment.isVideo else {
guard
let originalFilePath: String = mediaView.attachment.originalFilePath,
FileManager.default.fileExists(atPath: originalFilePath)
else { return SNLog("Missing video file") }
let viewController: AVPlayerViewController = AVPlayerViewController()
viewController.player = AVPlayer(url: URL(fileURLWithPath: originalFilePath))
self.navigationController?.present(viewController, animated: true)
return
}
let viewController: UIViewController? = MediaGalleryViewModel.createDetailViewController(
for: self.viewModel.threadData.threadId,
threadVariant: self.viewModel.threadData.threadVariant,
@ -975,6 +989,17 @@ extension ConversationVC:
}
}
case .audio:
guard
let attachment: Attachment = cellViewModel.attachments?.first,
let originalFilePath: String = attachment.originalFilePath
else { return }
// Use the native player to play audio files
let viewController: AVPlayerViewController = AVPlayerViewController()
viewController.player = AVPlayer(url: URL(fileURLWithPath: originalFilePath))
self.navigationController?.present(viewController, animated: true)
case .genericAttachment:
guard
let attachment: Attachment = cellViewModel.attachments?.first,
@ -1038,7 +1063,7 @@ extension ConversationVC:
func handleItemDoubleTapped(_ cellViewModel: MessageViewModel) {
switch cellViewModel.cellType {
// The user can double tap a voice message when it's playing to speed it up
case .audio: self.viewModel.speedUpAudio(for: cellViewModel)
case .voiceMessage: self.viewModel.speedUpAudio(for: cellViewModel)
default: break
}
}
@ -1777,7 +1802,7 @@ extension ConversationVC:
UIPasteboard.general.string = cellViewModel.body
case .audio, .genericAttachment, .mediaMessage:
case .audio, .voiceMessage, .genericAttachment, .mediaMessage:
guard
cellViewModel.attachments?.count == 1,
let attachment: Attachment = cellViewModel.attachments?.first,

View File

@ -1048,7 +1048,7 @@ public class ConversationViewModel: OWSAudioPlayerDelegate {
let currentIndex: Int = messageSection.elements
.firstIndex(where: { $0.id == interactionId }),
currentIndex < (messageSection.elements.count - 1),
messageSection.elements[currentIndex + 1].cellType == .audio,
messageSection.elements[currentIndex + 1].cellType == .voiceMessage,
Storage.shared[.shouldAutoPlayConsecutiveAudioMessages] == true
else { return }

View File

@ -33,20 +33,35 @@ final class DocumentView: UIView {
)
imageView.setContentCompressionResistancePriority(.required, for: .horizontal)
imageView.setContentHuggingPriority(.required, for: .horizontal)
imageView.contentMode = .scaleAspectFit
imageView.themeTintColor = textColor
imageView.set(.height, to: 22)
imageView.set(.width, to: 24)
imageView.set(.height, to: 32)
if attachment.isAudio {
let audioImageView = UIImageView(
image: UIImage(systemName: "music.note")?
.withRenderingMode(.alwaysTemplate)
)
audioImageView.contentMode = .scaleAspectFit
audioImageView.themeTintColor = textColor
imageView.addSubview(audioImageView)
audioImageView.center(.horizontal, in: imageView)
audioImageView.center(.vertical, in: imageView, withInset: 4)
audioImageView.set(.height, to: .height, of: imageView, multiplier: 0.32)
}
// Body label
let titleLabel = UILabel()
titleLabel.font = .systemFont(ofSize: Values.mediumFontSize)
titleLabel.text = (attachment.sourceFilename ?? "File")
titleLabel.text = attachment.documentFileName
titleLabel.themeTextColor = textColor
titleLabel.lineBreakMode = .byTruncatingTail
// Size label
let sizeLabel = UILabel()
sizeLabel.font = .systemFont(ofSize: Values.verySmallFontSize)
sizeLabel.text = Format.fileSize(attachment.byteCount)
sizeLabel.text = attachment.documentFileInfo
sizeLabel.themeTextColor = textColor
sizeLabel.lineBreakMode = .byTruncatingTail
@ -55,14 +70,19 @@ final class DocumentView: UIView {
labelStackView.axis = .vertical
// Download image view
let downloadImageView = UIImageView(
image: UIImage(systemName: "arrow.down")?
.withRenderingMode(.alwaysTemplate)
let rightImageView = UIImageView(
image: {
switch attachment.isAudio {
case true: return UIImage(systemName: "play.fill")
case false: return UIImage(systemName: "arrow.down")
}
}()?.withRenderingMode(.alwaysTemplate)
)
downloadImageView.setContentCompressionResistancePriority(.required, for: .horizontal)
downloadImageView.setContentHuggingPriority(.required, for: .horizontal)
downloadImageView.themeTintColor = textColor
downloadImageView.set(.height, to: 16)
rightImageView.setContentCompressionResistancePriority(.required, for: .horizontal)
rightImageView.setContentHuggingPriority(.required, for: .horizontal)
rightImageView.contentMode = .scaleAspectFit
rightImageView.themeTintColor = textColor
rightImageView.set(.height, to: 24)
// Stack view
let stackView = UIStackView(
@ -70,7 +90,7 @@ final class DocumentView: UIView {
imageView,
UIView.spacer(withWidth: 0),
labelStackView,
downloadImageView
rightImageView
]
)
stackView.axis = .horizontal

View File

@ -9,6 +9,8 @@ public class MediaAlbumView: UIStackView {
private let items: [Attachment]
public let itemViews: [MediaView]
public var moreItemsView: MediaView?
public var numItems: Int { return items.count }
public var numVisibleItems: Int { return itemViews.count }
private static let kSpacingPts: CGFloat = 4
private static let kMaxItems = 3
@ -24,13 +26,22 @@ public class MediaAlbumView: UIStackView {
isOutgoing: Bool,
maxMessageWidth: CGFloat
) {
let itemsToDisplay: [Attachment] = MediaAlbumView.itemsToDisplay(forItems: items)
self.items = items
self.itemViews = MediaAlbumView.itemsToDisplay(forItems: items)
.map {
self.itemViews = itemsToDisplay.enumerated()
.map { index, attachment -> MediaView in
MediaView(
mediaCache: mediaCache,
attachment: $0,
attachment: attachment,
isOutgoing: isOutgoing,
shouldSupressControls: (
// If there are extra items that aren't displayed and this is the
// last one that will be displayed then suppress any custom controls
// otherwise the '+' icon will be obscured
itemsToDisplay.count != items.count &&
(index == (itemsToDisplay.count - 1))
),
cornerRadius: VisibleMessageCell.largeCornerRadius
)
}

View File

@ -22,6 +22,7 @@ public class MediaView: UIView {
private let mediaCache: NSCache<NSString, AnyObject>?
public let attachment: Attachment
private let isOutgoing: Bool
private let shouldSupressControls: Bool
private var loadBlock: (() -> Void)?
private var unloadBlock: (() -> Void)?
@ -51,11 +52,13 @@ public class MediaView: UIView {
mediaCache: NSCache<NSString, AnyObject>? = nil,
attachment: Attachment,
isOutgoing: Bool,
shouldSupressControls: Bool,
cornerRadius: CGFloat
) {
self.mediaCache = mediaCache
self.attachment = attachment
self.isOutgoing = isOutgoing
self.shouldSupressControls = shouldSupressControls
super.init(frame: .zero)
@ -275,7 +278,29 @@ public class MediaView: UIView {
addSubview(stillImageView)
stillImageView.autoPinEdgesToSuperviewEdges()
if !addUploadProgressIfNecessary(stillImageView) {
if !addUploadProgressIfNecessary(stillImageView) && !shouldSupressControls {
if let duration: TimeInterval = attachment.duration {
let fadeView: GradientView = GradientView()
fadeView.themeBackgroundGradient = [
.value(.black, alpha: 0),
.value(.black, alpha: 0.4)
]
stillImageView.addSubview(fadeView)
fadeView.set(.height, to: 40)
fadeView.pin(.leading, to: .leading, of: stillImageView)
fadeView.pin(.trailing, to: .trailing, of: stillImageView)
fadeView.pin(.bottom, to: .bottom, of: stillImageView)
let durationLabel: UILabel = UILabel()
durationLabel.font = .systemFont(ofSize: Values.smallFontSize)
durationLabel.text = Format.duration(duration)
durationLabel.themeTextColor = .white
stillImageView.addSubview(durationLabel)
durationLabel.pin(.trailing, to: .trailing, of: stillImageView, withInset: -Values.smallSpacing)
durationLabel.pin(.bottom, to: .bottom, of: stillImageView, withInset: -Values.smallSpacing)
}
// Add the play button above the duration label and fade
let videoPlayIcon = UIImage(named: "CirclePlay")
let videoPlayButton = UIImageView(image: videoPlayIcon)
videoPlayButton.set(.width, to: 72)

View File

@ -611,7 +611,7 @@ final class VisibleMessageCell: MessageCell, TappableLabelDelegate {
unloadContent = { albumView.unloadMedia() }
case .audio:
case .voiceMessage:
guard let attachment: Attachment = cellViewModel.attachments?.first(where: { $0.isAudio }) else {
return
}
@ -630,7 +630,7 @@ final class VisibleMessageCell: MessageCell, TappableLabelDelegate {
snContentView.addArrangedSubview(bubbleBackgroundView)
self.voiceMessageView = voiceMessageView
case .genericAttachment:
case .audio, .genericAttachment:
guard let attachment: Attachment = cellViewModel.attachments?.first else { preconditionFailure() }
let inset: CGFloat = 12
@ -741,7 +741,7 @@ final class VisibleMessageCell: MessageCell, TappableLabelDelegate {
}
switch cellViewModel.cellType {
case .audio:
case .voiceMessage:
guard let attachment: Attachment = cellViewModel.attachments?.first(where: { $0.isAudio }) else {
return
}

View File

@ -376,10 +376,17 @@ class DocumentCell: UITableViewCell {
// MARK: - UI
private static let iconImageViewSize: CGSize = CGSize(width: 31, height: 40)
private let iconImageView: UIImageView = {
let result: UIImageView = UIImageView(image: #imageLiteral(resourceName: "File").withRenderingMode(.alwaysTemplate))
let result: UIImageView = UIImageView(image: UIImage(systemName: "doc")?.withRenderingMode(.alwaysTemplate))
result.translatesAutoresizingMaskIntoConstraints = false
result.themeTintColor = .textPrimary
result.contentMode = .scaleAspectFit
return result
}()
private let audioImageView: UIImageView = {
let result = UIImageView(image: UIImage(systemName: "music.note")?.withRenderingMode(.alwaysTemplate))
result.translatesAutoresizingMaskIntoConstraints = false
result.themeTintColor = .textPrimary
result.contentMode = .scaleAspectFit
@ -439,6 +446,8 @@ class DocumentCell: UITableViewCell {
contentView.addSubview(titleLabel)
contentView.addSubview(timeLabel)
contentView.addSubview(detailLabel)
iconImageView.addSubview(audioImageView)
}
// MARK: - Layout
@ -458,6 +467,8 @@ class DocumentCell: UITableViewCell {
lessThanOrEqualTo: contentView.bottomAnchor,
constant: -(Values.verySmallSpacing + Values.verySmallSpacing)
),
iconImageView.widthAnchor.constraint(equalToConstant: 36),
iconImageView.heightAnchor.constraint(equalToConstant: 46),
titleLabel.topAnchor.constraint(
equalTo: contentView.topAnchor,
@ -485,6 +496,10 @@ class DocumentCell: UITableViewCell {
lessThanOrEqualTo: contentView.bottomAnchor,
constant: -(Values.verySmallSpacing + Values.smallSpacing)
),
audioImageView.centerXAnchor.constraint(equalTo: iconImageView.centerXAnchor),
audioImageView.centerYAnchor.constraint(equalTo: iconImageView.centerYAnchor, constant: 7),
audioImageView.heightAnchor.constraint(equalTo: iconImageView.heightAnchor, multiplier: 0.32)
])
}
@ -504,11 +519,12 @@ class DocumentCell: UITableViewCell {
func update(with item: MediaGalleryViewModel.Item) {
let attachment = item.attachment
titleLabel.text = (attachment.sourceFilename ?? "File")
detailLabel.text = "\(Format.fileSize(attachment.byteCount)))"
titleLabel.text = attachment.documentFileName
detailLabel.text = attachment.documentFileInfo
timeLabel.text = Date(
timeIntervalSince1970: TimeInterval(item.interactionTimestampMs / 1000)
).formattedForDisplay
audioImageView.isHidden = !attachment.isAudio
}
}

View File

@ -3,6 +3,7 @@
import Foundation
import Combine
import Photos
import PhotosUI
import SessionUIKit
import SignalUtilitiesKit
import SignalCoreKit

View File

@ -1,6 +1,8 @@
// Copyright © 2022 Rangeproof Pty Ltd. All rights reserved.
import UIKit
import AVKit
import AVFoundation
import YYImage
import SessionUIKit
import SignalUtilitiesKit
@ -13,7 +15,7 @@ public enum MediaGalleryOption {
case showAllMediaButton
}
class MediaDetailViewController: OWSViewController, UIScrollViewDelegate, OWSVideoPlayerDelegate, PlayerProgressBarDelegate {
class MediaDetailViewController: OWSViewController, UIScrollViewDelegate {
public let galleryItem: MediaGalleryViewModel.Item
public weak var delegate: MediaDetailViewControllerDelegate?
private var image: UIImage?
@ -37,9 +39,19 @@ class MediaDetailViewController: OWSViewController, UIScrollViewDelegate, OWSVid
}()
public var mediaView: UIView = UIView()
private var playVideoButton: UIButton = UIButton()
private var videoProgressBar: PlayerProgressBar = PlayerProgressBar()
private var videoPlayer: OWSVideoPlayer?
private lazy var playVideoButton: UIButton = {
let result: UIButton = UIButton()
result.contentMode = .scaleAspectFill
result.setBackgroundImage(UIImage(named: "CirclePlay"), for: .normal)
result.addTarget(self, action: #selector(playVideo), for: .touchUpInside)
result.alpha = 0
let playButtonSize: CGFloat = ScaleFromIPhone5(70)
result.set(.width, to: playButtonSize)
result.set(.height, to: playButtonSize)
return result
}()
// MARK: - Initialization
@ -86,10 +98,6 @@ class MediaDetailViewController: OWSViewController, UIScrollViewDelegate, OWSVid
fatalError("init(coder:) has not been implemented")
}
deinit {
self.stopAnyVideo()
}
// MARK: - Lifecycle
override func viewDidLoad() {
@ -98,7 +106,10 @@ class MediaDetailViewController: OWSViewController, UIScrollViewDelegate, OWSVid
self.view.themeBackgroundColor = .newConversation_background
self.view.addSubview(scrollView)
self.view.addSubview(playVideoButton)
scrollView.pin(to: self.view)
playVideoButton.center(in: self.view)
self.updateContents()
}
@ -112,12 +123,18 @@ class MediaDetailViewController: OWSViewController, UIScrollViewDelegate, OWSVid
override func viewDidAppear(_ animated: Bool) {
super.viewDidAppear(animated)
if self.parent == nil || !(self.parent is MediaPageViewController) {
parentDidAppear()
}
}
public func parentDidAppear() {
if mediaView is YYAnimatedImageView {
// Add a slight delay before starting the gif animation to prevent it from looking
// buggy due to the custom transition
DispatchQueue.main.asyncAfter(deadline: .now() + .milliseconds(250)) { [weak self] in
(self?.mediaView as? YYAnimatedImageView)?.startAnimating()
}
(mediaView as? YYAnimatedImageView)?.startAnimating()
}
if self.galleryItem.attachment.isVideo {
UIView.animate(withDuration: 0.2) { self.playVideoButton.alpha = 1 }
}
}
@ -128,6 +145,12 @@ class MediaDetailViewController: OWSViewController, UIScrollViewDelegate, OWSVid
self.centerMediaViewConstraints()
}
override func viewWillDisappear(_ animated: Bool) {
super.viewWillDisappear(animated)
UIView.animate(withDuration: 0.15) { [weak playVideoButton] in playVideoButton?.alpha = 0 }
}
// MARK: - Functions
private func updateMinZoomScale() {
@ -174,8 +197,6 @@ class MediaDetailViewController: OWSViewController, UIScrollViewDelegate, OWSVid
private func updateContents() {
self.mediaView.removeFromSuperview()
self.playVideoButton.removeFromSuperview()
self.videoProgressBar.removeFromSuperview()
self.scrollView.zoomScale = 1
if self.galleryItem.attachment.isAnimated {
@ -195,15 +216,6 @@ class MediaDetailViewController: OWSViewController, UIScrollViewDelegate, OWSVid
self.mediaView = UIView()
self.mediaView.themeBackgroundColor = .newConversation_background
}
else if self.galleryItem.attachment.isVideo {
if self.galleryItem.attachment.isValid {
self.mediaView = self.buildVideoPlayerView()
}
else {
self.mediaView = UIView()
self.mediaView.themeBackgroundColor = .newConversation_background
}
}
else {
// Present the static image using standard UIImageView
self.mediaView = UIImageView(image: self.image)
@ -230,61 +242,6 @@ class MediaDetailViewController: OWSViewController, UIScrollViewDelegate, OWSVid
// some performance cost.
self.mediaView.layer.minificationFilter = .trilinear
self.mediaView.layer.magnificationFilter = .trilinear
if self.galleryItem.attachment.isVideo {
self.videoProgressBar = PlayerProgressBar()
self.videoProgressBar.delegate = self
self.videoProgressBar.player = self.videoPlayer?.avPlayer
// We hide the progress bar until either:
// 1. Video completes playing
// 2. User taps the screen
self.videoProgressBar.isHidden = false
self.view.addSubview(self.videoProgressBar)
self.videoProgressBar.autoPinWidthToSuperview()
self.videoProgressBar.autoPinEdge(toSuperviewSafeArea: .top)
self.videoProgressBar.autoSetDimension(.height, toSize: 44)
self.playVideoButton = UIButton()
self.playVideoButton.contentMode = .scaleAspectFill
self.playVideoButton.setBackgroundImage(UIImage(named: "CirclePlay"), for: .normal)
self.playVideoButton.addTarget(self, action: #selector(playVideo), for: .touchUpInside)
self.view.addSubview(self.playVideoButton)
self.playVideoButton.set(.width, to: 72)
self.playVideoButton.set(.height, to: 72)
self.playVideoButton.center(in: self.view)
}
}
private func buildVideoPlayerView() -> UIView {
guard
let originalFilePath: String = self.galleryItem.attachment.originalFilePath,
FileManager.default.fileExists(atPath: originalFilePath)
else {
owsFailDebug("Missing video file")
return UIView()
}
self.videoPlayer = OWSVideoPlayer(url: URL(fileURLWithPath: originalFilePath))
self.videoPlayer?.seek(to: .zero)
self.videoPlayer?.delegate = self
let imageSize: CGSize = (self.image?.size ?? .zero)
let playerView: VideoPlayerView = VideoPlayerView()
playerView.player = self.videoPlayer?.avPlayer
NSLayoutConstraint.autoSetPriority(.defaultLow) {
playerView.autoSetDimensions(to: imageSize)
}
return playerView
}
public func setShouldHideToolbars(_ shouldHideToolbars: Bool) {
self.videoProgressBar.isHidden = shouldHideToolbars
}
private func addGestureRecognizers(to view: UIView) {
@ -330,14 +287,10 @@ class MediaDetailViewController: OWSViewController, UIScrollViewDelegate, OWSVid
self.scrollView.zoom(to: translatedRect, animated: true)
}
@objc public func didPressPlayBarButton() {
public func didPressPlayBarButton() {
self.playVideo()
}
@objc public func didPressPauseBarButton() {
self.pauseVideo()
}
// MARK: - UIScrollViewDelegate
func viewForZooming(in scrollView: UIScrollView) -> UIView? {
@ -391,49 +344,17 @@ class MediaDetailViewController: OWSViewController, UIScrollViewDelegate, OWSVid
// MARK: - Video Playback
@objc public func playVideo() {
self.playVideoButton.isHidden = true
self.videoPlayer?.play()
self.delegate?.mediaDetailViewController(self, isPlayingVideo: true)
}
private func pauseVideo() {
self.videoPlayer?.pause()
self.delegate?.mediaDetailViewController(self, isPlayingVideo: false)
}
public func stopAnyVideo() {
guard self.galleryItem.attachment.isVideo else { return }
guard
let originalFilePath: String = self.galleryItem.attachment.originalFilePath,
FileManager.default.fileExists(atPath: originalFilePath)
else { return SNLog("Missing video file") }
self.stopVideo()
}
private func stopVideo() {
self.videoPlayer?.stop()
self.playVideoButton.isHidden = false
self.delegate?.mediaDetailViewController(self, isPlayingVideo: false)
}
// MARK: - OWSVideoPlayerDelegate
func videoPlayerDidPlayToCompletion(_ videoPlayer: OWSVideoPlayer) {
self.stopVideo()
}
// MARK: - PlayerProgressBarDelegate
func playerProgressBarDidStartScrubbing(_ playerProgressBar: PlayerProgressBar) {
self.videoPlayer?.pause()
}
func playerProgressBar(_ playerProgressBar: PlayerProgressBar, scrubbedToTime time: CMTime) {
self.videoPlayer?.seek(to: time)
}
func playerProgressBar(_ playerProgressBar: PlayerProgressBar, didFinishScrubbingAtTime time: CMTime, shouldResumePlayback: Bool) {
self.videoPlayer?.seek(to: time)
if shouldResumePlayback {
self.videoPlayer?.play()
let videoUrl: URL = URL(fileURLWithPath: originalFilePath)
let player: AVPlayer = AVPlayer(url: videoUrl)
let viewController: AVPlayerViewController = AVPlayerViewController()
viewController.player = player
self.present(viewController, animated: true) { [weak player] in
player?.play()
}
}
}
@ -441,6 +362,5 @@ class MediaDetailViewController: OWSViewController, UIScrollViewDelegate, OWSVid
// MARK: - MediaDetailViewControllerDelegate
protocol MediaDetailViewControllerDelegate: AnyObject {
func mediaDetailViewController(_ mediaDetailViewController: MediaDetailViewController, isPlayingVideo: Bool)
func mediaDetailViewControllerDidTapMedia(_ mediaDetailViewController: MediaDetailViewController)
}

View File

@ -18,6 +18,7 @@ extension MediaInfoVC {
let result: MediaView = MediaView.init(
attachment: attachment,
isOutgoing: isOutgoing,
shouldSupressControls: false,
cornerRadius: 0
)

View File

@ -51,7 +51,7 @@ class MediaPageViewController: UIPageViewController, UIPageViewControllerDataSou
updateTitle(item: item)
updateCaption(item: item)
setViewControllers([galleryPage], direction: direction, animated: isAnimated)
updateFooterBarButtonItems(isPlayingVideo: false)
updateFooterBarButtonItems()
updateMediaRail(item: item)
}
@ -204,7 +204,7 @@ class MediaPageViewController: UIPageViewController, UIPageViewControllerDataSou
updateTitle(item: currentItem)
updateCaption(item: currentItem)
updateMediaRail(item: currentItem)
updateFooterBarButtonItems(isPlayingVideo: false)
updateFooterBarButtonItems()
// Gestures
@ -237,6 +237,15 @@ class MediaPageViewController: UIPageViewController, UIPageViewControllerDataSou
hasAppeared = true
becomeFirstResponder()
children.forEach { child in
switch child {
case let detailViewController as MediaDetailViewController:
detailViewController.parentDidAppear()
default: break
}
}
}
public override func viewWillDisappear(_ animated: Bool) {
@ -291,7 +300,7 @@ class MediaPageViewController: UIPageViewController, UIPageViewControllerDataSou
// MARK: View Helpers
public func willBePresentedAgain() {
updateFooterBarButtonItems(isPlayingVideo: false)
updateFooterBarButtonItems()
}
public func wasPresented() {
@ -309,7 +318,6 @@ class MediaPageViewController: UIPageViewController, UIPageViewControllerDataSou
self.navigationController?.setNavigationBarHidden(shouldHideToolbars, animated: false)
UIView.animate(withDuration: 0.1) {
self.currentViewController.setShouldHideToolbars(self.shouldHideToolbars)
self.bottomContainer.isHidden = self.shouldHideToolbars
}
}
@ -354,24 +362,12 @@ class MediaPageViewController: UIPageViewController, UIPageViewControllerDataSou
return videoPlayBarButton
}()
lazy var videoPauseBarButton: UIBarButtonItem = {
let videoPauseBarButton = UIBarButtonItem(
barButtonSystemItem: .pause,
target: self,
action: #selector(didPressPauseBarButton)
)
videoPauseBarButton.themeTintColor = .textPrimary
return videoPauseBarButton
}()
private func updateFooterBarButtonItems(isPlayingVideo: Bool) {
private func updateFooterBarButtonItems() {
self.footerBar.setItems(
[
shareBarButton,
buildFlexibleSpace(),
(self.currentItem.isVideo && isPlayingVideo ? self.videoPauseBarButton : nil),
(self.currentItem.isVideo && !isPlayingVideo ? self.videoPlayBarButton : nil),
(self.currentItem.isVideo ? self.videoPlayBarButton : nil),
(self.currentItem.isVideo ? buildFlexibleSpace() : nil),
deleteBarButton
].compactMap { $0 },
@ -465,8 +461,6 @@ class MediaPageViewController: UIPageViewController, UIPageViewControllerDataSou
// MARK: - Actions
@objc public func didPressAllMediaButton(sender: Any) {
currentViewController.stopAnyVideo()
// If the screen wasn't presented or it was presented from a location which isn't the
// MediaTileViewController then just pop/dismiss the screen
let parentNavController: UINavigationController? = {
@ -622,15 +616,6 @@ class MediaPageViewController: UIPageViewController, UIPageViewControllerDataSou
currentViewController.didPressPlayBarButton()
}
@objc public func didPressPauseBarButton() {
guard let currentViewController = self.viewControllers?.first as? MediaDetailViewController else {
SNLog("currentViewController was unexpectedly nil")
return
}
currentViewController.didPressPauseBarButton()
}
// MARK: UIPageViewControllerDelegate
var pendingViewController: MediaDetailViewController?
@ -650,9 +635,6 @@ class MediaPageViewController: UIPageViewController, UIPageViewControllerDataSou
} else {
self.captionContainerView.pendingText = nil
}
// Ensure upcoming page respects current toolbar status
pendingViewController.setShouldHideToolbars(self.shouldHideToolbars)
}
}
@ -679,8 +661,7 @@ class MediaPageViewController: UIPageViewController, UIPageViewControllerDataSou
updateTitle(item: currentItem)
updateMediaRail(item: currentItem)
previousPage.zoomOut(animated: false)
previousPage.stopAnyVideo()
updateFooterBarButtonItems(isPlayingVideo: false)
updateFooterBarButtonItems()
} else {
captionContainerView.pendingText = nil
}
@ -801,7 +782,6 @@ class MediaPageViewController: UIPageViewController, UIPageViewControllerDataSou
// Swapping mediaView for presentationView will be perceptible if we're not zoomed out all the way.
// currentVC
currentViewController.zoomOut(animated: true)
currentViewController.stopAnyVideo()
self.navigationController?.view.isUserInteractionEnabled = false
self.navigationController?.dismiss(animated: true, completion: { [weak self] in
@ -823,16 +803,6 @@ class MediaPageViewController: UIPageViewController, UIPageViewControllerDataSou
self.shouldHideToolbars = !self.shouldHideToolbars
}
public func mediaDetailViewController(_ mediaDetailViewController: MediaDetailViewController, isPlayingVideo: Bool) {
guard mediaDetailViewController == currentViewController else {
Logger.verbose("ignoring stale delegate.")
return
}
self.shouldHideToolbars = isPlayingVideo
self.updateFooterBarButtonItems(isPlayingVideo: isPlayingVideo)
}
// MARK: - Dynamic Header
private lazy var dateFormatter: DateFormatter = {

View File

@ -8,6 +8,7 @@ import GRDB
import SignalCoreKit
import SessionUtilitiesKit
import SessionSnodeKit
import SessionUIKit
public struct Attachment: Codable, Identifiable, Equatable, Hashable, FetchableRecord, PersistableRecord, TableRecord, ColumnExpressible {
public static var databaseTableName: String { "attachment" }
@ -788,6 +789,14 @@ extension Attachment {
public var isText: Bool { MIMETypeUtil.isText(contentType) }
public var isMicrosoftDoc: Bool { MIMETypeUtil.isMicrosoftDoc(contentType) }
/// Display name for the attachment: the sender-provided filename when
/// available, otherwise a generic label derived from the content type.
public var documentFileName: String {
    switch (sourceFilename, isImage, isAudio, isVideo) {
        case (.some(let explicitName), _, _, _): return explicitName
        case (_, true, _, _): return "Image File"
        case (_, _, true, _): return "Audio File"
        case (_, _, _, true): return "Video File"
        default: return "File"
    }
}
public var shortDescription: String {
if isImage { return "Image" }
if isAudio { return "Audio" }
@ -795,6 +804,15 @@ extension Attachment {
return "Document"
}
/// Subtitle text for a document attachment: the formatted file size,
/// with the formatted duration appended when a positive duration is known.
public var documentFileInfo: String {
    let sizeDescription: String = Format.fileSize(byteCount)
    
    guard let duration: TimeInterval = duration, duration > 0 else {
        return sizeDescription
    }
    
    return "\(sizeDescription), \(Format.duration(duration))"
}
public func readDataFromFile() throws -> Data? {
guard let filePath: String = self.originalFilePath else {
return nil

View File

@ -263,6 +263,25 @@ public class SignalAttachment: Equatable, Hashable {
return text
}
/// Returns the playable duration of this attachment in seconds, or `nil`
/// when the attachment is neither audio nor video, or the duration could
/// not be determined.
public func duration() -> TimeInterval? {
    switch (isAudio, isVideo) {
        case (true, _):
            // AVAudioPlayer reports 0 for data it can't parse; treat that
            // as "unknown" rather than surfacing a zero duration.
            let audioPlayer: AVAudioPlayer? = try? AVAudioPlayer(data: dataSource.data())
            // Note: `flatMap` (not `map`) is required here - the closure
            // returns `TimeInterval?`, so `map` would produce a nested
            // `TimeInterval??` which doesn't match the return type.
            return (audioPlayer?.duration).flatMap { $0 > 0 ? $0 : nil }
            
        case (_, true):
            return dataUrl.map { url in
                let asset: AVURLAsset = AVURLAsset(url: url, options: nil)
                // According to the CMTime docs "value/timescale = seconds"
                return (TimeInterval(asset.duration.value) / TimeInterval(asset.duration.timescale))
            }
            
        default: return nil
    }
}
// Returns the MIME type for this attachment or nil if no MIME type
// can be identified.

View File

@ -78,6 +78,7 @@ public struct MessageViewModel: FetchableRecordWithRowId, Decodable, Equatable,
case textOnlyMessage
case mediaMessage
case audio
case voiceMessage
case genericAttachment
case typingIndicator
case dateHeader
@ -289,7 +290,7 @@ public struct MessageViewModel: FetchableRecordWithRowId, Decodable, Equatable,
)
)
{
return .audio
return (attachment.variant == .voiceMessage ? .voiceMessage : .audio)
}
if attachment.isVisualMedia {

View File

@ -11,6 +11,14 @@ public enum Format {
return result
}()
// Shared formatter rendering media durations as zero-padded "mm:ss"
// (positional style); cached because formatter creation is expensive.
private static let durationFormatter: DateComponentsFormatter = {
let formatter = DateComponentsFormatter()
formatter.unitsStyle = .positional
formatter.allowedUnits = [.minute, .second ]
formatter.zeroFormattingBehavior = [ .pad ]
return formatter
}()
private static let oneKilobyte: Double = 1024;
private static let oneMegabyte: Double = (oneKilobyte * oneKilobyte)
@ -29,4 +37,8 @@ public enum Format {
.appending("KB") ?? "n/a")
}
}
/// Formats `duration` (in seconds) as a positional "mm:ss" string,
/// falling back to "0:00" when the formatter produces no value.
public static func duration(_ duration: TimeInterval) -> String {
    guard let formatted: String = Format.durationFormatter.string(from: duration) else {
        return "0:00"
    }
    
    return formatted
}
}

View File

@ -47,7 +47,7 @@ public class SSKDefaultKeychainStorage: NSObject, SSKKeychainStorage {
var error: NSError?
let result = SAMKeychain.password(forService: service, account: key, error: &error)
if let error = error {
throw KeychainStorageError.failure(code: Int32(error.code), description: "\(logTag) error retrieving string: \(error)")
throw KeychainStorageError.failure(code: Int32(error.code), description: "\(logTag) error retrieving string: \(error.localizedDescription)")
}
guard let string = result else {
throw KeychainStorageError.failure(code: nil, description: "\(logTag) could not retrieve string")
@ -62,7 +62,7 @@ public class SSKDefaultKeychainStorage: NSObject, SSKKeychainStorage {
var error: NSError?
let result = SAMKeychain.setPassword(string, forService: service, account: key, error: &error)
if let error = error {
throw KeychainStorageError.failure(code: Int32(error.code), description: "\(logTag) error setting string: \(error)")
throw KeychainStorageError.failure(code: Int32(error.code), description: "\(logTag) error setting string: \(error.localizedDescription)")
}
guard result else {
throw KeychainStorageError.failure(code: nil, description: "\(logTag) could not set string")
@ -73,7 +73,7 @@ public class SSKDefaultKeychainStorage: NSObject, SSKKeychainStorage {
var error: NSError?
let result = SAMKeychain.passwordData(forService: service, account: key, error: &error)
if let error = error {
throw KeychainStorageError.failure(code: Int32(error.code), description: "\(logTag) error retrieving data: \(error)")
throw KeychainStorageError.failure(code: Int32(error.code), description: "\(logTag) error retrieving data: \(error.localizedDescription)")
}
guard let data = result else {
throw KeychainStorageError.failure(code: nil, description: "\(logTag) could not retrieve data")
@ -88,7 +88,7 @@ public class SSKDefaultKeychainStorage: NSObject, SSKKeychainStorage {
var error: NSError?
let result = SAMKeychain.setPasswordData(data, forService: service, account: key, error: &error)
if let error = error {
throw KeychainStorageError.failure(code: Int32(error.code), description: "\(logTag) error setting data: \(error)")
throw KeychainStorageError.failure(code: Int32(error.code), description: "\(logTag) error setting data: \(error.localizedDescription)")
}
guard result else {
throw KeychainStorageError.failure(code: nil, description: "\(logTag) could not set data")
@ -103,7 +103,7 @@ public class SSKDefaultKeychainStorage: NSObject, SSKKeychainStorage {
if error.code == errSecItemNotFound {
return
}
throw KeychainStorageError.failure(code: Int32(error.code), description: "\(logTag) error removing data: \(error)")
throw KeychainStorageError.failure(code: Int32(error.code), description: "\(logTag) error removing data: \(error.localizedDescription)")
}
guard result else {
throw KeychainStorageError.failure(code: nil, description: "\(logTag) could not remove data")

View File

@ -2,6 +2,7 @@
import Foundation
import UIKit
import AVKit
import AVFoundation
import SessionUIKit
import SignalCoreKit
@ -16,7 +17,7 @@ protocol AttachmentPrepViewControllerDelegate: AnyObject {
// MARK: -
public class AttachmentPrepViewController: OWSViewController, PlayerProgressBarDelegate, OWSVideoPlayerDelegate, MediaMessageViewAudioDelegate {
public class AttachmentPrepViewController: OWSViewController {
// We sometimes shrink the attachment view so that it remains somewhat visible
// when the keyboard is presented.
public enum AttachmentViewScale {
@ -31,18 +32,6 @@ public class AttachmentPrepViewController: OWSViewController, PlayerProgressBarD
var attachment: SignalAttachment {
return attachmentItem.attachment
}
private lazy var videoPlayer: OWSVideoPlayer? = {
guard let videoURL = attachment.dataUrl else {
owsFailDebug("Missing videoURL")
return nil
}
let player: OWSVideoPlayer = OWSVideoPlayer(url: videoURL)
player.delegate = self
return player
}()
// MARK: - UI
@ -75,7 +64,6 @@ public class AttachmentPrepViewController: OWSViewController, PlayerProgressBarD
private lazy var mediaMessageView: MediaMessageView = {
let view: MediaMessageView = MediaMessageView(attachment: attachment, mode: .attachmentApproval)
view.translatesAutoresizingMaskIntoConstraints = false
view.audioDelegate = self
view.isHidden = (imageEditorView != nil)
return view
@ -92,29 +80,7 @@ public class AttachmentPrepViewController: OWSViewController, PlayerProgressBarD
return view
}()
private lazy var videoPlayerView: VideoPlayerView? = {
guard let videoPlayer: OWSVideoPlayer = videoPlayer else { return nil }
let view: VideoPlayerView = VideoPlayerView()
view.translatesAutoresizingMaskIntoConstraints = false
view.player = videoPlayer.avPlayer
let pauseGesture = UITapGestureRecognizer(target: self, action: #selector(didTapPlayerView(_:)))
view.addGestureRecognizer(pauseGesture)
return view
}()
private lazy var progressBar: PlayerProgressBar = {
let progressBar: PlayerProgressBar = PlayerProgressBar()
progressBar.translatesAutoresizingMaskIntoConstraints = false
progressBar.player = videoPlayer?.avPlayer
progressBar.delegate = self
return progressBar
}()
private lazy var playVideoButton: UIButton = {
private lazy var playButton: UIButton = {
let button: UIButton = UIButton()
button.translatesAutoresizingMaskIntoConstraints = false
button.contentMode = .scaleAspectFit
@ -168,21 +134,8 @@ public class AttachmentPrepViewController: OWSViewController, PlayerProgressBarD
imageEditorUpdateNavigationBar()
}
// Hide the play button embedded in the MediaView and replace it with our own.
// This allows us to zoom in on the media view without zooming in on the button
// TODO: This for both Audio and Video?
if attachment.isVideo, let playerView: VideoPlayerView = videoPlayerView {
mediaMessageView.videoPlayButton.isHidden = true
mediaMessageView.addSubview(playerView)
// We don't want the progress bar to zoom during "pinch-to-zoom"
// but we do want it to shrink with the media content when the user
// pops the keyboard.
contentContainerView.addSubview(progressBar)
contentContainerView.addSubview(playVideoButton)
}
else if attachment.isAudio, mediaMessageView.audioPlayer != nil {
contentContainerView.addSubview(progressBar)
if attachment.isVideo || attachment.isAudio {
contentContainerView.addSubview(playButton)
}
setupLayout()
@ -256,33 +209,17 @@ public class AttachmentPrepViewController: OWSViewController, PlayerProgressBarD
])
}
if attachment.isVideo, let playerView: VideoPlayerView = videoPlayerView {
if attachment.isVideo || attachment.isAudio {
let playButtonSize: CGFloat = ScaleFromIPhone5(70)
NSLayoutConstraint.activate([
playerView.topAnchor.constraint(equalTo: mediaMessageView.topAnchor),
playerView.leftAnchor.constraint(equalTo: mediaMessageView.leftAnchor),
playerView.rightAnchor.constraint(equalTo: mediaMessageView.rightAnchor),
playerView.bottomAnchor.constraint(equalTo: mediaMessageView.bottomAnchor),
progressBar.topAnchor.constraint(equalTo: view.topAnchor),
progressBar.widthAnchor.constraint(equalTo: contentContainerView.widthAnchor),
progressBar.heightAnchor.constraint(equalToConstant: 44),
playVideoButton.centerXAnchor.constraint(equalTo: contentContainerView.centerXAnchor),
playVideoButton.centerYAnchor.constraint(
playButton.centerXAnchor.constraint(equalTo: contentContainerView.centerXAnchor),
playButton.centerYAnchor.constraint(
equalTo: contentContainerView.centerYAnchor,
constant: -AttachmentPrepViewController.verticalCenterOffset
),
playVideoButton.widthAnchor.constraint(equalToConstant: playButtonSize),
playVideoButton.heightAnchor.constraint(equalToConstant: playButtonSize),
])
}
else if attachment.isAudio, mediaMessageView.audioPlayer != nil {
NSLayoutConstraint.activate([
progressBar.topAnchor.constraint(equalTo: view.topAnchor),
progressBar.widthAnchor.constraint(equalTo: contentContainerView.widthAnchor),
progressBar.heightAnchor.constraint(equalToConstant: 44)
playButton.widthAnchor.constraint(equalToConstant: playButtonSize),
playButton.heightAnchor.constraint(equalToConstant: playButtonSize),
])
}
}
@ -303,109 +240,18 @@ public class AttachmentPrepViewController: OWSViewController, PlayerProgressBarD
self.view.window?.endEditing(true)
}
@objc public func didTapPlayerView(_ gestureRecognizer: UIGestureRecognizer) {
self.view.window?.endEditing(true)
self.pauseVideo()
}
@objc public func playButtonTapped() {
self.playVideo()
}
// MARK: - Video
private func playVideo() {
guard let videoPlayer = self.videoPlayer else {
owsFailDebug("video player was unexpectedly nil")
return
}
UIView.animate(withDuration: 0.1) { [weak self] in
self?.playVideoButton.alpha = 0.0
}
guard let fileUrl: URL = attachment.dataUrl else { return SNLog("Missing video file") }
videoPlayer.play()
}
private func pauseVideo() {
guard let videoPlayer = self.videoPlayer else {
owsFailDebug("video player was unexpectedly nil")
return
}
videoPlayer.pause()
let player: AVPlayer = AVPlayer(url: fileUrl)
let viewController: AVPlayerViewController = AVPlayerViewController()
viewController.player = player
UIView.animate(withDuration: 0.1) { [weak self] in
self?.playVideoButton.alpha = 1.0
self.navigationController?.present(viewController, animated: true) { [weak player] in
player?.play()
}
}
public func videoPlayerDidPlayToCompletion(_ videoPlayer: OWSVideoPlayer) {
UIView.animate(withDuration: 0.1) { [weak self] in
self?.playVideoButton.alpha = 1.0
}
}
public func playerProgressBarDidStartScrubbing(_ playerProgressBar: PlayerProgressBar) {
if attachment.isAudio {
mediaMessageView.pauseAudio()
return
}
guard let videoPlayer = self.videoPlayer else {
owsFailDebug("video player was unexpectedly nil")
return
}
videoPlayer.pause()
}
public func playerProgressBar(_ playerProgressBar: PlayerProgressBar, scrubbedToTime time: CMTime) {
if attachment.isAudio {
mediaMessageView.setAudioTime(currentTime: CMTimeGetSeconds(time))
progressBar.manuallySetValue(CMTimeGetSeconds(time), durationSeconds: mediaMessageView.audioDurationSeconds)
return
}
guard let videoPlayer = self.videoPlayer else {
owsFailDebug("video player was unexpectedly nil")
return
}
videoPlayer.seek(to: time)
progressBar.updateState()
}
public func playerProgressBar(_ playerProgressBar: PlayerProgressBar, didFinishScrubbingAtTime time: CMTime, shouldResumePlayback: Bool) {
if attachment.isAudio {
mediaMessageView.setAudioTime(currentTime: CMTimeGetSeconds(time))
progressBar.manuallySetValue(CMTimeGetSeconds(time), durationSeconds: mediaMessageView.audioDurationSeconds)
if mediaMessageView.wasPlayingAudio {
mediaMessageView.playAudio()
}
return
}
guard let videoPlayer = self.videoPlayer else {
owsFailDebug("video player was unexpectedly nil")
return
}
videoPlayer.seek(to: time)
progressBar.updateState()
if (shouldResumePlayback) {
videoPlayer.play()
}
}
// MARK: - MediaMessageViewAudioDelegate
public func progressChanged(_ progressSeconds: CGFloat, durationSeconds: CGFloat) {
progressBar.manuallySetValue(progressSeconds, durationSeconds: durationSeconds)
}
// MARK: - Helpers
var isZoomable: Bool {

View File

@ -10,11 +10,7 @@ import SessionMessagingKit
import SignalCoreKit
import SessionUtilitiesKit
public protocol MediaMessageViewAudioDelegate: AnyObject {
func progressChanged(_ progressSeconds: CGFloat, durationSeconds: CGFloat)
}
public class MediaMessageView: UIView, OWSAudioPlayerDelegate {
public class MediaMessageView: UIView {
public enum Mode: UInt {
case large
case small
@ -26,25 +22,6 @@ public class MediaMessageView: UIView, OWSAudioPlayerDelegate {
private var disposables: Set<AnyCancellable> = Set()
public let mode: Mode
public let attachment: SignalAttachment
public lazy var audioPlayer: OWSAudioPlayer? = {
guard let dataUrl = attachment.dataUrl else { return nil }
return OWSAudioPlayer(mediaUrl: dataUrl, audioBehavior: .playback, delegate: self)
}()
public var wasPlayingAudio: Bool = false
public var audioProgressSeconds: CGFloat = 0
public var audioDurationSeconds: CGFloat = 0
public weak var audioDelegate: MediaMessageViewAudioDelegate?
public var playbackState = AudioPlaybackState.stopped {
didSet {
AssertIsOnMainThread()
ensureButtonState()
}
}
private lazy var validImage: UIImage? = {
if attachment.isImage {
@ -88,6 +65,7 @@ public class MediaMessageView: UIView, OWSAudioPlayerDelegate {
return image
}()
private lazy var duration: TimeInterval? = attachment.duration()
private var linkPreviewInfo: (url: String, draft: LinkPreviewDraft?)?
// MARK: Initializers
@ -210,34 +188,6 @@ public class MediaMessageView: UIView, OWSAudioPlayerDelegate {
return view
}()
lazy var videoPlayButton: UIImageView = {
let view: UIImageView = UIImageView(image: UIImage(named: "CirclePlay"))
view.translatesAutoresizingMaskIntoConstraints = false
view.contentMode = .scaleAspectFit
view.isHidden = true
return view
}()
/// Note: This uses different assets from the `videoPlayButton` and has a 'Pause' state
private lazy var audioPlayPauseButton: UIButton = {
let button: UIButton = UIButton()
button.translatesAutoresizingMaskIntoConstraints = false
button.clipsToBounds = true
button.setThemeBackgroundColorForced(
.theme(.classicLight, color: .settings_tabBackground),
for: .normal
)
button.setThemeBackgroundColorForced(
.theme(.classicLight, color: .highlighted(.settings_tabBackground)),
for: .highlighted
)
button.addTarget(self, action: #selector(audioPlayPauseButtonPressed), for: .touchUpInside)
button.isHidden = true
return button
}()
private lazy var titleStackView: UIStackView = {
let stackView: UIStackView = UIStackView()
stackView.translatesAutoresizingMaskIntoConstraints = false
@ -354,7 +304,9 @@ public class MediaMessageView: UIView, OWSAudioPlayerDelegate {
// Format string for file size label in call interstitial view.
// Embeds: {{file size as 'N mb' or 'N kb'}}.
let fileSize: UInt = attachment.dataLength
label.text = String(format: "ATTACHMENT_APPROVAL_FILE_SIZE_FORMAT".localized(), Format.fileSize(fileSize))
label.text = duration
.map { "\(Format.fileSize(fileSize)), \(Format.duration($0))" }
.defaulting(to: Format.fileSize(fileSize))
label.textAlignment = .center
}
@ -373,7 +325,6 @@ public class MediaMessageView: UIView, OWSAudioPlayerDelegate {
// Setup the view hierarchy
addSubview(stackView)
addSubview(loadingView)
addSubview(videoPlayButton)
stackView.addArrangedSubview(imageView)
stackView.addArrangedSubview(animatedImageView)
@ -396,24 +347,15 @@ public class MediaMessageView: UIView, OWSAudioPlayerDelegate {
// Note: The 'attachmentApproval' mode provides it's own play button to keep
// it at the proper scale when zooming
imageView.isHidden = false
videoPlayButton.isHidden = (mode == .attachmentApproval)
}
else if attachment.isAudio {
// Hide the 'audioPlayPauseButton' if the 'audioPlayer' failed to get created
imageView.isHidden = false
audioPlayPauseButton.isHidden = (audioPlayer == nil)
setAudioIconToPlay()
setAudioProgress(0, duration: (audioPlayer?.duration ?? 0))
fileTypeImageView.image = UIImage(named: "table_ic_notification_sound")?
.withRenderingMode(.alwaysTemplate)
fileTypeImageView.themeTintColor = .textPrimary
fileTypeImageView.isHidden = false
// Note: There is an annoying bug where the MediaMessageView will fill the screen if the
// 'audioPlayPauseButton' is added anywhere within the view hierarchy causing issues with
// the min scale on 'image' and 'animatedImage' file types (assume it's actually any UIButton)
addSubview(audioPlayPauseButton)
}
else if attachment.isUrl {
imageView.isHidden = false
@ -481,8 +423,6 @@ public class MediaMessageView: UIView, OWSAudioPlayerDelegate {
}()
let imageSize: CGFloat = (maybeImageSize ?? 0)
let audioButtonSize: CGFloat = (imageSize / 2.5)
audioPlayPauseButton.layer.cornerRadius = (audioButtonSize / 2)
// Actual layout
NSLayoutConstraint.activate([
@ -531,9 +471,6 @@ public class MediaMessageView: UIView, OWSAudioPlayerDelegate {
multiplier: ((fileTypeImageView.image?.size.width ?? 1) / (fileTypeImageView.image?.size.height ?? 1))
),
fileTypeImageView.widthAnchor.constraint(equalTo: imageView.widthAnchor, multiplier: 0.5),
videoPlayButton.centerXAnchor.constraint(equalTo: centerXAnchor),
videoPlayButton.centerYAnchor.constraint(equalTo: centerYAnchor),
loadingView.centerXAnchor.constraint(equalTo: imageView.centerXAnchor),
loadingView.centerYAnchor.constraint(equalTo: imageView.centerYAnchor),
@ -548,18 +485,6 @@ public class MediaMessageView: UIView, OWSAudioPlayerDelegate {
subtitleLabel.widthAnchor.constraint(equalTo: stackView.widthAnchor, constant: -(32 * 2))
])
}
// Note: There is an annoying bug where the MediaMessageView will fill the screen if the
// 'audioPlayPauseButton' is added anywhere within the view hierarchy causing issues with
// the min scale on 'image' and 'animatedImage' file types (assume it's actually any UIButton)
if attachment.isAudio {
NSLayoutConstraint.activate([
audioPlayPauseButton.centerXAnchor.constraint(equalTo: imageView.centerXAnchor),
audioPlayPauseButton.centerYAnchor.constraint(equalTo: imageView.centerYAnchor),
audioPlayPauseButton.widthAnchor.constraint(equalToConstant: audioButtonSize),
audioPlayPauseButton.heightAnchor.constraint(equalToConstant: audioButtonSize),
])
}
}
// MARK: - Link Loading
@ -615,88 +540,4 @@ public class MediaMessageView: UIView, OWSAudioPlayerDelegate {
)
.store(in: &disposables)
}
// MARK: - Functions
public func playAudio() {
audioPlayer?.play()
ensureButtonState()
}
/// Pauses audio playback, first recording whether audio was playing so
/// scrubbing code can decide to resume playback afterwards.
public func pauseAudio() {
// Remember the pre-pause state; read back after a scrub gesture ends to
// decide whether playback should resume.
wasPlayingAudio = (audioPlayer?.isPlaying == true)
// If the 'audioPlayer' has a duration of 0 then we probably haven't played previously which
// will result in the audioPlayer having a 'duration' of 0 breaking the progressBar. We play
// the audio to get it to properly load the file right before pausing it so the data is
// loaded correctly
if audioPlayer?.duration == 0 {
audioPlayer?.play()
}
audioPlayer?.pause()
ensureButtonState()
}
public func setAudioTime(currentTime: TimeInterval) {
audioPlayer?.setCurrentTime(currentTime)
}
// MARK: - Event Handlers
@objc func audioPlayPauseButtonPressed(sender: UIButton) {
audioPlayer?.togglePlayState()
}
// MARK: - OWSAudioPlayerDelegate
public func audioPlaybackState() -> AudioPlaybackState {
return playbackState
}
public func setAudioPlaybackState(_ value: AudioPlaybackState) {
playbackState = value
}
public func showInvalidAudioFileAlert() {
let modal: ConfirmationModal = ConfirmationModal(
targetView: CurrentAppContext().frontmostViewController()?.view,
info: ConfirmationModal.Info(
title: CommonStrings.errorAlertTitle,
body: .text("INVALID_AUDIO_FILE_ALERT_ERROR_MESSAGE".localized()),
cancelTitle: "BUTTON_OK".localized(),
cancelStyle: .alert_text
)
)
CurrentAppContext().frontmostViewController()?.present(modal, animated: true)
}
public func audioPlayerDidFinishPlaying(_ player: OWSAudioPlayer, successfully flag: Bool) {
// Do nothing
}
/// Syncs the play/pause icon with the current `playbackState`.
private func ensureButtonState() {
    if case .playing = playbackState {
        setAudioIconToPause()
    }
    else {
        setAudioIconToPlay()
    }
}
public func setAudioProgress(_ progress: CGFloat, duration: CGFloat) {
// Note: When the OWSAudioPlayer stops it sets the duration to 0 (which we want to ignore so
// the UI doesn't look buggy)
let finalDuration: CGFloat = (duration > 0 ? duration : audioDurationSeconds)
audioProgressSeconds = progress
audioDurationSeconds = finalDuration
audioDelegate?.progressChanged(progress, durationSeconds: finalDuration)
}
private func setAudioIconToPlay() {
audioPlayPauseButton.setImage(UIImage(named: "Play"), for: .normal)
}
private func setAudioIconToPause() {
audioPlayPauseButton.setImage(UIImage(named: "Pause"), for: .normal)
}
}

View File

@ -1,74 +0,0 @@
// Copyright (c) 2019 Open Whisper Systems. All rights reserved.
import Foundation
import AVFoundation
import SessionMessagingKit
import SignalCoreKit
/// Delegate for `OWSVideoPlayer` playback lifecycle notifications.
public protocol OWSVideoPlayerDelegate: AnyObject {
// Invoked when the player's current item plays through to its end time.
func videoPlayerDidPlayToCompletion(_ videoPlayer: OWSVideoPlayer)
}
/// Thin wrapper around `AVPlayer` that ties playback to the app's shared
/// audio session (via an `AudioActivity`) and reports play-to-completion
/// through a delegate.
public class OWSVideoPlayer {
// Underlying AVPlayer doing the actual playback; exposed so views
// (e.g. a player view or progress bar) can attach to it.
public let avPlayer: AVPlayer
// Audio-session activity token: started on play, ended on pause/stop and
// on completion so other audio can resume.
let audioActivity: AudioActivity
// Notified when playback reaches the end of the current item.
public weak var delegate: OWSVideoPlayerDelegate?
/// Creates a player for the media at `url` and subscribes to the
/// "did play to end time" notification for the player's current item.
@objc public init(url: URL) {
self.avPlayer = AVPlayer(url: url)
self.audioActivity = AudioActivity(audioDescription: "[OWSVideoPlayer] url:\(url)", behavior: .playback)
NotificationCenter.default.addObserver(self,
selector: #selector(playerItemDidPlayToCompletion(_:)),
name: NSNotification.Name.AVPlayerItemDidPlayToEndTime,
object: avPlayer.currentItem)
}
// MARK: Playback Controls
/// Pauses playback and releases the audio session activity.
@objc
public func pause() {
avPlayer.pause()
Environment.shared?.audioSession.endAudioActivity(self.audioActivity)
}
/// Starts (or resumes) playback, claiming the audio session first.
/// Rewinds to the start if the item previously played to the end.
@objc
public func play() {
let success = (Environment.shared?.audioSession.startAudioActivity(self.audioActivity) == true)
assert(success)
guard let item = avPlayer.currentItem else {
owsFailDebug("video player item was unexpectedly nil")
return
}
if item.currentTime() == item.duration {
// Rewind for repeated plays, but only if it previously played to end.
avPlayer.seek(to: CMTime.zero, toleranceBefore: .zero, toleranceAfter: .zero)
}
avPlayer.play()
}
/// Stops playback, rewinds to the beginning and releases the audio
/// session activity.
@objc
public func stop() {
avPlayer.pause()
avPlayer.seek(to: CMTime.zero, toleranceBefore: .zero, toleranceAfter: .zero)
Environment.shared?.audioSession.endAudioActivity(self.audioActivity)
}
/// Seeks precisely (zero tolerance) to `time` without changing the
/// play/pause state.
@objc(seekToTime:)
public func seek(to time: CMTime) {
avPlayer.seek(to: time, toleranceBefore: .zero, toleranceAfter: .zero)
}
// MARK: private
// Fired by NotificationCenter when the current item finishes playing;
// forwards to the delegate and releases the audio session.
@objc
private func playerItemDidPlayToCompletion(_ notification: Notification) {
self.delegate?.videoPlayerDidPlayToCompletion(self)
Environment.shared?.audioSession.endAudioActivity(self.audioActivity)
}
}

View File

@ -1,260 +0,0 @@
// Copyright © 2022 Rangeproof Pty Ltd. All rights reserved.
import UIKit
import AVFoundation
import SessionUIKit
import SignalCoreKit
@objc
public class VideoPlayerView: UIView {
    // Back the view with an AVPlayerLayer so video frames render directly
    // into this view's layer (overrides the UIView default of CALayer).
    override public static var layerClass: AnyClass {
        return AVPlayerLayer.self
    }
    
    var playerLayer: AVPlayerLayer {
        return layer as! AVPlayerLayer
    }
    
    /// The player whose output this view displays; forwarded straight to
    /// the backing `AVPlayerLayer`.
    @objc
    public var player: AVPlayer? {
        get { return playerLayer.player }
        set { playerLayer.player = newValue }
    }
}
/// Callbacks emitted by `PlayerProgressBar` while the user scrubs the
/// playback slider.
@objc
public protocol PlayerProgressBarDelegate {
// User touched down on the slider (conformers typically pause playback).
func playerProgressBarDidStartScrubbing(_ playerProgressBar: PlayerProgressBar)
// Slider value changed mid-scrub; conformers seek the player to `time`.
func playerProgressBar(_ playerProgressBar: PlayerProgressBar, scrubbedToTime time: CMTime)
// Scrub ended at `time`; `shouldResumePlayback` indicates whether the
// player was playing before the scrub began.
func playerProgressBar(_ playerProgressBar: PlayerProgressBar, didFinishScrubbingAtTime time: CMTime, shouldResumePlayback: Bool)
}
// Allows the user to tap anywhere on the slider to set it's position,
// without first having to grab the thumb.
class TrackingSlider: UISlider {
    override init(frame: CGRect) {
        super.init(frame: frame)
    }
    
    required init?(coder aDecoder: NSCoder) {
        notImplemented()
    }
    
    /// Begin tracking for any touch on the bar so a tap immediately moves
    /// the thumb, instead of requiring the user to grab the thumb first.
    override func beginTracking(_ touch: UITouch, with event: UIEvent?) -> Bool {
        return true
    }
}
@objc
public class PlayerProgressBar: UIView {
    @objc
    public weak var delegate: PlayerProgressBarDelegate?

    /// Formats elapsed/remaining playback times as zero-padded "m:ss" strings.
    private lazy var formatter: DateComponentsFormatter = {
        let formatter = DateComponentsFormatter()
        formatter.unitsStyle = .positional
        formatter.allowedUnits = [.minute, .second ]
        formatter.zeroFormattingBehavior = [ .pad ]

        return formatter
    }()

    // MARK: Subviews

    private let positionLabel = UILabel()
    private let remainingLabel = UILabel()
    private let slider = TrackingSlider()
    private let blurView = UIVisualEffectView()

    /// Token returned by `addPeriodicTimeObserver(forInterval:queue:using:)`.
    ///
    /// Per the AVPlayer documentation this token must be retained for as long as we
    /// want the callback to fire, and must be passed back to `removeTimeObserver(_:)`
    /// exactly once. It was previously held `weak` (and never removed), which both
    /// risked the observer being dropped immediately and leaked a registration every
    /// time `player` was re-assigned.
    private var progressObserver: Any?

    private let kPreferredTimeScale: CMTimeScale = 100

    @objc
    public var player: AVPlayer? {
        didSet {
            // Unregister the observer from the outgoing player first: AVPlayer keeps
            // invoking (and asserting about) observers that are never removed.
            if let observer: Any = progressObserver {
                oldValue?.removeTimeObserver(observer)
                progressObserver = nil
            }

            guard let item = player?.currentItem else {
                owsFailDebug("No player item")
                return
            }

            slider.minimumValue = 0

            let duration: CMTime = item.asset.duration
            slider.maximumValue = Float(CMTimeGetSeconds(duration))

            updateState()

            // OPTIMIZE We need a high frequency observer for smooth slider updates while playing,
            // but could use a much less frequent observer for label updates
            progressObserver = player?.addPeriodicTimeObserver(forInterval: CMTime(seconds: 0.1, preferredTimescale: kPreferredTimeScale), queue: nil, using: { [weak self] _ in
                // If it is playing update the time
                if self?.player?.rate != 0 && self?.player?.error == nil {
                    self?.updateState()
                }
            })
        }
    }

    required public init?(coder aDecoder: NSCoder) {
        notImplemented()
    }

    override public init(frame: CGRect) {
        super.init(frame: frame)

        // Background & blur
        let backgroundView = UIView()
        backgroundView.themeBackgroundColor = .backgroundSecondary
        backgroundView.alpha = Values.lowOpacity
        addSubview(backgroundView)
        backgroundView.pin(to: self)

        // Respect the accessibility setting by skipping the blur layer entirely.
        if !UIAccessibility.isReduceTransparencyEnabled {
            addSubview(blurView)
            blurView.pin(to: self)

            ThemeManager.onThemeChange(observer: blurView) { [weak blurView] theme, _ in
                switch theme.interfaceStyle {
                    case .light: blurView?.effect = UIBlurEffect(style: .light)
                    default: blurView?.effect = UIBlurEffect(style: .dark)
                }
            }
        }

        // Configure controls
        //
        // Monospaced digits stop the labels jittering horizontally as the time ticks.
        let kLabelFont = UIFont.monospacedDigitSystemFont(ofSize: 12, weight: UIFont.Weight.regular)
        positionLabel.font = kLabelFont
        remainingLabel.font = kLabelFont

        // We use a smaller thumb for the progress slider.
        slider.setThumbImage(#imageLiteral(resourceName: "sliderProgressThumb"), for: .normal)
        slider.themeMinimumTrackTintColor = .backgroundPrimary
        slider.themeMaximumTrackTintColor = .backgroundPrimary

        slider.addTarget(self, action: #selector(handleSliderTouchDown), for: .touchDown)
        slider.addTarget(self, action: #selector(handleSliderTouchUp), for: .touchUpInside)
        slider.addTarget(self, action: #selector(handleSliderTouchUp), for: .touchUpOutside)
        slider.addTarget(self, action: #selector(handleSliderValueChanged), for: .valueChanged)

        // Panning is a no-op. We just absorb pan gestures originating in the video
        // controls to stop them propagating so we don't inadvertently change pages
        // while trying to scrub in the MediaPageView.
        let panAbsorber = UIPanGestureRecognizer(target: self, action: nil)
        self.addGestureRecognizer(panAbsorber)

        // Layout Subviews
        addSubview(positionLabel)
        addSubview(remainingLabel)
        addSubview(slider)

        positionLabel.autoPinEdge(toSuperviewMargin: .leading)
        positionLabel.autoVCenterInSuperview()

        let kSliderMargin: CGFloat = 8

        slider.autoPinEdge(.leading, to: .trailing, of: positionLabel, withOffset: kSliderMargin)
        slider.autoVCenterInSuperview()

        remainingLabel.autoPinEdge(.leading, to: .trailing, of: slider, withOffset: kSliderMargin)
        remainingLabel.autoPinEdge(toSuperviewMargin: .trailing)
        remainingLabel.autoVCenterInSuperview()
    }

    deinit {
        // Balance the registration made in `player.didSet`; AVPlayer requires every
        // periodic time observer to be explicitly removed.
        if let observer: Any = progressObserver {
            player?.removeTimeObserver(observer)
        }
    }

    // MARK: Gesture handling

    /// Captured on touch-down so we can tell the delegate whether to resume playback
    /// once scrubbing finishes.
    var wasPlayingWhenScrubbingStarted: Bool = false

    @objc
    private func handleSliderTouchDown(_ slider: UISlider) {
        guard let player = self.player else {
            owsFailDebug("player was nil")
            return
        }

        self.wasPlayingWhenScrubbingStarted = (player.rate != 0) && (player.error == nil)
        self.delegate?.playerProgressBarDidStartScrubbing(self)
    }

    @objc
    private func handleSliderTouchUp(_ slider: UISlider) {
        let sliderTime = time(slider: slider)
        self.delegate?.playerProgressBar(self, didFinishScrubbingAtTime: sliderTime, shouldResumePlayback: wasPlayingWhenScrubbingStarted)
    }

    @objc
    private func handleSliderValueChanged(_ slider: UISlider) {
        let sliderTime = time(slider: slider)
        self.delegate?.playerProgressBar(self, scrubbedToTime: sliderTime)
    }

    // MARK: Render cycle

    /// Syncs the labels and slider position with the player's current time.
    public func updateState() {
        guard let player = player else {
            owsFailDebug("player isn't set.")
            return
        }

        guard let item = player.currentItem else {
            owsFailDebug("player has no item.")
            return
        }

        let position = player.currentTime()
        let positionSeconds: Float64 = CMTimeGetSeconds(position)
        positionLabel.text = formatter.string(from: positionSeconds)

        let duration: CMTime = item.asset.duration
        let remainingTime = duration - position
        let remainingSeconds = CMTimeGetSeconds(remainingTime)

        guard let remainingString = formatter.string(from: remainingSeconds) else {
            owsFailDebug("unable to format time remaining")
            remainingLabel.text = "0:00"
            return
        }

        // show remaining time as negative
        remainingLabel.text = "-\(remainingString)"

        slider.setValue(Float(positionSeconds), animated: false)
    }

    // MARK: Util

    /// Converts the slider's current value (in seconds) into a `CMTime`.
    private func time(slider: UISlider) -> CMTime {
        let seconds: Double = Double(slider.value)
        return CMTime(seconds: seconds, preferredTimescale: kPreferredTimeScale)
    }

    // MARK: - Functions

    /// Drives the bar directly from externally-tracked progress (used when there is
    /// no `AVPlayer` attached to observe).
    public func manuallySetValue(_ positionSeconds: CGFloat, durationSeconds: CGFloat) {
        let remainingSeconds = (durationSeconds - positionSeconds)

        slider.minimumValue = 0
        slider.maximumValue = Float(durationSeconds)

        positionLabel.text = formatter.string(from: positionSeconds)

        guard let remainingString = formatter.string(from: remainingSeconds) else {
            owsFailDebug("unable to format time remaining")
            remainingLabel.text = "0:00"
            return
        }

        // show remaining time as negative
        remainingLabel.text = "-\(remainingString)"

        slider.setValue(Float(positionSeconds), animated: false)
    }
}