Fixed a few bugs with media attachment handling, added webp support

Updated the OpenGroupManager to create a BlindedIdLookup for messages within the `inbox` (validating that the sessionId does actually match the blindedId)
Added support for static and animated WebP images
Added basic support for HEIC and HEIF images
Fixed an issue where the file size limit was set to 10,000,000 bytes instead of 10,485,760 bytes (which is actually 10 MB)
Fixed an issue where attachments uploaded by the current user on other devices would always show a loading indicator
Fixed an issue where media attachments that don't contain width/height information in their protos weren't updating the values once the download was completed
Fixed an issue where the media view could download an invalid file and endlessly appear to be downloading
This commit is contained in:
Morgan Pretty 2022-07-29 15:26:24 +10:00
parent c022f7cda2
commit 4d5ded7557
12 changed files with 424 additions and 67 deletions

View File

@ -24,6 +24,7 @@ abstract_target 'GlobalDependencies' do
pod 'PureLayout', '~> 3.1.8'
pod 'NVActivityIndicatorView'
pod 'YYImage', git: 'https://github.com/signalapp/YYImage'
pod 'YYImage/libwebp', git: 'https://github.com/signalapp/YYImage'
pod 'ZXingObjC'
pod 'DifferenceKit'
end
@ -52,6 +53,7 @@ abstract_target 'GlobalDependencies' do
pod 'SAMKeychain'
pod 'SwiftProtobuf', '~> 1.5.0'
pod 'YYImage', git: 'https://github.com/signalapp/YYImage'
pod 'YYImage/libwebp', git: 'https://github.com/signalapp/YYImage'
pod 'DifferenceKit'
end
@ -71,6 +73,7 @@ abstract_target 'GlobalDependencies' do
target 'SessionUtilitiesKit' do
pod 'SAMKeychain'
pod 'YYImage/libwebp', git: 'https://github.com/signalapp/YYImage'
target 'SessionUtilitiesKitTests' do
inherit! :complete

View File

@ -29,6 +29,15 @@ PODS:
- DifferenceKit/Core
- GRDB.swift/SQLCipher (5.24.1):
- SQLCipher (>= 3.4.0)
- libwebp (1.2.1):
- libwebp/demux (= 1.2.1)
- libwebp/mux (= 1.2.1)
- libwebp/webp (= 1.2.1)
- libwebp/demux (1.2.1):
- libwebp/webp
- libwebp/mux (1.2.1):
- libwebp/demux
- libwebp/webp (1.2.1)
- Nimble (10.0.0)
- NVActivityIndicatorView (5.1.1):
- NVActivityIndicatorView/Base (= 5.1.1)
@ -124,6 +133,9 @@ PODS:
- YYImage (1.0.4):
- YYImage/Core (= 1.0.4)
- YYImage/Core (1.0.4)
- YYImage/libwebp (1.0.4):
- libwebp
- YYImage/Core
- ZXingObjC (3.6.5):
- ZXingObjC/All (= 3.6.5)
- ZXingObjC/All (3.6.5)
@ -149,6 +161,7 @@ DEPENDENCIES:
- WebRTC-lib
- YapDatabase/SQLCipher (from `https://github.com/oxen-io/session-ios-yap-database.git`, branch `signal-release`)
- YYImage (from `https://github.com/signalapp/YYImage`)
- YYImage/libwebp (from `https://github.com/signalapp/YYImage`)
- ZXingObjC
SPEC REPOS:
@ -158,6 +171,7 @@ SPEC REPOS:
- CryptoSwift
- DifferenceKit
- GRDB.swift
- libwebp
- Nimble
- NVActivityIndicatorView
- OpenSSL-Universal
@ -212,6 +226,7 @@ SPEC CHECKSUMS:
Curve25519Kit: e63f9859ede02438ae3defc5e1a87e09d1ec7ee6
DifferenceKit: 5659c430bb7fe45876fa32ce5cba5d6167f0c805
GRDB.swift: b3180ce2135fc06a453297889b746b1478c4d8c7
libwebp: 98a37e597e40bfdb4c911fc98f2c53d0b12d05fc
Nimble: 5316ef81a170ce87baf72dd961f22f89a602ff84
NVActivityIndicatorView: 1f6c5687f1171810aa27a3296814dc2d7dec3667
OpenSSL-Universal: e7311447fd2419f57420c79524b641537387eff2
@ -230,6 +245,6 @@ SPEC CHECKSUMS:
YYImage: f1ddd15ac032a58b78bbed1e012b50302d318331
ZXingObjC: fdbb269f25dd2032da343e06f10224d62f537bdb
PODFILE CHECKSUM: 6ab902a81a379cc2c0a9a92c334c78d413190338
PODFILE CHECKSUM: 456facc7043447a9c67733cf8846ec62afff8ea8
COCOAPODS: 1.11.3

View File

@ -129,7 +129,10 @@ public class MediaView: UIView {
configure(forError: .failed)
return false
}
guard attachment.state != .uploaded else { return false }
// If this message was uploaded on a different device it'll now be seen as 'downloaded' (but
// will still be outgoing - we don't want to show a loading indicator in this case)
guard attachment.state != .uploaded && attachment.state != .downloaded else { return false }
let loader = MediaLoaderView()
addSubview(loader)
@ -164,9 +167,13 @@ public class MediaView: UIView {
}
strongSelf.tryToLoadMedia(
loadMediaBlock: { applyMediaBlock in
guard attachment.isValid else { return }
guard attachment.isValid else {
self?.configure(forError: .invalid)
return
}
guard let filePath: String = attachment.originalFilePath else {
owsFailDebug("Attachment stream missing original file path.")
self?.configure(forError: .invalid)
return
}
@ -177,6 +184,7 @@ public class MediaView: UIView {
guard let image: YYImage = media as? YYImage else {
owsFailDebug("Media has unexpected type: \(type(of: media))")
self?.configure(forError: .invalid)
return
}
// FIXME: Animated images flicker when reloading the cells (even though they are in the cache)
@ -216,12 +224,18 @@ public class MediaView: UIView {
}
self?.tryToLoadMedia(
loadMediaBlock: { applyMediaBlock in
guard attachment.isValid else { return }
guard attachment.isValid else {
self?.configure(forError: .invalid)
return
}
attachment.thumbnail(
size: .large,
success: { image, _ in applyMediaBlock(image) },
failure: { Logger.error("Could not load thumbnail") }
failure: {
Logger.error("Could not load thumbnail")
self?.configure(forError: .invalid)
}
)
},
applyMediaBlock: { media in
@ -229,6 +243,7 @@ public class MediaView: UIView {
guard let image: UIImage = media as? UIImage else {
owsFailDebug("Media has unexpected type: \(type(of: media))")
self?.configure(forError: .invalid)
return
}
@ -277,12 +292,18 @@ public class MediaView: UIView {
}
self?.tryToLoadMedia(
loadMediaBlock: { applyMediaBlock in
guard attachment.isValid else { return }
guard attachment.isValid else {
self?.configure(forError: .invalid)
return
}
attachment.thumbnail(
size: .medium,
success: { image, _ in applyMediaBlock(image) },
failure: { Logger.error("Could not load thumbnail") }
failure: {
Logger.error("Could not load thumbnail")
self?.configure(forError: .invalid)
}
)
},
applyMediaBlock: { media in
@ -290,6 +311,7 @@ public class MediaView: UIView {
guard let image: UIImage = media as? UIImage else {
owsFailDebug("Media has unexpected type: \(type(of: media))")
self?.configure(forError: .invalid)
return
}

View File

@ -129,7 +129,15 @@ class MediaDetailViewController: OWSViewController, UIScrollViewDelegate, OWSVid
// MARK: - Functions
private func updateMinZoomScale() {
guard let image: UIImage = image else {
let maybeImageSize: CGSize? = {
switch self.mediaView {
case let imageView as UIImageView: return (imageView.image?.size ?? .zero)
case let imageView as YYAnimatedImageView: return (imageView.image?.size ?? .zero)
default: return nil
}
}()
guard let imageSize: CGSize = maybeImageSize else {
self.scrollView.minimumZoomScale = 1
self.scrollView.maximumZoomScale = 1
self.scrollView.zoomScale = 1
@ -138,13 +146,13 @@ class MediaDetailViewController: OWSViewController, UIScrollViewDelegate, OWSVid
let viewSize: CGSize = self.scrollView.bounds.size
guard image.size.width > 0 && image.size.height > 0 else {
SNLog("Invalid image dimensions (\(image.size.width), \(image.size.height))")
return;
guard imageSize.width > 0 && imageSize.height > 0 else {
SNLog("Invalid image dimensions (\(imageSize.width), \(imageSize.height))")
return
}
let scaleWidth: CGFloat = (viewSize.width / image.size.width)
let scaleHeight: CGFloat = (viewSize.height / image.size.height)
let scaleWidth: CGFloat = (viewSize.width / imageSize.width)
let scaleHeight: CGFloat = (viewSize.height / imageSize.height)
let minScale: CGFloat = min(scaleWidth, scaleHeight)
if minScale != self.scrollView.minimumZoomScale {

View File

@ -339,6 +339,23 @@ extension Attachment {
default: return (self.isValid, self.duration)
}
}()
// Regenerate this just in case we added support since the attachment was inserted into
// the database (eg. manually downloaded in a later update)
let isVisualMedia: Bool = (
MIMETypeUtil.isImage(contentType) ||
MIMETypeUtil.isVideo(contentType) ||
MIMETypeUtil.isAnimated(contentType)
)
let attachmentResolution: CGSize? = {
if let width: UInt = self.width, let height: UInt = self.height, width > 0, height > 0 {
return CGSize(width: Int(width), height: Int(height))
}
guard isVisualMedia else { return nil }
guard state == .downloaded else { return nil }
guard let originalFilePath: String = originalFilePath else { return nil }
return Attachment.imageSize(contentType: contentType, originalFilePath: originalFilePath)
}()
return Attachment(
id: self.id,
@ -351,10 +368,16 @@ extension Attachment {
sourceFilename: sourceFilename,
downloadUrl: (downloadUrl ?? self.downloadUrl),
localRelativeFilePath: (localRelativeFilePath ?? self.localRelativeFilePath),
width: width,
height: height,
width: attachmentResolution.map { UInt($0.width) },
height: attachmentResolution.map { UInt($0.height) },
duration: duration,
isVisualMedia: isVisualMedia,
isVisualMedia: (
// Regenerate this just in case we added support since the attachment was inserted into
// the database (eg. manually downloaded in a later update)
MIMETypeUtil.isImage(contentType) ||
MIMETypeUtil.isVideo(contentType) ||
MIMETypeUtil.isAnimated(contentType)
),
isValid: isValid,
encryptionKey: (encryptionKey ?? self.encryptionKey),
digest: (digest ?? self.digest),

View File

@ -72,6 +72,7 @@ public extension BlindedIdLookup {
static func fetchOrCreate(
_ db: Database,
blindedId: String,
sessionId: String? = nil,
openGroupServer: String,
openGroupPublicKey: String,
isCheckingForOutbox: Bool,
@ -90,6 +91,22 @@ public extension BlindedIdLookup {
// If the lookup already has a resolved sessionId then just return it immediately
guard lookup.sessionId == nil else { return lookup }
// If we were given a sessionId then validate it is correct and if so save it
if
let sessionId: String = sessionId,
dependencies.sodium.sessionId(
sessionId,
matchesBlindedId: blindedId,
serverPublicKey: openGroupPublicKey,
genericHash: dependencies.genericHash
)
{
lookup = try lookup
.with(sessionId: sessionId)
.saved(db)
return lookup
}
// We now need to try to match the blinded id to an existing contact, this can only be done by looping
// through all approved contacts and generating a blinded id for the provided open group for each to
// see if it matches the provided blindedId

View File

@ -14,7 +14,7 @@ public final class FileServerAPI: NSObject {
public static let oldServerPublicKey = "7cb31905b55cd5580c686911debf672577b3fb0bff81df4ce2d5c4cb3a7aaa69"
@objc public static let server = "http://filev2.getsession.org"
public static let serverPublicKey = "da21e1d886c6fbaea313f75298bd64aab03a97ce985b46bb2dad9f2089c8ee59"
public static let maxFileSize = 10_000_000 // 10 MB
public static let maxFileSize = (10 * 1024 * 1024) // 10 MB
/// The file server has a file size limit of `maxFileSize`, which the Service Nodes try to enforce as well. However, the limit applied by the Service Nodes
/// is on the **HTTP request** and not the actual file size. Because the file server expects the file data to be base 64 encoded, the size of the HTTP
/// request for a given file will be at least `ceil(n / 3) * 4` bytes, where n is the file size in bytes. This is the minimum size because there might also

View File

@ -41,6 +41,40 @@ public enum DisappearingMessagesJob: JobExecutor {
// The 'if' is only there to prevent the "variable never read" warning from showing
if backgroundTask != nil { backgroundTask = nil }
// TODO: Remove this for the final build
Storage.shared.writeAsync { db in
// Re-process all WebP images, and images with no width/height values to update their validity state
let supportedVisualMediaMimeTypes: Set<String> = MIMETypeUtil.supportedImageMIMETypes()
.appending(contentsOf: MIMETypeUtil.supportedAnimatedImageMIMETypes())
.appending(contentsOf: MIMETypeUtil.supportedVideoMIMETypes())
.asSet()
let attachments: [Attachment] = try Attachment
.filter(Attachment.Columns.state == Attachment.State.downloaded)
.filter(
Attachment.Columns.contentType == "image/webp" || (
(
Attachment.Columns.width == nil ||
Attachment.Columns.height == nil
) &&
supportedVisualMediaMimeTypes.contains(Attachment.Columns.contentType)
)
)
.filter(
!Attachment.Columns.isValid ||
!Attachment.Columns.isVisualMedia ||
Attachment.Columns.width == nil ||
Attachment.Columns.height == nil
)
.fetchAll(db)
if !attachments.isEmpty {
attachments.forEach { attachment in
_ = try? attachment.with(state: attachment.state).saved(db)
}
}
}
// TODO: Remove this for the final build
}
}

View File

@ -617,28 +617,37 @@ public final class OpenGroupManager: NSObject {
dependencies: dependencies
)
// If the message was an outgoing message then attempt to unblind the recipient (this will help put
// messages in the correct thread in case of message request approval race conditions as well as
// during device sync'ing and restoration)
// We want to update the BlindedIdLookup cache with the message info so we can avoid using the
// "expensive" lookup when possible
let lookup: BlindedIdLookup = try {
// Minor optimisation to avoid processing the same sender multiple times in the same
// 'handleMessages' call (since the 'mapping' call is done within a transaction we
// will never have a mapping come through part-way through processing these messages)
if let result: BlindedIdLookup = lookupCache[message.recipient] {
return result
}
return try BlindedIdLookup.fetchOrCreate(
db,
blindedId: (fromOutbox ?
message.recipient :
message.sender
),
sessionId: (fromOutbox ?
nil :
processedMessage?.threadId
),
openGroupServer: server.lowercased(),
openGroupPublicKey: openGroup.publicKey,
isCheckingForOutbox: fromOutbox,
dependencies: dependencies
)
}()
lookupCache[message.recipient] = lookup
// We also need to set the 'syncTarget' for outgoing messages to be consistent with
// standard messages
if fromOutbox {
// Attempt to un-blind the 'message.recipient'
let lookup: BlindedIdLookup = try {
// Minor optimisation to avoid processing the same sender multiple times in the same
// 'handleMessages' call (since the 'mapping' call is done within a transaction we
// will never have a mapping come through part-way through processing these messages)
if let result: BlindedIdLookup = lookupCache[message.recipient] {
return result
}
return try BlindedIdLookup.fetchOrCreate(
db,
blindedId: message.recipient,
openGroupServer: server.lowercased(),
openGroupPublicKey: openGroup.publicKey,
isCheckingForOutbox: true,
dependencies: dependencies
)
}()
let syncTarget: String = (lookup.sessionId ?? message.recipient)
switch processedMessage?.messageInfo.variant {
@ -650,8 +659,6 @@ public final class OpenGroupManager: NSObject {
default: break
}
lookupCache[message.recipient] = lookup
}
if let messageInfo: MessageReceiveJob.Details.MessageInfo = processedMessage?.messageInfo {

View File

@ -12,6 +12,9 @@ extern NSString *const OWSMimeTypeImageTiff1;
extern NSString *const OWSMimeTypeImageTiff2;
extern NSString *const OWSMimeTypeImageBmp1;
extern NSString *const OWSMimeTypeImageBmp2;
extern NSString *const OWSMimeTypeImageWebp;
extern NSString *const OWSMimeTypeImageHeic;
extern NSString *const OWSMimeTypeImageHeif;
extern NSString *const OWSMimeTypeUnknownForTests;
extern NSString *const kOversizeTextAttachmentUTI;
@ -36,6 +39,10 @@ extern NSString *const kSyncMessageFileExtension;
+ (nullable NSString *)getSupportedExtensionFromImageMIMEType:(NSString *)supportedMIMEType;
+ (nullable NSString *)getSupportedExtensionFromAnimatedMIMEType:(NSString *)supportedMIMEType;
+ (NSArray<NSString *> *)supportedImageMIMETypes;
+ (NSArray<NSString *> *)supportedAnimatedImageMIMETypes;
+ (NSArray<NSString *> *)supportedVideoMIMETypes;
+ (BOOL)isAnimated:(NSString *)contentType;
+ (BOOL)isImage:(NSString *)contentType;
+ (BOOL)isVideo:(NSString *)contentType;

View File

@ -19,6 +19,9 @@ NSString *const OWSMimeTypeImageTiff1 = @"image/tiff";
NSString *const OWSMimeTypeImageTiff2 = @"image/x-tiff";
NSString *const OWSMimeTypeImageBmp1 = @"image/bmp";
NSString *const OWSMimeTypeImageBmp2 = @"image/x-windows-bmp";
NSString *const OWSMimeTypeImageWebp = @"image/webp";
NSString *const OWSMimeTypeImageHeic = @"image/heic";
NSString *const OWSMimeTypeImageHeif = @"image/heif";
NSString *const OWSMimeTypeUnknownForTests = @"unknown/mimetype";
NSString *const OWSMimeTypeApplicationZip = @"application/zip";
NSString *const OWSMimeTypeApplicationPdf = @"application/pdf";
@ -85,7 +88,8 @@ NSString *const kSyncMessageFileExtension = @"bin";
@"image/bmp" : @"bmp",
@"image/x-windows-bmp" : @"bmp",
@"image/gif" : @"gif",
@"image/x-icon": @"ico"
@"image/x-icon": @"ico",
OWSMimeTypeImageWebp : @"webp"
};
});
return result;
@ -97,6 +101,7 @@ NSString *const kSyncMessageFileExtension = @"bin";
dispatch_once(&onceToken, ^{
result = @{
OWSMimeTypeImageGif : @"gif",
OWSMimeTypeImageWebp : @"image/webp",
};
});
return result;
@ -175,7 +180,8 @@ NSString *const kSyncMessageFileExtension = @"bin";
@"jpeg" : @"image/jpeg",
@"jpg" : @"image/jpeg",
@"tif" : @"image/tiff",
@"tiff" : @"image/tiff"
@"tiff" : @"image/tiff",
@"webp" : OWSMimeTypeImageWebp
};
});
return result;
@ -187,6 +193,7 @@ NSString *const kSyncMessageFileExtension = @"bin";
dispatch_once(&onceToken, ^{
result = @{
@"gif" : OWSMimeTypeImageGif,
@"image/webp" : OWSMimeTypeImageWebp
};
});
return result;
@ -556,6 +563,36 @@ NSString *const kSyncMessageFileExtension = @"bin";
return result;
}
// All still-image MIME types we can handle, derived from the keys of the
// image MIME-type -> extension map. Built once and cached for the process.
+ (NSArray<NSString *> *)supportedImageMIMETypes
{
    static NSArray<NSString *> *mimeTypes = nil;
    static dispatch_once_t once;
    dispatch_once(&once, ^{
        mimeTypes = [self supportedImageMIMETypesToExtensionTypes].allKeys;
    });
    return mimeTypes;
}
// All animated-image MIME types we can handle, derived from the keys of the
// animated MIME-type -> extension map. Built once and cached for the process.
+ (NSArray<NSString *> *)supportedAnimatedImageMIMETypes
{
    static NSArray<NSString *> *mimeTypes = nil;
    static dispatch_once_t once;
    dispatch_once(&once, ^{
        mimeTypes = [self supportedAnimatedMIMETypesToExtensionTypes].allKeys;
    });
    return mimeTypes;
}
// All video MIME types we can handle, derived from the keys of the video
// MIME-type -> extension map. Built once and cached for the process.
+ (NSArray<NSString *> *)supportedVideoMIMETypes
{
    static NSArray<NSString *> *mimeTypes = nil;
    static dispatch_once_t once;
    dispatch_once(&once, ^{
        mimeTypes = [self supportedVideoMIMETypesToExtensionTypes].allKeys;
    });
    return mimeTypes;
}
+ (NSDictionary *)genericMIMETypesToExtensionTypes
{
static NSDictionary *result = nil;
@ -1386,6 +1423,8 @@ NSString *const kSyncMessageFileExtension = @"bin";
@"image/fif" : @"fif",
@"image/g3fax" : @"g3",
@"image/gif" : @"gif",
@"image/heic" : @"heic",
@"image/heif" : @"heif",
@"image/ief" : @"ief",
@"image/jpeg" : @"jpg",
@"image/jutvision" : @"jut",
@ -1935,6 +1974,8 @@ NSString *const kSyncMessageFileExtension = @"bin";
@"hal" : @"application/vnd.hal+xml",
@"hbci" : @"application/vnd.hbci",
@"hdf" : @"application/x-hdf",
@"heic" : @"image/heic",
@"heif" : @"image/heif",
@"hh" : @"text/x-c",
@"hlp" : @"application/winhlp",
@"hpgl" : @"application/vnd.hp-hpgl",

View File

@ -2,6 +2,8 @@
#import "MIMETypeUtil.h"
#import "OWSFileSystem.h"
#import <AVFoundation/AVFoundation.h>
#import <libwebp/decode.h>
#import <libwebp/demux.h>
#import <SessionUtilitiesKit/SessionUtilitiesKit-Swift.h>
NS_ASSUME_NONNULL_BEGIN
@ -13,8 +15,18 @@ typedef NS_ENUM(NSInteger, ImageFormat) {
ImageFormat_Tiff,
ImageFormat_Jpeg,
ImageFormat_Bmp,
ImageFormat_Webp,
ImageFormat_Heic,
ImageFormat_Heif,
};
#pragma mark -
typedef struct {
CGSize pixelSize;
CGFloat depthBytes;
} ImageDimensionInfo;
// FIXME: Refactor all of these to be in Swift against 'Data'
@implementation NSData (Image)
@ -47,40 +59,47 @@ typedef NS_ENUM(NSInteger, ImageFormat) {
return YES;
}
+ (BOOL)ows_isValidImageAtPath:(NSString *)filePath mimeType:(nullable NSString *)mimeType
+ (nullable NSData *)ows_validImageDataAtPath:(NSString *)filePath mimeType:(nullable NSString *)mimeType
{
if (mimeType.length < 1) {
NSString *fileExtension = [filePath pathExtension].lowercaseString;
mimeType = [MIMETypeUtil mimeTypeForFileExtension:fileExtension];
}
if (mimeType.length < 1) {
return NO;
return nil;
}
NSNumber *_Nullable fileSize = [OWSFileSystem fileSizeOfPath:filePath];
if (!fileSize) {
return NO;
return nil;
}
BOOL isAnimated = [MIMETypeUtil isSupportedAnimatedMIMEType:mimeType];
if (isAnimated) {
if (fileSize.unsignedIntegerValue > OWSMediaUtils.kMaxFileSizeAnimatedImage) {
return NO;
return nil;
}
} else if ([MIMETypeUtil isSupportedImageMIMEType:mimeType]) {
if (fileSize.unsignedIntegerValue > OWSMediaUtils.kMaxFileSizeImage) {
return NO;
return nil;
}
} else {
return NO;
return nil;
}
NSError *error = nil;
NSData *_Nullable data = [NSData dataWithContentsOfFile:filePath options:NSDataReadingMappedIfSafe error:&error];
if (!data || error) {
return [NSData dataWithContentsOfFile:filePath options:NSDataReadingMappedIfSafe error:&error];
}
+ (BOOL)ows_isValidImageAtPath:(NSString *)filePath mimeType:(nullable NSString *)mimeType
{
NSData *_Nullable data = [NSData ows_validImageDataAtPath:filePath mimeType:mimeType];
if (!data) {
return NO;
}
if (![self ows_hasValidImageDimensionsAtPath:filePath isAnimated:isAnimated]) {
BOOL isAnimated = [MIMETypeUtil isSupportedAnimatedMIMEType:mimeType];
if (![self ows_hasValidImageDimensionsAtPath:filePath withData:data mimeType:mimeType isAnimated:isAnimated]) {
return NO;
}
@ -93,45 +112,98 @@ typedef NS_ENUM(NSInteger, ImageFormat) {
if (imageSource == NULL) {
return NO;
}
BOOL result = [NSData ows_hasValidImageDimensionWithImageSource:imageSource isAnimated:isAnimated];
ImageDimensionInfo dimensionInfo = [NSData ows_imageDimensionWithImageSource:imageSource isAnimated:isAnimated];
CFRelease(imageSource);
return result;
return [NSData ows_isValidImageDimension:dimensionInfo.pixelSize depthBytes:dimensionInfo.depthBytes isAnimated:isAnimated];
}
+ (BOOL)ows_hasValidImageDimensionsAtPath:(NSString *)path isAnimated:(BOOL)isAnimated
// Returns YES when the image at 'path' resolves to non-degenerate pixel
// dimensions (both width and height at least 1), NO otherwise.
+ (BOOL)ows_hasValidImageDimensionsAtPath:(NSString *)path withData:(NSData *)data mimeType:(nullable NSString *)mimeType isAnimated:(BOOL)isAnimated
{
    CGSize dimensions = [self ows_imageDimensionsAtPath:path withData:data mimeType:mimeType isAnimated:isAnimated];

    return (dimensions.width >= 1 && dimensions.height >= 1);
}
// Resolves the pixel dimensions of the image at 'path'.
//
// 'data' may be nil; for WebP it is then loaded (memory-mapped) from 'path'.
// WebP is measured with the libwebp demuxer (Image I/O cannot be relied on for
// it); everything else goes through CGImageSource so the whole image is never
// decoded into memory. Returns CGSizeZero when the image is unreadable or its
// dimensions fail validation.
+ (CGSize)ows_imageDimensionsAtPath:(NSString *)path withData:(nullable NSData *)data mimeType:(nullable NSString *)mimeType isAnimated:(BOOL)isAnimated
{
    NSURL *url = [NSURL fileURLWithPath:path];
    if (!url) {
        return CGSizeZero;
    }

    if ([mimeType isEqualToString:OWSMimeTypeImageWebp]) {
        NSData *targetData = data;

        if (targetData == nil) {
            NSError *error = nil;
            NSData *_Nullable loadedData = [NSData dataWithContentsOfFile:path options:NSDataReadingMappedIfSafe error:&error];

            // FIX: this previously checked '!data' — which is always nil on this
            // branch — so the fallback load unconditionally bailed out; validate
            // the freshly loaded data instead.
            if (!loadedData || error) {
                return CGSizeZero;
            }

            targetData = loadedData;
        }

        // FIX: previously measured 'data' (possibly nil, which messages to a
        // zero CGSize) rather than 'targetData'.
        CGSize imageSize = [targetData sizeForWebpData];
        if (imageSize.width < 1 || imageSize.height < 1) {
            return CGSizeZero;
        }

        // Reject files whose byte count exceeds the worst-case decoded size of
        // the largest animated image we allow (4 bytes per pixel, RGBA).
        const CGFloat kExpectedBytePerPixel = 4;
        CGFloat kMaxValidImageDimension = OWSMediaUtils.kMaxAnimatedImageDimensions;
        CGFloat kMaxBytes = kMaxValidImageDimension * kMaxValidImageDimension * kExpectedBytePerPixel;

        // FIX: previously compared 'data.length' (0 when data was nil) rather
        // than the data actually being measured.
        if (targetData.length > kMaxBytes) {
            return CGSizeZero;
        }

        return imageSize;
    }

    CGImageSourceRef imageSource = CGImageSourceCreateWithURL((__bridge CFURLRef)url, NULL);
    if (imageSource == NULL) {
        return CGSizeZero;
    }

    ImageDimensionInfo dimensionInfo = [self ows_imageDimensionWithImageSource:imageSource isAnimated:isAnimated];
    CFRelease(imageSource);

    if (![self ows_isValidImageDimension:dimensionInfo.pixelSize depthBytes:dimensionInfo.depthBytes isAnimated:isAnimated]) {
        return CGSizeZero;
    }

    return dimensionInfo.pixelSize;
}
+ (BOOL)ows_hasValidImageDimensionWithImageSource:(CGImageSourceRef)imageSource isAnimated:(BOOL)isAnimated
+ (ImageDimensionInfo)ows_imageDimensionWithImageSource:(CGImageSourceRef)imageSource isAnimated:(BOOL)isAnimated
{
NSDictionary *imageProperties
= (__bridge_transfer NSDictionary *)CGImageSourceCopyPropertiesAtIndex(imageSource, 0, NULL);
ImageDimensionInfo info;
info.pixelSize = CGSizeZero;
info.depthBytes = 0;
if (!imageProperties) {
return NO;
return info;
}
NSNumber *widthNumber = imageProperties[(__bridge NSString *)kCGImagePropertyPixelWidth];
if (!widthNumber) {
return NO;
return info;
}
CGFloat width = widthNumber.floatValue;
NSNumber *heightNumber = imageProperties[(__bridge NSString *)kCGImagePropertyPixelHeight];
if (!heightNumber) {
return NO;
return info;
}
CGFloat height = heightNumber.floatValue;
@ -139,7 +211,7 @@ typedef NS_ENUM(NSInteger, ImageFormat) {
* key is a CFNumberRef. */
NSNumber *depthNumber = imageProperties[(__bridge NSString *)kCGImagePropertyDepth];
if (!depthNumber) {
return NO;
return info;
}
NSUInteger depthBits = depthNumber.unsignedIntegerValue;
// This should usually be 1.
@ -149,13 +221,27 @@ typedef NS_ENUM(NSInteger, ImageFormat) {
* The value of this key is CFStringRef. */
NSString *colorModel = imageProperties[(__bridge NSString *)kCGImagePropertyColorModel];
if (!colorModel) {
return NO;
return info;
}
if (![colorModel isEqualToString:(__bridge NSString *)kCGImagePropertyColorModelRGB]
&& ![colorModel isEqualToString:(__bridge NSString *)kCGImagePropertyColorModelGray]) {
return info;
}
// Update the struct to return
info.pixelSize = CGSizeMake(width, height);
info.depthBytes = depthBytes;
return info;
}
+ (BOOL)ows_isValidImageDimension:(CGSize)imageSize depthBytes:(CGFloat)depthBytes isAnimated:(BOOL)isAnimated
{
if (imageSize.width < 1 || imageSize.height < 1 || depthBytes < 1) {
// Invalid metadata.
return NO;
}
// We only support (A)RGB and (A)Grayscale, so worst case is 4.
const CGFloat kWorseCastComponentsPerPixel = 4;
CGFloat bytesPerPixel = kWorseCastComponentsPerPixel * depthBytes;
@ -164,7 +250,7 @@ typedef NS_ENUM(NSInteger, ImageFormat) {
CGFloat kMaxValidImageDimension
= (isAnimated ? OWSMediaUtils.kMaxAnimatedImageDimensions : OWSMediaUtils.kMaxStillImageDimensions);
CGFloat kMaxBytes = kMaxValidImageDimension * kMaxValidImageDimension * kExpectedBytePerPixel;
CGFloat actualBytes = width * height * bytesPerPixel;
CGFloat actualBytes = imageSize.width * imageSize.height * bytesPerPixel;
if (actualBytes > kMaxBytes) {
return NO;
}
@ -205,6 +291,12 @@ typedef NS_ENUM(NSInteger, ImageFormat) {
case ImageFormat_Bmp:
return (mimeType == nil || [mimeType isEqualToString:OWSMimeTypeImageBmp1] ||
[mimeType isEqualToString:OWSMimeTypeImageBmp2]);
case ImageFormat_Webp:
return (mimeType == nil || [mimeType isEqualToString:OWSMimeTypeImageWebp]);
case ImageFormat_Heic:
return (mimeType == nil || [mimeType isEqualToString:OWSMimeTypeImageHeic]);
case ImageFormat_Heif:
return (mimeType == nil || [mimeType isEqualToString:OWSMimeTypeImageHeif]);
}
}
@ -235,9 +327,52 @@ typedef NS_ENUM(NSInteger, ImageFormat) {
} else if (byte0 == 0x49 && byte1 == 0x49) {
// Intel byte order TIFF
return ImageFormat_Tiff;
} else if (byte0 == 0x52 && byte1 == 0x49) {
// First two letters of RIFF tag.
return ImageFormat_Webp;
}
return [self ows_guessHighEfficiencyImageFormat];
}
// Sniffs HEIF-family formats from the ISO BMFF 'ftyp' box header.
// Returns ImageFormat_Heic / ImageFormat_Heif for brands CoreGraphics can
// render, otherwise ImageFormat_Unknown.
- (ImageFormat)ows_guessHighEfficiencyImageFormat
{
    // A HEIF image file has the first 16 bytes like
    // 0000 0018 6674 7970 6865 6963 0000 0000
    // so in this case the 5th to 12th bytes shall make a string of "ftypheic"
    const NSUInteger kHeifHeaderStartsAt = 4;
    const NSUInteger kHeifBrandStartsAt = 8;

    // We support "heic", "mif1" or "msf1". Other brands are invalid for us for now.
    // The length is 4 + 1 because the brand must be terminated with a null.
    // Include the null in the comparison to prevent a bogus brand like "heicfake"
    // from being considered valid.
    const NSUInteger kHeifSupportedBrandLength = 5;
    const NSUInteger kTotalHeaderLength = kHeifBrandStartsAt - kHeifHeaderStartsAt + kHeifSupportedBrandLength;

    if (self.length < kHeifBrandStartsAt + kHeifSupportedBrandLength) {
        return ImageFormat_Unknown;
    }
    // FIX: an unconditional 'return ImageFormat_Unknown;' previously sat here,
    // making every brand comparison below dead code, so HEIC/HEIF files were
    // never recognised.

    // These are the brands of HEIF formatted files that are renderable by CoreGraphics
    const NSString *kHeifBrandHeaderHeic = @"ftypheic\0";
    const NSString *kHeifBrandHeaderHeif = @"ftypmif1\0";
    const NSString *kHeifBrandHeaderHeifStream = @"ftypmsf1\0";

    // Pull the string from the header and compare it with the supported formats
    unsigned char bytes[kTotalHeaderLength];
    [self getBytes:&bytes range:NSMakeRange(kHeifHeaderStartsAt, kTotalHeaderLength)];
    NSData *data = [[NSData alloc] initWithBytes:bytes length:kTotalHeaderLength];
    NSString *marker = [[NSString alloc] initWithData:data encoding:NSUTF8StringEncoding];

    if ([kHeifBrandHeaderHeic isEqualToString:marker]) {
        return ImageFormat_Heic;
    } else if ([kHeifBrandHeaderHeif isEqualToString:marker]) {
        return ImageFormat_Heif;
    } else if ([kHeifBrandHeaderHeifStream isEqualToString:marker]) {
        return ImageFormat_Heif;
    } else {
        return ImageFormat_Unknown;
    }
}
- (NSString *_Nullable)ows_guessMimeType
@ -304,9 +439,18 @@ typedef NS_ENUM(NSInteger, ImageFormat) {
+ (CGSize)imageSizeForFilePath:(NSString *)filePath mimeType:(NSString *)mimeType
{
if (![NSData ows_isValidImageAtPath:filePath mimeType:mimeType]) {
NSData *_Nullable data = [NSData ows_validImageDataAtPath:filePath mimeType:mimeType];
if (!data) {
return CGSizeZero;
}
BOOL isAnimated = [MIMETypeUtil isSupportedAnimatedMIMEType:mimeType];
CGSize pixelSize = [NSData ows_imageDimensionsAtPath:filePath withData:data mimeType:mimeType isAnimated:isAnimated];
if (pixelSize.width > 0 && pixelSize.height > 0 && [mimeType isEqualToString:OWSMimeTypeImageWebp]) {
return pixelSize;
}
NSURL *url = [NSURL fileURLWithPath:filePath];
// With CGImageSource we avoid loading the whole image into memory.
@ -386,6 +530,42 @@ typedef NS_ENUM(NSInteger, ImageFormat) {
return result;
}
// MARK: - Webp
// Convenience wrapper: memory-maps the file at 'filePath' and measures it as
// WebP via -sizeForWebpData. Returns CGSizeZero when the file cannot be read.
+ (CGSize)sizeForWebpFilePath:(NSString *)filePath
{
    NSError *readError = nil;
    NSData *_Nullable fileData = [NSData dataWithContentsOfFile:filePath options:NSDataReadingMappedIfSafe error:&readError];

    if (fileData == nil || readError != nil) {
        return CGSizeZero;
    }

    return [fileData sizeForWebpData];
}
// Measures the canvas of this data interpreted as WebP, using the libwebp
// demuxer (handles both static and animated WebP). Returns CGSizeZero when
// the bytes are not demuxable, the canvas is degenerate, or no frames exist.
- (CGSize)sizeForWebpData
{
    WebPData container = { 0 };
    container.bytes = self.bytes;
    container.size = self.length;

    WebPDemuxer *demux = WebPDemux(&container);
    if (!demux) {
        return CGSizeZero;
    }

    CGFloat width = WebPDemuxGetI(demux, WEBP_FF_CANVAS_WIDTH);
    CGFloat height = WebPDemuxGetI(demux, WEBP_FF_CANVAS_HEIGHT);
    CGFloat frames = WebPDemuxGetI(demux, WEBP_FF_FRAME_COUNT);
    WebPDemuxDelete(demux);

    // A valid image needs a positive canvas and at least one frame.
    if (width <= 0 || height <= 0 || frames <= 0) {
        return CGSizeZero;
    }

    return CGSizeMake(width, height);
}
@end
NS_ASSUME_NONNULL_END