Merge branch 'charlesmchen/imageEditor'

Matthew Chen 2018-12-18 10:53:59 -05:00
commit 98137e9ddf
13 changed files with 1336 additions and 72 deletions

View file

@ -235,6 +235,12 @@
34BECE2B1F74C12700D7438D /* DebugUIStress.m in Sources */ = {isa = PBXBuildFile; fileRef = 34BECE2A1F74C12700D7438D /* DebugUIStress.m */; };
34BECE2E1F7ABCE000D7438D /* GifPickerViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 34BECE2D1F7ABCE000D7438D /* GifPickerViewController.swift */; };
34BECE301F7ABCF800D7438D /* GifPickerLayout.swift in Sources */ = {isa = PBXBuildFile; fileRef = 34BECE2F1F7ABCF800D7438D /* GifPickerLayout.swift */; };
34BEDB0E21C405B0007B0EAE /* ImageEditorModel.swift in Sources */ = {isa = PBXBuildFile; fileRef = 34BEDB0D21C405B0007B0EAE /* ImageEditorModel.swift */; };
34BEDB1121C41E71007B0EAE /* ImageEditorTest.swift in Sources */ = {isa = PBXBuildFile; fileRef = 34BEDB1021C41E71007B0EAE /* ImageEditorTest.swift */; };
34BEDB1321C43F6A007B0EAE /* ImageEditorView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 34BEDB1221C43F69007B0EAE /* ImageEditorView.swift */; };
34BEDB1621C80BCA007B0EAE /* OWSAnyTouchGestureRecognizer.h in Headers */ = {isa = PBXBuildFile; fileRef = 34BEDB1421C80BC9007B0EAE /* OWSAnyTouchGestureRecognizer.h */; settings = {ATTRIBUTES = (Public, ); }; };
34BEDB1721C80BCA007B0EAE /* OWSAnyTouchGestureRecognizer.m in Sources */ = {isa = PBXBuildFile; fileRef = 34BEDB1521C80BCA007B0EAE /* OWSAnyTouchGestureRecognizer.m */; };
34BEDB1921C82AC5007B0EAE /* ImageEditorGestureRecognizer.swift in Sources */ = {isa = PBXBuildFile; fileRef = 34BEDB1821C82AC5007B0EAE /* ImageEditorGestureRecognizer.swift */; };
34C3C78D20409F320000134C /* Opening.m4r in Resources */ = {isa = PBXBuildFile; fileRef = 34C3C78C20409F320000134C /* Opening.m4r */; };
34C3C78F2040A4F70000134C /* sonarping.mp3 in Resources */ = {isa = PBXBuildFile; fileRef = 34C3C78E2040A4F70000134C /* sonarping.mp3 */; };
34C3C7922040B0DD0000134C /* OWSAudioPlayer.h in Headers */ = {isa = PBXBuildFile; fileRef = 34C3C7902040B0DC0000134C /* OWSAudioPlayer.h */; settings = {ATTRIBUTES = (Public, ); }; };
@ -292,7 +298,6 @@
34EA69402194933900702471 /* MediaDownloadView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 34EA693F2194933900702471 /* MediaDownloadView.swift */; };
34EA69422194DE8000702471 /* MediaUploadView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 34EA69412194DE7F00702471 /* MediaUploadView.swift */; };
34F308A21ECB469700BB7697 /* OWSBezierPathView.m in Sources */ = {isa = PBXBuildFile; fileRef = 34F308A11ECB469700BB7697 /* OWSBezierPathView.m */; };
34FD93701E3BD43A00109093 /* OWSAnyTouchGestureRecognizer.m in Sources */ = {isa = PBXBuildFile; fileRef = 34FD936F1E3BD43A00109093 /* OWSAnyTouchGestureRecognizer.m */; };
4503F1BE20470A5B00CEE724 /* classic-quiet.aifc in Resources */ = {isa = PBXBuildFile; fileRef = 4503F1BB20470A5B00CEE724 /* classic-quiet.aifc */; };
4503F1BF20470A5B00CEE724 /* classic.aifc in Resources */ = {isa = PBXBuildFile; fileRef = 4503F1BC20470A5B00CEE724 /* classic.aifc */; };
4503F1C3204711D300CEE724 /* OWS107LegacySounds.m in Sources */ = {isa = PBXBuildFile; fileRef = 4503F1C1204711D200CEE724 /* OWS107LegacySounds.m */; };
@ -906,6 +911,12 @@
34BECE2A1F74C12700D7438D /* DebugUIStress.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = DebugUIStress.m; sourceTree = "<group>"; };
34BECE2D1F7ABCE000D7438D /* GifPickerViewController.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = GifPickerViewController.swift; sourceTree = "<group>"; };
34BECE2F1F7ABCF800D7438D /* GifPickerLayout.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = GifPickerLayout.swift; sourceTree = "<group>"; };
34BEDB0D21C405B0007B0EAE /* ImageEditorModel.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = ImageEditorModel.swift; sourceTree = "<group>"; };
34BEDB1021C41E71007B0EAE /* ImageEditorTest.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = ImageEditorTest.swift; sourceTree = "<group>"; };
34BEDB1221C43F69007B0EAE /* ImageEditorView.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = ImageEditorView.swift; sourceTree = "<group>"; };
34BEDB1421C80BC9007B0EAE /* OWSAnyTouchGestureRecognizer.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = OWSAnyTouchGestureRecognizer.h; sourceTree = "<group>"; };
34BEDB1521C80BCA007B0EAE /* OWSAnyTouchGestureRecognizer.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = OWSAnyTouchGestureRecognizer.m; sourceTree = "<group>"; };
34BEDB1821C82AC5007B0EAE /* ImageEditorGestureRecognizer.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = ImageEditorGestureRecognizer.swift; sourceTree = "<group>"; };
34C3C78C20409F320000134C /* Opening.m4r */ = {isa = PBXFileReference; lastKnownFileType = file; path = Opening.m4r; sourceTree = "<group>"; };
34C3C78E2040A4F70000134C /* sonarping.mp3 */ = {isa = PBXFileReference; lastKnownFileType = audio.mp3; name = sonarping.mp3; path = Signal/AudioFiles/sonarping.mp3; sourceTree = SOURCE_ROOT; };
34C3C7902040B0DC0000134C /* OWSAudioPlayer.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = OWSAudioPlayer.h; sourceTree = "<group>"; };
@ -1001,8 +1012,6 @@
34EA69412194DE7F00702471 /* MediaUploadView.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = MediaUploadView.swift; sourceTree = "<group>"; };
34F308A01ECB469700BB7697 /* OWSBezierPathView.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = OWSBezierPathView.h; sourceTree = "<group>"; };
34F308A11ECB469700BB7697 /* OWSBezierPathView.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = OWSBezierPathView.m; sourceTree = "<group>"; };
34FD936E1E3BD43A00109093 /* OWSAnyTouchGestureRecognizer.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = OWSAnyTouchGestureRecognizer.h; path = views/OWSAnyTouchGestureRecognizer.h; sourceTree = "<group>"; };
34FD936F1E3BD43A00109093 /* OWSAnyTouchGestureRecognizer.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = OWSAnyTouchGestureRecognizer.m; path = views/OWSAnyTouchGestureRecognizer.m; sourceTree = "<group>"; };
435EAC2E5E22D3F087EB3192 /* Pods-SignalShareExtension.app store release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-SignalShareExtension.app store release.xcconfig"; path = "Pods/Target Support Files/Pods-SignalShareExtension/Pods-SignalShareExtension.app store release.xcconfig"; sourceTree = "<group>"; };
4503F1BB20470A5B00CEE724 /* classic-quiet.aifc */ = {isa = PBXFileReference; lastKnownFileType = file; path = "classic-quiet.aifc"; sourceTree = "<group>"; };
4503F1BC20470A5B00CEE724 /* classic.aifc */ = {isa = PBXFileReference; lastKnownFileType = file; path = classic.aifc; sourceTree = "<group>"; };
@ -1529,6 +1538,7 @@
isa = PBXGroup;
children = (
452EC6E0205FF5DC000E787C /* Bench.swift */,
4C948FF62146EB4800349F0D /* BlockListCache.swift */,
343D3D991E9283F100165CA4 /* BlockListUIUtils.h */,
343D3D9A1E9283F100165CA4 /* BlockListUIUtils.m */,
451777C71FD61554001225FF /* ConversationSearcher.swift */,
@ -1539,6 +1549,8 @@
344F248C2007CCD600CFB4F4 /* DisplayableText.swift */,
346129AC1FD1F34E00532771 /* ImageCache.swift */,
4523D015206EDC2B00A2AB51 /* LRUCache.swift */,
34BEDB1421C80BC9007B0EAE /* OWSAnyTouchGestureRecognizer.h */,
34BEDB1521C80BCA007B0EAE /* OWSAnyTouchGestureRecognizer.m */,
34C3C7902040B0DC0000134C /* OWSAudioPlayer.h */,
34C3C7912040B0DC0000134C /* OWSAudioPlayer.m */,
45666EC41D99483D008FE134 /* OWSAvatarBuilder.h */,
@ -1559,15 +1571,14 @@
34B6D27320F664C800765BE2 /* OWSUnreadIndicator.m */,
34641E1120878FB000E2EDE5 /* OWSWindowManager.h */,
34641E1020878FAF00E2EDE5 /* OWSWindowManager.m */,
4CB93DC12180FF07004B9764 /* ProximityMonitoringManager.swift */,
45360B8C1F9521F800FA666C /* Searcher.swift */,
346129BD1FD2068600532771 /* ThreadUtil.h */,
346129BE1FD2068600532771 /* ThreadUtil.m */,
4C858A51212DC5E1001B45D3 /* UIImage+OWS.swift */,
B97940251832BD2400BD66CB /* UIUtil.h */,
B97940261832BD2400BD66CB /* UIUtil.m */,
45F170D51E315310003FC1F2 /* Weak.swift */,
4C858A51212DC5E1001B45D3 /* UIImage+OWS.swift */,
4C948FF62146EB4800349F0D /* BlockListCache.swift */,
4CB93DC12180FF07004B9764 /* ProximityMonitoringManager.swift */,
);
path = utils;
sourceTree = "<group>";
@ -1714,7 +1725,9 @@
34AC0A00211B39E700997B47 /* DisappearingTimerConfigurationView.swift */,
4CA46F49219C78050038ABDE /* GalleryRailView.swift */,
34AC0A08211B39E900997B47 /* GradientView.swift */,
34BEDB0C21C405B0007B0EAE /* ImageEditor */,
34AC0A06211B39E900997B47 /* OWSAlerts.swift */,
4C618198219DF03A009BD6B5 /* OWSButton.swift */,
34AC0A09211B39E900997B47 /* OWSFlatButton.swift */,
34AC09FE211B39E700997B47 /* OWSLayerView.swift */,
34AC0A03211B39E800997B47 /* OWSNavigationBar.swift */,
@ -1729,7 +1742,6 @@
34AC0A0D211B39EA00997B47 /* ThreadViewHelper.h */,
34AC0A0B211B39EA00997B47 /* ThreadViewHelper.m */,
34AC0A04211B39E800997B47 /* VideoPlayerView.swift */,
4C618198219DF03A009BD6B5 /* OWSButton.swift */,
);
path = Views;
sourceTree = "<group>";
@ -1858,6 +1870,24 @@
path = GifPicker;
sourceTree = "<group>";
};
34BEDB0C21C405B0007B0EAE /* ImageEditor */ = {
isa = PBXGroup;
children = (
34BEDB1821C82AC5007B0EAE /* ImageEditorGestureRecognizer.swift */,
34BEDB0D21C405B0007B0EAE /* ImageEditorModel.swift */,
34BEDB1221C43F69007B0EAE /* ImageEditorView.swift */,
);
path = ImageEditor;
sourceTree = "<group>";
};
34BEDB0F21C41E71007B0EAE /* views */ = {
isa = PBXGroup;
children = (
34BEDB1021C41E71007B0EAE /* ImageEditorTest.swift */,
);
path = views;
sourceTree = "<group>";
};
34C3C78B20409F320000134C /* ringtoneSounds */ = {
isa = PBXGroup;
children = (
@ -1995,8 +2025,6 @@
children = (
450DF2071E0DD29E003D14BE /* Notifications */,
4C090A1A210FD9C7001FD7F9 /* HapticFeedback.swift */,
34FD936E1E3BD43A00109093 /* OWSAnyTouchGestureRecognizer.h */,
34FD936F1E3BD43A00109093 /* OWSAnyTouchGestureRecognizer.m */,
34B3F8331E8DF1700035BE1A /* ViewControllers */,
76EB052B18170B33006006FC /* Views */,
);
@ -2390,6 +2418,7 @@
B660F6A01C29868000687D6E /* TestUtil.h */,
B660F6A21C29868000687D6E /* util */,
34B3F8951E8DF1B90035BE1A /* ViewControllers */,
34BEDB0F21C41E71007B0EAE /* views */,
);
path = test;
sourceTree = "<group>";
@ -2586,6 +2615,7 @@
34AC09DE211B39B100997B47 /* OWSNavigationController.h in Headers */,
34612A011FD5F31400532771 /* OWS104CreateRecipientIdentities.h in Headers */,
450998691FD8C10200D89EB3 /* AttachmentSharing.h in Headers */,
34BEDB1621C80BCA007B0EAE /* OWSAnyTouchGestureRecognizer.h in Headers */,
346129C71FD2072E00532771 /* NSString+OWS.h in Headers */,
34AC09EC211B39B100997B47 /* OWSTableViewController.h in Headers */,
451F8A3C1FD71392005CB9DA /* UIUtil.h in Headers */,
@ -3264,15 +3294,18 @@
34AC09EF211B39B100997B47 /* ViewControllerUtils.m in Sources */,
346941A2215D2EE400B5BFAD /* OWSConversationColor.m in Sources */,
34AC0A17211B39EA00997B47 /* VideoPlayerView.swift in Sources */,
34BEDB1321C43F6A007B0EAE /* ImageEditorView.swift in Sources */,
34AC09EE211B39B100997B47 /* EditContactShareNameViewController.swift in Sources */,
346129F71FD5F31400532771 /* OWS105AttachmentFilePaths.m in Sources */,
45194F931FD7215C00333B2C /* OWSContactOffersInteraction.m in Sources */,
4523D016206EDC2B00A2AB51 /* LRUCache.swift in Sources */,
450998681FD8C0FF00D89EB3 /* AttachmentSharing.m in Sources */,
347850711FDAEB17007B8332 /* OWSUserProfile.m in Sources */,
34BEDB1921C82AC5007B0EAE /* ImageEditorGestureRecognizer.swift in Sources */,
346129F81FD5F31400532771 /* OWS100RemoveTSRecipientsMigration.m in Sources */,
34AC09DF211B39B100997B47 /* OWSNavigationController.m in Sources */,
34074F61203D0CBE004596AE /* OWSSounds.m in Sources */,
34BEDB1721C80BCA007B0EAE /* OWSAnyTouchGestureRecognizer.m in Sources */,
34B6A909218B8824007C4606 /* OWS112TypingIndicatorsMigration.swift in Sources */,
346129B51FD1F7E800532771 /* OWSProfileManager.m in Sources */,
342950832124C9750000B063 /* OWSTextView.m in Sources */,
@ -3323,6 +3356,7 @@
34AC09E9211B39B100997B47 /* OWSTableViewController.m in Sources */,
346129F51FD5F31400532771 /* OWS102MoveLoggingPreferenceToUserDefaults.m in Sources */,
45194F8F1FD71FF500333B2C /* ThreadUtil.m in Sources */,
34BEDB0E21C405B0007B0EAE /* ImageEditorModel.swift in Sources */,
451F8A3B1FD71297005CB9DA /* UIUtil.m in Sources */,
450C800F20AD1AB900F3A091 /* OWSWindowManager.m in Sources */,
454A965A1FD6017E008D2A0E /* SignalAttachment.swift in Sources */,
@ -3434,7 +3468,6 @@
340FC8AA204DAC8D007AEB0F /* NotificationSettingsViewController.m in Sources */,
4C090A1B210FD9C7001FD7F9 /* HapticFeedback.swift in Sources */,
3496744F2076ACD000080B5F /* LongTextViewController.swift in Sources */,
34FD93701E3BD43A00109093 /* OWSAnyTouchGestureRecognizer.m in Sources */,
34B3F8931E8DF1710035BE1A /* SignalsNavigationController.m in Sources */,
34F308A21ECB469700BB7697 /* OWSBezierPathView.m in Sources */,
45B27B862037FFB400A539DF /* DebugUIFileBrowser.swift in Sources */,
@ -3579,6 +3612,7 @@
files = (
456F6E2F1E261D1000FD2210 /* PeerConnectionClientTest.swift in Sources */,
458967111DC117CC00E9DD21 /* AccountManagerTest.swift in Sources */,
34BEDB1121C41E71007B0EAE /* ImageEditorTest.swift in Sources */,
3491D9A121022DB7001EF5A1 /* CDSSigningCertificateTest.m in Sources */,
340B02BA1FA0D6C700F9CFEC /* ConversationViewItemTest.m in Sources */,
458E383A1D6699FA0094BD24 /* OWSDeviceProvisioningURLParserTest.m in Sources */,

View file

@ -7,7 +7,7 @@
<key>CarthageVersion</key>
<string>0.31.2</string>
<key>OSXVersion</key>
<string>10.14.1</string>
<string>10.14.2</string>
<key>WebRTCCommit</key>
<string>ca71024b4993ba95e3e6b8d0758004cffc54ddaf M70</string>
</dict>

View file

@ -0,0 +1,111 @@
//
// Copyright (c) 2018 Open Whisper Systems. All rights reserved.
//
import XCTest
@testable import Signal
@testable import SignalMessaging
extension ImageEditorModel {
func itemIds() -> [String] {
return items().map { (item) in
item.itemId
}
}
}
class ImageEditorTest: SignalBaseTest {
override func setUp() {
super.setUp()
}
override func tearDown() {
// Put teardown code here. This method is called after the invocation of each test method in the class.
super.tearDown()
}
func testImageEditorContents() {
let contents = ImageEditorContents()
XCTAssertEqual(0, contents.itemMap.count)
let item = ImageEditorItem(itemType: .test)
contents.append(item: item)
XCTAssertEqual(1, contents.itemMap.count)
let contentsCopy = contents.clone()
XCTAssertEqual(1, contents.itemMap.count)
XCTAssertEqual(1, contentsCopy.itemMap.count)
contentsCopy.remove(item: item)
XCTAssertEqual(1, contents.itemMap.count)
XCTAssertEqual(0, contentsCopy.itemMap.count)
let modifiedItem = ImageEditorItem(itemId: item.itemId, itemType: item.itemType)
contents.replace(item: modifiedItem)
XCTAssertEqual(1, contents.itemMap.count)
XCTAssertEqual(0, contentsCopy.itemMap.count)
}
private func writeDummyImage() -> String {
let image = UIImage.init(color: .red, size: CGSize(width: 1, height: 1))
guard let data = UIImagePNGRepresentation(image) else {
owsFail("Couldn't export dummy image.")
}
let filePath = OWSFileSystem.temporaryFilePath(withFileExtension: "png")
try! data.write(to: URL(fileURLWithPath: filePath))
return filePath
}
func testImageEditor() {
let imagePath = writeDummyImage()
let imageEditor = try! ImageEditorModel(srcImagePath: imagePath)
XCTAssertFalse(imageEditor.canUndo())
XCTAssertFalse(imageEditor.canRedo())
XCTAssertEqual(0, imageEditor.itemCount())
let itemA = ImageEditorItem(itemType: .test)
imageEditor.append(item: itemA)
XCTAssertTrue(imageEditor.canUndo())
XCTAssertFalse(imageEditor.canRedo())
XCTAssertEqual(1, imageEditor.itemCount())
XCTAssertEqual([itemA.itemId], imageEditor.itemIds())
imageEditor.undo()
XCTAssertFalse(imageEditor.canUndo())
XCTAssertTrue(imageEditor.canRedo())
XCTAssertEqual(0, imageEditor.itemCount())
imageEditor.redo()
XCTAssertTrue(imageEditor.canUndo())
XCTAssertFalse(imageEditor.canRedo())
XCTAssertEqual(1, imageEditor.itemCount())
XCTAssertEqual([itemA.itemId], imageEditor.itemIds())
imageEditor.undo()
XCTAssertFalse(imageEditor.canUndo())
XCTAssertTrue(imageEditor.canRedo())
XCTAssertEqual(0, imageEditor.itemCount())
let itemB = ImageEditorItem(itemType: .test)
imageEditor.append(item: itemB)
XCTAssertTrue(imageEditor.canUndo())
XCTAssertFalse(imageEditor.canRedo())
XCTAssertEqual(1, imageEditor.itemCount())
XCTAssertEqual([itemB.itemId], imageEditor.itemIds())
let itemC = ImageEditorItem(itemType: .test)
imageEditor.append(item: itemC)
XCTAssertTrue(imageEditor.canUndo())
XCTAssertFalse(imageEditor.canRedo())
XCTAssertEqual(2, imageEditor.itemCount())
XCTAssertEqual([itemB.itemId, itemC.itemId], imageEditor.itemIds())
imageEditor.undo()
XCTAssertTrue(imageEditor.canUndo())
XCTAssertTrue(imageEditor.canRedo())
XCTAssertEqual(1, imageEditor.itemCount())
XCTAssertEqual([itemB.itemId], imageEditor.itemIds())
}
}

View file

@ -24,6 +24,7 @@ FOUNDATION_EXPORT const unsigned char SignalMessagingVersionString[];
#import <SignalMessaging/NSAttributedString+OWS.h>
#import <SignalMessaging/NSString+OWS.h>
#import <SignalMessaging/NewNonContactConversationViewController.h>
#import <SignalMessaging/OWSAnyTouchGestureRecognizer.h>
#import <SignalMessaging/OWSAudioPlayer.h>
#import <SignalMessaging/OWSContactAvatarBuilder.h>
#import <SignalMessaging/OWSContactOffersInteraction.h>

View file

@ -15,6 +15,8 @@ public protocol AttachmentApprovalViewControllerDelegate: class {
@objc optional func attachmentApproval(_ attachmentApproval: AttachmentApprovalViewController, changedCaptionOfAttachment attachment: SignalAttachment)
}
// MARK: -
class AttachmentItemCollection {
private (set) var attachmentItems: [SignalAttachmentItem]
init(attachmentItems: [SignalAttachmentItem]) {
@ -48,6 +50,8 @@ class AttachmentItemCollection {
}
}
// MARK: -
class SignalAttachmentItem: Hashable {
enum SignalAttachmentItemError: Error {
@ -56,8 +60,24 @@ class SignalAttachmentItem: Hashable {
let attachment: SignalAttachment
// This might be nil if the attachment is not a valid image.
var imageEditorModel: ImageEditorModel?
init(attachment: SignalAttachment) {
self.attachment = attachment
// Try to make an ImageEditorModel.
// This will only apply for valid images.
if let dataUrl: URL = attachment.dataUrl,
dataUrl.isFileURL {
let path = dataUrl.path
do {
imageEditorModel = try ImageEditorModel(srcImagePath: path)
} catch {
// Usually not an error; this typically just indicates invalid input.
Logger.warn("Could not create image editor: \(error)")
}
}
}
// MARK:
@ -97,12 +117,16 @@ class SignalAttachmentItem: Hashable {
}
}
// MARK: -
@objc
public enum AttachmentApprovalViewControllerMode: UInt {
case modal
case sharedNavigation
}
// MARK: -
@objc
public class AttachmentApprovalViewController: UIPageViewController, UIPageViewControllerDataSource, UIPageViewControllerDelegate {
@ -558,7 +582,63 @@ public class AttachmentApprovalViewController: UIPageViewController, UIPageViewC
}
var attachments: [SignalAttachment] {
return attachmentItems.map { $0.attachment }
return attachmentItems.map { self.processedAttachment(forAttachmentItem: $0) }
}
// For any attachments edited with the image editor, returns a
// new SignalAttachment that reflects those changes. Otherwise,
// returns the original attachment.
//
// If any error occurs in the export process, we fall back to
// sending the original attachment. This seems better than trying
// to involve the user in resolving the issue.
func processedAttachment(forAttachmentItem attachmentItem: SignalAttachmentItem) -> SignalAttachment {
guard let imageEditorModel = attachmentItem.imageEditorModel else {
// Image was not edited.
return attachmentItem.attachment
}
guard imageEditorModel.itemCount() > 0 else {
// Image editor has no changes.
return attachmentItem.attachment
}
guard let dstImage = ImageEditorView.renderForOutput(model: imageEditorModel) else {
owsFailDebug("Could not render for output.")
return attachmentItem.attachment
}
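// Export as JPEG (with 0.9 quality) when the source attachment was a JPEG,
// otherwise as PNG, and record the matching UTI so the new attachment's
// metadata stays consistent with its data.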
var dataUTI = kUTTypeImage as String
guard let dstData: Data = {
let isLossy: Bool = attachmentItem.attachment.mimeType.caseInsensitiveCompare(OWSMimeTypeImageJpeg) == .orderedSame
if isLossy {
dataUTI = kUTTypeJPEG as String
return UIImageJPEGRepresentation(dstImage, 0.9)
} else {
dataUTI = kUTTypePNG as String
return UIImagePNGRepresentation(dstImage)
}
}() else {
owsFailDebug("Could not export for output.")
return attachmentItem.attachment
}
guard let dataSource = DataSourceValue.dataSource(with: dstData, utiType: dataUTI) else {
owsFailDebug("Could not prepare data source for output.")
return attachmentItem.attachment
}
// Rewrite the filename's extension to reflect the output file format.
var filename: String? = attachmentItem.attachment.sourceFilename
if let sourceFilename = attachmentItem.attachment.sourceFilename {
if let fileExtension: String = MIMETypeUtil.fileExtension(forUTIType: dataUTI) {
filename = (sourceFilename as NSString).deletingPathExtension.appendingFileExtension(fileExtension)
}
}
dataSource.sourceFilename = filename
let dstAttachment = SignalAttachment.attachment(dataSource: dataSource, dataUTI: dataUTI, imageQuality: .original)
if let attachmentError = dstAttachment.error {
owsFailDebug("Could not prepare attachment for output: \(attachmentError).")
return attachmentItem.attachment
}
return dstAttachment
}
func attachmentItem(before currentItem: SignalAttachmentItem) -> SignalAttachmentItem? {
@ -777,6 +857,8 @@ public class AttachmentPrepViewController: OWSViewController, PlayerProgressBarD
private(set) var contentContainer: UIView!
private(set) var playVideoButton: UIView?
private var imageEditorView: ImageEditorView?
// MARK: - Initializers
init(attachmentItem: SignalAttachmentItem) {
@ -865,6 +947,17 @@ public class AttachmentPrepViewController: OWSViewController, PlayerProgressBarD
containerView.addSubview(mediaMessageView)
mediaMessageView.autoPinEdgesToSuperviewEdges()
#if DEBUG
if let imageEditorModel = attachmentItem.imageEditorModel,
let imageMediaView = self.mediaMessageView.contentView {
let imageEditorView = ImageEditorView(model: imageEditorModel)
imageMediaView.isUserInteractionEnabled = true
imageMediaView.addSubview(imageEditorView)
imageEditorView.autoPinEdgesToSuperviewEdges()
self.imageEditorView = imageEditorView
}
#endif
if isZoomable {
// Add top and bottom gradients to ensure toolbar controls are legible
// when placed over image/video preview which may be a clashing color.

View file

@ -0,0 +1,179 @@
//
// Copyright (c) 2018 Open Whisper Systems. All rights reserved.
//
import UIKit
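// A single-touch gesture recognizer used by ImageEditorView to capture
// freehand strokes. It begins on the first valid touch, keeps reporting
// changes while the touch stays inside the view, and ends (rather than
// fails) if the touch wanders outside the canvas.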
class ImageEditorGestureRecognizer: UIGestureRecognizer {
@objc
override func canPrevent(_ preventedGestureRecognizer: UIGestureRecognizer) -> Bool {
return false
}
@objc
override func canBePrevented(by: UIGestureRecognizer) -> Bool {
return false
}
@objc
override func shouldRequireFailure(of: UIGestureRecognizer) -> Bool {
return false
}
@objc
override func shouldBeRequiredToFail(by: UIGestureRecognizer) -> Bool {
return true
}
// MARK: - Touch Handling
@objc
override func touchesBegan(_ touches: Set<UITouch>, with event: UIEvent) {
super.touchesBegan(touches, with: event)
if state == .possible,
touchType(for: touches, with: event) == .valid {
// If a gesture starts with a valid touch, begin stroke.
state = .began
} else {
state = .failed
}
}
@objc
override func touchesMoved(_ touches: Set<UITouch>, with event: UIEvent) {
super.touchesMoved(touches, with: event)
switch state {
case .began, .changed:
switch touchType(for: touches, with: event) {
case .valid:
// If a gesture continues with a valid touch, continue stroke.
state = .changed
case .invalid:
state = .failed
case .outside:
// If a gesture continues with a valid touch _outside the canvas_,
// end stroke.
state = .ended
}
default:
state = .failed
}
}
@objc
override func touchesEnded(_ touches: Set<UITouch>, with event: UIEvent) {
super.touchesEnded(touches, with: event)
switch state {
case .began, .changed:
switch touchType(for: touches, with: event) {
case .valid, .outside:
// If a gesture ends with a valid touch, end stroke.
state = .ended
case .invalid:
state = .failed
}
default:
state = .failed
}
}
@objc
override func touchesCancelled(_ touches: Set<UITouch>, with event: UIEvent) {
super.touchesCancelled(touches, with: event)
state = .cancelled
}
public enum TouchType {
case invalid
case valid
case outside
}
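// Classifies a touch for this recognizer: .invalid for multi-touch and for
// new touches that begin outside the view, over a control subview, or near
// the top or bottom screen edge (likely a system edge-swipe); .outside for
// an in-progress touch that has drifted outside the view's bounds; .valid
// otherwise.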
private func touchType(for touches: Set<UITouch>, with event: UIEvent) -> TouchType {
guard let view = self.view else {
owsFailDebug("Missing view")
return .invalid
}
guard let allTouches = event.allTouches else {
owsFailDebug("Missing allTouches")
return .invalid
}
guard allTouches.count <= 1 else {
return .invalid
}
guard touches.count == 1 else {
return .invalid
}
guard let firstTouch: UITouch = touches.first else {
return .invalid
}
let location = firstTouch.location(in: view)
let isNewTouch = firstTouch.phase == .began
if isNewTouch {
// Reject new touches that are inside a control subview.
if subviewControl(ofView: view, contains: firstTouch) {
return .invalid
}
}
// Reject new touches outside this GR's view's bounds.
guard view.bounds.contains(location) else {
return isNewTouch ? .invalid : .outside
}
if isNewTouch {
// Ignore touches that start near the top or bottom edge of the screen;
// they may be a system edge swipe gesture.
let rootView = self.rootView(of: view)
let rootLocation = firstTouch.location(in: rootView)
let distanceToTopEdge = max(0, rootLocation.y)
let distanceToBottomEdge = max(0, rootView.bounds.size.height - rootLocation.y)
let distanceToNearestEdge = min(distanceToTopEdge, distanceToBottomEdge)
let kSystemEdgeSwipeTolerance: CGFloat = 50
if (distanceToNearestEdge < kSystemEdgeSwipeTolerance) {
return .invalid
}
}
return .valid
}
private func subviewControl(ofView superview: UIView, contains touch: UITouch) -> Bool {
for subview in superview.subviews {
guard !subview.isHidden, subview.isUserInteractionEnabled else {
continue
}
let location = touch.location(in: subview)
guard subview.bounds.contains(location) else {
continue
}
if subview as? UIControl != nil {
return true
}
if subviewControl(ofView: subview, contains: touch) {
return true
}
}
return false
}
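// Walks the responder chain upward from the given view and returns the
// last UIView encountered (typically the window), used above to measure a
// touch's distance from the screen edges.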
private func rootView(of view: UIView) -> UIView {
var responder: UIResponder? = view
var lastView: UIView = view
while true {
guard let currentResponder = responder else {
return lastView
}
if let currentView = currentResponder as? UIView {
lastView = currentView
}
responder = currentResponder.next
}
}
}

View file

@ -0,0 +1,435 @@
//
// Copyright (c) 2018 Open Whisper Systems. All rights reserved.
//
import UIKit
@objc public enum ImageEditorError: Int, Error {
case assertionError
case invalidInput
}
@objc
public enum ImageEditorItemType: Int {
case test
case stroke
}
// MARK: -
// Instances of ImageEditorItem should be treated
// as immutable, once configured.
@objc
public class ImageEditorItem: NSObject {
@objc
public let itemId: String
@objc
public let itemType: ImageEditorItemType
@objc
public init(itemType: ImageEditorItemType) {
self.itemId = UUID().uuidString
self.itemType = itemType
super.init()
}
@objc
public init(itemId: String,
itemType: ImageEditorItemType) {
self.itemId = itemId
self.itemType = itemType
super.init()
}
}
// MARK: -
@objc
public class ImageEditorStrokeItem: ImageEditorItem {
// Until we need to serialize these items,
// just use UIColor.
@objc
public let color: UIColor
// Represented in a "ULO unit" coordinate system
// for source image.
//
// "ULO" coordinate system is "upper-left-origin".
//
// "Unit" coordinate system means values are expressed
// in terms of some other values, in this case the
// width and height of the source image.
//
// * 0.0 = left edge
// * 1.0 = right edge
// * 0.0 = top edge
// * 1.0 = bottom edge
public typealias StrokeSample = CGPoint
@objc
public let unitSamples: [StrokeSample]
// Expressed as a "Unit" value as a fraction of
// min(width, height) of the destination viewport.
@objc
public let unitStrokeWidth: CGFloat
@objc
public init(color: UIColor,
unitSamples: [StrokeSample],
unitStrokeWidth: CGFloat) {
self.color = color
self.unitSamples = unitSamples
self.unitStrokeWidth = unitStrokeWidth
super.init(itemType: .stroke)
}
@objc
public init(itemId: String,
color: UIColor,
unitSamples: [StrokeSample],
unitStrokeWidth: CGFloat) {
self.color = color
self.unitSamples = unitSamples
self.unitStrokeWidth = unitStrokeWidth
super.init(itemId: itemId, itemType: .stroke)
}
@objc
public class func defaultUnitStrokeWidth() -> CGFloat {
return 0.02
}
@objc
public class func strokeWidth(forUnitStrokeWidth unitStrokeWidth: CGFloat,
dstSize: CGSize) -> CGFloat {
return CGFloatClamp01(unitStrokeWidth) * min(dstSize.width, dstSize.height)
}
}
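// Illustrative sketch (not part of this change): how a ULO unit sample and a
// unit stroke width map into concrete view coordinates. The viewport size
// below is a hypothetical example value.
private func exampleUnitCoordinateConversion() {
    let viewportSize = CGSize(width: 320, height: 240)
    // Upper-left origin: x grows rightward, y grows downward.
    let unitSample = CGPoint(x: 0.5, y: 0.25)
    let viewPoint = CGPoint(x: unitSample.x * viewportSize.width,   // 160
                            y: unitSample.y * viewportSize.height)  //  60
    // Stroke width is a fraction of min(width, height) of the viewport:
    // 0.02 * 240 = 4.8 points.
    let strokeWidth = ImageEditorStrokeItem.strokeWidth(forUnitStrokeWidth: ImageEditorStrokeItem.defaultUnitStrokeWidth(),
                                                        dstSize: viewportSize)
    _ = (viewPoint, strokeWidth)
}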
// MARK: -
public class OrderedDictionary<ValueType>: NSObject {
public typealias KeyType = String
var keyValueMap = [KeyType: ValueType]()
var orderedKeys = [KeyType]()
public override init() {
}
// Used to create clones of instances of this class.
public init(keyValueMap: [KeyType: ValueType],
orderedKeys: [KeyType]) {
self.keyValueMap = keyValueMap
self.orderedKeys = orderedKeys
}
// Since the contents are immutable, we only modify copies
// made with this method.
public func clone() -> OrderedDictionary<ValueType> {
return OrderedDictionary(keyValueMap: keyValueMap, orderedKeys: orderedKeys)
}
public func append(key: KeyType, value: ValueType) {
if keyValueMap[key] != nil {
owsFailDebug("Unexpected duplicate key in key map: \(key)")
}
keyValueMap[key] = value
if orderedKeys.contains(key) {
owsFailDebug("Unexpected duplicate key in key list: \(key)")
} else {
orderedKeys.append(key)
}
if orderedKeys.count != keyValueMap.count {
owsFailDebug("Invalid contents.")
}
}
public func replace(key: KeyType, value: ValueType) {
if keyValueMap[key] == nil {
owsFailDebug("Missing key in key map: \(key)")
}
keyValueMap[key] = value
if !orderedKeys.contains(key) {
owsFailDebug("Missing key in key list: \(key)")
}
if orderedKeys.count != keyValueMap.count {
owsFailDebug("Invalid contents.")
}
}
public func remove(key: KeyType) {
if keyValueMap[key] == nil {
owsFailDebug("Missing key in key map: \(key)")
} else {
keyValueMap.removeValue(forKey: key)
}
if !orderedKeys.contains(key) {
owsFailDebug("Missing key in key list: \(key)")
} else {
orderedKeys = orderedKeys.filter { $0 != key }
}
if orderedKeys.count != keyValueMap.count {
owsFailDebug("Invalid contents.")
}
}
public var count: Int {
if orderedKeys.count != keyValueMap.count {
owsFailDebug("Invalid contents.")
}
return orderedKeys.count
}
public func orderedValues() -> [ValueType] {
var values = [ValueType]()
for key in orderedKeys {
guard let value = self.keyValueMap[key] else {
owsFailDebug("Missing value")
continue
}
values.append(value)
}
return values
}
}
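// Illustrative usage sketch (not part of this change): the dictionary keeps
// insertion order, and clone() is how callers take snapshots before mutating.
private func exampleOrderedDictionaryUsage() {
    let original = OrderedDictionary<String>()
    original.append(key: "a", value: "apple")
    original.append(key: "b", value: "banana")
    // Snapshot first, then mutate only the copy.
    let copy = original.clone()
    copy.remove(key: "a")
    // original.orderedValues() == ["apple", "banana"]
    // copy.orderedValues() == ["banana"]
    _ = (original, copy)
}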
// MARK: -
// ImageEditorContents represents a snapshot of canvas
// state.
//
// Instances of ImageEditorContents should be treated
// as immutable, once configured.
public class ImageEditorContents: NSObject {
public typealias ItemMapType = OrderedDictionary<ImageEditorItem>
// This represents the current state of each item,
// a mapping of [itemId : item].
var itemMap = ItemMapType()
// Used to create an initial, empty instance of this class.
public override init() {
}
// Used to create clones of instances of this class.
public init(itemMap: ItemMapType) {
self.itemMap = itemMap
}
// Since the contents are immutable, we only modify copies
// made with this method.
public func clone() -> ImageEditorContents {
return ImageEditorContents(itemMap: itemMap.clone())
}
@objc
public func append(item: ImageEditorItem) {
Logger.verbose("\(item.itemId)")
itemMap.append(key: item.itemId, value: item)
}
@objc
public func replace(item: ImageEditorItem) {
Logger.verbose("\(item.itemId)")
itemMap.replace(key: item.itemId, value: item)
}
@objc
public func remove(item: ImageEditorItem) {
Logger.verbose("\(item.itemId)")
itemMap.remove(key: item.itemId)
}
@objc
public func remove(itemId: String) {
Logger.verbose("\(itemId)")
itemMap.remove(key: itemId)
}
@objc
public func itemCount() -> Int {
return itemMap.count
}
@objc
public func items() -> [ImageEditorItem] {
return itemMap.orderedValues()
}
}
// MARK: -
// Used to represent undo/redo operations.
//
// Because the image editor's "contents" and "items"
// are immutable, these operations simply take a
// snapshot of the current contents which can be used
// (multiple times) to preserve/restore editor state.
private class ImageEditorOperation: NSObject {
let contents: ImageEditorContents
required init(contents: ImageEditorContents) {
self.contents = contents
}
}
// MARK: -
@objc
public protocol ImageEditorModelDelegate: class {
func imageEditorModelDidChange()
}
// MARK: -
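// ImageEditorModel owns the editor's state: the current (immutable)
// ImageEditorContents snapshot plus undo and redo stacks of earlier
// snapshots. It notifies its delegate whenever that state changes.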
@objc
public class ImageEditorModel: NSObject {
@objc
public weak var delegate: ImageEditorModelDelegate?
@objc
public let srcImagePath: String
@objc
public let srcImageSizePixels: CGSize
private var contents = ImageEditorContents()
private var undoStack = [ImageEditorOperation]()
private var redoStack = [ImageEditorOperation]()
// We don't want to allow editing of images if:
//
// * They are invalid.
// * We can't determine their size / aspect-ratio.
@objc
public required init(srcImagePath: String) throws {
self.srcImagePath = srcImagePath
let srcFileName = (srcImagePath as NSString).lastPathComponent
let srcFileExtension = (srcFileName as NSString).pathExtension
guard let mimeType = MIMETypeUtil.mimeType(forFileExtension: srcFileExtension) else {
Logger.error("Couldn't determine MIME type for file.")
throw ImageEditorError.invalidInput
}
guard MIMETypeUtil.isImage(mimeType),
!MIMETypeUtil.isAnimated(mimeType) else {
Logger.error("Invalid MIME type: \(mimeType).")
throw ImageEditorError.invalidInput
}
let srcImageSizePixels = NSData.imageSize(forFilePath: srcImagePath, mimeType: mimeType)
guard srcImageSizePixels.width > 0, srcImageSizePixels.height > 0 else {
Logger.error("Couldn't determine image size.")
throw ImageEditorError.invalidInput
}
self.srcImageSizePixels = srcImageSizePixels
super.init()
}
@objc
public func itemCount() -> Int {
return contents.itemCount()
}
@objc
public func items() -> [ImageEditorItem] {
return contents.items()
}
@objc
public func canUndo() -> Bool {
return !undoStack.isEmpty
}
@objc
public func canRedo() -> Bool {
return !redoStack.isEmpty
}
@objc
public func undo() {
guard let undoOperation = undoStack.popLast() else {
owsFailDebug("Cannot undo.")
return
}
let redoOperation = ImageEditorOperation(contents: contents)
redoStack.append(redoOperation)
self.contents = undoOperation.contents
delegate?.imageEditorModelDidChange()
}
@objc
public func redo() {
guard let redoOperation = redoStack.popLast() else {
owsFailDebug("Cannot redo.")
return
}
let undoOperation = ImageEditorOperation(contents: contents)
undoStack.append(undoOperation)
self.contents = redoOperation.contents
delegate?.imageEditorModelDidChange()
}
@objc
public func append(item: ImageEditorItem) {
performAction { (newContents) in
newContents.append(item: item)
}
}
@objc
public func replace(item: ImageEditorItem) {
performAction { (newContents) in
newContents.replace(item: item)
}
}
@objc
public func remove(item: ImageEditorItem) {
performAction { (newContents) in
newContents.remove(item: item)
}
}
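// Every mutation snapshots the current contents onto the undo stack,
// clears the redo stack, applies the change to a fresh clone of the
// contents, and then notifies the delegate.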
private func performAction(action: (ImageEditorContents) -> Void) {
let undoOperation = ImageEditorOperation(contents: contents)
undoStack.append(undoOperation)
redoStack.removeAll()
let newContents = contents.clone()
action(newContents)
contents = newContents
delegate?.imageEditorModelDidChange()
}
}

View file

@ -0,0 +1,344 @@
//
// Copyright (c) 2018 Open Whisper Systems. All rights reserved.
//
import UIKit
// A view for editing outgoing image attachments.
// It can also be used to render the final output.
@objc
public class ImageEditorView: UIView, ImageEditorModelDelegate {
private let model: ImageEditorModel
@objc
public required init(model: ImageEditorModel) {
self.model = model
super.init(frame: .zero)
model.delegate = self
self.isUserInteractionEnabled = true
let anyTouchGesture = ImageEditorGestureRecognizer(target: self, action: #selector(handleTouchGesture(_:)))
self.addGestureRecognizer(anyTouchGesture)
}
@available(*, unavailable, message: "use other init() instead.")
required public init?(coder aDecoder: NSCoder) {
notImplemented()
}
// MARK: - Actions
// These properties are non-empty while drawing a stroke.
private var currentStroke: ImageEditorStrokeItem?
private var currentStrokeSamples = [ImageEditorStrokeItem.StrokeSample]()
@objc
public func handleTouchGesture(_ gestureRecognizer: UIGestureRecognizer) {
AssertIsOnMainThread()
Logger.verbose("\(NSStringForUIGestureRecognizerState(gestureRecognizer.state))")
let removeCurrentStroke = {
if let stroke = self.currentStroke {
self.model.remove(item: stroke)
}
self.currentStroke = nil
self.currentStrokeSamples.removeAll()
}
let referenceView = self
let unitSampleForGestureLocation = { () -> CGPoint in
// TODO: Smooth touch samples before converting into stroke samples.
let location = gestureRecognizer.location(in: referenceView)
let x = CGFloatClamp01(CGFloatInverseLerp(location.x, 0, referenceView.bounds.width))
let y = CGFloatClamp01(CGFloatInverseLerp(location.y, 0, referenceView.bounds.height))
return CGPoint(x: x, y: y)
}
// TODO: Color picker.
let strokeColor = UIColor.blue
// TODO: Tune stroke width.
let unitStrokeWidth = ImageEditorStrokeItem.defaultUnitStrokeWidth()
switch gestureRecognizer.state {
case .began:
removeCurrentStroke()
currentStrokeSamples.append(unitSampleForGestureLocation())
let stroke = ImageEditorStrokeItem(color: strokeColor, unitSamples: self.currentStrokeSamples, unitStrokeWidth: unitStrokeWidth)
self.model.append(item: stroke)
self.currentStroke = stroke
case .changed, .ended:
currentStrokeSamples.append(unitSampleForGestureLocation())
guard let lastStroke = self.currentStroke else {
owsFailDebug("Missing last stroke.")
removeCurrentStroke()
return
}
// Model items are immutable; we _replace_ the
// stroke item rather than modify it.
let stroke = ImageEditorStrokeItem(itemId: lastStroke.itemId, color: strokeColor, unitSamples: self.currentStrokeSamples, unitStrokeWidth: unitStrokeWidth)
self.model.replace(item: stroke)
self.currentStroke = stroke
if gestureRecognizer.state == .ended {
self.currentStroke = nil
self.currentStrokeSamples.removeAll()
}
default:
removeCurrentStroke()
}
}
// MARK: - ImageEditorModelDelegate
public func imageEditorModelDidChange() {
// TODO: We eventually want to narrow our change events
// to reflect the specific item(s) which changed.
updateAllContent()
}
// MARK: - Accessor Overrides
@objc public override var bounds: CGRect {
didSet {
if oldValue != bounds {
updateAllContent()
}
}
}
@objc public override var frame: CGRect {
didSet {
if oldValue != frame {
updateAllContent()
}
}
}
// MARK: - Content
var contentLayers = [CALayer]()
internal func updateAllContent() {
AssertIsOnMainThread()
for layer in contentLayers {
layer.removeFromSuperlayer()
}
contentLayers.removeAll()
guard bounds.width > 0,
bounds.height > 0 else {
return
}
// Don't animate changes.
CATransaction.begin()
CATransaction.setDisableActions(true)
for item in model.items() {
guard let layer = ImageEditorView.layerForItem(item: item,
viewSize: bounds.size) else {
continue
}
self.layer.addSublayer(layer)
contentLayers.append(layer)
}
CATransaction.commit()
}
private class func layerForItem(item: ImageEditorItem,
viewSize: CGSize) -> CALayer? {
AssertIsOnMainThread()
switch item.itemType {
case .test:
owsFailDebug("Unexpected test item.")
return nil
case .stroke:
guard let strokeItem = item as? ImageEditorStrokeItem else {
owsFailDebug("Item has unexpected type: \(type(of: item)).")
return nil
}
return strokeLayerForItem(item: strokeItem, viewSize: viewSize)
}
}
private class func strokeLayerForItem(item: ImageEditorStrokeItem,
viewSize: CGSize) -> CALayer? {
AssertIsOnMainThread()
Logger.verbose("\(item.itemId)")
let strokeWidth = ImageEditorStrokeItem.strokeWidth(forUnitStrokeWidth: item.unitStrokeWidth,
dstSize: viewSize)
let unitSamples = item.unitSamples
guard unitSamples.count > 1 else {
// Not an error; the stroke doesn't have enough samples to render yet.
return nil
}
let shapeLayer = CAShapeLayer()
shapeLayer.lineWidth = strokeWidth
shapeLayer.strokeColor = item.color.cgColor
shapeLayer.frame = CGRect(origin: .zero, size: viewSize)
let transformSampleToPoint = { (unitSample: CGPoint) -> CGPoint in
return CGPoint(x: viewSize.width * unitSample.x,
y: viewSize.height * unitSample.y)
}
// TODO: Use bezier curves to smooth stroke.
let bezierPath = UIBezierPath()
let points = applySmoothing(to: unitSamples.map { (unitSample) in
transformSampleToPoint(unitSample)
})
var previousForwardVector = CGPoint.zero
for index in 0..<points.count {
let point = points[index]
let forwardVector: CGPoint
if index == 0 {
// First sample.
let nextPoint = points[index + 1]
forwardVector = CGPointSubtract(nextPoint, point)
} else if index == points.count - 1 {
// Last sample.
let previousPoint = points[index - 1]
forwardVector = CGPointSubtract(point, previousPoint)
} else {
// Middle samples.
let previousPoint = points[index - 1]
let previousPointForwardVector = CGPointSubtract(point, previousPoint)
let nextPoint = points[index + 1]
let nextPointForwardVector = CGPointSubtract(nextPoint, point)
forwardVector = CGPointScale(CGPointAdd(previousPointForwardVector, nextPointForwardVector), 0.5)
}
if index == 0 {
// First sample.
bezierPath.move(to: point)
} else {
let previousPoint = points[index - 1]
// We apply more than one kind of smoothing.
// This smoothing avoids rendering "angled segments"
// by drawing the stroke as a series of curves.
// We use bezier curves and infer the control points
// from the "next" and "prev" points.
//
// This factor controls how much we're smoothing.
//
// * 0.0 = No smoothing.
//
// TODO: Tune this variable once we have stroke input.
let controlPointFactor: CGFloat = 0.25
let controlPoint1 = CGPointAdd(previousPoint, CGPointScale(previousForwardVector, +controlPointFactor))
let controlPoint2 = CGPointAdd(point, CGPointScale(forwardVector, -controlPointFactor))
// We're using Cubic curves.
bezierPath.addCurve(to: point, controlPoint1: controlPoint1, controlPoint2: controlPoint2)
}
previousForwardVector = forwardVector
}
shapeLayer.path = bezierPath.cgPath
shapeLayer.fillColor = nil
shapeLayer.lineCap = kCALineCapRound
return shapeLayer
}
// We apply more than one kind of smoothing.
//
// This (simple) smoothing reduces jitter from the touch sensor.
private class func applySmoothing(to points: [CGPoint]) -> [CGPoint] {
AssertIsOnMainThread()
var result = [CGPoint]()
for index in 0..<points.count {
let point = points[index]
if index == 0 {
// First sample.
result.append(point)
} else if index == points.count - 1 {
// Last sample.
result.append(point)
} else {
// Middle samples.
let lastPoint = points[index - 1]
let nextPoint = points[index + 1]
let alpha: CGFloat = 0.1
let smoothedPoint = CGPointAdd(CGPointScale(point, 1.0 - 2.0 * alpha),
CGPointAdd(CGPointScale(lastPoint, alpha),
CGPointScale(nextPoint, alpha)))
result.append(smoothedPoint)
}
}
return result
}
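// Worked example of the smoothing above (illustrative): with alpha = 0.1 a
// middle sample p becomes 0.8 * p + 0.1 * prev + 0.1 * next. For
// prev = (0, 0), p = (10, 0), next = (20, 10) that yields (10, 1), pulling
// the jittery y value toward its neighbors while leaving x unchanged.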
// MARK: - Actions
// Returns nil on error.
@objc
public class func renderForOutput(model: ImageEditorModel) -> UIImage? {
// TODO: Do we want to render off the main thread?
AssertIsOnMainThread()
// Render output at same size as source image.
let dstSizePixels = model.srcImageSizePixels
let hasAlpha = NSData.hasAlpha(forValidImageFilePath: model.srcImagePath)
guard let srcImage = UIImage(contentsOfFile: model.srcImagePath) else {
owsFailDebug("Could not load src image.")
return nil
}
let dstScale: CGFloat = 1.0 // The size is specified in pixels, not in points.
UIGraphicsBeginImageContextWithOptions(dstSizePixels, !hasAlpha, dstScale)
defer { UIGraphicsEndImageContext() }
guard let context = UIGraphicsGetCurrentContext() else {
owsFailDebug("Could not create output context.")
return nil
}
context.interpolationQuality = .high
// Draw source image.
let dstFrame = CGRect(origin: .zero, size: model.srcImageSizePixels)
srcImage.draw(in: dstFrame)
for item in model.items() {
guard let layer = layerForItem(item: item,
viewSize: dstSizePixels) else {
Logger.error("Couldn't create layer for item.")
continue
}
// This might be superfluous, but ensure that the layer renders
// at "point=pixel" scale.
layer.contentsScale = 1.0
layer.render(in: context)
}
let scaledImage = UIGraphicsGetImageFromCurrentImageContext()
if scaledImage == nil {
owsFailDebug("could not generate dst image.")
}
return scaledImage
}
}

View file

@ -1,9 +1,13 @@
//
// Copyright (c) 2017 Open Whisper Systems. All rights reserved.
// Copyright (c) 2018 Open Whisper Systems. All rights reserved.
//
#import <UIKit/UIKit.h>
NS_ASSUME_NONNULL_BEGIN
NSString *NSStringForUIGestureRecognizerState(UIGestureRecognizerState state);
// This custom GR can be used to detect touches when they
// begin in a view. In order to honor touch dispatch, this
// GR will ignore touches that:
@ -14,3 +18,5 @@
@interface OWSAnyTouchGestureRecognizer : UIGestureRecognizer
@end
NS_ASSUME_NONNULL_END

View file

@ -1,10 +1,30 @@
//
// Copyright (c) 2017 Open Whisper Systems. All rights reserved.
// Copyright (c) 2018 Open Whisper Systems. All rights reserved.
//
#import "OWSAnyTouchGestureRecognizer.h"
#import <UIKit/UIGestureRecognizerSubclass.h>
NS_ASSUME_NONNULL_BEGIN
NSString *NSStringForUIGestureRecognizerState(UIGestureRecognizerState state)
{
switch (state) {
case UIGestureRecognizerStatePossible:
return @"UIGestureRecognizerStatePossible";
case UIGestureRecognizerStateBegan:
return @"UIGestureRecognizerStateBegan";
case UIGestureRecognizerStateChanged:
return @"UIGestureRecognizerStateChanged";
case UIGestureRecognizerStateEnded:
return @"UIGestureRecognizerStateEnded";
case UIGestureRecognizerStateCancelled:
return @"UIGestureRecognizerStateCancelled";
case UIGestureRecognizerStateFailed:
return @"UIGestureRecognizerStateFailed";
}
}
@implementation OWSAnyTouchGestureRecognizer
- (BOOL)canPreventGestureRecognizer:(UIGestureRecognizer *)preventedGestureRecognizer
@ -108,3 +128,5 @@
}
@end
NS_ASSUME_NONNULL_END

View file

@ -494,68 +494,13 @@ typedef void (^OWSLoadedThumbnailSuccess)(OWSLoadedThumbnail *loadedThumbnail);
}
return [self videoStillImage].size;
} else if ([self isImage] || [self isAnimated]) {
NSURL *_Nullable mediaUrl = self.originalMediaURL;
if (!mediaUrl) {
return CGSizeZero;
}
if (![self isValidImage]) {
return CGSizeZero;
}
// With CGImageSource we avoid loading the whole image into memory.
CGImageSourceRef source = CGImageSourceCreateWithURL((CFURLRef)mediaUrl, NULL);
if (!source) {
OWSFailDebug(@"Could not load image: %@", mediaUrl);
return CGSizeZero;
}
NSDictionary *options = @{
(NSString *)kCGImageSourceShouldCache : @(NO),
};
NSDictionary *properties
= (__bridge_transfer NSDictionary *)CGImageSourceCopyPropertiesAtIndex(source, 0, (CFDictionaryRef)options);
CGSize imageSize = CGSizeZero;
if (properties) {
NSNumber *orientation = properties[(NSString *)kCGImagePropertyOrientation];
NSNumber *width = properties[(NSString *)kCGImagePropertyPixelWidth];
NSNumber *height = properties[(NSString *)kCGImagePropertyPixelHeight];
if (width && height) {
imageSize = CGSizeMake(width.floatValue, height.floatValue);
if (orientation) {
imageSize =
[self applyImageOrientation:(UIImageOrientation)orientation.intValue toImageSize:imageSize];
}
} else {
OWSFailDebug(@"Could not determine size of image: %@", mediaUrl);
}
}
CFRelease(source);
return imageSize;
// imageSizeForFilePath checks validity.
return [NSData imageSizeForFilePath:self.originalFilePath mimeType:self.contentType];
} else {
return CGSizeZero;
}
}
- (CGSize)applyImageOrientation:(UIImageOrientation)orientation toImageSize:(CGSize)imageSize
{
switch (orientation) {
case UIImageOrientationUp: // EXIF = 1
case UIImageOrientationUpMirrored: // EXIF = 2
case UIImageOrientationDown: // EXIF = 3
case UIImageOrientationDownMirrored: // EXIF = 4
return imageSize;
case UIImageOrientationLeftMirrored: // EXIF = 5
case UIImageOrientationLeft: // EXIF = 6
case UIImageOrientationRightMirrored: // EXIF = 7
case UIImageOrientationRight: // EXIF = 8
return CGSizeMake(imageSize.height, imageSize.width);
default:
return imageSize;
}
}
- (BOOL)shouldHaveImageSize
{
return ([self isVideo] || [self isImage] || [self isAnimated]);

View file

@ -11,4 +11,11 @@
- (BOOL)ows_isValidImage;
- (BOOL)ows_isValidImageWithMimeType:(nullable NSString *)mimeType;
// Returns the image size in pixels.
//
// Returns CGSizeZero on error.
+ (CGSize)imageSizeForFilePath:(NSString *)filePath mimeType:(NSString *)mimeType;
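// Returns YES if the (valid) image at filePath has an alpha channel.
//
// Returns NO on error.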
+ (BOOL)hasAlphaForValidImageFilePath:(NSString *)filePath;
@end

View file

@ -2,8 +2,8 @@
// Copyright (c) 2018 Open Whisper Systems. All rights reserved.
//
#import "MIMETypeUtil.h"
#import "NSData+Image.h"
#import "MIMETypeUtil.h"
#import "OWSFileSystem.h"
#import <AVFoundation/AVFoundation.h>
#import <SignalServiceKit/SignalServiceKit-Swift.h>
@ -312,4 +312,91 @@ typedef NS_ENUM(NSInteger, ImageFormat) {
return (width > 0 && width < kMaxValidSize && height > 0 && height < kMaxValidSize);
}
+ (CGSize)imageSizeForFilePath:(NSString *)filePath mimeType:(NSString *)mimeType
{
if (![NSData ows_isValidImageAtPath:filePath mimeType:mimeType]) {
OWSLogError(@"Invalid image.");
return CGSizeZero;
}
NSURL *url = [NSURL fileURLWithPath:filePath];
// With CGImageSource we avoid loading the whole image into memory.
CGImageSourceRef source = CGImageSourceCreateWithURL((CFURLRef)url, NULL);
if (!source) {
OWSFailDebug(@"Could not load image: %@", url);
return CGSizeZero;
}
NSDictionary *options = @{
(NSString *)kCGImageSourceShouldCache : @(NO),
};
NSDictionary *properties
= (__bridge_transfer NSDictionary *)CGImageSourceCopyPropertiesAtIndex(source, 0, (CFDictionaryRef)options);
CGSize imageSize = CGSizeZero;
if (properties) {
NSNumber *orientation = properties[(NSString *)kCGImagePropertyOrientation];
NSNumber *width = properties[(NSString *)kCGImagePropertyPixelWidth];
NSNumber *height = properties[(NSString *)kCGImagePropertyPixelHeight];
if (width && height) {
imageSize = CGSizeMake(width.floatValue, height.floatValue);
if (orientation) {
imageSize = [self applyImageOrientation:(UIImageOrientation)orientation.intValue toImageSize:imageSize];
}
} else {
OWSFailDebug(@"Could not determine size of image: %@", url);
}
}
CFRelease(source);
return imageSize;
}
+ (CGSize)applyImageOrientation:(UIImageOrientation)orientation toImageSize:(CGSize)imageSize
{
switch (orientation) {
case UIImageOrientationUp: // EXIF = 1
case UIImageOrientationUpMirrored: // EXIF = 2
case UIImageOrientationDown: // EXIF = 3
case UIImageOrientationDownMirrored: // EXIF = 4
return imageSize;
case UIImageOrientationLeftMirrored: // EXIF = 5
case UIImageOrientationLeft: // EXIF = 6
case UIImageOrientationRightMirrored: // EXIF = 7
case UIImageOrientationRight: // EXIF = 8
return CGSizeMake(imageSize.height, imageSize.width);
default:
return imageSize;
}
}
+ (BOOL)hasAlphaForValidImageFilePath:(NSString *)filePath
{
NSURL *url = [NSURL fileURLWithPath:filePath];
// With CGImageSource we avoid loading the whole image into memory.
CGImageSourceRef source = CGImageSourceCreateWithURL((CFURLRef)url, NULL);
if (!source) {
OWSFailDebug(@"Could not load image: %@", url);
return NO;
}
NSDictionary *options = @{
(NSString *)kCGImageSourceShouldCache : @(NO),
};
NSDictionary *properties
= (__bridge_transfer NSDictionary *)CGImageSourceCopyPropertiesAtIndex(source, 0, (CFDictionaryRef)options);
BOOL result = NO;
if (properties) {
NSNumber *hasAlpha = properties[(NSString *)kCGImagePropertyHasAlpha];
if (hasAlpha) {
result = hasAlpha.boolValue;
} else {
OWSFailDebug(@"Could not determine transparency of image: %@", url);
}
}
CFRelease(source);
return result;
}
@end