Sketch out crop tool.

Matthew Chen 2019-02-06 16:00:22 -05:00
parent 0807325190
commit 618a3b1d47
27 changed files with 2401 additions and 1421 deletions

Signal.xcodeproj/project.pbxproj

@@ -234,12 +234,21 @@
34B6D27520F664C900765BE2 /* OWSUnreadIndicator.m in Sources */ = {isa = PBXBuildFile; fileRef = 34B6D27320F664C800765BE2 /* OWSUnreadIndicator.m */; };
34BBC84B220B2CB200857249 /* ImageEditorTextViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 34BBC84A220B2CB200857249 /* ImageEditorTextViewController.swift */; };
34BBC84D220B2D0800857249 /* ImageEditorPinchGestureRecognizer.swift in Sources */ = {isa = PBXBuildFile; fileRef = 34BBC84C220B2D0800857249 /* ImageEditorPinchGestureRecognizer.swift */; };
34BBC84F220B8A0100857249 /* ImageEditorCropViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 34BBC84E220B8A0100857249 /* ImageEditorCropViewController.swift */; };
34BBC851220B8EEF00857249 /* ImageEditorCanvasView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 34BBC850220B8EEF00857249 /* ImageEditorCanvasView.swift */; };
34BBC857220C7ADA00857249 /* ImageEditorItem.swift in Sources */ = {isa = PBXBuildFile; fileRef = 34BBC852220C7AD900857249 /* ImageEditorItem.swift */; };
34BBC858220C7ADA00857249 /* ImageEditorContents.swift in Sources */ = {isa = PBXBuildFile; fileRef = 34BBC853220C7ADA00857249 /* ImageEditorContents.swift */; };
34BBC859220C7ADA00857249 /* ImageEditorStrokeItem.swift in Sources */ = {isa = PBXBuildFile; fileRef = 34BBC854220C7ADA00857249 /* ImageEditorStrokeItem.swift */; };
34BBC85A220C7ADA00857249 /* ImageEditorTextItem.swift in Sources */ = {isa = PBXBuildFile; fileRef = 34BBC855220C7ADA00857249 /* ImageEditorTextItem.swift */; };
34BBC85B220C7ADA00857249 /* OrderedDictionary.swift in Sources */ = {isa = PBXBuildFile; fileRef = 34BBC856220C7ADA00857249 /* OrderedDictionary.swift */; };
34BBC85D220D19D600857249 /* ImageEditorPanGestureRecognizer.swift in Sources */ = {isa = PBXBuildFile; fileRef = 34BBC85C220D19D600857249 /* ImageEditorPanGestureRecognizer.swift */; };
34BBC861220E883300857249 /* ImageEditorModelTest.swift in Sources */ = {isa = PBXBuildFile; fileRef = 34BBC85F220E883200857249 /* ImageEditorModelTest.swift */; };
34BBC862220E883300857249 /* ImageEditorTest.swift in Sources */ = {isa = PBXBuildFile; fileRef = 34BBC860220E883200857249 /* ImageEditorTest.swift */; };
34BECE2B1F74C12700D7438D /* DebugUIStress.m in Sources */ = {isa = PBXBuildFile; fileRef = 34BECE2A1F74C12700D7438D /* DebugUIStress.m */; };
34BECE2E1F7ABCE000D7438D /* GifPickerViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 34BECE2D1F7ABCE000D7438D /* GifPickerViewController.swift */; };
34BECE301F7ABCF800D7438D /* GifPickerLayout.swift in Sources */ = {isa = PBXBuildFile; fileRef = 34BECE2F1F7ABCF800D7438D /* GifPickerLayout.swift */; };
34BEDB0B21C2FA3D007B0EAE /* OWS114RemoveDynamicInteractions.swift in Sources */ = {isa = PBXBuildFile; fileRef = 34BEDB0A21C2FA3D007B0EAE /* OWS114RemoveDynamicInteractions.swift */; };
34BEDB0E21C405B0007B0EAE /* ImageEditorModel.swift in Sources */ = {isa = PBXBuildFile; fileRef = 34BEDB0D21C405B0007B0EAE /* ImageEditorModel.swift */; };
34BEDB1121C41E71007B0EAE /* ImageEditorTest.swift in Sources */ = {isa = PBXBuildFile; fileRef = 34BEDB1021C41E71007B0EAE /* ImageEditorTest.swift */; };
34BEDB1321C43F6A007B0EAE /* ImageEditorView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 34BEDB1221C43F69007B0EAE /* ImageEditorView.swift */; };
34BEDB1621C80BCA007B0EAE /* OWSAnyTouchGestureRecognizer.h in Headers */ = {isa = PBXBuildFile; fileRef = 34BEDB1421C80BC9007B0EAE /* OWSAnyTouchGestureRecognizer.h */; settings = {ATTRIBUTES = (Public, ); }; };
34BEDB1721C80BCA007B0EAE /* OWSAnyTouchGestureRecognizer.m in Sources */ = {isa = PBXBuildFile; fileRef = 34BEDB1521C80BCA007B0EAE /* OWSAnyTouchGestureRecognizer.m */; };
@@ -914,13 +923,22 @@
34B6D27320F664C800765BE2 /* OWSUnreadIndicator.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = OWSUnreadIndicator.m; sourceTree = "<group>"; };
34BBC84A220B2CB200857249 /* ImageEditorTextViewController.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = ImageEditorTextViewController.swift; sourceTree = "<group>"; };
34BBC84C220B2D0800857249 /* ImageEditorPinchGestureRecognizer.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = ImageEditorPinchGestureRecognizer.swift; sourceTree = "<group>"; };
34BBC84E220B8A0100857249 /* ImageEditorCropViewController.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = ImageEditorCropViewController.swift; sourceTree = "<group>"; };
34BBC850220B8EEF00857249 /* ImageEditorCanvasView.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = ImageEditorCanvasView.swift; sourceTree = "<group>"; };
34BBC852220C7AD900857249 /* ImageEditorItem.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = ImageEditorItem.swift; sourceTree = "<group>"; };
34BBC853220C7ADA00857249 /* ImageEditorContents.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = ImageEditorContents.swift; sourceTree = "<group>"; };
34BBC854220C7ADA00857249 /* ImageEditorStrokeItem.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = ImageEditorStrokeItem.swift; sourceTree = "<group>"; };
34BBC855220C7ADA00857249 /* ImageEditorTextItem.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = ImageEditorTextItem.swift; sourceTree = "<group>"; };
34BBC856220C7ADA00857249 /* OrderedDictionary.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = OrderedDictionary.swift; sourceTree = "<group>"; };
34BBC85C220D19D600857249 /* ImageEditorPanGestureRecognizer.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = ImageEditorPanGestureRecognizer.swift; sourceTree = "<group>"; };
34BBC85F220E883200857249 /* ImageEditorModelTest.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = ImageEditorModelTest.swift; sourceTree = "<group>"; };
34BBC860220E883200857249 /* ImageEditorTest.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = ImageEditorTest.swift; sourceTree = "<group>"; };
34BECE291F74C12700D7438D /* DebugUIStress.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = DebugUIStress.h; sourceTree = "<group>"; };
34BECE2A1F74C12700D7438D /* DebugUIStress.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = DebugUIStress.m; sourceTree = "<group>"; };
34BECE2D1F7ABCE000D7438D /* GifPickerViewController.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = GifPickerViewController.swift; sourceTree = "<group>"; };
34BECE2F1F7ABCF800D7438D /* GifPickerLayout.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = GifPickerLayout.swift; sourceTree = "<group>"; };
34BEDB0A21C2FA3D007B0EAE /* OWS114RemoveDynamicInteractions.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = OWS114RemoveDynamicInteractions.swift; sourceTree = "<group>"; };
34BEDB0D21C405B0007B0EAE /* ImageEditorModel.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = ImageEditorModel.swift; sourceTree = "<group>"; };
34BEDB1021C41E71007B0EAE /* ImageEditorTest.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = ImageEditorTest.swift; sourceTree = "<group>"; };
34BEDB1221C43F69007B0EAE /* ImageEditorView.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = ImageEditorView.swift; sourceTree = "<group>"; };
34BEDB1421C80BC9007B0EAE /* OWSAnyTouchGestureRecognizer.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = OWSAnyTouchGestureRecognizer.h; sourceTree = "<group>"; };
34BEDB1521C80BCA007B0EAE /* OWSAnyTouchGestureRecognizer.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = OWSAnyTouchGestureRecognizer.m; sourceTree = "<group>"; };
@@ -1867,6 +1885,15 @@
path = ViewControllers;
sourceTree = "<group>";
};
34BBC85E220E883200857249 /* ImageEditor */ = {
isa = PBXGroup;
children = (
34BBC85F220E883200857249 /* ImageEditorModelTest.swift */,
34BBC860220E883200857249 /* ImageEditorTest.swift */,
);
path = ImageEditor;
sourceTree = "<group>";
};
34BECE2C1F7ABCE000D7438D /* GifPicker */ = {
isa = PBXGroup;
children = (
@@ -1880,11 +1907,19 @@
34BEDB0C21C405B0007B0EAE /* ImageEditor */ = {
isa = PBXGroup;
children = (
34BBC850220B8EEF00857249 /* ImageEditorCanvasView.swift */,
34BBC853220C7ADA00857249 /* ImageEditorContents.swift */,
34BBC84E220B8A0100857249 /* ImageEditorCropViewController.swift */,
34BEDB1821C82AC5007B0EAE /* ImageEditorGestureRecognizer.swift */,
34BBC852220C7AD900857249 /* ImageEditorItem.swift */,
34BEDB0D21C405B0007B0EAE /* ImageEditorModel.swift */,
34BBC85C220D19D600857249 /* ImageEditorPanGestureRecognizer.swift */,
34BBC84C220B2D0800857249 /* ImageEditorPinchGestureRecognizer.swift */,
34BBC854220C7ADA00857249 /* ImageEditorStrokeItem.swift */,
34BBC855220C7ADA00857249 /* ImageEditorTextItem.swift */,
34BBC84A220B2CB200857249 /* ImageEditorTextViewController.swift */,
34BEDB1221C43F69007B0EAE /* ImageEditorView.swift */,
34BBC856220C7ADA00857249 /* OrderedDictionary.swift */,
);
path = ImageEditor;
sourceTree = "<group>";
@@ -1892,7 +1927,7 @@
34BEDB0F21C41E71007B0EAE /* views */ = {
isa = PBXGroup;
children = (
34BEDB1021C41E71007B0EAE /* ImageEditorTest.swift */,
34BBC85E220E883200857249 /* ImageEditor */,
);
path = views;
sourceTree = "<group>";
@@ -3305,17 +3340,20 @@
4CBBCA6321714B4500EEB37D /* OWS110SortIdMigration.swift in Sources */,
342950832124C9750000B063 /* OWSTextView.m in Sources */,
452EC6E1205FF5DC000E787C /* Bench.swift in Sources */,
34BBC85D220D19D600857249 /* ImageEditorPanGestureRecognizer.swift in Sources */,
342950882124CB0A0000B063 /* OWSSearchBar.m in Sources */,
342950822124C9750000B063 /* OWSTextField.m in Sources */,
34AC0A13211B39EA00997B47 /* DisappearingTimerConfigurationView.swift in Sources */,
4CA46F4D219CFDAA0038ABDE /* GalleryRailView.swift in Sources */,
34480B621FD0A98800BC14EF /* UIColor+OWS.m in Sources */,
4C20B2B720CA0034001BAC90 /* ThreadViewModel.swift in Sources */,
34BBC857220C7ADA00857249 /* ImageEditorItem.swift in Sources */,
34480B641FD0A98800BC14EF /* UIView+OWS.m in Sources */,
34AC0A1C211B39EA00997B47 /* OWSFlatButton.swift in Sources */,
34C3C7932040B0DD0000134C /* OWSAudioPlayer.m in Sources */,
34AC09E5211B39B100997B47 /* ScreenLockViewController.m in Sources */,
34AC09F7211B39B100997B47 /* MediaMessageView.swift in Sources */,
34BBC858220C7ADA00857249 /* ImageEditorContents.swift in Sources */,
3461293A1FD1B47300532771 /* OWSPreferences.m in Sources */,
34AC09E6211B39B100997B47 /* SelectRecipientViewController.m in Sources */,
4C858A52212DC5E1001B45D3 /* UIImage+OWS.swift in Sources */,
@@ -3327,6 +3365,7 @@
346129AB1FD1F0EE00532771 /* OWSFormat.m in Sources */,
34AC0A12211B39EA00997B47 /* ContactTableViewCell.m in Sources */,
451F8A461FD715BA005CB9DA /* OWSGroupAvatarBuilder.m in Sources */,
34BBC85B220C7ADA00857249 /* OrderedDictionary.swift in Sources */,
346129961FD1E30000532771 /* OWSDatabaseMigration.m in Sources */,
346129FB1FD5F31400532771 /* OWS101ExistingUsersBlockOnIdentityChange.m in Sources */,
34AC09EA211B39B100997B47 /* ModalActivityIndicatorViewController.swift in Sources */,
@@ -3355,6 +3394,7 @@
45BC829D1FD9C4B400011CF3 /* ShareViewDelegate.swift in Sources */,
3461295B1FD1D74C00532771 /* Environment.m in Sources */,
346129D51FD20ADC00532771 /* UIViewController+OWS.m in Sources */,
34BBC851220B8EEF00857249 /* ImageEditorCanvasView.swift in Sources */,
347850691FD9B78A007B8332 /* AppSetup.m in Sources */,
346941A3215D2EE400B5BFAD /* Theme.m in Sources */,
4C23A5F2215C4ADE00534937 /* SheetViewController.swift in Sources */,
@@ -3369,6 +3409,7 @@
34AC0A1A211B39EA00997B47 /* CommonStrings.swift in Sources */,
34AC0A19211B39EA00997B47 /* OWSAlerts.swift in Sources */,
34FDB29221FF986600A01202 /* UIView+OWS.swift in Sources */,
34BBC859220C7ADA00857249 /* ImageEditorStrokeItem.swift in Sources */,
451F8A351FD710DE005CB9DA /* Searcher.swift in Sources */,
451F8A481FD715BA005CB9DA /* OWSContactAvatarBuilder.m in Sources */,
4503F1C3204711D300CEE724 /* OWS107LegacySounds.m in Sources */,
@@ -3391,6 +3432,7 @@
34AC09F0211B39B100997B47 /* AttachmentApprovalViewController.swift in Sources */,
451F8A441FD7156B005CB9DA /* BlockListUIUtils.m in Sources */,
34AC0A1E211B39EA00997B47 /* ThreadViewHelper.m in Sources */,
34BBC85A220C7ADA00857249 /* ImageEditorTextItem.swift in Sources */,
34641E182088D7E900E2EDE5 /* OWSScreenLock.swift in Sources */,
346129721FD1D74C00532771 /* SignalKeyingStorage.m in Sources */,
349EA07C2162AEA800F7B17F /* OWS111UDAttributesMigration.swift in Sources */,
@@ -3399,6 +3441,7 @@
34ABB2C42090C59700C727A6 /* OWSResaveCollectionDBMigration.m in Sources */,
4C948FF72146EB4800349F0D /* BlockListCache.swift in Sources */,
4551DB5A205C562300C8AE75 /* Collection+OWS.swift in Sources */,
34BBC84F220B8A0100857249 /* ImageEditorCropViewController.swift in Sources */,
34AC09ED211B39B100997B47 /* ContactFieldView.swift in Sources */,
346129AF1FD1F5D900532771 /* SystemContactsFetcher.swift in Sources */,
34AC09E3211B39B100997B47 /* OWSViewController.m in Sources */,
@@ -3614,14 +3657,15 @@
files = (
456F6E2F1E261D1000FD2210 /* PeerConnectionClientTest.swift in Sources */,
458967111DC117CC00E9DD21 /* AccountManagerTest.swift in Sources */,
34BEDB1121C41E71007B0EAE /* ImageEditorTest.swift in Sources */,
3491D9A121022DB7001EF5A1 /* CDSSigningCertificateTest.m in Sources */,
34BBC861220E883300857249 /* ImageEditorModelTest.swift in Sources */,
340B02BA1FA0D6C700F9CFEC /* ConversationViewItemTest.m in Sources */,
458E383A1D6699FA0094BD24 /* OWSDeviceProvisioningURLParserTest.m in Sources */,
3421981C21061D2E00C57195 /* ByteParserTest.swift in Sources */,
34843B26214327C9004DED45 /* OWSOrphanDataCleanerTest.m in Sources */,
4C04F58421C860C50090D0BB /* MantlePerfTest.swift in Sources */,
45360B901F9527DA00FA666C /* SearcherTest.swift in Sources */,
34BBC862220E883300857249 /* ImageEditorTest.swift in Sources */,
34DB0BED2011548B007B313F /* OWSDatabaseConverterTest.m in Sources */,
34843B2C214FE296004DED45 /* MockEnvironment.m in Sources */,
45360B911F952AA900FA666C /* MarqueeLabel.swift in Sources */,

Signal.xcodeproj/xcshareddata/xcschemes/Signal.xcscheme

@@ -28,7 +28,7 @@
buildForAnalyzing = "YES">
<BuildableReference
BuildableIdentifier = "primary"
BlueprintIdentifier = "EF7BE58B1F949AF31E4AF4FD37150A86"
BlueprintIdentifier = "4D5E7522A33906C902399C86F0A95AA5"
BuildableName = "SignalServiceKit.framework"
BlueprintName = "SignalServiceKit"
ReferencedContainer = "container:Pods/Pods.xcodeproj">
@@ -56,7 +56,7 @@
skipped = "NO">
<BuildableReference
BuildableIdentifier = "primary"
BlueprintIdentifier = "2349E04BB99C0118E8F578CCBEAAC665"
BlueprintIdentifier = "AB62203226FE5032747AA668B1E97176"
BuildableName = "SignalServiceKit-Unit-Tests.xctest"
BlueprintName = "SignalServiceKit-Unit-Tests"
ReferencedContainer = "container:Pods/Pods.xcodeproj">
@@ -66,7 +66,7 @@
skipped = "NO">
<BuildableReference
BuildableIdentifier = "primary"
BlueprintIdentifier = "3AC929F2E4978F42ED9E9EA232D7247B"
BlueprintIdentifier = "BC89BC6B06642C78EABF18B43FFB41DE"
BuildableName = "SignalCoreKit-Unit-Tests.xctest"
BlueprintName = "SignalCoreKit-Unit-Tests"
ReferencedContainer = "container:Pods/Pods.xcodeproj">
@@ -76,7 +76,7 @@
skipped = "NO">
<BuildableReference
BuildableIdentifier = "primary"
BlueprintIdentifier = "5B34FB0B5ABA685EF33F1BA1C388F016"
BlueprintIdentifier = "E012D804E31EC876E21973F300B46CCB"
BuildableName = "AxolotlKit-Unit-Tests.xctest"
BlueprintName = "AxolotlKit-Unit-Tests"
ReferencedContainer = "container:Pods/Pods.xcodeproj">
@@ -86,7 +86,7 @@
skipped = "NO">
<BuildableReference
BuildableIdentifier = "primary"
BlueprintIdentifier = "D85B45003BA81D72F606FDF3EB4B4E1C"
BlueprintIdentifier = "B4A8ABAC14D962F8F42814238460DF44"
BuildableName = "Curve25519Kit-Unit-Tests.xctest"
BlueprintName = "Curve25519Kit-Unit-Tests"
ReferencedContainer = "container:Pods/Pods.xcodeproj">
@@ -96,7 +96,7 @@
skipped = "NO">
<BuildableReference
BuildableIdentifier = "primary"
BlueprintIdentifier = "F9C2DA0BADF4F69559F0AA5BB4FC1E06"
BlueprintIdentifier = "E5C44C64CDE3569B16010274DCD48BCC"
BuildableName = "HKDFKit-Unit-Tests.xctest"
BlueprintName = "HKDFKit-Unit-Tests"
ReferencedContainer = "container:Pods/Pods.xcodeproj">
@@ -106,7 +106,7 @@
skipped = "NO">
<BuildableReference
BuildableIdentifier = "primary"
BlueprintIdentifier = "92057C418B970541FF6BE6E64A49D8C2"
BlueprintIdentifier = "00649589DE58D9DECF419A3CC47D6924"
BuildableName = "SignalMetadataKit-Unit-Tests.xctest"
BlueprintName = "SignalMetadataKit-Unit-Tests"
ReferencedContainer = "container:Pods/Pods.xcodeproj">
@@ -136,7 +136,8 @@
buildConfiguration = "Debug"
selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB"
selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB"
disableMainThreadChecker = "YES"
enableThreadSanitizer = "YES"
enableUBSanitizer = "YES"
launchStyle = "0"
useCustomWorkingDirectory = "NO"
ignoresPersistentStateOnLaunch = "NO"

Signal/src/ViewControllers/ConversationView/ConversationViewController.m

@@ -1246,6 +1246,22 @@ typedef enum : NSUInteger {
OWSFailDebug(@"Missing asset.");
}
for (ConversationInteractionViewItem *viewItem in self.conversationViewModel.viewItems
.reverseObjectEnumerator) {
if (viewItem.mediaAlbumItems.count < 1) {
continue;
}
ConversationMediaAlbumItem *mediaItem = viewItem.mediaAlbumItems.firstObject;
if (mediaItem.attachmentStream == nil) {
continue;
}
if (!mediaItem.attachmentStream.isValidImage) {
continue;
}
filePath = mediaItem.attachmentStream.originalFilePath;
break;
}
DataSource *_Nullable dataSource =
[DataSourcePath dataSourceWithFilePath:filePath shouldDeleteOnDeallocation:NO];
if (!dataSource) {
@@ -1253,10 +1269,12 @@
return;
}
NSString *fileExtension = filePath.pathExtension;
NSString *dataUTI = [MIMETypeUtil utiTypeForFileExtension:fileExtension];
// "Document picker" attachments _SHOULD NOT_ be resized, if possible.
SignalAttachment *attachment = [SignalAttachment attachmentWithDataSource:dataSource
dataUTI:(NSString *)kUTTypePNG
imageQuality:TSImageQualityOriginal];
SignalAttachment *attachment =
[SignalAttachment attachmentWithDataSource:dataSource dataUTI:dataUTI imageQuality:TSImageQualityOriginal];
[self showApprovalDialogForAttachment:attachment];
});
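The hunk above replaces a hardcoded kUTTypePNG with a UTI derived from the file's extension. A minimal Swift sketch of that lookup using the stock CoreServices API (MIMETypeUtil is Signal's Objective-C helper; this standalone function name is illustrative, not from the diff):

import MobileCoreServices

func utiType(forFileExtension fileExtension: String) -> String? {
    // Map a filename extension ("png", "jpg", ...) to its preferred UTI.
    guard let uti = UTTypeCreatePreferredIdentifierForTag(kUTTagClassFilenameExtension,
                                                          fileExtension as CFString,
                                                          nil) else {
        return nil
    }
    return uti.takeRetainedValue() as String
}

// utiType(forFileExtension: "png") == "public.png"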

SignalMessaging/Views/OWSProgressView.m

@@ -104,7 +104,7 @@ NS_ASSUME_NONNULL_BEGIN
CGFloat baseProgress = borderThickness * 2;
CGFloat minProgress = baseProgress;
CGFloat maxProgress = MAX(0, self.bounds.size.width - baseProgress);
progressRect.size.width = CGFloatLerp(minProgress, maxProgress, self.progress);
progressRect.size.width = CGFloatLerp(minProgress, maxProgress, CGFloatClamp01(self.progress));
UIBezierPath *progressPath = [UIBezierPath bezierPathWithRoundedRect:progressRect cornerRadius:cornerRadius];
self.progressLayer.path = progressPath.CGPath;
self.progressLayer.fillColor = self.color.CGColor;

Signal/src/views/TypingIndicatorView.swift

@@ -1,5 +1,5 @@
//
// Copyright (c) 2018 Open Whisper Systems. All rights reserved.
// Copyright (c) 2019 Open Whisper Systems. All rights reserved.
//
@objc class TypingIndicatorView: UIStackView {
@@ -108,9 +108,9 @@
var animationDuration: CFTimeInterval = 0
let addDotKeyFrame = { (keyFrameTime: CFTimeInterval, progress: CGFloat) in
let dotColor = baseColor.withAlphaComponent(CGFloatLerp(0.4, 1.0, progress))
let dotColor = baseColor.withAlphaComponent(CGFloatLerp(0.4, 1.0, CGFloatClamp01(progress)))
colorValues.append(dotColor.cgColor)
let radius = CGFloatLerp(TypingIndicatorView.kMinRadiusPt, TypingIndicatorView.kMaxRadiusPt, progress)
let radius = CGFloatLerp(TypingIndicatorView.kMinRadiusPt, TypingIndicatorView.kMaxRadiusPt, CGFloatClamp01(progress))
let margin = (TypingIndicatorView.kMaxRadiusPt - radius) * 0.5
let bezierPath = UIBezierPath(ovalIn: CGRect(x: margin, y: margin, width: radius, height: radius))
pathValues.append(bezierPath.cgPath)
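Both this hunk and the OWSProgressView change above clamp progress into [0, 1] before interpolating. A minimal sketch of the pattern, assuming CGFloatLerp(a, b, t) is the usual a + (b - a) * t as in Signal's math helpers:

import CoreGraphics

func lerpClamped(_ from: CGFloat, _ to: CGFloat, _ t: CGFloat) -> CGFloat {
    let clamped = min(1, max(0, t)) // CGFloatClamp01
    return from + (to - from) * clamped // CGFloatLerp
}

// lerpClamped(0.4, 1.0, 1.7) == 1.0; an overshooting progress value no longer overshoots the alpha.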

Signal/test/views/ImageEditor/ImageEditorModelTest.swift

@@ -0,0 +1,81 @@
//
// Copyright (c) 2019 Open Whisper Systems. All rights reserved.
//
import XCTest
@testable import Signal
@testable import SignalMessaging
class ImageEditorModelTest: SignalBaseTest {
// override func setUp() {
// super.setUp()
// }
//
// override func tearDown() {
// // Put teardown code here. This method is called after the invocation of each test method in the class.
// super.tearDown()
// }
func testImageEditorTransform0() {
let imageSizePixels = CGSize(width: 200, height: 300)
let outputSizePixels = CGSize(width: 200, height: 300)
let unitTranslation = CGPoint.zero
let rotationRadians: CGFloat = 0
let scaling: CGFloat = 1
let transform = ImageEditorTransform(outputSizePixels: outputSizePixels, unitTranslation: unitTranslation, rotationRadians: rotationRadians, scaling: scaling)
let viewSize = outputSizePixels
let imageFrame = ImageEditorCanvasView.imageFrame(forViewSize: viewSize, imageSize: imageSizePixels, transform: transform)
let affineTransform = transform.affineTransform(viewSize: viewSize)
XCTAssertEqual(0.0, imageFrame.topLeft.applying(affineTransform).x, accuracy: 0.1)
XCTAssertEqual(0.0, imageFrame.topLeft.applying(affineTransform).y, accuracy: 0.1)
XCTAssertEqual(100.0, imageFrame.center.applying(affineTransform).x, accuracy: 0.1)
XCTAssertEqual(150.0, imageFrame.center.applying(affineTransform).y, accuracy: 0.1)
XCTAssertEqual(200.0, imageFrame.bottomRight.applying(affineTransform).x, accuracy: 0.1)
XCTAssertEqual(300.0, imageFrame.bottomRight.applying(affineTransform).y, accuracy: 0.1)
}
func testImageEditorTransform1() {
let imageSizePixels = CGSize(width: 864, height: 1536)
let outputSizePixels = CGSize(width: 432, height: 768)
let unitTranslation = CGPoint(x: +0.5, y: -0.5)
let rotationRadians: CGFloat = 0
let scaling: CGFloat = 2
let transform = ImageEditorTransform(outputSizePixels: outputSizePixels, unitTranslation: unitTranslation, rotationRadians: rotationRadians, scaling: scaling)
let viewSize = CGSize(width: 335, height: 595)
let imageFrame = ImageEditorCanvasView.imageFrame(forViewSize: viewSize, imageSize: imageSizePixels, transform: transform)
let affineTransform = transform.affineTransform(viewSize: viewSize)
XCTAssertEqual(0.0, imageFrame.topLeft.applying(affineTransform).x, accuracy: 0.1)
XCTAssertEqual(0.0, imageFrame.topLeft.applying(affineTransform).y, accuracy: 0.1)
XCTAssertEqual(100.0, imageFrame.center.applying(affineTransform).x, accuracy: 0.1)
XCTAssertEqual(150.0, imageFrame.center.applying(affineTransform).y, accuracy: 0.1)
XCTAssertEqual(200.0, imageFrame.bottomRight.applying(affineTransform).x, accuracy: 0.1)
XCTAssertEqual(300.0, imageFrame.bottomRight.applying(affineTransform).y, accuracy: 0.1)
}
func testAffineTransformComposition() {
XCTAssertEqual(+20.0, CGPoint.zero.applying(CGAffineTransform.translate(CGPoint(x: 20, y: 30)).scale(5)).x, accuracy: 0.1)
XCTAssertEqual(+30.0, CGPoint.zero.applying(CGAffineTransform.translate(CGPoint(x: 20, y: 30)).scale(5)).y, accuracy: 0.1)
XCTAssertEqual(+100.0, CGPoint.zero.applying(CGAffineTransform.scale(5).translate(CGPoint(x: 20, y: 30))).x, accuracy: 0.1)
XCTAssertEqual(+150.0, CGPoint.zero.applying(CGAffineTransform.scale(5).translate(CGPoint(x: 20, y: 30))).y, accuracy: 0.1)
XCTAssertEqual(+20.0, CGPoint.zero.applying(CGAffineTransform.translate(CGPoint(x: 20, y: 30)).rotate(CGFloat.halfPi).scale(5)).x, accuracy: 0.1)
XCTAssertEqual(+30.0, CGPoint.zero.applying(CGAffineTransform.translate(CGPoint(x: 20, y: 30)).rotate(CGFloat.halfPi).scale(5)).y, accuracy: 0.1)
XCTAssertEqual(-150.0, CGPoint.zero.applying(CGAffineTransform.scale(5).rotate(CGFloat.halfPi).translate(CGPoint(x: 20, y: 30))).x, accuracy: 0.1)
XCTAssertEqual(+100.0, CGPoint.zero.applying(CGAffineTransform.scale(5).rotate(CGFloat.halfPi).translate(CGPoint(x: 20, y: 30))).y, accuracy: 0.1)
XCTAssertEqual(+25.0, CGPoint.unit.applying(CGAffineTransform.translate(CGPoint(x: 20, y: 30)).scale(5)).x, accuracy: 0.1)
XCTAssertEqual(+35.0, CGPoint.unit.applying(CGAffineTransform.translate(CGPoint(x: 20, y: 30)).scale(5)).y, accuracy: 0.1)
XCTAssertEqual(+105.0, CGPoint.unit.applying(CGAffineTransform.scale(5).translate(CGPoint(x: 20, y: 30))).x, accuracy: 0.1)
XCTAssertEqual(+155.0, CGPoint.unit.applying(CGAffineTransform.scale(5).translate(CGPoint(x: 20, y: 30))).y, accuracy: 0.1)
XCTAssertEqual(+15.0, CGPoint.unit.applying(CGAffineTransform.translate(CGPoint(x: 20, y: 30)).rotate(CGFloat.halfPi).scale(5)).x, accuracy: 0.1)
XCTAssertEqual(+35.0, CGPoint.unit.applying(CGAffineTransform.translate(CGPoint(x: 20, y: 30)).rotate(CGFloat.halfPi).scale(5)).y, accuracy: 0.1)
XCTAssertEqual(-155.0, CGPoint.unit.applying(CGAffineTransform.scale(5).rotate(CGFloat.halfPi).translate(CGPoint(x: 20, y: 30))).x, accuracy: 0.1)
XCTAssertEqual(+105.0, CGPoint.unit.applying(CGAffineTransform.scale(5).rotate(CGFloat.halfPi).translate(CGPoint(x: 20, y: 30))).y, accuracy: 0.1)
}
}
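These composition assertions follow from CoreGraphics' chaining rules. A standalone sketch with the stock API (the .translate/.scale/.rotate helpers used above are Signal extensions; this assumes they chain the same way translatedBy/scaledBy do):

import CoreGraphics

let translateThenScale = CGAffineTransform(translationX: 20, y: 30).scaledBy(x: 5, y: 5)
// scaledBy prepends the scale: p' = translate(scale(p)), so (0,0) -> (20,30) and (1,1) -> (25,35).
let scaleThenTranslate = CGAffineTransform(scaleX: 5, y: 5).translatedBy(x: 20, y: 30)
// translatedBy prepends the translation: p' = scale(translate(p)), so (0,0) -> (100,150) and (1,1) -> (105,155).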

Signal/test/views/ImageEditor/ImageEditorTest.swift

@@ -1,5 +1,5 @@
//
// Copyright (c) 2018 Open Whisper Systems. All rights reserved.
// Copyright (c) 2019 Open Whisper Systems. All rights reserved.
//
import XCTest
@@ -26,10 +26,7 @@ class ImageEditorTest: SignalBaseTest {
}
func testImageEditorContents() {
let imagePath = writeDummyImage()
let contents = ImageEditorContents(imagePath: imagePath,
imageSizePixels: CGSize(width: 1, height: 1))
let contents = ImageEditorContents()
XCTAssertEqual(0, contents.itemMap.count)
let item = ImageEditorItem(itemType: .test)

Signal/translations/en.lproj/Localizable.strings

@@ -1086,6 +1086,15 @@
/* Label for crop button in image editor. */
"IMAGE_EDITOR_CROP_BUTTON" = "Crop";
/* Label for button that resets crop & rotation state. */
"IMAGE_EDITOR_RESET_BUTTON" = "Reset";
/* Label for button that rotates image 90 degrees. */
"IMAGE_EDITOR_ROTATE_90_BUTTON" = "Rotate 90°";
/* Label for button that rotates image 45 degrees. */
"IMAGE_EDITOR_ROTATE_45_BUTTON" = "Rotate 45°";
/* Momentarily shown to the user when attempting to select more images than is allowed. Embeds {{max number of items}} that can be shared. */
"IMAGE_PICKER_CAN_SELECT_NO_MORE_TOAST_FORMAT" = "You can't share more than %@ items.";

SignalMessaging/ViewControllers/AttachmentApprovalViewController.swift

@@ -589,11 +589,11 @@ public class AttachmentApprovalViewController: UIPageViewController, UIPageViewC
// Image was not edited.
return attachmentItem.attachment
}
guard imageEditorModel.itemCount() > 0 else {
guard imageEditorModel.isDirty() else {
// Image editor has no changes.
return attachmentItem.attachment
}
guard let dstImage = ImageEditorView.renderForOutput(model: imageEditorModel) else {
guard let dstImage = ImageEditorCanvasView.renderForOutput(model: imageEditorModel, transform: imageEditorModel.currentTransform()) else {
owsFailDebug("Could not render for output.")
return attachmentItem.attachment
}
@@ -945,9 +945,13 @@ public class AttachmentPrepViewController: OWSViewController, PlayerProgressBarD
if imageEditorView.configureSubviews() {
mediaMessageView.isHidden = true
// TODO: Is this necessary?
imageMediaView.isUserInteractionEnabled = true
mediaMessageView.superview?.addSubview(imageEditorView)
imageEditorView.autoPin(toEdgesOf: mediaMessageView)
contentContainer.addSubview(imageEditorView)
imageEditorView.autoPin(toTopLayoutGuideOf: self, withInset: 0)
autoPinView(toBottomOfViewControllerOrKeyboard: imageEditorView, avoidNotch: true)
imageEditorView.autoPinWidthToSuperview()
imageEditorView.addControls(to: imageEditorView)
}

SignalMessaging/Views/ImageEditor/ImageEditorCanvasView.swift

@@ -0,0 +1,657 @@
//
// Copyright (c) 2019 Open Whisper Systems. All rights reserved.
//
import UIKit
public class EditorTextLayer: CATextLayer {
let itemId: String
public init(itemId: String) {
self.itemId = itemId
super.init()
}
@available(*, unavailable, message: "use other init() instead.")
required public init?(coder aDecoder: NSCoder) {
notImplemented()
}
}
// MARK: -
// A view for previewing an image editor model.
@objc
public class ImageEditorCanvasView: UIView {
private let model: ImageEditorModel
@objc
public required init(model: ImageEditorModel) {
self.model = model
super.init(frame: .zero)
model.add(observer: self)
}
@available(*, unavailable, message: "use other init() instead.")
required public init?(coder aDecoder: NSCoder) {
notImplemented()
}
// MARK: - Views
// TODO: Audit all usage of this view.
public let contentView = OWSLayerView()
private let clipView = OWSLayerView()
private var contentViewConstraints = [NSLayoutConstraint]()
private var srcImage: UIImage?
private var imageLayer = CALayer()
@objc
public func configureSubviews() -> Bool {
self.backgroundColor = .clear
self.isOpaque = false
self.srcImage = loadSrcImage()
clipView.clipsToBounds = true
clipView.backgroundColor = .clear
clipView.isOpaque = false
clipView.layoutCallback = { [weak self] (_) in
guard let strongSelf = self else {
return
}
strongSelf.updateLayout()
}
addSubview(clipView)
if let srcImage = srcImage {
imageLayer.contents = srcImage.cgImage
imageLayer.contentsScale = srcImage.scale
}
contentView.backgroundColor = .clear
contentView.isOpaque = false
contentView.layer.addSublayer(imageLayer)
contentView.layoutCallback = { [weak self] (_) in
guard let strongSelf = self else {
return
}
strongSelf.updateAllContent()
}
clipView.addSubview(contentView)
contentView.ows_autoPinToSuperviewEdges()
updateLayout()
return true
}
public var gestureReferenceView: UIView {
return clipView
}
private func updateLayout() {
NSLayoutConstraint.deactivate(contentViewConstraints)
contentViewConstraints = ImageEditorCanvasView.updateContentLayout(transform: model.currentTransform(),
contentView: clipView)
}
public class func updateContentLayout(transform: ImageEditorTransform,
contentView: UIView) -> [NSLayoutConstraint] {
guard let superview = contentView.superview else {
owsFailDebug("Content view has no superview.")
return []
}
let outputSizePixels = transform.outputSizePixels
let aspectRatio = outputSizePixels
var constraints = superview.applyScaleAspectFitLayout(subview: contentView, aspectRatio: aspectRatio.width / aspectRatio.height)
let screenSize = UIScreen.main.bounds.size
let maxScreenSize = max(screenSize.width, screenSize.height)
let outputSizePoints = CGSize(width: maxScreenSize, height: maxScreenSize)
// TODO: Add a "shouldFill" parameter.
// let outputSizePoints = CGSizeScale(outputSizePixels, 1.0 / UIScreen.main.scale)
NSLayoutConstraint.autoSetPriority(UILayoutPriority.defaultLow) {
constraints.append(contentsOf: contentView.autoSetDimensions(to: outputSizePoints))
}
return constraints
}
@objc
public func loadSrcImage() -> UIImage? {
return ImageEditorCanvasView.loadSrcImage(model: model)
}
@objc
public class func loadSrcImage(model: ImageEditorModel) -> UIImage? {
let srcImageData: Data
do {
let srcImagePath = model.srcImagePath
let srcImageUrl = URL(fileURLWithPath: srcImagePath)
srcImageData = try Data(contentsOf: srcImageUrl)
} catch {
Logger.error("Couldn't load srcImage data.")
return nil
}
// We use this constructor so that we can specify the scale.
guard let srcImage = UIImage(data: srcImageData, scale: 1.0) else {
owsFailDebug("Couldn't load background image.")
return nil
}
return srcImage
}
// MARK: - Content
var contentLayerMap = [String: CALayer]()
internal func updateAllContent() {
AssertIsOnMainThread()
Logger.verbose("")
// Don't animate changes.
CATransaction.begin()
CATransaction.setDisableActions(true)
for layer in contentLayerMap.values {
layer.removeFromSuperlayer()
}
contentLayerMap.removeAll()
let viewSize = clipView.bounds.size
if viewSize.width > 0,
viewSize.height > 0 {
applyTransform()
updateImageLayer()
for item in model.items() {
guard let layer = ImageEditorCanvasView.layerForItem(item: item,
model: model,
viewSize: viewSize) else {
continue
}
contentView.layer.addSublayer(layer)
contentLayerMap[item.itemId] = layer
}
}
updateLayout()
// Force layout now.
setNeedsLayout()
layoutIfNeeded()
CATransaction.commit()
}
internal func updateContent(changedItemIds: [String]) {
AssertIsOnMainThread()
Logger.verbose("")
// Don't animate changes.
CATransaction.begin()
CATransaction.setDisableActions(true)
// Remove all changed items.
for itemId in changedItemIds {
if let layer = contentLayerMap[itemId] {
layer.removeFromSuperlayer()
}
contentLayerMap.removeValue(forKey: itemId)
}
let viewSize = clipView.bounds.size
if viewSize.width > 0,
viewSize.height > 0 {
applyTransform()
updateImageLayer()
// Create layers for inserted and updated items.
for itemId in changedItemIds {
guard let item = model.item(forId: itemId) else {
// Item was deleted.
continue
}
// Item was inserted or updated.
guard let layer = ImageEditorCanvasView.layerForItem(item: item,
model: model,
viewSize: viewSize) else {
continue
}
contentView.layer.addSublayer(layer)
contentLayerMap[item.itemId] = layer
}
}
CATransaction.commit()
}
private func applyTransform() {
Logger.verbose("")
let viewSize = clipView.bounds.size
contentView.layer.setAffineTransform(model.currentTransform().affineTransform(viewSize: viewSize))
}
private func updateImageLayer() {
let viewSize = clipView.bounds.size
ImageEditorCanvasView.updateImageLayer(imageLayer: imageLayer,
viewSize: viewSize,
imageSize: model.srcImageSizePixels,
transform: model.currentTransform())
}
public class func updateImageLayer(imageLayer: CALayer, viewSize: CGSize, imageSize: CGSize, transform: ImageEditorTransform) {
imageLayer.frame = imageFrame(forViewSize: viewSize, imageSize: imageSize, transform: transform)
}
public class func imageFrame(forViewSize viewSize: CGSize, imageSize: CGSize, transform: ImageEditorTransform) -> CGRect {
guard viewSize.width > 0, viewSize.height > 0 else {
owsFailDebug("Invalid viewSize")
return .zero
}
guard imageSize.width > 0, imageSize.height > 0 else {
owsFailDebug("Invalid imageSize")
return .zero
}
// We want to "fill" the output rect.
//
// Find the smallest possible image size that will completely fill the output size.
//
// NOTE: The "bounding box" of the output size that we need to fill needs to
// reflect the rotation.
let sinValue = abs(sin(transform.rotationRadians))
let cosValue = abs(cos(transform.rotationRadians))
let outputSize = CGSize(width: viewSize.width * cosValue + viewSize.height * sinValue,
height: viewSize.width * sinValue + viewSize.height * cosValue)
var width = outputSize.width
var height = outputSize.width * imageSize.height / imageSize.width
if height < outputSize.height {
width = outputSize.height * imageSize.width / imageSize.height
height = outputSize.height
}
let imageFrame = CGRect(x: (width - viewSize.width) * -0.5,
y: (height - viewSize.height) * -0.5,
width: width,
height: height)
Logger.verbose("viewSize: \(viewSize), imageFrame: \(imageFrame), ")
return imageFrame
}
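// Worked example (illustrative numbers, not from the tests): viewSize 200x300,
// imageSize 100x100, rotationRadians 0. The bounding box equals the view, the
// square image must scale to 300x300 to cover it, and the result is centered
// horizontally: imageFrame = (-50, 0, 300, 300). With a nonzero rotation the
// sin/cos terms grow the bounding box so the rotated image still covers the view.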
private class func imageLayerForItem(model: ImageEditorModel,
transform: ImageEditorTransform,
viewSize: CGSize) -> CALayer? {
guard let srcImage = loadSrcImage(model: model) else {
owsFailDebug("Could not load src image.")
return nil
}
let imageLayer = CALayer()
imageLayer.contents = srcImage.cgImage
imageLayer.contentsScale = srcImage.scale
updateImageLayer(imageLayer: imageLayer,
viewSize: viewSize,
imageSize: model.srcImageSizePixels,
transform: transform)
return imageLayer
}
private class func layerForItem(item: ImageEditorItem,
model: ImageEditorModel,
viewSize: CGSize) -> CALayer? {
AssertIsOnMainThread()
switch item.itemType {
case .test:
owsFailDebug("Unexpected test item.")
return nil
case .stroke:
guard let strokeItem = item as? ImageEditorStrokeItem else {
owsFailDebug("Item has unexpected type: \(type(of: item)).")
return nil
}
return strokeLayerForItem(item: strokeItem, viewSize: viewSize)
case .text:
guard let textItem = item as? ImageEditorTextItem else {
owsFailDebug("Item has unexpected type: \(type(of: item)).")
return nil
}
return textLayerForItem(item: textItem,
model: model,
viewSize: viewSize)
}
}
private class func strokeLayerForItem(item: ImageEditorStrokeItem,
viewSize: CGSize) -> CALayer? {
AssertIsOnMainThread()
let strokeWidth = ImageEditorStrokeItem.strokeWidth(forUnitStrokeWidth: item.unitStrokeWidth,
dstSize: viewSize)
let unitSamples = item.unitSamples
guard unitSamples.count > 0 else {
// Not an error; the stroke doesn't have enough samples to render yet.
return nil
}
let shapeLayer = CAShapeLayer()
shapeLayer.lineWidth = strokeWidth
shapeLayer.strokeColor = item.color.cgColor
shapeLayer.frame = CGRect(origin: .zero, size: viewSize)
let transformSampleToPoint = { (unitSample: CGPoint) -> CGPoint in
return CGPoint(x: viewSize.width * unitSample.x,
y: viewSize.height * unitSample.y)
}
// TODO: Use bezier curves to smooth stroke.
let bezierPath = UIBezierPath()
let points = applySmoothing(to: unitSamples.map { (unitSample) in
transformSampleToPoint(unitSample)
})
var previousForwardVector = CGPoint.zero
for index in 0..<points.count {
let point = points[index]
let forwardVector: CGPoint
if points.count <= 1 {
// Skip forward vectors.
forwardVector = .zero
} else if index == 0 {
// First sample.
let nextPoint = points[index + 1]
forwardVector = CGPointSubtract(nextPoint, point)
} else if index == points.count - 1 {
// Last sample.
let previousPoint = points[index - 1]
forwardVector = CGPointSubtract(point, previousPoint)
} else {
// Middle samples.
let previousPoint = points[index - 1]
let previousPointForwardVector = CGPointSubtract(point, previousPoint)
let nextPoint = points[index + 1]
let nextPointForwardVector = CGPointSubtract(nextPoint, point)
forwardVector = CGPointScale(CGPointAdd(previousPointForwardVector, nextPointForwardVector), 0.5)
}
if index == 0 {
// First sample.
bezierPath.move(to: point)
if points.count == 1 {
bezierPath.addLine(to: point)
}
} else {
let previousPoint = points[index - 1]
// We apply more than one kind of smoothing.
// This smoothing avoids rendering "angled segments"
// by drawing the stroke as a series of curves.
// We use bezier curves and infer the control points
// from the "next" and "prev" points.
//
// This factor controls how much we're smoothing.
//
// * 0.0 = No smoothing.
//
// TODO: Tune this variable once we have stroke input.
let controlPointFactor: CGFloat = 0.25
let controlPoint1 = CGPointAdd(previousPoint, CGPointScale(previousForwardVector, +controlPointFactor))
let controlPoint2 = CGPointAdd(point, CGPointScale(forwardVector, -controlPointFactor))
// We're using Cubic curves.
bezierPath.addCurve(to: point, controlPoint1: controlPoint1, controlPoint2: controlPoint2)
}
previousForwardVector = forwardVector
}
shapeLayer.path = bezierPath.cgPath
shapeLayer.fillColor = nil
shapeLayer.lineCap = kCALineCapRound
shapeLayer.lineJoin = kCALineJoinRound
return shapeLayer
}
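// Worked example of the control-point math above (illustrative numbers): for
// consecutive points previousPoint = (0, 0) and point = (10, 0), with
// previousForwardVector = forwardVector = (10, 0) and controlPointFactor 0.25,
// controlPoint1 = (0, 0) + 0.25 * (10, 0) = (2.5, 0) and
// controlPoint2 = (10, 0) - 0.25 * (10, 0) = (7.5, 0), so each cubic segment
// leaves the previous point and enters the next one along the stroke direction.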
private class func textLayerForItem(item: ImageEditorTextItem,
model: ImageEditorModel,
viewSize: CGSize) -> CALayer? {
AssertIsOnMainThread()
let imageFrame = self.imageFrame(forViewSize: viewSize, imageSize: model.srcImageSizePixels,
transform: model.currentTransform())
// We need to adjust the font size to reflect the current output scale,
// using the image width as reference.
let fontSize = item.font.pointSize * imageFrame.size.width / item.fontReferenceImageWidth
let layer = EditorTextLayer(itemId: item.itemId)
layer.string = item.text
layer.foregroundColor = item.color.cgColor
layer.font = CGFont(item.font.fontName as CFString)
layer.fontSize = fontSize
layer.isWrapped = true
layer.alignmentMode = kCAAlignmentCenter
// I don't think we need to enable allowsFontSubpixelQuantization
// or set truncationMode.
// This text needs to be rendered at a scale that reflects the screen scaling
// AND the item's scaling.
layer.contentsScale = UIScreen.main.scale * item.scaling
// TODO: Min with measured width.
let maxWidth = imageFrame.size.width * item.unitWidth
// let maxWidth = viewSize.width * item.unitWidth
let maxSize = CGSize(width: maxWidth, height: CGFloat.greatestFiniteMagnitude)
// TODO: Is there a more accurate way to measure text in a CATextLayer?
// CoreText?
let textBounds = (item.text as NSString).boundingRect(with: maxSize,
options: [
.usesLineFragmentOrigin,
.usesFontLeading
],
attributes: [
.font: item.font.withSize(fontSize)
],
context: nil)
Logger.verbose("---- maxWidth: \(maxWidth), viewSize: \(viewSize), item.unitWidth: \(item.unitWidth), textBounds: \(textBounds)")
let center = CGPoint(x: viewSize.width * item.unitCenter.x,
y: viewSize.height * item.unitCenter.y)
let layerSize = CGSizeCeil(textBounds.size)
layer.frame = CGRect(origin: CGPoint(x: center.x - layerSize.width * 0.5,
y: center.y - layerSize.height * 0.5),
size: layerSize)
let transform = CGAffineTransform.identity.scaledBy(x: item.scaling, y: item.scaling).rotated(by: item.rotationRadians)
layer.setAffineTransform(transform)
return layer
}
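// Worked example of the font scaling above (illustrative numbers): a 24pt font
// captured when fontReferenceImageWidth was 375 renders at 24 * 750 / 375 = 48pt
// once imageFrame is 750pt wide, so text keeps its apparent size relative to
// the image as the canvas is scaled.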
// We apply more than one kind of smoothing.
//
// This (simple) smoothing reduces jitter from the touch sensor.
private class func applySmoothing(to points: [CGPoint]) -> [CGPoint] {
AssertIsOnMainThread()
var result = [CGPoint]()
for index in 0..<points.count {
let point = points[index]
if index == 0 {
// First sample.
result.append(point)
} else if index == points.count - 1 {
// Last sample.
result.append(point)
} else {
// Middle samples.
let lastPoint = points[index - 1]
let nextPoint = points[index + 1]
let alpha: CGFloat = 0.1
let smoothedPoint = CGPointAdd(CGPointScale(point, 1.0 - 2.0 * alpha),
CGPointAdd(CGPointScale(lastPoint, alpha),
CGPointScale(nextPoint, alpha)))
result.append(smoothedPoint)
}
}
return result
}
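// Worked example: with alpha = 0.1, each middle sample becomes
// 0.8 * point + 0.1 * lastPoint + 0.1 * nextPoint. For the samples
// [(0, 0), (10, 0), (0, 0)] the middle sample moves to (8, 0); this is a
// simple low-pass filter over the touch samples.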
// MARK: - Coordinates
public func locationUnit(forGestureRecognizer gestureRecognizer: UIGestureRecognizer,
transform: ImageEditorTransform) -> CGPoint {
return ImageEditorCanvasView.locationUnit(forGestureRecognizer: gestureRecognizer,
view: self.clipView,
transform: transform)
}
public class func locationUnit(forGestureRecognizer gestureRecognizer: UIGestureRecognizer,
view: UIView,
transform: ImageEditorTransform) -> CGPoint {
let locationInView = gestureRecognizer.location(in: view)
return locationUnit(forLocationInView: locationInView,
viewSize: view.bounds.size,
transform: transform)
}
public func locationUnit(forLocationInView locationInView: CGPoint,
transform: ImageEditorTransform) -> CGPoint {
let viewSize = self.clipView.bounds.size
return ImageEditorCanvasView.locationUnit(forLocationInView: locationInView,
viewSize: viewSize,
transform: transform)
}
public class func locationUnit(forLocationInView locationInView: CGPoint,
viewSize: CGSize,
transform: ImageEditorTransform) -> CGPoint {
let affineTransformStart = transform.affineTransform(viewSize: viewSize)
let locationInContent = locationInView.applyingInverse(affineTransformStart)
let locationUnit = locationInContent.toUnitCoordinates(viewSize: viewSize, shouldClamp: false)
return locationUnit
}
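// Worked example: with the identity transform and a 200x300 view, a touch at
// (100, 150) inverts to the same point and maps to unit coordinates (0.5, 0.5).
// Since shouldClamp is false, touches outside the view yield unit values
// outside [0, 1].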
// MARK: - Actions
// Returns nil on error.
//
// We render using the transform parameter, not the transform from the model.
// This allows this same method to be used for rendering "previews" for the
// crop tool and the final output.
@objc
public class func renderForOutput(model: ImageEditorModel, transform: ImageEditorTransform) -> UIImage? {
// TODO: Do we want to render off the main thread?
AssertIsOnMainThread()
// Render output at same size as source image.
let dstSizePixels = transform.outputSizePixels
let dstScale: CGFloat = 1.0 // The size is specified in pixels, not in points.
// TODO: Reflect crop rectangle.
let viewSize = dstSizePixels
let hasAlpha = NSData.hasAlpha(forValidImageFilePath: model.srcImagePath)
// We use an UIImageView + UIView.renderAsImage() instead of a CGGraphicsContext
// Because CALayer.renderInContext() doesn't honor CALayer properties like frame,
// transform, etc.
let view = UIView()
view.backgroundColor = UIColor.clear
view.isOpaque = false
view.frame = CGRect(origin: .zero, size: viewSize)
// Rendering a UIView to an image will not honor the root image's layer transform.
// We therefore use a subview.
let contentView = UIView()
contentView.backgroundColor = UIColor.clear
contentView.isOpaque = false
contentView.frame = CGRect(origin: .zero, size: viewSize)
view.addSubview(contentView)
CATransaction.begin()
CATransaction.setDisableActions(true)
contentView.layer.setAffineTransform(transform.affineTransform(viewSize: viewSize))
guard let imageLayer = imageLayerForItem(model: model,
transform: transform,
viewSize: viewSize) else {
owsFailDebug("Could not load src image.")
return nil
}
// TODO:
imageLayer.contentsScale = dstScale * transform.scaling
contentView.layer.addSublayer(imageLayer)
for item in model.items() {
guard let layer = layerForItem(item: item,
model: model,
viewSize: viewSize) else {
Logger.error("Couldn't create layer for item.")
continue
}
// TODO: Should we do this for all layers?
layer.contentsScale = dstScale * transform.scaling * item.outputScale()
contentView.layer.addSublayer(layer)
}
CATransaction.commit()
let image = view.renderAsImage(opaque: !hasAlpha, scale: dstScale)
return image
}
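// Usage sketch: AttachmentApprovalViewController (above) renders the final
// output with the model's own transform,
//   ImageEditorCanvasView.renderForOutput(model: imageEditorModel,
//                                         transform: imageEditorModel.currentTransform())
// while the crop tool can pass a candidate transform to render a preview.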
// MARK: -
public func textLayer(forLocation point: CGPoint) -> EditorTextLayer? {
guard let sublayers = contentView.layer.sublayers else {
return nil
}
for layer in sublayers {
guard let textLayer = layer as? EditorTextLayer else {
continue
}
if textLayer.hitTest(point) != nil {
return textLayer
}
}
return nil
}
}
// MARK: -
extension ImageEditorCanvasView: ImageEditorModelObserver {
public func imageEditorModelDidChange(before: ImageEditorContents,
after: ImageEditorContents) {
updateAllContent()
}
public func imageEditorModelDidChange(changedItemIds: [String]) {
updateContent(changedItemIds: changedItemIds)
}
}

SignalMessaging/Views/ImageEditor/ImageEditorContents.swift

@@ -0,0 +1,77 @@
//
// Copyright (c) 2019 Open Whisper Systems. All rights reserved.
//
import UIKit
// ImageEditorContents represents a snapshot of canvas
// state.
//
// Instances of ImageEditorContents should be treated
// as immutable, once configured.
public class ImageEditorContents: NSObject {
public typealias ItemMapType = OrderedDictionary<ImageEditorItem>
// This represents the current state of each item,
// a mapping of [itemId : item].
var itemMap = ItemMapType()
// Used to create an initial, empty instance of this class.
public override init() {
}
// Used to clone copies of instances of this class.
public init(itemMap: ItemMapType) {
self.itemMap = itemMap
}
// Since the contents are immutable, we only modify copies
// made with this method.
public func clone() -> ImageEditorContents {
return ImageEditorContents(itemMap: itemMap.clone())
}
@objc
public func item(forId itemId: String) -> ImageEditorItem? {
return itemMap.value(forKey: itemId)
}
@objc
public func append(item: ImageEditorItem) {
Logger.verbose("\(item.itemId)")
itemMap.append(key: item.itemId, value: item)
}
@objc
public func replace(item: ImageEditorItem) {
Logger.verbose("\(item.itemId)")
itemMap.replace(key: item.itemId, value: item)
}
@objc
public func remove(item: ImageEditorItem) {
Logger.verbose("\(item.itemId)")
itemMap.remove(key: item.itemId)
}
@objc
public func remove(itemId: String) {
Logger.verbose("\(itemId)")
itemMap.remove(key: itemId)
}
@objc
public func itemCount() -> Int {
return itemMap.count
}
@objc
public func items() -> [ImageEditorItem] {
return itemMap.orderedValues()
}
}
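A usage sketch of the snapshot/clone pattern (ImageEditorItem(itemType: .test) is borrowed from ImageEditorTest above):

let contents = ImageEditorContents()
let snapshot = contents.clone()
contents.append(item: ImageEditorItem(itemType: .test))
// contents.itemCount() == 1, snapshot.itemCount() == 0: each undo/redo step
// can retain its own immutable copy of the item map.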

SignalMessaging/Views/ImageEditor/ImageEditorCropViewController.swift

@@ -0,0 +1,649 @@
//
// Copyright (c) 2019 Open Whisper Systems. All rights reserved.
//
import UIKit
public protocol ImageEditorCropViewControllerDelegate: class {
func cropDidComplete(transform: ImageEditorTransform)
func cropDidCancel()
}
// MARK: -
// A view controller for cropping and rotating an image in the image editor.
class ImageEditorCropViewController: OWSViewController {
private weak var delegate: ImageEditorCropViewControllerDelegate?
private let model: ImageEditorModel
private let srcImage: UIImage
private let previewImage: UIImage
private var transform: ImageEditorTransform
public let contentView = OWSLayerView()
public let clipView = OWSLayerView()
private var imageLayer = CALayer()
private enum CropRegion {
// The sides of the crop region.
case left, right, top, bottom
// The corners of the crop region.
case topLeft, topRight, bottomLeft, bottomRight
}
private class CropCornerView: UIView {
let cropRegion: CropRegion
init(cropRegion: CropRegion) {
self.cropRegion = cropRegion
super.init(frame: .zero)
}
@available(*, unavailable, message: "use other init() instead.")
required public init?(coder aDecoder: NSCoder) {
notImplemented()
}
}
private let cropView = UIView()
private let cropCornerViews: [CropCornerView] = [
CropCornerView(cropRegion: .topLeft),
CropCornerView(cropRegion: .topRight),
CropCornerView(cropRegion: .bottomLeft),
CropCornerView(cropRegion: .bottomRight)
]
init(delegate: ImageEditorCropViewControllerDelegate,
model: ImageEditorModel,
srcImage: UIImage,
previewImage: UIImage) {
self.delegate = delegate
self.model = model
self.srcImage = srcImage
self.previewImage = previewImage
transform = model.currentTransform()
super.init(nibName: nil, bundle: nil)
}
@available(*, unavailable, message: "use other init() instead.")
required public init?(coder aDecoder: NSCoder) {
notImplemented()
}
// MARK: - View Lifecycle
override func loadView() {
self.view = UIView()
if (UIAccessibilityIsReduceTransparencyEnabled()) {
self.view.backgroundColor = UIColor(white: 0.5, alpha: 0.5)
} else {
let alpha = OWSNavigationBar.backgroundBlurMutingFactor
self.view.backgroundColor = UIColor(white: 0.5, alpha: alpha)
let blurEffectView = UIVisualEffectView(effect: Theme.barBlurEffect)
blurEffectView.layer.zPosition = -1
self.view.addSubview(blurEffectView)
blurEffectView.autoPinEdgesToSuperviewEdges()
}
let stackView = UIStackView()
stackView.axis = .vertical
stackView.alignment = .fill
stackView.spacing = 16
stackView.layoutMargins = UIEdgeInsets(top: 16, left: 20, bottom: 16, right: 20)
stackView.isLayoutMarginsRelativeArrangement = true
self.view.addSubview(stackView)
stackView.ows_autoPinToSuperviewEdges()
navigationItem.leftBarButtonItem = UIBarButtonItem(barButtonSystemItem: .stop,
target: self,
action: #selector(didTapBackButton))
let wrapperView = UIView.container()
wrapperView.backgroundColor = .clear
wrapperView.isOpaque = false
stackView.addArrangedSubview(wrapperView)
// TODO: We could mask the clipped region with a semi-transparent overlay like WA.
clipView.clipsToBounds = true
clipView.backgroundColor = .clear
clipView.isOpaque = false
clipView.layoutCallback = { [weak self] (_) in
guard let strongSelf = self else {
return
}
strongSelf.updateCropViewLayout()
}
wrapperView.addSubview(clipView)
imageLayer.contents = previewImage.cgImage
imageLayer.contentsScale = previewImage.scale
contentView.backgroundColor = .clear
contentView.isOpaque = false
contentView.layer.addSublayer(imageLayer)
contentView.layoutCallback = { [weak self] (_) in
guard let strongSelf = self else {
return
}
strongSelf.updateContent()
}
clipView.addSubview(contentView)
contentView.ows_autoPinToSuperviewEdges()
let rotate90Button = OWSButton()
rotate90Button.setTitle(NSLocalizedString("IMAGE_EDITOR_ROTATE_90_BUTTON", comment: "Label for button that rotates image 90 degrees."),
for: .normal)
rotate90Button.block = { [weak self] in
self?.rotate90ButtonPressed()
}
let rotate45Button = OWSButton()
rotate45Button.setTitle(NSLocalizedString("IMAGE_EDITOR_ROTATE_45_BUTTON", comment: "Label for button that rotates image 45 degrees."),
for: .normal)
rotate45Button.block = { [weak self] in
self?.rotate45ButtonPressed()
}
let resetButton = OWSButton()
resetButton.setTitle(NSLocalizedString("IMAGE_EDITOR_RESET_BUTTON", comment: "Label for button that resets crop & rotation state."),
for: .normal)
resetButton.block = { [weak self] in
self?.resetButtonPressed()
}
let zoom2xButton = OWSButton()
zoom2xButton.setTitle("Zoom 2x",
for: .normal)
zoom2xButton.block = { [weak self] in
self?.zoom2xButtonPressed()
}
cropView.setContentHuggingLow()
cropView.setCompressionResistanceLow()
view.addSubview(cropView)
for cropCornerView in cropCornerViews {
cropView.addSubview(cropCornerView)
switch cropCornerView.cropRegion {
case .topLeft, .bottomLeft:
cropCornerView.autoPinEdge(toSuperviewEdge: .left)
case .topRight, .bottomRight:
cropCornerView.autoPinEdge(toSuperviewEdge: .right)
default:
owsFailDebug("Invalid crop region: \(cropRegion)")
}
switch cropCornerView.cropRegion {
case .topLeft, .topRight:
cropCornerView.autoPinEdge(toSuperviewEdge: .top)
case .bottomLeft, .bottomRight:
cropCornerView.autoPinEdge(toSuperviewEdge: .bottom)
default:
owsFailDebug("Invalid crop region: \(cropRegion)")
}
}
let footer = UIStackView(arrangedSubviews: [rotate90Button, rotate45Button, resetButton, zoom2xButton])
footer.axis = .horizontal
footer.spacing = 16
footer.backgroundColor = .clear
footer.isOpaque = false
stackView.addArrangedSubview(footer)
updateClipViewLayout()
configureGestures()
}
private static let desiredCornerSize: CGFloat = 30
private static let minCropSize: CGFloat = desiredCornerSize * 2
private var cornerSize = CGSize.zero
private var clipViewConstraints = [NSLayoutConstraint]()
private func updateClipViewLayout() {
NSLayoutConstraint.deactivate(clipViewConstraints)
clipViewConstraints = ImageEditorCanvasView.updateContentLayout(transform: transform,
contentView: clipView)
clipView.superview?.setNeedsLayout()
clipView.superview?.layoutIfNeeded()
updateCropViewLayout()
}
private var cropViewConstraints = [NSLayoutConstraint]()
private func updateCropViewLayout() {
NSLayoutConstraint.deactivate(cropViewConstraints)
cropViewConstraints.removeAll()
// TODO: Tune the size.
let cornerSize = CGSize(width: min(clipView.width() * 0.5, ImageEditorCropViewController.desiredCornerSize),
height: min(clipView.height() * 0.5, ImageEditorCropViewController.desiredCornerSize))
self.cornerSize = cornerSize
for cropCornerView in cropCornerViews {
cropViewConstraints.append(contentsOf: cropCornerView.autoSetDimensions(to: cornerSize))
cropCornerView.addRedBorder()
cropView.addRedBorder()
}
if !isCropGestureActive {
cropView.frame = view.convert(clipView.bounds, from: clipView)
}
}
internal func updateContent() {
AssertIsOnMainThread()
Logger.verbose("")
let viewSize = contentView.bounds.size
guard viewSize.width > 0,
viewSize.height > 0 else {
return
}
updateTransform(transform)
}
private func updateTransform(_ transform: ImageEditorTransform) {
self.transform = transform
// Don't animate changes.
CATransaction.begin()
CATransaction.setDisableActions(true)
applyTransform()
updateClipViewLayout()
updateImageLayer()
CATransaction.commit()
}
private func applyTransform() {
Logger.verbose("")
let viewSize = contentView.bounds.size
// contentView.layer.anchorPoint = .zero
contentView.layer.setAffineTransform(transform.affineTransform(viewSize: viewSize))
}
private func updateImageLayer() {
let viewSize = contentView.bounds.size
ImageEditorCanvasView.updateImageLayer(imageLayer: imageLayer, viewSize: viewSize, imageSize: model.srcImageSizePixels, transform: transform)
}
private func configureGestures() {
self.view.isUserInteractionEnabled = true
let pinchGestureRecognizer = ImageEditorPinchGestureRecognizer(target: self, action: #selector(handlePinchGesture(_:)))
pinchGestureRecognizer.referenceView = self.clipView
view.addGestureRecognizer(pinchGestureRecognizer)
let panGestureRecognizer = ImageEditorPanGestureRecognizer(target: self, action: #selector(handlePanGesture(_:)))
panGestureRecognizer.maximumNumberOfTouches = 1
panGestureRecognizer.referenceView = self.clipView
view.addGestureRecognizer(panGestureRecognizer)
}
override public var canBecomeFirstResponder: Bool {
return true
}
override func viewWillAppear(_ animated: Bool) {
super.viewWillAppear(animated)
_ = self.becomeFirstResponder()
}
override func viewDidAppear(_ animated: Bool) {
super.viewDidAppear(animated)
_ = self.becomeFirstResponder()
}
// MARK: - Pinch Gesture
@objc
public func handlePinchGesture(_ gestureRecognizer: ImageEditorPinchGestureRecognizer) {
AssertIsOnMainThread()
Logger.verbose("")
// We could undo an in-progress pinch if the gesture is cancelled, but it seems gratuitous.
switch gestureRecognizer.state {
case .began:
gestureStartTransform = transform
case .changed, .ended:
guard let gestureStartTransform = gestureStartTransform else {
owsFailDebug("Missing pinchTransform.")
return
}
let locationUnitStart = self.locationUnit(forLocationInView: gestureRecognizer.pinchStateStart.centroid,
transform: gestureStartTransform)
let locationUnitLast = self.locationUnit(forLocationInView: gestureRecognizer.pinchStateLast.centroid,
transform: gestureStartTransform)
let locationUnitDelta = CGPointSubtract(locationUnitLast, locationUnitStart)
let newUnitTranslation = CGPointAdd(gestureStartTransform.unitTranslation, locationUnitDelta)
let newRotationRadians = gestureStartTransform.rotationRadians + gestureRecognizer.pinchStateLast.angleRadians - gestureRecognizer.pinchStateStart.angleRadians
// NOTE: We use max(1, ...) to avoid divide-by-zero.
//
// TODO: The clamp limits are wrong.
let newScaling = CGFloatClamp(gestureStartTransform.scaling * gestureRecognizer.pinchStateLast.distance / max(1.0, gestureRecognizer.pinchStateStart.distance),
ImageEditorTextItem.kMinScaling,
ImageEditorTextItem.kMaxScaling)
updateTransform(ImageEditorTransform(outputSizePixels: gestureStartTransform.outputSizePixels,
unitTranslation: newUnitTranslation,
rotationRadians: newRotationRadians,
scaling: newScaling).normalize())
default:
break
}
}
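// In short, the pinch maps three touch-state deltas onto the start transform:
// centroid displacement becomes translation, angle change becomes rotation, and
// the distance ratio becomes scaling. Illustrative, self-contained sketch of that
// arithmetic (plain CGPoint math in place of the project's CGPointAdd/
// CGPointSubtract/CGFloatClamp helpers; the 0.5...4.0 clamp mirrors
// ImageEditorTextItem.kMinScaling/kMaxScaling, which the TODO above notes are wrong):
import CoreGraphics
func pinchedTransformComponents(startUnitTranslation: CGPoint,
                                startRotationRadians: CGFloat,
                                startScaling: CGFloat,
                                unitCentroidStart: CGPoint,
                                unitCentroidLast: CGPoint,
                                angleStart: CGFloat, angleLast: CGFloat,
                                distanceStart: CGFloat, distanceLast: CGFloat)
    -> (unitTranslation: CGPoint, rotationRadians: CGFloat, scaling: CGFloat) {
    // Translation: shift by the centroid's displacement in unit coordinates.
    let unitTranslation = CGPoint(x: startUnitTranslation.x + unitCentroidLast.x - unitCentroidStart.x,
                                  y: startUnitTranslation.y + unitCentroidLast.y - unitCentroidStart.y)
    // Rotation: add the change in the angle between the two touches.
    let rotationRadians = startRotationRadians + angleLast - angleStart
    // Scaling: multiply by the ratio of touch distances; max(1, ...) avoids
    // divide-by-zero, and the result is clamped to sane limits.
    let rawScaling = startScaling * distanceLast / max(1, distanceStart)
    let scaling = min(max(rawScaling, 0.5), 4.0)
    return (unitTranslation, rotationRadians, scaling)
}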
// MARK: - Pan Gesture
private var gestureStartTransform: ImageEditorTransform?
private var panCropRegion: CropRegion?
private var isCropGestureActive: Bool {
return panCropRegion != nil
}
@objc
public func handlePanGesture(_ gestureRecognizer: ImageEditorPanGestureRecognizer) {
AssertIsOnMainThread()
Logger.verbose("")
// We could undo an in-progress pan if the gesture is cancelled, but it seems gratuitous.
switch gestureRecognizer.state {
case .began:
Logger.verbose("began: \(transform.unitTranslation)")
gestureStartTransform = transform
// Pans that start near the crop rectangle should be treated as crop gestures.
panCropRegion = cropRegion(forGestureRecognizer: gestureRecognizer)
case .changed, .ended:
if let panCropRegion = panCropRegion {
// Crop pan gesture
handleCropPanGesture(gestureRecognizer, panCropRegion: panCropRegion)
} else {
handleNormalPanGesture(gestureRecognizer)
}
default:
break
}
switch gestureRecognizer.state {
case .ended, .failed, .cancelled, .possible:
if panCropRegion != nil {
panCropRegion = nil
// Don't animate changes.
CATransaction.begin()
CATransaction.setDisableActions(true)
updateCropViewLayout()
CATransaction.commit()
}
default:
break
}
}
private func handleCropPanGesture(_ gestureRecognizer: ImageEditorPanGestureRecognizer,
panCropRegion: CropRegion) {
AssertIsOnMainThread()
Logger.verbose("")
guard let locationStart = gestureRecognizer.locationStart else {
owsFailDebug("Missing locationStart.")
return
}
let locationNow = gestureRecognizer.location(in: self.clipView)
// Crop pan gesture
let locationDelta = CGPointSubtract(locationNow, locationStart)
let cropRectangleStart = clipView.bounds
var cropRectangleNow = cropRectangleStart
let maxDeltaX = cropRectangleNow.size.width - cornerSize.width * 2
let maxDeltaY = cropRectangleNow.size.height - cornerSize.height * 2
switch panCropRegion {
case .left, .topLeft, .bottomLeft:
let delta = min(maxDeltaX, max(0, locationDelta.x))
cropRectangleNow.origin.x += delta
cropRectangleNow.size.width -= delta
case .right, .topRight, .bottomRight:
let delta = min(maxDeltaX, max(0, -locationDelta.x))
cropRectangleNow.size.width -= delta
default:
break
}
switch panCropRegion {
case .top, .topLeft, .topRight:
let delta = min(maxDeltaY, max(0, locationDelta.y))
cropRectangleNow.origin.y += delta
cropRectangleNow.size.height -= delta
case .bottom, .bottomLeft, .bottomRight:
let delta = min(maxDeltaY, max(0, -locationDelta.y))
cropRectangleNow.size.height -= delta
default:
break
}
cropView.frame = view.convert(cropRectangleNow, from: clipView)
switch gestureRecognizer.state {
case .ended:
crop(toRect: cropRectangleNow)
default:
break
}
}
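// The edge arithmetic above, isolated: the crop rect starts as the full clip
// bounds, so an edge can only move inward, and never past the opposite edge's
// corner handles. Illustrative sketch for the horizontal axis only:
import CoreGraphics
func cropRectAfterHorizontalPan(start: CGRect,
                                deltaX: CGFloat,
                                cornerWidth: CGFloat,
                                draggingLeftEdge: Bool) -> CGRect {
    var rect = start
    // Keep the crop at least two corner handles wide.
    let maxDelta = rect.width - cornerWidth * 2
    if draggingLeftEdge {
        // A rightward drag shrinks the rect from the left.
        let delta = min(maxDelta, max(0, deltaX))
        rect.origin.x += delta
        rect.size.width -= delta
    } else {
        // A leftward drag shrinks the rect from the right.
        let delta = min(maxDelta, max(0, -deltaX))
        rect.size.width -= delta
    }
    return rect
}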
private func crop(toRect cropRect: CGRect) {
let viewBounds = clipView.bounds
// NOTE: We round the output size, which can cause
// the crop to be slightly not WYSIWYG.
let croppedOutputSizePixels = CGSizeRound(CGSize(width: transform.outputSizePixels.width * cropRect.width / clipView.width(),
height: transform.outputSizePixels.height * cropRect.height / clipView.height()))
// We need to update the transform's unitTranslation and scaling properties
// to reflect the crop.
//
// Cropping involves changing the output size AND aspect ratio. The output aspect ratio
// has complicated effects on the rendering behavior of the image background, since the
// default rendering size of the image is an "aspect fill" of the output bounds.
// Therefore, the simplest and most reliable way to update the scaling is to measure
// the difference between the "before crop"/"after crop" image frames and adjust the
// scaling accordingly.
let naiveTransform = ImageEditorTransform(outputSizePixels: croppedOutputSizePixels,
unitTranslation: transform.unitTranslation,
rotationRadians: transform.rotationRadians,
scaling: transform.scaling)
let naiveImageFrameOld = ImageEditorCanvasView.imageFrame(forViewSize: transform.outputSizePixels, imageSize: model.srcImageSizePixels, transform: naiveTransform)
let naiveImageFrameNew = ImageEditorCanvasView.imageFrame(forViewSize: croppedOutputSizePixels, imageSize: model.srcImageSizePixels, transform: naiveTransform)
let scalingDeltaX = naiveImageFrameNew.width / naiveImageFrameOld.width
let scalingDeltaY = naiveImageFrameNew.height / naiveImageFrameOld.height
// scalingDeltaX and scalingDeltaY should only differ by rounding error.
let scalingDelta = (scalingDeltaX + scalingDeltaY) * 0.5
let scaling = transform.scaling / scalingDelta
// We also need to update the transform's translation, to ensure that the correct
// content (background image and items) ends up in the crop region.
//
// To do this, we use the center of the image content. Due to
// the scaling and rotation of the image content, the center is
// far simpler to reason about than the origin.
let oldAffineTransform = transform.affineTransform(viewSize: viewBounds.size)
// We determine the pre-crop render frame for the image.
let oldImageFrameCanvas = ImageEditorCanvasView.imageFrame(forViewSize: viewBounds.size, imageSize: model.srcImageSizePixels, transform: transform)
// We project it into pre-crop view coordinates (the coordinate
// system of the crop rectangle). Note that a CALayer's transform
// is applied relative to its "anchor point", the center of the
// layer, so we translate before and after the projection to be consistent.
let oldImageCenterView = oldImageFrameCanvas.center.minus(viewBounds.center).applying(oldAffineTransform).plus(viewBounds.center)
// We transform the "image content center" into the unit coordinates
// of the crop rectangle.
let newImageCenterUnit = oldImageCenterView.toUnitCoordinates(viewBounds: cropRect, shouldClamp: false)
// The transform's "unit translation" represents a deviation from
// the center of the output canvas, so we need to subtract the
// unit midpoint.
let unitTranslation = newImageCenterUnit.minus(CGPoint.unitMidpoint)
// Clear the panCropRegion now so that the crop bounds are updated
// immediately.
panCropRegion = nil
updateTransform(ImageEditorTransform(outputSizePixels: croppedOutputSizePixels,
unitTranslation: unitTranslation,
rotationRadians: transform.rotationRadians,
scaling: scaling).normalize())
}
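// Condensed sketch of the scaling correction above. aspectFillFrame(_:in:) is a
// hypothetical stand-in for ImageEditorCanvasView.imageFrame(forViewSize:imageSize:transform:),
// assuming the default render of the image is an "aspect fill" of the output bounds:
import CoreGraphics
func aspectFillFrame(imageSize: CGSize, in bounds: CGSize) -> CGRect {
    let scale = max(bounds.width / imageSize.width, bounds.height / imageSize.height)
    let size = CGSize(width: imageSize.width * scale, height: imageSize.height * scale)
    return CGRect(x: (bounds.width - size.width) * 0.5,
                  y: (bounds.height - size.height) * 0.5,
                  width: size.width,
                  height: size.height)
}
// Measure the image's render frame before and after the output size changes,
// and cancel the difference out of the transform's scaling so the visible
// image size is unchanged by the crop.
func correctedScaling(oldScaling: CGFloat,
                      imageSize: CGSize,
                      oldOutputSize: CGSize,
                      newOutputSize: CGSize) -> CGFloat {
    let oldFrame = aspectFillFrame(imageSize: imageSize, in: oldOutputSize)
    let newFrame = aspectFillFrame(imageSize: imageSize, in: newOutputSize)
    // The width and height ratios should agree up to rounding error.
    let scalingDelta = (newFrame.width / oldFrame.width + newFrame.height / oldFrame.height) * 0.5
    return oldScaling / scalingDelta
}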
private func handleNormalPanGesture(_ gestureRecognizer: ImageEditorPanGestureRecognizer) {
AssertIsOnMainThread()
guard let gestureStartTransform = gestureStartTransform else {
owsFailDebug("Missing pinchTransform.")
return
}
guard let locationStart = gestureRecognizer.locationStart else {
owsFailDebug("Missing locationStart.")
return
}
let locationNow = gestureRecognizer.location(in: self.clipView)
let locationUnitStart = self.locationUnit(forLocationInView: locationStart, transform: gestureStartTransform)
let locationUnitNow = self.locationUnit(forLocationInView: locationNow, transform: gestureStartTransform)
let locationUnitDelta = CGPointSubtract(locationUnitNow, locationUnitStart)
let newUnitTranslation = CGPointAdd(gestureStartTransform.unitTranslation, locationUnitDelta)
updateTransform(ImageEditorTransform(outputSizePixels: gestureStartTransform.outputSizePixels,
unitTranslation: newUnitTranslation,
rotationRadians: gestureStartTransform.rotationRadians,
scaling: gestureStartTransform.scaling).normalize())
}
private func cropRegion(forGestureRecognizer gestureRecognizer: ImageEditorPanGestureRecognizer) -> CropRegion? {
guard let location = gestureRecognizer.locationStart else {
owsFailDebug("Missing locationStart.")
return nil
}
let tolerance: CGFloat = ImageEditorCropViewController.desiredCornerSize * 2.0
let left = tolerance
let top = tolerance
let right = clipView.width() - tolerance
let bottom = clipView.height() - tolerance
// We could ignore touches far outside the crop rectangle.
if location.x < left {
if location.y < top {
return .topLeft
} else if location.y > bottom {
return .bottomLeft
} else {
return .left
}
} else if location.x > right {
if location.y < top {
return .topRight
} else if location.y > bottom {
return .bottomRight
} else {
return .right
}
} else {
if location.y < top {
return .top
} else if location.y > bottom {
return .bottom
} else {
return nil
}
}
}
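// The classifier above is a 3x3 grid keyed on two thresholds per axis.
// Equivalent standalone sketch (CropRegion re-declared locally; the project's
// enum is assumed to have these cases, as exercised above):
import CoreGraphics
enum CropRegion { case topLeft, top, topRight, left, right, bottomLeft, bottom, bottomRight }
func cropRegion(for location: CGPoint, bounds: CGRect, tolerance: CGFloat) -> CropRegion? {
    let nearLeft = location.x < bounds.minX + tolerance
    let nearRight = location.x > bounds.maxX - tolerance
    let nearTop = location.y < bounds.minY + tolerance
    let nearBottom = location.y > bounds.maxY - tolerance
    switch (nearLeft, nearRight, nearTop, nearBottom) {
    case (true, _, true, _): return .topLeft
    case (true, _, _, true): return .bottomLeft
    case (true, _, _, _): return .left
    case (_, true, true, _): return .topRight
    case (_, true, _, true): return .bottomRight
    case (_, true, _, _): return .right
    case (_, _, true, _): return .top
    case (_, _, _, true): return .bottom
    default: return nil
    }
}
// e.g. with 300x300 bounds and a 48pt tolerance, a touch at (10, 10)
// classifies as .topLeft and a touch at (150, 290) as .bottom.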
// MARK: - Coordinates
private func locationUnit(forGestureRecognizer gestureRecognizer: UIGestureRecognizer,
transform: ImageEditorTransform) -> CGPoint {
return ImageEditorCanvasView.locationUnit(forGestureRecognizer: gestureRecognizer, view: clipView, transform: transform)
}
private func locationUnit(forLocationInView locationInView: CGPoint,
transform: ImageEditorTransform) -> CGPoint {
return ImageEditorCanvasView.locationUnit(forLocationInView: locationInView, viewSize: clipView.bounds.size, transform: transform)
}
// MARK: - Events
@objc public func didTapBackButton() {
completeAndDismiss()
}
private func completeAndDismiss() {
// TODO:
self.delegate?.cropDidComplete(transform: transform)
self.dismiss(animated: true) {
// Do nothing.
}
}
@objc public func rotate90ButtonPressed() {
rotateButtonPressed(angleRadians: CGFloat.pi * 0.5)
}
@objc public func rotate45ButtonPressed() {
rotateButtonPressed(angleRadians: CGFloat.pi * 0.25)
}
private func rotateButtonPressed(angleRadians: CGFloat) {
// Invert width and height.
let outputSizePixels = CGSize(width: transform.outputSizePixels.height,
height: transform.outputSizePixels.width)
let unitTranslation = transform.unitTranslation
let rotationRadians = transform.rotationRadians + angleRadians
let scaling = transform.scaling
updateTransform(ImageEditorTransform(outputSizePixels: outputSizePixels,
unitTranslation: unitTranslation,
rotationRadians: rotationRadians,
scaling: scaling).normalize())
}
@objc public func zoom2xButtonPressed() {
let outputSizePixels = transform.outputSizePixels
let unitTranslation = transform.unitTranslation
let rotationRadians = transform.rotationRadians
let scaling = transform.scaling * 2.0
updateTransform(ImageEditorTransform(outputSizePixels: outputSizePixels,
unitTranslation: unitTranslation,
rotationRadians: rotationRadians,
scaling: scaling).normalize())
}
@objc public func resetButtonPressed() {
updateTransform(ImageEditorTransform.defaultTransform(srcImageSizePixels: model.srcImageSizePixels))
}
}

View File

@ -2,208 +2,4 @@
// Copyright (c) 2019 Open Whisper Systems. All rights reserved.
//
import UIKit
public class ImageEditorGestureRecognizer: UIGestureRecognizer {
@objc
public var shouldAllowOutsideView = true
@objc
public weak var canvasView: UIView?
@objc
public var startLocationInView: CGPoint = .zero
@objc
public override func canPrevent(_ preventedGestureRecognizer: UIGestureRecognizer) -> Bool {
return false
}
@objc
public override func canBePrevented(by: UIGestureRecognizer) -> Bool {
return false
}
@objc
public override func shouldRequireFailure(of: UIGestureRecognizer) -> Bool {
return false
}
@objc
public override func shouldBeRequiredToFail(by: UIGestureRecognizer) -> Bool {
return true
}
// MARK: - Touch Handling
@objc
public override func touchesBegan(_ touches: Set<UITouch>, with event: UIEvent) {
super.touchesBegan(touches, with: event)
if state == .possible,
touchType(for: touches, with: event) == .valid {
// If a gesture starts with a valid touch, begin stroke.
state = .began
startLocationInView = .zero
guard let view = view else {
owsFailDebug("Missing view.")
return
}
guard let touch = touches.randomElement() else {
owsFailDebug("Missing touch.")
return
}
startLocationInView = touch.location(in: view)
} else {
state = .failed
}
}
@objc
public override func touchesMoved(_ touches: Set<UITouch>, with event: UIEvent) {
super.touchesMoved(touches, with: event)
switch state {
case .began, .changed:
switch touchType(for: touches, with: event) {
case .valid:
// If a gesture continues with a valid touch, continue stroke.
state = .changed
case .invalid:
state = .failed
case .outside:
// If a gesture continues with a valid touch _outside the canvas_,
// end stroke.
state = .ended
}
default:
state = .failed
}
}
@objc
public override func touchesEnded(_ touches: Set<UITouch>, with event: UIEvent) {
super.touchesEnded(touches, with: event)
switch state {
case .began, .changed:
switch touchType(for: touches, with: event) {
case .valid, .outside:
// If a gesture ends with a valid touch, end stroke.
state = .ended
case .invalid:
state = .failed
}
default:
state = .failed
}
}
@objc
public override func touchesCancelled(_ touches: Set<UITouch>, with event: UIEvent) {
super.touchesCancelled(touches, with: event)
state = .cancelled
}
public enum TouchType {
case invalid
case valid
case outside
}
private func touchType(for touches: Set<UITouch>, with event: UIEvent) -> TouchType {
guard let gestureView = self.view else {
owsFailDebug("Missing gestureView")
return .invalid
}
guard let canvasView = canvasView else {
owsFailDebug("Missing canvasView")
return .invalid
}
guard let allTouches = event.allTouches else {
owsFailDebug("Missing allTouches")
return .invalid
}
guard allTouches.count <= 1 else {
return .invalid
}
guard touches.count == 1 else {
return .invalid
}
guard let firstTouch: UITouch = touches.first else {
return .invalid
}
let isNewTouch = firstTouch.phase == .began
if isNewTouch {
// Reject new touches that are inside a control subview.
if subviewControl(ofView: gestureView, contains: firstTouch) {
return .invalid
}
}
// Reject or end touches outside the canvas view's bounds.
let location = firstTouch.location(in: canvasView)
if !canvasView.bounds.contains(location) {
if shouldAllowOutsideView {
// Do nothing
} else if isNewTouch {
return .invalid
} else {
return .outside
}
}
if isNewTouch {
// Ignore touches that start near the top or bottom edge of the screen;
// they may be a system edge swipe gesture.
let rootView = self.rootView(of: gestureView)
let rootLocation = firstTouch.location(in: rootView)
let distanceToTopEdge = max(0, rootLocation.y)
let distanceToBottomEdge = max(0, rootView.bounds.size.height - rootLocation.y)
let distanceToNearestEdge = min(distanceToTopEdge, distanceToBottomEdge)
let kSystemEdgeSwipeTolerance: CGFloat = 50
if distanceToNearestEdge < kSystemEdgeSwipeTolerance {
return .invalid
}
}
return .valid
}
private func subviewControl(ofView superview: UIView, contains touch: UITouch) -> Bool {
for subview in superview.subviews {
guard !subview.isHidden, subview.isUserInteractionEnabled else {
continue
}
let location = touch.location(in: subview)
guard subview.bounds.contains(location) else {
continue
}
if subview is UIControl {
return true
}
if subviewControl(ofView: subview, contains: touch) {
return true
}
}
return false
}
private func rootView(of view: UIView) -> UIView {
var responder: UIResponder? = view
var lastView: UIView = view
while true {
guard let currentResponder = responder else {
return lastView
}
if let currentView = currentResponder as? UIView {
lastView = currentView
}
responder = currentResponder.next
}
}
}

View File

@ -0,0 +1,70 @@
//
// Copyright (c) 2019 Open Whisper Systems. All rights reserved.
//
import UIKit
@objc public enum ImageEditorError: Int, Error {
case assertionError
case invalidInput
}
@objc
public enum ImageEditorItemType: Int {
case test
case stroke
case text
}
// MARK: -
// Represented in a "ULO unit" coordinate system
// for source image.
//
// "ULO" coordinate system is "upper-left-origin".
//
// "Unit" coordinate system means values are expressed
// in terms of some other values, in this case the
// width and height of the source image.
//
// * 0.0 = left edge
// * 1.0 = right edge
// * 0.0 = top edge
// * 1.0 = bottom edge
public typealias ImageEditorSample = CGPoint
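// Illustrative round-trip between ULO unit coordinates and view coordinates
// (hypothetical helpers, not the project's CGPoint extensions):
import CoreGraphics
// (0, 0) = top-left corner, (1, 1) = bottom-right corner of the bounds.
func toUnit(_ point: CGPoint, boundsSize: CGSize) -> CGPoint {
    return CGPoint(x: point.x / boundsSize.width,
                   y: point.y / boundsSize.height)
}
func fromUnit(_ unit: CGPoint, boundsSize: CGSize) -> CGPoint {
    return CGPoint(x: unit.x * boundsSize.width,
                   y: unit.y * boundsSize.height)
}
// e.g. the center of a 200x100 image is (0.5, 0.5) in unit coordinates:
// fromUnit(CGPoint(x: 0.5, y: 0.5), boundsSize: CGSize(width: 200, height: 100)) == (100, 50)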
// MARK: -
// Instances of ImageEditorItem should be treated
// as immutable, once configured.
@objc
public class ImageEditorItem: NSObject {
@objc
public let itemId: String
@objc
public let itemType: ImageEditorItemType
@objc
public init(itemType: ImageEditorItemType) {
self.itemId = UUID().uuidString
self.itemType = itemType
super.init()
}
@objc
public init(itemId: String,
itemType: ImageEditorItemType) {
self.itemId = itemId
self.itemType = itemType
super.init()
}
// The scale with which to render this item's content
// when rendering the "output" image for sending.
public func outputScale() -> CGFloat {
return 1.0
}
}

View File

@ -4,472 +4,80 @@
import UIKit
@objc public enum ImageEditorError: Int, Error {
case assertionError
case invalidInput
}
@objc
public enum ImageEditorItemType: Int {
case test
case stroke
case text
}
// MARK: -
// Represented in a "ULO unit" coordinate system
// for source image.
//
// "ULO" coordinate system is "upper-left-origin".
//
// "Unit" coordinate system means values are expressed
// in terms of some other values, in this case the
// width and height of the source image.
//
// * 0.0 = left edge
// * 1.0 = right edge
// * 0.0 = top edge
// * 1.0 = bottom edge
public typealias ImageEditorSample = CGPoint
public typealias ImageEditorConversion = (ImageEditorSample) -> ImageEditorSample
// MARK: -
// Instances of ImageEditorItem should be treated
// as immutable, once configured.
@objc
public class ImageEditorItem: NSObject {
@objc
public let itemId: String
@objc
public let itemType: ImageEditorItemType
@objc
public init(itemType: ImageEditorItemType) {
self.itemId = UUID().uuidString
self.itemType = itemType
super.init()
}
@objc
public init(itemId: String,
itemType: ImageEditorItemType) {
self.itemId = itemId
self.itemType = itemType
super.init()
}
public func clone(withImageEditorConversion conversion: ImageEditorConversion) -> ImageEditorItem {
return ImageEditorItem(itemId: itemId, itemType: itemType)
}
// The scale with which to render this item's content
// when rendering the "output" image for sending.
public func outputScale() -> CGFloat {
return 1.0
}
}
// MARK: -
@objc
public class ImageEditorStrokeItem: ImageEditorItem {
// Until we need to serialize these items,
// just use UIColor.
@objc
public let color: UIColor
public typealias StrokeSample = ImageEditorSample
@objc
public let unitSamples: [StrokeSample]
// Expressed as a "Unit" value as a fraction of
// min(width, height) of the destination viewport.
@objc
public let unitStrokeWidth: CGFloat
@objc
public init(color: UIColor,
unitSamples: [StrokeSample],
unitStrokeWidth: CGFloat) {
self.color = color
self.unitSamples = unitSamples
self.unitStrokeWidth = unitStrokeWidth
super.init(itemType: .stroke)
}
@objc
public init(itemId: String,
color: UIColor,
unitSamples: [StrokeSample],
unitStrokeWidth: CGFloat) {
self.color = color
self.unitSamples = unitSamples
self.unitStrokeWidth = unitStrokeWidth
super.init(itemId: itemId, itemType: .stroke)
}
@objc
public class func defaultUnitStrokeWidth() -> CGFloat {
return 0.02
}
@objc
public class func strokeWidth(forUnitStrokeWidth unitStrokeWidth: CGFloat,
dstSize: CGSize) -> CGFloat {
return CGFloatClamp01(unitStrokeWidth) * min(dstSize.width, dstSize.height)
}
public override func clone(withImageEditorConversion conversion: ImageEditorConversion) -> ImageEditorItem {
// TODO: We might want to convert the unitStrokeWidth too.
let convertedUnitSamples = unitSamples.map { (sample) in
conversion(sample)
}
return ImageEditorStrokeItem(itemId: itemId,
color: color,
unitSamples: convertedUnitSamples,
unitStrokeWidth: unitStrokeWidth)
}
}
// MARK: -
@objc
public class ImageEditorTextItem: ImageEditorItem {
@objc
public let color: UIColor
@objc
public let font: UIFont
@objc
public let text: String
@objc
public let unitCenter: ImageEditorSample
// Leave some margins against the edge of the image.
@objc
public static let kDefaultUnitWidth: CGFloat = 0.9
// The max width of the text as a fraction of the image width.
//
// This provides continuity of text layout before/after cropping.
//
// NOTE: When you scale the text with a pinch gesture, that
// affects _scaling_, not the _unit width_, since we don't want
// to change how the text wraps when scaling.
@objc
public let unitWidth: CGFloat
// 0 = no rotation.
// CGFloat.pi * 0.5 = rotation 90 degrees clockwise.
@objc
public class ImageEditorTransform: NSObject {
public let outputSizePixels: CGSize
public let unitTranslation: CGPoint
public let rotationRadians: CGFloat
@objc
public static let kMaxScaling: CGFloat = 4.0
@objc
public static let kMinScaling: CGFloat = 0.5
@objc
public let scaling: CGFloat
@objc
public init(color: UIColor,
font: UIFont,
text: String,
unitCenter: ImageEditorSample = ImageEditorSample(x: 0.5, y: 0.5),
unitWidth: CGFloat = ImageEditorTextItem.kDefaultUnitWidth,
rotationRadians: CGFloat = 0.0,
scaling: CGFloat = 1.0) {
self.color = color
self.font = font
self.text = text
self.unitCenter = unitCenter
self.unitWidth = unitWidth
self.rotationRadians = rotationRadians
self.scaling = scaling
super.init(itemType: .text)
}
private init(itemId: String,
color: UIColor,
font: UIFont,
text: String,
unitCenter: ImageEditorSample,
unitWidth: CGFloat,
public init(outputSizePixels: CGSize,
unitTranslation: CGPoint,
rotationRadians: CGFloat,
scaling: CGFloat) {
self.color = color
self.font = font
self.text = text
self.unitCenter = unitCenter
self.unitWidth = unitWidth
self.outputSizePixels = outputSizePixels
self.unitTranslation = unitTranslation
self.rotationRadians = rotationRadians
self.scaling = scaling
super.init(itemId: itemId, itemType: .text)
}
@objc
public class func empty(withColor color: UIColor) -> ImageEditorTextItem {
let font = UIFont.boldSystemFont(ofSize: 30.0)
return ImageEditorTextItem(color: color, font: font, text: "")
public class func defaultTransform(srcImageSizePixels: CGSize) -> ImageEditorTransform {
// It shouldn't be necessary to normalize the default transform, but we do so to be safe.
return ImageEditorTransform(outputSizePixels: srcImageSizePixels,
unitTranslation: .zero,
rotationRadians: 0.0,
scaling: 1.0).normalize()
}
@objc
public func copy(withText newText: String) -> ImageEditorTextItem {
return ImageEditorTextItem(itemId: itemId,
color: color,
font: font,
text: newText,
unitCenter: unitCenter,
unitWidth: unitWidth,
rotationRadians: rotationRadians,
scaling: scaling)
public func affineTransform(viewSize: CGSize) -> CGAffineTransform {
let translation = unitTranslation.fromUnitCoordinates(viewSize: viewSize)
Logger.verbose("viewSize: \(viewSize), translation: \(translation), unitTranslation: \(unitTranslation), scaling: \(scaling), rotationRadians: \(rotationRadians), ")
// Order matters. We want SRT (scale-rotate-translate) ordering so that the translation
// is not affected by the scaling or rotation, which should both be about the "origin"
// (in this case the center of the content).
//
// NOTE: CGAffineTransform transforms are composed in reverse order.
let transform = CGAffineTransform.identity.translate(translation).rotated(by: rotationRadians).scaledBy(x: scaling, y: scaling)
return transform
}
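// Because the chained calls compose in reverse, the transform above applies
// scale first, then rotation, then translation. For example, using only the
// standard CGAffineTransform methods:
//
//     let t = CGAffineTransform.identity
//         .translatedBy(x: 10, y: 0)
//         .rotated(by: .pi / 2)
//         .scaledBy(x: 2, y: 2)
//
//     // (1, 0) is scaled to (2, 0), rotated 90 degrees (clockwise in ULO
//     // coordinates) to (0, 2), then translated to (10, 2):
//     CGPoint(x: 1, y: 0).applying(t) == CGPoint(x: 10, y: 2)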
@objc
public func copy(withUnitCenter newUnitCenter: CGPoint) -> ImageEditorTextItem {
return ImageEditorTextItem(itemId: itemId,
color: color,
font: font,
text: text,
unitCenter: newUnitCenter,
unitWidth: unitWidth,
rotationRadians: rotationRadians,
scaling: scaling)
public func normalize() -> ImageEditorTransform {
// TODO: Normalize translation.
// public let unitTranslation: CGPoint
// We need to ensure that
let minScaling: CGFloat = 1.0
let scaling = max(minScaling, self.scaling)
// We don't need to normalize rotation.
return ImageEditorTransform(outputSizePixels: outputSizePixels,
unitTranslation: unitTranslation,
rotationRadians: rotationRadians,
scaling: scaling)
}
@objc
public func copy(withUnitCenter newUnitCenter: CGPoint,
scaling newScaling: CGFloat,
rotationRadians newRotationRadians: CGFloat) -> ImageEditorTextItem {
return ImageEditorTextItem(itemId: itemId,
color: color,
font: font,
text: text,
unitCenter: newUnitCenter,
unitWidth: unitWidth,
rotationRadians: newRotationRadians,
scaling: newScaling)
}
public override func clone(withImageEditorConversion conversion: ImageEditorConversion) -> ImageEditorItem {
let convertedUnitCenter = conversion(unitCenter)
let convertedUnitWidth = conversion(CGPoint(x: unitWidth, y: 0)).x
return ImageEditorTextItem(itemId: itemId,
color: color,
font: font,
text: text,
unitCenter: convertedUnitCenter,
unitWidth: convertedUnitWidth,
rotationRadians: rotationRadians,
scaling: scaling)
}
public override func outputScale() -> CGFloat {
return scaling
}
}
// MARK: -
public class OrderedDictionary<ValueType>: NSObject {
public typealias KeyType = String
var keyValueMap = [KeyType: ValueType]()
var orderedKeys = [KeyType]()
public override init() {
}
// Used to clone copies of instances of this class.
public init(keyValueMap: [KeyType: ValueType],
orderedKeys: [KeyType]) {
self.keyValueMap = keyValueMap
self.orderedKeys = orderedKeys
}
// Since the contents are immutable, we only modify copies
// made with this method.
public func clone() -> OrderedDictionary<ValueType> {
return OrderedDictionary(keyValueMap: keyValueMap, orderedKeys: orderedKeys)
}
public func value(forKey key: KeyType) -> ValueType? {
return keyValueMap[key]
}
public func append(key: KeyType, value: ValueType) {
if keyValueMap[key] != nil {
owsFailDebug("Unexpected duplicate key in key map: \(key)")
}
keyValueMap[key] = value
if orderedKeys.contains(key) {
owsFailDebug("Unexpected duplicate key in key list: \(key)")
} else {
orderedKeys.append(key)
}
if orderedKeys.count != keyValueMap.count {
owsFailDebug("Invalid contents.")
public override func isEqual(_ object: Any?) -> Bool {
guard let other = object as? ImageEditorTransform else {
return false
}
return (outputSizePixels == other.outputSizePixels &&
unitTranslation == other.unitTranslation &&
rotationRadians == other.rotationRadians &&
scaling == other.scaling)
}
public func replace(key: KeyType, value: ValueType) {
if keyValueMap[key] == nil {
owsFailDebug("Missing key in key map: \(key)")
}
keyValueMap[key] = value
if !orderedKeys.contains(key) {
owsFailDebug("Missing key in key list: \(key)")
}
if orderedKeys.count != keyValueMap.count {
owsFailDebug("Invalid contents.")
}
public override var hash: Int {
return (outputSizePixels.width.hashValue ^
outputSizePixels.height.hashValue ^
unitTranslation.x.hashValue ^
unitTranslation.y.hashValue ^
rotationRadians.hashValue ^
scaling.hashValue)
}
public func remove(key: KeyType) {
if keyValueMap[key] == nil {
owsFailDebug("Missing key in key map: \(key)")
} else {
keyValueMap.removeValue(forKey: key)
}
if !orderedKeys.contains(key) {
owsFailDebug("Missing key in key list: \(key)")
} else {
orderedKeys = orderedKeys.filter { $0 != key }
}
if orderedKeys.count != keyValueMap.count {
owsFailDebug("Invalid contents.")
}
}
public var count: Int {
if orderedKeys.count != keyValueMap.count {
owsFailDebug("Invalid contents.")
}
return orderedKeys.count
}
public func orderedValues() -> [ValueType] {
var values = [ValueType]()
for key in orderedKeys {
guard let value = self.keyValueMap[key] else {
owsFailDebug("Missing value")
continue
}
values.append(value)
}
return values
}
}
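// Brief usage sketch of OrderedDictionary: appends preserve insertion order,
// and, since the editor's contents are treated as immutable, mutation happens
// only on clone()s:
//
//     let items = OrderedDictionary<String>()
//     items.append(key: "a", value: "apple")
//     items.append(key: "b", value: "banana")
//     items.replace(key: "a", value: "apricot")
//     items.orderedValues()   // ["apricot", "banana"]
//
//     let copy = items.clone()
//     copy.remove(key: "b")
//     // items.count == 2, copy.count == 1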
// MARK: -
// ImageEditorContents represents a snapshot of canvas
// state.
//
// Instances of ImageEditorContents should be treated
// as immutable, once configured.
public class ImageEditorContents: NSObject {
@objc
public let imagePath: String
@objc
public let imageSizePixels: CGSize
public typealias ItemMapType = OrderedDictionary<ImageEditorItem>
// This represents the current state of each item,
// a mapping of [itemId : item].
var itemMap = ItemMapType()
// Used to create an initial, empty instances of this class.
public init(imagePath: String,
imageSizePixels: CGSize) {
self.imagePath = imagePath
self.imageSizePixels = imageSizePixels
}
// Used to clone copies of instances of this class.
public init(imagePath: String,
imageSizePixels: CGSize,
itemMap: ItemMapType) {
self.imagePath = imagePath
self.imageSizePixels = imageSizePixels
self.itemMap = itemMap
}
// Since the contents are immutable, we only modify copies
// made with this method.
public func clone() -> ImageEditorContents {
return ImageEditorContents(imagePath: imagePath,
imageSizePixels: imageSizePixels,
itemMap: itemMap.clone())
}
@objc
public func item(forId itemId: String) -> ImageEditorItem? {
return itemMap.value(forKey: itemId)
}
@objc
public func append(item: ImageEditorItem) {
Logger.verbose("\(item.itemId)")
itemMap.append(key: item.itemId, value: item)
}
@objc
public func replace(item: ImageEditorItem) {
Logger.verbose("\(item.itemId)")
itemMap.replace(key: item.itemId, value: item)
}
@objc
public func remove(item: ImageEditorItem) {
Logger.verbose("\(item.itemId)")
itemMap.remove(key: item.itemId)
}
@objc
public func remove(itemId: String) {
Logger.verbose("\(itemId)")
itemMap.remove(key: itemId)
}
@objc
public func itemCount() -> Int {
return itemMap.count
}
@objc
public func items() -> [ImageEditorItem] {
return itemMap.orderedValues()
open override var description: String {
return "[outputSizePixels: \(outputSizePixels), unitTranslation: \(unitTranslation), rotationRadians: \(rotationRadians), scaling: \(scaling)]"
}
}
@ -493,7 +101,7 @@ private class ImageEditorOperation: NSObject {
// MARK: -
@objc
public protocol ImageEditorModelDelegate: class {
public protocol ImageEditorModelObserver: class {
// Used for large changes to the model, when the entire
// model should be reloaded.
func imageEditorModelDidChange(before: ImageEditorContents,
@ -514,9 +122,6 @@ public class ImageEditorModel: NSObject {
return _isDebugAssertConfiguration()
}
@objc
public weak var delegate: ImageEditorModelDelegate?
@objc
public let srcImagePath: String
@ -525,6 +130,8 @@ public class ImageEditorModel: NSObject {
private var contents: ImageEditorContents
private var transform: ImageEditorTransform
private var undoStack = [ImageEditorOperation]()
private var redoStack = [ImageEditorOperation]()
@ -544,8 +151,8 @@ public class ImageEditorModel: NSObject {
}
guard MIMETypeUtil.isImage(mimeType),
!MIMETypeUtil.isAnimated(mimeType) else {
Logger.error("Invalid MIME type: \(mimeType).")
throw ImageEditorError.invalidInput
Logger.error("Invalid MIME type: \(mimeType).")
throw ImageEditorError.invalidInput
}
let srcImageSizePixels = NSData.imageSize(forFilePath: srcImagePath, mimeType: mimeType)
@ -555,15 +162,22 @@ public class ImageEditorModel: NSObject {
}
self.srcImageSizePixels = srcImageSizePixels
self.contents = ImageEditorContents(imagePath: srcImagePath,
imageSizePixels: srcImageSizePixels)
self.contents = ImageEditorContents()
self.transform = ImageEditorTransform.defaultTransform(srcImageSizePixels: srcImageSizePixels)
super.init()
}
public func currentTransform() -> ImageEditorTransform {
return transform
}
@objc
public var currentImagePath: String {
return contents.imagePath
public func isDirty() -> Bool {
if itemCount() > 0 {
return true
}
return transform != ImageEditorTransform.defaultTransform(srcImageSizePixels: srcImageSizePixels)
}
@objc
@ -596,6 +210,39 @@ public class ImageEditorModel: NSObject {
return !redoStack.isEmpty
}
// MARK: - Observers
private var observers = [Weak<ImageEditorModelObserver>]()
@objc
public func add(observer: ImageEditorModelObserver) {
observers.append(Weak(value: observer))
}
private func fireModelDidChange(before: ImageEditorContents,
after: ImageEditorContents) {
// We could diff here and yield a more narrow change event.
for weakObserver in observers {
guard let observer = weakObserver.value else {
continue
}
observer.imageEditorModelDidChange(before: before,
after: after)
}
}
private func fireModelDidChange(changedItemIds: [String]) {
// We could diff here and yield a more narrow change event.
for weakObserver in observers {
guard let observer = weakObserver.value else {
continue
}
observer.imageEditorModelDidChange(changedItemIds: changedItemIds)
}
}
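// Minimal sketch of adopting the new observer API (hypothetical observer
// class; observers are held weakly via the Weak wrapper, so no retain cycle):
//
//     class EditorToolbar: NSObject, ImageEditorModelObserver {
//         func imageEditorModelDidChange(before: ImageEditorContents,
//                                        after: ImageEditorContents) {
//             // Large-grained change (e.g. undo/redo): reload everything.
//         }
//         func imageEditorModelDidChange(changedItemIds: [String]) {
//             // Narrow change: refresh only the affected items.
//         }
//     }
//
//     model.add(observer: toolbar)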
// MARK: -
@objc
public func undo() {
guard let undoOperation = undoStack.popLast() else {
@ -610,8 +257,7 @@ public class ImageEditorModel: NSObject {
self.contents = undoOperation.contents
// We could diff here and yield a more narrow change event.
delegate?.imageEditorModelDidChange(before: oldContents,
after: self.contents)
fireModelDidChange(before: oldContents, after: self.contents)
}
@objc
@ -628,8 +274,7 @@ public class ImageEditorModel: NSObject {
self.contents = redoOperation.contents
// We could diff here and yield a more narrow change event.
delegate?.imageEditorModelDidChange(before: oldContents,
after: self.contents)
fireModelDidChange(before: oldContents, after: self.contents)
}
@objc
@ -661,6 +306,16 @@ public class ImageEditorModel: NSObject {
}, changedItemIds: [item.itemId])
}
@objc
public func replace(transform: ImageEditorTransform) {
self.transform = transform
// The contents haven't changed, but this event prods the
// observers to reload everything, which is necessary if
// the transform changes.
fireModelDidChange(before: self.contents, after: self.contents)
}
// MARK: - Temp Files
private var temporaryFilePaths = [String]()
@ -689,63 +344,6 @@ public class ImageEditorModel: NSObject {
}
}
// MARK: - Crop
@objc
public func crop(unitCropRect: CGRect) {
guard let croppedImage = ImageEditorModel.crop(imagePath: contents.imagePath,
unitCropRect: unitCropRect) else {
// Not an error; user might have tapped or
// otherwise drawn an invalid crop region.
Logger.warn("Could not crop image.")
return
}
// Use PNG for temp files; PNG is lossless.
guard let croppedImageData = UIImagePNGRepresentation(croppedImage) else {
owsFailDebug("Could not convert cropped image to PNG.")
return
}
let croppedImagePath = temporaryFilePath(withFileExtension: "png")
do {
try croppedImageData.write(to: NSURL.fileURL(withPath: croppedImagePath), options: .atomicWrite)
} catch let error as NSError {
owsFailDebug("File write failed: \(error)")
return
}
let croppedImageSizePixels = CGSizeScale(croppedImage.size, croppedImage.scale)
let left = unitCropRect.origin.x
let right = unitCropRect.origin.x + unitCropRect.size.width
let top = unitCropRect.origin.y
let bottom = unitCropRect.origin.y + unitCropRect.size.height
let conversion: ImageEditorConversion = { (point) in
// Convert from the pre-crop unit coordinate system
// to post-crop unit coordinate system using inverse
// lerp.
//
// NOTE: Some post-conversion unit values will _NOT_
// be clamped, e.g. strokes outside the crop
// can be < 0 or > 1. This is fine.
// We could hypothetically discard any items
// whose bounding box is entirely outside the
// new unit rectangle (e.g. have been completely
// cropped) but it doesn't seem worthwhile.
let converted = CGPoint(x: CGFloatInverseLerp(point.x, left, right),
y: CGFloatInverseLerp(point.y, top, bottom))
return converted
}
performAction({ (oldContents) in
let newContents = ImageEditorContents(imagePath: croppedImagePath,
imageSizePixels: croppedImageSizePixels)
for oldItem in oldContents.items() {
let newItem = oldItem.clone(withImageEditorConversion: conversion)
newContents.append(item: newItem)
}
return newContents
}, changedItemIds: nil)
}
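// The inverse-lerp conversion, isolated. Illustrative sketch with plain
// arithmetic in place of CGFloatInverseLerp:
import CoreGraphics
// Where does `value` sit between `min` and `max`, as a 0-1 fraction?
func inverseLerp(_ value: CGFloat, _ min: CGFloat, _ max: CGFloat) -> CGFloat {
    return (value - min) / (max - min)
}
// Map a point from pre-crop unit coordinates to post-crop unit coordinates.
func convert(_ point: CGPoint, toUnitCropRect crop: CGRect) -> CGPoint {
    return CGPoint(x: inverseLerp(point.x, crop.minX, crop.maxX),
                   y: inverseLerp(point.y, crop.minY, crop.maxY))
}
// e.g. cropping to the unit rect (0.25, 0.25, 0.5, 0.5):
// the old center stays the center:
//     convert(CGPoint(x: 0.5, y: 0.5), ...) == (0.5, 0.5)
// and a point left of the crop is deliberately NOT clamped:
//     convert(CGPoint(x: 0.1, y: 0.5), ...) == (-0.3, 0.5)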
private func performAction(_ action: (ImageEditorContents) -> ImageEditorContents,
changedItemIds: [String]?,
suppressUndo: Bool = false) {
@ -760,10 +358,10 @@ public class ImageEditorModel: NSObject {
contents = newContents
if let changedItemIds = changedItemIds {
delegate?.imageEditorModelDidChange(changedItemIds: changedItemIds)
fireModelDidChange(changedItemIds: changedItemIds)
} else {
delegate?.imageEditorModelDidChange(before: oldContents,
after: self.contents)
fireModelDidChange(before: oldContents,
after: self.contents)
}
}

View File

@ -0,0 +1,36 @@
//
// Copyright (c) 2019 Open Whisper Systems. All rights reserved.
//
import UIKit
// This GR:
//
// * Tries to fail quickly to avoid conflicts with other GRs, especially pans/swipes.
// * Captures a bunch of useful "pan state" that makes using this GR much easier
// than UIPanGestureRecognizer.
public class ImageEditorPanGestureRecognizer: UIPanGestureRecognizer {
public weak var referenceView: UIView?
public var locationStart: CGPoint?
// MARK: - Touch Handling
@objc
public override func touchesBegan(_ touches: Set<UITouch>, with event: UIEvent) {
super.touchesBegan(touches, with: event)
guard let referenceView = referenceView else {
owsFailDebug("Missing view")
return
}
locationStart = self.location(in: referenceView)
}
public override func reset() {
super.reset()
locationStart = nil
}
}
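// Minimal usage sketch: set referenceView before the gesture begins, then
// read locationStart in the handler (hypothetical view controller):
import UIKit
class CropDemoViewController: UIViewController {
    let clipView = UIView()
    override func viewDidLoad() {
        super.viewDidLoad()
        let panGestureRecognizer = ImageEditorPanGestureRecognizer(target: self, action: #selector(handlePan(_:)))
        panGestureRecognizer.maximumNumberOfTouches = 1
        // locationStart will be captured in this view's coordinates.
        panGestureRecognizer.referenceView = clipView
        view.addGestureRecognizer(panGestureRecognizer)
    }
    @objc func handlePan(_ gestureRecognizer: ImageEditorPanGestureRecognizer) {
        guard let locationStart = gestureRecognizer.locationStart else { return }
        let locationNow = gestureRecognizer.location(in: clipView)
        // Drive a drag or crop from the accumulated delta.
        _ = CGPoint(x: locationNow.x - locationStart.x, y: locationNow.y - locationStart.y)
    }
}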

View File

@ -29,6 +29,8 @@ public struct ImageEditorPinchState {
// than UIPinchGestureRecognizer.
public class ImageEditorPinchGestureRecognizer: UIGestureRecognizer {
public weak var referenceView: UIView?
public var pinchStateStart = ImageEditorPinchState.empty
public var pinchStateLast = ImageEditorPinchState.empty
@ -144,32 +146,24 @@ public class ImageEditorPinchGestureRecognizer: UIGestureRecognizer {
if allTouches.count < 2 {
return .possible
}
guard let pinchState = pinchState(for: allTouches) else {
guard let pinchState = pinchState() else {
return .invalid
}
return .valid(pinchState:pinchState)
}
private func pinchState(for touches: Set<UITouch>) -> ImageEditorPinchState? {
guard let view = self.view else {
private func pinchState() -> ImageEditorPinchState? {
guard let referenceView = referenceView else {
owsFailDebug("Missing view")
return nil
}
guard touches.count == 2 else {
guard numberOfTouches == 2 else {
return nil
}
let touchList = Array<UITouch>(touches).sorted { (left, right) -> Bool in
// TODO: Will timestamp yield stable sort?
left.timestamp < right.timestamp
}
guard let touch0 = touchList.first else {
return nil
}
guard let touch1 = touchList.last else {
return nil
}
let location0 = touch0.location(in: view)
let location1 = touch1.location(in: view)
// We need the touch locations _with a stable ordering_.
// The only way to ensure the ordering is to use location(ofTouch:in:).
let location0 = location(ofTouch: 0, in: referenceView)
let location1 = location(ofTouch: 1, in: referenceView)
let centroid = CGPointScale(CGPointAdd(location0, location1), 0.5)
let distance = CGPointDistance(location0, location1)
@ -178,7 +172,6 @@ public class ImageEditorPinchGestureRecognizer: UIGestureRecognizer {
// changes to the angle.
let delta = CGPointSubtract(location1, location0)
let angleRadians = atan2(delta.y, delta.x)
return ImageEditorPinchState(centroid: centroid,
distance: distance,
angleRadians: angleRadians)
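// The pinch state reduces to three scalars derived from the two (stably
// ordered) touch locations. Self-contained version of that arithmetic, with
// plain CGPoint math in place of CGPointScale/CGPointAdd/CGPointDistance:
import UIKit
func pinchScalars(location0: CGPoint, location1: CGPoint)
    -> (centroid: CGPoint, distance: CGFloat, angleRadians: CGFloat) {
    // Midpoint of the two touches.
    let centroid = CGPoint(x: (location0.x + location1.x) * 0.5,
                           y: (location0.y + location1.y) * 0.5)
    let dx = location1.x - location0.x
    let dy = location1.y - location0.y
    // The distance between the touches drives scaling.
    let distance = (dx * dx + dy * dy).squareRoot()
    // The angle of the segment between the touches drives rotation;
    // a stable touch order keeps it continuous across updates.
    let angleRadians = atan2(dy, dx)
    return (centroid, distance, angleRadians)
}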

View File

@ -0,0 +1,57 @@
//
// Copyright (c) 2019 Open Whisper Systems. All rights reserved.
//
import UIKit
@objc
public class ImageEditorStrokeItem: ImageEditorItem {
// Until we need to serialize these items,
// just use UIColor.
@objc
public let color: UIColor
public typealias StrokeSample = ImageEditorSample
@objc
public let unitSamples: [StrokeSample]
// Expressed as a "Unit" value as a fraction of
// min(width, height) of the destination viewport.
@objc
public let unitStrokeWidth: CGFloat
@objc
public init(color: UIColor,
unitSamples: [StrokeSample],
unitStrokeWidth: CGFloat) {
self.color = color
self.unitSamples = unitSamples
self.unitStrokeWidth = unitStrokeWidth
super.init(itemType: .stroke)
}
@objc
public init(itemId: String,
color: UIColor,
unitSamples: [StrokeSample],
unitStrokeWidth: CGFloat) {
self.color = color
self.unitSamples = unitSamples
self.unitStrokeWidth = unitStrokeWidth
super.init(itemId: itemId, itemType: .stroke)
}
@objc
public class func defaultUnitStrokeWidth() -> CGFloat {
return 0.02
}
@objc
public class func strokeWidth(forUnitStrokeWidth unitStrokeWidth: CGFloat,
dstSize: CGSize) -> CGFloat {
return CGFloatClamp01(unitStrokeWidth) * min(dstSize.width, dstSize.height)
}
}
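// Quick worked example of the width mapping above:
//
//     ImageEditorStrokeItem.strokeWidth(forUnitStrokeWidth: ImageEditorStrokeItem.defaultUnitStrokeWidth(),
//                                       dstSize: CGSize(width: 375, height: 667))
//     // == 0.02 * min(375, 667) == 7.5pt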

View File

@ -0,0 +1,154 @@
//
// Copyright (c) 2019 Open Whisper Systems. All rights reserved.
//
import UIKit
@objc
public class ImageEditorTextItem: ImageEditorItem {
@objc
public let text: String
@objc
public let color: UIColor
@objc
public let font: UIFont
// In order to render the text at a consistent size
// in very differently sized contexts (canvas in
// portrait, landscape, in the crop tool, before and
// after cropping, while rendering output),
// we need to scale the font size to reflect the
// view width.
//
// We use the image's rendering width as the reference value,
// since we want to be consistent with regard to the image's
// content.
@objc
public let fontReferenceImageWidth: CGFloat
@objc
public let unitCenter: ImageEditorSample
// Leave some margins against the edge of the image.
@objc
public static let kDefaultUnitWidth: CGFloat = 0.9
// The max width of the text as a fraction of the image width.
//
// This provides continuity of text layout before/after cropping.
//
// NOTE: When you scale the text with a pinch gesture, that
// affects _scaling_, not the _unit width_, since we don't want
// to change how the text wraps when scaling.
@objc
public let unitWidth: CGFloat
// 0 = no rotation.
// CGFloat.pi * 0.5 = rotation 90 degrees clockwise.
@objc
public let rotationRadians: CGFloat
@objc
public static let kMaxScaling: CGFloat = 4.0
@objc
public static let kMinScaling: CGFloat = 0.5
@objc
public let scaling: CGFloat
@objc
public init(text: String,
color: UIColor,
font: UIFont,
fontReferenceImageWidth: CGFloat,
unitCenter: ImageEditorSample = ImageEditorSample(x: 0.5, y: 0.5),
unitWidth: CGFloat = ImageEditorTextItem.kDefaultUnitWidth,
rotationRadians: CGFloat = 0.0,
scaling: CGFloat = 1.0) {
self.text = text
self.color = color
self.font = font
self.fontReferenceImageWidth = fontReferenceImageWidth
self.unitCenter = unitCenter
self.unitWidth = unitWidth
self.rotationRadians = rotationRadians
self.scaling = scaling
super.init(itemType: .text)
}
private init(itemId: String,
text: String,
color: UIColor,
font: UIFont,
fontReferenceImageWidth: CGFloat,
unitCenter: ImageEditorSample,
unitWidth: CGFloat,
rotationRadians: CGFloat,
scaling: CGFloat) {
self.text = text
self.color = color
self.font = font
self.fontReferenceImageWidth = fontReferenceImageWidth
self.unitCenter = unitCenter
self.unitWidth = unitWidth
self.rotationRadians = rotationRadians
self.scaling = scaling
super.init(itemId: itemId, itemType: .text)
}
@objc
public class func empty(withColor color: UIColor, unitWidth: CGFloat, fontReferenceImageWidth: CGFloat) -> ImageEditorTextItem {
// TODO: Tune the default font size.
let font = UIFont.boldSystemFont(ofSize: 30.0)
return ImageEditorTextItem(text: "", color: color, font: font, fontReferenceImageWidth: fontReferenceImageWidth, unitWidth: unitWidth)
}
@objc
public func copy(withText newText: String) -> ImageEditorTextItem {
return ImageEditorTextItem(itemId: itemId,
text: newText,
color: color,
font: font,
fontReferenceImageWidth: fontReferenceImageWidth,
unitCenter: unitCenter,
unitWidth: unitWidth,
rotationRadians: rotationRadians,
scaling: scaling)
}
@objc
public func copy(withUnitCenter newUnitCenter: CGPoint) -> ImageEditorTextItem {
return ImageEditorTextItem(itemId: itemId,
text: text,
color: color,
font: font,
fontReferenceImageWidth: fontReferenceImageWidth,
unitCenter: newUnitCenter,
unitWidth: unitWidth,
rotationRadians: rotationRadians,
scaling: scaling)
}
@objc
public func copy(withUnitCenter newUnitCenter: CGPoint,
scaling newScaling: CGFloat,
rotationRadians newRotationRadians: CGFloat) -> ImageEditorTextItem {
return ImageEditorTextItem(itemId: itemId,
text: text,
color: color,
font: font,
fontReferenceImageWidth: fontReferenceImageWidth,
unitCenter: newUnitCenter,
unitWidth: unitWidth,
rotationRadians: newRotationRadians,
scaling: newScaling)
}
public override func outputScale() -> CGFloat {
return scaling
}
}
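// The fontReferenceImageWidth bookkeeping implies a render-time scale factor.
// Hedged sketch of how a renderer might derive the display font (an assumption
// about rendering code that is not part of this diff):
import UIKit
func displayFont(for item: ImageEditorTextItem,
                 currentImageWidthPoints: CGFloat) -> UIFont {
    // Text should occupy the same fraction of the image regardless of the
    // current view size, so scale relative to the reference width.
    let scaleFactor = currentImageWidthPoints / item.fontReferenceImageWidth
    return item.font.withSize(item.font.pointSize * scaleFactor * item.scaling)
}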

View File

@ -99,7 +99,7 @@ public protocol ImageEditorTextViewControllerDelegate: class {
// MARK: -
// A view for editing a text item in the image editor.
class ImageEditorTextViewController: OWSViewController, VAlignTextViewDelegate {
public class ImageEditorTextViewController: OWSViewController, VAlignTextViewDelegate {
private weak var delegate: ImageEditorTextViewControllerDelegate?
private let textItem: ImageEditorTextItem
@ -127,19 +127,19 @@ class ImageEditorTextViewController: OWSViewController, VAlignTextViewDelegate {
// MARK: - View Lifecycle
override func viewWillAppear(_ animated: Bool) {
public override func viewWillAppear(_ animated: Bool) {
super.viewWillAppear(animated)
textView.becomeFirstResponder()
}
override func viewDidAppear(_ animated: Bool) {
public override func viewDidAppear(_ animated: Bool) {
super.viewDidAppear(animated)
textView.becomeFirstResponder()
}
override func loadView() {
public override func loadView() {
self.view = UIView()
self.view.backgroundColor = UIColor(white: 0.5, alpha: 0.5)
@ -155,8 +155,12 @@ class ImageEditorTextViewController: OWSViewController, VAlignTextViewDelegate {
textView.autoHCenterInSuperview()
// In order to have text wrapping be as WYSIWYG as possible, we limit the text view
// to the max text width on the image.
let maxTextWidthPoints = max(self.maxTextWidthPoints, 200)
textView.autoSetDimension(.width, toSize: maxTextWidthPoints, relation: .lessThanOrEqual)
// let maxTextWidthPoints = max(textItem.widthPoints, 200)
// textView.autoSetDimension(.width, toSize: maxTextWidthPoints, relation: .lessThanOrEqual)
// textView.autoPinEdge(toSuperviewMargin: .leading, relation: .greaterThanOrEqual)
// textView.autoPinEdge(toSuperviewMargin: .trailing, relation: .greaterThanOrEqual)
textView.autoPinEdge(toSuperviewMargin: .leading)
textView.autoPinEdge(toSuperviewMargin: .trailing)
self.autoPinView(toBottomOfViewControllerOrKeyboard: textView, avoidNotch: true)
}
@ -207,7 +211,7 @@ class ImageEditorTextViewController: OWSViewController, VAlignTextViewDelegate {
// MARK: - VAlignTextViewDelegate
func textViewDidComplete() {
public func textViewDidComplete() {
completeAndDismiss()
}
}

View File

@ -4,23 +4,6 @@
import UIKit
private class EditorTextLayer: CATextLayer {
let itemId: String
public init(itemId: String) {
self.itemId = itemId
super.init()
}
@available(*, unavailable, message: "use other init() instead.")
required public init?(coder aDecoder: NSCoder) {
notImplemented()
}
}
// MARK: -
@objc
public protocol ImageEditorViewDelegate: class {
func imageEditor(presentFullScreenOverlay viewController: UIViewController)
@ -31,17 +14,18 @@ public protocol ImageEditorViewDelegate: class {
// A view for editing outgoing image attachments.
// It can also be used to render the final output.
@objc
public class ImageEditorView: UIView, ImageEditorModelDelegate, ImageEditorTextViewControllerDelegate, UIGestureRecognizerDelegate {
public class ImageEditorView: UIView {
weak var delegate: ImageEditorViewDelegate?
private let model: ImageEditorModel
private let canvasView: ImageEditorCanvasView
enum EditorMode: String {
// This is the default mode. It is used for interacting with text items.
case none
case brush
case crop
}
private var editorMode = EditorMode.none {
@ -59,10 +43,11 @@ public class ImageEditorView: UIView, ImageEditorModelDelegate, ImageEditorTextV
public required init(model: ImageEditorModel, delegate: ImageEditorViewDelegate) {
self.model = model
self.delegate = delegate
self.canvasView = ImageEditorCanvasView(model: model)
super.init(frame: .zero)
model.delegate = self
model.add(observer: self)
}
@available(*, unavailable, message: "use other init() instead.")
@ -72,51 +57,53 @@ public class ImageEditorView: UIView, ImageEditorModelDelegate, ImageEditorTextV
// MARK: - Views
private let imageView = UIImageView()
private var imageViewConstraints = [NSLayoutConstraint]()
private let layersView = OWSLayerView()
private var editorGestureRecognizer: ImageEditorGestureRecognizer?
private var moveTextGestureRecognizer: ImageEditorPanGestureRecognizer?
private var brushGestureRecognizer: ImageEditorPanGestureRecognizer?
private var tapGestureRecognizer: UITapGestureRecognizer?
private var pinchGestureRecognizer: ImageEditorPinchGestureRecognizer?
@objc
public func configureSubviews() -> Bool {
self.addSubview(imageView)
guard updateImageView() else {
guard canvasView.configureSubviews() else {
return false
}
layersView.clipsToBounds = true
layersView.layoutCallback = { [weak self] (_) in
self?.updateAllContent()
}
self.addSubview(layersView)
layersView.autoPin(toEdgesOf: imageView)
self.addSubview(canvasView)
canvasView.ows_autoPinToSuperviewEdges()
self.isUserInteractionEnabled = true
layersView.isUserInteractionEnabled = true
let editorGestureRecognizer = ImageEditorGestureRecognizer(target: self, action: #selector(handleEditorGesture(_:)))
editorGestureRecognizer.canvasView = layersView
editorGestureRecognizer.delegate = self
self.addGestureRecognizer(editorGestureRecognizer)
self.editorGestureRecognizer = editorGestureRecognizer
let moveTextGestureRecognizer = ImageEditorPanGestureRecognizer(target: self, action: #selector(handleMoveTextGesture(_:)))
moveTextGestureRecognizer.maximumNumberOfTouches = 1
moveTextGestureRecognizer.referenceView = canvasView.gestureReferenceView
moveTextGestureRecognizer.delegate = self
self.addGestureRecognizer(moveTextGestureRecognizer)
self.moveTextGestureRecognizer = moveTextGestureRecognizer
let brushGestureRecognizer = ImageEditorPanGestureRecognizer(target: self, action: #selector(handleBrushGesture(_:)))
brushGestureRecognizer.maximumNumberOfTouches = 1
brushGestureRecognizer.referenceView = canvasView.gestureReferenceView
self.addGestureRecognizer(brushGestureRecognizer)
self.brushGestureRecognizer = brushGestureRecognizer
let tapGestureRecognizer = UITapGestureRecognizer(target: self, action: #selector(handleTapGesture(_:)))
self.addGestureRecognizer(tapGestureRecognizer)
self.tapGestureRecognizer = tapGestureRecognizer
let pinchGestureRecognizer = ImageEditorPinchGestureRecognizer(target: self, action: #selector(handlePinchGesture(_:)))
pinchGestureRecognizer.referenceView = canvasView.gestureReferenceView
self.addGestureRecognizer(pinchGestureRecognizer)
self.pinchGestureRecognizer = pinchGestureRecognizer
// De-conflict the GRs.
editorGestureRecognizer.require(toFail: tapGestureRecognizer)
editorGestureRecognizer.require(toFail: pinchGestureRecognizer)
// editorGestureRecognizer.require(toFail: tapGestureRecognizer)
// editorGestureRecognizer.require(toFail: pinchGestureRecognizer)
updateGestureState()
DispatchQueue.main.async {
self.presentCropTool()
}
return true
}
@ -138,44 +125,6 @@ public class ImageEditorView: UIView, ImageEditorModelDelegate, ImageEditorTextV
}
}
@objc
public func updateImageView() -> Bool {
guard let image = UIImage(contentsOfFile: model.currentImagePath) else {
owsFailDebug("Could not load image")
return false
}
guard image.size.width > 0 && image.size.height > 0 else {
owsFailDebug("Could not load image")
return false
}
imageView.image = image
imageView.layer.minificationFilter = kCAFilterTrilinear
imageView.layer.magnificationFilter = kCAFilterTrilinear
let aspectRatio = image.size.width / image.size.height
for constraint in imageViewConstraints {
constraint.autoRemove()
}
imageViewConstraints = applyScaleAspectFitLayout(view: imageView, aspectRatio: aspectRatio)
return true
}
private func applyScaleAspectFitLayout(view: UIView, aspectRatio: CGFloat) -> [NSLayoutConstraint] {
// This emulates the behavior of contentMode = .scaleAspectFit using
// iOS auto layout constraints.
//
// This allows ConversationInputToolbar to place the "cancel" button
// in the upper-right hand corner of the preview content.
var constraints = [NSLayoutConstraint]()
constraints.append(contentsOf: view.autoCenterInSuperview())
constraints.append(view.autoPin(toAspectRatio: aspectRatio))
constraints.append(view.autoMatch(.width, to: .width, of: self, withMultiplier: 1.0, relation: .lessThanOrEqual))
constraints.append(view.autoMatch(.height, to: .height, of: self, withMultiplier: 1.0, relation: .lessThanOrEqual))
return constraints
}
private let undoButton = UIButton(type: .custom)
private let redoButton = UIButton(type: .custom)
private let brushButton = UIButton(type: .custom)
@ -249,7 +198,7 @@ public class ImageEditorView: UIView, ImageEditorModelDelegate, ImageEditorTextV
undoButton.isEnabled = model.canUndo()
redoButton.isEnabled = model.canRedo()
brushButton.isSelected = editorMode == .brush
cropButton.isSelected = editorMode == .crop
cropButton.isSelected = false
newTextButton.isSelected = false
for button in allButtons {
@ -286,13 +235,23 @@ public class ImageEditorView: UIView, ImageEditorModelDelegate, ImageEditorTextV
@objc func didTapCrop(sender: UIButton) {
Logger.verbose("")
toggle(editorMode: .crop)
presentCropTool()
}
@objc func didTapNewText(sender: UIButton) {
Logger.verbose("")
let textItem = ImageEditorTextItem.empty(withColor: currentColor)
let viewSize = canvasView.gestureReferenceView.bounds.size
let imageSize = model.srcImageSizePixels
let imageFrame = ImageEditorCanvasView.imageFrame(forViewSize: viewSize, imageSize: imageSize,
transform: model.currentTransform())
let textWidthPoints = viewSize.width * ImageEditorTextItem.kDefaultUnitWidth
let textWidthUnit = textWidthPoints / imageFrame.size.width
let textItem = ImageEditorTextItem.empty(withColor: currentColor,
unitWidth: textWidthUnit,
fontReferenceImageWidth: imageFrame.size.width)
edit(textItem: textItem)
}
@ -319,20 +278,14 @@ public class ImageEditorView: UIView, ImageEditorModelDelegate, ImageEditorTextV
switch editorMode {
case .none:
editorGestureRecognizer?.shouldAllowOutsideView = true
editorGestureRecognizer?.isEnabled = true
moveTextGestureRecognizer?.isEnabled = true
brushGestureRecognizer?.isEnabled = false
tapGestureRecognizer?.isEnabled = true
pinchGestureRecognizer?.isEnabled = true
case .brush:
// Brush strokes can start and end (and return from) outside the view.
editorGestureRecognizer?.shouldAllowOutsideView = true
editorGestureRecognizer?.isEnabled = true
tapGestureRecognizer?.isEnabled = false
pinchGestureRecognizer?.isEnabled = false
case .crop:
// Crop gestures can start and end (and return from) outside the view.
editorGestureRecognizer?.shouldAllowOutsideView = true
editorGestureRecognizer?.isEnabled = true
moveTextGestureRecognizer?.isEnabled = false
brushGestureRecognizer?.isEnabled = true
tapGestureRecognizer?.isEnabled = false
pinchGestureRecognizer?.isEnabled = false
}
@ -349,7 +302,8 @@ public class ImageEditorView: UIView, ImageEditorModelDelegate, ImageEditorTextV
return
}
guard let textLayer = textLayer(forGestureRecognizer: gestureRecognizer) else {
let location = gestureRecognizer.location(in: canvasView.gestureReferenceView)
guard let textLayer = canvasView.textLayer(forLocation: location) else {
return
}
@ -361,27 +315,6 @@ public class ImageEditorView: UIView, ImageEditorModelDelegate, ImageEditorTextV
edit(textItem: textItem)
}
private var isEditingTextItem = false {
didSet {
AssertIsOnMainThread()
updateButtons()
}
}
private func edit(textItem: ImageEditorTextItem) {
Logger.verbose("")
toggle(editorMode: .none)
isEditingTextItem = true
let maxTextWidthPoints = imageView.width() * ImageEditorTextItem.kDefaultUnitWidth
let textEditor = ImageEditorTextViewController(delegate: self, textItem: textItem, maxTextWidthPoints: maxTextWidthPoints)
self.delegate?.imageEditor(presentFullScreenOverlay: textEditor)
}
// MARK: - Pinch Gesture
// These properties are valid while moving a text item.
@ -397,12 +330,7 @@ public class ImageEditorView: UIView, ImageEditorModelDelegate, ImageEditorTextV
switch gestureRecognizer.state {
case .began:
let pinchState = gestureRecognizer.pinchStateStart
guard let gestureRecognizerView = gestureRecognizer.view else {
owsFailDebug("Missing gestureRecognizer.view.")
return
}
let location = gestureRecognizerView.convert(pinchState.centroid, to: unitReferenceView)
guard let textLayer = textLayer(forLocation: location) else {
guard let textLayer = canvasView.textLayer(forLocation: pinchState.centroid) else {
// The pinch needs to start centered on a text item.
return
}
@ -417,9 +345,13 @@ public class ImageEditorView: UIView, ImageEditorModelDelegate, ImageEditorTextV
return
}
let locationDelta = CGPointSubtract(gestureRecognizer.pinchStateLast.centroid,
gestureRecognizer.pinchStateStart.centroid)
let unitLocationDelta = convertToUnit(location: locationDelta, shouldClamp: false)
let locationStart = gestureRecognizer.pinchStateStart.centroid
let locationUnitStart = locationUnit(forLocationInView: locationStart, transform: model.currentTransform())
let locationNow = gestureRecognizer.pinchStateLast.centroid
let locationUnitNow = locationUnit(forLocationInView: locationNow, transform: model.currentTransform())
let unitLocationDelta = CGPointSubtract(locationUnitNow,
locationUnitStart)
let unitCenter = CGPointClamp01(CGPointAdd(textItem.unitCenter, unitLocationDelta))
// NOTE: We use max(1, ...) to avoid divide-by-zero.
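For context, the scaling update this note guards looks roughly like the sketch below; pinchStateStart/pinchStateLast come from the ImageEditorPinchGestureRecognizer added in this commit, but the exact lines are elided from this hunk, so treat the names as assumptions.

// Hypothetical sketch: scale the text item by the ratio of pinch distances.
// max(1, ...) keeps a degenerate zero start distance from dividing by zero.
let scaleDelta = gestureRecognizer.pinchStateLast.distance / max(1, gestureRecognizer.pinchStateStart.distance)
let newScaling = textItem.scaling * scaleDelta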
@ -450,41 +382,24 @@ public class ImageEditorView: UIView, ImageEditorModelDelegate, ImageEditorTextV
// MARK: - Editor Gesture
@objc
public func handleEditorGesture(_ gestureRecognizer: ImageEditorGestureRecognizer) {
AssertIsOnMainThread()
switch editorMode {
case .none:
handleDefaultGesture(gestureRecognizer)
break
case .brush:
handleBrushGesture(gestureRecognizer)
case .crop:
handleCropGesture(gestureRecognizer)
}
}
// These properties are valid while moving a text item.
private var movingTextItem: ImageEditorTextItem?
private var movingTextStartUnitLocation = CGPoint.zero
private var movingTextStartUnitCenter = CGPoint.zero
private var movingTextStartUnitCenter: CGPoint?
private var movingTextHasMoved = false
@objc
public func handleDefaultGesture(_ gestureRecognizer: ImageEditorGestureRecognizer) {
public func handleMoveTextGesture(_ gestureRecognizer: ImageEditorPanGestureRecognizer) {
AssertIsOnMainThread()
// We could undo an in-progress move if the gesture is cancelled, but it seems gratuitous.
switch gestureRecognizer.state {
case .began:
guard let gestureRecognizerView = gestureRecognizer.view else {
owsFailDebug("Missing gestureRecognizer.view.")
guard let locationStart = gestureRecognizer.locationStart else {
owsFailDebug("Missing locationStart.")
return
}
let location = gestureRecognizerView.convert(gestureRecognizer.startLocationInView, to: unitReferenceView)
guard let textLayer = textLayer(forLocation: location) else {
guard let textLayer = canvasView.textLayer(forLocation: locationStart) else {
owsFailDebug("No text layer")
return
}
@ -492,9 +407,6 @@ public class ImageEditorView: UIView, ImageEditorModelDelegate, ImageEditorTextV
owsFailDebug("Missing or invalid text item.")
return
}
movingTextStartUnitLocation = convertToUnit(location: location,
shouldClamp: false)
movingTextItem = textItem
movingTextStartUnitCenter = textItem.unitCenter
movingTextHasMoved = false
@ -503,9 +415,20 @@ public class ImageEditorView: UIView, ImageEditorModelDelegate, ImageEditorTextV
guard let textItem = movingTextItem else {
return
}
guard let locationStart = gestureRecognizer.locationStart else {
owsFailDebug("Missing locationStart.")
return
}
guard let movingTextStartUnitCenter = movingTextStartUnitCenter else {
owsFailDebug("Missing movingTextStartUnitCenter.")
return
}
let unitLocation = unitSampleForGestureLocation(gestureRecognizer, shouldClamp: false)
let unitLocationDelta = CGPointSubtract(unitLocation, movingTextStartUnitLocation)
let locationUnitStart = canvasView.locationUnit(forLocationInView: locationStart, transform: model.currentTransform())
let locationNow = gestureRecognizer.location(in: canvasView.gestureReferenceView)
let locationUnitNow = canvasView.locationUnit(forLocationInView: locationNow, transform: model.currentTransform())
let unitLocationDelta = CGPointSubtract(locationUnitNow, locationUnitStart)
let unitCenter = CGPointClamp01(CGPointAdd(movingTextStartUnitCenter, unitLocationDelta))
let newItem = textItem.copy(withUnitCenter: unitCenter)
@ -542,7 +465,7 @@ public class ImageEditorView: UIView, ImageEditorModelDelegate, ImageEditorTextV
self.currentStrokeSamples.removeAll()
}
let tryToAppendStrokeSample = {
let newSample = self.unitSampleForGestureLocation(gestureRecognizer, shouldClamp: false)
let newSample = self.locationUnit(forGestureRecognizer: gestureRecognizer, transform: self.model.currentTransform())
if let prevSample = self.currentStrokeSamples.last,
prevSample == newSample {
// Ignore duplicate samples.
@ -590,474 +513,105 @@ public class ImageEditorView: UIView, ImageEditorModelDelegate, ImageEditorTextV
}
}
private var unitReferenceView: UIView {
return layersView
// MARK: - Coordinates
private func locationUnit(forGestureRecognizer gestureRecognizer: UIGestureRecognizer,
transform: ImageEditorTransform) -> CGPoint {
return canvasView.locationUnit(forGestureRecognizer: gestureRecognizer, transform: transform)
}
private func unitSampleForGestureLocation(_ gestureRecognizer: UIGestureRecognizer,
shouldClamp: Bool) -> CGPoint {
// TODO: Smooth touch samples before converting into stroke samples.
let location = gestureRecognizer.location(in: unitReferenceView)
return convertToUnit(location: location,
shouldClamp: shouldClamp)
private func locationUnit(forLocationInView locationInView: CGPoint,
transform: ImageEditorTransform) -> CGPoint {
return canvasView.locationUnit(forLocationInView: locationInView, transform: transform)
}
private func convertToUnit(location: CGPoint,
shouldClamp: Bool) -> CGPoint {
var x = CGFloatInverseLerp(location.x, 0, unitReferenceView.bounds.width)
var y = CGFloatInverseLerp(location.y, 0, unitReferenceView.bounds.height)
if shouldClamp {
x = CGFloatClamp01(x)
y = CGFloatClamp01(y)
}
return CGPoint(x: x, y: y)
}
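A quick worked example of the conversion above, assuming a 200x100-point reference view:

// let unit = convertToUnit(location: CGPoint(x: 50, y: 25), shouldClamp: false)
// => CGPoint(x: 0.25, y: 0.25)
// let offView = convertToUnit(location: CGPoint(x: 250, y: 50), shouldClamp: true)
// => CGPoint(x: 1.0, y: 0.5) -- clamped; unclamped it would be (1.25, 0.5).
// Brush strokes pass shouldClamp: false because they may leave the view;
// the crop gesture passes shouldClamp: true so the crop stays inside the image.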
// MARK: - Edit Text Tool
// MARK: - Crop
private var isEditingTextItem = false {
didSet {
AssertIsOnMainThread()
private var cropStartUnit = CGPoint.zero
private var cropEndUnit = CGPoint.zero
private var cropLayer1 = CAShapeLayer()
private var cropLayer2 = CAShapeLayer()
private var cropLayers: [CAShapeLayer] {
return [cropLayer1, cropLayer2]
}
@objc
public func handleCropGesture(_ gestureRecognizer: UIGestureRecognizer) {
AssertIsOnMainThread()
let kCropDashLength: CGFloat = 3
let cancelCrop = {
for cropLayer in self.cropLayers {
cropLayer.removeFromSuperlayer()
cropLayer.removeAllAnimations()
}
}
let updateCropLayer = { (cropLayer: CAShapeLayer) in
cropLayer.fillColor = nil
cropLayer.lineWidth = 1.0
cropLayer.lineDashPattern = [NSNumber(value: Double(kCropDashLength)), NSNumber(value: Double(kCropDashLength))]
let viewSize = self.layersView.bounds.size
cropLayer.frame = CGRect(origin: .zero, size: viewSize)
// Find the upper-left and bottom-right corners of the
// crop rectangle, in unit coordinates.
let unitMin = CGPointMin(self.cropStartUnit, self.cropEndUnit)
let unitMax = CGPointMax(self.cropStartUnit, self.cropEndUnit)
let transformSampleToPoint = { (unitSample: CGPoint) -> CGPoint in
return CGPoint(x: viewSize.width * unitSample.x,
y: viewSize.height * unitSample.y)
}
// Convert from unit coordinates to view coordinates.
let pointMin = transformSampleToPoint(unitMin)
let pointMax = transformSampleToPoint(unitMax)
let cropRect = CGRect(x: pointMin.x,
y: pointMin.y,
width: pointMax.x - pointMin.x,
height: pointMax.y - pointMin.y)
let bezierPath = UIBezierPath(rect: cropRect)
cropLayer.path = bezierPath.cgPath
}
let updateCrop = {
updateCropLayer(self.cropLayer1)
updateCropLayer(self.cropLayer2)
self.cropLayer1.strokeColor = UIColor.white.cgColor
self.cropLayer2.strokeColor = UIColor.black.cgColor
self.cropLayer1.lineDashPhase = 0
self.cropLayer2.lineDashPhase = self.cropLayer1.lineDashPhase + kCropDashLength
}
let startCrop = {
for cropLayer in self.cropLayers {
self.layersView.layer.addSublayer(cropLayer)
}
updateCrop()
}
let endCrop = {
updateCrop()
for cropLayer in self.cropLayers {
cropLayer.removeFromSuperlayer()
cropLayer.removeAllAnimations()
}
// Find the upper-left and bottom-right corners of the
// crop rectangle, in unit coordinates.
let unitMin = CGPointClamp01(CGPointMin(self.cropStartUnit, self.cropEndUnit))
let unitMax = CGPointClamp01(CGPointMax(self.cropStartUnit, self.cropEndUnit))
let unitCropRect = CGRect(x: unitMin.x,
y: unitMin.y,
width: unitMax.x - unitMin.x,
height: unitMax.y - unitMin.y)
self.model.crop(unitCropRect: unitCropRect)
}
let currentUnitSample = {
self.unitSampleForGestureLocation(gestureRecognizer, shouldClamp: true)
}
switch gestureRecognizer.state {
case .began:
let unitSample = currentUnitSample()
cropStartUnit = unitSample
cropEndUnit = unitSample
startCrop()
case .changed:
cropEndUnit = currentUnitSample()
updateCrop()
case .ended:
cropEndUnit = currentUnitSample()
endCrop()
default:
cancelCrop()
updateButtons()
}
}
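The endCrop math above normalizes the two drag endpoints so the crop can be dragged in any direction, while the pair of dashed layers (white and black, dash phases offset by kCropDashLength) produces an alternating outline that stays visible on any background. The rect construction in isolation:

// Sketch: build a unit-space crop rect from any two drag endpoints.
// CGPointMin/CGPointMax pick the top-left and bottom-right corners per axis;
// CGPointClamp01 keeps the rect within the image bounds.
func unitCropRect(from startUnit: CGPoint, to endUnit: CGPoint) -> CGRect {
    let unitMin = CGPointClamp01(CGPointMin(startUnit, endUnit))
    let unitMax = CGPointClamp01(CGPointMax(startUnit, endUnit))
    return CGRect(x: unitMin.x,
                  y: unitMin.y,
                  width: unitMax.x - unitMin.x,
                  height: unitMax.y - unitMin.y)
}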
// MARK: - ImageEditorModelDelegate
private func edit(textItem: ImageEditorTextItem) {
Logger.verbose("")
toggle(editorMode: .none)
isEditingTextItem = true
// TODO:
let maxTextWidthPoints = model.srcImageSizePixels.width * ImageEditorTextItem.kDefaultUnitWidth
// let maxTextWidthPoints = canvasView.imageView.width() * ImageEditorTextItem.kDefaultUnitWidth
let textEditor = ImageEditorTextViewController(delegate: self, textItem: textItem, maxTextWidthPoints: maxTextWidthPoints)
self.delegate?.imageEditor(presentFullScreenOverlay: textEditor)
}
// MARK: - Crop Tool
private func presentCropTool() {
Logger.verbose("")
toggle(editorMode: .none)
guard let srcImage = canvasView.loadSrcImage() else {
owsFailDebug("Couldn't load src image.")
return
}
// We want to render a preview image that "flattens" all of the brush strokes and
// text items into the background image without applying the transform (e.g. rotation),
// so we use a default transform.
let previewTransform = ImageEditorTransform.defaultTransform(srcImageSizePixels: model.srcImageSizePixels)
guard let previewImage = ImageEditorCanvasView.renderForOutput(model: model, transform: previewTransform) else {
owsFailDebug("Couldn't generate preview image.")
return
}
let cropTool = ImageEditorCropViewController(delegate: self, model: model, srcImage: srcImage, previewImage: previewImage)
self.delegate?.imageEditor(presentFullScreenOverlay: cropTool)
}
}
// MARK: -
extension ImageEditorView: UIGestureRecognizerDelegate {
@objc public func gestureRecognizer(_ gestureRecognizer: UIGestureRecognizer, shouldReceive touch: UITouch) -> Bool {
guard moveTextGestureRecognizer == gestureRecognizer else {
owsFailDebug("Unexpected gesture.")
return false
}
guard editorMode == .none else {
// We only filter touches when in default mode.
return true
}
let location = touch.location(in: canvasView.gestureReferenceView)
let isInTextArea = canvasView.textLayer(forLocation: location) != nil
return isInTextArea
}
}
// MARK: -
extension ImageEditorView: ImageEditorModelObserver {
public func imageEditorModelDidChange(before: ImageEditorContents,
after: ImageEditorContents) {
if before.imagePath != after.imagePath {
_ = updateImageView()
}
updateAllContent()
updateButtons()
}
public func imageEditorModelDidChange(changedItemIds: [String]) {
updateContent(changedItemIds: changedItemIds)
updateButtons()
}
}
// MARK: - Accessor Overrides
// MARK: -
@objc public override var bounds: CGRect {
didSet {
if oldValue != bounds {
updateAllContent()
}
}
}
@objc public override var frame: CGRect {
didSet {
if oldValue != frame {
updateAllContent()
}
}
}
// MARK: - Content
var contentLayerMap = [String: CALayer]()
internal func updateAllContent() {
AssertIsOnMainThread()
// Don't animate changes.
CATransaction.begin()
CATransaction.setDisableActions(true)
for layer in contentLayerMap.values {
layer.removeFromSuperlayer()
}
contentLayerMap.removeAll()
if bounds.width > 0,
bounds.height > 0 {
for item in model.items() {
let viewSize = layersView.bounds.size
guard let layer = ImageEditorView.layerForItem(item: item,
viewSize: viewSize) else {
continue
}
layersView.layer.addSublayer(layer)
contentLayerMap[item.itemId] = layer
}
}
CATransaction.commit()
}
internal func updateContent(changedItemIds: [String]) {
AssertIsOnMainThread()
// Don't animate changes.
CATransaction.begin()
CATransaction.setDisableActions(true)
// Remove all changed items.
for itemId in changedItemIds {
if let layer = contentLayerMap[itemId] {
layer.removeFromSuperlayer()
}
contentLayerMap.removeValue(forKey: itemId)
}
if bounds.width > 0,
bounds.height > 0 {
// Create layers for inserted and updated items.
for itemId in changedItemIds {
guard let item = model.item(forId: itemId) else {
// Item was deleted.
continue
}
// Item was inserted or updated.
let viewSize = layersView.bounds.size
guard let layer = ImageEditorView.layerForItem(item: item,
viewSize: viewSize) else {
continue
}
layersView.layer.addSublayer(layer)
contentLayerMap[item.itemId] = layer
}
}
CATransaction.commit()
}
private class func layerForItem(item: ImageEditorItem,
viewSize: CGSize) -> CALayer? {
AssertIsOnMainThread()
switch item.itemType {
case .test:
owsFailDebug("Unexpected test item.")
return nil
case .stroke:
guard let strokeItem = item as? ImageEditorStrokeItem else {
owsFailDebug("Item has unexpected type: \(type(of: item)).")
return nil
}
return strokeLayerForItem(item: strokeItem, viewSize: viewSize)
case .text:
guard let textItem = item as? ImageEditorTextItem else {
owsFailDebug("Item has unexpected type: \(type(of: item)).")
return nil
}
return textLayerForItem(item: textItem, viewSize: viewSize)
}
}
private class func strokeLayerForItem(item: ImageEditorStrokeItem,
viewSize: CGSize) -> CALayer? {
AssertIsOnMainThread()
let strokeWidth = ImageEditorStrokeItem.strokeWidth(forUnitStrokeWidth: item.unitStrokeWidth,
dstSize: viewSize)
let unitSamples = item.unitSamples
guard unitSamples.count > 0 else {
// Not an error; the stroke doesn't have enough samples to render yet.
return nil
}
let shapeLayer = CAShapeLayer()
shapeLayer.lineWidth = strokeWidth
shapeLayer.strokeColor = item.color.cgColor
shapeLayer.frame = CGRect(origin: .zero, size: viewSize)
let transformSampleToPoint = { (unitSample: CGPoint) -> CGPoint in
return CGPoint(x: viewSize.width * unitSample.x,
y: viewSize.height * unitSample.y)
}
// TODO: Use bezier curves to smooth stroke.
let bezierPath = UIBezierPath()
let points = applySmoothing(to: unitSamples.map { (unitSample) in
transformSampleToPoint(unitSample)
})
var previousForwardVector = CGPoint.zero
for index in 0..<points.count {
let point = points[index]
let forwardVector: CGPoint
if points.count <= 1 {
// Skip forward vectors.
forwardVector = .zero
} else if index == 0 {
// First sample.
let nextPoint = points[index + 1]
forwardVector = CGPointSubtract(nextPoint, point)
} else if index == points.count - 1 {
// Last sample.
let previousPoint = points[index - 1]
forwardVector = CGPointSubtract(point, previousPoint)
} else {
// Middle samples.
let previousPoint = points[index - 1]
let previousPointForwardVector = CGPointSubtract(point, previousPoint)
let nextPoint = points[index + 1]
let nextPointForwardVector = CGPointSubtract(nextPoint, point)
forwardVector = CGPointScale(CGPointAdd(previousPointForwardVector, nextPointForwardVector), 0.5)
}
if index == 0 {
// First sample.
bezierPath.move(to: point)
if points.count == 1 {
bezierPath.addLine(to: point)
}
} else {
let previousPoint = points[index - 1]
// We apply more than one kind of smoothing.
// This smoothing avoids rendering "angled segments"
// by drawing the stroke as a series of curves.
// We use bezier curves and infer the control points
// from the "next" and "prev" points.
//
// This factor controls how much we're smoothing.
//
// * 0.0 = No smoothing.
//
// TODO: Tune this variable once we have stroke input.
let controlPointFactor: CGFloat = 0.25
let controlPoint1 = CGPointAdd(previousPoint, CGPointScale(previousForwardVector, +controlPointFactor))
let controlPoint2 = CGPointAdd(point, CGPointScale(forwardVector, -controlPointFactor))
// We're using Cubic curves.
bezierPath.addCurve(to: point, controlPoint1: controlPoint1, controlPoint2: controlPoint2)
}
previousForwardVector = forwardVector
}
shapeLayer.path = bezierPath.cgPath
shapeLayer.fillColor = nil
shapeLayer.lineCap = kCALineCapRound
shapeLayer.lineJoin = kCALineJoinRound
return shapeLayer
}
private class func textLayerForItem(item: ImageEditorTextItem,
viewSize: CGSize) -> CALayer? {
AssertIsOnMainThread()
let layer = EditorTextLayer(itemId: item.itemId)
layer.string = item.text
layer.foregroundColor = item.color.cgColor
layer.font = CGFont(item.font.fontName as CFString)
layer.fontSize = item.font.pointSize
layer.isWrapped = true
layer.alignmentMode = kCAAlignmentCenter
// I don't think we need to enable allowsFontSubpixelQuantization
// or set truncationMode.
// This text needs to be rendered at a scale that reflects the screen scaling
// AND the item's scaling.
layer.contentsScale = UIScreen.main.scale * item.scaling
// TODO: Min with measured width.
let maxWidth = viewSize.width * item.unitWidth
let maxSize = CGSize(width: maxWidth, height: CGFloat.greatestFiniteMagnitude)
// TODO: Is there a more accurate way to measure text in a CATextLayer?
// CoreText?
let textBounds = (item.text as NSString).boundingRect(with: maxSize,
options: [
.usesLineFragmentOrigin,
.usesFontLeading
],
attributes: [
.font: item.font
],
context: nil)
let center = CGPoint(x: viewSize.width * item.unitCenter.x,
y: viewSize.height * item.unitCenter.y)
let layerSize = CGSizeCeil(textBounds.size)
layer.frame = CGRect(origin: CGPoint(x: center.x - layerSize.width * 0.5,
y: center.y - layerSize.height * 0.5),
size: layerSize)
let transform = CGAffineTransform.identity.scaledBy(x: item.scaling, y: item.scaling).rotated(by: item.rotationRadians)
layer.setAffineTransform(transform)
return layer
}
// We apply more than one kind of smoothing.
//
// This (simple) smoothing reduces jitter from the touch sensor.
private class func applySmoothing(to points: [CGPoint]) -> [CGPoint] {
AssertIsOnMainThread()
var result = [CGPoint]()
for index in 0..<points.count {
let point = points[index]
if index == 0 {
// First sample.
result.append(point)
} else if index == points.count - 1 {
// Last sample.
result.append(point)
} else {
// Middle samples.
let lastPoint = points[index - 1]
let nextPoint = points[index + 1]
let alpha: CGFloat = 0.1
let smoothedPoint = CGPointAdd(CGPointScale(point, 1.0 - 2.0 * alpha),
CGPointAdd(CGPointScale(lastPoint, alpha),
CGPointScale(nextPoint, alpha)))
result.append(smoothedPoint)
}
}
return result
}
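With alpha = 0.1, each interior point keeps 80% of its own position and borrows 10% from each neighbor, so a single jittery sample is pulled back toward the line through its neighbors:

// Worked example of applySmoothing with alpha = 0.1:
//   input:  [(0, 0), (10, 0), (10, 10)]
//   middle: 0.8 * (10, 0) + 0.1 * (0, 0) + 0.1 * (10, 10) = (9, 1)
//   output: [(0, 0), (9, 1), (10, 10)]  (endpoints pass through unchanged)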
// MARK: - Actions
// Returns nil on error.
@objc
public class func renderForOutput(model: ImageEditorModel) -> UIImage? {
// TODO: Do we want to render off the main thread?
AssertIsOnMainThread()
// Render output at same size as source image.
let dstSizePixels = model.srcImageSizePixels
let dstScale: CGFloat = 1.0 // The size is specified in pixels, not in points.
let hasAlpha = NSData.hasAlpha(forValidImageFilePath: model.currentImagePath)
guard let srcImage = UIImage(contentsOfFile: model.currentImagePath) else {
owsFailDebug("Could not load src image.")
return nil
}
// We use a UIImageView + UIView.renderAsImage() instead of a CoreGraphics context
// because CALayer.renderInContext() doesn't honor CALayer properties like frame,
// transform, etc.
let imageView = UIImageView(image: srcImage)
imageView.frame = CGRect(origin: .zero, size: dstSizePixels)
for item in model.items() {
guard let layer = layerForItem(item: item,
viewSize: dstSizePixels) else {
Logger.error("Couldn't create layer for item.")
continue
}
layer.contentsScale = dstScale * item.outputScale()
imageView.layer.addSublayer(layer)
}
let image = imageView.renderAsImage(opaque: !hasAlpha, scale: dstScale)
return image
}
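After this commit, the equivalent entry point is ImageEditorCanvasView.renderForOutput(model:transform:), which presentCropTool() above already uses for its preview; a hypothetical call for final output would pass the model's current transform instead of a default one:

// Hypothetical usage sketch: flatten the model with its current transform
// (crop/rotation) applied, unlike the crop preview, which used a default transform.
guard let outputImage = ImageEditorCanvasView.renderForOutput(model: model,
                                                              transform: model.currentTransform()) else {
    owsFailDebug("Could not render output image.")
    return
}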
// MARK: - ImageEditorTextViewControllerDelegate
extension ImageEditorView: ImageEditorTextViewControllerDelegate {
public func textEditDidComplete(textItem: ImageEditorTextItem, text: String?) {
AssertIsOnMainThread()
@ -1084,49 +638,17 @@ public class ImageEditorView: UIView, ImageEditorModelDelegate, ImageEditorTextV
public func textEditDidCancel() {
isEditingTextItem = false
}
}
// MARK: - UIGestureRecognizerDelegate
// MARK: -
@objc public func gestureRecognizer(_ gestureRecognizer: UIGestureRecognizer, shouldReceive touch: UITouch) -> Bool {
guard let editorGestureRecognizer = editorGestureRecognizer else {
owsFailDebug("Missing editorGestureRecognizer.")
return false
}
guard editorGestureRecognizer == gestureRecognizer else {
owsFailDebug("Unexpected gesture.")
return false
}
guard editorMode == .none else {
// We only filter touches when in default mode.
return true
}
let isInTextArea = textLayer(forTouch: touch) != nil
return isInTextArea
extension ImageEditorView: ImageEditorCropViewControllerDelegate {
public func cropDidComplete(transform: ImageEditorTransform) {
// TODO: Ignore no-change updates.
model.replace(transform: transform)
}
private func textLayer(forTouch touch: UITouch) -> EditorTextLayer? {
let point = touch.location(in: layersView)
return textLayer(forLocation: point)
}
private func textLayer(forGestureRecognizer gestureRecognizer: UIGestureRecognizer) -> EditorTextLayer? {
let point = gestureRecognizer.location(in: layersView)
return textLayer(forLocation: point)
}
private func textLayer(forLocation point: CGPoint) -> EditorTextLayer? {
guard let sublayers = layersView.layer.sublayers else {
return nil
}
for layer in sublayers {
guard let textLayer = layer as? EditorTextLayer else {
continue
}
if textLayer.hitTest(point) != nil {
return textLayer
}
}
return nil
public func cropDidCancel() {
// TODO:
}
}

View File

@ -0,0 +1,104 @@
//
// Copyright (c) 2019 Open Whisper Systems. All rights reserved.
//
import Foundation
public class OrderedDictionary<ValueType>: NSObject {
public typealias KeyType = String
var keyValueMap = [KeyType: ValueType]()
var orderedKeys = [KeyType]()
public override init() {
}
// Used to clone copies of instances of this class.
public init(keyValueMap: [KeyType: ValueType],
orderedKeys: [KeyType]) {
self.keyValueMap = keyValueMap
self.orderedKeys = orderedKeys
}
// Since the contents are immutable, we only modify copies
// made with this method.
public func clone() -> OrderedDictionary<ValueType> {
return OrderedDictionary(keyValueMap: keyValueMap, orderedKeys: orderedKeys)
}
public func value(forKey key: KeyType) -> ValueType? {
return keyValueMap[key]
}
public func append(key: KeyType, value: ValueType) {
if keyValueMap[key] != nil {
owsFailDebug("Unexpected duplicate key in key map: \(key)")
}
keyValueMap[key] = value
if orderedKeys.contains(key) {
owsFailDebug("Unexpected duplicate key in key list: \(key)")
} else {
orderedKeys.append(key)
}
if orderedKeys.count != keyValueMap.count {
owsFailDebug("Invalid contents.")
}
}
public func replace(key: KeyType, value: ValueType) {
if keyValueMap[key] == nil {
owsFailDebug("Missing key in key map: \(key)")
}
keyValueMap[key] = value
if !orderedKeys.contains(key) {
owsFailDebug("Missing key in key list: \(key)")
}
if orderedKeys.count != keyValueMap.count {
owsFailDebug("Invalid contents.")
}
}
public func remove(key: KeyType) {
if keyValueMap[key] == nil {
owsFailDebug("Missing key in key map: \(key)")
} else {
keyValueMap.removeValue(forKey: key)
}
if !orderedKeys.contains(key) {
owsFailDebug("Missing key in key list: \(key)")
} else {
orderedKeys = orderedKeys.filter { $0 != key }
}
if orderedKeys.count != keyValueMap.count {
owsFailDebug("Invalid contents.")
}
}
public var count: Int {
if orderedKeys.count != keyValueMap.count {
owsFailDebug("Invalid contents.")
}
return orderedKeys.count
}
public func orderedValues() -> [ValueType] {
var values = [ValueType]()
for key in orderedKeys {
guard let value = self.keyValueMap[key] else {
owsFailDebug("Missing value")
continue
}
values.append(value)
}
return values
}
}
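A usage sketch of this collection; ImageEditorContents presumably relies on the stable iteration order for item z-ordering, and the item values here are illustrative:

// Hypothetical usage sketch: unlike Swift's Dictionary, iteration order
// matches append order.
let items = OrderedDictionary<ImageEditorItem>()
items.append(key: strokeItem.itemId, value: strokeItem)
items.append(key: textItem.itemId, value: textItem)
for item in items.orderedValues() {
    // Visited in append order: strokeItem, then textItem.
}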

View File

@ -1,5 +1,5 @@
//
// Copyright (c) 2018 Open Whisper Systems. All rights reserved.
// Copyright (c) 2019 Open Whisper Systems. All rights reserved.
//
#import "UIColor+OWS.h"
@ -119,6 +119,7 @@ NS_ASSUME_NONNULL_BEGIN
[otherColor getRed:&r1 green:&g1 blue:&b1 alpha:&a1];
OWSAssertDebug(result);
alpha = CGFloatClamp01(alpha);
return [UIColor colorWithRed:CGFloatLerp(r0, r1, alpha)
green:CGFloatLerp(g0, g1, alpha)
blue:CGFloatLerp(b0, b1, alpha)

View File

@ -22,7 +22,7 @@ CGFloat ScaleFromIPhone5To7Plus(CGFloat iPhone5Value, CGFloat iPhone7PlusValue)
CGFloat screenShortDimension = ScreenShortDimension();
return (CGFloat)round(CGFloatLerp(iPhone5Value,
iPhone7PlusValue,
CGFloatInverseLerp(screenShortDimension, kIPhone5ScreenWidth, kIPhone7PlusScreenWidth)));
CGFloatClamp01(CGFloatInverseLerp(screenShortDimension, kIPhone5ScreenWidth, kIPhone7PlusScreenWidth))));
}
CGFloat ScaleFromIPhone5(CGFloat iPhone5Value)

View File

@ -86,4 +86,116 @@ extension UIView {
view.autoSetDimension(.height, toSize: height)
return view
}
@objc
public func applyScaleAspectFitLayout(subview: UIView, aspectRatio: CGFloat) -> [NSLayoutConstraint] {
guard subviews.contains(subview) else {
owsFailDebug("Not a subview.")
return []
}
// This emulates the behavior of contentMode = .scaleAspectFit using
// iOS auto layout constraints.
//
// This allows ConversationInputToolbar to place the "cancel" button
// in the upper-right hand corner of the preview content.
var constraints = [NSLayoutConstraint]()
constraints.append(contentsOf: subview.autoCenterInSuperview())
constraints.append(subview.autoPin(toAspectRatio: aspectRatio))
constraints.append(subview.autoMatch(.width, to: .width, of: self, withMultiplier: 1.0, relation: .lessThanOrEqual))
constraints.append(subview.autoMatch(.height, to: .height, of: self, withMultiplier: 1.0, relation: .lessThanOrEqual))
return constraints
}
}
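A hypothetical call site for the helper above (containerView, imageView, and image are illustrative names):

// Hypothetical usage sketch: emulate contentMode = .scaleAspectFit with
// constraints, so sibling views can be pinned to the fitted subview's edges.
containerView.addSubview(imageView)
let aspectRatio = image.size.width / image.size.height
let constraints = containerView.applyScaleAspectFitLayout(subview: imageView,
                                                          aspectRatio: aspectRatio)
// Keep `constraints` to deactivate later if the image (and its aspect ratio) changes.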
public extension CGFloat {
// Linear interpolation
public func lerp(_ minValue: CGFloat, _ maxValue: CGFloat) -> CGFloat {
return CGFloatLerp(minValue, maxValue, self)
}
// Inverse linear interpolation
public func inverseLerp(_ minValue: CGFloat, _ maxValue: CGFloat, shouldClamp: Bool = false) -> CGFloat {
let value = CGFloatInverseLerp(self, minValue, maxValue)
return (shouldClamp ? CGFloatClamp01(value) : value)
}
public static let halfPi: CGFloat = CGFloat.pi * 0.5
}
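Worked examples of the interpolation helpers above:

// lerp treats self as the interpolation fraction:
let mid = CGFloat(0.25).lerp(0, 200)                               // 50
// inverseLerp recovers the fraction, optionally clamped to [0, 1]:
let alpha = CGFloat(50).inverseLerp(0, 200)                        // 0.25
let clamped = CGFloat(250).inverseLerp(0, 200, shouldClamp: true)  // 1.0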
public extension CGPoint {
public func toUnitCoordinates(viewBounds: CGRect, shouldClamp: Bool) -> CGPoint {
return CGPoint(x: (x - viewBounds.origin.x).inverseLerp(0, viewBounds.width, shouldClamp: shouldClamp),
y: (y - viewBounds.origin.y).inverseLerp(0, viewBounds.height, shouldClamp: shouldClamp))
}
public func toUnitCoordinates(viewSize: CGSize, shouldClamp: Bool) -> CGPoint {
return toUnitCoordinates(viewBounds: CGRect(origin: .zero, size: viewSize), shouldClamp: shouldClamp)
}
public func fromUnitCoordinates(viewSize: CGSize) -> CGPoint {
return CGPoint(x: x.lerp(0, viewSize.width),
y: y.lerp(0, viewSize.height))
}
public func inverse() -> CGPoint {
return CGPoint(x: -x, y: -y)
}
public func plus(_ value: CGPoint) -> CGPoint {
return CGPointAdd(self, value)
}
public func minus(_ value: CGPoint) -> CGPoint {
return CGPointSubtract(self, value)
}
public static let unit: CGPoint = CGPoint(x: 1.0, y: 1.0)
public static let unitMidpoint: CGPoint = CGPoint(x: 0.5, y: 0.5)
public func applyingInverse(_ transform: CGAffineTransform) -> CGPoint {
return applying(transform.inverted())
}
}
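These mirror the unit-coordinate conversions used throughout the editor; a round-trip example:

// Round-trip example for the unit-coordinate helpers above:
let viewSize = CGSize(width: 200, height: 100)
let unit = CGPoint(x: 50, y: 25).toUnitCoordinates(viewSize: viewSize, shouldClamp: false)
// unit == CGPoint(x: 0.25, y: 0.25)
let restored = unit.fromUnitCoordinates(viewSize: viewSize)
// restored == CGPoint(x: 50, y: 25)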
public extension CGRect {
public var center: CGPoint {
return CGPoint(x: midX, y: midY)
}
public var topLeft: CGPoint {
return origin
}
public var bottomRight: CGPoint {
return CGPoint(x: maxX, y: maxY)
}
}
public extension CGAffineTransform {
public static func translate(_ point: CGPoint) -> CGAffineTransform {
return CGAffineTransform(translationX: point.x, y: point.y)
}
public static func scale(_ scaling: CGFloat) -> CGAffineTransform {
return CGAffineTransform(scaleX: scaling, y: scaling)
}
public func translate(_ point: CGPoint) -> CGAffineTransform {
return translatedBy(x: point.x, y: point.y)
}
public func scale(_ scaling: CGFloat) -> CGAffineTransform {
return scaledBy(x: scaling, y: scaling)
}
public func rotate(_ angleRadians: CGFloat) -> CGAffineTransform {
return rotated(by: angleRadians)
}
// public func forAnchorPoint(viewSize: CGSize) -> CGAffineTransform {
// let viewCenter = CGRect(origin: .zero, size: viewSize).center
// return CGAffineTransform.translate(viewCenter.inverse()).concatenating(self).translate(viewCenter)
// }
}
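These fluent wrappers let transform construction chain left to right; note that the order of the calls matters, since affine concatenation is not commutative:

// Sketch: chain the fluent helpers above into a single transform.
// Swapping .scale and .rotate would generally yield a different transform.
let transform = CGAffineTransform.translate(CGPoint(x: 10, y: 20))
    .scale(2.0)
    .rotate(CGFloat.halfPi)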

View File

@ -910,8 +910,6 @@ NSString *const kNSNotification_OWSWebSocketStateDidChange = @"kNSNotification_O
{
OWSAssertIsOnMainThread();
return NO;
// Don't open socket in app extensions.
if (!CurrentAppContext().isMainApp) {
return NO;

View File

@ -16,8 +16,6 @@ CG_INLINE CGFloat CGFloatClamp01(CGFloat value)
CG_INLINE CGFloat CGFloatLerp(CGFloat left, CGFloat right, CGFloat alpha)
{
alpha = CGFloatClamp01(alpha);
return (left * (1.f - alpha)) + (right * alpha);
}