auto-migrate to Swift5

This commit is contained in:
Michael Kirk 2019-03-30 07:22:31 -06:00
parent 87094a8fb1
commit 64a0c4bfaa
68 changed files with 275 additions and 208 deletions

View File

@ -2875,7 +2875,7 @@
453518671FC635DD00210559 = {
CreatedOnToolsVersion = 9.2;
DevelopmentTeam = U68MSDN6DR;
LastSwiftMigration = 0910;
LastSwiftMigration = 1020;
ProvisioningStyle = Automatic;
SystemCapabilities = {
com.apple.ApplicationGroups.iOS = {
@ -2895,12 +2895,12 @@
453518911FC63DBF00210559 = {
CreatedOnToolsVersion = 9.2;
DevelopmentTeam = U68MSDN6DR;
LastSwiftMigration = 0930;
LastSwiftMigration = 1020;
ProvisioningStyle = Automatic;
};
D221A088169C9E5E00537ABF = {
DevelopmentTeam = U68MSDN6DR;
LastSwiftMigration = 0930;
LastSwiftMigration = 1020;
ProvisioningStyle = Automatic;
SystemCapabilities = {
com.apple.ApplicationGroups.iOS = {
@ -2931,7 +2931,7 @@
};
D221A0A9169C9E5F00537ABF = {
DevelopmentTeam = U68MSDN6DR;
LastSwiftMigration = 0930;
LastSwiftMigration = 1020;
ProvisioningStyle = Automatic;
TestTargetID = D221A088169C9E5E00537ABF;
};
@ -2942,6 +2942,7 @@
developmentRegion = English;
hasScannedForEncodings = 0;
knownRegions = (
English,
en,
az_AZ,
bg_BG,
@ -3914,7 +3915,7 @@
SWIFT_ACTIVE_COMPILATION_CONDITIONS = DEBUG;
SWIFT_OBJC_BRIDGING_HEADER = "SignalShareExtension/SignalShareExtension-Bridging-Header.h";
SWIFT_OPTIMIZATION_LEVEL = "-Onone";
SWIFT_VERSION = 4.0;
SWIFT_VERSION = 5.0;
TARGETED_DEVICE_FAMILY = 1;
};
name = Debug;
@ -3979,7 +3980,7 @@
SKIP_INSTALL = YES;
SWIFT_OBJC_BRIDGING_HEADER = "SignalShareExtension/SignalShareExtension-Bridging-Header.h";
SWIFT_OPTIMIZATION_LEVEL = "-Owholemodule";
SWIFT_VERSION = 4.0;
SWIFT_VERSION = 5.0;
TARGETED_DEVICE_FAMILY = 1;
VALIDATE_PRODUCT = YES;
};
@ -4032,8 +4033,7 @@
SWIFT_ACTIVE_COMPILATION_CONDITIONS = DEBUG;
SWIFT_OBJC_BRIDGING_HEADER = "";
SWIFT_OPTIMIZATION_LEVEL = "-Onone";
SWIFT_SWIFT3_OBJC_INFERENCE = Off;
SWIFT_VERSION = 4.0;
SWIFT_VERSION = 5.0;
TARGETED_DEVICE_FAMILY = "1,2";
VERSIONING_SYSTEM = "apple-generic";
VERSION_INFO_PREFIX = "";
@ -4106,8 +4106,7 @@
SKIP_INSTALL = YES;
SWIFT_OBJC_BRIDGING_HEADER = "";
SWIFT_OPTIMIZATION_LEVEL = "-Owholemodule";
SWIFT_SWIFT3_OBJC_INFERENCE = Off;
SWIFT_VERSION = 4.0;
SWIFT_VERSION = 5.0;
TARGETED_DEVICE_FAMILY = "1,2";
VALIDATE_PRODUCT = YES;
VERSIONING_SYSTEM = "apple-generic";
@ -4328,8 +4327,7 @@
SDKROOT = iphoneos;
SWIFT_OBJC_BRIDGING_HEADER = "Signal/src/Signal-Bridging-Header.h";
SWIFT_OPTIMIZATION_LEVEL = "-Onone";
SWIFT_SWIFT3_OBJC_INFERENCE = Off;
SWIFT_VERSION = 4.0;
SWIFT_VERSION = 5.0;
TEST_AFTER_BUILD = YES;
VALID_ARCHS = "arm64 armv7 armv7s";
WRAPPER_EXTENSION = app;
@ -4392,8 +4390,7 @@
RUN_CLANG_STATIC_ANALYZER = YES;
SDKROOT = iphoneos;
SWIFT_OBJC_BRIDGING_HEADER = "Signal/src/Signal-Bridging-Header.h";
SWIFT_SWIFT3_OBJC_INFERENCE = Off;
SWIFT_VERSION = 4.0;
SWIFT_VERSION = 5.0;
TEST_AFTER_BUILD = YES;
VALID_ARCHS = "arm64 armv7 armv7s";
WRAPPER_EXTENSION = app;
@ -4452,8 +4449,7 @@
PROVISIONING_PROFILE = "";
SWIFT_OBJC_BRIDGING_HEADER = "Signal/test/SignalTests-Bridging-Header.h";
SWIFT_OPTIMIZATION_LEVEL = "-Onone";
SWIFT_SWIFT3_OBJC_INFERENCE = Off;
SWIFT_VERSION = 4.0;
SWIFT_VERSION = 5.0;
TEST_HOST = "$(BUNDLE_LOADER)";
VALID_ARCHS = "arm64 armv7s armv7 i386 x86_64";
};
@ -4510,8 +4506,7 @@
PROVISIONING_PROFILE = "";
SWIFT_OBJC_BRIDGING_HEADER = "Signal/test/SignalTests-Bridging-Header.h";
SWIFT_OPTIMIZATION_LEVEL = "-Owholemodule";
SWIFT_SWIFT3_OBJC_INFERENCE = Off;
SWIFT_VERSION = 4.0;
SWIFT_VERSION = 5.0;
TEST_HOST = "$(BUNDLE_LOADER)";
VALID_ARCHS = "arm64 armv7s armv7 i386 x86_64";
};

View File

@ -83,7 +83,7 @@ public class AccountManager: NSObject {
private func registerForTextSecure(verificationCode: String,
pin: String?) -> Promise<Void> {
return Promise { resolver in
return Promise<Any> { resolver in
tsAccountManager.verifyAccount(withCode: verificationCode,
pin: pin,
success: resolver.fulfill,
@ -106,7 +106,7 @@ public class AccountManager: NSObject {
// MARK: Message Delivery
func updatePushTokens(pushToken: String, voipToken: String) -> Promise<Void> {
return Promise { resolver in
return Promise<Any> { resolver in
tsAccountManager.registerForPushNotifications(pushToken: pushToken,
voipToken: voipToken,
success: resolver.fulfill,

View File

@ -27,12 +27,12 @@ class CompareSafetyNumbersActivity: UIActivity {
// MARK: UIActivity
override class var activityCategory: UIActivityCategory {
override class var activityCategory: UIActivity.Category {
get { return .action }
}
override var activityType: UIActivityType? {
get { return UIActivityType(rawValue: CompareSafetyNumbersActivityType) }
override var activityType: UIActivity.ActivityType? {
get { return UIActivity.ActivityType(rawValue: CompareSafetyNumbersActivityType) }
}
override var activityTitle: String? {

View File

@ -27,7 +27,7 @@ import Foundation
}
@objc public func openSystemSettings() {
openURL(URL(string: UIApplicationOpenSettingsURLString)!)
openURL(URL(string: UIApplication.openSettingsURLString)!)
}
}

View File

@ -52,7 +52,7 @@ enum NotificationHapticFeedbackType {
}
extension NotificationHapticFeedbackType {
var uiNotificationFeedbackType: UINotificationFeedbackType {
var uiNotificationFeedbackType: UINotificationFeedbackGenerator.FeedbackType {
switch self {
case .error: return .error
case .success: return .success

View File

@ -281,8 +281,13 @@ extension OWSSound {
func notificationSound(isQuiet: Bool) -> UNNotificationSound {
guard let filename = OWSSounds.filename(for: self, quiet: isQuiet) else {
owsFailDebug("filename was unexpectedly nil")
return UNNotificationSound.default()
return UNNotificationSound.default
}
return UNNotificationSound(named: filename)
return UNNotificationSound(named: convertToUNNotificationSoundName(filename))
}
}
// Helper function inserted by Swift 4.2 migrator.
// Wraps a raw sound filename string in the UNNotificationSoundName newtype that
// UNNotificationSound(named:) requires as of the iOS 12 SDK headers.
// NOTE(review): could be inlined at the single call site above — TODO confirm no other callers.
fileprivate func convertToUNNotificationSoundName(_ input: String) -> UNNotificationSoundName {
return UNNotificationSoundName(rawValue: input)
}

View File

@ -128,7 +128,7 @@ class AddContactShareToExistingContactViewController: ContactsPicker, ContactsPi
// We want to pop *this* view *and* the still presented CNContactViewController in a single animation.
// Note this happens for *cancel* and for *done*. Unfortunately, I don't know of a way to detect the difference
// between the two, since both just call this method.
guard let myIndex = navigationController.viewControllers.index(of: self) else {
guard let myIndex = navigationController.viewControllers.firstIndex(of: self) else {
owsFailDebug("myIndex was unexpectedly nil")
navigationController.popViewController(animated: true)
navigationController.popViewController(animated: true)

View File

@ -124,7 +124,7 @@ class CallViewController: OWSViewController, CallObserver, CallServiceObserver,
}
// Don't use receiver when video is enabled. Only bluetooth or speaker
return portDescription.portType != AVAudioSessionPortBuiltInMic
return convertFromAVAudioSessionPort(portDescription.portType) != convertFromAVAudioSessionPort(AVAudioSession.Port.builtInMic)
}
}
return Set(appropriateForVideo)
@ -602,7 +602,7 @@ class CallViewController: OWSViewController, CallObserver, CallServiceObserver,
internal func updateLocalVideoLayout() {
if !localVideoView.isHidden {
localVideoView.superview?.bringSubview(toFront: localVideoView)
localVideoView.superview?.bringSubviewToFront(localVideoView)
}
updateCallUI(callState: call.state)
@ -1225,3 +1225,8 @@ extension CallViewController: CallVideoHintViewDelegate {
updateRemoteVideoLayout()
}
}
// Helper function inserted by Swift 4.2 migrator.
// Unwraps the AVAudioSession.Port newtype back to its raw String so the two
// port values above can be compared as plain strings.
// NOTE(review): comparing the typed AVAudioSession.Port values directly would
// make this shim unnecessary — TODO confirm and simplify in a follow-up.
fileprivate func convertFromAVAudioSessionPort(_ input: AVAudioSession.Port) -> String {
return input.rawValue
}

View File

@ -285,7 +285,7 @@ class ColorPickerView: UIView, ColorViewDelegate {
let kRowLength = 4
let rows: [UIView] = colorViews.chunked(by: kRowLength).map { colorViewsInRow in
let row = UIStackView(arrangedSubviews: colorViewsInRow)
row.distribution = UIStackViewDistribution.equalSpacing
row.distribution = UIStackView.Distribution.equalSpacing
return row
}
let rowsStackView = UIStackView(arrangedSubviews: rows)

View File

@ -320,7 +320,7 @@ class ContactViewController: OWSViewController, ContactShareViewHelperDelegate {
// Show no action buttons for contacts without a phone number.
break
case .unknown:
let activityIndicator = UIActivityIndicatorView(activityIndicatorStyle: .whiteLarge)
let activityIndicator = UIActivityIndicatorView(style: .whiteLarge)
topView.addSubview(activityIndicator)
activityIndicator.autoPinEdge(.top, to: .bottom, of: lastView, withOffset: 10)
activityIndicator.autoHCenterInSuperview()

View File

@ -124,7 +124,7 @@ public class ContactsPicker: OWSViewController, UITableViewDelegate, UITableView
// Auto size cells for dynamic type
tableView.estimatedRowHeight = 60.0
tableView.rowHeight = UITableViewAutomaticDimension
tableView.rowHeight = UITableView.automaticDimension
tableView.estimatedRowHeight = 60
tableView.allowsMultipleSelection = allowsMultipleSelection
@ -136,7 +136,7 @@ public class ContactsPicker: OWSViewController, UITableViewDelegate, UITableView
reloadContacts()
updateSearchResults(searchText: "")
NotificationCenter.default.addObserver(self, selector: #selector(self.didChangePreferredContentSize), name: NSNotification.Name.UIContentSizeCategoryDidChange, object: nil)
NotificationCenter.default.addObserver(self, selector: #selector(self.didChangePreferredContentSize), name: UIContentSizeCategory.didChangeNotification, object: nil)
}
@objc
@ -172,7 +172,7 @@ public class ContactsPicker: OWSViewController, UITableViewDelegate, UITableView
let title = NSLocalizedString("INVITE_FLOW_REQUIRES_CONTACT_ACCESS_TITLE", comment: "Alert title when contacts disabled while trying to invite contacts to signal")
let body = NSLocalizedString("INVITE_FLOW_REQUIRES_CONTACT_ACCESS_BODY", comment: "Alert body when contacts disabled while trying to invite contacts to signal")
let alert = UIAlertController(title: title, message: body, preferredStyle: UIAlertControllerStyle.alert)
let alert = UIAlertController(title: title, message: body, preferredStyle: UIAlertController.Style.alert)
let dismissText = CommonStrings.cancelButton

View File

@ -204,8 +204,8 @@ public class ConversationMediaView: UIView {
animatedImageView.contentMode = .scaleAspectFill
// Use trilinear filters for better scaling quality at
// some performance cost.
animatedImageView.layer.minificationFilter = kCAFilterTrilinear
animatedImageView.layer.magnificationFilter = kCAFilterTrilinear
animatedImageView.layer.minificationFilter = CALayerContentsFilter.trilinear
animatedImageView.layer.magnificationFilter = CALayerContentsFilter.trilinear
animatedImageView.backgroundColor = Theme.offBackgroundColor
addSubview(animatedImageView)
animatedImageView.autoPinEdgesToSuperviewEdges()
@ -263,8 +263,8 @@ public class ConversationMediaView: UIView {
stillImageView.contentMode = .scaleAspectFill
// Use trilinear filters for better scaling quality at
// some performance cost.
stillImageView.layer.minificationFilter = kCAFilterTrilinear
stillImageView.layer.magnificationFilter = kCAFilterTrilinear
stillImageView.layer.minificationFilter = CALayerContentsFilter.trilinear
stillImageView.layer.magnificationFilter = CALayerContentsFilter.trilinear
stillImageView.backgroundColor = Theme.offBackgroundColor
addSubview(stillImageView)
stillImageView.autoPinEdgesToSuperviewEdges()
@ -318,8 +318,8 @@ public class ConversationMediaView: UIView {
stillImageView.contentMode = .scaleAspectFill
// Use trilinear filters for better scaling quality at
// some performance cost.
stillImageView.layer.minificationFilter = kCAFilterTrilinear
stillImageView.layer.magnificationFilter = kCAFilterTrilinear
stillImageView.layer.minificationFilter = CALayerContentsFilter.trilinear
stillImageView.layer.magnificationFilter = CALayerContentsFilter.trilinear
stillImageView.backgroundColor = Theme.offBackgroundColor
addSubview(stillImageView)

View File

@ -157,7 +157,7 @@ public class MediaAlbumCellView: UIStackView {
// the "more" item, if any.
continue
}
guard let index = itemViews.index(of: itemView) else {
guard let index = itemViews.firstIndex(of: itemView) else {
owsFailDebug("Couldn't determine index of item view.")
continue
}
@ -188,14 +188,14 @@ public class MediaAlbumCellView: UIStackView {
}
private func newRow(rowViews: [ConversationMediaView],
axis: UILayoutConstraintAxis,
axis: NSLayoutConstraint.Axis,
viewSize: CGFloat) -> UIStackView {
autoSet(viewSize: viewSize, ofViews: rowViews)
return newRow(rowViews: rowViews, axis: axis)
}
private func newRow(rowViews: [ConversationMediaView],
axis: UILayoutConstraintAxis) -> UIStackView {
axis: NSLayoutConstraint.Axis) -> UIStackView {
let stackView = UIStackView(arrangedSubviews: rowViews)
stackView.axis = axis
stackView.spacing = MediaAlbumCellView.kSpacingPts

View File

@ -105,7 +105,7 @@ public class MediaDownloadView: UIView {
shapeLayer1.path = bezierPath1.cgPath
let fillColor1: UIColor = UIColor(white: 1.0, alpha: 0.4)
shapeLayer1.fillColor = fillColor1.cgColor
shapeLayer1.fillRule = kCAFillRuleEvenOdd
shapeLayer1.fillRule = CAShapeLayerFillRule.evenOdd
let bezierPath2 = UIBezierPath()
bezierPath2.addArc(withCenter: center, radius: outerRadius, startAngle: startAngle, endAngle: endAngle, clockwise: true)

View File

@ -110,7 +110,7 @@ public class MediaUploadView: UIView {
bezierPath2.append(UIBezierPath(ovalIn: outerCircleBounds))
shapeLayer2.path = bezierPath2.cgPath
shapeLayer2.fillColor = UIColor(white: 1.0, alpha: 0.4).cgColor
shapeLayer2.fillRule = kCAFillRuleEvenOdd
shapeLayer2.fillRule = CAShapeLayerFillRule.evenOdd
CATransaction.commit()
}

View File

@ -113,7 +113,7 @@ public class ConversationHeaderView: UIStackView {
public override var intrinsicContentSize: CGSize {
// Grow to fill as much of the navbar as possible.
return UILayoutFittingExpandedSize
return UIView.layoutFittingExpandedSize
}
@objc

View File

@ -184,7 +184,7 @@ import SignalMessaging
path.usesEvenOddFillRule = true
layer.path = path.cgPath
layer.fillRule = kCAFillRuleEvenOdd
layer.fillRule = CAShapeLayerFillRule.evenOdd
layer.fillColor = UIColor.black.cgColor
layer.opacity = 0.7
}

View File

@ -221,7 +221,7 @@ class GifPickerCell: UICollectionViewCell {
self.backgroundColor = nil
if self.isCellSelected {
let activityIndicator = UIActivityIndicatorView(activityIndicatorStyle: .gray)
let activityIndicator = UIActivityIndicatorView(style: .gray)
self.activityIndicator = activityIndicator
addSubview(activityIndicator)
activityIndicator.autoCenterInSuperview()

View File

@ -216,7 +216,7 @@ class GifPickerViewController: OWSViewController, UISearchBarDelegate, UICollect
searchErrorView.isUserInteractionEnabled = true
searchErrorView.addGestureRecognizer(UITapGestureRecognizer(target: self, action: #selector(retryTapped)))
let activityIndicator = UIActivityIndicatorView(activityIndicatorStyle: .gray)
let activityIndicator = UIActivityIndicatorView(style: .gray)
self.activityIndicator = activityIndicator
self.view.addSubview(activityIndicator)
activityIndicator.autoHCenterInSuperview()
@ -349,7 +349,7 @@ class GifPickerViewController: OWSViewController, UISearchBarDelegate, UICollect
path.append(UIBezierPath(rect: cellRect))
layer.path = path.cgPath
layer.fillRule = kCAFillRuleEvenOdd
layer.fillRule = CAShapeLayerFillRule.evenOdd
layer.fillColor = UIColor.black.cgColor
layer.opacity = 0.7
}

View File

@ -64,7 +64,7 @@ class ConversationSearchViewController: UITableViewController, BlockListCacheDel
blockListCache = BlockListCache()
blockListCache.startObservingAndSyncState(delegate: self)
tableView.rowHeight = UITableViewAutomaticDimension
tableView.rowHeight = UITableView.automaticDimension
tableView.estimatedRowHeight = 60
tableView.separatorColor = Theme.cellSeparatorColor
@ -272,7 +272,7 @@ class ConversationSearchViewController: UITableViewController, BlockListCacheDel
if let messageSnippet = searchResult.snippet {
overrideSnippet = NSAttributedString(string: messageSnippet,
attributes: [
NSAttributedStringKey.foregroundColor: Theme.secondaryColor
NSAttributedString.Key.foregroundColor: Theme.secondaryColor
])
} else {
owsFailDebug("message search result is missing message snippet")
@ -296,7 +296,7 @@ class ConversationSearchViewController: UITableViewController, BlockListCacheDel
guard nil != self.tableView(tableView, titleForHeaderInSection: section) else {
return 0
}
return UITableViewAutomaticDimension
return UITableView.automaticDimension
}
override func tableView(_ tableView: UITableView, viewForHeaderInSection section: Int) -> UIView? {
@ -433,7 +433,7 @@ class EmptySearchResultCell: UITableViewCell {
static let reuseIdentifier = "EmptySearchResultCell"
let messageLabel: UILabel
override init(style: UITableViewCellStyle, reuseIdentifier: String?) {
override init(style: UITableViewCell.CellStyle, reuseIdentifier: String?) {
self.messageLabel = UILabel()
super.init(style: style, reuseIdentifier: reuseIdentifier)

View File

@ -142,11 +142,11 @@ public class LongTextViewController: OWSViewController {
//
// UITextViews linkTextAttributes property has type [String : Any]! but should be [NSAttributedStringKey : Any]! in Swift 4.
let linkTextAttributes: [String: Any] = [
NSAttributedStringKey.foregroundColor.rawValue: Theme.primaryColor,
NSAttributedStringKey.underlineColor.rawValue: Theme.primaryColor,
NSAttributedStringKey.underlineStyle.rawValue: NSUnderlineStyle.styleSingle.rawValue
NSAttributedString.Key.foregroundColor.rawValue: Theme.primaryColor,
NSAttributedString.Key.underlineColor.rawValue: Theme.primaryColor,
NSAttributedString.Key.underlineStyle.rawValue: NSUnderlineStyle.single.rawValue
]
messageTextView.linkTextAttributes = linkTextAttributes
messageTextView.linkTextAttributes = convertToOptionalNSAttributedStringKeyDictionary(linkTextAttributes)
view.addSubview(messageTextView)
messageTextView.autoPinEdge(toSuperviewEdge: .top)
@ -173,3 +173,9 @@ public class LongTextViewController: OWSViewController {
AttachmentSharing.showShareUI(forText: fullText)
}
}
// Helper function inserted by Swift 4.2 migrator.
// Rewraps a [String: Any] attribute dictionary as [NSAttributedString.Key: Any],
// preserving nil, for assignment to UITextView.linkTextAttributes whose type
// changed in the iOS 12 SDK.
// Precondition: keys in `input` are distinct (they are, coming from a literal),
// otherwise Dictionary(uniqueKeysWithValues:) would trap.
fileprivate func convertToOptionalNSAttributedStringKeyDictionary(_ input: [String: Any]?) -> [NSAttributedString.Key: Any]? {
guard let input = input else { return nil }
return Dictionary(uniqueKeysWithValues: input.map { key, value in (NSAttributedString.Key(rawValue: key), value)})
}

View File

@ -282,8 +282,8 @@ class MediaGalleryNavigationController: OWSNavigationController {
presentationView.isHidden = true
presentationView.clipsToBounds = true
presentationView.layer.allowsEdgeAntialiasing = true
presentationView.layer.minificationFilter = kCAFilterTrilinear
presentationView.layer.magnificationFilter = kCAFilterTrilinear
presentationView.layer.minificationFilter = CALayerContentsFilter.trilinear
presentationView.layer.magnificationFilter = CALayerContentsFilter.trilinear
presentationView.contentMode = .scaleAspectFit
guard let navigationBar = self.navigationBar as? OWSNavigationBar else {
@ -980,13 +980,13 @@ class MediaGallery: NSObject, MediaGalleryDataSource, MediaTileViewControllerDel
if let completionBlock = completion {
Bench(title: "calculating changes for collectionView") {
// FIXME can we avoid this index offset?
let dateIndices = newDates.map { sectionDates.index(of: $0)! + 1 }
let dateIndices = newDates.map { sectionDates.firstIndex(of: $0)! + 1 }
let addedSections: IndexSet = IndexSet(dateIndices)
let addedItems: [IndexPath] = newGalleryItems.map { galleryItem in
let sectionIdx = sectionDates.index(of: galleryItem.galleryDate)!
let sectionIdx = sectionDates.firstIndex(of: galleryItem.galleryDate)!
let section = sections[galleryItem.galleryDate]!
let itemIdx = section.index(of: galleryItem)!
let itemIdx = section.firstIndex(of: galleryItem)!
// FIXME can we avoid this index offset?
return IndexPath(item: itemIdx, section: sectionIdx + 1)
@ -1032,14 +1032,14 @@ class MediaGallery: NSObject, MediaGalleryDataSource, MediaTileViewControllerDel
let originalSectionDates = self.sectionDates
for item in items {
guard let itemIndex = galleryItems.index(of: item) else {
guard let itemIndex = galleryItems.firstIndex(of: item) else {
owsFailDebug("removing unknown item.")
return
}
self.galleryItems.remove(at: itemIndex)
guard let sectionIndex = sectionDates.index(where: { $0 == item.galleryDate }) else {
guard let sectionIndex = sectionDates.firstIndex(where: { $0 == item.galleryDate }) else {
owsFailDebug("item with unknown date.")
return
}
@ -1049,13 +1049,13 @@ class MediaGallery: NSObject, MediaGalleryDataSource, MediaTileViewControllerDel
return
}
guard let sectionRowIndex = sectionItems.index(of: item) else {
guard let sectionRowIndex = sectionItems.firstIndex(of: item) else {
owsFailDebug("item with unknown sectionRowIndex")
return
}
// We need to calculate the index of the deleted item with respect to it's original position.
guard let originalSectionIndex = originalSectionDates.index(where: { $0 == item.galleryDate }) else {
guard let originalSectionIndex = originalSectionDates.firstIndex(where: { $0 == item.galleryDate }) else {
owsFailDebug("item with unknown date.")
return
}
@ -1065,7 +1065,7 @@ class MediaGallery: NSObject, MediaGalleryDataSource, MediaTileViewControllerDel
return
}
guard let originalSectionRowIndex = originalSectionItems.index(of: item) else {
guard let originalSectionRowIndex = originalSectionItems.firstIndex(of: item) else {
owsFailDebug("item with unknown sectionRowIndex")
return
}
@ -1095,7 +1095,7 @@ class MediaGallery: NSObject, MediaGalleryDataSource, MediaTileViewControllerDel
self.ensureGalleryItemsLoaded(.after, item: currentItem, amount: kGallerySwipeLoadBatchSize)
guard let currentIndex = galleryItems.index(of: currentItem) else {
guard let currentIndex = galleryItems.firstIndex(of: currentItem) else {
owsFailDebug("currentIndex was unexpectedly nil")
return nil
}
@ -1119,7 +1119,7 @@ class MediaGallery: NSObject, MediaGalleryDataSource, MediaTileViewControllerDel
self.ensureGalleryItemsLoaded(.before, item: currentItem, amount: kGallerySwipeLoadBatchSize)
guard let currentIndex = galleryItems.index(of: currentItem) else {
guard let currentIndex = galleryItems.firstIndex(of: currentItem) else {
owsFailDebug("currentIndex was unexpectedly nil")
return nil
}

View File

@ -48,7 +48,7 @@ class MediaPageViewController: UIPageViewController, UIPageViewControllerDataSou
return currentViewController.galleryItemBox.value
}
public func setCurrentItem(_ item: MediaGalleryItem, direction: UIPageViewControllerNavigationDirection, animated isAnimated: Bool) {
public func setCurrentItem(_ item: MediaGalleryItem, direction: UIPageViewController.NavigationDirection, animated isAnimated: Bool) {
guard let galleryPage = self.buildGalleryPage(galleryItem: item) else {
owsFailDebug("unexpectedly unable to build new gallery page")
return
@ -77,7 +77,7 @@ class MediaPageViewController: UIPageViewController, UIPageViewControllerDataSou
super.init(transitionStyle: .scroll,
navigationOrientation: .horizontal,
options: [UIPageViewControllerOptionInterPageSpacingKey: kSpacingBetweenItems])
options: convertToOptionalUIPageViewControllerOptionsKeyDictionary([convertFromUIPageViewControllerOptionsKey(UIPageViewController.OptionsKey.interPageSpacing): kSpacingBetweenItems]))
self.dataSource = self
self.delegate = self
@ -800,7 +800,7 @@ extension MediaPageViewController: GalleryRailViewDelegate {
return
}
let direction: UIPageViewControllerNavigationDirection
let direction: UIPageViewController.NavigationDirection
direction = currentItem.albumIndex < targetItem.albumIndex ? .forward : .reverse
self.setCurrentItem(targetItem, direction: direction, animated: true)
@ -829,3 +829,14 @@ extension MediaPageViewController: CaptionContainerViewDelegate {
captionContainerView.isHidden = true
}
}
// Helper function inserted by Swift 4.2 migrator.
// Rewraps a [String: Any] options dictionary as [UIPageViewController.OptionsKey: Any],
// preserving nil, for the UIPageViewController initializer whose options type
// changed in the iOS 12 SDK.
// Precondition: keys in `input` are distinct, otherwise
// Dictionary(uniqueKeysWithValues:) would trap.
fileprivate func convertToOptionalUIPageViewControllerOptionsKeyDictionary(_ input: [String: Any]?) -> [UIPageViewController.OptionsKey: Any]? {
guard let input = input else { return nil }
return Dictionary(uniqueKeysWithValues: input.map { key, value in (UIPageViewController.OptionsKey(rawValue: key), value)})
}
// Helper function inserted by Swift 4.2 migrator.
// Unwraps a UIPageViewController.OptionsKey back to its raw String; only used
// to build the string-keyed dictionary fed to the converter above.
// NOTE(review): the round-trip (key -> String -> key) is redundant — the typed
// key could be used directly in the dictionary literal; TODO simplify later.
fileprivate func convertFromUIPageViewControllerOptionsKey(_ input: UIPageViewController.OptionsKey) -> String {
return input.rawValue
}

View File

@ -95,8 +95,8 @@ public class MediaTileViewController: UICollectionViewController, MediaGalleryDa
collectionView.backgroundColor = Theme.darkThemeBackgroundColor
collectionView.register(PhotoGridViewCell.self, forCellWithReuseIdentifier: PhotoGridViewCell.reuseIdentifier)
collectionView.register(MediaGallerySectionHeader.self, forSupplementaryViewOfKind: UICollectionElementKindSectionHeader, withReuseIdentifier: MediaGallerySectionHeader.reuseIdentifier)
collectionView.register(MediaGalleryStaticHeader.self, forSupplementaryViewOfKind: UICollectionElementKindSectionHeader, withReuseIdentifier: MediaGalleryStaticHeader.reuseIdentifier)
collectionView.register(MediaGallerySectionHeader.self, forSupplementaryViewOfKind: UICollectionView.elementKindSectionHeader, withReuseIdentifier: MediaGallerySectionHeader.reuseIdentifier)
collectionView.register(MediaGalleryStaticHeader.self, forSupplementaryViewOfKind: UICollectionView.elementKindSectionHeader, withReuseIdentifier: MediaGalleryStaticHeader.reuseIdentifier)
collectionView.delegate = self
@ -113,10 +113,10 @@ public class MediaTileViewController: UICollectionViewController, MediaGalleryDa
}
private func indexPath(galleryItem: MediaGalleryItem) -> IndexPath? {
guard let sectionIdx = galleryDates.index(of: galleryItem.galleryDate) else {
guard let sectionIdx = galleryDates.firstIndex(of: galleryItem.galleryDate) else {
return nil
}
guard let rowIdx = galleryItems[galleryItem.galleryDate]!.index(of: galleryItem) else {
guard let rowIdx = galleryItems[galleryItem.galleryDate]!.firstIndex(of: galleryItem) else {
return nil
}
@ -311,7 +311,7 @@ public class MediaTileViewController: UICollectionViewController, MediaGalleryDa
return sectionHeader
}
if (kind == UICollectionElementKindSectionHeader) {
if (kind == UICollectionView.elementKindSectionHeader) {
switch indexPath.section {
case kLoadOlderSectionIdx:
guard let sectionHeader = collectionView.dequeueReusableSupplementaryView(ofKind: kind, withReuseIdentifier: MediaGalleryStaticHeader.reuseIdentifier, for: indexPath) as? MediaGalleryStaticHeader else {

View File

@ -44,7 +44,7 @@ public class OWS2FAReminderViewController: UIViewController, PinEntryViewDelegat
let instructionsTextHeader = NSLocalizedString("REMINDER_2FA_BODY_HEADER", comment: "Body header for when user is periodically prompted to enter their registration lock PIN")
let instructionsTextBody = NSLocalizedString("REMINDER_2FA_BODY", comment: "Body text for when user is periodically prompted to enter their registration lock PIN")
let attributes = [NSAttributedStringKey.font: pinEntryView.boldLabelFont]
let attributes = [NSAttributedString.Key.font: pinEntryView.boldLabelFont]
let attributedInstructionsText = NSAttributedString(string: instructionsTextHeader, attributes: attributes).rtlSafeAppend(" ").rtlSafeAppend(instructionsTextBody)

View File

@ -72,7 +72,7 @@ class ImagePickerGridController: UICollectionViewController, PhotoLibraryDelegat
// do nothing
} else {
// must assign titleView frame manually on older iOS
titleView.frame = CGRect(origin: .zero, size: titleView.systemLayoutSizeFitting(UILayoutFittingCompressedSize))
titleView.frame = CGRect(origin: .zero, size: titleView.systemLayoutSizeFitting(UIView.layoutFittingCompressedSize))
}
navigationItem.titleView = titleView
@ -393,7 +393,7 @@ class ImagePickerGridController: UICollectionViewController, PhotoLibraryDelegat
assert(self.collectionPickerController == nil)
self.collectionPickerController = collectionPickerController
addChildViewController(collectionPickerController)
addChild(collectionPickerController)
view.addSubview(collectionPickerView)
collectionPickerView.autoPinEdgesToSuperviewEdges(with: .zero, excludingEdge: .top)
@ -422,7 +422,7 @@ class ImagePickerGridController: UICollectionViewController, PhotoLibraryDelegat
self.titleView.rotateIcon(.down)
}.done { _ in
collectionPickerController.view.removeFromSuperview()
collectionPickerController.removeFromParentViewController()
collectionPickerController.removeFromParent()
}.retainUntilComplete()
}

View File

@ -649,7 +649,7 @@ extension UIDeviceOrientation: CustomStringConvertible {
}
}
extension UIImageOrientation: CustomStringConvertible {
extension UIImage.Orientation: CustomStringConvertible {
public var description: String {
switch self {
case .up:

View File

@ -245,7 +245,7 @@ class PhotoCaptureViewController: OWSViewController {
NotificationCenter.default.addObserver(self,
selector: #selector(didChangeDeviceOrientation),
name: .UIDeviceOrientationDidChange,
name: UIDevice.orientationDidChangeNotification,
object: UIDevice.current)
}

View File

@ -53,7 +53,7 @@ class PhotoCollectionPickerController: OWSTableViewController, PhotoLibraryDeleg
let sectionItems = photoCollections.map { collection in
return OWSTableItem(customCellBlock: { self.buildTableCell(collection: collection) },
customRowHeight: UITableViewAutomaticDimension,
customRowHeight: UITableView.automaticDimension,
actionBlock: { [weak self] in
guard let strongSelf = self else { return }
strongSelf.didSelectCollection(collection: collection)

View File

@ -111,7 +111,7 @@ class SendMediaNavigationController: OWSNavigationController {
func fadeTo(viewControllers: [UIViewController]) {
let transition: CATransition = CATransition()
transition.duration = 0.1
transition.type = kCATransitionFade
transition.type = CATransitionType.fade
view.layer.add(transition, forKey: nil)
setViewControllers(viewControllers, animated: false)
}

View File

@ -17,8 +17,8 @@ public class OnboardingSplashViewController: OnboardingBaseViewController {
let heroImage = UIImage(named: "onboarding_splash_hero")
let heroImageView = UIImageView(image: heroImage)
heroImageView.contentMode = .scaleAspectFit
heroImageView.layer.minificationFilter = kCAFilterTrilinear
heroImageView.layer.magnificationFilter = kCAFilterTrilinear
heroImageView.layer.minificationFilter = CALayerContentsFilter.trilinear
heroImageView.layer.magnificationFilter = CALayerContentsFilter.trilinear
heroImageView.setCompressionResistanceLow()
heroImageView.setContentHuggingVerticalLow()
heroImageView.accessibilityIdentifier = "onboarding.splash." + "heroImageView"

View File

@ -29,7 +29,7 @@ struct AudioSource: Hashable {
init(portDescription: AVAudioSessionPortDescription) {
let isBuiltInEarPiece = portDescription.portType == AVAudioSessionPortBuiltInMic
let isBuiltInEarPiece = convertFromAVAudioSessionPort(portDescription.portType) == convertFromAVAudioSessionPort(AVAudioSession.Port.builtInMic)
// portDescription.portName works well for BT linked devices, but if we are using
// the built in mic, we have "iPhone Microphone" which is a little awkward.
@ -129,7 +129,7 @@ protocol CallAudioServiceDelegate: class {
// Configure audio session so we don't prompt user with Record permission until call is connected.
audioSession.configureRTCAudio()
NotificationCenter.default.addObserver(forName: .AVAudioSessionRouteChange, object: avAudioSession, queue: nil) { _ in
NotificationCenter.default.addObserver(forName: AVAudioSession.routeChangeNotification, object: avAudioSession, queue: nil) { _ in
assert(!Thread.isMainThread)
self.updateIsSpeakerphoneEnabled()
}
@ -201,7 +201,7 @@ protocol CallAudioServiceDelegate: class {
private func updateIsSpeakerphoneEnabled() {
let value = avAudioSession.currentRoute.outputs.contains { (portDescription: AVAudioSessionPortDescription) -> Bool in
return portDescription.portName == AVAudioSessionPortBuiltInSpeaker
return portDescription.portName == convertFromAVAudioSessionPort(AVAudioSession.Port.builtInSpeaker)
}
DispatchQueue.main.async {
self.isSpeakerphoneEnabled = value
@ -213,8 +213,8 @@ protocol CallAudioServiceDelegate: class {
guard let call = call, !call.isTerminated else {
// Revert to default audio
setAudioSession(category: AVAudioSessionCategorySoloAmbient,
mode: AVAudioSessionModeDefault)
setAudioSession(category: convertFromAVAudioSessionCategory(AVAudioSession.Category.soloAmbient),
mode: convertFromAVAudioSessionMode(AVAudioSession.Mode.default))
return
}
@ -224,12 +224,12 @@ protocol CallAudioServiceDelegate: class {
// to setPreferredInput to call.audioSource.portDescription in this case,
// but in practice I'm seeing the call revert to the bluetooth headset.
// Presumably something else (in WebRTC?) is touching our shared AudioSession. - mjk
let options: AVAudioSessionCategoryOptions = call.audioSource?.isBuiltInEarPiece == true ? [] : [.allowBluetooth]
let options: AVAudioSession.CategoryOptions = call.audioSource?.isBuiltInEarPiece == true ? [] : [.allowBluetooth]
if call.state == .localRinging {
// SoloAmbient plays through speaker, but respects silent switch
setAudioSession(category: AVAudioSessionCategorySoloAmbient,
mode: AVAudioSessionModeDefault)
setAudioSession(category: convertFromAVAudioSessionCategory(AVAudioSession.Category.soloAmbient),
mode: convertFromAVAudioSessionMode(AVAudioSession.Mode.default))
} else if call.hasLocalVideo {
// Because ModeVideoChat affects gain, we don't want to apply it until the call is connected.
// otherwise sounds like ringing will be extra loud for video vs. speakerphone
@ -238,16 +238,16 @@ protocol CallAudioServiceDelegate: class {
// side effect of setting options: .allowBluetooth, when I remove the (seemingly unnecessary)
// option, and inspect AVAudioSession.sharedInstance.categoryOptions == 0. And availableInputs
// does not include my linked bluetooth device
setAudioSession(category: AVAudioSessionCategoryPlayAndRecord,
mode: AVAudioSessionModeVideoChat,
setAudioSession(category: convertFromAVAudioSessionCategory(AVAudioSession.Category.playAndRecord),
mode: convertFromAVAudioSessionMode(AVAudioSession.Mode.videoChat),
options: options)
} else {
// Apple Docs say that setting mode to AVAudioSessionModeVoiceChat has the
// side effect of setting options: .allowBluetooth, when I remove the (seemingly unnecessary)
// option, and inspect AVAudioSession.sharedInstance.categoryOptions == 0. And availableInputs
// does not include my linked bluetooth device
setAudioSession(category: AVAudioSessionCategoryPlayAndRecord,
mode: AVAudioSessionModeVoiceChat,
setAudioSession(category: convertFromAVAudioSessionCategory(AVAudioSession.Category.playAndRecord),
mode: convertFromAVAudioSessionMode(AVAudioSession.Mode.voiceChat),
options: options)
}
@ -384,7 +384,7 @@ protocol CallAudioServiceDelegate: class {
// Stop solo audio, revert to default.
isSpeakerphoneEnabled = false
setAudioSession(category: AVAudioSessionCategorySoloAmbient)
setAudioSession(category: convertFromAVAudioSessionCategory(AVAudioSession.Category.soloAmbient))
}
// MARK: Playing Sounds
@ -490,15 +490,15 @@ protocol CallAudioServiceDelegate: class {
private func setAudioSession(category: String,
mode: String? = nil,
options: AVAudioSessionCategoryOptions = AVAudioSessionCategoryOptions(rawValue: 0)) {
options: AVAudioSession.CategoryOptions = AVAudioSession.CategoryOptions(rawValue: 0)) {
AssertIsOnMainThread()
var audioSessionChanged = false
do {
if #available(iOS 10.0, *), let mode = mode {
let oldCategory = avAudioSession.category
let oldMode = avAudioSession.mode
let oldCategory = convertFromAVAudioSessionCategory(avAudioSession.category)
let oldMode = convertFromAVAudioSessionMode(avAudioSession.mode)
let oldOptions = avAudioSession.categoryOptions
guard oldCategory != category || oldMode != mode || oldOptions != options else {
@ -516,13 +516,13 @@ protocol CallAudioServiceDelegate: class {
if oldOptions != options {
Logger.debug("audio session changed options: \(oldOptions) -> \(options) ")
}
try avAudioSession.setCategory(category, mode: mode, options: options)
try avAudioSession.setCategory(convertToAVAudioSessionCategory(category), mode: AVAudioSession.Mode(rawValue: mode), options: options)
} else {
let oldCategory = avAudioSession.category
let oldCategory = convertFromAVAudioSessionCategory(avAudioSession.category)
let oldOptions = avAudioSession.categoryOptions
guard avAudioSession.category != category || avAudioSession.categoryOptions != options else {
guard convertFromAVAudioSessionCategory(avAudioSession.category) != category || avAudioSession.categoryOptions != options else {
return
}
@ -548,3 +548,23 @@ protocol CallAudioServiceDelegate: class {
}
}
}
// Helper function inserted by Swift 4.2 migrator.
fileprivate func convertFromAVAudioSessionPort(_ input: AVAudioSession.Port) -> String {
return input.rawValue
}
// Helper function inserted by Swift 4.2 migrator.
fileprivate func convertFromAVAudioSessionCategory(_ input: AVAudioSession.Category) -> String {
return input.rawValue
}
// Helper function inserted by Swift 4.2 migrator.
fileprivate func convertFromAVAudioSessionMode(_ input: AVAudioSession.Mode) -> String {
return input.rawValue
}
// Helper function inserted by Swift 4.2 migrator.
fileprivate func convertToAVAudioSessionCategory(_ input: String) -> AVAudioSession.Category {
return AVAudioSession.Category(rawValue: input)
}

View File

@ -1831,7 +1831,7 @@ private class SignalCallData: NSObject {
func removeObserver(_ observer: CallServiceObserver) {
AssertIsOnMainThread()
while let index = observers.index(where: { $0.value === observer }) {
while let index = observers.firstIndex(where: { $0.value === observer }) {
observers.remove(at: index)
}
}

View File

@ -191,7 +191,7 @@ protocol CallObserver: class {
func removeObserver(_ observer: CallObserver) {
AssertIsOnMainThread()
while let index = observers.index(where: { $0.value === observer }) {
while let index = observers.firstIndex(where: { $0.value === observer }) {
observers.remove(at: index)
}
}

View File

@ -102,7 +102,7 @@ final class CallKitCallManager: NSObject {
private(set) var calls = [SignalCall]()
func callWithLocalId(_ localId: UUID) -> SignalCall? {
guard let index = calls.index(where: { $0.localId == localId }) else {
guard let index = calls.firstIndex(where: { $0.localId == localId }) else {
return nil
}
return calls[index]
@ -124,7 +124,7 @@ final class CallKitCallManager: NSObject {
fileprivate extension Array {
mutating func removeFirst(where predicate: (Element) throws -> Bool) rethrows {
guard let index = try index(where: predicate) else {
guard let index = try firstIndex(where: predicate) else {
return
}

View File

@ -60,7 +60,7 @@ final class CallKitCallUIAdaptee: NSObject, CallUIAdaptee, CXProviderDelegate {
providerConfiguration.supportedHandleTypes = [.phoneNumber, .generic]
let iconMaskImage = #imageLiteral(resourceName: "logoSignal")
providerConfiguration.iconTemplateImageData = UIImagePNGRepresentation(iconMaskImage)
providerConfiguration.iconTemplateImageData = iconMaskImage.pngData()
// We don't set the ringtoneSound property, so that we use either the
// default iOS ringtone OR the custom ringtone associated with this user's

View File

@ -120,7 +120,7 @@ import PromiseKit
public class func record(forFileUrl fileUrl: URL,
recordName: String) -> CKRecord {
let recordType = signalBackupRecordType
let recordID = CKRecordID(recordName: recordName)
let recordID = CKRecord.ID(recordName: recordName)
let record = CKRecord(recordType: recordType, recordID: recordID)
let asset = CKAsset(fileURL: fileUrl)
record[payloadKey] = asset
@ -234,7 +234,7 @@ import PromiseKit
success: @escaping () -> Void,
failure: @escaping (Error) -> Void) {
let recordIDs = recordNames.map { CKRecordID(recordName: $0) }
let recordIDs = recordNames.map { CKRecord.ID(recordName: $0) }
let deleteOperation = CKModifyRecordsOperation(recordsToSave: nil, recordIDsToDelete: recordIDs)
deleteOperation.modifyRecordsCompletionBlock = { (records, recordIds, error) in
@ -277,7 +277,7 @@ import PromiseKit
let (promise, resolver) = Promise<CKRecord?>.pending()
let recordId = CKRecordID(recordName: recordName)
let recordId = CKRecord.ID(recordName: recordName)
let fetchOperation = CKFetchRecordsOperation(recordIDs: [recordId ])
// Don't download the file; we're just using the fetch to check whether or
// not this record already exists.
@ -386,7 +386,7 @@ import PromiseKit
private class func fetchAllRecordNamesStep(recipientId: String?,
query: CKQuery,
previousRecordNames: [String],
cursor: CKQueryCursor?,
cursor: CKQueryOperation.Cursor?,
remainingRetries: Int,
success: @escaping ([String]) -> Void,
failure: @escaping (Error) -> Void) {
@ -489,7 +489,7 @@ import PromiseKit
remainingRetries: maxRetries)
.then { (asset) -> Promise<Data> in
do {
let data = try Data(contentsOf: asset.fileURL)
let data = try Data(contentsOf: asset.fileURL!)
return Promise.value(data)
} catch {
Logger.error("couldn't load asset file: \(error).")
@ -512,7 +512,7 @@ import PromiseKit
remainingRetries: maxRetries)
.then { (asset) -> Promise<Void> in
do {
try FileManager.default.copyItem(at: asset.fileURL, to: toFileUrl)
try FileManager.default.copyItem(at: asset.fileURL!, to: toFileUrl)
return Promise.value(())
} catch {
Logger.error("couldn't copy asset file: \(error).")
@ -533,7 +533,7 @@ import PromiseKit
let (promise, resolver) = Promise<CKAsset>.pending()
let recordId = CKRecordID(recordName: recordName)
let recordId = CKRecord.ID(recordName: recordName)
let fetchOperation = CKFetchRecordsOperation(recordIDs: [recordId ])
// Download all keys for this record.
fetchOperation.perRecordCompletionBlock = { (record, recordId, error) in

View File

@ -26,7 +26,7 @@ public class AvatarTableViewCell: UITableViewCell {
}
@objc
public override init(style: UITableViewCellStyle, reuseIdentifier: String?) {
public override init(style: UITableViewCell.CellStyle, reuseIdentifier: String?) {
self.avatarView = AvatarImageView()
avatarView.autoSetDimensions(to: CGSize(width: CGFloat(kStandardAvatarSize), height: CGFloat(kStandardAvatarSize)))

View File

@ -165,7 +165,7 @@ private class CaptionView: UIView {
override var intrinsicContentSize: CGSize {
var size = super.intrinsicContentSize
if size.height == UIViewNoIntrinsicMetric {
if size.height == UIView.noIntrinsicMetric {
size.height = layoutManager.usedRect(for: textContainer).height + textContainerInset.top + textContainerInset.bottom
}
size.height = min(kMaxHeight, size.height)

View File

@ -21,7 +21,7 @@ class ContactCell: UITableViewCell {
var contact: Contact?
var showsWhenSelected: Bool = false
override init(style: UITableViewCellStyle, reuseIdentifier: String?) {
override init(style: UITableViewCell.CellStyle, reuseIdentifier: String?) {
self.contactImageView = AvatarImageView()
self.textStackView = UIStackView()
self.titleLabel = UILabel()
@ -31,7 +31,7 @@ class ContactCell: UITableViewCell {
super.init(style: style, reuseIdentifier: reuseIdentifier)
selectionStyle = UITableViewCellSelectionStyle.none
selectionStyle = UITableViewCell.SelectionStyle.none
textStackView.axis = .vertical
textStackView.addArrangedSubview(titleLabel)
@ -46,7 +46,7 @@ class ContactCell: UITableViewCell {
self.contentView.addSubview(contentColumns)
contentColumns.autoPinEdgesToSuperviewMargins()
NotificationCenter.default.addObserver(self, selector: #selector(self.didChangePreferredContentSize), name: NSNotification.Name.UIContentSizeCategoryDidChange, object: nil)
NotificationCenter.default.addObserver(self, selector: #selector(self.didChangePreferredContentSize), name: UIContentSizeCategory.didChangeNotification, object: nil)
}
required init?(coder aDecoder: NSCoder) {
@ -136,13 +136,13 @@ fileprivate extension CNContact {
let boldDescriptor = font.fontDescriptor.withSymbolicTraits(.traitBold)
let boldAttributes = [
NSAttributedStringKey.font: UIFont(descriptor: boldDescriptor!, size: 0)
NSAttributedString.Key.font: UIFont(descriptor: boldDescriptor!, size: 0)
]
if let attributedName = CNContactFormatter.attributedString(from: self, style: .fullName, defaultAttributes: nil) {
let highlightedName = attributedName.mutableCopy() as! NSMutableAttributedString
highlightedName.enumerateAttributes(in: NSRange(location: 0, length: highlightedName.length), options: [], using: { (attrs, range, _) in
if let property = attrs[NSAttributedStringKey(rawValue: CNContactPropertyAttribute)] as? String, property == keyToHighlight {
if let property = attrs[NSAttributedString.Key(rawValue: CNContactPropertyAttribute)] as? String, property == keyToHighlight {
highlightedName.addAttributes(boldAttributes, range: range)
}
})

View File

@ -715,7 +715,7 @@ public class LinkPreviewView: UIStackView {
let activityIndicatorStyle: UIActivityIndicatorView.Style = (Theme.isDarkThemeEnabled
? .white
: .gray)
let activityIndicator = UIActivityIndicatorView(activityIndicatorStyle: activityIndicatorStyle)
let activityIndicator = UIActivityIndicatorView(style: activityIndicatorStyle)
activityIndicator.startAnimating()
addArrangedSubview(activityIndicator)
let activityIndicatorSize: CGFloat = 25

View File

@ -97,7 +97,7 @@ open class MarqueeLabel: UILabel, CAAnimationDelegate {
Defaults to `UIViewAnimationOptionCurveEaseInOut`.
*/
open var animationCurve: UIViewAnimationCurve = .linear
open var animationCurve: UIView.AnimationCurve = .linear
/**
A boolean property that sets whether the `MarqueeLabel` should behave like a normal `UILabel`.
@ -1074,7 +1074,7 @@ open class MarqueeLabel: UILabel, CAAnimationDelegate {
let colorAnimation = GradientSetupAnimation(keyPath: "colors")
colorAnimation.fromValue = gradientMask.colors
colorAnimation.toValue = adjustedColors
colorAnimation.fillMode = kCAFillModeForwards
colorAnimation.fillMode = CAMediaTimingFillMode.forwards
colorAnimation.isRemovedOnCompletion = false
colorAnimation.delegate = self
gradientMask.add(colorAnimation, forKey: "setupFade")
@ -1088,21 +1088,21 @@ open class MarqueeLabel: UILabel, CAAnimationDelegate {
self.layer.mask = nil
}
private func timingFunctionForAnimationCurve(_ curve: UIViewAnimationCurve) -> CAMediaTimingFunction {
private func timingFunctionForAnimationCurve(_ curve: UIView.AnimationCurve) -> CAMediaTimingFunction {
let timingFunction: String?
switch curve {
case .easeIn:
timingFunction = kCAMediaTimingFunctionEaseIn
timingFunction = convertFromCAMediaTimingFunctionName(CAMediaTimingFunctionName.easeIn)
case .easeInOut:
timingFunction = kCAMediaTimingFunctionEaseInEaseOut
timingFunction = convertFromCAMediaTimingFunctionName(CAMediaTimingFunctionName.easeInEaseOut)
case .easeOut:
timingFunction = kCAMediaTimingFunctionEaseOut
timingFunction = convertFromCAMediaTimingFunctionName(CAMediaTimingFunctionName.easeOut)
default:
timingFunction = kCAMediaTimingFunctionLinear
timingFunction = convertFromCAMediaTimingFunctionName(CAMediaTimingFunctionName.linear)
}
return CAMediaTimingFunction(name: timingFunction!)
return CAMediaTimingFunction(name: convertToCAMediaTimingFunctionName(timingFunction!))
}
private func transactionDurationType(_ labelType: MarqueeType, interval: CGFloat, delay: CGFloat) -> TimeInterval {
@ -1554,7 +1554,7 @@ open class MarqueeLabel: UILabel, CAAnimationDelegate {
sublabel.tintColorDidChange()
}
override open var contentMode: UIViewContentMode {
override open var contentMode: UIView.ContentMode {
get {
return sublabel.contentMode
}
@ -1588,7 +1588,7 @@ open class MarqueeLabel: UILabel, CAAnimationDelegate {
//
public protocol MarqueeStep {
var timeStep: CGFloat { get }
var timingFunction: UIViewAnimationCurve { get }
var timingFunction: UIView.AnimationCurve { get }
var edgeFades: EdgeFade { get }
}
@ -1626,7 +1626,7 @@ public struct ScrollStep: MarqueeStep {
- Note: The animation curve value for the first `ScrollStep` in a sequence has no effect.
*/
public let timingFunction: UIViewAnimationCurve
public let timingFunction: UIView.AnimationCurve
/**
The position of the label for this scroll step.
@ -1642,7 +1642,7 @@ public struct ScrollStep: MarqueeStep {
*/
public let edgeFades: EdgeFade
public init(timeStep: CGFloat, timingFunction: UIViewAnimationCurve = .linear, position: Position, edgeFades: EdgeFade) {
public init(timeStep: CGFloat, timingFunction: UIView.AnimationCurve = .linear, position: Position, edgeFades: EdgeFade) {
self.timeStep = timeStep
self.position = position
self.edgeFades = edgeFades
@ -1675,7 +1675,7 @@ public struct FadeStep: MarqueeStep {
/**
The animation curve to utilize between the previous fade state in a sequence and this step.
*/
public let timingFunction: UIViewAnimationCurve
public let timingFunction: UIView.AnimationCurve
/**
The option set defining the edge fade state for this fade step.
@ -1687,7 +1687,7 @@ public struct FadeStep: MarqueeStep {
*/
public let edgeFades: EdgeFade
public init(timeStep: CGFloat, timingFunction: UIViewAnimationCurve = .linear, edgeFades: EdgeFade) {
public init(timeStep: CGFloat, timingFunction: UIView.AnimationCurve = .linear, edgeFades: EdgeFade) {
self.timeStep = timeStep
self.timingFunction = timingFunction
self.edgeFades = edgeFades
@ -1840,3 +1840,13 @@ fileprivate extension CAMediaTimingFunction {
return pointArray
}
}
// Helper function inserted by Swift 4.2 migrator.
fileprivate func convertFromCAMediaTimingFunctionName(_ input: CAMediaTimingFunctionName) -> String {
return input.rawValue
}
// Helper function inserted by Swift 4.2 migrator.
fileprivate func convertToCAMediaTimingFunctionName(_ input: String) -> CAMediaTimingFunctionName {
return CAMediaTimingFunctionName(rawValue: input)
}

View File

@ -40,7 +40,7 @@ class QuotedReplyPreview: UIView, OWSQuotedMessageViewDelegate {
updateContents()
NotificationCenter.default.addObserver(self, selector: #selector(contentSizeCategoryDidChange), name: .UIContentSizeCategoryDidChange, object: nil)
NotificationCenter.default.addObserver(self, selector: #selector(contentSizeCategoryDidChange), name: UIContentSizeCategory.didChangeNotification, object: nil)
}
private let draftMarginTop: CGFloat = 6

View File

@ -36,12 +36,12 @@ final class ContactsPickerTest: SignalBaseTest {
let collatedContacts = contactsPicker.collatedContacts([emailOnlyContactB, emailOnlyContactD])
let sectionTitles = contactsPicker.collationForTests.sectionTitles
if let bIndex = sectionTitles.index(of: "B") {
if let bIndex = sectionTitles.firstIndex(of: "B") {
let bSectionContacts = collatedContacts[bIndex]
XCTAssertEqual(bSectionContacts.first, emailOnlyContactB)
}
if let dIndex = sectionTitles.index(of: "D") {
if let dIndex = sectionTitles.firstIndex(of: "D") {
let dSectionContacts = collatedContacts[dIndex]
XCTAssertEqual(dSectionContacts.first, emailOnlyContactD)
}
@ -58,7 +58,7 @@ final class ContactsPickerTest: SignalBaseTest {
let collatedContacts = contactsPicker.collatedContacts([nameAndEmailContact])
let sectionTitles = contactsPicker.collationForTests.sectionTitles
if let aIndex = sectionTitles.index(of: "A") {
if let aIndex = sectionTitles.firstIndex(of: "A") {
let aSectionContacts = collatedContacts[aIndex]
XCTAssertEqual(aSectionContacts.first, nameAndEmailContact)
}

View File

@ -49,7 +49,7 @@ class ImageEditorTest: SignalBaseTest {
private func writeDummyImage() -> String {
let image = UIImage.init(color: .red, size: CGSize(width: 1, height: 1))
guard let data = UIImagePNGRepresentation(image) else {
guard let data = image.pngData() else {
owsFail("Couldn't export dummy image.")
}
let filePath = OWSFileSystem.temporaryFilePath(withFileExtension: "png")

View File

@ -67,7 +67,7 @@ public class AttachmentApprovalViewController: UIPageViewController, UIPageViewC
self.attachmentItemCollection = AttachmentItemCollection(attachmentItems: attachmentItems)
super.init(transitionStyle: .scroll,
navigationOrientation: .horizontal,
options: [UIPageViewControllerOptionInterPageSpacingKey: kSpacingBetweenItems])
options: convertToOptionalUIPageViewControllerOptionsKeyDictionary([convertFromUIPageViewControllerOptionsKey(UIPageViewController.OptionsKey.interPageSpacing): kSpacingBetweenItems]))
self.dataSource = self
self.delegate = self
@ -516,7 +516,7 @@ public class AttachmentApprovalViewController: UIPageViewController, UIPageViewC
return viewController
}
private func setCurrentItem(_ item: SignalAttachmentItem, direction: UIPageViewControllerNavigationDirection, animated isAnimated: Bool) {
private func setCurrentItem(_ item: SignalAttachmentItem, direction: UIPageViewController.NavigationDirection, animated isAnimated: Bool) {
guard let page = self.buildPage(item: item) else {
owsFailDebug("unexpectedly unable to build new page")
return
@ -586,10 +586,10 @@ public class AttachmentApprovalViewController: UIPageViewController, UIPageViewC
let isLossy: Bool = attachmentItem.attachment.mimeType.caseInsensitiveCompare(OWSMimeTypeImageJpeg) == .orderedSame
if isLossy {
dataUTI = kUTTypeJPEG as String
return UIImageJPEGRepresentation(dstImage, 0.9)
return dstImage.jpegData(compressionQuality: 0.9)
} else {
dataUTI = kUTTypePNG as String
return UIImagePNGRepresentation(dstImage)
return dstImage.pngData()
}
}() else {
owsFailDebug("Could not export for output.")
@ -620,7 +620,7 @@ public class AttachmentApprovalViewController: UIPageViewController, UIPageViewC
}
func attachmentItem(before currentItem: SignalAttachmentItem) -> SignalAttachmentItem? {
guard let currentIndex = attachmentItems.index(of: currentItem) else {
guard let currentIndex = attachmentItems.firstIndex(of: currentItem) else {
owsFailDebug("currentIndex was unexpectedly nil")
return nil
}
@ -635,7 +635,7 @@ public class AttachmentApprovalViewController: UIPageViewController, UIPageViewC
}
func attachmentItem(after currentItem: SignalAttachmentItem) -> SignalAttachmentItem? {
guard let currentIndex = attachmentItems.index(of: currentItem) else {
guard let currentIndex = attachmentItems.firstIndex(of: currentItem) else {
owsFailDebug("currentIndex was unexpectedly nil")
return nil
}
@ -747,17 +747,17 @@ extension AttachmentApprovalViewController: GalleryRailViewDelegate {
return
}
guard let currentIndex = attachmentItems.index(of: currentItem) else {
guard let currentIndex = attachmentItems.firstIndex(of: currentItem) else {
owsFailDebug("currentIndex was unexpectedly nil")
return
}
guard let targetIndex = attachmentItems.index(of: targetItem) else {
guard let targetIndex = attachmentItems.firstIndex(of: targetItem) else {
owsFailDebug("targetIndex was unexpectedly nil")
return
}
let direction: UIPageViewControllerNavigationDirection = currentIndex < targetIndex ? .forward : .reverse
let direction: UIPageViewController.NavigationDirection = currentIndex < targetIndex ? .forward : .reverse
self.setCurrentItem(targetItem, direction: direction, animated: true)
}
@ -792,3 +792,14 @@ extension AttachmentApprovalViewController: AttachmentApprovalInputAccessoryView
isEditingCaptions = false
}
}
// Helper function inserted by Swift 4.2 migrator.
fileprivate func convertToOptionalUIPageViewControllerOptionsKeyDictionary(_ input: [String: Any]?) -> [UIPageViewController.OptionsKey: Any]? {
guard let input = input else { return nil }
return Dictionary(uniqueKeysWithValues: input.map { key, value in (UIPageViewController.OptionsKey(rawValue: key), value)})
}
// Helper function inserted by Swift 4.2 migrator.
fileprivate func convertFromUIPageViewControllerOptionsKey(_ input: UIPageViewController.OptionsKey) -> String {
return input.rawValue
}

View File

@ -103,7 +103,7 @@ class AttachmentCaptionViewController: OWSViewController {
let backgroundView = UIView()
backgroundView.backgroundColor = UIColor(white: 0, alpha: 0.5)
view.addSubview(backgroundView)
view.sendSubview(toBack: backgroundView)
view.sendSubviewToBack(backgroundView)
backgroundView.autoPinEdge(toSuperviewEdge: .leading)
backgroundView.autoPinEdge(toSuperviewEdge: .trailing)
backgroundView.autoPinEdge(toSuperviewEdge: .bottom)

View File

@ -80,7 +80,7 @@ class AttachmentItemCollection {
}
func itemAfter(item: SignalAttachmentItem) -> SignalAttachmentItem? {
guard let currentIndex = attachmentItems.index(of: item) else {
guard let currentIndex = attachmentItems.firstIndex(of: item) else {
owsFailDebug("currentIndex was unexpectedly nil")
return nil
}
@ -91,7 +91,7 @@ class AttachmentItemCollection {
}
func itemBefore(item: SignalAttachmentItem) -> SignalAttachmentItem? {
guard let currentIndex = attachmentItems.index(of: item) else {
guard let currentIndex = attachmentItems.firstIndex(of: item) else {
owsFailDebug("currentIndex was unexpectedly nil")
return nil
}

View File

@ -78,7 +78,7 @@ public class AttachmentPrepViewController: OWSViewController, PlayerProgressBarD
scrollView.showsVerticalScrollIndicator = false
// Panning should stop pretty soon after the user stops scrolling
scrollView.decelerationRate = UIScrollViewDecelerationRateFast
scrollView.decelerationRate = UIScrollView.DecelerationRate.fast
// We want scroll view content up and behind the system status bar content
// but we want other content (e.g. bar buttons) to respect the top layout guide.

View File

@ -138,8 +138,8 @@ public class MediaMessageView: UIView, OWSAudioPlayerDelegate {
let audioPlayButton = UIButton()
self.audioPlayButton = audioPlayButton
setAudioIconToPlay()
audioPlayButton.imageView?.layer.minificationFilter = kCAFilterTrilinear
audioPlayButton.imageView?.layer.magnificationFilter = kCAFilterTrilinear
audioPlayButton.imageView?.layer.minificationFilter = CALayerContentsFilter.trilinear
audioPlayButton.imageView?.layer.magnificationFilter = CALayerContentsFilter.trilinear
audioPlayButton.addTarget(self, action: #selector(audioPlayButtonPressed), for: .touchUpInside)
let buttonSize = createHeroViewSize()
audioPlayButton.autoSetDimension(.width, toSize: buttonSize)
@ -221,8 +221,8 @@ public class MediaMessageView: UIView, OWSAudioPlayerDelegate {
}
let imageView = UIImageView(image: image)
imageView.layer.minificationFilter = kCAFilterTrilinear
imageView.layer.magnificationFilter = kCAFilterTrilinear
imageView.layer.minificationFilter = CALayerContentsFilter.trilinear
imageView.layer.magnificationFilter = CALayerContentsFilter.trilinear
let aspectRatio = image.size.width / image.size.height
addSubviewWithScaleAspectFitLayout(view: imageView, aspectRatio: aspectRatio)
contentView = imageView
@ -243,8 +243,8 @@ public class MediaMessageView: UIView, OWSAudioPlayerDelegate {
}
let imageView = UIImageView(image: image)
imageView.layer.minificationFilter = kCAFilterTrilinear
imageView.layer.magnificationFilter = kCAFilterTrilinear
imageView.layer.minificationFilter = CALayerContentsFilter.trilinear
imageView.layer.magnificationFilter = CALayerContentsFilter.trilinear
let aspectRatio = image.size.width / image.size.height
addSubviewWithScaleAspectFitLayout(view: imageView, aspectRatio: aspectRatio)
contentView = imageView
@ -307,8 +307,8 @@ public class MediaMessageView: UIView, OWSAudioPlayerDelegate {
let image = UIImage(named: imageName)
assert(image != nil)
let imageView = UIImageView(image: image)
imageView.layer.minificationFilter = kCAFilterTrilinear
imageView.layer.magnificationFilter = kCAFilterTrilinear
imageView.layer.minificationFilter = CALayerContentsFilter.trilinear
imageView.layer.magnificationFilter = CALayerContentsFilter.trilinear
imageView.layer.shadowColor = UIColor.black.cgColor
let shadowScaling = 5.0
imageView.layer.shadowRadius = CGFloat(2.0 * shadowScaling)

View File

@ -74,7 +74,7 @@ public class ModalActivityIndicatorViewController: OWSViewController {
: UIColor(white: 0, alpha: 0.25))
self.view.isOpaque = false
let activityIndicator = UIActivityIndicatorView(activityIndicatorStyle: .whiteLarge)
let activityIndicator = UIActivityIndicatorView(style: .whiteLarge)
self.activityIndicator = activityIndicator
self.view.addSubview(activityIndicator)
activityIndicator.autoCenterInSuperview()

View File

@ -32,8 +32,8 @@ public class AvatarImageView: UIImageView {
func configureView() {
self.autoPinToSquareAspectRatio()
self.layer.minificationFilter = kCAFilterTrilinear
self.layer.magnificationFilter = kCAFilterTrilinear
self.layer.minificationFilter = CALayerContentsFilter.trilinear
self.layer.magnificationFilter = CALayerContentsFilter.trilinear
self.layer.masksToBounds = true
self.layer.addSublayer(self.shadowLayer)
@ -57,7 +57,7 @@ public class AvatarImageView: UIImageView {
// This can be any color since the fill should be clipped.
self.shadowLayer.fillColor = UIColor.black.cgColor
self.shadowLayer.path = shadowPath.cgPath
self.shadowLayer.fillRule = kCAFillRuleEvenOdd
self.shadowLayer.fillRule = CAShapeLayerFillRule.evenOdd
self.shadowLayer.shadowColor = (Theme.isDarkThemeEnabled ? UIColor.white : UIColor.black).cgColor
self.shadowLayer.shadowRadius = 0.5
self.shadowLayer.shadowOpacity = 0.15
@ -203,14 +203,14 @@ public class AvatarImageButton: UIButton {
// This can be any color since the fill should be clipped.
shadowLayer.fillColor = UIColor.black.cgColor
shadowLayer.path = shadowPath.cgPath
shadowLayer.fillRule = kCAFillRuleEvenOdd
shadowLayer.fillRule = CAShapeLayerFillRule.evenOdd
shadowLayer.shadowColor = (Theme.isDarkThemeEnabled ? UIColor.white : UIColor.black).cgColor
shadowLayer.shadowRadius = 0.5
shadowLayer.shadowOpacity = 0.15
shadowLayer.shadowOffset = .zero
}
override public func setImage(_ image: UIImage?, for state: UIControlState) {
override public func setImage(_ image: UIImage?, for state: UIControl.State) {
ensureViewConfigured()
super.setImage(image, for: state)
}
@ -226,8 +226,8 @@ public class AvatarImageButton: UIButton {
autoPinToSquareAspectRatio()
layer.minificationFilter = kCAFilterTrilinear
layer.magnificationFilter = kCAFilterTrilinear
layer.minificationFilter = CALayerContentsFilter.trilinear
layer.magnificationFilter = CALayerContentsFilter.trilinear
layer.masksToBounds = true
layer.addSublayer(shadowLayer)

View File

@ -470,8 +470,8 @@ public class ImageEditorCanvasView: UIView {
shapeLayer.path = bezierPath.cgPath
shapeLayer.fillColor = nil
shapeLayer.lineCap = kCALineCapRound
shapeLayer.lineJoin = kCALineJoinRound
shapeLayer.lineCap = CAShapeLayerLineCap.round
shapeLayer.lineJoin = CAShapeLayerLineJoin.round
shapeLayer.zPosition = zPositionForItem(item: item, model: model, zPositionBase: brushLayerZ)
return shapeLayer
@ -503,8 +503,8 @@ public class ImageEditorCanvasView: UIView {
let text = item.text.filterForDisplay ?? ""
let attributedString = NSAttributedString(string: text,
attributes: [
NSAttributedStringKey.font: item.font.withSize(fontSize),
NSAttributedStringKey.foregroundColor: item.color.color
NSAttributedString.Key.font: item.font.withSize(fontSize),
NSAttributedString.Key.foregroundColor: item.color.color
])
let layer = EditorTextLayer(itemId: item.itemId)
layer.string = attributedString
@ -512,7 +512,7 @@ public class ImageEditorCanvasView: UIView {
layer.font = CGFont(item.font.fontName as CFString)
layer.fontSize = fontSize
layer.isWrapped = true
layer.alignmentMode = kCAAlignmentCenter
layer.alignmentMode = CATextLayerAlignmentMode.center
// I don't think we need to enable allowsFontSubpixelQuantization
// or set truncationMode.

View File

@ -48,7 +48,7 @@ public class OWSNavigationBar: UINavigationBar {
applyTheme()
NotificationCenter.default.addObserver(self, selector: #selector(callDidChange), name: .OWSWindowManagerCallDidChange, object: nil)
NotificationCenter.default.addObserver(self, selector: #selector(didChangeStatusBarFrame), name: .UIApplicationDidChangeStatusBarFrame, object: nil)
NotificationCenter.default.addObserver(self, selector: #selector(didChangeStatusBarFrame), name: UIApplication.didChangeStatusBarFrameNotification, object: nil)
NotificationCenter.default.addObserver(self,
selector: #selector(themeDidChange),
name: .ThemeDidChange,
@ -75,7 +75,7 @@ public class OWSNavigationBar: UINavigationBar {
return
}
if UIAccessibilityIsReduceTransparencyEnabled() {
if UIAccessibility.isReduceTransparencyEnabled {
blurEffectView?.isHidden = true
let color = Theme.navbarBackgroundColor
let backgroundImage = UIImage(color: color)
@@ -213,7 +213,7 @@ public class OWSNavigationBar: UINavigationBar {
respectsTheme = false
barStyle = .black
titleTextAttributes = [NSAttributedStringKey.foregroundColor: Theme.darkThemePrimaryColor]
titleTextAttributes = [NSAttributedString.Key.foregroundColor: Theme.darkThemePrimaryColor]
barTintColor = Theme.darkThemeBackgroundColor.withAlphaComponent(0.6)
tintColor = Theme.darkThemePrimaryColor

View File

@@ -106,7 +106,7 @@ public class PlayerProgressBar: UIView {
// Background
backgroundColor = UIColor.lightGray.withAlphaComponent(0.5)
if !UIAccessibilityIsReduceTransparencyEnabled() {
if !UIAccessibility.isReduceTransparencyEnabled {
addSubview(blurEffectView)
blurEffectView.ows_autoPinToSuperviewEdges()
}

View File

@@ -58,7 +58,7 @@ public class OWSVideoPlayer: NSObject {
if item.currentTime() == item.duration {
// Rewind for repeated plays, but only if it previously played to end.
avPlayer.seek(to: kCMTimeZero)
avPlayer.seek(to: CMTime.zero)
}
avPlayer.play()
@@ -67,7 +67,7 @@ public class OWSVideoPlayer: NSObject {
@objc
public func stop() {
avPlayer.pause()
avPlayer.seek(to: kCMTimeZero)
avPlayer.seek(to: CMTime.zero)
audioSession.endAudioActivity(self.audioActivity)
}

View File

@@ -301,7 +301,7 @@ public class SignalAttachment: NSObject {
let asset = AVURLAsset(url: mediaUrl)
let generator = AVAssetImageGenerator(asset: asset)
generator.appliesPreferredTrackTransform = true
let cgImage = try generator.copyCGImage(at: CMTimeMake(0, 1), actualTime: nil)
let cgImage = try generator.copyCGImage(at: CMTimeMake(value: 0, timescale: 1), actualTime: nil)
let image = UIImage(cgImage: cgImage)
cachedVideoPreview = image
@@ -747,8 +747,7 @@ public class SignalAttachment: NSObject {
}
dstImage = resizedImage
}
guard let jpgImageData = UIImageJPEGRepresentation(dstImage,
jpegCompressionQuality(imageUploadQuality: imageUploadQuality)) else {
guard let jpgImageData = dstImage.jpegData(compressionQuality: jpegCompressionQuality(imageUploadQuality: imageUploadQuality)) else {
attachment.error = .couldNotConvertToJpeg
return attachment
}

View File

@@ -39,7 +39,7 @@ public class OWSAudioSession: NSObject {
@objc
public func setup() {
NotificationCenter.default.addObserver(self, selector: #selector(proximitySensorStateDidChange(notification:)), name: .UIDeviceProximityStateDidChange, object: nil)
NotificationCenter.default.addObserver(self, selector: #selector(proximitySensorStateDidChange(notification:)), name: UIDevice.proximityStateDidChangeNotification, object: nil)
}
// MARK: Dependencies
@@ -105,20 +105,20 @@ public class OWSAudioSession: NSObject {
// Eventually it would be nice to consolidate more of the audio
// session handling.
} else if aggregateBehaviors.contains(.playAndRecord) {
assert(avAudioSession.recordPermission() == .granted)
try avAudioSession.setCategory(AVAudioSessionCategoryRecord)
assert(avAudioSession.recordPermission == .granted)
try avAudioSession.setCategory(AVAudioSession.Category(rawValue: convertFromAVAudioSessionCategory(AVAudioSession.Category.record)))
} else if aggregateBehaviors.contains(.audioMessagePlayback) {
if self.device.proximityState {
Logger.debug("proximityState: true")
try avAudioSession.setCategory(AVAudioSessionCategoryPlayAndRecord)
try avAudioSession.setCategory(AVAudioSession.Category(rawValue: convertFromAVAudioSessionCategory(AVAudioSession.Category.playAndRecord)))
try avAudioSession.overrideOutputAudioPort(.none)
} else {
Logger.debug("proximityState: false")
try avAudioSession.setCategory(AVAudioSessionCategoryPlayback)
try avAudioSession.setCategory(AVAudioSession.Category(rawValue: convertFromAVAudioSessionCategory(AVAudioSession.Category.playback)))
}
} else if aggregateBehaviors.contains(.playback) {
try avAudioSession.setCategory(AVAudioSessionCategoryPlayback)
try avAudioSession.setCategory(AVAudioSession.Category(rawValue: convertFromAVAudioSessionCategory(AVAudioSession.Category.playback)))
} else {
ensureAudioSessionActivationStateAfterDelay()
}
@@ -168,7 +168,7 @@ public class OWSAudioSession: NSObject {
do {
// When playing audio in Signal, other apps audio (e.g. Music) is paused.
// By notifying when we deactivate, the other app can resume playback.
try avAudioSession.setActive(false, with: [.notifyOthersOnDeactivation])
try avAudioSession.setActive(false, options: [.notifyOthersOnDeactivation])
} catch {
owsFailDebug("failed with error: \(error)")
}
@@ -215,3 +215,8 @@ public class OWSAudioSession: NSObject {
}
}
}
// Helper function inserted by the Swift 4.2 migrator.
// NOTE(review): this shim only unwraps the typed category back to its raw
// String; call sites can pass `AVAudioSession.Category` values directly to
// the modern `setCategory` overloads, after which this function should be
// deleted — TODO confirm no remaining callers need the raw-string form.
fileprivate func convertFromAVAudioSessionCategory(_ category: AVAudioSession.Category) -> String {
    return category.rawValue
}

View File

@@ -72,7 +72,7 @@ public class ConversationStyle: NSObject {
NotificationCenter.default.addObserver(self,
selector: #selector(uiContentSizeCategoryDidChange),
name: NSNotification.Name.UIContentSizeCategoryDidChange,
name: UIContentSizeCategory.didChangeNotification,
object: nil)
}

View File

@@ -52,7 +52,7 @@ public class OWSProximityMonitoringManagerImpl: NSObject, OWSProximityMonitoring
@objc
public func setup() {
NotificationCenter.default.addObserver(self, selector: #selector(proximitySensorStateDidChange(notification:)), name: .UIDeviceProximityStateDidChange, object: nil)
NotificationCenter.default.addObserver(self, selector: #selector(proximitySensorStateDidChange(notification:)), name: UIDevice.proximityStateDidChangeNotification, object: nil)
}
@objc

View File

@@ -50,7 +50,7 @@ public enum OWSMediaError: Error {
let generator = AVAssetImageGenerator(asset: asset)
generator.maximumSize = maxSize
generator.appliesPreferredTrackTransform = true
let time: CMTime = CMTimeMake(1, 60)
let time: CMTime = CMTimeMake(value: 1, timescale: 60)
let cgImage = try generator.copyCGImage(at: time, actualTime: nil)
let image = UIImage(cgImage: cgImage)
return image

View File

@@ -164,7 +164,7 @@ private struct OWSThumbnailRequest {
} else {
throw OWSThumbnailError.assertionFailure(description: "Invalid attachment type.")
}
guard let thumbnailData = UIImageJPEGRepresentation(thumbnailImage, 0.85) else {
guard let thumbnailData = thumbnailImage.jpegData(compressionQuality: 0.85) else {
throw OWSThumbnailError.failure(description: "Could not convert thumbnail to JPEG.")
}
do {

View File

@@ -699,7 +699,7 @@ public class OWSLinkPreview: MTLModel {
let maxImageSize: CGFloat = 1024
let shouldResize = imageSize.width > maxImageSize || imageSize.height > maxImageSize
guard shouldResize else {
guard let dstData = UIImageJPEGRepresentation(srcImage, 0.8) else {
guard let dstData = srcImage.jpegData(compressionQuality: 0.8) else {
Logger.error("Could not write resized image.")
return Promise(error: LinkPreviewError.invalidContent)
}
@@ -710,7 +710,7 @@ public class OWSLinkPreview: MTLModel {
Logger.error("Could not resize image.")
return Promise(error: LinkPreviewError.invalidContent)
}
guard let dstData = UIImageJPEGRepresentation(dstImage, 0.8) else {
guard let dstData = dstImage.jpegData(compressionQuality: 0.8) else {
Logger.error("Could not write resized image.")
return Promise(error: LinkPreviewError.invalidContent)
}

View File

@@ -41,7 +41,7 @@ public class LRUCache&lt;KeyType: Hashable &amp; Equatable, ValueType&gt; {
NotificationCenter.default.addObserver(self,
selector: #selector(didReceiveMemoryWarning),
name: NSNotification.Name.UIApplicationDidReceiveMemoryWarning,
name: UIApplication.didReceiveMemoryWarningNotification,
object: nil)
NotificationCenter.default.addObserver(self,
selector: #selector(didEnterBackground),

View File

@@ -11,7 +11,7 @@ public extension MessageSender {
* Wrap message sending in a Promise for easier callback chaining.
*/
public func sendPromise(message: TSOutgoingMessage) -> Promise<Void> {
let promise: Promise<Void> = Promise { resolver in
let promise: Promise<Void> = Promise<Any> { resolver in
self.send(message, success: resolver.fulfill, failure: resolver.reject)
}

View File

@@ -13,7 +13,7 @@ public extension YapDatabaseConnection {
}
func readWritePromise(_ block: @escaping (YapDatabaseReadWriteTransaction) -> Void) -> Promise<Void> {
return Promise { resolver in
return Promise<Any> { resolver in
self.asyncReadWrite(block, completionBlock: resolver.fulfill)
}
}

View File

@@ -63,7 +63,7 @@ class SAELoadViewController: UIViewController {
self.view.backgroundColor = UIColor.ows_signalBrandBlue
let activityIndicator = UIActivityIndicatorView(activityIndicatorStyle: .whiteLarge)
let activityIndicator = UIActivityIndicatorView(style: .whiteLarge)
self.activityIndicator = activityIndicator
self.view.addSubview(activityIndicator)
activityIndicator.autoCenterInSuperview()

View File

@@ -866,7 +866,7 @@ public class ShareViewController: UIViewController, ShareViewDelegate, SAEFailed
isConvertibleToTextMessage: isConvertibleToTextMessage))
}
} else if let image = value as? UIImage {
if let data = UIImagePNGRepresentation(image) {
if let data = image.pngData() {
let tempFilePath = OWSFileSystem.temporaryFilePath(withFileExtension: "png")
do {
let url = NSURL.fileURL(withPath: tempFilePath)