// The MIT License (MIT)
//
// Copyright (c) 2015-2021 Alexander Grebenyuk (github.com/kean).

import Foundation

#if os(iOS) || os(tvOS) || os(watchOS)
import UIKit
#endif

#if os(watchOS)
import WatchKit
#endif

#if os(macOS)
import Cocoa
#endif
// MARK: - ImageProcessing

/// Performs image processing.
///
/// For basic processing needs, implement the following method:
///
/// ```
/// func process(image: PlatformImage) -> PlatformImage?
/// ```
///
/// If your processor needs to manipulate image metadata (`ImageContainer`), or
/// get access to more information via the context (`ImageProcessingContext`),
/// there is an additional method that allows you to do that:
///
/// ```
/// func process(image container: ImageContainer, context: ImageProcessingContext) -> ImageContainer?
/// ```
///
/// You must implement either one of those methods.
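///
/// For example, a minimal custom processor might look like the following sketch
/// (`MyProcessor` and its identifier string are illustrative, not part of the framework):
///
/// ```
/// struct MyProcessor: ImageProcessing, Hashable {
///     var identifier: String { "com.example/my-processor" }
///     var hashableIdentifier: AnyHashable { self }
///
///     func process(_ image: PlatformImage) -> PlatformImage? {
///         // Transform the image here; return nil if processing fails.
///         image
///     }
/// }
/// ```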
public protocol ImageProcessing {
    /// Returns a processed image. By default, returns `nil`.
    ///
    /// - note: Gets called on a background queue managed by the pipeline.
    func process(_ image: PlatformImage) -> PlatformImage?

    /// Returns a processed image. By default, this calls the basic `process(image:)` method.
    ///
    /// - note: Gets called on a background queue managed by the pipeline.
    func process(_ container: ImageContainer, context: ImageProcessingContext) -> ImageContainer?

    /// Returns a string that uniquely identifies the processor.
    ///
    /// Consider using the reverse DNS notation.
    var identifier: String { get }

    /// Returns a unique processor identifier.
    ///
    /// The default implementation simply returns `var identifier: String`, but
    /// it can be overridden as a performance optimization: creating and comparing
    /// strings is _expensive_, so you can opt in to return something that is
    /// fast to create and to compare. See `ImageProcessors.Resize` for an example.
    ///
    /// - note: A common approach is to make your processor `Hashable` and return `self`
    /// from `hashableIdentifier`.
    var hashableIdentifier: AnyHashable { get }
}
public extension ImageProcessing {
    /// The default implementation simply calls the basic
    /// `process(_ image: PlatformImage) -> PlatformImage?` method.
    func process(_ container: ImageContainer, context: ImageProcessingContext) -> ImageContainer? {
        container.map(process)
    }

    /// The default implementation simply returns `var identifier: String`.
    var hashableIdentifier: AnyHashable { identifier }
}
/// Image processing context used when selecting which processor to use.
public struct ImageProcessingContext {
    public let request: ImageRequest
    public let response: ImageResponse
    public let isFinal: Bool

    public init(request: ImageRequest, response: ImageResponse, isFinal: Bool) {
        self.request = request
        self.response = response
        self.isFinal = isFinal
    }
}
// MARK: - ImageProcessors

/// A namespace for all processors that implement `ImageProcessing` protocol.
public enum ImageProcessors {}

// MARK: - ImageProcessors.Resize

extension ImageProcessors {
    /// Scales an image to a specified size.
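    ///
    /// A usage sketch (assuming `image` is an existing `PlatformImage`; the 44×44 target size is illustrative):
    ///
    /// ```
    /// let resize = ImageProcessors.Resize(size: CGSize(width: 44, height: 44), crop: true)
    /// let thumbnail = resize.process(image)
    /// ```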
    public struct Resize: ImageProcessing, Hashable, CustomStringConvertible {
        private let size: Size
        private let contentMode: ContentMode
        private let crop: Bool
        private let upscale: Bool

        /// An option for how to resize the image.
        public enum ContentMode: CustomStringConvertible {
            /// Scales the image so that it completely fills the target area.
            /// Maintains the aspect ratio of the original image.
            case aspectFill

            /// Scales the image so that it fits the target size. Maintains the
            /// aspect ratio of the original image.
            case aspectFit

            public var description: String {
                switch self {
                case .aspectFill: return ".aspectFill"
                case .aspectFit: return ".aspectFit"
                }
            }
        }

        /// Initializes the processor with the given size.
        ///
        /// - parameter size: The target size.
        /// - parameter unit: Unit of the target size, `.points` by default.
        /// - parameter contentMode: `.aspectFill` by default.
        /// - parameter crop: If `true`, crops the image to match the target size.
        /// Does nothing with content mode `.aspectFit`. `false` by default.
        /// - parameter upscale: `false` by default.
        public init(size: CGSize, unit: ImageProcessingOptions.Unit = .points, contentMode: ContentMode = .aspectFill, crop: Bool = false, upscale: Bool = false) {
            self.size = Size(cgSize: CGSize(size: size, unit: unit))
            self.contentMode = contentMode
            self.crop = crop
            self.upscale = upscale
        }
        /// Resizes the image to the given width preserving aspect ratio.
        ///
        /// - parameter unit: Unit of the target size, `.points` by default.
        public init(width: CGFloat, unit: ImageProcessingOptions.Unit = .points, upscale: Bool = false) {
            self.init(size: CGSize(width: width, height: 9999), unit: unit, contentMode: .aspectFit, crop: false, upscale: upscale)
        }

        /// Resizes the image to the given height preserving aspect ratio.
        ///
        /// - parameter unit: Unit of the target size, `.points` by default.
        public init(height: CGFloat, unit: ImageProcessingOptions.Unit = .points, upscale: Bool = false) {
            self.init(size: CGSize(width: 9999, height: height), unit: unit, contentMode: .aspectFit, crop: false, upscale: upscale)
        }

        public func process(_ image: PlatformImage) -> PlatformImage? {
            if crop && contentMode == .aspectFill {
                return image.processed.byResizingAndCropping(to: size.cgSize)
            } else {
                return image.processed.byResizing(to: size.cgSize, contentMode: contentMode, upscale: upscale)
            }
        }

        public var identifier: String {
            "com.github.kean/nuke/resize?s=\(size.cgSize),cm=\(contentMode),crop=\(crop),upscale=\(upscale)"
        }

        public var hashableIdentifier: AnyHashable { self }

        public var description: String {
            "Resize(size: \(size.cgSize) pixels, contentMode: \(contentMode), crop: \(crop), upscale: \(upscale))"
        }
    }
}
// MARK: - ImageProcessors.Circle

extension ImageProcessors {
    /// Rounds the corners of an image into a circle. If the image is not a square,
    /// crops it to a square first.
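    ///
    /// A usage sketch (assuming `image` is an existing `PlatformImage`):
    ///
    /// ```
    /// let avatar = ImageProcessors.Circle().process(image)
    /// ```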
    public struct Circle: ImageProcessing, Hashable, CustomStringConvertible {
        private let border: ImageProcessingOptions.Border?

        public init(border: ImageProcessingOptions.Border? = nil) {
            self.border = border
        }

        public func process(_ image: PlatformImage) -> PlatformImage? {
            image.processed.byDrawingInCircle(border: border)
        }

        public var identifier: String {
            if let border = self.border {
                return "com.github.kean/nuke/circle?border=\(border)"
            } else {
                return "com.github.kean/nuke/circle"
            }
        }

        public var hashableIdentifier: AnyHashable { self }

        public var description: String {
            "Circle(border: \(border?.description ?? "nil"))"
        }
    }
}
// MARK: - ImageProcessors.RoundedCorners

extension ImageProcessors {
    /// Rounds the corners of an image to the specified radius.
    ///
    /// - warning: In order for the corners to be displayed correctly, the image must exactly match the size
    /// of the image view in which it will be displayed. See `ImageProcessors.Resize` for more info.
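    ///
    /// A usage sketch, applied after resizing the image to the exact view size
    /// (the radius value is illustrative):
    ///
    /// ```
    /// let rounded = ImageProcessors.RoundedCorners(radius: 16).process(image)
    /// ```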
    public struct RoundedCorners: ImageProcessing, Hashable, CustomStringConvertible {
        private let radius: CGFloat
        private let border: ImageProcessingOptions.Border?

        /// Initializes the processor with the given radius.
        ///
        /// - parameter radius: The radius of the corners.
        /// - parameter unit: Unit of the radius, `.points` by default.
        /// - parameter border: An optional border drawn around the image.
        public init(radius: CGFloat, unit: ImageProcessingOptions.Unit = .points, border: ImageProcessingOptions.Border? = nil) {
            self.radius = radius.converted(to: unit)
            self.border = border
        }

        public func process(_ image: PlatformImage) -> PlatformImage? {
            image.processed.byAddingRoundedCorners(radius: radius, border: border)
        }

        public var identifier: String {
            if let border = self.border {
                return "com.github.kean/nuke/rounded_corners?radius=\(radius),border=\(border)"
            } else {
                return "com.github.kean/nuke/rounded_corners?radius=\(radius)"
            }
        }

        public var hashableIdentifier: AnyHashable { self }

        public var description: String {
            "RoundedCorners(radius: \(radius) pixels, border: \(border?.description ?? "nil"))"
        }
    }
}
#if os(iOS) || os(tvOS) || os(macOS)

// MARK: - ImageProcessors.CoreImageFilter

import CoreImage

extension ImageProcessors {
    /// Applies Core Image filter (`CIFilter`) to the image.
    ///
    /// # Performance Considerations
    ///
    /// Prefer chaining multiple `CIFilter` objects using `Core Image` facilities
    /// instead of using multiple instances of `ImageProcessors.CoreImageFilter`.
    ///
    /// # References
    ///
    /// - [Core Image Programming Guide](https://developer.apple.com/library/ios/documentation/GraphicsImaging/Conceptual/CoreImaging/ci_intro/ci_intro.html)
    /// - [Core Image Filter Reference](https://developer.apple.com/library/prerelease/ios/documentation/GraphicsImaging/Reference/CoreImageFilterReference/index.html)
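    ///
    /// A usage sketch ("CISepiaTone" and "inputIntensity" are standard Core Image
    /// names; the identifier string and `image` are illustrative):
    ///
    /// ```
    /// let sepia = ImageProcessors.CoreImageFilter(
    ///     name: "CISepiaTone",
    ///     parameters: ["inputIntensity": 0.8],
    ///     identifier: "com.example/sepia"
    /// )
    /// let output = sepia.process(image)
    /// ```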
    public struct CoreImageFilter: ImageProcessing, CustomStringConvertible {
        private let name: String
        private let parameters: [String: Any]
        public let identifier: String

        /// - parameter identifier: Uniquely identifies the processor.
        public init(name: String, parameters: [String: Any], identifier: String) {
            self.name = name
            self.parameters = parameters
            self.identifier = identifier
        }

        public init(name: String) {
            self.name = name
            self.parameters = [:]
            self.identifier = "com.github.kean/nuke/core_image?name=\(name)"
        }

        public func process(_ image: PlatformImage) -> PlatformImage? {
            let filter = CIFilter(name: name, parameters: parameters)
            return CoreImageFilter.apply(filter: filter, to: image)
        }

        // MARK: - Apply Filter

        /// A default context shared between all Core Image filters. The context
        /// has `.priorityRequestLow` option set to `true`.
        public static var context = CIContext(options: [.priorityRequestLow: true])

        public static func apply(filter: CIFilter?, to image: PlatformImage) -> PlatformImage? {
            guard let filter = filter else {
                return nil
            }
            return applyFilter(to: image) {
                filter.setValue($0, forKey: kCIInputImageKey)
                return filter.outputImage
            }
        }

        static func applyFilter(to image: PlatformImage, context: CIContext = context, closure: (CoreImage.CIImage) -> CoreImage.CIImage?) -> PlatformImage? {
            let ciImage: CoreImage.CIImage? = {
                if let image = image.ciImage {
                    return image
                }
                if let image = image.cgImage {
                    return CoreImage.CIImage(cgImage: image)
                }
                return nil
            }()
            guard let inputImage = ciImage, let outputImage = closure(inputImage) else {
                return nil
            }
            guard let imageRef = context.createCGImage(outputImage, from: outputImage.extent) else {
                return nil
            }
            return PlatformImage.make(cgImage: imageRef, source: image)
        }

        public var description: String {
            "CoreImageFilter(name: \(name), parameters: \(parameters))"
        }
    }
}
// MARK: - ImageProcessors.GaussianBlur

extension ImageProcessors {
    /// Blurs an image using `CIGaussianBlur` filter.
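    ///
    /// A usage sketch (the radius value and `image` are illustrative):
    ///
    /// ```
    /// let blurred = ImageProcessors.GaussianBlur(radius: 16).process(image)
    /// ```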
    public struct GaussianBlur: ImageProcessing, Hashable, CustomStringConvertible {
        private let radius: Int

        /// Initializes the receiver with a blur radius.
        public init(radius: Int = 8) {
            self.radius = radius
        }

        /// Applies `CIGaussianBlur` filter to the image.
        public func process(_ image: PlatformImage) -> PlatformImage? {
            let filter = CIFilter(name: "CIGaussianBlur", parameters: ["inputRadius": radius])
            return CoreImageFilter.apply(filter: filter, to: image)
        }

        public var identifier: String {
            "com.github.kean/nuke/gaussian_blur?radius=\(radius)"
        }

        public var hashableIdentifier: AnyHashable { self }

        public var description: String {
            "GaussianBlur(radius: \(radius))"
        }
    }
}

#endif
// MARK: - ImageDecompression (Internal)

struct ImageDecompression {
    static func decompress(image: PlatformImage) -> PlatformImage {
        let output = image.decompressed() ?? image
        ImageDecompression.setDecompressionNeeded(false, for: output)
        return output
    }

    // MARK: Managing Decompression State

    static var isDecompressionNeededAK = "ImageDecompressor.isDecompressionNeeded.AssociatedKey"

    static func setDecompressionNeeded(_ isDecompressionNeeded: Bool, for image: PlatformImage) {
        objc_setAssociatedObject(image, &isDecompressionNeededAK, isDecompressionNeeded, .OBJC_ASSOCIATION_RETAIN)
    }

    static func isDecompressionNeeded(for image: PlatformImage) -> Bool? {
        objc_getAssociatedObject(image, &isDecompressionNeededAK) as? Bool
    }
}
// MARK: - ImageProcessors.Composition

extension ImageProcessors {
    /// Composes multiple processors.
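    ///
    /// A usage sketch (the individual processors, sizes, and `image` are illustrative):
    ///
    /// ```
    /// let composition = ImageProcessors.Composition([
    ///     ImageProcessors.Resize(size: CGSize(width: 320, height: 320)),
    ///     ImageProcessors.Circle()
    /// ])
    /// let output = composition.process(image)
    /// ```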
    public struct Composition: ImageProcessing, Hashable, CustomStringConvertible {
        let processors: [ImageProcessing]

        /// Composes multiple processors.
        public init(_ processors: [ImageProcessing]) {
            // Note: multiple compositions are not flattened by default.
            self.processors = processors
        }
        public func process(_ image: PlatformImage) -> PlatformImage? {
            processors.reduce(image) { image, processor in
                autoreleasepool {
                    image.flatMap { processor.process($0) }
                }
            }
        }

        /// Processes the given image by applying each processor in the order in
        /// which they were added. If one of the processors fails to produce
        /// an image, the processing stops and `nil` is returned.
        public func process(_ container: ImageContainer, context: ImageProcessingContext) -> ImageContainer? {
            processors.reduce(container) { container, processor in
                autoreleasepool {
                    container.flatMap { processor.process($0, context: context) }
                }
            }
        }

        public var identifier: String {
            processors.map({ $0.identifier }).joined()
        }

        public var hashableIdentifier: AnyHashable { self }

        public func hash(into hasher: inout Hasher) {
            for processor in processors {
                hasher.combine(processor.hashableIdentifier)
            }
        }

        public static func == (lhs: Composition, rhs: Composition) -> Bool {
            lhs.processors == rhs.processors
        }

        public var description: String {
            "Composition(processors: \(processors))"
        }
    }
}
// MARK: - ImageProcessors.Anonymous

extension ImageProcessors {
    /// Processes an image using the given closure.
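    ///
    /// A usage sketch (the identifier and closure body are illustrative):
    ///
    /// ```
    /// let processor = ImageProcessors.Anonymous(id: "com.example/custom") { image in
    ///     // Return the transformed image, or nil if processing fails.
    ///     image
    /// }
    /// ```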
    public struct Anonymous: ImageProcessing, CustomStringConvertible {
        public let identifier: String
        private let closure: (PlatformImage) -> PlatformImage?

        public init(id: String, _ closure: @escaping (PlatformImage) -> PlatformImage?) {
            self.identifier = id
            self.closure = closure
        }

        public func process(_ image: PlatformImage) -> PlatformImage? {
            self.closure(image)
        }

        public var description: String {
            "AnonymousProcessor(identifier: \(identifier))"
        }
    }
}
// MARK: - Image Processing (Internal)

private extension PlatformImage {
    /// Draws the image in a `CGContext` in a canvas with the given size using
    /// the specified draw rect.
    ///
    /// For example, if the canvas size is `CGSize(width: 10, height: 10)` and
    /// the draw rect is `CGRect(x: -5, y: 0, width: 20, height: 10)`, it would
    /// draw the input image (which is horizontal based on the given draw rect)
    /// in a square by centering it in the canvas.
    ///
    /// - parameter drawRect: `nil` by default. If `nil`, uses the canvas rect.
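    ///
    /// A sketch of the example above (assuming `image` is the wide 20×10 input image):
    ///
    /// ```
    /// let square = image.draw(
    ///     inCanvasWithSize: CGSize(width: 10, height: 10),
    ///     drawRect: CGRect(x: -5, y: 0, width: 20, height: 10)
    /// )
    /// ```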
    func draw(inCanvasWithSize canvasSize: CGSize, drawRect: CGRect? = nil) -> PlatformImage? {
        guard let cgImage = cgImage else {
            return nil
        }
        guard let ctx = CGContext.make(cgImage, size: canvasSize) else {
            return nil
        }
        ctx.draw(cgImage, in: drawRect ?? CGRect(origin: .zero, size: canvasSize))
        guard let outputCGImage = ctx.makeImage() else {
            return nil
        }
        return PlatformImage.make(cgImage: outputCGImage, source: self)
    }

    /// Decompresses the input image by drawing it in a `CGContext`.
    func decompressed() -> PlatformImage? {
        guard let cgImage = cgImage else {
            return nil
        }
        return draw(inCanvasWithSize: cgImage.size, drawRect: CGRect(origin: .zero, size: cgImage.size))
    }
}
// MARK: - ImageProcessingExtensions

private extension PlatformImage {
    var processed: ImageProcessingExtensions {
        ImageProcessingExtensions(image: self)
    }
}
private struct ImageProcessingExtensions {
    let image: PlatformImage

    func byResizing(to targetSize: CGSize,
                    contentMode: ImageProcessors.Resize.ContentMode,
                    upscale: Bool) -> PlatformImage? {
        guard let cgImage = image.cgImage else {
            return nil
        }
        #if os(iOS) || os(tvOS) || os(watchOS)
        let targetSize = targetSize.rotatedForOrientation(image.imageOrientation)
        #endif
        let scale = cgImage.size.getScale(targetSize: targetSize, contentMode: contentMode)
        guard scale < 1 || upscale else {
            return image // The image doesn't require scaling
        }
        let size = cgImage.size.scaled(by: scale).rounded()
        return image.draw(inCanvasWithSize: size)
    }

    /// Crops the input image to the given size and resizes it if needed.
    /// - note: this method will always upscale.
    func byResizingAndCropping(to targetSize: CGSize) -> PlatformImage? {
        guard let cgImage = image.cgImage else {
            return nil
        }
        #if os(iOS) || os(tvOS) || os(watchOS)
        let targetSize = targetSize.rotatedForOrientation(image.imageOrientation)
        #endif
        let scale = cgImage.size.getScale(targetSize: targetSize, contentMode: .aspectFill)
        let scaledSize = cgImage.size.scaled(by: scale)
        let drawRect = scaledSize.centeredInRectWithSize(targetSize)
        return image.draw(inCanvasWithSize: targetSize, drawRect: drawRect)
    }

    func byDrawingInCircle(border: ImageProcessingOptions.Border?) -> PlatformImage? {
        guard let squared = byCroppingToSquare(), let cgImage = squared.cgImage else {
            return nil
        }
        let radius = CGFloat(cgImage.width) / 2.0 // Can use any dimension since image is a square
        return squared.processed.byAddingRoundedCorners(radius: radius, border: border)
    }
    /// Crops the image to a centered square, preserving the aspect ratio.
    /// If the image is already a square, returns the original image.
    func byCroppingToSquare() -> PlatformImage? {
        guard let cgImage = image.cgImage else {
            return nil
        }
        guard cgImage.width != cgImage.height else {
            return image // Already a square
        }
        let imageSize = cgImage.size
        let side = min(cgImage.width, cgImage.height)
        let targetSize = CGSize(width: side, height: side)
        let cropRect = CGRect(origin: .zero, size: targetSize).offsetBy(
            dx: max(0, (imageSize.width - targetSize.width) / 2),
            dy: max(0, (imageSize.height - targetSize.height) / 2)
        )
        guard let cropped = cgImage.cropping(to: cropRect) else {
            return nil
        }
        return PlatformImage.make(cgImage: cropped, source: image)
    }
    /// Adds rounded corners with the given radius to the image.
    /// - parameter radius: Radius in pixels.
    /// - parameter border: Optional stroke border.
    func byAddingRoundedCorners(radius: CGFloat, border: ImageProcessingOptions.Border? = nil) -> PlatformImage? {
        guard let cgImage = image.cgImage else {
            return nil
        }
        guard let ctx = CGContext.make(cgImage, size: cgImage.size, alphaInfo: .premultipliedLast) else {
            return nil
        }
        let rect = CGRect(origin: CGPoint.zero, size: cgImage.size)
        let path = CGPath(roundedRect: rect, cornerWidth: radius, cornerHeight: radius, transform: nil)
        ctx.addPath(path)
        ctx.clip()
        ctx.draw(cgImage, in: CGRect(origin: CGPoint.zero, size: cgImage.size))
        if let border = border {
            ctx.setStrokeColor(border.color.cgColor)
            ctx.addPath(path)
            ctx.setLineWidth(border.width)
            ctx.strokePath()
        }
        guard let outputCGImage = ctx.makeImage() else {
            return nil
        }
        return PlatformImage.make(cgImage: outputCGImage, source: image)
    }
}
// MARK: - CoreGraphics Helpers (Internal)

#if os(macOS)
typealias Color = NSColor
#else
typealias Color = UIColor
#endif

#if os(macOS)
extension NSImage {
    var cgImage: CGImage? {
        cgImage(forProposedRect: nil, context: nil, hints: nil)
    }

    var ciImage: CIImage? {
        cgImage.map { CIImage(cgImage: $0) }
    }

    static func make(cgImage: CGImage, source: NSImage) -> NSImage {
        NSImage(cgImage: cgImage, size: .zero)
    }
}
#else
extension UIImage {
    static func make(cgImage: CGImage, source: UIImage) -> UIImage {
        UIImage(cgImage: cgImage, scale: source.scale, orientation: source.imageOrientation)
    }
}
#endif

extension CGImage {
    /// Returns `true` if the image doesn't contain an alpha channel.
    var isOpaque: Bool {
        let alpha = alphaInfo
        return alpha == .none || alpha == .noneSkipFirst || alpha == .noneSkipLast
    }

    var size: CGSize {
        CGSize(width: width, height: height)
    }
}
private extension CGFloat {
    func converted(to unit: ImageProcessingOptions.Unit) -> CGFloat {
        switch unit {
        case .pixels: return self
        case .points: return self * Screen.scale
        }
    }
}

// Adds Hashable without making changes to public CGSize API
private struct Size: Hashable {
    let cgSize: CGSize

    func hash(into hasher: inout Hasher) {
        hasher.combine(cgSize.width)
        hasher.combine(cgSize.height)
    }
}
private extension CGSize {
    /// Creates the size in pixels by scaling the input size to the screen scale
    /// if needed.
    init(size: CGSize, unit: ImageProcessingOptions.Unit) {
        switch unit {
        case .pixels: self = size // The size is already in pixels
        case .points: self = size.scaled(by: Screen.scale)
        }
    }

    func scaled(by scale: CGFloat) -> CGSize {
        CGSize(width: width * scale, height: height * scale)
    }

    func rounded() -> CGSize {
        CGSize(width: CGFloat(round(width)), height: CGFloat(round(height)))
    }
}
#if os(iOS) || os(tvOS) || os(watchOS)
private extension CGSize {
    func rotatedForOrientation(_ imageOrientation: UIImage.Orientation) -> CGSize {
        switch imageOrientation {
        case .left, .leftMirrored, .right, .rightMirrored:
            return CGSize(width: height, height: width) // Rotate 90 degrees
        case .up, .upMirrored, .down, .downMirrored:
            return self
        @unknown default:
            return self
        }
    }
}
#endif
extension CGSize {
    func getScale(targetSize: CGSize, contentMode: ImageProcessors.Resize.ContentMode) -> CGFloat {
        let scaleHor = targetSize.width / width
        let scaleVert = targetSize.height / height
        switch contentMode {
        case .aspectFill:
            return max(scaleHor, scaleVert)
        case .aspectFit:
            return min(scaleHor, scaleVert)
        }
    }

    /// Returns a rect of the receiver's size positioned so that it is centered
    /// over a rect of the given target size (assuming `origin: .zero`).
    func centeredInRectWithSize(_ targetSize: CGSize) -> CGRect {
        // Offset the rect so that its center matches the center of the target rect.
        CGRect(origin: .zero, size: self).offsetBy(
            dx: -(width - targetSize.width) / 2,
            dy: -(height - targetSize.height) / 2
        )
    }
}
// MARK: - ImageProcessing Extensions (Internal)

func == (lhs: [ImageProcessing], rhs: [ImageProcessing]) -> Bool {
    guard lhs.count == rhs.count else {
        return false
    }
    // Lazily creates `hashableIdentifiers` because for some processors the
    // identifiers might be expensive to compute.
    return zip(lhs, rhs).allSatisfy {
        $0.hashableIdentifier == $1.hashableIdentifier
    }
}
// MARK: - ImageProcessingOptions

public enum ImageProcessingOptions {

    public enum Unit: CustomStringConvertible {
        case points
        case pixels

        public var description: String {
            switch self {
            case .points: return "points"
            case .pixels: return "pixels"
            }
        }
    }
    #if os(iOS) || os(tvOS) || os(watchOS)

    /// Draws a border.
    ///
    /// - warning: To make sure that the border looks the way you expect,
    /// make sure that the images you display exactly match the size of the
    /// views in which they get displayed. If you can't guarantee that, please
    /// consider adding a border to the view's layer instead. This should be your
    /// primary option regardless.
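    ///
    /// A usage sketch (the color and width values are illustrative):
    ///
    /// ```
    /// let border = ImageProcessingOptions.Border(color: .white, width: 2)
    /// let rounded = ImageProcessors.RoundedCorners(radius: 16, border: border)
    /// ```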
    public struct Border: Hashable, CustomStringConvertible {
        public let color: UIColor
        public let width: CGFloat

        /// - parameter color: Border color.
        /// - parameter width: Border width. 1 point by default.
        /// - parameter unit: Unit of the width, `.points` by default.
        public init(color: UIColor, width: CGFloat = 1, unit: Unit = .points) {
            self.color = color
            self.width = width.converted(to: unit)
        }

        public var description: String {
            "Border(color: \(color.hex), width: \(width) pixels)"
        }
    }
    #else

    /// Draws a border.
    ///
    /// - warning: To make sure that the border looks the way you expect,
    /// make sure that the images you display exactly match the size of the
    /// views in which they get displayed. If you can't guarantee that, please
    /// consider adding a border to the view's layer instead. This should be your
    /// primary option regardless.
    public struct Border: Hashable, CustomStringConvertible { // Duplicated to avoid introducing PlatformColor
        public let color: NSColor
        public let width: CGFloat

        /// - parameter color: Border color.
        /// - parameter width: Border width. 1 point by default.
        /// - parameter unit: Unit of the width, `.points` by default.
        public init(color: NSColor, width: CGFloat = 1, unit: Unit = .points) {
            self.color = color
            self.width = width.converted(to: unit)
        }

        public var description: String {
            "Border(color: \(color.hex), width: \(width) pixels)"
        }
    }

    #endif
}
// MARK: - Misc (Internal)

struct Screen {
    #if os(iOS) || os(tvOS)
    /// Returns the current screen scale.
    static var scale: CGFloat { UIScreen.main.scale }
    #elseif os(watchOS)
    /// Returns the current screen scale.
    static var scale: CGFloat { WKInterfaceDevice.current().screenScale }
    #elseif os(macOS)
    /// Always returns 1.
    static var scale: CGFloat { 1 }
    #endif
}
extension Color {
    /// Returns a hex representation of the color, e.g. "#FFFFAA".
    var hex: String {
        var (r, g, b, a) = (CGFloat(0), CGFloat(0), CGFloat(0), CGFloat(0))
        getRed(&r, green: &g, blue: &b, alpha: &a)
        let components = [r, g, b, a < 1 ? a : nil]
        return "#" + components
            .compactMap { $0 }
            .map { String(format: "%02lX", lroundf(Float($0) * 255)) }
            .joined()
    }
}
private extension CGContext {
    static func make(_ image: CGImage, size: CGSize, alphaInfo: CGImageAlphaInfo? = nil) -> CGContext? {
        let alphaInfo: CGImageAlphaInfo = alphaInfo ?? (image.isOpaque ? .noneSkipLast : .premultipliedLast)

        // Create the context which matches the input image.
        if let ctx = CGContext(
            data: nil,
            width: Int(size.width),
            height: Int(size.height),
            bitsPerComponent: 8,
            bytesPerRow: 0,
            space: image.colorSpace ?? CGColorSpaceCreateDeviceRGB(),
            bitmapInfo: alphaInfo.rawValue
        ) {
            return ctx
        }

        // In case the combination of parameters (color space, bits per component, etc.)
        // is not supported by Core Graphics, switch to the default context.
        // - Quartz 2D Programming Guide
        // - https://github.com/kean/Nuke/issues/35
        // - https://github.com/kean/Nuke/issues/57
        return CGContext(
            data: nil,
            width: Int(size.width), height: Int(size.height),
            bitsPerComponent: 8,
            bytesPerRow: 0,
            space: CGColorSpaceCreateDeviceRGB(),
            bitmapInfo: alphaInfo.rawValue
        )
    }
}