......@@ -7,6 +7,8 @@
objects = {
/* Begin PBXBuildFile section */
561112572344BCA1006900F1 /* Appendix10.swift in Sources */ = {isa = PBXBuildFile; fileRef = 561112562344BCA1006900F1 /* Appendix10.swift */; };
561112592344C283006900F1 /* Appendix11.swift in Sources */ = {isa = PBXBuildFile; fileRef = 561112582344C283006900F1 /* Appendix11.swift */; };
5635C7E02283737000B83024 /* Window.heic in Resources */ = {isa = PBXBuildFile; fileRef = 5635C7DF2283667A00B83024 /* Window.heic */; };
565508C522AA7D1D00884E5C /* Appendix2.swift in Sources */ = {isa = PBXBuildFile; fileRef = 565508C422AA7D1D00884E5C /* Appendix2.swift */; };
565C4CFD22798EBA00275692 /* Task5.swift in Sources */ = {isa = PBXBuildFile; fileRef = 565C4CFC22798EBA00275692 /* Task5.swift */; };
......@@ -14,6 +16,8 @@
566774762278836C003F881E /* Task1.swift in Sources */ = {isa = PBXBuildFile; fileRef = 566774752278836C003F881E /* Task1.swift */; };
5667747922788388003F881E /* Task2.swift in Sources */ = {isa = PBXBuildFile; fileRef = 5667747822788388003F881E /* Task2.swift */; };
5667747B22798BCA003F881E /* Task3.swift in Sources */ = {isa = PBXBuildFile; fileRef = 5667747A22798BCA003F881E /* Task3.swift */; };
56A52E812345E8BC0061E5A3 /* Appendix12.swift in Sources */ = {isa = PBXBuildFile; fileRef = 56A52E802345E8BC0061E5A3 /* Appendix12.swift */; };
56A52E832346046D0061E5A3 /* Extensions.swift in Sources */ = {isa = PBXBuildFile; fileRef = 56A52E822346046D0061E5A3 /* Extensions.swift */; };
56AD53392278513B005B1E87 /* BrowserViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 56AD53382278513B005B1E87 /* BrowserViewController.swift */; };
56AD533B2278668B005B1E87 /* EditorViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 56AD533A2278668B005B1E87 /* EditorViewController.swift */; };
56AD534122787DEE005B1E87 /* Goat.jpg in Resources */ = {isa = PBXBuildFile; fileRef = 56AD533C22787DED005B1E87 /* Goat.jpg */; };
......@@ -35,6 +39,8 @@
/* End PBXBuildFile section */
/* Begin PBXFileReference section */
561112562344BCA1006900F1 /* Appendix10.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Appendix10.swift; sourceTree = "<group>"; };
561112582344C283006900F1 /* Appendix11.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Appendix11.swift; sourceTree = "<group>"; };
5635C7DF2283667A00B83024 /* Window.heic */ = {isa = PBXFileReference; lastKnownFileType = file; path = Window.heic; sourceTree = "<group>"; };
565508C422AA7D1D00884E5C /* Appendix2.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Appendix2.swift; sourceTree = "<group>"; };
565C4CFC22798EBA00275692 /* Task5.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Task5.swift; sourceTree = "<group>"; };
......@@ -42,6 +48,8 @@
566774752278836C003F881E /* Task1.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Task1.swift; sourceTree = "<group>"; };
5667747822788388003F881E /* Task2.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Task2.swift; sourceTree = "<group>"; };
5667747A22798BCA003F881E /* Task3.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Task3.swift; sourceTree = "<group>"; };
56A52E802345E8BC0061E5A3 /* Appendix12.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Appendix12.swift; sourceTree = "<group>"; };
56A52E822346046D0061E5A3 /* Extensions.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Extensions.swift; sourceTree = "<group>"; };
56AD53382278513B005B1E87 /* BrowserViewController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = BrowserViewController.swift; sourceTree = "<group>"; };
56AD533A2278668B005B1E87 /* EditorViewController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = EditorViewController.swift; sourceTree = "<group>"; };
56AD533C22787DED005B1E87 /* Goat.jpg */ = {isa = PBXFileReference; lastKnownFileType = image.jpeg; path = Goat.jpg; sourceTree = "<group>"; };
......@@ -87,6 +95,9 @@
E3D058AB22B9490300118F2F /* Appendix7.swift */,
E3355E43232102D00093589E /* Appendix8.swift */,
E3355E45232133790093589E /* Appendix9.swift */,
561112562344BCA1006900F1 /* Appendix10.swift */,
561112582344C283006900F1 /* Appendix11.swift */,
56A52E802345E8BC0061E5A3 /* Appendix12.swift */,
);
path = Appendices;
sourceTree = "<group>";
......@@ -137,6 +148,7 @@
E32ABCCD2273092300D74B7C /* MainViewController.swift */,
56AD53382278513B005B1E87 /* BrowserViewController.swift */,
56AD533A2278668B005B1E87 /* EditorViewController.swift */,
56A52E822346046D0061E5A3 /* Extensions.swift */,
5667747722788370003F881E /* Tasks */,
565508C622AA882D00884E5C /* Appendices */,
E32ABCDD22730AE400D74B7C /* Kernels.metal */,
......@@ -225,7 +237,9 @@
565C4CFD22798EBA00275692 /* Task5.swift in Sources */,
566774762278836C003F881E /* Task1.swift in Sources */,
E3D058A622B9430800118F2F /* Appendix3.swift in Sources */,
56A52E812345E8BC0061E5A3 /* Appendix12.swift in Sources */,
E3355E44232102D00093589E /* Appendix8.swift in Sources */,
561112572344BCA1006900F1 /* Appendix10.swift in Sources */,
5667747922788388003F881E /* Task2.swift in Sources */,
56AD533B2278668B005B1E87 /* EditorViewController.swift in Sources */,
E32ABCDE22730AE400D74B7C /* Kernels.metal in Sources */,
......@@ -237,6 +251,8 @@
565508C522AA7D1D00884E5C /* Appendix2.swift in Sources */,
E3D058A822B9431C00118F2F /* Appendix4.swift in Sources */,
56AD53392278513B005B1E87 /* BrowserViewController.swift in Sources */,
561112592344C283006900F1 /* Appendix11.swift in Sources */,
56A52E832346046D0061E5A3 /* Extensions.swift in Sources */,
E3D6AE6D22B0F94A00ACADC5 /* Appendix1.swift in Sources */,
E32ABCCE2273092300D74B7C /* MainViewController.swift in Sources */,
E3355E46232133790093589E /* Appendix9.swift in Sources */,
......@@ -381,6 +397,7 @@
CODE_SIGN_STYLE = Automatic;
DEVELOPMENT_TEAM = 3VZY5SN6SJ;
INFOPLIST_FILE = "Photo Editor/Info.plist";
IPHONEOS_DEPLOYMENT_TARGET = 13.0;
LD_RUNPATH_SEARCH_PATHS = (
"$(inherited)",
"@executable_path/Frameworks",
......@@ -401,6 +418,7 @@
CODE_SIGN_STYLE = Automatic;
DEVELOPMENT_TEAM = 3VZY5SN6SJ;
INFOPLIST_FILE = "Photo Editor/Info.plist";
IPHONEOS_DEPLOYMENT_TARGET = 13.0;
LD_RUNPATH_SEARCH_PATHS = (
"$(inherited)",
"@executable_path/Frameworks",
......
......@@ -6,8 +6,8 @@ class AppDelegate: UIResponder, UIApplicationDelegate {
func application(_ application: UIApplication, didFinishLaunchingWithOptions launchOptions: [UIApplication.LaunchOptionsKey: Any]?) -> Bool {
UINavigationBar.appearance().barStyle = .blackOpaque
UIToolbar.appearance().barStyle = .blackOpaque
UINavigationBar.appearance().barStyle = .black
UIToolbar.appearance().barStyle = .black
let window = UIWindow(frame: UIScreen.main.bounds)
window.rootViewController = MainViewController()
......
import CoreImage
import CoreText
import UIKit
let appendix10: Effect = { originalImage, depthMap in
    let string = "Hello world!"
    let extent = originalImage.extent
    let color = UIColor(displayP3Red: 1, green: 0, blue: 0.1, alpha: 1).cgColor
    let position = CGPoint(x: 0, y: 0)
    let fontSize = CGFloat(0.1) * extent.width

    // Create an RGBA8 bitmap context the size of the original image
    guard let context = CGContext(data: nil,
                                  width: Int(extent.width),
                                  height: Int(extent.height),
                                  bitsPerComponent: 8,
                                  bytesPerRow: 0,
                                  space: CGColorSpaceCreateDeviceRGB(),
                                  bitmapInfo: CGImageAlphaInfo.premultipliedLast.rawValue)
    else {
        fatalError("Couldn't create CGContext")
    }
    context.textMatrix = .identity

    // Lay out the attributed string with Core Text inside the full image bounds
    let path = CGMutablePath()
    let bounds = CGRect(origin: .zero, size: extent.size)
    path.addRect(bounds)
    let paragraphStyle = NSParagraphStyle.default.mutableCopy() as! NSMutableParagraphStyle
    paragraphStyle.alignment = .center
    let attrString = NSAttributedString(string: string,
                                        attributes: [
                                            .foregroundColor: color,
                                            .font: CTFontCreateWithName("GillSans-Bold" as CFString, fontSize, nil),
                                            .paragraphStyle: paragraphStyle
                                        ])
    let framesetter = CTFramesetterCreateWithAttributedString(attrString)
    let frame = CTFramesetterCreateFrame(framesetter, CFRange(location: 0, length: 0), path, nil)
    context.textPosition = position
    CTFrameDraw(frame, context)

    // Convert the drawn context into a CGImage and wrap it in a CIImage
    guard let cgImage = context.makeImage() else {
        fatalError("Couldn't create CGImage from CGContext")
    }
    return CIImage(cgImage: cgImage)
}
import CoreImage
import CoreText
import UIKit
let appendix11: Effect = { originalImage, depthMap in
    return try? TextKernel.apply(
        withExtent: originalImage.extent,
        inputs: [originalImage],
        arguments: [
            "text": "Hello world!",
            "extent": originalImage.extent
        ]
    )
}
private class TextKernel: CIImageProcessorKernel {
    override class func process(with inputs: [CIImageProcessorInput]?, arguments: [String : Any]?, output: CIImageProcessorOutput) throws {
        guard let input = inputs?.first else {
            fatalError("No input")
        }

        let outputRegion = output.region
        let outputPixelCount = Int(outputRegion.width * outputRegion.height)
        let outputValueCount = outputPixelCount * 4
        let outAddress = output.baseAddress.bindMemory(to: Float32.self, capacity: outputValueCount)

        // Copy input to output before drawing text (4 bytes per Float32 value)
        memcpy(output.baseAddress, input.baseAddress, outputValueCount * 4)

        // Temporary RGBA8 buffer for Core Graphics to draw the text into.
        // allocate() returns uninitialized memory, so zero it to keep untouched pixels black.
        let tempOutAddress = UnsafeMutablePointer<UInt8>.allocate(capacity: outputValueCount)
        tempOutAddress.initialize(repeating: 0, count: outputValueCount)
        defer { tempOutAddress.deallocate() }

        // Similar to Appendix 10 ->
        let string = arguments?["text"] as! String
        let extent = arguments?["extent"] as! CGRect
        let color = UIColor(displayP3Red: 1, green: 0, blue: 0.1, alpha: 1).cgColor
        let fontSize = CGFloat(0.1) * extent.width
        guard let context = CGContext(data: tempOutAddress,
                                      width: Int(outputRegion.width),
                                      height: Int(outputRegion.height),
                                      bitsPerComponent: 8,
                                      bytesPerRow: output.bytesPerRow / 4,
                                      space: CGColorSpaceCreateDeviceRGB(),
                                      bitmapInfo: CGImageAlphaInfo.premultipliedLast.rawValue)
        else {
            fatalError("Couldn't create CGContext")
        }
        let path = CGMutablePath()
        let bounds = CGRect(origin: .init(x: extent.minX - input.region.minX,
                                          y: extent.minY - input.region.minY),
                            size: extent.size)
        path.addRect(bounds)
        let paragraphStyle = NSParagraphStyle.default.mutableCopy() as! NSMutableParagraphStyle
        paragraphStyle.alignment = .center
        let attrString = NSAttributedString(string: string,
                                            attributes: [
                                                .foregroundColor: color,
                                                .font: CTFontCreateWithName("GillSans-Bold" as CFString, fontSize, nil),
                                                .paragraphStyle: paragraphStyle
                                            ])
        let framesetter = CTFramesetterCreateWithAttributedString(attrString)
        let frame = CTFramesetterCreateFrame(framesetter, CFRange(location: 0, length: 0), path, nil)
        CTFrameDraw(frame, context)
        // <- Similar to Appendix 10

        // Draw the text buffer on top of the copied input
        for i in 0..<outputPixelCount {
            let temp = tempOutAddress.advanced(by: i * 4)
            var out = outAddress.advanced(by: i * 4)
            // Only draw for non-black pixels (the text color is almost pure red)
            if temp[0] > 0 {
                out.r = Float32(temp[0]) / 255
                out.g = Float32(temp[1]) / 255
                out.b = Float32(temp[2]) / 255
            }
        }
    }

    override public class func formatForInput(at input: Int32) -> CIFormat {
        .RGBAf
    }

    override public class var outputFormat: CIFormat {
        .RGBAf
    }

    override public class func roi(forInput input: Int32, arguments: [String : Any]?, outputRect: CGRect) -> CGRect {
        outputRect
    }
}
import Accelerate.vImage
import CoreImage
let appendix12: Effect = { originalImage, depthMap in
    // Use the histogram from the sample photo Window.heic and apply it to originalImage
    let histogramImage = CIImage(contentsOf: Bundle.main.url(forResource: "Window", withExtension: "heic")!)!

    /// Straight from https://developer.apple.com/documentation/accelerate/vimage/specifying_histograms_with_vimage
    let context = CIContext()
    guard
        let sourceCGImage = context.createCGImage(originalImage, from: originalImage.extent),
        let histogramSourceCGImage = context.createCGImage(histogramImage, from: originalImage.extent)
    else {
        fatalError()
    }

    let format = vImage_CGImageFormat(bitsPerComponent: 8,
                                      bitsPerPixel: 32,
                                      colorSpace: CGColorSpaceCreateDeviceRGB(),
                                      bitmapInfo: CGBitmapInfo(rawValue: CGImageAlphaInfo.last.rawValue),
                                      renderingIntent: .defaultIntent)!

    guard var sourceBuffer = try? vImage_Buffer(cgImage: sourceCGImage, format: format) else {
        return nil
    }
    defer { sourceBuffer.free() }

    guard var histogramSourceBuffer = try? vImage_Buffer(cgImage: histogramSourceCGImage, format: format) else {
        return nil
    }
    defer { histogramSourceBuffer.free() }

    // One 256-entry histogram per channel
    let histogramBins = (0...3).map { _ in
        [vImagePixelCount](repeating: 0, count: 256)
    }
    var error = kvImageNoError

    // Calculate the histogram of the reference image
    var mutableHistogram: [UnsafeMutablePointer<vImagePixelCount>?] = histogramBins.map {
        UnsafeMutablePointer<vImagePixelCount>(mutating: $0)
    }
    error = vImageHistogramCalculation_ARGB8888(&histogramSourceBuffer,
                                                &mutableHistogram,
                                                vImage_Flags(kvImageNoFlags))
    guard error == kvImageNoError else {
        print("Error calculating histogram")
        return nil
    }

    // Apply that histogram to the source image in place
    var immutableHistogram: [UnsafePointer<vImagePixelCount>?] = histogramBins.map {
        return UnsafePointer<vImagePixelCount>($0)
    }
    error = vImageHistogramSpecification_ARGB8888(&sourceBuffer,
                                                  &sourceBuffer,
                                                  &immutableHistogram,
                                                  vImage_Flags(kvImageLeaveAlphaUnchanged))
    guard error == kvImageNoError else {
        print("Error specifying histogram")
        return nil
    }
    print("Done")

    guard let output = try? sourceBuffer.createCGImage(format: format) else {
        fatalError("Couldn't produce output")
    }
    return CIImage(cgImage: output)
}
......@@ -19,14 +19,14 @@ private class LineAvereageKernel: CIImageProcessorKernel {
let pixelCount = Int(inputRegion.width * inputRegion.height)
let valueCount = pixelCount * 4 // 4 values (red, green, blue, alpha) per pixel
// Bind input memory pointer to Float32 to make reading easier
let inAddress: UnsafePointer<Float32> = input.baseAddress.bindMemory(to: Float32.self, capacity: valueCount)
let inAddress = input.baseAddress.bindMemory(to: Float32.self, capacity: valueCount)
// Output
let outputRegion = output.region
let outputPixelCount = Int(outputRegion.width * outputRegion.height)
let outputValueCount = outputPixelCount * 4 // 4 values (red, green, blue, alpha) per pixel
// Bind output memory pointer to Float32 to make writing easier
let outAddress: UnsafeMutablePointer<Float32> = output.baseAddress.bindMemory(to: Float32.self, capacity: outputValueCount)
let outAddress = output.baseAddress.bindMemory(to: Float32.self, capacity: outputValueCount)
// Loop through each row of pixels in the output region
// We use DispatchQueue.concurrentPerform() instead of a regular for loop
......
......@@ -13,9 +13,6 @@ let appendix9: Effect = { originalImage, depthMap in
)
}
fileprivate typealias Float32Pointer = UnsafePointer<Float32>
fileprivate typealias Float32MutablePointer = UnsafeMutablePointer<Float32>
public class StereogramKernel: CIImageProcessorKernel {
static let colors: [CIColor] = [
.black,
......@@ -37,14 +34,14 @@ public class StereogramKernel: CIImageProcessorKernel {
let pixelCount = Int(inputRegion.width * inputRegion.height)
let valueCount = pixelCount * 4
let inAddress: Float32Pointer = input.baseAddress.bindMemory(to: Float32.self, capacity: valueCount)
let tempOutAddress: Float32MutablePointer = Float32MutablePointer.allocate(capacity: valueCount)
let inAddress = input.baseAddress.bindMemory(to: Float32.self, capacity: valueCount)
let tempOutAddress = UnsafeMutablePointer<Float32>.allocate(capacity: valueCount)
let outputRegion = output.region
let outputPixelCount = Int(outputRegion.width * outputRegion.height)
let outputValueCount = outputPixelCount * 4
let outAddress: Float32MutablePointer = output.baseAddress.bindMemory(to: Float32.self, capacity: outputValueCount)
let outAddress = output.baseAddress.bindMemory(to: Float32.self, capacity: outputValueCount)
let eyeSeperation = (2.5 * dpi).rounded()
......@@ -164,35 +161,3 @@ public class StereogramKernel: CIImageProcessorKernel {
return relativeY * Int(rect.width) + relativeX
}
}
fileprivate extension Float32Pointer {
var r: Float32 { return self[0] }
var g: Float32 { return self[1] }
var b: Float32 { return self[2] }
var a: Float32 { return self[3] }
}
fileprivate extension Float32MutablePointer {
var r: Float32 {
get { self[0] }
set { self[0] = newValue }
}
var g: Float32 {
get { self[1] }
set { self[1] = newValue }
}
var b: Float32 {
get { self[2] }
set { self[2] = newValue }
}
var a: Float32 {
get { self[3] }
set { self[3] = newValue }
}
}
......@@ -230,7 +230,7 @@ extension PHImageManager {
print(value)
}
let id = requestImageData(for: asset, options: options) { data, _, _, info in
let id = requestImageDataAndOrientation(for: asset, options: options) { data, _, _, info in
if
let isCancelled = info?[PHImageCancelledKey] as? Bool,
isCancelled {
......
......@@ -23,7 +23,10 @@ class EditorViewController: UIViewController {
appendix6,
appendix7,
appendix8,
appendix9
appendix9,
appendix10,
appendix11,
appendix12,
]
private var selectedEffect: Int = UserDefaults.standard.integer(forKey: "selectedEffect") {
didSet {
......
import Foundation

extension UnsafePointer where Pointee == Float32 {
    var r: Float32 { self[0] }
    var g: Float32 { self[1] }
    var b: Float32 { self[2] }
    var a: Float32 { self[3] }
}

extension UnsafeMutablePointer where Pointee == Float32 {
    var r: Float32 {
        get { self[0] }
        set { self[0] = newValue }
    }
    var g: Float32 {
        get { self[1] }
        set { self[1] = newValue }
    }
    var b: Float32 {
        get { self[2] }
        set { self[2] = newValue }
    }
    var a: Float32 {
        get { self[3] }
        set { self[3] = newValue }
    }
}
......@@ -264,6 +264,23 @@ In other words, for each row:
This is a more advanced example of what can be done with a `CIImageProcessorKernel`. It implements an old-school stereogram effect, more specifically a random-dot stereogram: a seemingly noisy picture that produces a 3D effect when viewed “correctly” with both eyes. The effect is generated from depth information, so we can use our depth maps for the calculations.
## Appendix 10-11 – Core Graphics
The built-in Core Image filter for text generation, `CIAttributedTextImageGenerator`, can only do so much. Appendices 10 and 11 both use Core Graphics to draw text into a context instead.
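For comparison, using the built-in filter looks roughly like this (a minimal sketch; the filter name and keys come from Apple's documentation, while the font and text values are just placeholders):

```swift
import CoreImage
import UIKit

// Sketch: generate a text image with the built-in filter.
// It renders an attributed string, but gives no per-pixel control over the result.
let attributes: [NSAttributedString.Key: Any] = [
    .font: UIFont(name: "GillSans-Bold", size: 60) ?? UIFont.systemFont(ofSize: 60),
    .foregroundColor: UIColor.red
]
let text = NSAttributedString(string: "Hello world!", attributes: attributes)

let filter = CIFilter(name: "CIAttributedTextImageGenerator")!
filter.setValue(text, forKey: "inputText")
filter.setValue(2.0, forKey: "inputScaleFactor")

// The output extent is only as big as the rendered text itself.
let textImage = filter.outputImage
```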
### Appendix 10
This appendix draws text into a `CGContext` and produces a `CGImage`, which in turn is converted to a `CIImage`. This doesn't integrate with the Core Image processing graph the way the `CIImageProcessorKernel` in Appendix 9 did, but it is perfectly fine for producing the starting point of a graph.
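As a rough usage sketch (a hypothetical helper, assuming the appendix returns the text image on a transparent background), the generated `CIImage` can then feed a normal Core Image graph:

```swift
import CoreImage

// Hypothetical sketch: use the generated text image as the start of a graph
// by compositing it over the original photo with source-over blending.
func stampText(_ textImage: CIImage, onto originalImage: CIImage) -> CIImage {
    return textImage
        .composited(over: originalImage)   // built-in source-over compositing
        .cropped(to: originalImage.extent) // keep the original extent
}
```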
### Appendix 11
This appendix uses a `CIImageProcessorKernel` to integrate Core Graphics fully into the Core Image processing graph.
First it copies the input color image into the output, so it becomes the background of the final result. Then the text is drawn into a temporary buffer using Core Graphics. Finally, the contents of the buffer are manually drawn on top of the output wherever the buffer holds non-zero values.
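Taken in isolation, that last compositing step looks roughly like the sketch below (hypothetical names; the real loop in `Appendix11.swift` above does the same thing via the pointer extensions):

```swift
// Simplified sketch of the compositing step. `textBuffer` holds the RGBA8 pixels
// drawn by Core Graphics, `outPixels` holds the RGBAf output that already contains
// the copied input image.
func overlayText(textBuffer: UnsafePointer<UInt8>,
                 outPixels: UnsafeMutablePointer<Float32>,
                 pixelCount: Int) {
    for i in 0..<pixelCount {
        let text = textBuffer.advanced(by: i * 4)
        let out = outPixels.advanced(by: i * 4)
        // Only overwrite pixels the text was actually drawn into
        // (the red channel is non-zero because the text color is almost pure red).
        if text[0] > 0 {
            out[0] = Float32(text[0]) / 255 // red
            out[1] = Float32(text[1]) / 255 // green
            out[2] = Float32(text[2]) / 255 // blue
        }
    }
}
```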
## Appendix 12 – vImage
When working with Accelerate’s vImage, most of the work is converting your `CIImage`s to and from `CGImage`s. This appendix implements the [Apple-provided example](https://developer.apple.com/documentation/accelerate/vimage/specifying_histograms_with_vimage) of how to use vImage to do a histogram transfer.
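That conversion boilerplate boils down to something like this (a sketch only, with the actual vImage calls elided; the full version is in `Appendix12.swift` above):

```swift
import Accelerate.vImage
import CoreImage

// Sketch of the CIImage -> vImage_Buffer -> CIImage round trip used in Appendix 12.
func roundTrip(_ image: CIImage) -> CIImage? {
    let context = CIContext()
    guard let cgImage = context.createCGImage(image, from: image.extent),
          let format = vImage_CGImageFormat(bitsPerComponent: 8,
                                            bitsPerPixel: 32,
                                            colorSpace: CGColorSpaceCreateDeviceRGB(),
                                            bitmapInfo: CGBitmapInfo(rawValue: CGImageAlphaInfo.last.rawValue),
                                            renderingIntent: .defaultIntent),
          var buffer = try? vImage_Buffer(cgImage: cgImage, format: format) else {
        return nil
    }
    defer { buffer.free() }

    // ... vImage work goes here, e.g. vImageHistogramSpecification_ARGB8888(&buffer, ...) ...

    guard let outputCGImage = try? buffer.createCGImage(format: format) else {
        return nil
    }
    return CIImage(cgImage: outputCGImage)
}
```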
# References
- [Core Image](https://developer.apple.com/documentation/coreimage) by Apple
......