project.pbxproj
@@ -18,11 +18,13 @@
5667747B22798BCA003F881E /* Task3.swift in Sources */ = {isa = PBXBuildFile; fileRef = 5667747A22798BCA003F881E /* Task3.swift */; };
56A52E812345E8BC0061E5A3 /* Appendix12.swift in Sources */ = {isa = PBXBuildFile; fileRef = 56A52E802345E8BC0061E5A3 /* Appendix12.swift */; };
56A52E832346046D0061E5A3 /* Extensions.swift in Sources */ = {isa = PBXBuildFile; fileRef = 56A52E822346046D0061E5A3 /* Extensions.swift */; };
56A52E8523462C9E0061E5A3 /* Appendix14.swift in Sources */ = {isa = PBXBuildFile; fileRef = 56A52E8423462C9E0061E5A3 /* Appendix14.swift */; };
56A52ECE234634110061E5A3 /* DeepLabV3.mlmodel in Sources */ = {isa = PBXBuildFile; fileRef = 56A52ECD234634110061E5A3 /* DeepLabV3.mlmodel */; };
56AD53392278513B005B1E87 /* BrowserViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 56AD53382278513B005B1E87 /* BrowserViewController.swift */; };
56AD533B2278668B005B1E87 /* EditorViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 56AD533A2278668B005B1E87 /* EditorViewController.swift */; };
56AD534122787DEE005B1E87 /* Goat.jpg in Resources */ = {isa = PBXBuildFile; fileRef = 56AD533C22787DED005B1E87 /* Goat.jpg */; };
56AD534222787DEE005B1E87 /* Ice.jpg in Resources */ = {isa = PBXBuildFile; fileRef = 56AD533D22787DED005B1E87 /* Ice.jpg */; };
-E3135F6623460C8000795492 /* AppendixMPS.swift in Sources */ = {isa = PBXBuildFile; fileRef = E3135F6523460C8000795492 /* AppendixMPS.swift */; };
+E3135F6623460C8000795492 /* Appendix13.swift in Sources */ = {isa = PBXBuildFile; fileRef = E3135F6523460C8000795492 /* Appendix13.swift */; };
E32ABCCC2273092300D74B7C /* AppDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = E32ABCCB2273092300D74B7C /* AppDelegate.swift */; };
E32ABCCE2273092300D74B7C /* MainViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = E32ABCCD2273092300D74B7C /* MainViewController.swift */; };
E32ABCD32273092400D74B7C /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = E32ABCD22273092400D74B7C /* Assets.xcassets */; };
@@ -51,11 +53,13 @@
5667747A22798BCA003F881E /* Task3.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Task3.swift; sourceTree = "<group>"; };
56A52E802345E8BC0061E5A3 /* Appendix12.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Appendix12.swift; sourceTree = "<group>"; };
56A52E822346046D0061E5A3 /* Extensions.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Extensions.swift; sourceTree = "<group>"; };
56A52E8423462C9E0061E5A3 /* Appendix14.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Appendix14.swift; sourceTree = "<group>"; };
56A52ECD234634110061E5A3 /* DeepLabV3.mlmodel */ = {isa = PBXFileReference; lastKnownFileType = file.mlmodel; path = DeepLabV3.mlmodel; sourceTree = "<group>"; };
56AD53382278513B005B1E87 /* BrowserViewController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = BrowserViewController.swift; sourceTree = "<group>"; };
56AD533A2278668B005B1E87 /* EditorViewController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = EditorViewController.swift; sourceTree = "<group>"; };
56AD533C22787DED005B1E87 /* Goat.jpg */ = {isa = PBXFileReference; lastKnownFileType = image.jpeg; path = Goat.jpg; sourceTree = "<group>"; };
56AD533D22787DED005B1E87 /* Ice.jpg */ = {isa = PBXFileReference; lastKnownFileType = image.jpeg; path = Ice.jpg; sourceTree = "<group>"; };
-E3135F6523460C8000795492 /* AppendixMPS.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AppendixMPS.swift; sourceTree = "<group>"; };
+E3135F6523460C8000795492 /* Appendix13.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Appendix13.swift; sourceTree = "<group>"; };
E32ABCC82273092300D74B7C /* Photo Editor.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = "Photo Editor.app"; sourceTree = BUILT_PRODUCTS_DIR; };
E32ABCCB2273092300D74B7C /* AppDelegate.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AppDelegate.swift; sourceTree = "<group>"; };
E32ABCCD2273092300D74B7C /* MainViewController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MainViewController.swift; sourceTree = "<group>"; };
@@ -100,7 +104,8 @@
561112562344BCA1006900F1 /* Appendix10.swift */,
561112582344C283006900F1 /* Appendix11.swift */,
56A52E802345E8BC0061E5A3 /* Appendix12.swift */,
-E3135F6523460C8000795492 /* AppendixMPS.swift */,
+E3135F6523460C8000795492 /* Appendix13.swift */,
56A52E8423462C9E0061E5A3 /* Appendix14.swift */,
);
path = Appendices;
sourceTree = "<group>";
@@ -117,6 +122,14 @@
path = Tasks;
sourceTree = "<group>";
};
56A52E8623462E7B0061E5A3 /* CoreML */ = {
isa = PBXGroup;
children = (
56A52ECD234634110061E5A3 /* DeepLabV3.mlmodel */,
);
path = CoreML;
sourceTree = "<group>";
};
56AD534622787DF1005B1E87 /* Sample Photos */ = {
isa = PBXGroup;
children = (
@@ -156,6 +169,7 @@
565508C622AA882D00884E5C /* Appendices */,
E32ABCDD22730AE400D74B7C /* Kernels.metal */,
56AD534622787DF1005B1E87 /* Sample Photos */,
56A52E8623462E7B0061E5A3 /* CoreML */,
E32ABCD22273092400D74B7C /* Assets.xcassets */,
E32ABCD42273092400D74B7C /* LaunchScreen.storyboard */,
E32ABCD72273092400D74B7C /* Info.plist */,
@@ -256,9 +270,11 @@
56AD53392278513B005B1E87 /* BrowserViewController.swift in Sources */,
561112592344C283006900F1 /* Appendix11.swift in Sources */,
56A52E832346046D0061E5A3 /* Extensions.swift in Sources */,
56A52ECE234634110061E5A3 /* DeepLabV3.mlmodel in Sources */,
E3D6AE6D22B0F94A00ACADC5 /* Appendix1.swift in Sources */,
56A52E8523462C9E0061E5A3 /* Appendix14.swift in Sources */,
E32ABCCE2273092300D74B7C /* MainViewController.swift in Sources */,
-E3135F6623460C8000795492 /* AppendixMPS.swift in Sources */,
+E3135F6623460C8000795492 /* Appendix13.swift in Sources */,
E3355E46232133790093589E /* Appendix9.swift in Sources */,
E32ABCCC2273092300D74B7C /* AppDelegate.swift in Sources */,
);
Appendix13.swift (renamed from AppendixMPS.swift)
import CoreImage
import MetalPerformanceShaders
-let appendixMPS: Effect = { originalImage, depthMap in
+let appendix13: Effect = { originalImage, depthMap in
try? MetalPerformanceShaderKernel.apply(
withExtent: originalImage.extent,
inputs: [originalImage],
@@ -19,11 +19,18 @@ private class MetalPerformanceShaderKernel: CIImageProcessorKernel {
let input = inputs?.first,
let sourceTexture = input.metalTexture,
let destinationTexture = output.metalTexture
else {
return
}
-let blur = MPSImageGaussianBlur(device: device, sigma: 50)
-blur.encode(commandBuffer: commandBuffer, sourceTexture: sourceTexture, destinationTexture: destinationTexture)
+let v = Float(1)
+let a = Float(-1)
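+// 25×3 weights: one row of -1s, a center row that passes the pixel through, one row of +1s.
+// The weights sum to 1, so overall brightness is preserved.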
+let convolution = MPSImageConvolution(device: device, kernelWidth: 25, kernelHeight: 3, weights: [
+a,a,a,a,a,a,a,a,a,a,a,a,a,a,a,a,a,a,a,a,a,a,a,a,a,
+0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,
+v,v,v,v,v,v,v,v,v,v,v,v,v,v,v,v,v,v,v,v,v,v,v,v,v,
+])
+convolution.encode(commandBuffer: commandBuffer, sourceTexture: sourceTexture, destinationTexture: destinationTexture)
}
}
Appendix14.swift (new file)
import CoreImage
import CoreML
let appendix14: Effect = { originalImage, depthMap in
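// DeepLabV3 expects a 513×513 input, so scale the photo down and crop to that size first.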
let originalImage = originalImage
.transformed(by: .init(scaleX: 0.2, y: 0.2))
.cropped(to: CGRect(x: 0, y: 0, width: 513, height: 513))
// let originalImage = originalImage.cropped(to: CGRect(x: 0, y: 0, width: 304, height: 228))
return try? CoreMLKernel.apply(
withExtent: originalImage.extent,
inputs: [originalImage],
arguments: [:]
)
}
private class CoreMLKernel: CIImageProcessorKernel {
override public class func process(with inputs: [CIImageProcessorInput]?, arguments: [String : Any]?, output: CIImageProcessorOutput) throws {
guard
let input = inputs?.first,
let pixelBuffer = input.pixelBuffer
else {
fatalError("No input")
}
let model = DeepLabV3()
let prediction = try model.prediction(image: pixelBuffer)
let semanticPredictions = prediction.semanticPredictions
let width = Int(output.region.width)
let height = Int(output.region.height)
// Walk the mask row by row: rows are output.bytesPerRow apart and each BGRA8 pixel is 4 bytes.
// Assumes the 513×513 crop above, so the model output and the mask line up row for row.
let out = output.baseAddress.bindMemory(to: UInt8.self, capacity: output.bytesPerRow * height)
for y in 0..<height {
for x in 0..<width {
// Class 0 is background; any other class becomes white in the mask.
let v: UInt8 = semanticPredictions[y * width + x].int32Value > 0 ? 255 : 0
let o = out.advanced(by: y * output.bytesPerRow + x * 4)
o[0] = v // Blue
o[1] = v // Green
o[2] = v // Red
o[3] = 255 // Alpha
}
}
}
override class func formatForInput(at input: Int32) -> CIFormat {
.BGRA8
}
override class var outputFormat: CIFormat {
.BGRA8
}
}
EditorViewController.swift
@@ -27,7 +27,8 @@ class EditorViewController: UIViewController {
appendix10,
appendix11,
appendix12,
-appendixMPS
+appendix13,
+appendix14,
]
private var selectedEffect: Int = UserDefaults.standard.integer(forKey: "selectedEffect") {
didSet {
README.md
@@ -277,10 +277,19 @@ This appendix draws text to a `CGContext` and produces a `CGImage` which in turn
This appendix uses `CIImageProcessorKernel` to integrate Core Graphics fully into the Core Image processing graph.
First it copies the input color image into the output to become the background of the final result. Then the text is drawn into a separate buffer using Core Graphics. Finally, the contents of that buffer are manually drawn on top of the output wherever the buffer’s value at a given position is non-zero.
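A condensed sketch of that approach (hypothetical `TextOverlayKernel`, drawing a filled rectangle as a stand-in for real text, and drawing straight into the output rather than compositing a separate buffer by hand):

```swift
import Foundation
import CoreImage
import CoreGraphics

private class TextOverlayKernel: CIImageProcessorKernel {
    override class func process(with inputs: [CIImageProcessorInput]?, arguments: [String: Any]?, output: CIImageProcessorOutput) throws {
        guard let input = inputs?.first else { return }
        let width = Int(output.region.width)
        let height = Int(output.region.height)
        // Copy the input image into the output so it becomes the background.
        // (Sketch assumption: input and output cover the same region.)
        for row in 0..<height {
            memcpy(output.baseAddress + row * output.bytesPerRow,
                   input.baseAddress + row * input.bytesPerRow,
                   min(input.bytesPerRow, output.bytesPerRow))
        }
        // Wrap the output buffer in a CGContext so Core Graphics draws straight into it.
        guard let context = CGContext(
            data: output.baseAddress,
            width: width,
            height: height,
            bitsPerComponent: 8,
            bytesPerRow: output.bytesPerRow,
            space: CGColorSpaceCreateDeviceRGB(),
            bitmapInfo: CGImageAlphaInfo.premultipliedFirst.rawValue | CGBitmapInfo.byteOrder32Little.rawValue // BGRA8
        ) else { return }
        context.setFillColor(red: 1, green: 1, blue: 1, alpha: 1)
        context.fill(CGRect(x: 20, y: 20, width: 200, height: 20)) // stand-in for drawing text
    }
    override class func formatForInput(at input: Int32) -> CIFormat { .BGRA8 }
    override class var outputFormat: CIFormat { .BGRA8 }
}
```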
-## Appendix 12 - vImage
+## Appendix 12 – vImage
When working with Accelerate’s vImage, most of the work is converting your `CIImage`s to and from `CGImage`s. This appendix implements the [Apple provided example](https://developer.apple.com/documentation/accelerate/vimage/specifying_histograms_with_vimage) of using vImage to perform a histogram transfer.
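In sketch form (hypothetical `matchHistogram` helper; assumes ARGB8888 `vImage_Buffer`s have already been created from the `CGImage`s), the two vImage calls look like this:

```swift
import Accelerate

// Measure the reference image's per-channel histogram, then remap the source to match it.
func matchHistogram(source: inout vImage_Buffer, destination: inout vImage_Buffer, reference: inout vImage_Buffer) -> vImage_Error {
    var binsA = [vImagePixelCount](repeating: 0, count: 256)
    var binsR = [vImagePixelCount](repeating: 0, count: 256)
    var binsG = [vImagePixelCount](repeating: 0, count: 256)
    var binsB = [vImagePixelCount](repeating: 0, count: 256)
    // 1. Calculate the reference histogram: one 256-bin histogram per channel.
    var error = binsA.withUnsafeMutableBufferPointer { a in
        binsR.withUnsafeMutableBufferPointer { r in
            binsG.withUnsafeMutableBufferPointer { g in
                binsB.withUnsafeMutableBufferPointer { b -> vImage_Error in
                    var histogram: [UnsafeMutablePointer<vImagePixelCount>?] = [a.baseAddress, r.baseAddress, g.baseAddress, b.baseAddress]
                    return vImageHistogramCalculation_ARGB8888(&reference, &histogram, vImage_Flags(kvImageNoFlags))
                }
            }
        }
    }
    guard error == kvImageNoError else { return error }
    // 2. Remap the source so its histogram matches the reference's.
    error = binsA.withUnsafeBufferPointer { a in
        binsR.withUnsafeBufferPointer { r in
            binsG.withUnsafeBufferPointer { g in
                binsB.withUnsafeBufferPointer { b -> vImage_Error in
                    var histogram: [UnsafePointer<vImagePixelCount>?] = [a.baseAddress, r.baseAddress, g.baseAddress, b.baseAddress]
                    return vImageHistogramSpecification_ARGB8888(&source, &destination, &histogram, vImage_Flags(kvImageNoFlags))
                }
            }
        }
    }
    return error
}
```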
## Appendix 13 – Metal Performance Shaders
`CIImageProcessorKernel`s provide convenient access directly to Metal textures. Among other things, this means we can use them with [Metal Performance Shaders](https://developer.apple.com/documentation/metalperformanceshaders). In this appendix, that is used to perform a large non-square convolution with `MPSImageConvolution`.
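For comparison, a standalone sketch (hypothetical `blur(_:into:)` helper) of what running an MPS kernel takes when Core Image isn’t handing us the device, command buffer, and textures:

```swift
import Metal
import MetalPerformanceShaders

// On our own, we must create and commit all of the Metal objects that
// CIImageProcessorKernel otherwise manages for us.
func blur(_ source: MTLTexture, into destination: MTLTexture) {
    guard
        let device = MTLCreateSystemDefaultDevice(),
        MPSSupportsMTLDevice(device),
        let queue = device.makeCommandQueue(),
        let commandBuffer = queue.makeCommandBuffer()
    else { return }
    let blur = MPSImageGaussianBlur(device: device, sigma: 10)
    blur.encode(commandBuffer: commandBuffer, sourceTexture: source, destinationTexture: destination)
    commandBuffer.commit()
    commandBuffer.waitUntilCompleted()
}
```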
## Appendix 14 – CoreML
`CIImageProcessorKernel`s also provide convenient access to a `CVPixelBuffer`, which in turn is compatible with CoreML. This example tries to produce a segmentation map that could potentially be used as a fallback when a photo doesn’t have a depth map. The model used is taken from the [Apple provided CoreML models](https://developer.apple.com/machine-learning/models/#image).
Note: The example isn’t fully working in its current state; specifically, the parsing of the prediction from the model.
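A possible direction (not part of this commit) is to drive the same model through Vision, which takes care of scaling the input to the size the model expects. A hypothetical sketch:

```swift
import Vision
import CoreML

// Run DeepLabV3 via Vision instead of calling prediction(image:) directly.
func segmentationMap(for pixelBuffer: CVPixelBuffer) throws -> MLMultiArray? {
    let model = try VNCoreMLModel(for: DeepLabV3().model)
    let request = VNCoreMLRequest(model: model)
    // Vision scales the input to the 513×513 size DeepLabV3 expects.
    request.imageCropAndScaleOption = .scaleFill
    try VNImageRequestHandler(cvPixelBuffer: pixelBuffer, options: [:]).perform([request])
    let observation = request.results?.first as? VNCoreMLFeatureValueObservation
    return observation?.featureValue.multiArrayValue
}
```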
# References
- [Core Image](https://developer.apple.com/documentation/coreimage) by Apple