Question
I want to apply a camera filter while rendering; my code is:
func session(_ session: ARSession, didUpdate frame: ARFrame) {
    // `filter` is a CIColorMatrix filter, `context` a CIContext, and
    // `color` a UIColor, all stored as properties of this class.
    let image = CIImage(cvPixelBuffer: frame.capturedImage)

    // Decompose the tint color into its RGBA components.
    var r: CGFloat = 0, g: CGFloat = 0, b: CGFloat = 0, a: CGFloat = 0
    color.getRed(&r, green: &g, blue: &b, alpha: &a)

    // Scale each channel of the camera image by the matching tint component.
    filter.setDefaults()
    filter.setValue(image, forKey: kCIInputImageKey)
    filter.setValue(CIVector(x: r, y: 0, z: 0, w: 0), forKey: "inputRVector")
    filter.setValue(CIVector(x: 0, y: g, z: 0, w: 0), forKey: "inputGVector")
    filter.setValue(CIVector(x: 0, y: 0, z: b, w: 0), forKey: "inputBVector")
    filter.setValue(CIVector(x: 0, y: 0, z: 0, w: a), forKey: "inputAVector")

    // Render the filtered image and set it as the scene background,
    // rotating it to compensate for the camera's landscape orientation.
    if let result = filter.outputImage,
       let cgImage = context.createCGImage(result, from: result.extent) {
        sceneView.scene.background.contents = cgImage
        sceneView.scene.background.contentsTransform = SCNMatrix4MakeRotation(.pi / 2, 0, 0, 1)
    }
}
At runtime the output gets stretched. I have attached two images:
- Normal camera rendering
- Camera rendering with the filter applied
Please help me resolve this; it would be a great help if you could provide any demo code or project. Thank you.
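A minimal sketch of one possible fix, for reference: rather than rotating the raw filtered image with contentsTransform, ARFrame.displayTransform(for:viewportSize:) can map it into the view's coordinate space, handling both the rotation and the aspect-fill cropping that preserve the camera's aspect ratio. This is untested and assumes the same filter, context, and sceneView properties as above, plus a portrait interface orientation:

func session(_ session: ARSession, didUpdate frame: ARFrame) {
    let viewportSize = sceneView.bounds.size
    let image = CIImage(cvPixelBuffer: frame.capturedImage)

    // Color-matrix vectors are assumed to be configured as in the code above.
    filter.setValue(image, forKey: kCIInputImageKey)
    guard let filtered = filter.outputImage else { return }

    // displayTransform works in normalized coordinates, so scale the image
    // extent down to the unit square, apply the transform, then scale back
    // up to the viewport size.
    let normalize = CGAffineTransform(scaleX: 1 / image.extent.width,
                                      y: 1 / image.extent.height)
    let display = frame.displayTransform(for: .portrait, viewportSize: viewportSize)
    let toViewport = CGAffineTransform(scaleX: viewportSize.width,
                                       y: viewportSize.height)
    let mapped = filtered.transformed(by: normalize.concatenating(display)
                                                   .concatenating(toViewport))

    // Crop to the viewport rect so the background is not stretched;
    // no contentsTransform rotation is needed afterwards.
    let viewportRect = CGRect(origin: .zero, size: viewportSize)
    if let cgImage = context.createCGImage(mapped, from: viewportRect) {
        sceneView.scene.background.contents = cgImage
    }
}

If the interface can rotate, the hard-coded .portrait would need to be replaced with the current interface orientation.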
Source: https://stackoverflow.com/questions/58501761/live-camera-is-getting-stretched-while-rendering-using-cifilter-swift-4