I am working on a video filter for iOS and macOS, which captures video input from the default camera, applies a filter (MPSImageGaussianBlur), and renders it using MTKView.
It works fine on iOS (13 on iPhone 6s and iPhone 11), but I see just a red screen on MacOS (10.15 on MacBook Pro) and can't figure out why. The code calls captureOutput() and draw() repeatedly as expected.
This is VS2CameraSession, which performs the majority of work. (Please notice that I added kCVPixelBufferMetalCompatibilityKey flag to the videoSettings as recommended at CVMetalTextureCacheCreateTextureFromImage returns -6660 on macOS 10.13)
import AVFoundation
import MetalPerformanceShaders
class VS2CameraSession: NSObject {
let gpu = MTLCreateSystemDefaultDevice()!
private let session = AVCaptureSession()
private let camera = AVCaptureDevice.default(for: .video)
private var textureCache:CVMetalTextureCache?
private var texture:MTLTexture?
func startRunning() {
CVMetalTextureCacheCreate(nil, nil, gpu, nil, &textureCache)
guard let camera = camera,
let input = try? AVCaptureDeviceInput(device: camera) else {
return
}
guard session.canAddInput(input) else {
return
}
session.addInput(input)
let output = AVCaptureVideoDataOutput()
output.alwaysDiscardsLateVideoFrames = true
#if os(macOS)
// https://stackoverflow.com/questions/46549906/cvmetaltexturecachecreatetexturefromimage-returns-6660-on-macos-10-13
output.videoSettings = [
kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA,
kCVPixelBufferMetalCompatibilityKey as String: true
]
#else
output.videoSettings = [
kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA
]
#endif
output.setSampleBufferDelegate(self, queue: DispatchQueue.main)
session.addOutput(output)
session.startRunning()
}
func draw(drawable:CAMetalDrawable?) {
guard let texture = self.texture,
let drawable = drawable,
let commandQueue = gpu.makeCommandQueue(),
let commandBuffer = commandQueue.makeCommandBuffer() else {
return
}
// Apply filter(s)
let filter = MPSImageGaussianBlur(device:gpu, sigma: 10.0)
filter.encode(commandBuffer: commandBuffer, sourceTexture: texture, destinationTexture: drawable.texture)
commandBuffer.present(drawable)
commandBuffer.commit()
self.texture = nil // no need to draw it again
}
}
extension VS2CameraSession : AVCaptureVideoDataOutputSampleBufferDelegate {
func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
if let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer),
let textureCache = self.textureCache {
let width = CVPixelBufferGetWidth(pixelBuffer)
let height = CVPixelBufferGetHeight(pixelBuffer)
var textureRef:CVMetalTexture?
CVMetalTextureCacheCreateTextureFromImage(kCFAllocatorDefault, textureCache, pixelBuffer, nil,
.bgra8Unorm, width, height, 0, &textureRef)
texture = CVMetalTextureGetTexture(textureRef!)
}
}
}
This is VS2CameraViewController, which uses VS2CameraSession to render its view.
import UIKit
import SwiftUI
import MetalKit
final class VS2CameraViewController: UIViewController {
let cameraSession = VS2CameraSession()
override func loadView() {
let metalView = MTKView()
metalView.device = self.cameraSession.gpu
metalView.delegate = self
metalView.clearColor = MTLClearColorMake(1, 1, 1, 1)
metalView.colorPixelFormat = MTLPixelFormat.bgra8Unorm
metalView.framebufferOnly = false
self.view = metalView
}
override func viewDidLoad() {
cameraSession.startRunning()
}
}
extension VS2CameraViewController : MTKViewDelegate {
func mtkView(_ view: MTKView, drawableSizeWillChange size: CGSize) {
}
func draw(in view: MTKView) {
cameraSession.draw(drawable: view.currentDrawable)
}
}
extension VS2CameraViewController : UIViewControllerRepresentable {
typealias UIViewControllerType = VS2CameraViewController
public func makeUIViewController(context: UIViewControllerRepresentableContext<VS2CameraViewController>) -> VS2CameraViewController {
return VS2CameraViewController()
}
public func updateUIViewController(_ uiViewController: VS2CameraViewController, context: UIViewControllerRepresentableContext<VS2CameraViewController>) {
}
}
The entire source code is available at https://github.com/snakajima/VideoShader2/tree/stack_overflow.
I found an answer here. Very slow framerate with AVFoundation and Metal in MacOS
I just need to retain the reference to the sampleBuffer along with the texture.