I need to display CVPixelBuffers in HDR10 (kCVPixelFormatType_420YpCbCr10BiPlanarVideoRange / BT2020 PQ color space) using MTKView. I have the following code to display but it produces distorted images, and I can not be sure if it is displaying 10 bit colors or it is clamping colors. Also whether bgra10_xr or bgra10_xr_srgb should be the right choice of colorPixelFormat and the difference between the two.
class CIImageView: MTKView {
var image: CIImage? {
didSet {
self.draw()
}
}
var originalImageExtent: CGRect = CGRect.zero
var scale: CGFloat {
return max(self.frame.width / originalImageExtent.width, self.frame.height / originalImageExtent.height)
}
func update() {
guard let img = image, destRect.size.width <= img.extent.size.width && destRect.size.height <= img.extent.size.height else {
return
}
self.draw()
}
let context: CIContext
let commandQueue: MTLCommandQueue
convenience init(frame: CGRect) {
let device = MetalCamera.metalDevice
self.init(frame: frame, device: device)
colorPixelFormat = .bgra10_xr_srgb
}
override init(frame frameRect: CGRect, device: MTLDevice?) {
guard let device = device else {
fatalError("Can't use Metal")
}
guard let cmdQueue = device.makeCommandQueue(maxCommandBufferCount: 5) else {
fatalError("Can't make Command Queue")
}
commandQueue = cmdQueue
context = CIContext(mtlDevice: device, options: [CIContextOption.cacheIntermediates: false])
super.init(frame: frameRect, device: device)
self.framebufferOnly = false
self.enableSetNeedsDisplay = false
self.isPaused = true
self.clearColor = MTLClearColor(red: 0, green: 0, blue: 0, alpha: 0)
}
required init(coder aDecoder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
override func draw(_ rect: CGRect) {
guard let image = self.image else {
return
}
let dRect = destRect
let drawImage: CIImage
if dRect == image.extent {
drawImage = image
} else {
let scale = max(dRect.height / image.extent.height, dRect.width / image.extent.width)
drawImage = image.transformed(by: CGAffineTransform(scaleX: scale, y: scale))
}
let commandBuffer = commandQueue.makeCommandBufferWithUnretainedReferences()
guard let texture = self.currentDrawable?.texture else {
return
}
let colorSpace = drawImage.colorSpace ?? CGColorSpaceCreateDeviceRGB()
context.render(drawImage, to: texture, commandBuffer: commandBuffer, bounds: dRect, colorSpace: colorSpace)
commandBuffer?.present(self.currentDrawable!)
commandBuffer?.commit()
}
private var destRect: CGRect {
let scale: CGFloat
if UIScreen.main.scale == 3 {
// BUG?
scale = 2.0 * (2.0 / UIScreen.main.scale) * 2
} else {
scale = UIScreen.main.scale
}
let destRect = self.bounds.applying(CGAffineTransform(scaleX: scale, y: scale))
return destRect
}
func displayPixelBuffer(_ pixelBuffer: CVPixelBuffer, flip:FlipDirection) {
self.image = CIImage(cvImageBuffer: pixelBuffer)
}
Edit: I tried the suggestion of setting colorSpace manually but the colors are washed out.
Here is the screenshot from the same scene on iPhone that uses AVCaptureVideoPreviewLayer.
Please see this answer from the developers of this framework on the apple forum:
To display true HDR content you should use
MTLPixelFormatRGBA16Float
pixel format.