在 Apple 的 ARKit with Metal 示例代码中,它使用 ARFrame 的 pixelBuffer,通过 Metal
绘制相机视图 func updateCapturedImageTextures(frame: ARFrame) {
// Create two textures (Y and CbCr) from the provided frame's captured image.
// ARKit delivers the camera image as a bi-planar YCbCr pixel buffer
// (kCVPixelFormatType_420YpCbCr8BiPlanarFullRange): plane 0 holds luma (Y),
// plane 1 holds interleaved chroma (CbCr).
let pixelBuffer = frame.capturedImage
// Bail out if the buffer is not bi-planar — both planes are required below.
if (CVPixelBufferGetPlaneCount(pixelBuffer) < 2) {
return
}
// Plane 0 → single-channel .r8Unorm (Y); plane 1 → two-channel .rg8Unorm (CbCr).
// Presumably the fragment shader recombines the two planes into RGB, as in
// Apple's sample — see drawCapturedImage, which binds both textures.
capturedImageTextureY = createTexture(fromPixelBuffer: pixelBuffer, pixelFormat:.r8Unorm, planeIndex:0)
capturedImageTextureCbCr = createTexture(fromPixelBuffer: pixelBuffer, pixelFormat:.rg8Unorm, planeIndex:1)
}
func createTexture(fromPixelBuffer pixelBuffer: CVPixelBuffer, pixelFormat: MTLPixelFormat, planeIndex: Int) -> CVMetalTexture? {
    // Wraps a single plane of `pixelBuffer` in a Metal texture via the shared
    // texture cache (zero-copy on success). Returns nil when CoreVideo fails.
    let planeWidth = CVPixelBufferGetWidthOfPlane(pixelBuffer, planeIndex)
    let planeHeight = CVPixelBufferGetHeightOfPlane(pixelBuffer, planeIndex)

    var cvTexture: CVMetalTexture?
    let result = CVMetalTextureCacheCreateTextureFromImage(nil,
                                                           capturedImageTextureCache,
                                                           pixelBuffer,
                                                           nil,
                                                           pixelFormat,
                                                           planeWidth,
                                                           planeHeight,
                                                           planeIndex,
                                                           &cvTexture)
    // Only hand back the texture when creation actually succeeded.
    return result == kCVReturnSuccess ? cvTexture : nil
}
func drawCapturedImage(renderEncoder: MTLRenderCommandEncoder) {
    // Skip the pass entirely until both camera planes are available.
    guard let lumaTexture = capturedImageTextureY,
          let chromaTexture = capturedImageTextureCbCr else {
        return
    }

    // Label the commands so they are identifiable in GPU Frame Capture.
    renderEncoder.pushDebugGroup("DrawCapturedImage")

    // Encoder state for the full-screen camera quad.
    renderEncoder.setCullMode(.none)
    renderEncoder.setRenderPipelineState(capturedImagePipelineState)
    renderEncoder.setDepthStencilState(capturedImageDepthState)

    // Quad geometry.
    renderEncoder.setVertexBuffer(imagePlaneVertexBuffer, offset: 0, index: Int(kBufferIndexMeshPositions.rawValue))

    // Bind the Y and CbCr planes sampled by the fragment shader.
    renderEncoder.setFragmentTexture(CVMetalTextureGetTexture(lumaTexture), index: Int(kTextureIndexY.rawValue))
    renderEncoder.setFragmentTexture(CVMetalTextureGetTexture(chromaTexture), index: Int(kTextureIndexCbCr.rawValue))

    // One triangle strip (4 vertices) covering the viewport.
    renderEncoder.drawPrimitives(type: .triangleStrip, vertexStart: 0, vertexCount: 4)
    renderEncoder.popDebugGroup()
}
但在另一边,我用 VNGeneratePersonSegmentationRequest
修改了 ARFrame 的像素缓冲区,最后得到一个 CIImage。如果把它转换回 pixelBuffer,其格式是 kCVPixelFormatType_32BGRA,
这与 ARFrame 原始像素缓冲区的 kCVPixelFormatType_420YpCbCr8BiPlanarFullRange 格式不同。
长话短说,修改后的帧无法绘制到 Metal 视图中,我认为是格式差异导致的。
然后我尝试直接使用那个 CIImage:首先用下面这个 func 转换它的颜色空间
import MetalPerformanceShaders
/// Renders `ciImage` into a BGRA8 Metal texture and re-wraps that texture as a
/// CIImage tagged with a device-RGB color space.
/// - Parameter ciImage: Source image (any pixel format CoreImage can read).
/// - Returns: The re-rendered image, or nil when no Metal device exists or any
///   step of the render fails.
func convertColorSpace(ciImage: CIImage) -> CIImage? {
    guard let device = MTLCreateSystemDefaultDevice() else {
        return nil
    }
    // NOTE(review): creating a CIContext and a command queue is expensive; if
    // this runs per frame, cache them on the renderer instead of rebuilding.
    let ciContext = CIContext(mtlDevice: device)
    let colorSpace = CGColorSpaceCreateDeviceRGB()

    let outputTextureDescriptor = MTLTextureDescriptor.texture2DDescriptor(
        pixelFormat: .bgra8Unorm,
        width: Int(ciImage.extent.width),
        height: Int(ciImage.extent.height),
        mipmapped: false)
    outputTextureDescriptor.usage = [.shaderRead, .shaderWrite]

    // Treat queue/buffer creation failure the same as device failure instead
    // of silently proceeding with optionals.
    guard let outputTexture = device.makeTexture(descriptor: outputTextureDescriptor),
          let commandQueue = device.makeCommandQueue(),
          let commandBuffer = commandQueue.makeCommandBuffer() else {
        return nil
    }

    let renderDestination = CIRenderDestination(
        width: Int(ciImage.extent.width),
        height: Int(ciImage.extent.height),
        pixelFormat: MTLPixelFormat.bgra8Unorm,
        commandBuffer: commandBuffer,
        mtlTextureProvider: { () -> MTLTexture in
            return outputTexture
        })
    // BUG FIX: the original created `colorSpace` but never applied it to the
    // destination, so the render was not color-managed into device RGB — a
    // likely contributor to the washed-out/tinted output. Set it explicitly.
    renderDestination.colorSpace = colorSpace

    do {
        // BUG FIX: `try?` swallowed render failures and then returned a
        // CIImage backed by an unwritten texture. Fail loudly instead.
        try ciContext.startTask(toRender: ciImage, to: renderDestination)
    } catch {
        return nil
    }
    commandBuffer.commit()
    commandBuffer.waitUntilCompleted()

    // NOTE(review): CIImage(mtlTexture:) uses the Metal (top-left) origin, so
    // the result is vertically flipped relative to CoreImage coordinates — if
    // the final draw is upside down, apply .oriented(.downMirrored). TODO confirm.
    return CIImage(mtlTexture: outputTexture, options: [.colorSpace: colorSpace])
}
然后尝试把它加入 Metal 渲染管线进行绘制。能画出来,但颜色很淡、偏绿,看起来像是只有绿色通道在起作用
/// Draws `currentCIImage` as a full-screen quad via the captured-image pipeline.
func drawCapturedImage(renderEncoder: MTLRenderCommandEncoder) {
    guard let ciImage = currentCIImage else { return }

    renderEncoder.pushDebugGroup("DrawCapturedImage")
    renderEncoder.setCullMode(.none)
    // NOTE(review): the faint/green output is most likely NOT a color-space
    // problem. `capturedImagePipelineState` was built for the YCbCr camera
    // path: its fragment shader samples a one-channel luma texture at
    // kTextureIndexY plus a CbCr texture, then converts YCbCr→RGB. Binding a
    // BGRA texture into the Y slot means only one channel is read as "luma"
    // and chroma is garbage — hence the monochrome/green cast. You need a
    // second pipeline state whose fragment shader samples a single RGBA
    // texture directly; confirm against your shader source.
    renderEncoder.setRenderPipelineState(capturedImagePipelineState)
    renderEncoder.setDepthStencilState(capturedImageDepthState)

    // Quad geometry.
    renderEncoder.setVertexBuffer(imagePlaneVertexBuffer, offset: 0, index: Int(kBufferIndexMeshPositions.rawValue))

    // Destination texture for the CoreImage render.
    // NOTE(review): allocating a texture and a command queue every frame is
    // expensive — cache both on the renderer once this approach works.
    let textureDescriptor = MTLTextureDescriptor.texture2DDescriptor(
        pixelFormat: .bgra8Unorm,
        width: Int(ciImage.extent.width),
        height: Int(ciImage.extent.height),
        mipmapped: false)
    textureDescriptor.usage = [.shaderRead, .shaderWrite]
    guard let metalTexture = device.makeTexture(descriptor: textureDescriptor),
          let commandBuffer = device.makeCommandQueue()?.makeCommandBuffer() else {
        return
    }

    commandBuffer.pushDebugGroup("RenderCIImage")
    let colorSpace = CGColorSpaceCreateDeviceRGB()
    ciContext.render(ciImage, to: metalTexture, commandBuffer: commandBuffer, bounds: ciImage.extent, colorSpace: colorSpace)
    commandBuffer.popDebugGroup()
    commandBuffer.commit()
    // BUG FIX: the original committed the CoreImage work on a *separate*
    // command queue and never waited for it, so the render encoder below
    // could sample `metalTexture` before CoreImage finished writing it (a GPU
    // race producing stale/empty frames). Blocking here is correct but slow;
    // the proper fix is to render the CIImage into this pass's own command
    // buffer before the encoder is created.
    commandBuffer.waitUntilCompleted()

    renderEncoder.setFragmentTexture(metalTexture, index: Int(kTextureIndexY.rawValue))
    renderEncoder.drawPrimitives(type: .triangleStrip, vertexStart: 0, vertexCount: 4)
    renderEncoder.popDebugGroup()
}
有人做过类似的事情吗?有什么建议吗?非常感谢