AVCaptureVideoDataOutputSampleBufferDelegate drops frames when using CIFilter for video filtering

Problem description · Votes: 0 · Answers: 1

I have a really strange situation where AVCaptureVideoDataOutputSampleBufferDelegate drops frames if I use 13 different filter chains. Let me explain:

I have a CameraController setup, nothing special, and here is my delegate method:

func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
        if !paused {

            if connection.output?.connection(with: .audio) == nil {
                //capture video

                // my attempt to avoid the "Out of buffers" error; no luck ;(
                lastCapturedBuffer = nil
                let err = CMSampleBufferCreateCopy(allocator: kCFAllocatorDefault, sampleBuffer: sampleBuffer, sampleBufferOut: &lastCapturedBuffer)
                if err == noErr {

                }

                connection.videoOrientation = .portrait

                // getting image
                let pixelBuffer = CMSampleBufferGetImageBuffer(lastCapturedBuffer!)
                // remove if any
                CVPixelBufferLockBaseAddress(pixelBuffer!, CVPixelBufferLockFlags(rawValue: 0))

                // captured - is just ciimage property
                captured = CIImage(cvPixelBuffer: pixelBuffer!)
                //remove if any
                CVPixelBufferUnlockBaseAddress(pixelBuffer!,CVPixelBufferLockFlags(rawValue: 0))
                //CVPixelBufferUnlockBaseAddress(pixelBuffer!, .readOnly)

                // transform image to target resolution
                let srcWidth = CGFloat(captured.extent.width)
                let srcHeight = CGFloat(captured.extent.height)

                let dstWidth: CGFloat = ConstantsManager.shared.k_video_width
                let dstHeight: CGFloat = ConstantsManager.shared.k_video_height

                let scaleX = dstWidth / srcWidth
                let scaleY = dstHeight / srcHeight

                let transform = CGAffineTransform(scaleX: scaleX, y: scaleY)
                captured = captured.transformed(by: transform).cropped(to: CGRect(x: 0, y: 0, width: dstWidth, height: dstHeight))
                // mirror for front camera
                if front {
                    var t = CGAffineTransform.init(scaleX: -1, y: 1)
                    t = t.translatedBy(x: -ConstantsManager.shared.k_video_width, y: 0)
                    captured = captured.transformed(by: t)
                }

                // video capture logic
                let writable = canWrite()

                if writable,
                    sessionAtSourceTime == nil {
                    sessionAtSourceTime = CMSampleBufferGetPresentationTimeStamp(lastCapturedBuffer!)
                    videoWriter.startSession(atSourceTime: sessionAtSourceTime!)
                }

                if writable, (videoWriterInput.isReadyForMoreMediaData) {
                    videoWriterInput.append(lastCapturedBuffer!)
                }

                // apply effect in realtime <- here is the problem. If I comment out the next line, the drops stop, but the effect isn't applied
                captured = FilterManager.shared.applyFilterForCamera(inputImage: captured)

                // current frame in case user wants to save image as photo
                self.capturedPhoto = captured

                // sent frame to Camcoder view controller
                self.delegate?.didCapturedFrame(frame: captured)
            } else {
                // capture sound
                let writable = canWrite()
                if writable, (audioWriterInput.isReadyForMoreMediaData) {
                    //print("write audio buffer")
                    // append the audio sample buffer itself, not the copied video buffer
                    audioWriterInput?.append(sampleBuffer)
                }
            }
        } else {
            // paused
        }
    }
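A note on the lastCapturedBuffer trick above: CMSampleBufferCreateCopy is a shallow copy that retains the same underlying data, so it does not hand the original buffer back to the capture pool, and AVCaptureVideoDataOutput recycles only a small fixed pool of buffers. A minimal sketch of a delegate that takes what it needs and lets the sample buffer go immediately (the LeanCaptureDelegate class and onFrame callback are illustrative, not from the original code):

import AVFoundation
import CoreImage

final class LeanCaptureDelegate: NSObject, AVCaptureVideoDataOutputSampleBufferDelegate {

    // Illustrative callback; heavy filtering happens off the capture queue
    var onFrame: ((CIImage, CMTime) -> Void)?
    private let processingQueue = DispatchQueue(label: "camera.frame.processing")

    func captureOutput(_ output: AVCaptureOutput,
                       didOutput sampleBuffer: CMSampleBuffer,
                       from connection: AVCaptureConnection) {
        guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return }
        let timestamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)

        // CIImage keeps the pixel buffer alive only until it is rendered,
        // and the CMSampleBuffer wrapper goes back to the pool as soon as
        // this method returns; nothing is stored in a property across callbacks.
        let image = CIImage(cvPixelBuffer: pixelBuffer)
        processingQueue.async { [weak self] in
            self?.onFrame?(image, timestamp)
        }
    }
}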

I also implemented the didDrop delegate method, which is how I figured out why it drops frames:

func captureOutput(_ output: AVCaptureOutput, didDrop sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
        print("did drop")
        var mode: CMAttachmentMode = 0
        let reason = CMGetAttachment(sampleBuffer, key: kCMSampleBufferAttachmentKey_DroppedFrameReason, attachmentModeOut: &mode)
        print("reason \(String(describing: reason))") // Optional(OutOfBuffers)
    }

So I did it like a pro and just commented out parts of the code to locate the problem. Here it is:

captured = FilterManager.shared.applyFilterForCamera(inputImage: captured)

FilterManager is a singleton; here is the func being called:

func applyFilterForCamera(inputImage: CIImage) -> CIImage {
        return currentVsFilter!.apply(sourceImage: inputImage)
    }
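One thing worth keeping in mind: this call runs synchronously on the video data output queue, so the cost of apply(sourceImage:) directly decides whether the next frames are dropped. A quick way to confirm where the time goes is to wrap the call in signposts using the standard os.signpost API (a sketch of the same method; the subsystem string is made up):

import os.signpost

private let poiLog = OSLog(subsystem: "com.example.camera", category: .pointsOfInterest)

func applyFilterForCamera(inputImage: CIImage) -> CIImage {
    let spid = OSSignpostID(log: poiLog)
    os_signpost(.begin, log: poiLog, name: "applyFilter", signpostID: spid)
    defer { os_signpost(.end, log: poiLog, name: "applyFilter", signpostID: spid) }
    return currentVsFilter!.apply(sourceImage: inputImage)
}

The intervals then show up per frame in Instruments (Points of Interest), which makes it easy to see whether the filter chain, and not the writer, is what exceeds the frame budget.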

currentVsFilter is an object of VSFilter type; here is an example:

import Foundation
import AVKit

class TestFilter: CustomFilter {

    let _name = "Тестовый Фильтр"
    let _displayName = "Test Filter"

    var tempImage: CIImage?
    var final: CGImage?

    override func name() -> String {
        return _name
    }

    override func displayName() -> String {
        return _displayName
    }

    override init() {
        super.init()
        print("Test Filter init")

        // setup my custom kernel filter
        self.noise.type = GlitchFilter.GlitchType.allCases[2]
    }

    // this returns composition for playback using AVPlayer
    override func composition(asset: AVAsset) -> AVMutableVideoComposition {
        let composition = AVMutableVideoComposition(asset: asset, applyingCIFiltersWithHandler: { request in
            let inputImage = request.sourceImage.cropped(to: request.sourceImage.extent)
            DispatchQueue.global(qos: .userInitiated).async {
                let output = self.apply(sourceImage: inputImage, forComposition: true)
                request.finish(with: output, context: nil)
            }
        })
        let size = FilterManager.shared.cropRectForOrientation().size

        composition.renderSize = size
        return composition
    }

    // this returns actual filtered CIImage, used for both AVPlayer composition and realtime camera
    override func apply(sourceImage: CIImage, forComposition: Bool = false) -> CIImage {

        // rendered text
        tempImage = FilterManager.shared.textRenderedImage()

        // some filters chained one by one
        self.screenBlend?.setValue(tempImage, forKey: kCIInputImageKey)
        self.screenBlend?.setValue(sourceImage, forKey: kCIInputBackgroundImageKey)

        self.noise.inputImage = self.screenBlend?.outputImage
        self.noise.inputAmount = CGFloat.random(in: 1.0...3.0)

        // result
        tempImage = self.noise.outputImage

        // correct crop
        let rect = forComposition ? FilterManager.shared.cropRectForOrientation() : FilterManager.shared.cropRect
        final = self.context.createCGImage(tempImage!, from: rect!)

        return CIImage(cgImage: final!)
    }

}
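A detail in apply worth questioning: context.createCGImage forces a full render plus a GPU-to-CPU readback on every frame, only for the result to be wrapped straight back into a CIImage. Since CIImage chains are lazy, cropping and returning the unrendered image usually does the same job with a single render at display time. A sketch of that variant, reusing the names above:

override func apply(sourceImage: CIImage, forComposition: Bool = false) -> CIImage {
    let overlay = FilterManager.shared.textRenderedImage()

    screenBlend?.setValue(overlay, forKey: kCIInputImageKey)
    screenBlend?.setValue(sourceImage, forKey: kCIInputBackgroundImageKey)

    noise.inputImage = screenBlend?.outputImage
    noise.inputAmount = CGFloat.random(in: 1.0...3.0)

    let rect = forComposition ? FilterManager.shared.cropRectForOrientation()
                              : FilterManager.shared.cropRect
    // cropped(to:) stays lazy: no CGImage and no GPU-to-CPU readback per frame
    return noise.outputImage!.cropped(to: rect!)
}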

Now the weirdest thing: I have 30 VSFilters, and when I reach the 13th one (switching them one by one via a UIButton) I get the "Out of buffers" error, specifically:

kCMSampleBufferDroppedFrameReason_OutOfBuffers

What I have tested:

  • I changed the order of the vsFilters in the filter array inside the FilterManager singleton: same result
  • I tried switching from the 1st filter up to the 12th and back: that works, but as soon as I switch to a 13th one (any of the 30, in any order): error

It looks like it can only handle 12 VSFilter objects, as if it retains them somehow, or maybe it is thread related; I don't know.
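For what it's worth, one pattern that matches the "works up to 12 filters" symptom is each VSFilter owning its own CIContext (the self.context used in apply above): every context allocates and caches its own GPU buffers, and those stay alive for as long as the filter object is retained. A minimal sketch of routing every filter through one shared context instead (the sharedContext name is an assumption, not from the original code):

import CoreImage
import Metal

extension FilterManager {
    // One context for the whole app: CIContext is thread-safe and
    // reuses its internal buffer pools across renders and filters.
    static let sharedContext: CIContext = {
        if let device = MTLCreateSystemDefaultDevice() {
            return CIContext(mtlDevice: device)
        }
        return CIContext() // software fallback
    }()
}

// In VSFilter subclasses, render through the shared context instead of self.context:
// final = FilterManager.sharedContext.createCGImage(tempImage!, from: rect!)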

The app is made for iOS devices and was tested on an iPhone X running iOS 13.3.1. It is a video editor app that applies different effects both to the live camera stream and to video files from the camera roll.

Maybe someone has experience with this?

Have a nice day.

Best, Victor

Edit 1. It works if I re-initialize the cameraController (the AVCaptureSession and its input/output devices), but that is an ugly option and it adds lag when switching filters.
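Since rebuilding the whole capture session fixes it, the leaked resources most likely belong to the filter objects themselves; a cheaper version of the same reset would be to create filters on demand and release the previous one when switching (a sketch; makeFilter(at:) is a hypothetical factory):

func switchFilter(to index: Int) {
    currentVsFilter = nil                    // free the old filter, its CIContext and caches
    currentVsFilter = makeFilter(at: index)  // build lazily instead of keeping all 30 alive
}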


ios swift video avcapturesession cifilter
1 Answer

0 votes

OK, so I finally won this battle. In case anyone else runs into this "OutOfBuffers" problem, here is my solution:
