iOS: How to properly handle orientation when capturing video with AVAssetWriter

Problem description (1 vote, 2 answers)

I am putting together a sample application that uses AVFoundation to record video. The whole point is so I can have more control over how the video is recorded. In my sample project I have the video capturing, but I am struggling with handling orientation correctly.

I have done a lot of searching around the web and found that others are suggesting that I should not allow my capture view or capture session to rotate based on orientation, but instead set a transform to rotate the video during playback. I have that working fine on iOS and Mac devices, but I am wondering if I will have problems on other platforms such as Windows or Android.
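Concretely, that approach means something like the following on the writer input (a minimal sketch that mirrors the getVideoTransform() call in the full code below):

// sketch: keep the capture session un-rotated and set a playback
// rotation on the AVAssetWriterInput instead (90° for portrait here)
videoInput?.transform = CGAffineTransform(rotationAngle: CGFloat(90.0 * M_PI / 180.0))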

Also, when I view the metadata of the recorded video, I see that the width and height are not set appropriately for the orientation. This makes sense, since I am only transforming the presentation of the video and not its actual resolution.
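For reference, the stored dimensions and orientation are easy to read back off the file; a minimal sketch, where `videoUrl` is a placeholder for the recorded file's URL:

import AVFoundation

// minimal sketch: inspect what a recorded file actually stores;
// `videoUrl` is a placeholder for the recording's URL
let videoUrl = URL(fileURLWithPath: "recording/video.mp4") // placeholder
let asset = AVURLAsset(url: videoUrl)

if let track = asset.tracks(withMediaType: AVMediaTypeVideo).first {
    // naturalSize is the encoded resolution (e.g. 640x480) no matter
    // how the device was held during recording
    print("naturalSize: \(track.naturalSize)")

    // preferredTransform is the rotation players apply at playback;
    // identity means no orientation metadata was written
    print("preferredTransform: \(track.preferredTransform)")
}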

My question is: how do I properly support portrait and landscape orientations and have them correctly reflected in the video file output? I need these videos to play back correctly on all platforms, so I am thinking this will be pretty important to get right.

Here is the full source I have written so far. I appreciate any advice you can offer.

import UIKit
import AVFoundation

class ViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate {

    //MARK: - Outlet

    @IBOutlet weak var previewView: UIView!
    @IBOutlet var playStopButton: UIButton!

    //MARK: - Private Variables

    let videoQueue = DispatchQueue(label: "VideoQueue", qos: .background, attributes: .concurrent, autoreleaseFrequency: .workItem, target: nil)
    private let captureSession = AVCaptureSession()

    var outputUrl: URL {
        get {

            if let url = _outputUrl {
                return url
            }


            _outputUrl = outputDirectory.appendingPathComponent("video.mp4")
            return _outputUrl!
        }
    }

    private var _outputUrl: URL?

    var outputDirectory: URL {
        get {

            if let url = _outputDirectory {
                return url
            }


            _outputDirectory = getDocumentsDirectory().appendingPathComponent("recording")
            return _outputDirectory!
        }
    }

    private var _outputDirectory: URL?

    private var assetWriter: AVAssetWriter?
    private var videoInput: AVAssetWriterInput?
    private var audioInput: AVAssetWriterInput?
    private var videoOutput: AVCaptureVideoDataOutput?
    private var audioOutput: AVCaptureAudioDataOutput?

    private var isRecording = false
    private var isWriting = false

    private var videoSize = CGSize(width: 640, height: 480)

    //MARK: - View Life-cycle

    override func viewDidLoad() {
        super.viewDidLoad()


        videoQueue.async {

            do {

                try self.configureCaptureSession()
                try self.configureAssetWriter()

                DispatchQueue.main.async {
                    self.configurePreview()
                }

            } catch {

                DispatchQueue.main.async {
                    self.showAlert("Unable to configure video output")
                }
            }
        }
    }

    override var preferredInterfaceOrientationForPresentation: UIInterfaceOrientation {
        return .portrait
    }

    override var supportedInterfaceOrientations: UIInterfaceOrientationMask {
        return .portrait
    }

    //MARK: - Capture Session

    private func configureCaptureSession() throws {

        do {

            // configure the session
            if captureSession.canSetSessionPreset(AVCaptureSessionPreset640x480) {
                captureSession.sessionPreset = AVCaptureSessionPreset640x480
            }

            // configure capture devices
            let camDevice = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeVideo)
            let micDevice = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeAudio)

            let camInput = try AVCaptureDeviceInput(device: camDevice)
            let micInput = try AVCaptureDeviceInput(device: micDevice)

            if captureSession.canAddInput(camInput) {
                captureSession.addInput(camInput)
            }

            if captureSession.canAddInput(micInput) {
                captureSession.addInput(micInput)
            }

            // configure audio/video output
            videoOutput = AVCaptureVideoDataOutput()
            videoOutput?.alwaysDiscardsLateVideoFrames = false // TODO: is this necessary?
            videoOutput?.setSampleBufferDelegate(self, queue: videoQueue)

            if let v = videoOutput {
                captureSession.addOutput(v)
            }

            audioOutput = AVCaptureAudioDataOutput()
            audioOutput?.setSampleBufferDelegate(self, queue: videoQueue)

            if let a = audioOutput {
                captureSession.addOutput(a)
            }

            // configure audio session
            let audioSession = AVAudioSession.sharedInstance()
            try audioSession.setCategory(AVAudioSessionCategoryPlayAndRecord)
            try audioSession.setActive(true)

            var micPort: AVAudioSessionPortDescription?

            if let inputs = audioSession.availableInputs {
                for port in inputs {
                    if port.portType == AVAudioSessionPortBuiltInMic {
                        micPort = port
                        break;
                    }
                }
            }

            if let port = micPort, let dataSources = port.dataSources {

                for source in dataSources {
                    if source.orientation == AVAudioSessionOrientationFront {
                        try audioSession.setPreferredInput(port)
                        break
                    }
                }
            }

        } catch {
            print("Failed to configure audio/video capture session")
            throw error
        }
    }

    private func configureAssetWriter() throws {

        prepareVideoFile()

        do {

            assetWriter = try AVAssetWriter(url: outputUrl, fileType: AVFileTypeMPEG4)

            guard let writer = assetWriter else {
                print("Asset writer not created")
                return
            }

            let vidSize = videoSize
            let videoSettings: [String: Any] = [AVVideoCodecKey: AVVideoCodecH264,
                                 AVVideoWidthKey: NSNumber(value: Float(vidSize.width)),
                                 AVVideoHeightKey: NSNumber(value: Float(vidSize.height))]

            videoInput = AVAssetWriterInput(mediaType: AVMediaTypeVideo, outputSettings: videoSettings)
            videoInput?.expectsMediaDataInRealTime = true
            videoInput?.transform = getVideoTransform()

            // describe a stereo channel layout for the AAC encoder
            var channelLayout = AudioChannelLayout()
            memset(&channelLayout, 0, MemoryLayout<AudioChannelLayout>.size)
            channelLayout.mChannelLayoutTag = kAudioChannelLayoutTag_Stereo

            let audioSettings: [String: Any] = [AVFormatIDKey: kAudioFormatMPEG4AAC,
                                                AVSampleRateKey: 44100,
                                                AVNumberOfChannelsKey: 2,
                                                AVChannelLayoutKey: NSData(bytes: &channelLayout,
                                                                           length: MemoryLayout<AudioChannelLayout>.size)]

            audioInput = AVAssetWriterInput(mediaType: AVMediaTypeAudio, outputSettings: audioSettings)
            audioInput?.expectsMediaDataInRealTime = true

            guard let vi = videoInput else {
                print("Video input not configured")
                return
            }

            guard let ai = audioInput else {
                print("Audio input not configured")
                return
            }

            if writer.canAdd(vi) {
                writer.add(vi)
            }

            if writer.canAdd(ai) {
                writer.add(ai)
            }

        } catch {
            print("Failed to configure asset writer")
            throw error
        }
    }

    private func prepareVideoFile() {

        if FileManager.default.fileExists(atPath: outputUrl.path) {

            do {
                try FileManager.default.removeItem(at: outputUrl)
            } catch {
                print("Unable to remove file at URL \(outputUrl)")
            }
        }

        if !FileManager.default.fileExists(atPath: outputDirectory.path) {

            do {
                try FileManager.default.createDirectory(at: outputDirectory, withIntermediateDirectories: true, attributes: nil)
            } catch {
                print("Unable to create directory at URL \(outputDirectory)")
            }
        }
    }

    private func configurePreview() {

        if let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession) {
            previewLayer.frame = previewView.bounds
            previewView.layer.addSublayer(previewLayer)
        }
    }

    private func getVideoSize() -> CGSize {

        if UIDevice.current.orientation == .landscapeLeft || UIDevice.current.orientation == .landscapeRight {

            if videoSize.width > videoSize.height {
                return videoSize
            } else {
                return CGSize(width: videoSize.height, height: videoSize.width)
            }

        } else {

            if videoSize.width < videoSize.height {
                return videoSize
            } else {
                return CGSize(width: videoSize.height, height: videoSize.width)
            }
        }
    }

    private func getVideoTransform() -> CGAffineTransform {

        switch UIDevice.current.orientation {

        case .portraitUpsideDown:
            return CGAffineTransform(rotationAngle: CGFloat(-90.0 * M_PI / 180.0))

        case .landscapeLeft:
            return CGAffineTransform(rotationAngle: CGFloat(-180.0 * M_PI / 180.0)) // TODO: Add support for front facing camera
//            return CGAffineTransform(rotationAngle: 0.0) // TODO: For front facing camera

        case .landscapeRight:
            return CGAffineTransform(rotationAngle: 0.0) // TODO: Add support for front facing camera
//            return CGAffineTransform(rotationAngle: CGFloat(-180.0 * M_PI / 180.0)) // TODO: For front facing camera

        default: // portrait
            return CGAffineTransform(rotationAngle: CGFloat(90.0 * M_PI / 180.0))
        }
    }

    //MARK: - Controls

    private func startRecording() {

        videoQueue.async {
            self.captureSession.startRunning()
        }

        isRecording = true
        playStopButton.setTitle("Stop Recording", for: .normal)
        print("Recording did start")
    }

    private func stopRecording() {

        if !isRecording {
            return
        }

        videoQueue.async {

            self.assetWriter?.finishWriting {
                print("Asset writer did finish writing")
                self.isWriting = false
            }

            self.captureSession.stopRunning()
        }

        isRecording = false

        playStopButton.setTitle("Start Recording", for: .normal)
        print("Recording did stop")
    }

    //MARK: - AVCaptureVideoDataOutputSampleBufferDelegate

    func captureOutput(_ captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, from connection: AVCaptureConnection!) {

        guard let w = assetWriter else {
            print("Asset writer not configured")
            return
        }

        guard let vo = videoOutput else {
            print("Video output not configured")
            return
        }

        guard let ao = audioOutput else {
            print("Audio output not configured")
            return
        }

        guard let vi = videoInput else {
            print("Video input not configured")
            return
        }

        guard let ai = audioInput else {
            print("Audio input not configured")
            return
        }

        let st = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)

        print("Writer status \(w.status.rawValue)")

        if let e = w.error {
            print("Writer error \(e)")
            stopRecording()
            return
        }

        switch w.status {

        case .unknown:

            if !isWriting {
                isWriting = true
                w.startWriting()
                w.startSession(atSourceTime: st)
            }

            return

        case .completed:
            print("Video writing completed")
            return

        case .cancelled:
            print("Video writing cancelled")
            return

        case .failed:
            print("Video writing failed")
            return

        default:
            print("Video is writing")
        }

        if vo == captureOutput {

            if !vi.append(sampleBuffer) {
                print("Unable to write to video buffer")
            }

        } else if ao == captureOutput {

            if !ai.append(sampleBuffer) {
                print("Unable to write to audio buffer")
            }
        }
    }

    //MARK: Helpers

    private func getDocumentsDirectory() -> URL {
        let paths = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)
        let documentsDirectory = paths[0]
        return documentsDirectory
    }

    //MARK: Actions

    @IBAction func startStopTapped(sender: AnyObject) {

        if isRecording {
            stopRecording()
        } else {
            startRecording()
        }
    }
}
Tags: ios, swift, video, avassetwriter, avassetwriterinput
2 Answers

3 votes

I have figured out the solution to my problem. The solution is to export the video using AVAssetExportSession to handle setting the video size, and then to handle the rotation at the time of export rather than during recording. I still have an issue where I need to fix the scale factor to go from my original video size to the smaller 640x480 resolution, but at least I solved my rotation problems. Please see the updated code below.

import UIKit
import AVFoundation

class ViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate {

    //MARK: - Outlet

    @IBOutlet weak var previewView: UIView!
    @IBOutlet var playStopButton: UIButton!

    //MARK: - Private Variables

    let videoQueue = DispatchQueue(label: "VideoQueue", qos: .background, attributes: .concurrent, autoreleaseFrequency: .workItem, target: nil)
    private let captureSession = AVCaptureSession()

    var outputUrl: URL {
        get {

            if let url = _outputUrl {
                return url
            }

            _outputUrl = outputDirectory.appendingPathComponent("video.mp4")
            return _outputUrl!
        }
    }

    private var _outputUrl: URL?

    var exportUrl: URL {
        get {

            if let url = _exportUrl {
                return url
            }

            _exportUrl = outputDirectory.appendingPathComponent("video_encoded.mp4")
            return _exportUrl!
        }
    }

    private var _exportUrl: URL?

    var outputDirectory: URL {
        get {

            if let url = _outputDirectory {
                return url
            }

            _outputDirectory = getDocumentsDirectory().appendingPathComponent("recording")
            return _outputDirectory!
        }
    }

    private var _outputDirectory: URL?

    private var assetWriter: AVAssetWriter?
    private var videoInput: AVAssetWriterInput?
    private var audioInput: AVAssetWriterInput?
    private var videoOutput: AVCaptureVideoDataOutput?
    private var audioOutput: AVCaptureAudioDataOutput?

    private var isRecording = false
    private var isWriting = false

    private var videoSize = CGSize(width: 640, height: 480)
    private var exportPreset = AVAssetExportPreset640x480

    //MARK: - View Life-cycle

    override func viewDidLoad() {
        super.viewDidLoad()

        videoQueue.async {

            do {

                try self.configureCaptureSession()

                DispatchQueue.main.sync {
                    self.configurePreview()
                }

            } catch {

                DispatchQueue.main.async {
                    self.showAlert("Unable to configure capture session")
                }
            }
        }
    }

    override var preferredInterfaceOrientationForPresentation: UIInterfaceOrientation {
        return .portrait
    }

    override var supportedInterfaceOrientations: UIInterfaceOrientationMask {
        return .portrait
    }

    //MARK: - Capture Session

    private func configureCaptureSession() throws {

        do {

            // configure capture devices
            let camDevice = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeVideo)
            let micDevice = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeAudio)

            let camInput = try AVCaptureDeviceInput(device: camDevice)
            let micInput = try AVCaptureDeviceInput(device: micDevice)

            if captureSession.canAddInput(camInput) {
                captureSession.addInput(camInput)
            }

            if captureSession.canAddInput(micInput) {
                captureSession.addInput(micInput)
            }

            // configure audio/video output
            videoOutput = AVCaptureVideoDataOutput()
            videoOutput?.alwaysDiscardsLateVideoFrames = false // TODO: is this necessary?
            videoOutput?.setSampleBufferDelegate(self, queue: videoQueue)

            if let v = videoOutput {
                captureSession.addOutput(v)
            }

            audioOutput = AVCaptureAudioDataOutput()
            audioOutput?.setSampleBufferDelegate(self, queue: videoQueue)

            if let a = audioOutput {
                captureSession.addOutput(a)
            }

            // configure audio session
            let audioSession = AVAudioSession.sharedInstance()
            try audioSession.setCategory(AVAudioSessionCategoryPlayAndRecord)
            try audioSession.setActive(true)

            var micPort: AVAudioSessionPortDescription?

            if let inputs = audioSession.availableInputs {
                for port in inputs {
                    if port.portType == AVAudioSessionPortBuiltInMic {
                        micPort = port
                        break;
                    }
                }
            }

            if let port = micPort, let dataSources = port.dataSources {

                for source in dataSources {
                    if source.orientation == AVAudioSessionOrientationFront {
                        try audioSession.setPreferredInput(port)
                        break
                    }
                }
            }

        } catch {
            print("Failed to configure audio/video capture session")
            throw error
        }
    }

    private func configureAssetWriter() throws {

        prepareVideoFile()

        do {

            if assetWriter != nil {
                assetWriter = nil
                videoInput = nil
                audioInput = nil
            }

            assetWriter = try AVAssetWriter(url: outputUrl, fileType: AVFileTypeMPEG4)

            guard let writer = assetWriter else {
                print("Asset writer not created")
                return
            }

            let videoSettings: [String: Any] = [AVVideoCodecKey: AVVideoCodecH264,
                                 AVVideoWidthKey: NSNumber(value: Float(videoSize.width)),
                                 AVVideoHeightKey: NSNumber(value: Float(videoSize.height))]

            videoInput = AVAssetWriterInput(mediaType: AVMediaTypeVideo, outputSettings: videoSettings)
            videoInput?.expectsMediaDataInRealTime = true

            // describe a stereo channel layout for the AAC encoder
            var channelLayout = AudioChannelLayout()
            memset(&channelLayout, 0, MemoryLayout<AudioChannelLayout>.size)
            channelLayout.mChannelLayoutTag = kAudioChannelLayoutTag_Stereo

            let audioSettings: [String: Any] = [AVFormatIDKey: kAudioFormatMPEG4AAC,
                                                AVSampleRateKey: 44100,
                                                AVNumberOfChannelsKey: 2,
                                                AVChannelLayoutKey: NSData(bytes: &channelLayout,
                                                                           length: MemoryLayout<AudioChannelLayout>.size)]

            audioInput = AVAssetWriterInput(mediaType: AVMediaTypeAudio, outputSettings: audioSettings)
            audioInput?.expectsMediaDataInRealTime = true

            guard let vi = videoInput else {
                print("Video input not configured")
                return
            }

            guard let ai = audioInput else {
                print("Audio input not configured")
                return
            }

            if writer.canAdd(vi) {
                writer.add(vi)
            }

            if writer.canAdd(ai) {
                writer.add(ai)
            }

        } catch {
            print("Failed to configure asset writer")
            throw error
        }
    }

    private func prepareVideoFile() {

        if FileManager.default.fileExists(atPath: outputUrl.path) {

            do {
                try FileManager.default.removeItem(at: outputUrl)
            } catch {
                print("Unable to remove file at URL \(outputUrl)")
            }
        }

        if !FileManager.default.fileExists(atPath: outputDirectory.path) {

            do {
                try FileManager.default.createDirectory(at: outputDirectory, withIntermediateDirectories: true, attributes: nil)
            } catch {
                print("Unable to create directory at URL \(outputDirectory)")
            }
        }
    }

    private func configurePreview() {

        if let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession) {
            previewLayer.frame = previewView.bounds
            previewView.layer.addSublayer(previewLayer)
        }
    }

    private func getVideoSize() -> CGSize {

        if UIDevice.current.orientation == .landscapeLeft || UIDevice.current.orientation == .landscapeRight {

            if videoSize.width > videoSize.height {
                return videoSize
            } else {
                return CGSize(width: videoSize.height, height: videoSize.width)
            }

        } else {

            if videoSize.width < videoSize.height {
                return videoSize
            } else {
                return CGSize(width: videoSize.height, height: videoSize.width)
            }
        }
    }

    //MARK: - Controls

    private func startRecording() {

        videoQueue.async {

            do {
                try self.configureAssetWriter()
                self.captureSession.startRunning()

            } catch {
                print("Unable to start recording")
                DispatchQueue.main.async { self.showAlert("Unable to start recording") }
            }
        }

        isRecording = true
        playStopButton.setTitle("Stop Recording", for: .normal)
        print("Recording did start")
    }

    private func stopRecording() {

        if !isRecording {
            return
        }

        videoQueue.async {

            self.assetWriter?.finishWriting {
                print("Asset writer did finish writing")
                self.isWriting = false
            }

            self.captureSession.stopRunning()

            do {
                try self.export()
            } catch {
                print("Export failed")
                DispatchQueue.main.async { self.showAlert("Unable to export video") }
            }
        }

        isRecording = false

        playStopButton.setTitle("Start Recording", for: .normal)
        print("Recording did stop")
    }

    //MARK: - AVCaptureVideoDataOutputSampleBufferDelegate

    func captureOutput(_ captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, from connection: AVCaptureConnection!) {

        guard let w = assetWriter else {
            print("Asset writer not configured")
            return
        }

        guard let vo = videoOutput else {
            print("Video output not configured")
            return
        }

        guard let ao = audioOutput else {
            print("Audio output not configured")
            return
        }

        guard let vi = videoInput else {
            print("Video input not configured")
            return
        }

        guard let ai = audioInput else {
            print("Audio input not configured")
            return
        }

        let st = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)

        print("Writer status \(w.status.rawValue)")

        if let e = w.error {
            print("Writer error \(e)")
            stopRecording()
            return
        }

        switch w.status {

        case .unknown:

            if !isWriting {
                isWriting = true
                w.startWriting()
                w.startSession(atSourceTime: st)
            }

            return

        case .completed:
            print("Video writing completed")
            return

        case .cancelled:
            print("Video writing cancelled")
            return

        case .failed:
            print("Video writing failed")
            return

        default:
            print("Video is writing")
        }

        if vo == captureOutput {

            if !vi.append(sampleBuffer) {
                print("Unable to write to video buffer")
            }

        } else if ao == captureOutput {

            if !ai.append(sampleBuffer) {
                print("Unable to write to audio buffer")
            }
        }
    }

    //MARK: - Export

    private func getVideoComposition(asset: AVAsset, videoSize: CGSize) -> AVMutableVideoComposition? {

        guard let videoTrack = asset.tracks(withMediaType: AVMediaTypeVideo).first else {
            print("Unable to get video tracks")
            return nil
        }

        let videoComposition = AVMutableVideoComposition()
        videoComposition.renderSize = videoSize

        let seconds: Float64 = Float64(1.0 / videoTrack.nominalFrameRate)
        videoComposition.frameDuration = CMTimeMakeWithSeconds(seconds, 600);

        let layerInst = AVMutableVideoCompositionLayerInstruction(assetTrack: videoTrack)

        var transforms = asset.preferredTransform

        // a preferredTransform matching a ±90° rotation matrix means the
        // track already carries orientation metadata, so leave it alone
        var isPortrait = true

        if (transforms.a == 0.0 && transforms.b == 1.0 && transforms.c == -1.0 && transforms.d == 0.0) ||
           (transforms.a == 0.0 && transforms.b == -1.0 && transforms.c == 1.0 && transforms.d == 0.0) {
            isPortrait = false
        }

        if isPortrait {
            // rotate 90° and translate the frame back into the render rect
            transforms = transforms.concatenating(CGAffineTransform(rotationAngle: CGFloat(90.0 * M_PI / 180.0)))
            transforms = transforms.concatenating(CGAffineTransform(translationX: videoSize.width, y: 0))
        }

        layerInst.setTransform(transforms, at: kCMTimeZero)

        let inst = AVMutableVideoCompositionInstruction()
        inst.backgroundColor = UIColor.black.cgColor
        inst.layerInstructions = [layerInst]
        inst.timeRange = CMTimeRange(start: kCMTimeZero, duration: asset.duration)

        videoComposition.instructions = [inst]

        return videoComposition

    }

    private func export() throws {

        let videoAsset = AVURLAsset(url: outputUrl)

        if FileManager.default.fileExists(atPath: exportUrl.path) {
            try FileManager.default.removeItem(at: exportUrl)
        }

        let videoSize = getVideoSize()

        guard let encoder = AVAssetExportSession(asset: videoAsset, presetName: exportPreset) else {
            print("Unable to create encoder")
            return
        }

        guard let vidcomp = getVideoComposition(asset: videoAsset, videoSize: videoSize) else {
            print("Unable to create video composition")
            return
        }

        encoder.videoComposition = vidcomp
        encoder.outputFileType = AVFileTypeMPEG4  // MP4 format
        encoder.outputURL = exportUrl
        encoder.shouldOptimizeForNetworkUse = true

        encoder.exportAsynchronously(completionHandler: {
            print("Video exported successfully")
        })
    }

    //MARK: Helpers

    private func getDocumentsDirectory() -> URL {
        let paths = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)
        let documentsDirectory = paths[0]
        return documentsDirectory
    }

    //MARK: Actions

    @IBAction func startStopTapped(sender: AnyObject) {

        if isRecording {
            stopRecording()
        } else {
            startRecording()
        }
    }
}
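A note on the remaining scale issue: one option would be to fold a scale factor into the same layer instruction inside getVideoComposition(asset:videoSize:). A minimal, untested sketch (the scale math is an assumption, not part of the code above):

// sketch: scale the source track toward the render size before the
// portrait rotation above; this would live inside
// getVideoComposition(asset:videoSize:)
let naturalSize = videoTrack.naturalSize
let scale = min(videoSize.width / naturalSize.width,
                videoSize.height / naturalSize.height)

var transforms = asset.preferredTransform
transforms = transforms.concatenating(CGAffineTransform(scaleX: scale, y: scale))

// then apply the portrait rotation/translation and
// layerInst.setTransform(transforms, at: kCMTimeZero) as before; note
// the translation offset may need the scaled width rather than
// videoSize.width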

0 votes

Just swap the width and height in the writer settings.

And don't forget about HEVC:

assetWriter = try AVAssetWriter(url: outputUrl, fileType: AVFileTypeMPEG4)

https://developer.apple.com/videos/play/wwdc2017/503
https://developer.apple.com/videos/play/wwdc2017/511

- (BOOL)configureWriterInput {
    const BOOL isError = YES;

    AVFileType mov = AVFileTypeQuickTimeMovie;

    NSDictionary<NSString *, id> *settings;

    // prefer HEVC when the device/OS supports it (iOS 11+)
    if (@available(iOS 11.0, *)) {
        NSArray<AVVideoCodecType> *available =
            [self.sessionOutput availableVideoCodecTypesForAssetWriterWithOutputFileType:mov];

        const BOOL isHEVC = [available containsObject:AVVideoCodecTypeHEVC];

        if (isHEVC) {
            settings = [self.sessionOutput recommendedVideoSettingsForVideoCodecType:AVVideoCodecTypeHEVC
                                                           assetWriterOutputFileType:mov];
        }
        else {
            settings = [self.sessionOutput recommendedVideoSettingsForAssetWriterWithOutputFileType:mov];
        }
    }
    else {
        settings = [self.sessionOutput recommendedVideoSettingsForAssetWriterWithOutputFileType:mov];
    }

    if (![writer canApplyOutputSettings:settings forMediaType:AVMediaTypeVideo]) {
        return isError;
    }

    // swap width and height to fix orientation
    if (settings[AVVideoHeightKey] && settings[AVVideoWidthKey]) {
        NSMutableDictionary<NSString *, id> *rotate = [settings mutableCopy];

        rotate[AVVideoHeightKey] = settings[AVVideoWidthKey];
        rotate[AVVideoWidthKey] = settings[AVVideoHeightKey];

        if ([writer canApplyOutputSettings:rotate forMediaType:AVMediaTypeVideo]) {
            settings = rotate;
        }
    }

    writerInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
                                                     outputSettings:settings];

    // alternative: rotate with a transform on the writer input instead
    // of swapping dimensions:
    // AVCaptureConnection *con =
    //     [self.sessionOutput connectionWithMediaType:AVMediaTypeVideo];
    // const AVCaptureVideoOrientation o = con.videoOrientation;
    // writerInput.transform = [[self class] configureOrientationTransform:o];

    if ([writer canAddInput:writerInput]) {
        [writer addInput:writerInput];
        return !isError;
    }
    else {
        return isError;
    }
}
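For comparison, a rough Swift sketch of the same idea using explicit settings instead of the recommended-settings API (the swapped 480x640 numbers and the scaling-mode key are assumptions, not taken from either answer):

// sketch: write portrait dimensions by swapping width and height in
// the output settings; aspect-fill scaling crops the incoming
// 640x480 buffers instead of squashing them into 480x640
let videoSettings: [String: Any] = [
    AVVideoCodecKey: AVVideoCodecH264,
    AVVideoWidthKey: NSNumber(value: 480),  // swapped
    AVVideoHeightKey: NSNumber(value: 640), // swapped
    AVVideoScalingModeKey: AVVideoScalingModeResizeAspectFill
]

let videoInput = AVAssetWriterInput(mediaType: AVMediaTypeVideo,
                                    outputSettings: videoSettings)
videoInput.expectsMediaDataInRealTime = true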