Crash with no crash report

Question

I have a class in my app that combines a number of video files into one overall video. There is one main asset that I work from, and the other assets are applied on top of it. The only audio used comes from the main asset. Here is the code:

import UIKit
import AVFoundation
import Photos


class Merger: NSObject {

    var controller:EditVideoViewController!
    var button:AddAssetButton!
    var view:UIView!
    var difference:Double!
    var changed:Bool = false
    var AI:AIView!

    convenience init(controller:EditVideoViewController, button:AddAssetButton) {
        self.init()
        self.controller = controller
        self.button = button

        self.view = UIView(frame: controller.view.bounds)
        self.view.backgroundColor = UIColor.black.withAlphaComponent(0.7)
        self.controller.view.addSubview(self.view)
    }  

    func setupAI() {
        self.AI = AIView(view: self.view)
        self.AI.start()
    }

    func removeAI() {
        self.AI.stop()
        self.AI.removeEverything()
    }

    //The video is displaying in Portrait after merge.
    func merge(completion:@escaping () -> Void, assets:[Asset]) {

        self.setupAI()

        let assets = assets.sorted(by: { $0.layer.zPosition < $1.layer.zPosition })
        if let firstAsset = controller.firstAsset {

            let mixComposition = AVMutableComposition()

            let firstTrack = mixComposition.addMutableTrack(withMediaType: AVMediaTypeVideo,
                                                                     preferredTrackID: Int32(kCMPersistentTrackID_Invalid))

            do {
                try firstTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, self.controller.realDuration),
                                           of: firstAsset.tracks(withMediaType: AVMediaTypeVideo)[0],
                                           at: kCMTimeZero)
            } catch _ {
                print("Failed to load first track")
            }

            var myTracks:[AVMutableCompositionTrack] = []

            for asset in assets {

                let secondTrack = mixComposition.addMutableTrack(withMediaType: AVMediaTypeVideo,
                                                                          preferredTrackID: Int32(kCMPersistentTrackID_Invalid))
                secondTrack.preferredTransform = asset.asset.preferredTransform
                do {
                    try secondTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, asset.endTime-asset.beginTime),
                                               of: asset.asset.tracks(withMediaType: AVMediaTypeVideo)[0],
                                               at: CMTime(seconds: CMTimeGetSeconds(asset.beginTime), preferredTimescale: 600000))
                } catch _ {
                    print("Failed to load second track")
                }
                myTracks.append(secondTrack)
            }

            if let loadedAudioAsset = controller.audioAsset {
                let audioTrack = mixComposition.addMutableTrack(withMediaType: AVMediaTypeAudio, preferredTrackID: 0)
                do {
                    try audioTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, self.controller.realDuration),
                                               of: loadedAudioAsset.tracks(withMediaType: AVMediaTypeAudio)[0] ,
                                               at: kCMTimeZero)
                } catch _ {
                    print("Failed to load Audio track")
                }
            }

            let mainInstruction = AVMutableVideoCompositionInstruction()
            mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, self.controller.realDuration)

            let firstInstruction = videoCompositionInstructionForTrack(firstTrack, firstAsset)
            var instructions:[AVMutableVideoCompositionLayerInstruction] = []
            var counter:Int = 0
            for tracks in myTracks {
                firstInstruction.setOpacity(0.0, at: assets[counter].beginTime)
                let secondInstruction = videoCompositionInstructionForTrack(tracks, assets[counter].asset, type:true)
                secondInstruction.setOpacity(0.0, at: assets[counter].endTime)
                firstInstruction.setOpacity(1.0, at: assets[counter].endTime)
                instructions.append(secondInstruction)
                counter += 1
            }

            mainInstruction.layerInstructions = [firstInstruction] + instructions
            let mainComposition = AVMutableVideoComposition()
            mainComposition.instructions = [mainInstruction]
            mainComposition.frameDuration = CMTimeMake(1, 30)
            mainComposition.renderSize = self.controller.firstAsset!.tracks(withMediaType: AVMediaTypeVideo)[0].naturalSize

            let documentDirectory = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)[0]
            let savePath = (documentDirectory as NSString).appendingPathComponent("mergeVideo.mov")
            let url = URL(fileURLWithPath: savePath)
           _ = try? FileManager().removeItem(at: url)

            guard let exporter = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality) else { return }
            exporter.outputFileType = AVFileTypeMPEG4
            exporter.outputURL = url
            exporter.videoComposition = mainComposition


            exporter.exportAsynchronously(completionHandler: {
                DispatchQueue.main.async(execute: {
                    self.exportDidFinish(exporter)
                    self.removeAI()
                    completion()
                })
            })
        }
    }
    func exportDidFinish(_ exporter:AVAssetExportSession) {
         if(exporter.status == AVAssetExportSessionStatus.completed) {
            print("cool")
        }
        else if(exporter.status == AVAssetExportSessionStatus.failed) {
            print(exporter.error as Any)
        }
    }

    func videoCompositionInstructionForTrack(_ track: AVCompositionTrack, _ asset: AVAsset, type:Bool = false) -> AVMutableVideoCompositionLayerInstruction {
        let instruction = AVMutableVideoCompositionLayerInstruction(assetTrack: track)
        let assetTrack = asset.tracks(withMediaType: AVMediaTypeVideo)[0]

        var transform = assetTrack.preferredTransform
        let assetInfo = orientationFromTransform(transform)
        let width = self.controller.firstAsset!.tracks(withMediaType: AVMediaTypeVideo)[0].naturalSize.width/assetTrack.naturalSize.width
        var height = self.controller.firstAsset!.tracks(withMediaType: AVMediaTypeVideo)[0].naturalSize.height/assetTrack.naturalSize.height

        if assetInfo.isPortrait {
            //Vert Video taken from camera -- vert video from lib
            height = self.controller.firstAsset!.tracks(withMediaType: AVMediaTypeVideo)[0].naturalSize.height/assetTrack.naturalSize.width
            transform = transform.scaledBy(x: height, y: height)
            let movement = ((1/height)*assetTrack.naturalSize.height)-assetTrack.naturalSize.height
            transform = transform.translatedBy(x: 0, y: movement)
            let totalBlackDistance = self.controller.firstAsset!.tracks(withMediaType: AVMediaTypeVideo)[0].naturalSize.width-transform.tx
            transform = transform.translatedBy(x: 0, y: -(totalBlackDistance/2)*(1/height))

        } else {
            //Main Video -- hor photo from camera -- hor video from camera -- hor photo frmo lib -- hor vid frmo lib -- vert photos lib - vert photos camera
            transform = transform.scaledBy(x: width, y: height)
            let scale:CGFloat = ((self.controller.firstAsset!.tracks(withMediaType: AVMediaTypeVideo)[0].naturalSize.height/assetTrack.naturalSize.height)*(assetTrack.naturalSize.width))/self.controller.firstAsset!.tracks(withMediaType: AVMediaTypeVideo)[0].naturalSize.width
            transform = transform.scaledBy(x: scale, y: 1)
            let movement = ((self.controller.firstAsset!.tracks(withMediaType: AVMediaTypeVideo)[0].naturalSize.width-((self.controller.firstAsset!.tracks(withMediaType: AVMediaTypeVideo)[0].naturalSize.height/assetTrack.naturalSize.height)*(assetTrack.naturalSize.width)))/2)*(1/(self.controller.firstAsset!.tracks(withMediaType: AVMediaTypeVideo)[0].naturalSize.height/assetTrack.naturalSize.height))
            transform = transform.translatedBy(x: movement, y: 0)
        }
        instruction.setTransform(transform, at: kCMTimeZero)
        return instruction
    }

    func orientationFromTransform(_ transform: CGAffineTransform) -> (orientation: UIImageOrientation, isPortrait: Bool) {
        var assetOrientation = UIImageOrientation.up
        var isPortrait = false
        if transform.a == 0 && transform.b == 1.0 && transform.c == -1.0 && transform.d == 0 {
            assetOrientation = .right
            isPortrait = true
        } else if transform.a == 0 && transform.b == -1.0 && transform.c == 1.0 && transform.d == 0 {
            assetOrientation = .left
            isPortrait = true
        } else if transform.a == 1.0 && transform.b == 0 && transform.c == 0 && transform.d == 1.0 {
            assetOrientation = .up
        } else if transform.a == -1.0 && transform.b == 0 && transform.c == 0 && transform.d == -1.0 {
            assetOrientation = .down
        }

        return (assetOrientation, isPortrait)
    }
}

On my own phone, an iPhone 6s, this never crashes; I have never had a problem with it.

One of my testers, who has an iPhone 5s, gets random crashes during the export. When his phone crashes it does not look like a normal termination: the app just closes completely and nothing else happens. No crash report is sent to my Organizer window (which normally does happen), and nothing appears to be wrong. Another difficulty in testing this is that for him the crash happens on different merges; even with the same assets selected it does not crash every time. If I could reproduce the issue regularly through some standardized steps I would not have nearly as much trouble, but I am not sure where to start.

This tester is not anywhere near me; I am relying on Apple's Organizer window to get crash reports.
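
In the meantime, the only idea I have is to wrap more diagnostics around the export itself. This is only a sketch of the kind of logging I mean (the runExportWithLogging name is purely illustrative): it handles every AVAssetExportSessionStatus case and prints progress on a timer, so the last line in the console hints at how far the export got before the app disappeared.

import AVFoundation

// Sketch only: periodic progress logging plus handling of every export status.
// The exporter passed in is configured exactly as in merge(completion:assets:).
func runExportWithLogging(_ exporter: AVAssetExportSession, completion: @escaping () -> Void) {
    // Log progress twice a second while the export runs (requires iOS 10+).
    let timer = Timer.scheduledTimer(withTimeInterval: 0.5, repeats: true) { _ in
        print("export progress: \(exporter.progress)")
    }
    exporter.exportAsynchronously {
        DispatchQueue.main.async {
            timer.invalidate()
            switch exporter.status {
            case .completed:
                print("export completed")
            case .failed:
                print("export failed: \(String(describing: exporter.error))")
            case .cancelled:
                print("export cancelled: \(String(describing: exporter.error))")
            default:
                print("export ended with status \(exporter.status.rawValue)")
            }
            completion()
        }
    }
}

If the tester runs a build with the prints routed somewhere retrievable, the last progress line before the app vanishes should at least show whether it dies early in the export or near the end.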

Potential solutions

Memory: I thought this might be a memory issue, since I know that if memory is overloaded the OS will terminate the app. However, nothing ever comes through didReceiveMemoryWarning before the app is terminated.
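
To actually check the memory theory, rather than waiting on didReceiveMemoryWarning, I could log the process's memory footprint directly while the merge runs. A minimal sketch, assuming the mach task_info call with TASK_VM_INFO (the currentMemoryFootprint name is just an example):

import Darwin

// Sketch only: returns the process's physical memory footprint in bytes,
// or nil if the mach call fails.
func currentMemoryFootprint() -> UInt64? {
    var info = task_vm_info_data_t()
    var count = mach_msg_type_number_t(MemoryLayout<task_vm_info_data_t>.size / MemoryLayout<integer_t>.size)
    let kr = withUnsafeMutablePointer(to: &info) { infoPtr in
        infoPtr.withMemoryRebound(to: integer_t.self, capacity: Int(count)) { intPtr in
            task_info(mach_task_self_, task_flavor_t(TASK_VM_INFO), intPtr, &count)
        }
    }
    guard kr == KERN_SUCCESS else { return nil }
    return info.phys_footprint
}

Printing this from the progress timer in the sketch above would show whether the footprint keeps climbing on the iPhone 5s right before the app disappears.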

Any suggestions or solutions? All help is appreciated.

ios swift crash crash-reports avassetexportsession
1 Answer

0 votes

The Xcode Organizer does not give you every crash report for your app; only Apple knows how they decide which reports are delivered and which are not. When beta testing through TestFlight, reports can sometimes take several days to arrive, and only if the user has approved sharing crash report data with developers.

Even when the app is killed by the operating system for other reasons, such as excessive memory consumption, a crash report is written on the device and delivered to Apple, provided all the requirements for delivering it are met, for example the user having approved sharing this data.

Without a report it is impossible to say what happened and why, so assuming a memory usage problem is a wild guess, and the reasoning you state for ruling it out is not valid.

You need to get the crash report and symbolicate it. Your user can find crash reports on the device by going to Settings > General > About > Diagnostics & Usage Data; he can then copy the contents, paste them into an email, for example, and send them to you.
