All videoLayers show the same track instead of different tracks


I am trying to merge many videos into one composition so that I can animate them with CGAffineTransform. All of the animations work fine on every video layer. The problem is that all of the video layers display the same track, even though I add a different track to each of the AVMutableVideoCompositionLayerInstructions.

Here is my merging code and the unexpected output video:

func newoverlay(videoURls:[URL]) {


    // 1 - Create AVMutableComposition object. This object will hold your AVMutableCompositionTrack instances.
    let mixComposition = AVMutableComposition()
    let layercomposition = AVMutableVideoComposition()
    var tracks = [videoTrack]()
    var videoLayers = [CALayer]()
    let mainInstruction = AVMutableVideoCompositionInstruction()
    var instructions = [AVMutableVideoCompositionLayerInstruction]()
    var duration:CMTime = .zero

    // 2 - Create video tracks
    for i in 0 ..< multiLayerVideoUrls.count{
        if multiLayerVideoUrls[i] == URL(fileURLWithPath: ""){
            print("empty url")

        }
        else{
            let videoAsset = AVURLAsset(url: multiLayerVideoUrls[i])

            print(multiLayerVideoUrls[i])


            print("number of videoAssets are : \(i)")
            guard let track = mixComposition.addMutableTrack(withMediaType: .video, preferredTrackID:Int32(kCMPersistentTrackID_Invalid)) else { return }
            do {
                try track.insertTimeRange(CMTimeRangeMake(start: CMTime.zero, duration: videoAsset.duration),
                                          of: videoAsset.tracks(withMediaType: .video)[0],
                                          at: CMTime.zero)
            } catch {
                print("Failed to load first track")
                return
            }
            let currentTrack: videoTrack = videoTrack(track: track, atNumber: i)
            tracks.append(currentTrack)

            duration = CMTimeAdd(duration, videoAsset.duration)
        }
    }



    let width:CGFloat = 720
    let height: CGFloat = 1280

    //bg layer
    let bglayer = CALayer()
    bglayer.frame = CGRect(x: 0, y: 0, width: width, height: height)
    bglayer.backgroundColor = videoOverView.backgroundColor?.cgColor



    mainInstruction.timeRange = CMTimeRangeMake(start: CMTime.zero, duration: duration)

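    // 3 - Build a CALayer (with its animations or gesture transform, plus an optional mask) and a layer instruction for each track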
    tracks.enumerated().forEach { index, track in
        let videolayer = CALayer()

        if keyframesAdded[index] {
            videolayer.frame = CGRect(x: 0, y: 0, width: width, height: height)

            var startScale: CGFloat = 1
            var endScale: CGFloat = 0
            var startSecond: CGFloat = 0
            var endSecond: CGFloat = 0
            var startPoint: CGPoint = CGPoint(x: 0, y: 0)
            var EndPoint: CGPoint = videolayer.position
            var startAngle: Double = 0
            var endAngle: Double = 0

            for point in animationKeyPointsArray[index] {
                endSecond = CGFloat(point.atTime.value)
                print("endSecond is \(endSecond)")
                let timeInterval: CFTimeInterval = CFTimeInterval(endSecond - startSecond)

                endScale = point.resize
                EndPoint = CGPoint(x: videolayer.position.x + point.transform.x, y: videolayer.position.y - point.transform.y)
                endAngle = Double(point.rotate)

                if startSecond == 0 {
                    startSecond = 0.01
                }

                let scaleAnimation = AnimationHelper.constructScaleAnimation(startingScale: startScale, endingScale: endScale, animationDuration: timeInterval)
                scaleAnimation.beginTime = CFTimeInterval(exactly: startSecond)!

                let MoveAnimation = AnimationHelper.constructPositionAnimation(startingPoint: startPoint, endPoint: EndPoint, animationDuration: timeInterval)
                MoveAnimation.beginTime = CFTimeInterval(exactly: startSecond)!

                let rotateAnimation = AnimationHelper.constructRotationAnimation(startValue: startAngle, endValue: endAngle, animationDuration: timeInterval)
                rotateAnimation.beginTime = CFTimeInterval(exactly: startSecond)!

                videolayer.add(scaleAnimation, forKey: nil)
                videolayer.add(MoveAnimation, forKey: nil)
                videolayer.add(rotateAnimation, forKey: nil)

                startSecond = endSecond
                startScale = endScale
                startPoint = EndPoint
                startAngle = endAngle
            }
        } else {
            print("translation is \(GestureTranslation.x),\(GestureTranslation.y)")

            let xMultiplier = width / videoOverView.frame.width
            let yMultiplier = height / videoOverView.frame.height
            let translationX = GestureTranslation.x * xMultiplier
            let translationY = -(GestureTranslation.y * yMultiplier)
            videolayer.frame = CGRect(x: translationX, y: translationY, width: width, height: height)
            print(GestureTranslation.x, GestureTranslation.y)
            videolayer.setAffineTransform(CGAffineTransform(scaleX: GestureReSize, y: GestureReSize))
        }

        if maskImageAdded {
            let maskedImage = UIImageView()
            maskedImage.frame = CGRect(x: 0, y: height/2 - width/2, width: width, height: width)
            maskedImage.image = maskImageView.image
            videolayer.mask = maskedImage.layer
        }

        videolayer.backgroundColor = UIColor.clear.cgColor
        videoLayers.append(videolayer)

        print(tracks.count)
        print(track.track)
        let firstInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: tracks[index].track)
        let bugFixTransform = CGAffineTransform(scaleX: width / track.track.naturalSize.width, y: height / track.track.naturalSize.height)
        firstInstruction.setTransform(bugFixTransform, at: .zero)
        instructions.append(firstInstruction)
        print(instructions.count)
    }



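    // 4 - Assemble the background and video layers into a parent layer and configure the video composition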
    let parentlayer = CALayer()
    parentlayer.frame = CGRect(x: 0, y: 0, width: width, height: height)
    parentlayer.addSublayer(bglayer)

    for videolayer in videoLayers {
        parentlayer.addSublayer(videolayer)

    }


    layercomposition.frameDuration = CMTimeMake(value: 1, timescale: 30)
    layercomposition.renderSize = CGSize(width: width, height: height)
    layercomposition.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayers: videoLayers, in: parentlayer)
    mainInstruction.layerInstructions = instructions
    layercomposition.instructions = [mainInstruction]
    mainInstruction.backgroundColor = UIColor.clear.cgColor

    print("composition Done")
Tags: swift, xcode, avfoundation, swift4
1 Answer

As far as I understand, your problem appears because you are inserting the time range the wrong way on this line: you always insert at CMTime.zero.

Try track.insertTimeRange(CMTimeRangeMake(start: CMTime.zero, duration: videoAsset.duration), of: videoAsset.tracks(withMediaType: .video)[0], at: CMTime.zero)
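
For illustration only, here is a minimal sketch of inserting each clip at a running offset rather than always at CMTime.zero, assuming the same mixComposition and multiLayerVideoUrls from the question; the insertTime variable is my own addition, not part of the original answer:

    // Keep a running offset so each clip lands after the previous one
    // instead of always at CMTime.zero (illustrative assumption).
    var insertTime = CMTime.zero

    for url in multiLayerVideoUrls where url != URL(fileURLWithPath: "") {
        let videoAsset = AVURLAsset(url: url)

        guard let sourceTrack = videoAsset.tracks(withMediaType: .video).first,
              let track = mixComposition.addMutableTrack(withMediaType: .video,
                                                         preferredTrackID: kCMPersistentTrackID_Invalid) else { continue }

        do {
            // Insert the whole clip, but place it at the accumulated offset.
            try track.insertTimeRange(CMTimeRange(start: .zero, duration: videoAsset.duration),
                                      of: sourceTrack,
                                      at: insertTime)
        } catch {
            print("Failed to insert \(url): \(error)")
            continue
        }

        insertTime = CMTimeAdd(insertTime, videoAsset.duration)
    }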
