AVAssetWriter is always in AVAssetWriterStatusFailed

Problem description (votes: 3, answers: 1)

My goal is to apply some filters to the camera input in real time. As a first step towards that, I am trying to get the camera input by recording video through AVFoundation and saving it to the camera roll. I tried, but for some reason the AVAssetWriter is always in AVAssetWriterStatusFailed, so the appendSampleBuffer: method always fails. Where is my mistake? Can someone help me?

Thanks!

ViewController.h

#import <UIKit/UIKit.h>
#import <AssetsLibrary/AssetsLibrary.h>
#import <AVFoundation/AVFoundation.h>

@interface ViewController : UIViewController <AVCaptureVideoDataOutputSampleBufferDelegate>
@property (weak, nonatomic) IBOutlet UIImageView *imageView;
@property (weak, nonatomic) IBOutlet UIButton *startRecButton;
@property (weak, nonatomic) IBOutlet UIButton *stopRecButton;
@property (weak, nonatomic) IBOutlet UIButton *startVideocamera;
- (IBAction)startRecordingButtonPressed:(UIButton *)sender;
- (IBAction)stopRecordingButtonPressed:(UIButton *)sender;
- (IBAction)startVideocameraButtonPressed:(UIButton *)sender;

@end

ViewController.m

#import "ViewController.h"
@interface ViewController ()
@property (strong, nonatomic) AVAssetWriter* videoAssetWriter;
@property (strong, nonatomic) AVAssetWriterInput* videoAssetWriterInput;
@property (strong, nonatomic) NSURL* temporaryVideoURL;
@end


@implementation ViewController

#pragma mark - Variables
@synthesize imageView;
@synthesize videoAssetWriter;
@synthesize videoAssetWriterInput;
@synthesize temporaryVideoURL;
//initCaptureSession Method 
AVCaptureSession* captureSession;
AVCaptureDevice* videoCaptureDevice;
AVCaptureDeviceInput* videoCaptureDeviceInput;
AVCaptureVideoDataOutput* videoDataOutput;
dispatch_queue_t videoQueue;

//captureOutput:didOutputSampleBuffer Method
CMSampleBufferRef currentSampleBuffer;

BOOL isRecording;

//newPixelBufferFromCGImage Method
CGAffineTransform frameTransform;
CGSize frameSize;


#pragma mark - User Interface

- (void)viewDidLoad
{
    [super viewDidLoad];
    // Do any additional setup after loading the view, typically from a nib.
}

- (void)didReceiveMemoryWarning
{
    [super didReceiveMemoryWarning];
    // Dispose of any resources that can be recreated.
}

- (IBAction)startRecordingButtonPressed:(UIButton *)sender {
    [self initWriter];
}

- (IBAction)stopRecordingButtonPressed:(UIButton *)sender {
    [self stopWriter];
}

- (IBAction)startVideocameraButtonPressed:(UIButton *)sender {
    [self initCaptureSession];
}



#pragma mark - Capture Utils

-(void) initCaptureSession{

    captureSession = [[AVCaptureSession alloc] init];
    [captureSession setSessionPreset:AVCaptureSessionPreset1280x720];

    videoCaptureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    NSError* error;
    videoCaptureDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:videoCaptureDevice error:&error];
    if([captureSession canAddInput:videoCaptureDeviceInput]){
        [captureSession addInput:videoCaptureDeviceInput];
    }
    videoDataOutput = [[AVCaptureVideoDataOutput alloc]init];
    [captureSession addOutput:videoDataOutput];
    videoQueue = dispatch_queue_create("videoQueue", NULL);
    [videoDataOutput setAlwaysDiscardsLateVideoFrames:NO];
    [videoDataOutput setSampleBufferDelegate:self queue:videoQueue];
    NSString* key = (NSString*)kCVPixelBufferPixelFormatTypeKey;
    NSNumber* value = [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA];
    NSDictionary* videoSettings = [NSDictionary dictionaryWithObject:value forKey:key];
    [videoDataOutput setVideoSettings:videoSettings];

    [captureSession startRunning]; 
}


-(void) captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection{

    currentSampleBuffer = sampleBuffer;
    CGImageRef image = [self imageFromSampleBuffer:currentSampleBuffer];
    dispatch_sync(dispatch_get_main_queue(),
                  ^{
                      if(!isRecording){
                          imageView.image = [UIImage imageWithCGImage: image scale:1.0 orientation:UIImageOrientationRight];
                      }
                      else{
                          imageView.image = [UIImage imageWithCGImage: image scale:1.0 orientation:UIImageOrientationRight];
            //              [videoAssetWriterInput appendSampleBuffer:currentSampleBuffer];
                          if (![videoAssetWriterInput appendSampleBuffer:sampleBuffer]) {
                              [self showError:[videoAssetWriter error]];
                          }
                          NSLog(@"%ld", (long)[videoAssetWriter status]);
                      }
                });
    CGImageRelease(image);

}

-(void)captureOutput:(AVCaptureOutput *)captureOutput didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection{

    NSLog(@"didDropSampleBuffer CALLED");

}



#pragma mark - Writer Utils

-(void) initWriter{ 
    temporaryVideoURL = [NSURL fileURLWithPath:[NSString stringWithFormat:@"%@%@", NSTemporaryDirectory(), @"Movie.MOV"]];
    NSLog(@"%@", temporaryVideoURL);
    NSError* error;
    videoAssetWriter = [[AVAssetWriter alloc] initWithURL:temporaryVideoURL fileType:AVFileTypeQuickTimeMovie error:&error];

    NSParameterAssert(videoAssetWriter);
    NSLog(@"%ld", (long)[videoAssetWriter status]);

    NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                              AVVideoCodecH264, AVVideoCodecKey,
                               [NSNumber numberWithInt:1280], AVVideoWidthKey,
                               [NSNumber numberWithInt:720], AVVideoHeightKey,
                               nil];
    videoAssetWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings];

    NSParameterAssert(videoAssetWriterInput);
    NSLog(@"%ld", (long)[videoAssetWriter status]);

    if([videoAssetWriter canAddInput:videoAssetWriterInput]){
        [videoAssetWriter addInput:videoAssetWriterInput];
    }

    isRecording = YES;
    [videoAssetWriter startWriting];
    NSLog(@"%ld", (long)[videoAssetWriter status]);
}

-(void) stopWriter{
    [videoAssetWriterInput markAsFinished];
    [videoAssetWriter finishWritingWithCompletionHandler:^{

        NSLog(@"finishWritingWithCompletionHandler CALLED");
        isRecording = NO;
        [self saveVideoToCameraRoll];
        videoAssetWriter = nil;
        videoAssetWriterInput = nil;

    }];
//    [videoAssetWriter finishWriting];
//    isRecording = NO;
//    [self saveVideoToCameraRoll];


}

-(void) saveVideoToCameraRoll{

    ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
    [library writeVideoAtPathToSavedPhotosAlbum:temporaryVideoURL completionBlock:^(NSURL *assetURL, NSError *error){
        NSLog(@"ASSET URL: %@", [assetURL path]);

        if(error) {
            NSLog(@"CameraViewController: Error on saving movie : %@ {imagePickerController}", error);
        }
        else {
            NSLog(@"Video salvato correttamente in URL: %@", assetURL);
            BOOL fileExists = [[NSFileManager defaultManager] fileExistsAtPath:[temporaryVideoURL path]];
            NSLog(@"IL FILE ESISTE: %hhd", fileExists);
            NSLog(@"E PESA: %@", [[[NSFileManager defaultManager] attributesOfItemAtPath:  [temporaryVideoURL path] error:&error] objectForKey:NSFileSize]);
        }
    }];
}
ios7 avfoundation avassetwriter avcapturedevice cmsamplebufferref
1 Answer

0 votes

This error is caused by a file with the same name already existing at the output URL.

In my case, I was testing with a static file name, which caused the error. Making it unique, e.g. "\(Date().timeIntervalSince1970).mp4", fixed it.
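For reference, here is a minimal sketch of that fix applied to the question's initWriter (assuming the same temporaryVideoURL property and temp-directory path). Either option on its own is enough; the point is that the writer's output URL must not point at a file that already exists:

-(void) initWriter{
    // Option 1: delete any leftover recording at the static path before creating the writer.
    NSString* staticPath = [NSTemporaryDirectory() stringByAppendingPathComponent:@"Movie.MOV"];
    if([[NSFileManager defaultManager] fileExistsAtPath:staticPath]){
        [[NSFileManager defaultManager] removeItemAtPath:staticPath error:nil];
    }

    // Option 2: build a unique file name per recording (Objective-C equivalent of the
    // Swift "\(Date().timeIntervalSince1970).mp4" suggestion in the answer).
    NSString* uniqueName = [NSString stringWithFormat:@"%f.mov", [[NSDate date] timeIntervalSince1970]];
    temporaryVideoURL = [NSURL fileURLWithPath:[NSTemporaryDirectory() stringByAppendingPathComponent:uniqueName]];

    NSError* error;
    videoAssetWriter = [[AVAssetWriter alloc] initWithURL:temporaryVideoURL fileType:AVFileTypeQuickTimeMovie error:&error];
    // ... rest of initWriter (videoSettings, assetWriterInput, startWriting) unchanged from the question ...
}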
