用 UIImage 数组创建视频并保存到 iPhone 媒体库(ALAssetsLibrary + AVFoundation)

问题出在把视频保存到 iPhone 媒体库这一步。我有一个 UIImage 数组,以及两个按钮:"convertToVideo" 和 "saveToiPhoneLib"。

- (IBAction)convertToVideo {
    NSArray *paths = NSSearchPathForDirectoriesInDomains(NSCachesDirectory, NSUserDomainMask, YES);
    NSString *documentsDirectory = ([paths count] > 0) ? [paths objectAtIndex:0] : nil;
    NSString *savedVideoPath = [documentsDirectory stringByAppendingPathComponent:@"videoOutput"];
    printf("\n\n\n video file == %s\n\n\n", [savedVideoPath UTF8String]);
    [self writeImageAsMovie:imageArray toPath:savedVideoPath size:self.view.frame.size duration:3];
}

在这里,我把 imageArray 和 savedVideoPath 传给下面的函数:

- (void)writeImageAsMovie:(NSArray *)array toPath:(NSString *)path size:(CGSize)size duration:(int)duration {
    NSError *error = nil;
    AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:path]
                                                           fileType:AVFileTypeQuickTimeMovie
                                                              error:&error];
    NSParameterAssert(videoWriter);

    NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   AVVideoCodecH264, AVVideoCodecKey,
                                   [NSNumber numberWithInt:size.width], AVVideoWidthKey,
                                   [NSNumber numberWithInt:size.height], AVVideoHeightKey,
                                   nil];
    AVAssetWriterInput *writerInput = [[AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
                                                                          outputSettings:videoSettings] retain];

    AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor
        assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput
                                   sourcePixelBufferAttributes:nil];
    NSParameterAssert(writerInput);
    NSParameterAssert([videoWriter canAddInput:writerInput]);
    [videoWriter addInput:writerInput];

    // 启动一个会话:
    [videoWriter startWriting];
    [videoWriter startSessionAtSourceTime:kCMTimeZero];

    CVPixelBufferRef buffer = NULL;

    // 将 UIImage 转换为 CGImage
    buffer = [self pixelBufferFromCGImage:[[array objectAtIndex:0] CGImage]];
    [adaptor appendPixelBuffer:buffer withPresentationTime:kCMTimeZero];
    // 写入样本:......
    // 结束本次会话:
    [writerInput markAsFinished];
    [videoWriter finishWriting];
}

这里生成 CVPixelBufferRef:

- (CVPixelBufferRef)pixelBufferFromCGImage:(CGImageRef)image {
    NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey,
                             nil];
    CVPixelBufferRef pxbuffer = NULL;
    CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault,
                                          self.view.frame.size.width,
                                          self.view.frame.size.height,
                                          kCVPixelFormatType_32ARGB,
                                          (CFDictionaryRef)options,
                                          &pxbuffer);
    NSParameterAssert(status == kCVReturnSuccess && pxbuffer != NULL);

    CVPixelBufferLockBaseAddress(pxbuffer, 0);
    void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
    NSParameterAssert(pxdata != NULL);

    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(pxdata,
                                                 self.view.frame.size.width,
                                                 self.view.frame.size.height,
                                                 8, 4 * self.view.frame.size.width,
                                                 rgbColorSpace,
                                                 kCGImageAlphaNoneSkipFirst);
    NSParameterAssert(context);
    CGContextConcatCTM(context, CGAffineTransformMakeRotation(0));
    CGContextDrawImage(context, CGRectMake(0, 0, CGImageGetWidth(image), CGImageGetHeight(image)), image);
    CGColorSpaceRelease(rgbColorSpace);
    CGContextRelease(context);

    CVPixelBufferUnlockBaseAddress(pxbuffer, 0);
    return pxbuffer;
}

保存到iPhone库

- (IBAction)saveToiPhoneLib {
    NSArray *paths = NSSearchPathForDirectoriesInDomains(NSCachesDirectory, NSUserDomainMask, YES);
    NSString *basePath = ([paths count] > 0) ? [paths objectAtIndex:0] : nil;
    NSString *getImagePath = [basePath stringByAppendingPathComponent:@"videoOutput"];
    printf("\n\n\n video file == %s\n\n\n", [getImagePath UTF8String]);
    UISaveVideoAtPathToSavedPhotosAlbum(getImagePath, self, @selector(video:didFinishSavingWithError:contextInfo:), nil);
}

- (void)video:(NSString *)videoPath didFinishSavingWithError:(NSError *)error contextInfo:(void *)contextInfo {
    NSLog(@"Finished saving video with error: %@", error);
}

但是在保存时我得到如下错误信息:

Error Domain=ALAssetsLibraryErrorDomain Code=-3302 "Invalid data" UserInfo=0x1d59f0 {NSLocalizedFailureReason=There was a problem writing this asset because the data is invalid and cannot be viewed or played., NSLocalizedRecoverySuggestion=Try with different data, NSLocalizedDescription=Invalid data}

请指出我的错误所在。提前致谢。

--------------解决方案-------------

// Builds Documents/test_output.mp4 from every PNG bundled with the app,
// holding each image on screen for `numberOfSecondsPerFrame` seconds.
// Fixes over the original: the per-frame CVPixelBufferRef is now released
// (the original leaked one buffer per image), the image count is logged
// after the array is populated (it previously always logged 0), and the
// NSUInteger values use correct format specifiers.
-(void)convertimagetoVideo
{
    NSError *error = nil;

    // Remove any previous output; AVAssetWriter refuses to overwrite files.
    NSFileManager *fileMgr = [NSFileManager defaultManager];
    NSString *documentsDirectory = [NSHomeDirectory()
                                    stringByAppendingPathComponent:@"Documents"];
    NSString *videoOutputPath = [documentsDirectory stringByAppendingPathComponent:@"test_output.mp4"];
    if ([fileMgr removeItemAtPath:videoOutputPath error:&error] != YES)
        NSLog(@"Unable to delete file: %@", [error localizedDescription]);

    // Output frame size — must match what -pixelBufferFromCGImage: renders.
    CGSize imageSize = CGSizeMake(400, 200);
    NSUInteger fps = 30;

    // Load every bundled PNG as a UIImage frame.
    NSArray *imagePaths = [[NSBundle mainBundle] pathsForResourcesOfType:@"png" inDirectory:nil];
    NSMutableArray *imageArray = [[NSMutableArray alloc] initWithCapacity:imagePaths.count];
    for (NSString *path in imagePaths)
    {
        [imageArray addObject:[UIImage imageWithContentsOfFile:path]];
    }
    // Log AFTER populating the array (the original logged the count while
    // the array was still empty, with a mismatched %i specifier).
    NSLog(@"-->imageArray.count= %lu", (unsigned long)imageArray.count);

    NSLog(@"Start building video from defined frames.");

    AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:
                                  [NSURL fileURLWithPath:videoOutputPath]
                                                         fileType:AVFileTypeQuickTimeMovie
                                                            error:&error];
    NSParameterAssert(videoWriter);

    NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   AVVideoCodecH264, AVVideoCodecKey,
                                   [NSNumber numberWithInt:imageSize.width], AVVideoWidthKey,
                                   [NSNumber numberWithInt:imageSize.height], AVVideoHeightKey,
                                   nil];

    AVAssetWriterInput *videoWriterInput = [AVAssetWriterInput
                                            assetWriterInputWithMediaType:AVMediaTypeVideo
                                            outputSettings:videoSettings];

    AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor
                                                     assetWriterInputPixelBufferAdaptorWithAssetWriterInput:videoWriterInput
                                                     sourcePixelBufferAttributes:nil];

    NSParameterAssert(videoWriterInput);
    NSParameterAssert([videoWriter canAddInput:videoWriterInput]);
    videoWriterInput.expectsMediaDataInRealTime = YES;
    [videoWriter addInput:videoWriterInput];

    // Start a session:
    [videoWriter startWriting];
    [videoWriter startSessionAtSourceTime:kCMTimeZero];

    CVPixelBufferRef buffer = NULL;

    int frameCount = 0;
    double numberOfSecondsPerFrame = 6;
    // Presentation timestamps are expressed in 1/fps units, so each frame
    // advances the clock by fps * seconds-per-frame ticks.
    double frameDuration = fps * numberOfSecondsPerFrame;

    NSLog(@"**************************************************");
    for (UIImage *img in imageArray)
    {
        buffer = [self pixelBufferFromCGImage:[img CGImage]];

        BOOL append_ok = NO;
        int j = 0;
        // Retry for up to ~3s while the writer input catches up.
        while (!append_ok && j < 30) {
            if (adaptor.assetWriterInput.readyForMoreMediaData) {
                NSLog(@"Processing video frame (%d,%lu)", frameCount, (unsigned long)[imageArray count]);

                CMTime frameTime = CMTimeMake((int64_t)(frameCount * frameDuration), (int32_t)fps);
                NSLog(@"seconds = %f, %lu, %d", CMTimeGetSeconds(frameTime), (unsigned long)fps, j);
                append_ok = [adaptor appendPixelBuffer:buffer withPresentationTime:frameTime];
                if (!append_ok) {
                    NSError *writerError = videoWriter.error;
                    if (writerError != nil) {
                        NSLog(@"Unresolved error %@,%@.", writerError, [writerError userInfo]);
                    }
                }
            }
            else {
                printf("adaptor not ready %d, %d\n", frameCount, j);
                [NSThread sleepForTimeInterval:0.1];
            }
            j++;
        }
        if (!append_ok) {
            printf("error appending image %d times %d\n, with error.", frameCount, j);
        }
        // Balance the +1 retained buffer returned by -pixelBufferFromCGImage:
        // (a Core Foundation "Create" return) — the original leaked it.
        if (buffer != NULL) {
            CVPixelBufferRelease(buffer);
            buffer = NULL;
        }
        frameCount++;
    }
    NSLog(@"**************************************************");

    // Finish the session. NOTE(review): -finishWriting is synchronous but
    // deprecated since iOS 6; kept here because the file relies on the
    // output existing immediately after this method returns.
    [videoWriterInput markAsFinished];
    [videoWriter finishWriting];
    NSLog(@"Write Ended");

}

// Muxes the silent movie written by -convertimagetoVideo together with the
// bundled "30secs.mp3" audio track, exports the composition as
// Documents/final_video.mp4, and on success hands it to -saveVideoToAlbum:.
// Fixes over the original: the completion handler now checks the export
// status instead of saving unconditionally (saving a failed/partial export
// is exactly what produces ALAssetsLibrary "invalid data" errors), and the
// output type uses the AVFileTypeQuickTimeMovie constant rather than the
// raw UTI string.
-(void)CompileFilestomakeVideo
{
    NSString *documentsDirectory = [NSHomeDirectory()
                                    stringByAppendingPathComponent:@"Documents"];
    // Movie produced by the previous step.
    NSString *videoOutputPath = [documentsDirectory stringByAppendingPathComponent:@"test_output.mp4"];

    AVMutableComposition *mixComposition = [AVMutableComposition composition];

    // Audio input file shipped in the app bundle.
    NSString *bundleDirectory = [[NSBundle mainBundle] bundlePath];
    NSString *audio_inputFilePath = [bundleDirectory stringByAppendingPathComponent:@"30secs.mp3"];
    NSURL *audio_inputFileUrl = [NSURL fileURLWithPath:audio_inputFilePath];

    NSURL *video_inputFileUrl = [NSURL fileURLWithPath:videoOutputPath];

    // Final output; remove any stale copy so the export session can create it.
    NSString *outputFilePath = [documentsDirectory stringByAppendingPathComponent:@"final_video.mp4"];
    NSURL *outputFileUrl = [NSURL fileURLWithPath:outputFilePath];

    if ([[NSFileManager defaultManager] fileExistsAtPath:outputFilePath])
        [[NSFileManager defaultManager] removeItemAtPath:outputFilePath error:nil];

    CMTime nextClipStartTime = kCMTimeZero;

    // Video track: insert the entire silent movie at t=0.
    AVURLAsset *videoAsset = [[AVURLAsset alloc] initWithURL:video_inputFileUrl options:nil];
    CMTimeRange video_timeRange = CMTimeRangeMake(kCMTimeZero, videoAsset.duration);
    AVMutableCompositionTrack *a_compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    [a_compositionVideoTrack insertTimeRange:video_timeRange ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:nextClipStartTime error:nil];

    // Audio track: insert the entire song at t=0.
    AVURLAsset *audioAsset = [[AVURLAsset alloc] initWithURL:audio_inputFileUrl options:nil];
    CMTimeRange audio_timeRange = CMTimeRangeMake(kCMTimeZero, audioAsset.duration);
    AVMutableCompositionTrack *b_compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
    [b_compositionAudioTrack insertTimeRange:audio_timeRange ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:nextClipStartTime error:nil];

    AVAssetExportSession *_assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
    _assetExport.outputFileType = AVFileTypeQuickTimeMovie;
    _assetExport.outputURL = outputFileUrl;

    [_assetExport exportAsynchronouslyWithCompletionHandler:^(void) {
        // Only save once the export has actually finished and succeeded —
        // the original logged "DONE" and saved before/regardless of the
        // asynchronous export's outcome.
        if (_assetExport.status == AVAssetExportSessionStatusCompleted) {
            NSLog(@"DONE.....outputFilePath--->%@", outputFilePath);
            [self saveVideoToAlbum:outputFilePath];
        } else {
            NSLog(@"Export failed (%ld): %@", (long)_assetExport.status, _assetExport.error);
        }
    }];
}
// Saves the video at `path` to the Saved Photos album if the system deems it
// compatible. The original did nothing at all when the compatibility check
// failed, which made "invalid data" situations impossible to diagnose — now
// the failure is logged.
- (void) saveVideoToAlbum:(NSString *)path {

    NSLog(@"saveVideoToAlbum");

    if (UIVideoAtPathIsCompatibleWithSavedPhotosAlbum(path)) {
        UISaveVideoAtPathToSavedPhotosAlbum(path, self, @selector(video:didFinishSavingWithError:contextInfo:), nil);
    } else {
        // Surface the failure instead of silently dropping the save.
        NSLog(@"Video at %@ is not compatible with the Saved Photos album", path);
    }
}

// Completion callback for UISaveVideoAtPathToSavedPhotosAlbum: logs the
// outcome of the save (error details on failure, " OK" on success).
- (void)video:(NSString *)videoPath didFinishSavingWithError:(NSError *)error contextInfo:(void *)contextInfo {
    if (error != nil) {
        NSLog(@"error: %@", error);
        return;
    }
    NSLog(@" OK");
}

////////////////////////
// Renders `image` into a newly created 400x200 ARGB pixel buffer sized to
// match the writer's AVVideoWidth/HeightKey settings.
// Returns a +1 retained CVPixelBufferRef that the CALLER must release with
// CVPixelBufferRelease, or NULL on failure.
// Fixes over the original: on CVPixelBufferCreate failure it returned the
// NULL buffer and crashed in CVPixelBufferLockBaseAddress; the bitmap
// context now uses the buffer's actual bytes-per-row (Core Video may pad
// rows, so assuming 4*width can shear every drawn row); the no-op
// zero-angle CTM rotation was removed.
- (CVPixelBufferRef) pixelBufferFromCGImage: (CGImageRef) image {

    CGSize size = CGSizeMake(400, 200);

    NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey,
                             nil];
    CVPixelBufferRef pxbuffer = NULL;

    CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault,
                                          size.width,
                                          size.height,
                                          kCVPixelFormatType_32ARGB,
                                          (__bridge CFDictionaryRef) options,
                                          &pxbuffer);
    if (status != kCVReturnSuccess || pxbuffer == NULL) {
        NSLog(@"Failed to create pixel buffer");
        // Original fell through here and crashed locking a NULL buffer.
        return NULL;
    }

    CVPixelBufferLockBaseAddress(pxbuffer, 0);
    void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);

    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
    // Use the buffer's real row stride rather than assuming 4*width.
    CGContextRef context = CGBitmapContextCreate(pxdata, size.width,
                                                 size.height, 8,
                                                 CVPixelBufferGetBytesPerRow(pxbuffer),
                                                 rgbColorSpace,
                                                 kCGImageAlphaPremultipliedFirst);
    if (context == NULL) {
        NSLog(@"Failed to create pixel buffer");
        CGColorSpaceRelease(rgbColorSpace);
        CVPixelBufferUnlockBaseAddress(pxbuffer, 0);
        CVPixelBufferRelease(pxbuffer);
        return NULL;
    }
    CGContextDrawImage(context, CGRectMake(0, 0, CGImageGetWidth(image),
                                           CGImageGetHeight(image)), image);
    CGColorSpaceRelease(rgbColorSpace);
    CGContextRelease(context);

    CVPixelBufferUnlockBaseAddress(pxbuffer, 0);

    return pxbuffer;
}

下面使用代码

// Creates a slideshow movie from the images behind self.arrImageDataDict,
// optionally merges the user's saved audio track ("mySong.m4a"), exports the
// combined composition to Documents/Slideshow_video.mov and writes it to the
// photo library (device only). Manual retain/release code — note the
// -release and CVPixelBufferPoolRelease calls below; do not convert to ARC
// piecemeal.
// NOTE(review): on iOS >= 6 the writer is closed with the *asynchronous*
// -finishWritingWithCompletionHandler:, yet execution falls straight through
// to building the AVMutableComposition from that output file; the
// sleepForTimeInterval: calls below only paper over this race, so the
// composition may read an incomplete movie. Confirm before relying on this.
- (void)creatingVideo {

//get full path of video file from documents directory
NSError *error = nil;
NSFileManager *fileMgr = [NSFileManager defaultManager];
NSString *documentsDirectory = [self applicationDocumentsDirectory];
NSString *videoOutputPath = [documentsDirectory stringByAppendingPathComponent:@"test_output.mov"];

// get rid of existing mp4 if exists...
if ([fileMgr removeItemAtPath:videoOutputPath error:&error] != YES)
NSLog(@"Unable to delete file it does not exits on path");

//size of the video frame
CGSize imageSize = CGSizeMake(640,480);
//CGSize imageSize = CGSizeMake(1280, 720);

//frame per second
NSUInteger fps = 30;

NSLog(@"Start building video from defined frames.");

//AvAsset library to create video of images
AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:videoOutputPath] fileType:AVFileTypeQuickTimeMovie error:&error];
NSParameterAssert(videoWriter);
NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys: AVVideoCodecH264, AVVideoCodecKey, [NSNumber numberWithInt:imageSize.width], AVVideoWidthKey,[NSNumber numberWithInt:imageSize.height], AVVideoHeightKey,nil];

AVAssetWriterInput* videoWriterInput = [[AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings] retain];
// Explicit ARGB buffer format so the adaptor's pool matches what
// -pixelBufferFromCGImage: produces.
NSDictionary *bufferAttributes = [NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithInt:kCVPixelFormatType_32ARGB], kCVPixelBufferPixelFormatTypeKey, nil];
AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:videoWriterInput sourcePixelBufferAttributes:bufferAttributes];

NSParameterAssert(videoWriterInput);
NSParameterAssert([videoWriter canAddInput:videoWriterInput]);

videoWriterInput.expectsMediaDataInRealTime = YES;
[videoWriter addInput:videoWriterInput];

//Start a session:
[videoWriter startWriting];
[videoWriter startSessionAtSourceTime:kCMTimeZero];

CVPixelBufferRef buffer = NULL;

//frameCount.
int frameCount = 0;
double frameDuration;
// Seconds each image stays on screen, configured via the app delegate.
double numberOfSecondsPerFrame = appDelegate.delaySecond;

NSLog(@"**************************video creation started********************************");
for (int i = 0; i<[self.arrImageDataDict count]; i++) {
{
// Pool drains the per-iteration UIImage/CGImage temporaries.
@autoreleasepool{
UIImage *img1 = nil;

img1 = [self getImageForVideoCreation:i];

buffer = [self pixelBufferFromCGImage: [img1 CGImage]];
if (buffer == NULL) {
NSLog(@"Pixel buffer not created");
} else {
BOOL append_ok = NO;
int j = 0;
// Retry for up to ~2s while the writer input catches up.
while (!append_ok && j < 20) {
if (adaptor.assetWriterInput.readyForMoreMediaData) {
//print out status:
NSLog(@"Processing video frame (%d,%d) delay %f",frameCount,[self.arrImageDataDict count],numberOfSecondsPerFrame);
// Timestamps are in 1/fps units: frame i is presented at
// i * fps * secondsPerFrame ticks.
frameDuration = fps * numberOfSecondsPerFrame;
CMTime frameTime = CMTimeMake(frameCount*frameDuration,(int32_t) fps);
append_ok = [adaptor appendPixelBuffer:buffer withPresentationTime:frameTime];
if(!append_ok){
NSError *error = videoWriter.error;
if(error!=nil) {
NSLog(@"Unresolved error %@,%@.", error, [error userInfo]);
}
}
} else {
printf("adaptor not ready %d, %d\n", frameCount, j);
[NSThread sleepForTimeInterval:0.1];
}
j++;
}
if (!append_ok) {
printf("error appending image %d times %d\n, with error.", frameCount, j);
}
frameCount++;
// Balance the +1 buffer returned by -pixelBufferFromCGImage:.
CVPixelBufferRelease(buffer);
buffer = nil;
}
}
}
}

//Finish the session:
[videoWriterInput markAsFinished];

//get the iOS version of the device
float version = [[[UIDevice currentDevice] systemVersion] floatValue];
if (version < 6.0)
{
// Synchronous finish: the file is complete when this returns.
[videoWriter finishWriting];
//NSLog (@"finished writing iOS version:%f",version);

} else {
// Asynchronous finish: returns immediately — see NOTE(review) above.
[videoWriter finishWritingWithCompletionHandler:^(){
//NSLog (@"finished writing iOS version:%f",version);
}];
}

CVPixelBufferPoolRelease(adaptor.pixelBufferPool);
[videoWriter release];
[videoWriterInput release];

//OK now add an audio file to move file
AVMutableComposition* mixComposition = [AVMutableComposition composition];

//Get the saved audio song path to merge it in video
NSURL *audio_inputFileUrl ;
NSString *filePath = [self applicationDocumentsDirectory];
NSString *outputFilePath1 = [filePath stringByAppendingPathComponent:@"mySong.m4a"];
audio_inputFileUrl = [[NSURL alloc]initFileURLWithPath:outputFilePath1];

// this is the video file that was just written above
NSURL *video_inputFileUrl = [[NSURL alloc]initFileURLWithPath:videoOutputPath];;

// Crude wait for the asynchronous finishWriting above to complete.
[NSThread sleepForTimeInterval:2.0];

// create the final video output file as MOV file - may need to be MP4, but this works so far...
NSString *outputFilePath = [documentsDirectory stringByAppendingPathComponent:@"Slideshow_video.mov"];
NSURL *outputFileUrl = [[NSURL alloc]initFileURLWithPath:outputFilePath];

if ([[NSFileManager defaultManager] fileExistsAtPath:outputFilePath])
[[NSFileManager defaultManager] removeItemAtPath:outputFilePath error:nil];

//AVURLAsset get video without audio
AVURLAsset* videoAsset = [[AVURLAsset alloc]initWithURL:video_inputFileUrl options:nil];
CMTimeRange video_timeRange = CMTimeRangeMake(kCMTimeZero,videoAsset.duration);
AVMutableCompositionTrack *a_compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
[a_compositionVideoTrack insertTimeRange:video_timeRange ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:kCMTimeZero error:nil];
[videoAsset release];

[NSThread sleepForTimeInterval:3.0];

//If audio song merged
if (![self.appDelegate.musicFilePath isEqualToString:@"Not set"])
{
//*************************make sure all exception is off***********************
AVURLAsset* audioAsset = [[AVURLAsset alloc]initWithURL:audio_inputFileUrl options:nil];
CMTimeRange audio_timeRange = CMTimeRangeMake(kCMTimeZero, audioAsset.duration);
AVMutableCompositionTrack *b_compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];

// Guard against an audio file that contains no audio tracks.
if (![audioAsset tracksWithMediaType:AVMediaTypeAudio].count == 0) {
[b_compositionAudioTrack insertTimeRange:audio_timeRange ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:kCMTimeZero error:nil];
}
[audioAsset release];
}

// Cleanup, in both success and fail cases
[audio_inputFileUrl release];
[video_inputFileUrl release];

[NSThread sleepForTimeInterval:0.1];

//AVAssetExportSession to export the video
AVAssetExportSession* _assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
_assetExport.outputFileType = AVFileTypeQuickTimeMovie;
_assetExport.outputURL = outputFileUrl;

[_assetExport exportAsynchronouslyWithCompletionHandler:^(void){
switch (_assetExport.status) {
case AVAssetExportSessionStatusCompleted:
// Photo-library writes are skipped on the simulator.
#if !TARGET_IPHONE_SIMULATOR
[self writeVideoToPhotoLibrary:outputFileUrl];
#endif
[self RemoveSlideshowImagesInTemp];
[self removeAudioFileFromDocumentsdirectory:outputFilePath1];
[self removeAudioFileFromDocumentsdirectory:videoOutputPath];
[outputFileUrl release];
[_assetExport release];
//NSLog(@"AVAssetExportSessionStatusCompleted");
// UI updates (alert dismissal) must happen on the main queue.
dispatch_async(dispatch_get_main_queue(), ^{
if (alrtCreatingVideo && alrtCreatingVideo.visible) {
[alrtCreatingVideo dismissWithClickedButtonIndex:alrtCreatingVideo.firstOtherButtonIndex animated:YES];
[databaseObj isVideoCreated:appDelegate.pro_id];
[self performSelector:@selector(successAlertView) withObject:nil afterDelay:0.0];
}
});
break;
case AVAssetExportSessionStatusFailed:
NSLog(@"Failed:%@",_assetExport.error);
break;
case AVAssetExportSessionStatusCancelled:
NSLog(@"Canceled:%@",_assetExport.error);
break;
default:
break;
}
}];
}

//writeVideoToPhotoLibrary
// Saves the exported movie at `url` into the device's photo library via
// ALAssetsLibrary. The original failure log discarded the NSError, hiding
// the reason (e.g. ALAssetsLibraryErrorDomain -3302 "invalid data"); the
// error is now included in the message.
- (void)writeVideoToPhotoLibrary:(NSURL *)url
{
    ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];

    [library writeVideoAtPathToSavedPhotosAlbum:url completionBlock:^(NSURL *assetURL, NSError *error){
        if (error) {
            NSLog(@"Video could not be saved: %@", error);
        }
    }];
    // MRC: balance the alloc/init above (the completion block retains the
    // library until it fires).
    [library release];
}

代码太多,无法逐一替你检查错误。请依次确认:导出会话能够启动;确实为每张图片拿到了像素缓冲区;writer 已准备好接收更多数据;缓冲区追加时没有报错;导出会话成功完成;输出的影片文件确实存在并且包含数据。只有这些都成立,才可以尝试保存到系统相册。沿途检查所有可用的错误信息,这样你能在问题第一次出现时就定位它。(另外,单纯从网上复制代码拼在一起,对 AV 编程来说是行不通的。)

是的,我有同样的错误:

Error Domain=AVFoundationErrorDomain Code=-11823 "Cannot Save" UserInfo=0x193ce0 {NSLocalizedRecoverySuggestion=Try saving again., NSUnderlyingError=0x179e40 "The operation couldn’t be completed. (OSStatus error -12412.)", NSLocalizedDescription=Cannot Save}

但只能在模拟器,当我跑在设备上,保存到图片库的工作就好了。

分类:iOS4的 时间:2014-12-19 人气:12
分享到:

相关文章

Copyright (C) 55228885.com, All Rights Reserved.

55228885 版权所有 京ICP备15002868号

processed in 0.380 (s). 10 q(s)