iOS AVFoundation - 将视频转换为60 fps的图像

5
我试图将整个视频转换为以每秒60帧的速率生成的图像序列,这意味着每秒会生成60张图像...
为此,我利用了AVAssetImageGenerator和generateCGImagesAsynchronouslyForTimes方法...
虽然事情进展顺利,但是我在批量处理执行时间方面遇到了严重的性能问题(大约需要5分钟处理13秒钟的视频)...
此外,在尺寸为CGSizeMake(512, 324)及以上时,我遇到了崩溃问题...
有没有人已经有过这种处理的经验,并知道如何减少执行时间以及能够以更高的分辨率提取图像?
以下是我正在测试的代码...
// Extract frames from the captured movie at 60 fps and write each one as a PNG.
NSURL *movieURL = [NSURL fileURLWithPath:getCaptureMoviePath()];

AVURLAsset *asset = [[AVURLAsset alloc] initWithURL:movieURL options:nil];
AVAssetImageGenerator *generator = [[AVAssetImageGenerator alloc] initWithAsset:asset];
generator.appliesPreferredTrackTransform = TRUE;
// Zero tolerance forces exact-frame decoding. Accurate, but it is the main
// cost driver: every request seeks and decodes from the nearest keyframe.
generator.requestedTimeToleranceAfter = kCMTimeZero;
generator.requestedTimeToleranceBefore = kCMTimeZero;

// Step the media timeline in ticks so that we get 60 frames per second of
// video. The old code stepped 2 ticks at a time; at the common timescale of
// 600 that requested 300 images per second of media — five times too many —
// which is where both the runtime blowup and the memory pressure came from.
int64_t ticksPerFrame = asset.duration.timescale / 60;
if (ticksPerFrame < 1) ticksPerFrame = 1;  // guard tiny/unusual timescales
NSMutableArray *thumbTimes = [NSMutableArray array];
for (int64_t t = 0; t < asset.duration.value; t += ticksPerFrame) {
    [thumbTimes addObject:[NSValue valueWithCMTime:CMTimeMake(t, asset.duration.timescale)]];
}
// %lu matches NSUInteger; the original %d is wrong on 64-bit.
NSLog(@"thumbTimes array contains %lu objects", (unsigned long)[thumbTimes count]);
// AVAssetImageGenerator retains the asset it was initialized with, so we may
// drop our own (MRC) reference now.
[asset release];

NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirectory = [paths objectAtIndex:0];

AVAssetImageGeneratorCompletionHandler handler = ^(CMTime requestedTime, CGImageRef im, CMTime actualTime, AVAssetImageGeneratorResult result, NSError *error) {
    if (result != AVAssetImageGeneratorSucceeded) {
        NSLog(@"couldn't generate thumbnail, error:%@", error);
        return;
    }
    NSLog(@"actual time: %lld/%d (requested: %lld/%d)", actualTime.value, actualTime.timescale, requestedTime.value, requestedTime.timescale);
    // Name files by requested media time. The original wall-clock name
    // (yyyyMMdd-HHmmss) was identical for every frame written within the same
    // second, so each one silently overwrote the last; it also leaked an
    // NSDateFormatter per frame under MRC.
    NSString *filename = [NSString stringWithFormat:@"frame-%lld-%d.png", requestedTime.value, requestedTime.timescale];
    NSString *filepath = [documentsDirectory stringByAppendingPathComponent:filename];
    CFURLRef url = (CFURLRef)[NSURL fileURLWithPath:filepath];
    CGImageDestinationRef destination = CGImageDestinationCreateWithURL(url, kUTTypePNG, 1, NULL);
    if (destination == NULL) {  // creation can fail (bad path, disk full)
        NSLog(@"Failed to create image destination for %@", filepath);
        return;
    }
    CGImageDestinationAddImage(destination, im, nil);
    if (!CGImageDestinationFinalize(destination)) {
        NSLog(@"Failed to write image to %@", filepath);
    }
    CFRelease(destination);
};

// Cap output size; larger targets multiply decode cost and peak memory.
CGSize maxSize = CGSizeMake(512, 324);
generator.maximumSize = maxSize;

[generator generateCGImagesAsynchronouslyForTimes:thumbTimes completionHandler:handler];

提前感谢您,
j.

我遇到了相同的问题,只不过在我的情况下,我正在尝试切割大约14个非常短的视频片段。你有没有找到任何可能的优化方法? - afrederick
1个回答

7

嘿 @Sooriah Joel,请尝试使用以下代码。它对我而言可以正常工作。

/// Fills `cmTimeArray` with one CMTime per tick of the asset's duration.
/// NOTE(review): duration.value is in timescale ticks (commonly 600 per
/// second), not frames — this enumerates far more times than the video has
/// actual frames. Prefer generateCMTimesArrayOfFrames:UsingAsset: for a
/// bounded frames-per-second sampling.
- (void)generateCMTimesArrayOfAllFramesUsingAsset:(AVURLAsset *)asset
{
    // The array is reused across calls; clear any previous run's contents.
    if (cmTimeArray.count > 0) {
        [cmTimeArray removeAllObjects];
    }
    // int64_t matches CMTimeMake's value parameter and avoids overflow for
    // long assets (the old `int` index silently truncated).
    for (int64_t t = 0; t < asset.duration.value; t++) {
        CMTime thumbTime = CMTimeMake(t, asset.duration.timescale);
        [cmTimeArray addObject:[NSValue valueWithCMTime:thumbTime]];
    }
    // %lu matches NSUInteger; the original %d triggers a format warning on 64-bit.
    NSLog(@"Array of time %@ count = %lu", cmTimeArray, (unsigned long)cmTimeArray.count);
}


/// Fills `cmTimeArray` with `framesInterval` evenly spaced CMTimes for every
/// whole second of the asset — i.e. a framesInterval-fps sampling grid.
/// Offsets within each second run 1/N .. N/N (no sample at the exact second
/// boundary), matching the original behavior.
- (void)generateCMTimesArrayOfFrames:(int)framesInterval UsingAsset:(AVURLAsset *)asset
{
    // Duration in whole seconds, rounded up so a trailing partial second
    // still gets sample times.
    int videoDuration = ceilf((float)asset.duration.value / asset.duration.timescale);
    // Label the raw fields correctly: duration.value is in timescale ticks.
    // (The original message called it "seconds", which was misleading.)
    NSLog(@"Video duration: %lld ticks at timescale %d (~%d s)", asset.duration.value, asset.duration.timescale, videoDuration);
    // The array is reused across calls; clear any previous run's contents.
    if (cmTimeArray.count > 0) {
        [cmTimeArray removeAllObjects];
    }
    for (int second = 0; second < videoDuration; second++) {
        CMTime base = CMTimeMake(second, 1);
        // j/framesInterval walks the sub-second offsets within this second.
        for (int j = 1; j <= framesInterval; j++) {
            CMTime offset = CMTimeMake(j, framesInterval);
            CMTime sampleTime = CMTimeAdd(base, offset);
            [cmTimeArray addObject:[NSValue valueWithCMTime:sampleTime]];
        }
    }
    // %lu matches NSUInteger; the original %d is wrong on 64-bit.
    NSLog(@"Array of time %@ count = %lu", cmTimeArray, (unsigned long)cmTimeArray.count);
}


/// Kicks off asynchronous thumbnail extraction for `videoAsset`.
/// Successful frames accumulate in `framesArray`; `imageIndex` tracks how
/// many completion callbacks have fired so far.
- (void)generateThumbnailsFromVideoURL:(AVURLAsset *)videoAsset
{
    // Build the list of sample times: 30 frames for every second of video.
    // (Use generateCMTimesArrayOfAllFramesUsingAsset: to request every tick
    // instead — much slower.)
    [self generateCMTimesArrayOfFrames:30 UsingAsset:videoAsset];

    // Completion counter shared by every handler invocation; AVFoundation
    // invokes the handler once per requested time.
    __block int completedCount = 0;
    AVAssetImageGeneratorCompletionHandler handler = ^(CMTime requestedTime, CGImageRef im, CMTime actualTime, AVAssetImageGeneratorResult result, NSError *error) {
        if (result == AVAssetImageGeneratorSucceeded) {
            [framesArray addObject:[UIImage imageWithCGImage:im]];
        } else if (result == AVAssetImageGeneratorFailed) {
            // %ld/(long) matches NSInteger error codes; the original %d is
            // wrong on 64-bit.
            NSLog(@"Failed with error: %@ code %ld", [error localizedDescription], (long)error.code);
        } else if (result == AVAssetImageGeneratorCancelled) {
            NSLog(@"Canceled");
        }

        completedCount++;
        imageIndex = completedCount;

        // Explicit cast avoids the signed/unsigned comparison warning.
        if (completedCount == (int)cmTimeArray.count) {
            // Thumbnail generation completed.
        }
    };

    // Configure and launch the generator. Zero tolerance forces exact-frame
    // seeks, which is accurate but expensive (decode from nearest keyframe
    // for every request).
    self.generator = [[AVAssetImageGenerator alloc] initWithAsset:videoAsset];
    self.generator.apertureMode = AVAssetImageGeneratorApertureModeCleanAperture;
    self.generator.appliesPreferredTrackTransform = TRUE;
    self.generator.requestedTimeToleranceBefore = kCMTimeZero;
    self.generator.requestedTimeToleranceAfter = kCMTimeZero;
    self.generator.maximumSize = CGSizeMake(40, 40);
    [self.generator generateCGImagesAsynchronouslyForTimes:cmTimeArray completionHandler:handler];
}

网页内容由 Stack Overflow 提供,点击上面的原文链接可以查看英文原文。