用图片生成视频

趁着年前再写一篇。

前段时间项目要求用一组图片生成视频,查了一番资料后实现,这里做个记录。

1、初始化

        // Cache the requested output dimensions, destination path and frame rate.
        _videoSize = size;
        _videoPath = path;
        _timeScale = scale;

        NSError *error = nil;

        // The container format can be configured here (MPEG-4 in this case).
        self.videoWriter = [[AVAssetWriter alloc]initWithURL:[NSURL fileURLWithPath:path]
                                                    fileType:AVFileTypeMPEG4
                                                       error:&error];

        NSParameterAssert(self.videoWriter);

        //  Codec and output dimensions for the video track.
        //  NOTE(review): AVVideoCodecH264 is deprecated since iOS 11 in favor of
        //  AVVideoCodecTypeH264 — confirm the deployment target before changing.
        NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                       AVVideoCodecH264,AVVideoCodecKey,
                                       [NSNumber numberWithInt:_videoSize.width],AVVideoWidthKey,
                                       [NSNumber numberWithInt:_videoSize.height],AVVideoHeightKey,
                                       nil];

        self.writerInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
                                                              outputSettings:videoSettings];

        // The adaptor lets us append CVPixelBuffers directly to the writer input.
        self.adaptor = [AVAssetWriterInputPixelBufferAdaptor
                        assetWriterInputPixelBufferAdaptorWithAssetWriterInput:self.writerInput
                        sourcePixelBufferAttributes:nil];

        NSParameterAssert(self.writerInput);
        NSParameterAssert([self.videoWriter canAddInput:self.writerInput]);

        [self.videoWriter addInput:self.writerInput];

        // Begin writing; the session timeline starts at t = 0.
        [self.videoWriter startWriting];

        [self.videoWriter startSessionAtSourceTime:kCMTimeZero];

2、添加图片

这里我用的是一次添加一张进行处理的方法,Demo也有一次性给一个图片数组生成视频的方法
/**
 *  通过图片添加视频帧
 *
 *  @param image  图片
 *
 *  @return 是否添加成功
 */

/**
 *  Appends one video frame rendered from the given image.
 *
 *  @param image  Source image for the frame.
 *
 *  @return YES if the frame was appended successfully, NO otherwise
 *          (nil image, writer not ready, buffer creation or append failure).
 */
- (BOOL)addVideoFrameWithImage:(UIImage *)image{

    if (!image || !self.writerInput.readyForMoreMediaData) {
        return NO;
    }

    //  timeScale is the video frame rate; each frame lasts 1/timeScale seconds.
    CMTime frameTime = CMTimeMake(1, self.timeScale);
    CMTime lastTime = CMTimeMake(self.frameNumber, self.timeScale);
    CMTime presentTime = CMTimeAdd(lastTime, frameTime);

    //  The very first frame is presented at t = 0.
    if (self.frameNumber == 0) {
        presentTime = CMTimeMake(0, self.timeScale);
    }

    //  Convert the UIImage into a CVPixelBuffer we own (+1 retained).
    //  Optimizing this conversion is the place to reduce memory pressure further.
    CVPixelBufferRef buffer = [self pixelBufferFromCGImage:[image CGImage]];
    if (!buffer) {
        return NO;
    }

    BOOL appended = [self.adaptor appendPixelBuffer:buffer withPresentationTime:presentTime];

    //  Release unconditionally: the adaptor retains the buffer as long as it needs it.
    //  The original code only released on the success path, leaking every failed frame.
    CVPixelBufferRelease(buffer);

    if (appended) {
        self.frameNumber++;
    }
    return appended;
}

3、结束添加

    // Signal that no more frames will be appended, then finalize the file asynchronously.
    [self.writerInput markAsFinished];

    [self.videoWriter finishWritingWithCompletionHandler:^{
        // NOTE: the original compared against AVAssetReaderStatusFailed (a reader-enum
        // constant) on a writer object; checking AVAssetWriterStatusCompleted alone is
        // both correct and sufficient.
        if (self.videoWriter.status == AVAssetWriterStatusCompleted) {
            //  Success hook — put any follow-up processing here.
            if (success) {
                success();
            }
        } else {
            if (fail) {
                fail(self.videoWriter.error);
            }

            NSLog(@"create video failed, %@",self.videoWriter.error);
        }

        // Release the pixel-buffer pool only after writing has finished.
        // The original released it synchronously right after the async call above,
        // while frames could still be in flight.
        CVPixelBufferPoolRelease(self.adaptor.pixelBufferPool);
    }];

4、保存到相册

    // NOTE(review): ALAssetsLibrary is deprecated since iOS 9 — consider migrating to
    // PHPhotoLibrary performChanges: when the deployment target allows.
    ALAssetsLibrary *library = [[ALAssetsLibrary alloc]init];

    ALAssetsLibraryWriteVideoCompletionBlock videoWriteCompletionBlock = ^(NSURL *newURL,NSError *error) {
        if (!error) {
            NSLog(@"save to Photos Gallery success");

            // Optionally delete the temporary video file once it is in the album.
//            [[NSFileManager defaultManager] removeItemAtPath:[srcURL path]  error:NULL];
            dispatch_async(dispatch_get_main_queue(), ^{
                if (view) {
                    [view makeToast:@"保存视频到相册成功" duration:2.0 position:CSToastPositionCenter];
                }
            });
        } else {
            NSLog(@"save to Photos Gallery error ,%@",error);
            // UI work must run on the main queue (the original touched `view` directly
            // here, unlike the success branch). The original toast text also read
            // "成功失败" ("success failure") — fixed to a plain failure message.
            dispatch_async(dispatch_get_main_queue(), ^{
                if (view) {
                    [view makeToast:@"保存视频到相册失败" duration:2.0 position:CSToastPositionCenter];
                }
            });
        }
    };

    //  srcURL is the file URL the video was written to.
    if ([library videoAtPathIsCompatibleWithSavedPhotosAlbum:srcURL]) {

        [library writeVideoAtPathToSavedPhotosAlbum:srcURL
                                    completionBlock:videoWriteCompletionBlock];
    }

PS:UIImage 生成PixelBuffer

    /**
     *  Renders a CGImage into a newly created 32ARGB CVPixelBuffer.
     *  The caller owns the returned buffer and must call CVPixelBufferRelease().
     *
     *  @param image  Source image; may be NULL.
     *
     *  @return A +1 retained pixel buffer, or NULL on failure.
     */
    - (CVPixelBufferRef) pixelBufferFromCGImage: (CGImageRef) image {

        if (!image) {
            return NULL;
        }

        NSDictionary *options = @{(id)kCVPixelBufferCGImageCompatibilityKey : @YES,
                                  (id)kCVPixelBufferCGBitmapContextCompatibilityKey : @YES};

        size_t width = CGImageGetWidth(image);
        size_t height = CGImageGetHeight(image);

        CVPixelBufferRef pxbuffer = NULL;

        // TODO: kCVPixelFormatType_32ARGB could be made configurable.
        CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, width, height,
                                              kCVPixelFormatType_32ARGB,
                                              (__bridge CFDictionaryRef)options, &pxbuffer);
        if (status != kCVReturnSuccess || pxbuffer == NULL) {
            // Fail gracefully instead of asserting (asserts compile out in release).
            return NULL;
        }

        CVPixelBufferLockBaseAddress(pxbuffer, 0);
        void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);

        CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();

        // Use the buffer's actual bytesPerRow: Core Video may pad rows for alignment,
        // so the original hard-coded 4*width could corrupt the image or overrun rows.
        CGContextRef context = CGBitmapContextCreate(pxdata, width, height, 8,
                                                     CVPixelBufferGetBytesPerRow(pxbuffer),
                                                     rgbColorSpace, kCGImageAlphaNoneSkipFirst);
        if (context == NULL) {
            // Clean up everything created so far; the original leaked pxbuffer here.
            CGColorSpaceRelease(rgbColorSpace);
            CVPixelBufferUnlockBaseAddress(pxbuffer, 0);
            CVPixelBufferRelease(pxbuffer);
            return NULL;
        }

        // (The original applied CGAffineTransformMakeRotation(0) — a no-op — removed.)
        CGContextDrawImage(context, CGRectMake(0, 0, width, height), image);

        CGColorSpaceRelease(rgbColorSpace);
        CGContextRelease(context);

        CVPixelBufferUnlockBaseAddress(pxbuffer, 0);

        return pxbuffer;
    }

Demo地址