Ekulelu's Blog

iOS视频制作

#
这里说的视频制作并不是从摄像头录制视频,而是如何将一帧一帧的图片合成一个视频文件。这里我们所使用到的类是AVAssetWriter,它的使用方式比较固定,且代码也比较多。
首先是设定好AVAssetWriter

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
// Builds the whole write pipeline: pixel-buffer pool, AVAssetWriter, video
// input and pixel-buffer adaptor, then starts the writing session at t = 0.
// Must run before the first -writeVideoFrame.
- (void)setUpWriter
{
    _rgbColorSpace = CGColorSpaceCreateDeviceRGB();

    // Attributes for the pool the frames are rendered into. BGRA matches the
    // bitmap context created later in -createPixelBufferAndBitmapContext:.
    // NOTE(review): kCVPixelBufferBytesPerRowAlignmentKey expects an alignment
    // in bytes (e.g. 16 or 64), not the row length; width * 4 only happens to
    // be a valid alignment for power-of-two widths — confirm intent.
    NSDictionary *bufferAttributes = @{(id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA),
                                       (id)kCVPixelBufferCGBitmapContextCompatibilityKey : @YES,
                                       (id)kCVPixelBufferWidthKey : @(_viewSize.width),
                                       (id)kCVPixelBufferHeightKey : @(_viewSize.height),
                                       (id)kCVPixelBufferBytesPerRowAlignmentKey : @(_viewSize.width * 4)
                                       };
    _outputBufferPool = NULL;
    // BUGFIX: surface pool-creation failures instead of silently continuing
    // with a NULL pool (which would make every later frame fail).
    CVReturn poolStatus = CVPixelBufferPoolCreate(NULL, NULL, (__bridge CFDictionaryRef)(bufferAttributes), &_outputBufferPool);
    if (poolStatus != kCVReturnSuccess) {
        DLog(@"Warning: unable to create pixel buffer pool (%d)", (int)poolStatus);
    }

    NSError *error = nil;
    _videoWriter = [[AVAssetWriter alloc] initWithURL:[self tempFileURL]
                                             fileType:AVFileTypeQuickTimeMovie
                                                error:&error];
    NSParameterAssert(_videoWriter);
    // BUGFIX: the error was captured but never inspected; log it so a failed
    // writer is diagnosable in release builds where the assert compiles out.
    if (!_videoWriter) {
        DLog(@"Warning: unable to create AVAssetWriter: %@", error);
    }

    // NOTE(review): ~1 bit per pixel per second is a very low average bitrate;
    // raise the multiplier if output quality is poor.
    NSInteger pixelNumber = _viewSize.width * _viewSize.height;
    NSDictionary *videoCompression = @{AVVideoAverageBitRateKey : @(pixelNumber * 1)};
    NSDictionary *videoSettings = @{AVVideoCodecKey : AVVideoCodecH264,
                                    AVVideoWidthKey : @(_viewSize.width),
                                    AVVideoHeightKey : @(_viewSize.height),
                                    AVVideoCompressionPropertiesKey : videoCompression};

    _videoWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings];
    NSParameterAssert(_videoWriterInput);
    _videoWriterInput.expectsMediaDataInRealTime = YES;
    _videoWriterInput.transform = CGAffineTransformIdentity;

    _avAdaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:_videoWriterInput sourcePixelBufferAttributes:nil];

    [_videoWriter addInput:_videoWriterInput];
    [_videoWriter startWriting];
    // Timescale 1000 matches the CMTimeMakeWithSeconds calls used per frame.
    [_videoWriter startSessionAtSourceTime:CMTimeMake(0, 1000)];
}

配置好AVAssetWriter后,就可以往它写入每一帧的图像数据了。写入的方法如下。代码里面使用了代理模式,将每一帧的context传递给了代理,由代理去完成绘制后,把这个context写入到视频。

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
// Display-link callback: renders one frame via the delegate and appends it to
// the writer. A semaphore guarantees at most one frame is in flight, so every
// exit path out of the async block MUST signal it or the exporter stalls.
- (void)writeVideoFrame
{
    // Drop this tick if the previous frame is still being appended.
    if (dispatch_semaphore_wait(_pixelAppendSemaphore, DISPATCH_TIME_NOW) != 0) {
        return;
    }
    dispatch_async(_append_pixelBuffer_queue, ^{
        if (![_videoWriterInput isReadyForMoreMediaData]) {
            // BUGFIX: the original returned without signaling, leaving the
            // semaphore at 0 forever — no later frame could ever be written.
            dispatch_semaphore_signal(_pixelAppendSemaphore);
            return;
        }
        if (self.currentTime > self.totalTime && self.isExporting) {
            // BUGFIX: release the semaphore before bailing out so a later
            // export session starts from a consistent count.
            dispatch_semaphore_signal(_pixelAppendSemaphore);
            [self stopExporting];
            return;
        }
        DLog(@"time %f %f", self.currentTime, self.currentTime - self.totalTime);

        CMTime time = CMTimeMakeWithSeconds(self.currentTime, 1000);
        CVPixelBufferRef pixelBuffer = NULL;
        CGContextRef bitmapContext = [self createPixelBufferAndBitmapContext:&pixelBuffer];
        if (bitmapContext == NULL) {
            // Guard against pool exhaustion / context-creation failure.
            if (pixelBuffer) {
                CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
                CVPixelBufferRelease(pixelBuffer);
            }
            dispatch_semaphore_signal(_pixelAppendSemaphore);
            return;
        }

        // The delegate draws the frame into the context and may ask the
        // exporter to continue, cancel, or finish via the out-parameter.
        RYExporterNextOperation nextOpr = RYExporterNextOperationContinue;
        if (self.delegate) {
            if ([self.delegate respondsToSelector:@selector(exporter:getFrameInContext:time:totalTime:nextOperation:)] == YES) {
                [self.delegate exporter:self getFrameInContext:bitmapContext time:self.currentTime totalTime:self.totalTime nextOperation:&nextOpr];
            }
        }

        BOOL success = [_avAdaptor appendPixelBuffer:pixelBuffer withPresentationTime:time];
        if (!success) {
            DLog(@"Warning: Unable to write buffer to video");
        }

        CGContextRelease(bitmapContext);
        CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
        CVPixelBufferRelease(pixelBuffer);
        dispatch_semaphore_signal(_pixelAppendSemaphore);

        if (nextOpr == RYExporterNextOperationCancel) {
            [self cancelExporting];
            return;
        } else if (nextOpr == RYExporterNextOperationEnd) {
            [self stopExporting];
            return;
        }
        // BUGFIX: force floating-point division — if the fps property is an
        // integral type, 1/self.fps truncates to 0 and the presentation
        // timestamp never advances.
        self.currentTime += 1.0 / self.fps;
    });
}
// Vends a pixel buffer from the pool and wraps its backing memory in a CG
// bitmap context the delegate can draw into. On success the buffer is returned
// LOCKED via *pixelBuffer; the caller unlocks and releases it after appending.
// Returns NULL (with *pixelBuffer set to NULL) on failure.
- (CGContextRef)createPixelBufferAndBitmapContext:(CVPixelBufferRef *)pixelBuffer
{
    // BUGFIX: the result was ignored; on pool exhaustion *pixelBuffer stayed
    // NULL and CVPixelBufferLockBaseAddress dereferenced it.
    CVReturn status = CVPixelBufferPoolCreatePixelBuffer(NULL, _outputBufferPool, pixelBuffer);
    if (status != kCVReturnSuccess || *pixelBuffer == NULL) {
        DLog(@"Warning: unable to create pixel buffer from pool (%d)", (int)status);
        *pixelBuffer = NULL;
        return NULL;
    }
    CVPixelBufferLockBaseAddress(*pixelBuffer, 0);

    // Little-endian 32-bit with premultiplied alpha first == BGRA, matching
    // the kCVPixelFormatType_32BGRA buffers produced by the pool.
    CGContextRef bitmapContext = CGBitmapContextCreate(CVPixelBufferGetBaseAddress(*pixelBuffer),
                                                       CVPixelBufferGetWidth(*pixelBuffer),
                                                       CVPixelBufferGetHeight(*pixelBuffer),
                                                       8, CVPixelBufferGetBytesPerRow(*pixelBuffer), _rgbColorSpace,
                                                       kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst
                                                       );
    if (bitmapContext == NULL) {
        // Undo the lock/retain so the buffer goes back to the pool cleanly.
        CVPixelBufferUnlockBaseAddress(*pixelBuffer, 0);
        CVPixelBufferRelease(*pixelBuffer);
        *pixelBuffer = NULL;
        return NULL;
    }

    CGContextScaleCTM(bitmapContext, _scale, _scale);
    // Flip the CG coordinate system so UIKit-style (top-left-origin) drawing
    // lands upright in the video frame.
    CGAffineTransform flipVertical = CGAffineTransformMake(1, 0, 0, -1, 0, _viewSize.height);
    CGContextConcatCTM(bitmapContext, flipVertical);
    return bitmapContext;
}

留意到上面的代码是运行在一个串行队列中,而这个串行队列通过 dispatch_set_target_queue 指向了高优先级的全局队列,配置如下,写在了init方法里面。

1
2
3
// Serial queue on which frames are appended; retargeted at the high-priority
// global queue so appends are not starved by default-priority work.
_append_pixelBuffer_queue = dispatch_queue_create("RYExporter.append_queue", DISPATCH_QUEUE_SERIAL);
dispatch_set_target_queue(_append_pixelBuffer_queue, dispatch_get_global_queue( DISPATCH_QUEUE_PRIORITY_HIGH, 0));
_pixelAppendSemaphore = dispatch_semaphore_create(0); // Gates one in-flight frame; starts at 0 and is first signalled when exporting begins.

为了能自动调用writeVideoFrame方法,使用了一个DisplayLink,在开始导出视频的时候,初始化这个DisplayLink并将它加入到runloop。使用DisplayLink的好处是能获得和屏幕刷新率一样的频率。在录屏的时候也可以使用这个视频导出类。

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
// Begins an export session: configures the writer, then installs a display
// link that drives -writeVideoFrame at screen refresh rate.
// Returns YES when the writer entered the writing state; NO otherwise or when
// an export is already in progress.
- (BOOL)startExportingWithTotalTime:(CGFloat)totalTime viewSize:(CGSize)viewSize videoPath:(NSString*)videoPath fps:(NSInteger)fps
{
    if (!_isExporting) {
        self.totalTime = totalTime;
        self.viewSize = viewSize;
        _videoPath = videoPath;
        self.fps = fps;
        [self setUpWriter];
        _isExporting = (_videoWriter.status == AVAssetWriterStatusWriting);
        // BUGFIX: the display link used to be installed and the semaphore
        // signalled even when the writer failed to start, so the frame pump
        // kept firing against a broken writer and the semaphore count drifted
        // across sessions. Only start the pump on success.
        if (_isExporting) {
            _displayLink = [CADisplayLink displayLinkWithTarget:self selector:@selector(writeVideoFrame)];
            [_displayLink addToRunLoop:[NSRunLoop mainRunLoop] forMode:NSRunLoopCommonModes];
            self.beginTime = [NSDate date].timeIntervalSince1970;
            // Release the gate so the first frame may be appended.
            dispatch_semaphore_signal(_pixelAppendSemaphore);
        }
    }
    return _isExporting;
}

绘制完成或者取消后,必须对一些用到的对象进行清理,避免内存泄露。

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
// Finishes the writer, then either moves the temp file to the caller-supplied
// path or saves it to the photo library, cleans up, and notifies the delegate
// on the main queue.
- (void)completeExportingSession
{
    @weakify(self)
    dispatch_async(_append_pixelBuffer_queue, ^{
        @strongify(self)
        [_videoWriterInput markAsFinished];
        [_videoWriter finishWritingWithCompletionHandler:^{
            // Shared tail: tear down writer state, then report on main.
            void (^completion)(NSError* error) = ^(NSError* error) {
                [self cleanup];
                dispatch_async(dispatch_get_main_queue(), ^{
                    if (self.delegate && [self.delegate respondsToSelector:@selector(exporterVideoCompleted:videoPath:error:)]) {
                        [self.delegate exporterVideoCompleted:self videoPath:self.videoPath error:error];
                    }
                });
            };
            if (self.videoPath) { // A destination path was set: move the temp file there.
                NSFileManager* fileManager = [NSFileManager defaultManager];
                if ([fileManager fileExistsAtPath:self.videoPath isDirectory:nil]) {
                    // Best-effort removal; the move below reports the real failure.
                    [fileManager removeItemAtPath:self.videoPath error:NULL];
                }
                // BUGFIX: check the BOOL result rather than the error pointer —
                // per Cocoa convention the error is only meaningful on failure,
                // so the old shared error variable could report a stale error
                // even after a successful move.
                NSError *moveError = nil;
                if ([fileManager moveItemAtPath:self.tmpFilePath toPath:self.videoPath error:&moveError]) {
                    moveError = nil;
                }
                completion(moveError);
            } else {
                // NOTE(review): ALAssetsLibrary is deprecated since iOS 9;
                // consider migrating to PHPhotoLibrary.
                ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
                [library writeVideoAtPathToSavedPhotosAlbum:_videoWriter.outputURL completionBlock:^(NSURL *assetURL, NSError *error) {
                    if (error) {
                        NSLog(@"Error copying video to camera roll:%@", [error localizedDescription]);
                    } else {
                        [self removeTempFilePath:_videoWriter.outputURL.path];
                    }
                    completion(error);
                }];
            }
        }];
    });
}
// Aborts the current session: finalizes the writer, deletes the partial temp
// file, resets state, and tells the delegate (with a nil path and nil error)
// on the main queue.
- (void)cancelExportingSession {
    @weakify(self)
    dispatch_async(_append_pixelBuffer_queue, ^{
        @strongify(self)
        [_videoWriterInput markAsFinished];
        [_videoWriter finishWritingWithCompletionHandler:^{
            // Discard the partially written file before tearing down.
            [self removeTempFilePath:_videoWriter.outputURL.path];
            [self cleanup];
            dispatch_async(dispatch_get_main_queue(), ^{
                // Messaging nil is a no-op, so a single respondsToSelector:
                // check covers the missing-delegate case too.
                if ([self.delegate respondsToSelector:@selector(exporterVideoCompleted:videoPath:error:)]) {
                    [self.delegate exporterVideoCompleted:self videoPath:nil error:nil];
                }
            });
        }];
    });
}
// Releases the writer objects and CoreFoundation resources and resets the
// timeline so the exporter can run another session. Safe to call repeatedly.
- (void)cleanup
{
    self.avAdaptor = nil;
    self.videoWriterInput = nil;
    self.videoWriter = nil;
    self.outputBufferPoolAuxAttributes = nil;
    // BUGFIX: NULL the CF pointers after releasing them — cleanup can run more
    // than once (e.g. a cancel following a completed session), and releasing a
    // dangling color space / pool pointer would over-release.
    if (_rgbColorSpace) {
        CGColorSpaceRelease(_rgbColorSpace);
        _rgbColorSpace = NULL;
    }
    if (_outputBufferPool) {
        CVPixelBufferPoolRelease(_outputBufferPool);
        _outputBufferPool = NULL;
    }
    self.currentTime = 0;
    self.totalTime = 0;
    self.isExporting = NO;
}