Demo 地址

苹果对于音视频的采集已经提供了很好的封装:AVFoundation.framework。我们可以使用 AVFoundation 框架来采集视频数据。

// Class extension: private state for the video-capture wrapper.
// Conforms to AVCaptureVideoDataOutputSampleBufferDelegate so this object
// receives the uncompressed frame callbacks from the session's data output.
@interface RTAVVideoCaputre ()<AVCaptureVideoDataOutputSampleBufferDelegate>
{
// Full-screen preview layer created in -addPreVideo; held in an ivar so a
// view controller can attach it later. NOTE(review): presumably exposed via
// a public accessor elsewhere — confirm against the header.
AVCaptureVideoPreviewLayer * _preViewLayer;
}
// The capture session, built lazily in the -session getter.
@property (nonatomic,strong)AVCaptureSession *session;
// Preset / frame-rate configuration supplied at init time.
@property (nonatomic,strong)RTAVVideoConfiguration *videoConfiguration;
@end
@implementation RTAVVideoCaputre
/// Designated initializer.
/// Stores the capture configuration (read later by the lazy -session getter)
/// and builds the preview layer immediately.
/// @param configuration Preset and frame-rate settings for the session.
- (instancetype)initWithVideoConfiguration:(RTAVVideoConfiguration *)configuration
{
    self = [super init];
    if (self) {
        // Must be set before -addPreVideo: the preview layer touches
        // self.session, whose getter reads _videoConfiguration.
        _videoConfiguration = configuration;
        [self addPreVideo];
    }
    return self;
}
#pragma mark - Method
/// Builds the full-screen preview layer bound to the capture session
/// (triggering the lazy session construction) and stashes it in the ivar.
- (void)addPreVideo
{
    AVCaptureVideoPreviewLayer *layer =
        [AVCaptureVideoPreviewLayer layerWithSession:self.session];
    layer.videoGravity = AVLayerVideoGravityResizeAspectFill;
    layer.frame = [UIScreen mainScreen].bounds;
    _preViewLayer = layer;
}
#pragma mark - delegate
#pragma mark AVCaptureVideoDataOutputSampleBufferDelegate
/// Called on the private serial capture queue for every uncompressed video
/// frame. Extracts the pixel buffer and forwards it to the delegate, if any.
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
    CVImageBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    if (pixelBuffer == NULL) {
        return; // No image payload in this sample buffer.
    }
    if ([_delegate respondsToSelector:@selector(captureOutput:pixelBuffer:)]) {
        [_delegate captureOutput:self pixelBuffer:pixelBuffer];
    }
}
#pragma mark - setter & getter
/// Lazily builds the capture pipeline:
///   1. default camera device   2. device input   3. BGRA video data output
///   4. session with the configured preset   5. wire input/output together.
/// Frames are delivered to the sample-buffer delegate on a private serial queue.
/// @return The shared, lazily created AVCaptureSession.
- (AVCaptureSession *)session
{
    if (_session) {
        return _session;
    }
    //4. Every live capture runs inside a session.
    AVCaptureSession *session = [[AVCaptureSession alloc] init];
    session.sessionPreset = _videoConfiguration.avsessionPreset;

    //1. Default video-capture device (the camera).
    AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];

    //2. Input wrapping the device. Check the returned object, not the error
    // pointer — the error may be stale on success.
    NSError *inputError = nil;
    AVCaptureDeviceInput *videoInput = [AVCaptureDeviceInput deviceInputWithDevice:device error:&inputError];
    if (videoInput == nil) {
        NSLog(@"RTAVVideoCaputre: failed to create video input: %@", inputError);
    }

    //3. Uncompressed output; sample buffers arrive via the delegate callback
    // on a dedicated serial queue.
    AVCaptureVideoDataOutput *videoOutput = [[AVCaptureVideoDataOutput alloc] init];
    dispatch_queue_t videoDataOutputQueue = dispatch_queue_create("VideoDataOutputQueue", DISPATCH_QUEUE_SERIAL);
    [videoOutput setSampleBufferDelegate:self queue:videoDataOutputQueue];
    videoOutput.videoSettings = @{(id)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA)};
    // NOTE(review): the original also called -connectionWithMediaType: here and
    // discarded the result; an output has no connections until it is added to a
    // session, so that call was a no-op and has been removed.

    //设置帧率 — prefer the iOS 7+ per-device API; fall back to the deprecated
    // per-connection API on older systems.
    if ([device respondsToSelector:@selector(setActiveVideoMaxFrameDuration:)] &&
        [device respondsToSelector:@selector(setActiveVideoMinFrameDuration:)]) {
        NSError *lockError = nil;
        // Check lockForConfiguration:'s BOOL result; only unlock after a
        // successful lock. (The original tested the error pointer and
        // unlocked unconditionally.)
        if ([device lockForConfiguration:&lockError]) {
#if defined (__IPHONE_7_0)
            device.activeVideoMaxFrameDuration = CMTimeMake(1, (int32_t)_videoConfiguration.videoFrameRate);
            device.activeVideoMinFrameDuration = CMTimeMake(1, (int32_t)_videoConfiguration.videoFrameRate);
#endif
            [device unlockForConfiguration];
        } else {
            NSLog(@"RTAVVideoCaputre: lockForConfiguration failed: %@", lockError);
        }
    } else {
        for (AVCaptureConnection *connection in videoOutput.connections) {
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wdeprecated-declarations"
            if ([connection respondsToSelector:@selector(setVideoMinFrameDuration:)])
                connection.videoMinFrameDuration = CMTimeMake(1, (int32_t)_videoConfiguration.videoMinFrameRate);
            if ([connection respondsToSelector:@selector(setVideoMaxFrameDuration:)])
                connection.videoMaxFrameDuration = CMTimeMake(1, (int32_t)_videoConfiguration.videoMaxFrameRate);
#pragma clang diagnostic pop
        }
    }

    //5. Wire input and output into the session (guard against a nil input).
    if (videoInput && [session canAddInput:videoInput]) {
        [session addInput:videoInput];
    }
    if ([session canAddOutput:videoOutput]) {
        [session addOutput:videoOutput];
    }
    _session = session;
    return _session;
}

1.通过获取系统支持有效的设备类型–视频类型;

2.设置作为捕捉视频的输入设备;

3.设置捕捉数据的输出,通过代理方法 captureOutput:didOutputSampleBuffer:fromConnection: 获取未编解码的帧数据 sampleBuffer;

4.每一个实时的捕获,全部通过一个session(会话)来开启;

5.将输入设备和输出设备添加到会话中;

6.开启会话捕获.