前言
前段时间忙着找工作所以没有进行更新,现在抽空更新一下吧,目前的进度由于离开了前公司,而现在的公司任务又比较多,所以更新的速度会慢一些吧.目前写到了RTP分包了,打算春节期间把C语言和TCP传输再好好看一下吧.后面还会把ffmpeg+opengl补完的.本文学习自链接,大家可以去看一下.
苹果提供了一个硬编码的框架VideoToolBox,这个框架iOS8以后开发者可以去使用,这里是VideoToolBox提供的编码类型:
支持类型初始化VideoToolBox
这里要提的一点是码率的设置,这里给一个公式吧,方便大家查看(原文链接),关于码率的理解我可以给大家举一个形象的例子.有钱的人可以过好一点的生活,没钱的人可以过差一点的生活,但也不至于饿死,码率大了的话就非常清晰,但同时文件也会比较大,码率小了的话,图像有时会糊,但也是勉强能看的,这里尽量给一个合适的码率吧.
码率公式,仅供参考
/*
1、-initVideoToolBox中调用VTCompressionSessionCreate创建编码session,然后调用VTSessionSetProperty设置参数,最后调用VTCompressionSessionPrepareToEncodeFrames开始编码;
2、开始视频录制,获取到摄像头的视频帧,传入-encode:,调用VTCompressionSessionEncodeFrame传入需要编码的视频帧,如果返回失败,调用VTCompressionSessionInvalidate销毁session,然后释放session;
3、每一帧视频编码完成后会调用预先设置的编码函数didCompressH264,如果是关键帧需要用CMSampleBufferGetFormatDescription获取CMFormatDescriptionRef,然后用
CMVideoFormatDescriptionGetH264ParameterSetAtIndex取得PPS和SPS;
最后把每一帧的所有NALU数据前四个字节变成0x00 00 00 01之后再写入文件;
4、调用VTCompressionSessionCompleteFrames完成编码,然后销毁session:VTCompressionSessionInvalidate,释放session。
*/
/**
 Creates and configures the VideoToolbox H.264 compression session.
 Runs synchronously on the encode queue so the session is fully set up
 before the first captured frame reaches -videoEncode:.
 */
- (void)initVideoToolBox {
    dispatch_sync(_encodeQueue, ^{
        _frameID = 0;
        // Fixed capture dimensions; dimensions the encoder cannot handle make session creation fail.
        int width = 640, height = 480;
        // Create the encoding session. didCompressH264 is invoked for every encoded frame,
        // with self passed through as the callback refCon. NULL encoderSpecification lets
        // VideoToolbox pick the (hardware) encoder.
        OSStatus status = VTCompressionSessionCreate(NULL,
                                                     width,
                                                     height,
                                                     kCMVideoCodecType_H264,
                                                     NULL,
                                                     NULL,
                                                     NULL,
                                                     didCompressH264,
                                                     (__bridge void *)(self),
                                                     &_encodeingSession);
        NSLog(@"H264状态:VTCompressionSessionCreate %d", (int)status);
        if (status != noErr) {
            NSLog(@"H264会话创建失败");
            return;
        }
        // Real-time encoding (avoids latency) with the baseline profile.
        VTSessionSetProperty(_encodeingSession, kVTCompressionPropertyKey_RealTime, kCFBooleanTrue);
        VTSessionSetProperty(_encodeingSession, kVTCompressionPropertyKey_ProfileLevel, kVTProfileLevel_H264_Baseline_AutoLevel);

        // Keyframe (GOP) interval — too small a GOP can blur the picture.
        int frameInterval = 10;
        CFNumberRef frameIntervalRef = CFNumberCreate(kCFAllocatorDefault, kCFNumberIntType, &frameInterval);
        VTSessionSetProperty(_encodeingSession, kVTCompressionPropertyKey_MaxKeyFrameInterval, frameIntervalRef);
        // CFNumberCreate follows the CF Create rule — we own the reference and must release it.
        CFRelease(frameIntervalRef);

        // Expected frame rate (a hint, not the actual rate).
        int fps = 10;
        CFNumberRef fpsRef = CFNumberCreate(kCFAllocatorDefault, kCFNumberIntType, &fps);
        VTSessionSetProperty(_encodeingSession, kVTCompressionPropertyKey_ExpectedFrameRate, fpsRef);
        CFRelease(fpsRef);

        // Average (long-term target) bit rate, in bits per second.
        int bitRate = width * height * 3 * 4 * 8;
        CFNumberRef bitRateRef = CFNumberCreate(kCFAllocatorDefault, kCFNumberSInt32Type, &bitRate);
        VTSessionSetProperty(_encodeingSession, kVTCompressionPropertyKey_AverageBitRate, bitRateRef);
        CFRelease(bitRateRef);

        // Hard cap: kVTCompressionPropertyKey_DataRateLimits expects a CFArray of
        // alternating [bytes, seconds] pairs — a bare CFNumber is rejected.
        // Here the limit is bitRateLimit bytes per 1 second.
        int bitRateLimit = width * height * 3 * 4;
        NSArray *dataRateLimits = @[@(bitRateLimit), @1];
        NSLog(@"码率%@", dataRateLimits);
        VTSessionSetProperty(_encodeingSession, kVTCompressionPropertyKey_DataRateLimits, (__bridge CFArrayRef)dataRateLimits);

        // Ready to accept frames.
        VTCompressionSessionPrepareToEncodeFrames(_encodeingSession);
    });
}
根据代理方法判断是音频数据还是视频数据
/**
 AVCaptureSession data-output callback: routes video frames to the H.264
 hardware encoder and audio frames to the AAC encoder. Both paths are
 serialized on the encode queue.
 */
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
    if ([self.videoDataOutput isEqual:captureOutput]) {
        // Video frame: the camera delivers an un-encoded CMSampleBuffer.
        dispatch_sync(_encodeQueue, ^{
            _isStartHardEncoding = 1;
            [self videoEncode:sampleBuffer];
        });
    } else if ([self.audioDataOutput isEqual:captureOutput]) {
        // Audio frame: PCM -> AAC hardware encode via AudioToolbox.
        dispatch_sync(_encodeQueue, ^{
            [self.aacEncode encodeSampleBuffer:sampleBuffer completionBlock:^(NSData *encodedData, NSError *error) {
                // -writeData: raises on nil, so only write packets that actually encoded.
                if (encodedData) {
                    [_audioFileHandle writeData:encodedData];
                }
            }];
        });
    }
}
编码完成后的回调
/**
 * VTCompressionOutputCallback, invoked once per encoded frame.
 * Converts the AVCC-packed CMSampleBuffer into Annex-B NALUs:
 * for keyframes it first extracts SPS and PPS from the format description
 * and hands them to the encoder object; then every NALU in the block buffer
 * has its 4-byte big-endian length prefix replaced by the 0x00000001 start
 * code downstream (see -gotEncodedData:isKeyFrame:).
 */
void didCompressH264(void *outputCallbackRefCon, void *sourceFrameRefCon, OSStatus status, VTEncodeInfoFlags infoFlags, CMSampleBufferRef sampleBuffer){
    // Encoder reported an error for this frame — nothing usable to emit.
    if (status != 0) {
        return;
    }
    // Sample data not ready yet.
    if (!CMSampleBufferDataIsReady(sampleBuffer)) {
        NSLog(@"didCompressH264 data is not ready ");
        return;
    }
    VideoEncodeVC *encoder = (__bridge VideoEncodeVC *)outputCallbackRefCon;
    // A sample is a keyframe when the NotSync attachment is absent.
    bool keyframe = !CFDictionaryContainsKey((CFArrayGetValueAtIndex(CMSampleBufferGetSampleAttachmentsArray(sampleBuffer, true), 0)), kCMSampleAttachmentKey_NotSync);
    if (keyframe) {
        // The format description carries the codec parameter sets:
        // index 0 is the SPS, index 1 is the PPS.
        CMFormatDescriptionRef format = CMSampleBufferGetFormatDescription(sampleBuffer);
        size_t sparameterSetSize, sparameterSetCount;
        const uint8_t *sparameterSet;
        OSStatus statusSPS = CMVideoFormatDescriptionGetH264ParameterSetAtIndex(format, 0, &sparameterSet, &sparameterSetSize, &sparameterSetCount, 0);
        if (statusSPS == noErr) {
            size_t pparameterSetSize, pparameterSetCount;
            const uint8_t *pparameterSet;
            OSStatus statusPPS = CMVideoFormatDescriptionGetH264ParameterSetAtIndex(format, 1, &pparameterSet, &pparameterSetSize, &pparameterSetCount, 0);
            if (statusPPS == noErr) {
                NSData *sps = [NSData dataWithBytes:sparameterSet length:sparameterSetSize];
                NSData *pps = [NSData dataWithBytes:pparameterSet length:pparameterSetSize];
                if (encoder) {
                    [encoder gotSPS:sps withPPS:pps];
                }
            }
        }
    }
    // The encoded frame is stored in a CMBlockBuffer, AVCC-packed: each NALU is
    // preceded by a 4-byte big-endian length instead of an Annex-B start code.
    CMBlockBufferRef dataBuffer = CMSampleBufferGetDataBuffer(sampleBuffer);
    size_t length, totalLength;
    char *dataPointer;
    OSStatus statusCodeRet = CMBlockBufferGetDataPointer(dataBuffer, 0, &length, &totalLength, &dataPointer);
    if (statusCodeRet == noErr) {
        size_t bufferOffSet = 0;
        static const size_t AVCCHeaderLength = 4;
        // Walk every NALU in the buffer. NOTE: the condition is written as
        // `bufferOffSet + AVCCHeaderLength < totalLength` because the old form
        // `bufferOffSet < totalLength - AVCCHeaderLength` underflows (size_t is
        // unsigned) whenever totalLength < 4, causing out-of-bounds reads.
        while (bufferOffSet + AVCCHeaderLength < totalLength) {
            uint32_t NALUUnitLength = 0;
            // Read the NALU length prefix and convert big-endian -> host order.
            memcpy(&NALUUnitLength, dataPointer + bufferOffSet, AVCCHeaderLength);
            NALUUnitLength = CFSwapInt32BigToHost(NALUUnitLength);
            // Guard against a corrupt length that would run past the buffer.
            if (bufferOffSet + AVCCHeaderLength + NALUUnitLength > totalLength) {
                break;
            }
            NSData *data = [[NSData alloc] initWithBytes:(dataPointer + bufferOffSet + AVCCHeaderLength) length:NALUUnitLength];
            [encoder gotEncodedData:data isKeyFrame:keyframe];
            // Advance to the next NALU.
            bufferOffSet += AVCCHeaderLength + NALUUnitLength;
        }
    }
}
h264视频编码
/**
 Encodes one captured camera frame with the VideoToolbox session.
 Must run on the encode queue.

 @param videoSampleBuffer The raw (un-encoded) frame from the capture output.
 */
- (void)videoEncode:(CMSampleBufferRef)videoSampleBuffer {
    // The session is NULLed after a failed encode or teardown — calling VT
    // functions on a NULL session would crash, so bail out early.
    if (_encodeingSession == NULL) {
        return;
    }
    // Extract the CVPixelBufferRef (the un-encoded image data) from the sample buffer.
    CVImageBufferRef imageBuffer = (CVImageBufferRef)CMSampleBufferGetImageBuffer(videoSampleBuffer);
    if (imageBuffer == NULL) {
        return; // Not a frame we can encode.
    }
    // Presentation timestamp; omitting it stretches the timeline.
    CMTime presentationTimeStamp = CMTimeMake(_frameID++, 1000);
    VTEncodeInfoFlags flags;
    // Submit the frame for hardware encoding; the output callback configured at
    // session creation (didCompressH264) fires when the frame is done.
    OSStatus statusCode = VTCompressionSessionEncodeFrame(_encodeingSession, imageBuffer, presentationTimeStamp, kCMTimeInvalid, NULL, NULL, &flags);
    if (statusCode != noErr) {
        NSLog(@"H264: VTCompressionSessionEncodeFrame failed with %d", (int)statusCode);
        // Tear the session down on failure; a later frame will see NULL and skip.
        VTCompressionSessionInvalidate(_encodeingSession);
        CFRelease(_encodeingSession);
        _encodeingSession = NULL;
        return;
    }
}
写入沙盒
// Writes the SPS and PPS parameter sets to the capture file, each prefixed
// with the 4-byte Annex-B start code 0x00 00 00 01.
- (void)gotSPS:(NSData *)sps withPPS:(NSData *)pps {
    // Guard the file handle, consistent with -gotEncodedData:isKeyFrame:.
    if (_fileHandle == nil) {
        return;
    }
    const char bytes[] = "\x00\x00\x00\x01";
    size_t length = (sizeof bytes) - 1; // exclude the trailing NUL of the C string
    NSData *byteHeader = [NSData dataWithBytes:bytes length:length];
    [_fileHandle writeData:byteHeader];
    [_fileHandle writeData:sps];
    [_fileHandle writeData:byteHeader];
    [_fileHandle writeData:pps];
}
/// Appends one encoded NALU to the capture file, prefixed with the Annex-B
/// start code 0x00 00 00 01 (the keyframe flag is not used for plain file output).
- (void)gotEncodedData:(NSData *)data isKeyFrame:(BOOL)isKeyFrame {
    if (_fileHandle != NULL) {
        static const char kStartCode[] = "\x00\x00\x00\x01";
        NSData *startCode = [NSData dataWithBytes:kStartCode length:(sizeof kStartCode) - 1];
        [_fileHandle writeData:startCode];
        [_fileHandle writeData:data];
    }
}
结束编码
/**
 Flushes all pending frames and tears the compression session down.
 Safe to call when no session exists (never created, already ended,
 or destroyed after an encode failure).
 */
- (void)endVideoToolBox {
    // CFRelease(NULL) crashes, and the session may already be NULL
    // (see -videoEncode: failure path) — guard before touching it.
    if (_encodeingSession == NULL) {
        return;
    }
    VTCompressionSessionCompleteFrames(_encodeingSession, kCMTimeInvalid);
    VTCompressionSessionInvalidate(_encodeingSession);
    CFRelease(_encodeingSession);
    _encodeingSession = NULL;
}
网友评论
解决了?