Our company's camera product client needed iOS hardware decoding for the raw camera stream, i.e. decoding H.264 elementary-stream data with VideoToolbox. The hardware-decoding code is pasted below for anyone who needs it.
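The listing references several declarations the post never shows: the AVFrameData frame container from the camera SDK (a C++ object exposing getData()/getDataLen()), a NALUnit struct, the NAL-unit type constants, and the decoder's instance variables. A minimal sketch of what those declarations might look like follows; the class name H264HWDecoder and the exact layout are assumptions, not part of the original code.

#import <AVFoundation/AVFoundation.h>
#import <VideoToolbox/VideoToolbox.h>
#import <CoreImage/CoreImage.h>
#import <UIKit/UIKit.h>

//Assumed NAL unit container used throughout the decoder (not shown in the original post)
typedef struct {
    int type;              //NAL unit type (lower 5 bits of the NAL header byte)
    unsigned int size;     //4-byte start code + NAL payload
    unsigned char *data;   //points at the 4-byte Annex-B start code
} NALUnit;

//Assumed NAL-unit type constants: 1 = non-IDR slice, 5 = IDR, 7 = SPS, 8 = PPS
enum {
    NALUTypeBPFrame = 1,
    NALUTypeIFrame  = 5,
    NALUTypeSPS     = 7,
    NALUTypePPS     = 8,
};

//Assumed instance variables of the decoder class
@interface H264HWDecoder () {
    uint8_t *_sps, *_pps;
    int _spsSize, _ppsSize;
    CMVideoFormatDescriptionRef _decompressionFormatDesc;
    VTDecompressionSessionRef _decompressionSession;
    BOOL _isTakePicture, _isSaveTakePictureImage;
    NSString *_saveTakePicturePath;
}
@end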
-(CVPixelBufferRef)deCompressedCMSampleBufferWithData:(AVFrameData *)frameData andOffset:(int)offset
{
    NALUnit nalUnit;
    CVPixelBufferRef pixelBufferRef = NULL;
    char *data = (char *)frameData->getData();
    int dataLen = frameData->getDataLen();
    if (data == NULL || dataLen == 0) {
        return NULL;
    }
    while ([self nalunitWithData:data andDataLen:dataLen andOffset:offset toNALUnit:&nalUnit])
    {
        if (nalUnit.data == NULL || nalUnit.size == 0) {
            return NULL;
        }
        pixelBufferRef = NULL;
        [self inflateStartCodeWithNalunitData:&nalUnit];
        NSLog(@"NALUnit type: %d.", nalUnit.type);
        switch (nalUnit.type) {
            case NALUTypeIFrame: //I frame
                if (_sps && _pps) {
                    if ([self initH264Decoder]) {
                        pixelBufferRef = [self decompressWithNalUint:nalUnit];
                        NSLog(@"NALUnit I frame size: %u", nalUnit.size);
                        free(_sps);
                        free(_pps);
                        _pps = NULL;
                        _sps = NULL;
                        return pixelBufferRef;
                    }
                }
                break;
            case NALUTypeSPS: //SPS
                _spsSize = nalUnit.size - 4;
                if (_spsSize <= 0) {
                    NSLog(@"_spsSize is null");
                    return NULL;
                }
                if (_sps) free(_sps); //avoid leaking a previously stored SPS
                _sps = (uint8_t *)malloc(_spsSize);
                memcpy(_sps, nalUnit.data + 4, _spsSize);
                NSLog(@"NALUnit SPS size: %u", nalUnit.size - 4);
                break;
            case NALUTypePPS: //PPS
                _ppsSize = nalUnit.size - 4;
                if (_ppsSize <= 0) {
                    //NSLog(@"_ppsSize is null");
                    return NULL;
                }
                if (_pps) free(_pps); //avoid leaking a previously stored PPS
                _pps = (uint8_t *)malloc(_ppsSize);
                memcpy(_pps, nalUnit.data + 4, _ppsSize);
                NSLog(@"NALUnit PPS size: %u", nalUnit.size - 4);
                break;
            case NALUTypeBPFrame: //B/P frame
                pixelBufferRef = [self decompressWithNalUint:nalUnit];
                NSLog(@"NALUnit B/P frame size: %u", nalUnit.size);
                return pixelBufferRef;
            default:
                break;
        }
        offset += nalUnit.size;
        if (offset >= dataLen) {
            return NULL;
        }
    }
    NSLog(@"The AVFrame data size: %d", offset);
    return NULL;
}
-(void)inflateStartCodeWithNalunitData:(NALUnit *)dataUnit
{
    //Replace the 4-byte Annex-B start code in place with the big-endian NAL payload length (AVCC layout expected by VideoToolbox)
    unsigned char *data = dataUnit->data;
    unsigned int dataLen = dataUnit->size - 4;
    data[0] = (unsigned char)(dataLen >> 24);
    data[1] = (unsigned char)(dataLen >> 16);
    data[2] = (unsigned char)(dataLen >> 8);
    data[3] = (unsigned char)(dataLen & 0xff);
}
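inflateStartCodeWithNalunitData: rewrites the Annex-B start code as a length prefix, which is what the AVCC-style sample buffers built later expect. A minimal sketch of the effect on a tiny hand-made NAL unit (the decoder instance and the byte values are hypothetical):

unsigned char nal[] = {0x00, 0x00, 0x00, 0x01, 0x65, 0x88, 0x84}; //start code + 3-byte payload
NALUnit unit = { .type = 0x65 & 0x1f, .size = sizeof(nal), .data = nal };
[decoder inflateStartCodeWithNalunitData:&unit];
//nal now begins with 0x00 0x00 0x00 0x03: the payload length (size - 4) in big-endian order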
-(int)nalunitWithData:(char *)data andDataLen:(int)dataLen andOffset:(int)offset toNALUnit:(NALUnit *)unit
{
    unit->size = 0;
    unit->data = NULL;
    int addUpLen = offset;
    //Scan for the 4-byte H.264 Annex-B start code 00 00 00 01
    while (addUpLen + 4 <= dataLen) {
        if (data[addUpLen] == 0x00 && data[addUpLen + 1] == 0x00 &&
            data[addUpLen + 2] == 0x00 && data[addUpLen + 3] == 0x01) {
            int start = addUpLen;       //offset of the start code
            int payload = addUpLen + 4; //offset of the NAL header byte
            //Find the next start code (or the end of the buffer) to delimit this NAL unit
            int pos = payload;
            while (pos + 4 <= dataLen &&
                   !(data[pos] == 0x00 && data[pos + 1] == 0x00 &&
                     data[pos + 2] == 0x00 && data[pos + 3] == 0x01)) {
                pos++;
            }
            if (pos + 4 > dataLen) {
                pos = dataLen; //no further start code: the NAL unit runs to the end of the buffer
            }
            unit->type = data[payload] & 0x1f;
            unit->size = pos - start; //start code + payload
            unit->data = (unsigned char *)&data[start];
            return 1;
        }
        addUpLen++;
    }
    return 0; //no NAL unit found
}
-(BOOL)initH264Decoder
{
    if (_decompressionSession) {
        return YES;
    }
    const uint8_t *const parameterSetPointers[2] = {_sps, _pps};
    const size_t parameterSetSizes[2] = {_spsSize, _ppsSize};
    OSStatus status = CMVideoFormatDescriptionCreateFromH264ParameterSets(kCFAllocatorDefault,
                                                                          2, //parameter set count
                                                                          parameterSetPointers,
                                                                          parameterSetSizes,
                                                                          4, //NAL start code size
                                                                          &_decompressionFormatDesc);
    if (status == noErr) {
        //kCVPixelFormatType_420YpCbCr8Planar is planar YUV420, kCVPixelFormatType_420YpCbCr8BiPlanarFullRange is NV12
        uint32_t biPlanarType = kCVPixelFormatType_420YpCbCr8BiPlanarFullRange;
        CFNumberRef pixelFormat = CFNumberCreate(NULL, kCFNumberSInt32Type, &biPlanarType);
        const void *keys[] = {kCVPixelBufferPixelFormatTypeKey};
        const void *values[] = {pixelFormat};
        CFDictionaryRef attributes = CFDictionaryCreate(NULL, keys, values, 1,
                                                        &kCFTypeDictionaryKeyCallBacks,
                                                        &kCFTypeDictionaryValueCallBacks);
        CFRelease(pixelFormat);
        //Create the decompression session with the output callback below
        VTDecompressionOutputCallbackRecord outputCallbackRecord;
        outputCallbackRecord.decompressionOutputRefCon = NULL;
        outputCallbackRecord.decompressionOutputCallback = decompressionOutputCallbackRecord;
        status = VTDecompressionSessionCreate(kCFAllocatorDefault,
                                              _decompressionFormatDesc,
                                              NULL,
                                              attributes,
                                              &outputCallbackRecord,
                                              &_decompressionSession);
        CFRelease(attributes);
        if (status != noErr) {
            return NO;
        }
    } else {
        NSLog(@"CMVideoFormatDescriptionCreateFromH264ParameterSets failed with error %d.", (int)status);
        return NO;
    }
    return YES;
}
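initH264Decoder returns early once a session exists, so if the stream's SPS/PPS change (for example after a resolution switch) the old session keeps being used. A hedged sketch of a reset helper that could be called when new parameter sets arrive; the method name is an assumption and is not part of the original code:

//Hypothetical helper: tear down the current session so the next I frame rebuilds it with the new SPS/PPS
-(void)resetH264Decoder
{
    if (_decompressionSession) {
        VTDecompressionSessionInvalidate(_decompressionSession);
        CFRelease(_decompressionSession);
        _decompressionSession = NULL;
    }
    if (_decompressionFormatDesc) {
        CFRelease(_decompressionFormatDesc);
        _decompressionFormatDesc = NULL;
    }
}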
static void decompressionOutputCallbackRecord(void *CM_NULLABLE decompressionOutputRefCon,
                                              void *CM_NULLABLE sourceFrameRefCon,
                                              OSStatus status,
                                              VTDecodeInfoFlags infoFlags,
                                              CM_NULLABLE CVImageBufferRef imageBuffer,
                                              CMTime presentationTimeStamp,
                                              CMTime presentationDuration)
{
    //sourceFrameRefCon is the address of the caller's CVPixelBufferRef (see decompressWithNalUint:);
    //the decoded image buffer is retained and handed back through it
    CVPixelBufferRef *outputPixelBuffer = (CVPixelBufferRef *)sourceFrameRefCon;
    *outputPixelBuffer = CVPixelBufferRetain(imageBuffer);
}
-(CVPixelBufferRef)decompressWithNalUint:(NALUnit)dataUnit
{
    CMBlockBufferRef blockBufferRef = NULL;
    CVPixelBufferRef outputPixelBufferRef = NULL;
    //1. Wrap the AVCC-formatted NAL unit in a CMBlockBuffer (no copy, no deallocation: kCFAllocatorNull)
    OSStatus status = CMBlockBufferCreateWithMemoryBlock(kCFAllocatorDefault,
                                                         dataUnit.data,
                                                         dataUnit.size,
                                                         kCFAllocatorNull,
                                                         NULL,
                                                         0,
                                                         dataUnit.size,
                                                         0,
                                                         &blockBufferRef);
    //2. Create a CMSampleBuffer around the block buffer
    if (status == kCMBlockBufferNoErr) {
        CMSampleBufferRef sampleBufferRef = NULL;
        const size_t sampleSizes[] = {dataUnit.size};
        OSStatus createStatus = CMSampleBufferCreateReady(kCFAllocatorDefault,
                                                          blockBufferRef,
                                                          _decompressionFormatDesc,
                                                          1,
                                                          0,
                                                          NULL,
                                                          1,
                                                          sampleSizes,
                                                          &sampleBufferRef);
        //3. Decode synchronously; the output callback retains the decoded CVPixelBuffer into outputPixelBufferRef
        if (createStatus == noErr && sampleBufferRef) {
            VTDecodeFrameFlags frameFlags = 0;
            VTDecodeInfoFlags infoFlags = 0;
            OSStatus decodeStatus = VTDecompressionSessionDecodeFrame(_decompressionSession,
                                                                      sampleBufferRef,
                                                                      frameFlags,
                                                                      &outputPixelBufferRef,
                                                                      &infoFlags);
            if (decodeStatus != noErr) {
                CFRelease(sampleBufferRef);
                CFRelease(blockBufferRef);
                outputPixelBufferRef = NULL;
                return outputPixelBufferRef;
            }
            if (_isTakePicture) {
                if (!_isSaveTakePictureImage) {
                    //Snapshot: convert the decoded pixel buffer to a JPEG file
                    CIImage *ciImage = [CIImage imageWithCVPixelBuffer:outputPixelBufferRef];
                    CIContext *ciContext = [CIContext contextWithOptions:nil];
                    CGImageRef videoImage = [ciContext
                                             createCGImage:ciImage
                                             fromRect:CGRectMake(0, 0,
                                                                 CVPixelBufferGetWidth(outputPixelBufferRef),
                                                                 CVPixelBufferGetHeight(outputPixelBufferRef))];
                    UIImage *uiImage = [UIImage imageWithCGImage:videoImage];
                    _isSaveTakePictureImage = [UIImageJPEGRepresentation(uiImage, 1.0) writeToFile:_saveTakePicturePath atomically:YES];
                    CGImageRelease(videoImage);
                    _isTakePicture = false;
                }
            }
            CFRelease(sampleBufferRef);
        }
        CFRelease(blockBufferRef);
    }
    //The caller owns the returned pixel buffer (retained in the output callback) and must release it
    return outputPixelBufferRef;
}
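A minimal usage sketch of how a frame from the camera stream might be pushed through the decoder and released afterwards; the decoder and renderer objects and the displayPixelBuffer: call are assumptions, not part of the original post:

CVPixelBufferRef pixelBuffer = [decoder deCompressedCMSampleBufferWithData:frameData andOffset:0];
if (pixelBuffer) {
    //Hypothetical consumption of the decoded frame, e.g. uploading to OpenGL or wrapping in a CIImage
    [renderer displayPixelBuffer:pixelBuffer];
    //The pixel buffer was retained in the decompression callback, so release it here
    CVPixelBufferRelease(pixelBuffer);
}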
-(void)dealloc
{
    if (_sps) {
        free(_sps);
        _sps = NULL;
    }
    if (_pps) {
        free(_pps);
        _pps = NULL;
    }
    if (_decompressionSession) {
        VTDecompressionSessionInvalidate(_decompressionSession);
        CFRelease(_decompressionSession);
        _decompressionSession = NULL;
    }
    if (_decompressionFormatDesc) {
        CFRelease(_decompressionFormatDesc);
        _decompressionFormatDesc = NULL;
    }
}
//Consume: render a compressed CMSampleBuffer directly through AVSampleBufferDisplayLayer
-(void)enqueueSampleBuffer:(CMSampleBufferRef &)sampleBuffer playerView:(AVSampleBufferDisplayLayer *)layer
{
    if (sampleBuffer) {
        CFRetain(sampleBuffer);
        [layer enqueueSampleBuffer:sampleBuffer];
        CFRelease(sampleBuffer);
        if (layer.status == AVQueuedSampleBufferRenderingStatusFailed) {
            NSLog(@"ERROR: %@", layer.error);
            if (-11847 == layer.error.code) {
                //Typically hit after the app returns from the background; the display layer must be rebuilt
                //[self rebuildSampleBufferDisplayLayer];
            }
        } else {
            //NSLog(@"STATUS: %i", (int)layer.status);
        }
    } else {
        NSLog(@"ignore null samplebuffer");
    }
}
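The decoder above hands back raw CVPixelBufferRefs, while enqueueSampleBuffer:playerView: consumes CMSampleBufferRefs. A hedged sketch of how a decoded pixel buffer could be rewrapped for the display layer; this helper is not part of the original post:

//Hypothetical helper: wrap a decoded CVPixelBuffer so it can be enqueued on an AVSampleBufferDisplayLayer
-(CMSampleBufferRef)sampleBufferFromPixelBuffer:(CVPixelBufferRef)pixelBuffer
{
    CMVideoFormatDescriptionRef formatDesc = NULL;
    CMVideoFormatDescriptionCreateForImageBuffer(kCFAllocatorDefault, pixelBuffer, &formatDesc);
    CMSampleTimingInfo timing = {kCMTimeInvalid, kCMTimeInvalid, kCMTimeInvalid};
    CMSampleBufferRef sampleBuffer = NULL;
    CMSampleBufferCreateForImageBuffer(kCFAllocatorDefault, pixelBuffer, true, NULL, NULL,
                                       formatDesc, &timing, &sampleBuffer);
    CFRelease(formatDesc);
    //Ask the layer to display the frame immediately since no timestamps are attached
    CFArrayRef attachments = CMSampleBufferGetSampleAttachmentsArray(sampleBuffer, true);
    CFMutableDictionaryRef dict = (CFMutableDictionaryRef)CFArrayGetValueAtIndex(attachments, 0);
    CFDictionarySetValue(dict, kCMSampleAttachmentKey_DisplayImmediately, kCFBooleanTrue);
    return sampleBuffer; //caller releases with CFRelease after enqueueing
}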