x265-iOS: https://github.com/XuningZhai/x265-iOS-2.5
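The demo below captures NV12 frames from the camera with AVCaptureSession, converts each frame to planar YUV420 (I420) and queues it; tapping the output button stops the capture, encodes the queued frames through the x265 C API, and writes the resulting raw HEVC stream to Documents/test265.hevc.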
Code
#import <AVFoundation/AVFoundation.h>
#import "ViewController.h"
#include "x265.h"
@interface ViewController ()<AVCaptureVideoDataOutputSampleBufferDelegate>
@property (nonatomic,weak) IBOutlet UIView *viewCapture;
@property (nonatomic,weak) IBOutlet UIButton *btnOutput;
@property (nonatomic,strong) AVCaptureVideoPreviewLayer *captureVideoPreviewLayer;
@property (nonatomic,strong) AVCaptureSession *captureSession;
@property (nonatomic,strong) AVCaptureConnection *captureVideoConnection;
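// yuv420Frames holds converted I420 frames waiting to be encoded; dataX265
// accumulates the encoded HEVC bitstream; the x265 param/encoder handles are
// plain C pointers, released in dealloc.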
@property (strong) NSMutableArray *yuv420Frames;
@property (strong) NSMutableData *dataX265;
@property (nonatomic,assign) x265_param *x265Param;
@property (nonatomic,assign) x265_encoder *x265Encoder;
@end
@implementation ViewController
- (void)viewDidLoad
{
    [super viewDidLoad];
    [self initData];
    [self initCapture];
}
- (void)viewDidAppear:(BOOL)animated
{
    [super viewDidAppear:animated];
    [self start];
}
- (void)viewDidDisappear:(BOOL)animated
{
    [super viewDidDisappear:animated];
    [self stop];
}
- (void)initData
{
    [self setYuv420Frames:[NSMutableArray array]];
    [self setDataX265:[NSMutableData data]];
    // The capture preset below is 352x288, so the encoder is configured to match.
    int width = 352;
    int height = 288;
    self.x265Param = x265_param_alloc();
    x265_param_default(self.x265Param);
    // Emit VPS/SPS/PPS with every keyframe so the raw stream is self-contained.
    self.x265Param->bRepeatHeaders = 1;
    self.x265Param->internalCsp = X265_CSP_I420;
    self.x265Param->sourceWidth = width;
    self.x265Param->sourceHeight = height;
    self.x265Param->fpsNum = 18;
    self.x265Param->fpsDenom = 1;
    // x265_encoder_open returns NULL on failure; a real app should check this.
    self.x265Encoder = x265_encoder_open(self.x265Param);
}
- (void)initCapture
{
    self.captureSession = [[AVCaptureSession alloc] init];
    AVCaptureDevice *inputDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    AVCaptureDeviceInput *captureInput = [AVCaptureDeviceInput deviceInputWithDevice:inputDevice error:nil];
    [self.captureSession addInput:captureInput];
    AVCaptureVideoDataOutput *captureOutput = [[AVCaptureVideoDataOutput alloc] init];
    [captureOutput setAlwaysDiscardsLateVideoFrames:YES];
    [captureOutput setSampleBufferDelegate:self queue:dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0)];
    // Ask the camera for NV12 (bi-planar 4:2:0: one Y plane plus one interleaved CbCr plane).
    NSString *key = (NSString *)kCVPixelBufferPixelFormatTypeKey;
    NSNumber *value = [NSNumber numberWithUnsignedInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange];
    NSDictionary *videoSettings = [NSDictionary dictionaryWithObject:value forKey:key];
    [captureOutput setVideoSettings:videoSettings];
    [self.captureSession setSessionPreset:AVCaptureSessionPreset352x288];
    [self.captureSession addOutput:captureOutput];
    [self setCaptureVideoConnection:[captureOutput connectionWithMediaType:AVMediaTypeVideo]];
    // Preview layer so the camera feed is visible while recording.
    [self setCaptureVideoPreviewLayer:[AVCaptureVideoPreviewLayer layerWithSession:self.captureSession]];
    [self.captureVideoPreviewLayer setFrame:self.view.bounds];
    [self.captureVideoPreviewLayer setVideoGravity:AVLayerVideoGravityResizeAspect];
    [self.viewCapture.layer addSublayer:self.captureVideoPreviewLayer];
}
- (void)start
{
    [self.captureSession startRunning];
}
- (void)stop
{
    [self.captureSession stopRunning];
}
- (IBAction)outputX265Video:(id)sender
{
    [self.captureSession stopRunning];
    dispatch_async(dispatch_queue_create("x265_queue", dispatch_queue_attr_make_with_qos_class(DISPATCH_QUEUE_SERIAL, QOS_CLASS_USER_INITIATED, -1)), ^{
        // Encode the queued I420 frames one by one.
        while ([self.yuv420Frames count] != 0)
        {
            NSData *yuv420Frame = [self.yuv420Frames firstObject];
            [self encodeX265FromYuv420Frame:yuv420Frame];
            [self.yuv420Frames removeObject:yuv420Frame];
            dispatch_async(dispatch_get_main_queue(), ^{
                NSString *title = [NSString stringWithFormat:@"remaining encode frames:%lu", (unsigned long)[self.yuv420Frames count]];
                [self.btnOutput setTitle:title forState:UIControlStateNormal];
            });
        }
        // Flush the encoder once after the last frame to drain any delayed pictures
        // (x265 buffers frames for lookahead/B-frames).
        x265_nal *x265NalPp = NULL;
        uint32_t x265NalPi = 0;
        while (x265_encoder_encode(self.x265Encoder, &x265NalPp, &x265NalPi, NULL, NULL) > 0)
        {
            for (uint32_t i = 0; i < x265NalPi; i++)
            {
                [self.dataX265 appendBytes:x265NalPp[i].payload length:x265NalPp[i].sizeBytes];
            }
        }
        dispatch_async(dispatch_get_main_queue(), ^{
            NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
            NSString *docDir = [paths objectAtIndex:0];
            NSString *recordX265VideoPath = [docDir stringByAppendingPathComponent:@"test265.hevc"];
            if ([[NSFileManager defaultManager] fileExistsAtPath:recordX265VideoPath]) {
                [[NSFileManager defaultManager] removeItemAtPath:recordX265VideoPath error:nil];
            }
            // Write the accumulated raw HEVC (Annex-B) stream to Documents/test265.hevc.
            [self.dataX265 writeToFile:recordX265VideoPath atomically:YES];
            [self.btnOutput setTitle:@"finish" forState:UIControlStateNormal];
        });
    });
}
- (void)dealloc
{
    if (self.x265Encoder)
    {
        x265_encoder_close(self.x265Encoder);
    }
    if (self.x265Param)
    {
        x265_param_free(self.x265Param);
    }
}
#pragma mark - AVCaptureVideoDataOutputSampleBufferDelegate
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
    if (connection == self.captureVideoConnection)
    {
        CVPixelBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
        if (CVPixelBufferLockBaseAddress(imageBuffer, 0) == kCVReturnSuccess)
        {
            OSType pixelFormat = CVPixelBufferGetPixelFormatType(imageBuffer);
            switch (pixelFormat)
            {
                case kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange:
                {
                    // Capture pixel format is NV12; convert to planar I420 and queue it.
                    NSData *yuv420Frame = [self convertYUV420FromNV12ImageBuffer:imageBuffer];
                    [self.yuv420Frames addObject:yuv420Frame];
                }
                break;
            }
            // Only unlock when the lock above succeeded.
            CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
        }
    }
}
- (NSData *)convertYUV420FromNV12ImageBuffer:(CVPixelBufferRef)imageBuffer
{
    UInt8 *bufferPtr = (UInt8 *)CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 0);
    UInt8 *bufferPtr1 = (UInt8 *)CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 1);
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);
    size_t bytesrow0 = CVPixelBufferGetBytesPerRowOfPlane(imageBuffer, 0);
    size_t bytesrow1 = CVPixelBufferGetBytesPerRowOfPlane(imageBuffer, 1);
    size_t yuv420_len = sizeof(UInt8) * width * height * 3 / 2;
    // Buffer for planar I420: all Y samples, then all U, then all V.
    UInt8 *yuv420_data = malloc(yuv420_len);
    // Convert NV12 (Y plane + interleaved UV plane) to I420.
    UInt8 *pY = bufferPtr;
    UInt8 *pUV = bufferPtr1;
    UInt8 *pU = yuv420_data + width * height;
    UInt8 *pV = pU + width * height / 4;
    // Copy the Y plane row by row (source rows may be padded to bytesrow0).
    for (size_t i = 0; i < height; i++)
    {
        memcpy(yuv420_data + i * width, pY + i * bytesrow0, width);
    }
    // De-interleave the UV plane into separate U and V planes.
    for (size_t j = 0; j < height / 2; j++)
    {
        for (size_t i = 0; i < width / 2; i++)
        {
            *(pU++) = pUV[i << 1];
            *(pV++) = pUV[(i << 1) + 1];
        }
        pUV += bytesrow1;
    }
    NSData *yuv420Frame = [NSData dataWithBytes:yuv420_data length:yuv420_len];
    free(yuv420_data);
    return yuv420Frame;
}
- (void)encodeX265FromYuv420Frame:(NSData *)yuv420Frame
{
    UInt8 *yuv420_buf = (UInt8 *)yuv420Frame.bytes;
    // Encode one I420 frame with x265.
    x265_picture *x265Pic = NULL;
    char *x265PicBuf = NULL;
    int width = self.x265Param->sourceWidth;
    int height = self.x265Param->sourceHeight;
    int pixelSize = width * height;
    x265Pic = x265_picture_alloc();
    x265_picture_init(self.x265Param, x265Pic);
    x265PicBuf = malloc(sizeof(char) * pixelSize * 3 / 2);
    // Point the picture's three planes (Y, U, V) into the buffer and set their strides.
    x265Pic->planes[0] = x265PicBuf;
    x265Pic->planes[1] = x265PicBuf + pixelSize;
    x265Pic->planes[2] = x265PicBuf + pixelSize * 5 / 4;
    x265Pic->stride[0] = width;
    x265Pic->stride[1] = width / 2;
    x265Pic->stride[2] = width / 2;
    memcpy(x265Pic->planes[0], yuv420_buf, pixelSize);
    memcpy(x265Pic->planes[1], yuv420_buf + pixelSize, pixelSize / 4);
    memcpy(x265Pic->planes[2], yuv420_buf + pixelSize * 5 / 4, pixelSize / 4);
    x265_nal *x265NalPp = NULL;
    uint32_t x265NalPi = 0;
    // The encoder may buffer this frame and output nothing yet; delayed frames are
    // drained by the flush loop in outputX265Video: after the last frame.
    x265_encoder_encode(self.x265Encoder, &x265NalPp, &x265NalPi, x265Pic, NULL);
    for (uint32_t i = 0; i < x265NalPi; i++)
    {
        [self.dataX265 appendBytes:x265NalPp[i].payload length:x265NalPp[i].sizeBytes];
    }
    x265_picture_free(x265Pic);
    free(x265PicBuf);
}
@end
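Because bRepeatHeaders is set, the VPS/SPS/PPS are written in front of every keyframe, so test265.hevc is a self-contained raw Annex-B HEVC elementary stream. It can be copied out of the app's Documents directory (for example through Xcode's Devices and Simulators window) and played or muxed by tools that accept raw HEVC; note that a raw stream carries no container timing, so some players may fall back to a default frame rate.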
Reference: https://www.jianshu.com/p/30a2486e4ab6