- Converting to and from cv::Mat
+ (cv::Mat)matFromImageBuffer:(CVPixelBufferRef)pixelBuffer {
    CVPixelBufferLockBaseAddress(pixelBuffer, 0);
    void *baseaddress = CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 0);
    int bufferWidth = (int)CVPixelBufferGetWidthOfPlane(pixelBuffer, 0);
    int bufferHeight = (int)CVPixelBufferGetHeightOfPlane(pixelBuffer, 0);
    cv::Mat converted;
    // Wrap the Y plane (luma values). Pass the plane's bytes-per-row explicitly, because the rows may be padded beyond the visible width.
    cv::Mat yPlane = cv::Mat(bufferHeight, bufferWidth, CV_8UC1, baseaddress, CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 0));
    // Wrap the CbCr plane (chroma values)
    int cbcrWidth = (int)CVPixelBufferGetWidthOfPlane(pixelBuffer, 1);
    int cbcrHeight = (int)CVPixelBufferGetHeightOfPlane(pixelBuffer, 1);
    void *cbcrAddress = CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 1);
    // The Cb and Cr values are interleaved, so this plane has two channels: use CV_8UC2 here.
    cv::Mat cbcrPlane = cv::Mat(cbcrHeight, cbcrWidth, CV_8UC2, cbcrAddress, CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 1));
    // Split them apart so we can merge them with the luma values
    std::vector<cv::Mat> cbcrPlanes;
    cv::split(cbcrPlane, cbcrPlanes);
    cv::Mat cbPlane;
    cv::Mat crPlane;
    // In a 4:2:0 format there is one Cb/Cr sample per 2x2 block of luma pixels, so both chroma planes must be enlarged by a factor of 2.
    cv::resize(cbcrPlanes[0], cbPlane, yPlane.size(), 0, 0, cv::INTER_NEAREST);
    cv::resize(cbcrPlanes[1], crPlane, yPlane.size(), 0, 0, cv::INTER_NEAREST);
    cv::Mat ycrcb;
    // cv::COLOR_YCrCb2BGR expects the channel order Y, Cr, Cb, so merge in that order.
    std::vector<cv::Mat> allPlanes = {yPlane, crPlane, cbPlane};
    cv::merge(allPlanes, ycrcb);
    // ycrcb now contains all three planes. Convert it to BGR so OpenCV can work with it.
    cv::cvtColor(ycrcb, converted, cv::COLOR_YCrCb2BGR);
    CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
    return converted;
}
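A minimal usage sketch, not part of the original code: in an Objective-C++ (.mm) file with the usual AVFoundation/OpenCV imports, a frame delivered to an AVCaptureVideoDataOutput delegate can be handed straight to the converter. The OpenCVWrapper class name is a placeholder for wherever +matFromImageBuffer: is declared, and the capture output is assumed to be configured for kCVPixelFormatType_420YpCbCr8BiPlanarFullRange via its videoSettings dictionary.

// Sketch of an AVCaptureVideoDataOutputSampleBufferDelegate method.
// Assumes videoSettings requested kCVPixelFormatType_420YpCbCr8BiPlanarFullRange.
- (void)captureOutput:(AVCaptureOutput *)output
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection {
    CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    if (pixelBuffer == NULL) {
        return;
    }
    // OpenCVWrapper is a placeholder class name for the converter above.
    cv::Mat frame = [OpenCVWrapper matFromImageBuffer:pixelBuffer];
    // frame is now a BGR cv::Mat and can be passed to any OpenCV routine.
}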
- Rotation
- (CVPixelBufferRef)rotateBuffer:(CVPixelBufferRef)imageBuffer withConstant:(uint8_t)rotationConstant {
    vImage_Error err = kvImageNoError;
    CVPixelBufferLockBaseAddress(imageBuffer, 0);
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);
    // Width and height only swap for 90°/270° rotations (odd rotation constants).
    BOOL swapDimensions = (rotationConstant % 2 != 0);
    size_t outWidth = swapDimensions ? height : width;
    size_t outHeight = swapDimensions ? width : height;
    assert(CVPixelBufferGetPixelFormatType(imageBuffer) == kCVPixelFormatType_420YpCbCr8BiPlanarFullRange);
    assert(CVPixelBufferGetPlaneCount(imageBuffer) == 2);
    // Create the destination pixel buffer
    CVPixelBufferRef rotatedBuffer = NULL;
    CVPixelBufferCreate(kCFAllocatorDefault, outWidth, outHeight, kCVPixelFormatType_420YpCbCr8BiPlanarFullRange, NULL, &rotatedBuffer);
    CVPixelBufferLockBaseAddress(rotatedBuffer, 0);
    // Rotate the Y plane (one byte per pixel)
    vImage_Buffer originalYBuffer = { CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 0), CVPixelBufferGetHeightOfPlane(imageBuffer, 0),
                                      CVPixelBufferGetWidthOfPlane(imageBuffer, 0), CVPixelBufferGetBytesPerRowOfPlane(imageBuffer, 0) };
    vImage_Buffer rotatedYBuffer = { CVPixelBufferGetBaseAddressOfPlane(rotatedBuffer, 0), CVPixelBufferGetHeightOfPlane(rotatedBuffer, 0),
                                     CVPixelBufferGetWidthOfPlane(rotatedBuffer, 0), CVPixelBufferGetBytesPerRowOfPlane(rotatedBuffer, 0) };
    err = vImageRotate90_Planar8(&originalYBuffer, &rotatedYBuffer, rotationConstant, 0, kvImageNoFlags);
    assert(err == kvImageNoError);
    // Rotate the interleaved CbCr plane. Each Cb/Cr pair is treated as a single 16-bit pixel so the pair stays together.
    vImage_Buffer originalUVBuffer = { CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 1), CVPixelBufferGetHeightOfPlane(imageBuffer, 1),
                                       CVPixelBufferGetWidthOfPlane(imageBuffer, 1), CVPixelBufferGetBytesPerRowOfPlane(imageBuffer, 1) };
    vImage_Buffer rotatedUVBuffer = { CVPixelBufferGetBaseAddressOfPlane(rotatedBuffer, 1), CVPixelBufferGetHeightOfPlane(rotatedBuffer, 1),
                                      CVPixelBufferGetWidthOfPlane(rotatedBuffer, 1), CVPixelBufferGetBytesPerRowOfPlane(rotatedBuffer, 1) };
    err = vImageRotate90_Planar16U(&originalUVBuffer, &rotatedUVBuffer, rotationConstant, 0, kvImageNoFlags);
    assert(err == kvImageNoError);
    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
    CVPixelBufferUnlockBaseAddress(rotatedBuffer, 0);
    // The returned buffer was created here, so the caller is responsible for releasing it.
    return rotatedBuffer;
}
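Another minimal sketch, again with assumptions marked in the comments: rotating a frame and then converting it, assuming both methods live in the same Objective-C++ class and that OpenCVWrapper is the placeholder host of +matFromImageBuffer: from above. Since rotateBuffer:withConstant: creates the output buffer with CVPixelBufferCreate, the caller must release it.

- (cv::Mat)rotatedMatFromBuffer:(CVPixelBufferRef)pixelBuffer {
    // The vImage headers also define named constants (e.g. kRotate90DegreesClockwise)
    // for the rotationConstant parameter; a plain quarter-turn count works as well.
    CVPixelBufferRef rotated = [self rotateBuffer:pixelBuffer withConstant:kRotate90DegreesClockwise];
    cv::Mat frame = [OpenCVWrapper matFromImageBuffer:rotated];
    // rotateBuffer:withConstant: created this buffer, so release it once it has been converted.
    CVPixelBufferRelease(rotated);
    return frame;
}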