美文网首页
GPUImage自定义输入源SNImagePixelBuffer

GPUImage自定义输入源SNImagePixelBuffer

作者: Mr_fei | 来源:发表于2023-05-10 11:25 被阅读0次

场景:从SDK或者相机获取到pixelBuffer,需要经过自己处理美颜,贴纸等情况

#import <GPUImage/GPUImage.h>

NS_ASSUME_NONNULL_BEGIN

/// Custom GPUImage input source that feeds externally produced CVPixelBuffers
/// (e.g. frames delivered by an SDK or a camera) into a GPUImage filter chain
/// for further processing such as beautification or stickers.
///
/// NOTE: only kCVPixelFormatType_32BGRA buffers are supported (asserted in the
/// implementation); convert other formats to BGRA before calling.
@interface SNImagePixelBufferInput : GPUImageOutput

/// Uploads the given BGRA pixel buffer as a GL texture and forwards it to all
/// attached targets with the supplied presentation time.
/// If a previous frame is still being rendered, the new frame is dropped
/// rather than queued.
/// @param pixelBuffer A kCVPixelFormatType_32BGRA pixel buffer.
/// @param time Presentation timestamp passed through to the targets.
- (void)processPixelBuffer:(CVPixelBufferRef)pixelBuffer time:(CMTime)time;

@end

NS_ASSUME_NONNULL_END
#import "SNImagePixelBufferInput.h"

@interface SNImagePixelBufferInput ()
// Core Video texture backing the most recently uploaded frame; released and
// replaced on every call to -processVideoPixelBuffer:time:.
@property (nonatomic) CVOpenGLESTextureRef textureRef;

// Binary semaphore (initial count 1) used to drop incoming frames while the
// previous frame is still being rendered on the video processing queue.
@property (nonatomic, strong) dispatch_semaphore_t frameRenderingSemaphore;

@end

@implementation SNImagePixelBufferInput

#pragma mark - Lifecycle

- (instancetype)init {
    if (self = [super init]) {
        // Binary semaphore: at most one frame may be in flight on the video
        // processing queue. -processPixelBuffer:time: drops frames instead of
        // queueing them when rendering falls behind.
        self.frameRenderingSemaphore = dispatch_semaphore_create(1);
    }
    return self;
}

- (void)dealloc {
    // Release the cached Core Video texture with the GL context current.
    // Capture the ivar into a local first so the cleanup block does not
    // message self — property access from within dealloc is unsafe.
    CVOpenGLESTextureRef textureRef = _textureRef;
    _textureRef = NULL;
    if (textureRef) {
        runSynchronouslyOnVideoProcessingQueue(^{
            [GPUImageContext useImageProcessingContext];
            CFRelease(textureRef);
        });
    }
}

#pragma mark - Public

/// Entry point. Retains the buffer, hops to the GPUImage video processing
/// queue, and uploads it there. If the previous frame is still rendering,
/// the new frame is dropped to avoid an unbounded backlog.
- (void)processPixelBuffer:(CVPixelBufferRef)pixelBuffer time:(CMTime)time {
    if (dispatch_semaphore_wait(self.frameRenderingSemaphore, DISPATCH_TIME_NOW) != 0) {
        return; // previous frame still in flight — drop this one
    }

    // Keep the buffer alive across the async hop; balanced below.
    CVPixelBufferRetain(pixelBuffer);
    runAsynchronouslyOnVideoProcessingQueue(^{
        [self processVideoPixelBuffer:pixelBuffer time:time];

        CVPixelBufferRelease(pixelBuffer);
        dispatch_semaphore_signal(self.frameRenderingSemaphore);
    });
}

#pragma mark - Private

/// Wraps the pixel buffer in a GL texture via the shared Core Video texture
/// cache (zero-copy) and pushes the resulting framebuffer to every enabled
/// target. Must run on the video processing queue with no other frame in
/// flight (guaranteed by frameRenderingSemaphore).
- (void)processVideoPixelBuffer:(CVPixelBufferRef)pixelBuffer time:(CMTime)time
{
    NSAssert(CVPixelBufferGetPixelFormatType(pixelBuffer) == kCVPixelFormatType_32BGRA, @"%@: only kCVPixelFormatType_32BGRA is supported currently.",self);

    size_t bufferWidth = CVPixelBufferGetWidth(pixelBuffer);
    size_t bufferHeight = CVPixelBufferGetHeight(pixelBuffer);

    CVPixelBufferLockBaseAddress(pixelBuffer, 0);
    [GPUImageContext useImageProcessingContext];

    // Release the texture created for the previous frame and clear the
    // property right away: leaving the stale pointer in place would cause a
    // double-release in -dealloc if texture creation fails below.
    if (self.textureRef) {
        CFRelease(self.textureRef);
        self.textureRef = NULL;
        // Flush so the cache can recycle the released texture's resources.
        CVOpenGLESTextureCacheFlush([[GPUImageContext sharedImageProcessingContext] coreVideoTextureCache], 0);
    }

    CVOpenGLESTextureRef textureRef = NULL;
    CVReturn result = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault,
                                                                   [[GPUImageContext sharedImageProcessingContext] coreVideoTextureCache],
                                                                   pixelBuffer,
                                                                   NULL,
                                                                   GL_TEXTURE_2D,
                                                                   GL_RGBA,
                                                                   (GLsizei)bufferWidth,
                                                                   (GLsizei)bufferHeight,
                                                                   GL_BGRA,
                                                                   GL_UNSIGNED_BYTE,
                                                                   0,
                                                                   &textureRef);

    NSAssert(result == kCVReturnSuccess, @"CVOpenGLESTextureCacheCreateTextureFromImage error: %@",@(result));

    if (result == kCVReturnSuccess && textureRef) {
        // Ownership of the new texture moves to the property; released on the
        // next frame or in -dealloc.
        self.textureRef = textureRef;

        glActiveTexture(GL_TEXTURE4);
        glBindTexture(CVOpenGLESTextureGetTarget(textureRef), CVOpenGLESTextureGetName(textureRef));
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

        // Wrap the CV texture in a framebuffer without allocating new GL
        // storage — targets render directly from the pixel buffer's memory.
        outputFramebuffer = [[GPUImageFramebuffer alloc] initWithSize:CGSizeMake(bufferWidth, bufferHeight) overriddenTexture:CVOpenGLESTextureGetName(textureRef)];

        // Propagate size, framebuffer, and frame-ready to every enabled target.
        for (id<GPUImageInput> currentTarget in targets) {
            if ([currentTarget enabled]) {
                NSInteger indexOfObject = [targets indexOfObject:currentTarget];
                NSInteger targetTextureIndex = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];
                if (currentTarget != self.targetToIgnoreForUpdates) {
                    [currentTarget setInputSize:CGSizeMake(bufferWidth, bufferHeight) atIndex:targetTextureIndex];
                    [currentTarget setInputFramebuffer:outputFramebuffer atIndex:targetTextureIndex];
                    [currentTarget newFrameReadyAtTime:time atIndex:targetTextureIndex];
                } else {
                    [currentTarget setInputFramebuffer:outputFramebuffer atIndex:targetTextureIndex];
                }
            }
        }
    }

    CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
/*
 Alternative (copying) approach: upload the pixel data into a cached
 framebuffer with glTexImage2D instead of zero-copy texture-cache binding:

        CVPixelBufferLockBaseAddress(pixelBuffer, 0);

        int bytesPerRow = (int) CVPixelBufferGetBytesPerRow(pixelBuffer);
        outputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:CGSizeMake(bytesPerRow / 4, bufferHeight) onlyTexture:YES];
        [outputFramebuffer activateFramebuffer];

        glBindTexture(GL_TEXTURE_2D, [outputFramebuffer texture]);

        // Using the BGRA extension to pull in video frame data directly.
        // bytesPerRow / 4 accounts for row padding (a display glitch appears
        // otherwise when using the photo preset on the camera).
        glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, bytesPerRow / 4, bufferHeight, 0, GL_BGRA, GL_UNSIGNED_BYTE, CVPixelBufferGetBaseAddress(pixelBuffer));

        [self updateTargetsForVideoCameraUsingCacheTextureAtWidth:bytesPerRow / 4 height:bufferHeight time:time];

        CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
*/
}

/// Mirrors GPUImageVideoCamera's target update: distributes the current
/// outputFramebuffer to all enabled targets, releases the local hold on it,
/// then fires frame-ready. Only used by the glTexImage2D upload path above.
// NOTE(review): `outputRotation` is an ivar of GPUImageVideoCamera, not
// declared by GPUImageOutput — confirm it is actually in scope here.
- (void)updateTargetsForVideoCameraUsingCacheTextureAtWidth:(int)bufferWidth height:(int)bufferHeight time:(CMTime)currentTime
{
    // First, update all the framebuffers in the targets.
    for (id<GPUImageInput> currentTarget in targets)
    {
        if ([currentTarget enabled])
        {
            NSInteger indexOfObject = [targets indexOfObject:currentTarget];
            NSInteger textureIndexOfTarget = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];

            if (currentTarget != self.targetToIgnoreForUpdates)
            {
                [currentTarget setInputRotation:outputRotation atIndex:textureIndexOfTarget];
                [currentTarget setInputSize:CGSizeMake(bufferWidth, bufferHeight) atIndex:textureIndexOfTarget];
                // TODO: Replace optimization for monochrome output.
                [currentTarget setCurrentlyReceivingMonochromeInput:[currentTarget wantsMonochromeInput]];
                [currentTarget setInputFramebuffer:outputFramebuffer atIndex:textureIndexOfTarget];
            }
            else
            {
                [currentTarget setInputRotation:outputRotation atIndex:textureIndexOfTarget];
                [currentTarget setInputFramebuffer:outputFramebuffer atIndex:textureIndexOfTarget];
            }
        }
    }

    // Then release our hold on the local framebuffer to send it back to the
    // cache as soon as it's no longer needed.
    [outputFramebuffer unlock];
    outputFramebuffer = nil;

    // Finally, trigger rendering as needed.
    for (id<GPUImageInput> currentTarget in targets)
    {
        if ([currentTarget enabled])
        {
            NSInteger indexOfObject = [targets indexOfObject:currentTarget];
            NSInteger textureIndexOfTarget = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];

            if (currentTarget != self.targetToIgnoreForUpdates)
            {
                [currentTarget newFrameReadyAtTime:currentTime atIndex:textureIndexOfTarget];
            }
        }
    }
}

@end

注意点:输入的pixelBuffer要BGRA格式,如果是其他的请先转成BGRA

使用方式:

self.pixelBufferFilter = [[SNImagePixelBufferInput alloc] init];
self.outputFilter = [[GPUImageFilter alloc] init];
self.outputFilter.frameProcessingCompletionBlock = ^(GPUImageOutput *output, CMTime time) {
         //   output.framebufferForOutput.pixelBuffer就是通过滤镜处理后的pixelBuffer
 };
[self.pixelBufferFilter addTarget: self.outputFilter];

[self.pixelBufferFilter processPixelBuffer:frame time:time];

相关文章

  • iOS GPUImage blog收集

    iOS GPUImage blog收集 GPUImage详解(简书博客) GPUImage(五):五种类型输入源(...

  • GPUImage 基础学习GPUImagePicture 图片

    附一张GPUImage的结构图: GPUImage中的几个概念 output为输出源input为输入源filter...

  • iOS库之GPUImage

    iOS库之GPUImage GPUImageVideoCamera 相机视频输入源, GPUImageStillC...

  • GPUImage

    GPUImage的结构图: ⁃ output为输出源 ⁃ input为输入源 ⁃ filter为滤镜 共125个...

  • GPUImage源码分析

    GPUImage的简单使用 图片添加滤镜 创建一个输入源 GPUImagePicture 创建滤镜 GPUImag...

  • GPUImage-实时美颜滤镜

    GPUImage类介绍 GPUImageFilter就是用来接收源图像,通过自定义的顶点、片元着色器来渲染新的图像...

  • GPUImage 自定义滤镜

    GPUImage 自定义滤镜 (转载地址)http://www.itiger.me/?p=143 GPUImage...

  • Run Loop使用(二)

    这里所有的例子都只是为了说明如何使用,不去讨论代码的封装性 一、自定义输入源 自定义输入源需要定义如下内容: 你需...

  • GPUImage(五):五种类型输入源

    GPUImage作为iOS相当老牌的图片处理三方库已经有些日子了(2013年发布第一个版本),至今甚至感觉要离我们...

  • 配置Runloop的sources

    当系统的输入源不足以满足我们的需求的时候, 我们可以自定义输入源. 看了苹果的官方文档, 也没有知道为什么, 所以...

网友评论

      本文标题:GPUImage自定义输入源SNImagePixelBuffer

      本文链接:https://www.haomeiwen.com/subject/wkzdsdtx.html