iOS WebRTC: Implementing a Beauty Filter Effect


Author: 本本的开心牧场 | Published 2022-02-16 11:28

    I recently needed to add a beauty (face-smoothing) effect, so I surveyed many related articles and open-source projects. I hit plenty of pitfalls along the way; this post records the implementation steps in the hope that it helps other readers.
    There are two common approaches, both based on GPUImage:
    Approach 1: Replace WebRTC's native capture with GPUImageVideoCamera, take the frames after GPUImage has applied the beauty processing, and feed them to WebRTC's OnFrame method. (Relatively simple.)
    Approach 2: Take the raw video frames captured by WebRTC, hand them to GPUImage for processing, then pass the processed frames back to WebRTC.
    This article uses Approach 2. The pipeline is:
    captured i420 CVPixelBufferRef -> texture -> GPUImage processing -> BGRA CVPixelBufferRef -> i420 CVPixelBufferRef (converted back to a format WebRTC supports)

    Captured i420 CVPixelBufferRef -> texture -> GPUImage processing -> BGRA CVPixelBufferRef

    _capturer = [[RTCCameraVideoCapturer alloc] initWithDelegate:_filter];
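
    For context, a minimal sketch of how the capturer might be created and started; the device and format selection below is illustrative (only the initWithDelegate: line above comes from the article):

    // Hypothetical setup, assuming _filter is the custom delegate defined below
    AVCaptureDevice *device = [RTCCameraVideoCapturer captureDevices].firstObject;
    AVCaptureDeviceFormat *format =
        [RTCCameraVideoCapturer supportedFormatsForDevice:device].lastObject;
    [_capturer startCaptureWithDevice:device format:format fps:30];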
    

    Define a custom class that conforms to RTCVideoCapturerDelegate and implements the capturer:didCaptureVideoFrame: method:

    - (void)capturer:(RTCVideoCapturer *)capturer
        didCaptureVideoFrame:(RTCVideoFrame *)frame {
        // Core Foundation objects must be released manually,
        // otherwise memory usage balloons frame after frame
        CVPixelBufferRelease(_buffer);
        // Grab the raw pixel buffer and run it through the GPUImage pipeline below
        CVPixelBufferRef pixelBuffer = ((RTCCVPixelBuffer *)frame.buffer).pixelBuffer;
        _buffer = [self renderByGPUImage:pixelBuffer];
    }
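
    The article doesn't show the filter class's declaration. A minimal sketch, assuming the ivar and property names used in the snippets (the class and helper type names are hypothetical):

    @interface BeautyFilterDelegate : NSObject <RTCVideoCapturerDelegate> {
        CVPixelBufferRef _buffer;      // latest processed BGRA buffer from GPUImage
        CVPixelBufferRef _i420Buffer;  // reusable NV12 destination buffer for WebRTC
    }
    // Downstream consumer of the filtered frames (typically an RTCVideoSource)
    @property(nonatomic, weak) id<RTCVideoCapturerDelegate> output;
    // Hypothetical helper object exposing the conversion methods shown below
    @property(nonatomic, strong) PixelBufferHelper *pixelBufferHelper;
    @end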
    
    
    //  Process the captured data with GPUImage
    - (CVPixelBufferRef)renderByGPUImage:(CVPixelBufferRef)pixelBuffer {
        CVPixelBufferRetain(pixelBuffer);
        __block CVPixelBufferRef output = nil;
        runSynchronouslyOnVideoProcessingQueue(^{
            [GPUImageContext useImageProcessingContext];
            // 1. Convert the captured i420 CVPixelBufferRef into a texture
            GLuint textureID = [self.pixelBufferHelper convertYUVPixelBufferToTexture:pixelBuffer];
            CGSize size = CGSizeMake(CVPixelBufferGetWidth(pixelBuffer),
                                     CVPixelBufferGetHeight(pixelBuffer));
            // 2. Apply the GPUImage filter chain
            [GPUImageContext setActiveShaderProgram:nil];
            GPUImageTextureInput *textureInput = [[GPUImageTextureInput alloc] initWithTexture:textureID size:size];
            // First pass: face smoothing filter
            GPUImageBilateralFilter *bilateralFilter = [[GPUImageBilateralFilter alloc] init];
            bilateralFilter.distanceNormalizationFactor = self->_distanceNormalizationFactor;
            [textureInput addTarget:bilateralFilter];
            GPUImageTextureOutput *textureOutput = [[GPUImageTextureOutput alloc] init];
            [bilateralFilter addTarget:textureOutput];
            [textureInput processTextureWithFrameTime:kCMTimeZero];
            // 3. Convert the processed texture into a BGRA CVPixelBufferRef
            output = [self.pixelBufferHelper convertTextureToPixelBuffer:textureOutput.texture
                                                             textureSize:size];
            [textureOutput doneWithTexture];
            glDeleteTextures(1, &textureID);
        });
        CVPixelBufferRelease(pixelBuffer);
        
        return output;
    }
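
    A usage note: distanceNormalizationFactor controls the smoothing strength (in GPUImage, smaller values blur more aggressively, larger values preserve edges). Other filters can be chained the same way; for example, a hypothetical extra brightness pass between the bilateral filter and the texture output:

    GPUImageBrightnessFilter *brightnessFilter = [[GPUImageBrightnessFilter alloc] init];
    brightnessFilter.brightness = 0.05; // slight lift; valid range is -1.0 to 1.0
    [bilateralFilter addTarget:brightnessFilter];
    [brightnessFilter addTarget:textureOutput];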
    
    /// Convert a YUV-format pixel buffer into an OpenGL texture
    - (GLuint)convertYUVPixelBufferToTexture:(CVPixelBufferRef)pixelBuffer {
        if (!pixelBuffer) {
            return 0;
        }
        
        CGSize textureSize = CGSizeMake(CVPixelBufferGetWidth(pixelBuffer),
                                        CVPixelBufferGetHeight(pixelBuffer));
    
        [EAGLContext setCurrentContext:self.context];
        
        GLuint frameBuffer;
        GLuint textureID;
        
        // FBO
        glGenFramebuffers(1, &frameBuffer);
        glBindFramebuffer(GL_FRAMEBUFFER, frameBuffer);
        
        // texture
        glGenTextures(1, &textureID);
        glBindTexture(GL_TEXTURE_2D, textureID);
        glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, textureSize.width, textureSize.height, 0, GL_RGBA, GL_UNSIGNED_BYTE, NULL);
        
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
        
        
        glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, textureID, 0);
        
        glViewport(0, 0, textureSize.width, textureSize.height);
        
        // program
        glUseProgram(self.yuvConversionProgram);
        
        // texture
        CVOpenGLESTextureRef luminanceTextureRef = nil;
        CVOpenGLESTextureRef chrominanceTextureRef = nil;
    
        CVReturn status = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault,
                                                                       self.textureCache,
                                                                       pixelBuffer,
                                                                       nil,
                                                                       GL_TEXTURE_2D,
                                                                       GL_LUMINANCE,
                                                                       textureSize.width,
                                                                       textureSize.height,
                                                                       GL_LUMINANCE,
                                                                       GL_UNSIGNED_BYTE,
                                                                       0,
                                                                       &luminanceTextureRef);
        if (status != kCVReturnSuccess) {
            NSLog(@"Can't create luminanceTexture");
        }
        
        status = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault,
                                                              self.textureCache,
                                                              pixelBuffer,
                                                              nil,
                                                              GL_TEXTURE_2D,
                                                              GL_LUMINANCE_ALPHA,
                                                              textureSize.width / 2,
                                                              textureSize.height / 2,
                                                              GL_LUMINANCE_ALPHA,
                                                              GL_UNSIGNED_BYTE,
                                                              1,
                                                              &chrominanceTextureRef);
        
        if (status != kCVReturnSuccess) {
            NSLog(@"Can't create chrominanceTexture");
        }
        
        glActiveTexture(GL_TEXTURE0);
        glBindTexture(GL_TEXTURE_2D, CVOpenGLESTextureGetName(luminanceTextureRef));
        glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
        glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
        glUniform1i(glGetUniformLocation(self.yuvConversionProgram, "luminanceTexture"), 0);
        
        glActiveTexture(GL_TEXTURE1);
        glBindTexture(GL_TEXTURE_2D, CVOpenGLESTextureGetName(chrominanceTextureRef));
        glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
        glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
        glUniform1i(glGetUniformLocation(self.yuvConversionProgram, "chrominanceTexture"), 1);
        
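        // BT.601 full-range YUV -> RGB conversion matrix, in column-major order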
        GLfloat kXDXPreViewColorConversion601FullRange[] = {
            1.0,    1.0,    1.0,
            0.0,    -0.343, 1.765,
            1.4,    -0.711, 0.0,
        };
        
        GLuint yuvConversionMatrixUniform = glGetUniformLocation(self.yuvConversionProgram, "colorConversionMatrix");
        glUniformMatrix3fv(yuvConversionMatrixUniform, 1, GL_FALSE, kXDXPreViewColorConversion601FullRange);
        
        // VBO
        glBindBuffer(GL_ARRAY_BUFFER, self.VBO);
        
        GLuint positionSlot = glGetAttribLocation(self.yuvConversionProgram, "position");
        glEnableVertexAttribArray(positionSlot);
        glVertexAttribPointer(positionSlot, 3, GL_FLOAT, GL_FALSE, 5 * sizeof(float), (void*)0);
        
        GLuint textureSlot = glGetAttribLocation(self.yuvConversionProgram, "inputTextureCoordinate");
        glEnableVertexAttribArray(textureSlot);
        glVertexAttribPointer(textureSlot, 2, GL_FLOAT, GL_FALSE, 5 * sizeof(float), (void*)(3* sizeof(float)));
        
        glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
        
        glDeleteFramebuffers(1, &frameBuffer);
        
        glBindFramebuffer(GL_FRAMEBUFFER, 0);
        glBindBuffer(GL_ARRAY_BUFFER, 0);
        
        glFlush();
        
        self.luminanceTexture = luminanceTextureRef;
        self.chrominanceTexture = chrominanceTextureRef;
        
        CFRelease(luminanceTextureRef);
        CFRelease(chrominanceTextureRef);
        
        return textureID;
    }
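
    The article doesn't include the shader source for yuvConversionProgram. A plausible fragment shader, modeled on GPUImage's full-range luminance/luminance-alpha YUV conversion shader and matching the uniform names used above (an assumption, not the author's original source):

    static NSString * const kYUVConversionFragmentShader =
        @"varying highp vec2 textureCoordinate;\n"
        @"uniform sampler2D luminanceTexture;\n"
        @"uniform sampler2D chrominanceTexture;\n"
        @"uniform mediump mat3 colorConversionMatrix;\n"
        @"void main() {\n"
        @"    mediump vec3 yuv;\n"
        @"    yuv.x  = texture2D(luminanceTexture, textureCoordinate).r;\n"
        @"    yuv.yz = texture2D(chrominanceTexture, textureCoordinate).ra - vec2(0.5, 0.5);\n"
        @"    gl_FragColor = vec4(colorConversionMatrix * yuv, 1.0);\n"
        @"}";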
    
    
    // Convert a texture into a CVPixelBufferRef
    - (CVPixelBufferRef)convertTextureToPixelBuffer:(GLuint)texture
                                        textureSize:(CGSize)textureSize {
        [EAGLContext setCurrentContext:self.context];
        
        CVPixelBufferRef pixelBuffer = [self createPixelBufferWithSize:textureSize];
        GLuint targetTextureID = [self convertRGBPixelBufferToTexture:pixelBuffer];
        
        GLuint frameBuffer;
        
        // FBO
        glGenFramebuffers(1, &frameBuffer);
        glBindFramebuffer(GL_FRAMEBUFFER, frameBuffer);
        
        // texture
        glBindTexture(GL_TEXTURE_2D, targetTextureID);
        glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, textureSize.width, textureSize.height, 0, GL_RGBA, GL_UNSIGNED_BYTE, NULL);
        
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
        
        glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, targetTextureID, 0);
        
        glViewport(0, 0, textureSize.width, textureSize.height);
        
        // program
        glUseProgram(self.normalProgram);
        
        // texture
        glActiveTexture(GL_TEXTURE0);
        glBindTexture(GL_TEXTURE_2D, texture);
        glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
        glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
        glUniform1i(glGetUniformLocation(self.normalProgram, "renderTexture"), 0);
        
        // VBO
        glBindBuffer(GL_ARRAY_BUFFER, self.VBO);
        
        GLuint positionSlot = glGetAttribLocation(self.normalProgram, "position");
        glEnableVertexAttribArray(positionSlot);
        glVertexAttribPointer(positionSlot, 3, GL_FLOAT, GL_FALSE, 5 * sizeof(float), (void*)0);
        
        GLuint textureSlot = glGetAttribLocation(self.normalProgram, "inputTextureCoordinate");
        glEnableVertexAttribArray(textureSlot);
        glVertexAttribPointer(textureSlot, 2, GL_FLOAT, GL_FALSE, 5 * sizeof(float), (void*)(3* sizeof(float)));
        
        glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
        
        glDeleteFramebuffers(1, &frameBuffer);
        
        glBindFramebuffer(GL_FRAMEBUFFER, 0);
        glBindBuffer(GL_ARRAY_BUFFER, 0);
        
        glFlush();
        
        return pixelBuffer;
    }
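
    createPixelBufferWithSize: is called above but not shown in the article. A minimal sketch: create an IOSurface-backed BGRA buffer so the OpenGL ES texture cache can wrap it (the attribute choices are assumptions):

    - (CVPixelBufferRef)createPixelBufferWithSize:(CGSize)size {
        NSDictionary *attributes = @{
            (id)kCVPixelBufferIOSurfacePropertiesKey : @{},
            (id)kCVPixelBufferOpenGLESCompatibilityKey : @YES,
        };
        CVPixelBufferRef pixelBuffer = NULL;
        CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault,
                                              (size_t)size.width,
                                              (size_t)size.height,
                                              kCVPixelFormatType_32BGRA,
                                              (__bridge CFDictionaryRef)attributes,
                                              &pixelBuffer);
        if (status != kCVReturnSuccess) {
            NSLog(@"Can't create pixelBuffer");
            return NULL;
        }
        return pixelBuffer;
    }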
    
    // Convert a BGRA-format pixel buffer into a texture
    - (GLuint)convertRGBPixelBufferToTexture:(CVPixelBufferRef)pixelBuffer {
        if (!pixelBuffer) {
            return 0;
        }
        
        CGSize textureSize = CGSizeMake(CVPixelBufferGetWidth(pixelBuffer),
                                        CVPixelBufferGetHeight(pixelBuffer));
        CVOpenGLESTextureRef texture = nil;
        
        CVReturn status = CVOpenGLESTextureCacheCreateTextureFromImage(nil,
                                                                       self.textureCache,
                                                                       pixelBuffer,
                                                                       nil,
                                                                       GL_TEXTURE_2D,
                                                                       GL_RGBA,
                                                                       textureSize.width,
                                                                       textureSize.height,
                                                                       GL_BGRA,
                                                                       GL_UNSIGNED_BYTE,
                                                                       0,
                                                                       &texture);
        
        if (status != kCVReturnSuccess) {
            NSLog(@"Can't create texture");
        }
        
        self.renderTexture = texture;
        CFRelease(texture);
        return CVOpenGLESTextureGetName(texture);
    }
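
    Two more pieces the article leaves out: the fragment shader behind normalProgram (a plain passthrough that samples renderTexture) and the layout of the shared VBO, which must match the attribute pointers above (four vertices of 3 position floats + 2 texture-coordinate floats, drawn as a triangle strip). Both are sketches under those assumptions:

    static NSString * const kPassthroughFragmentShader =
        @"varying highp vec2 textureCoordinate;\n"
        @"uniform sampler2D renderTexture;\n"
        @"void main() {\n"
        @"    gl_FragColor = texture2D(renderTexture, textureCoordinate);\n"
        @"}";

    // 4 vertices x (3 position floats + 2 texcoord floats), drawn as GL_TRIANGLE_STRIP
    static const GLfloat kQuadVertices[] = {
        -1.0f, -1.0f, 0.0f,   0.0f, 0.0f,
         1.0f, -1.0f, 0.0f,   1.0f, 0.0f,
        -1.0f,  1.0f, 0.0f,   0.0f, 1.0f,
         1.0f,  1.0f, 0.0f,   1.0f, 1.0f,
    };
    glGenBuffers(1, &_VBO);
    glBindBuffer(GL_ARRAY_BUFFER, _VBO);
    glBufferData(GL_ARRAY_BUFFER, sizeof(kQuadVertices), kQuadVertices, GL_STATIC_DRAW);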
    

    BGRA CVPixelBufferRef -> i420 CVPixelBufferRef

    This step requires the libyuv library. Note that although the function below is named ..._to_I420_..., its final I420ToNV12 call actually writes the destination buffer as bi-planar NV12, which RTCCVPixelBuffer accepts directly.
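
    The destination buffer (_i420Buffer in the final snippet) must be created as a bi-planar NV12 CVPixelBufferRef before calling the function below. The article doesn't show that step; a minimal sketch, with the attribute choices as assumptions:

    NSDictionary *attributes = @{(id)kCVPixelBufferIOSurfacePropertiesKey : @{}};
    CVPixelBufferCreate(kCFAllocatorDefault,
                        (size_t)targetSize.width,
                        (size_t)targetSize.height,
                        kCVPixelFormatType_420YpCbCr8BiPlanarFullRange,
                        (__bridge CFDictionaryRef)attributes,
                        &_i420Buffer);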

    int transfer_32bgra_to_I420_ScaleToSize(CVPixelBufferRef source_pixelBuffer,CGSize targetSize,CVPixelBufferRef dst_pixelBuffer) {
        
        CVPixelBufferLockBaseAddress(source_pixelBuffer, 0);
        CVPixelBufferLockBaseAddress(dst_pixelBuffer, 0);
        
        //source size
        const int width = (int)CVPixelBufferGetWidth(source_pixelBuffer);   // image width in pixels
        const int height = (int)CVPixelBufferGetHeight(source_pixelBuffer); // image height in pixels
        uint8_t *bgraBuffer = (uint8_t *)CVPixelBufferGetBaseAddress(source_pixelBuffer);
        // Use the real bytes-per-row: BGRA buffers are often row-padded beyond width * 4
        const int bgraStride = (int)CVPixelBufferGetBytesPerRow(source_pixelBuffer);
        
        int yuvBufSize = width * height * 3 / 2;
        uint8_t *yuvBuf = (uint8_t *)malloc(yuvBufSize);
        
        //source stride
        const int Dst_Stride_Y = width;
        const int uv_stride = (width + 1) / 2;
        
        //source length
        const int y_length = width * height;
        int uv_length = uv_stride * ((height + 1) / 2);
        
        //source data
        uint8_t *Y_data_Dst = yuvBuf;
        uint8_t *U_data_Dst = yuvBuf + y_length;
        uint8_t *V_data_Dst = U_data_Dst + uv_length;
        
        // The buffer's in-memory byte order is B,G,R,A, which libyuv calls ARGB
        // (libyuv names formats by little-endian word order), so ARGBToI420 is correct here
        ARGBToI420(bgraBuffer, bgraStride,
                   Y_data_Dst, Dst_Stride_Y,
                   U_data_Dst, uv_stride,
                   V_data_Dst, uv_stride,
                   width, height);
    
        //scale size
        int scale_yuvBufSize = (int)(targetSize.width * targetSize.height * 3 / 2);
        uint8_t *scale_yuvBuf = (uint8_t *)malloc(scale_yuvBufSize);
        
        //scale stride
        const int scale_Dst_Stride_Y = (int)targetSize.width;
        const int scale_uv_stride = ((int)targetSize.width + 1) / 2;
        
        //scale length
        const int scale_y_length = (int)(targetSize.width * targetSize.height);
        int scale_uv_length = scale_uv_stride * (((int)targetSize.height + 1) / 2);
        
        //scale data
        uint8_t *scale_Y_data_Dst = scale_yuvBuf;
        uint8_t *scale_U_data_Dst = scale_yuvBuf + scale_y_length;
        uint8_t *scale_V_data_Dst = scale_U_data_Dst + scale_uv_length;
        
        I420Scale(Y_data_Dst, Dst_Stride_Y,
                  U_data_Dst, uv_stride,
                  V_data_Dst, uv_stride,
                  width, height,
                  scale_Y_data_Dst, scale_Dst_Stride_Y,
                  scale_U_data_Dst, scale_uv_stride,
                  scale_V_data_Dst, scale_uv_stride,
                  (int)targetSize.width, (int)targetSize.height,
                  kFilterNone);
        
        //destination planes: NV12 has one Y plane and one interleaved UV plane
        uint8_t *final_y_buffer = (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(dst_pixelBuffer, 0);
        uint8_t *final_uv_buffer = (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(dst_pixelBuffer, 1);
        // Use the destination buffer's real per-plane strides in case its rows are padded
        const int dst_stride_y = (int)CVPixelBufferGetBytesPerRowOfPlane(dst_pixelBuffer, 0);
        const int dst_stride_uv = (int)CVPixelBufferGetBytesPerRowOfPlane(dst_pixelBuffer, 1);
        
        I420ToNV12(scale_Y_data_Dst, scale_Dst_Stride_Y,
                   scale_U_data_Dst, scale_uv_stride,
                   scale_V_data_Dst, scale_uv_stride,
                   final_y_buffer, dst_stride_y,
                   final_uv_buffer, dst_stride_uv, // U and V are interleaved, so this is roughly twice the U stride
                   (int)targetSize.width, (int)targetSize.height);
        
        CVPixelBufferUnlockBaseAddress(source_pixelBuffer, 0);
        CVPixelBufferUnlockBaseAddress(dst_pixelBuffer, 0);
        
        free(yuvBuf);
        free(scale_yuvBuf);
        
        return yuvBufSize;
    }
    
    

    Finally, back in capturer:didCaptureVideoFrame:, convert the processed BGRA buffer and forward the result downstream (size here is the target output size):

        transfer_32bgra_to_I420_ScaleToSize(_buffer, size, _i420Buffer);
    
        RTCCVPixelBuffer *rtcPixelBuffer =
            [[RTCCVPixelBuffer alloc] initWithPixelBuffer:_i420Buffer];
        RTCVideoFrame *filteredFrame =
            [[RTCVideoFrame alloc] initWithBuffer:rtcPixelBuffer
                                         rotation:frame.rotation
                                      timeStampNs:frame.timeStampNs];
        [_output capturer:capturer didCaptureVideoFrame:filteredFrame];
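
    For completeness, _output above is the downstream RTCVideoCapturerDelegate that would otherwise have received the camera frames directly. A typical wiring, sketched under the assumption that the filtered frames feed an RTCVideoSource obtained from the peer connection factory:

    RTCVideoSource *videoSource = [factory videoSource];
    _filter.output = videoSource; // RTCVideoSource itself conforms to RTCVideoCapturerDelegate
    RTCVideoTrack *videoTrack = [factory videoTrackWithSource:videoSource trackId:@"video0"];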
    

    Summary

    That's all it takes to add a beauty effect to WebRTC video; the same approach works for adding any other filter.
    References
    IOS技术分享| 在iOS WebRTC 中添加美颜滤镜
    iOS WebRTC 杂谈之 视频采集添加美颜特效
    WebRTC IOS视频硬编码流程及其中传递的CVPixelBufferRef
