IJKPlayer: Real-Time Recording of Network Streams

Author: 萤火虫_629e | Published 2021-12-08 11:12

1. Overview

While playing a network stream, IJKPlayer is extended with an mp4 recording feature. Because the source is a live stream, recording has to proceed frame by frame. In principle the packets could be dumped to a file before the player decodes them, but the format of the incoming stream is not known in advance, so a file written that way might not be usable. The approach taken here is therefore to take the decoded frames, re-encode them, and write them to the file frame by frame.

2. High-Level Design

3. Audio/Video Data Callbacks

To implement this feature, the key code changes are in the ffplay layer of ijkplayer.
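The snippets in the two steps below hand data to the upper layer through two callback function pointers, s_pixel_buff_callback and s_pcm_callback, whose declaration and registration are not shown in the original post. Here is a minimal sketch; the typedef and setter names are my own assumptions, and the parameter lists are inferred from the call sites in steps 1 and 2:

    // Sketch only: callback pointers used by the recording hooks below.
    // The typedef and setter names are hypothetical; the parameters mirror
    // the calls made in steps 1 and 2.
    #include <stdint.h>
    #include <CoreVideo/CoreVideo.h>

    typedef void (*ijk_pixel_buff_callback)(void *opaque,
                                            CVPixelBufferRef pixelBuffer,
                                            int64_t pts_us);   // pts in microseconds

    typedef void (*ijk_pcm_callback)(void *opaque, double pts, int format,
                                     int nb_samples, int channels, int sample_rate,
                                     uint64_t channel_layout, int planar,
                                     int size, uint8_t *data);

    static ijk_pixel_buff_callback s_pixel_buff_callback = NULL;
    static ijk_pcm_callback        s_pcm_callback        = NULL;

    // Called from the iOS layer (e.g. when recording starts) to install the hooks.
    void ffp_set_pixel_buffer_callback(ijk_pixel_buff_callback cb) { s_pixel_buff_callback = cb; }
    void ffp_set_pcm_callback(ijk_pcm_callback cb)                 { s_pcm_callback = cb; }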

1. In the video decoding path, pass the raw video data and its pts out through the callback. In queue_picture(), search for SDL_VoutFillFrameYUVOverlay and add the code marked "new code" right below it:

    // FIXME: set swscale options
    if (SDL_VoutFillFrameYUVOverlay(vp->bmp, src_frame) < 0) {
        av_log(NULL, AV_LOG_FATAL, "Cannot initialize the conversion context\n");
        exit(1);
    }

    // new code: hand the decoded CVPixelBuffer and its pts to the upper layer
    if (ffp->videotoolbox) {
        ffp_pixelbuffer_lock(ffp);
        ffp->szt_pixelbuffer = SDL_VoutOverlayVideoToolBox_GetCVPixelBufferRef(vp->bmp);
        if (s_pixel_buff_callback)
            // pts is converted from seconds to microseconds
            s_pixel_buff_callback(ffp->inject_opaque, ffp->szt_pixelbuffer, vp->pts * 1000 * 1000);
        ffp_pixelbuffer_unlock(ffp);
        if (!ffp->szt_pixelbuffer) {
            ALOGE("nil pixelBuffer in overlay\n");
        }
    }
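The ffp_pixelbuffer_lock/unlock and ffp_pcm_lock/unlock helpers used here and in step 2 are not shown in the post. A minimal sketch, assuming they are plain ijksdl mutex wrappers and that szt_pixel_mutex / szt_pcm_mutex are hypothetical fields added to struct FFPlayer and created once with SDL_CreateMutex():

    // Assumed lock helpers; the mutex field names are illustrative, not from the post.
    static void ffp_pixelbuffer_lock(FFPlayer *ffp)   { SDL_LockMutex(ffp->szt_pixel_mutex); }
    static void ffp_pixelbuffer_unlock(FFPlayer *ffp) { SDL_UnlockMutex(ffp->szt_pixel_mutex); }

    static void ffp_pcm_lock(FFPlayer *ffp)           { SDL_LockMutex(ffp->szt_pcm_mutex); }
    static void ffp_pcm_unlock(FFPlayer *ffp)         { SDL_UnlockMutex(ffp->szt_pcm_mutex); }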

2. After audio decoding, deliver the PCM data through a callback. The audio FFmpeg hands back here is 32-bit float, which in my tests could not be encoded properly on iOS, so the samples are first converted to 16-bit signed integers. In int audio_thread(void *arg), add the following inside the do {} while loop:

    if (is->swr_ctx) {
        uint8_t *targetData[1];
        // Fixed-size buffer; 5760*2 matches the original, but a hard-coded
        // length is fragile -- see the sizing sketch after this block.
        int len = 5760 * 2;
        targetData[0] = (uint8_t *)malloc(len);

        int size = audio_swr_resampling_audio(is->swr_ctx, context, frame, targetData);

        tb = (AVRational){1, frame->sample_rate};
        ffp_pcm_lock(ffp);
        double pts = (frame->pts == AV_NOPTS_VALUE) ? NAN : frame->pts * av_q2d(tb);
        if (s_pcm_callback) {
            s_pcm_callback(ffp->inject_opaque, pts, frame->format, frame->nb_samples,
                           frame->channels, frame->sample_rate, frame->channel_layout,
                           1, size, targetData[0]);
        }
        free(targetData[0]);
        ffp_pcm_unlock(ffp);
    }
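The fixed 5760 * 2 buffer above is fragile, as the original comment notes. One way to size the buffer from the frame itself (my suggestion, not part of the original post) is to combine swr_get_out_samples() with av_samples_get_buffer_size():

    // Upper bound on output samples for this input, then the byte size for packed S16.
    int out_samples = swr_get_out_samples(is->swr_ctx, frame->nb_samples);
    int len = av_samples_get_buffer_size(NULL, frame->channels, out_samples,
                                         AV_SAMPLE_FMT_S16, 1);
    if (len > 0)
        targetData[0] = (uint8_t *)malloc(len);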

Before decoding starts, initialize the audio format conversion:

    AVCodecContext *context = NULL;
    // Find the audio stream and take its codec context for the resampler setup.
    for (int i = 0; i < is->ic->nb_streams; i++) {
        AVStream *in_stream = is->ic->streams[i];
        if (in_stream->codec->codec_type == AVMEDIA_TYPE_AUDIO) {
            context = in_stream->codec;
        }
    }
    audio_swr_resampling_audio_init(&is->swr_ctx, context);
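Note that AVStream->codec is deprecated in recent FFmpeg releases. A sketch of the same lookup via codecpar (assuming a build where avcodec_parameters_to_context() is available; not part of the original post):

    AVCodecContext *context = NULL;
    for (int i = 0; i < is->ic->nb_streams; i++) {
        AVStream *in_stream = is->ic->streams[i];
        if (in_stream->codecpar->codec_type == AVMEDIA_TYPE_AUDIO) {
            // Build a standalone codec context from the stream parameters.
            context = avcodec_alloc_context3(NULL);
            if (context && avcodec_parameters_to_context(context, in_stream->codecpar) < 0) {
                avcodec_free_context(&context);
                context = NULL;
            }
            break;
        }
    }
    if (context)
        audio_swr_resampling_audio_init(&is->swr_ctx, context);
    // Keep this context around (or free it with avcodec_free_context()) only once
    // it is no longer passed to audio_swr_resampling_audio() in the decode loop.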

Code for converting 32-bit float audio to 16-bit signed integer audio:

void audio_swr_resampling_audio_destory(SwrContext **swr_ctx) {
    if (*swr_ctx) {
        swr_free(swr_ctx);
        *swr_ctx = NULL;
    }
}

// Set up a swr context that converts the decoder's output format to
// packed 16-bit signed integer at the same sample rate and channel layout.
void audio_swr_resampling_audio_init(SwrContext **swr_ctx, AVCodecContext *codec) {
    // Integer sample formats do not need conversion for this use case.
    if (codec->sample_fmt == AV_SAMPLE_FMT_S16 ||
        codec->sample_fmt == AV_SAMPLE_FMT_S32 ||
        codec->sample_fmt == AV_SAMPLE_FMT_U8) {
        av_log(NULL, AV_LOG_ERROR, "codec->sample_fmt:%d\n", codec->sample_fmt);
        if (*swr_ctx) {
            swr_free(swr_ctx);
            *swr_ctx = NULL;
        }
        return;
    }

    if (*swr_ctx) {
        swr_free(swr_ctx);
    }
    *swr_ctx = swr_alloc();
    if (!*swr_ctx) {
        av_log(NULL, AV_LOG_ERROR, "swr_alloc failed\n");
        return;
    }

    /* set options */
    av_opt_set_int(*swr_ctx, "in_channel_layout",  codec->channel_layout, 0);
    av_opt_set_int(*swr_ctx, "out_channel_layout", codec->channel_layout, 0);
    av_opt_set_int(*swr_ctx, "in_sample_rate",     codec->sample_rate, 0);
    av_opt_set_int(*swr_ctx, "out_sample_rate",    codec->sample_rate, 0);
    av_opt_set_sample_fmt(*swr_ctx, "in_sample_fmt",  codec->sample_fmt, 0);
    av_opt_set_sample_fmt(*swr_ctx, "out_sample_fmt", AV_SAMPLE_FMT_S16, 0);

    /* initialize the resampling context */
    int ret = 0;
    if ((ret = swr_init(*swr_ctx)) < 0) {
        av_log(NULL, AV_LOG_ERROR, "Failed to initialize the resampling context\n");
        if (*swr_ctx) {
            swr_free(swr_ctx);
            *swr_ctx = NULL;
        }
        return;
    }
}

// Convert one decoded frame to packed S16; returns the number of bytes
// written into targetData[0], or -1 on error.
int audio_swr_resampling_audio(SwrContext *swr_ctx, AVCodecContext *codec,
                               AVFrame *audioFrame, uint8_t **targetData) {
    uint8_t **extendedData = audioFrame->data;
    int len = swr_convert(swr_ctx, targetData, audioFrame->nb_samples,
                          (const uint8_t **)extendedData, audioFrame->nb_samples);
    if (len < 0) {
        av_log(NULL, AV_LOG_ERROR, "error swr_convert\n");
        return -1;
    }
    int dst_bufsize = len * codec->channels * av_get_bytes_per_sample(AV_SAMPLE_FMT_S16);
    return dst_bufsize;
}

At this point, the raw audio and video data are being delivered to the upper layer.

4. Frame-by-Frame Encoding and Writing

The video arrives in the upper layer already as a CVPixelBufferRef, so no further conversion is needed; it can be written to the file directly through AVAssetWriter (via its pixel buffer adaptor):

    CMTime tm = CMTimeMake(pts, 1000 * 1000);   // pts is in microseconds
    BOOL success = [self.pixelBuffAdptor appendPixelBuffer:pixelBuff withPresentationTime:tm];
    NSLog(@"___%d", success);

The audio, however, arrives in the upper layer as a raw uint8_t * buffer, so one extra step is needed to turn it into a CMSampleBufferRef that iOS APIs can consume:

    IJKWeakHolder *weakHolder = (__bridge IJKWeakHolder *)opaque;
    TCLIJKPlayer *mpc = weakHolder.object;
    if (!mpc) {
        return 0;
    }

    // Accumulate the leftover bytes from the previous callback in front of the new data.
    int total = data_lineSize + mpc->lastCount;
    uint8_t *targetData = malloc(total);
    memcpy(targetData, mpc->lastData, mpc->lastCount);
    memcpy(targetData + mpc->lastCount, data, data_lineSize);

    int len = 2048;   // fixed chunk size handed to the writer
    if (total < len) {
        // Not enough for a full chunk yet; keep everything for the next callback.
        memcpy(mpc->lastData + mpc->lastCount, data, data_lineSize);
        mpc->lastCount = total;
        free(targetData);
        return 0;
    }

    for (int i = 0; i <= total / len; i++) {
        if ((i + 1) * len > total) {
            // Trailing partial chunk: stash it for the next callback.
            mpc->lastCount = total - i * len;
            memcpy(mpc->lastData, targetData + i * len, mpc->lastCount);
        } else {
            uint8_t *dst = malloc(len);
            memcpy(dst, targetData + i * len, len);
            CMSampleBufferRef buffer = createAudioSample(dst, len, pts, channels, sample_rate);
            free(dst);
            if (buffer) {
                if (mpc.delegate && [mpc.delegate respondsToSelector:@selector(onAudioSampleBuffer:)]) {
                    id buffRef = (__bridge id _Nullable)buffer;   // block capture keeps the buffer alive
                    dispatch_async(dispatch_get_main_queue(), ^{
                        [mpc.delegate onAudioSampleBuffer:(__bridge CMSampleBufferRef)(buffRef)];
                    });
                }
                CFRelease(buffer);
            }
        }
    }
    free(targetData);

static CMSampleBufferRef createAudioSample(void *audioData, UInt32 len, double pts, int channels, int sample_rate)
{
    int mDataByteSize = len;

    AudioBufferList audioBufferList;
    audioBufferList.mNumberBuffers = 1;
    audioBufferList.mBuffers[0].mNumberChannels = channels;
    audioBufferList.mBuffers[0].mDataByteSize   = mDataByteSize;
    audioBufferList.mBuffers[0].mData           = audioData;

    // Describe the data as packed, interleaved signed 16-bit PCM.
    AudioStreamBasicDescription asbd;
    asbd.mSampleRate       = sample_rate;
    asbd.mFormatID         = kAudioFormatLinearPCM;
    asbd.mFormatFlags      = kLinearPCMFormatFlagIsPacked | kLinearPCMFormatFlagIsSignedInteger;
    asbd.mChannelsPerFrame = channels;
    asbd.mBitsPerChannel   = 16;
    asbd.mFramesPerPacket  = 1;
    asbd.mBytesPerFrame    = asbd.mBitsPerChannel / 8 * asbd.mChannelsPerFrame;
    asbd.mBytesPerPacket   = asbd.mBytesPerFrame * asbd.mFramesPerPacket;
    asbd.mReserved         = 0;

    // Note: the pts argument is not applied here; the presentation time stays at kCMTimeZero, as in the original.
    CMSampleTimingInfo timing = { CMTimeMake(1, sample_rate), kCMTimeZero, kCMTimeInvalid };

    CMFormatDescriptionRef format = NULL;
    OSStatus error = CMAudioFormatDescriptionCreate(kCFAllocatorDefault, &asbd, 0, NULL, 0, NULL, NULL, &format);

    CMSampleBufferRef buff = NULL;
    error = CMSampleBufferCreate(kCFAllocatorDefault, NULL, false, NULL, NULL, format,
                                 (CMItemCount)mDataByteSize / (2 * channels), 1, &timing, 0, NULL, &buff);
    CFRelease(format);
    if (error) {
        NSLog(@"CMSampleBufferCreate returned error: %ld", (long)error);
        return NULL;
    }

    // Copies the AudioBufferList contents into the sample buffer's block buffer.
    error = CMSampleBufferSetDataBufferFromAudioBufferList(buff, kCFAllocatorDefault, kCFAllocatorDefault, 0, &audioBufferList);
    if (error) {
        NSLog(@"CMSampleBufferSetDataBufferFromAudioBufferList returned error: %ld", (long)error);
        CFRelease(buff);
        return NULL;
    }
    return buff;
}

Then append the sample buffers to the mp4 through AVAssetWriter:

    BOOL success = [self.assetWriterAudioInput appendSampleBuffer:sampleBuffer];
    NSLog(@"-------%d", success);
    if (!success) {
        @synchronized(self) {
            [self stopWrite:nil];
            [self destroyWrite];
        }
    }
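The configuration of self.assetWriterAudioInput is likewise not shown in the post. Since an mp4 container cannot carry raw LPCM, one reasonable setup (a sketch; the 44.1 kHz stereo values are assumptions and should match what the PCM callback reports) lets AVAssetWriter encode the appended PCM sample buffers to AAC:

    // Sketch: audio input that AAC-encodes the LPCM CMSampleBuffers appended above.
    AudioChannelLayout acl = {0};
    acl.mChannelLayoutTag = kAudioChannelLayoutTag_Stereo;
    NSDictionary *audioSettings = @{
        AVFormatIDKey         : @(kAudioFormatMPEG4AAC),
        AVSampleRateKey       : @(44100),
        AVNumberOfChannelsKey : @(2),
        AVChannelLayoutKey    : [NSData dataWithBytes:&acl length:sizeof(acl)]
    };
    self.assetWriterAudioInput =
        [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio
                                           outputSettings:audioSettings];
    self.assetWriterAudioInput.expectsMediaDataInRealTime = YES;
    [self.assetWriter addInput:self.assetWriterAudioInput];

When recording stops, marking both inputs finished with markAsFinished and calling finishWritingWithCompletionHandler: on the writer closes the mp4 file.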
