Enabling RTSP in ijkplayer, with recording and screenshot support

Author: lebonbill | Published 2017-12-01 17:55
     * author:lebonbill
     * E-mail:lebonbillwu@gmail.com
    

    The ijkplayer framework

    ijkplayer is Bilibili's open-source player framework for video playback and live streaming. It is based on FFmpeg and supports Android and iOS. Repository: https://github.com/Bilibili/ijkplayer.git

    Before building

    I first tried building with Cygwin and ran into all kinds of problems; building on Ubuntu is much easier. I built inside an Ubuntu VM running under VMware on Windows 8. First clone the ijkplayer sources with git, then configure the Android SDK and Android NDK environment variables:
    # add these lines to your ~/.bash_profile or ~/.profile
    # export ANDROID_SDK=<your sdk path>
    # export ANDROID_NDK=<your ndk path>
    

    Enabling RTSP support

    RTSP is disabled by default. To enable it, add the corresponding protocol, demuxer and decoder flags to module-lite.sh, which lives in the config directory:
    export COMMON_FF_CFG_FLAGS="$COMMON_FF_CFG_FLAGS --enable-protocol=rtp"
    export COMMON_FF_CFG_FLAGS="$COMMON_FF_CFG_FLAGS --enable-demuxer=rtsp"
    export COMMON_FF_CFG_FLAGS="$COMMON_FF_CFG_FLAGS --enable-decoder=mjpeg"
    export COMMON_FF_CFG_FLAGS="$COMMON_FF_CFG_FLAGS --enable-demuxer=mjpeg"
    
    Then make module-lite.sh the active module configuration:
    cd config
    rm module.sh
    ln -s module-lite.sh module.sh
    

    Adding recording and screenshot support

    If you want recording and screenshots, read this part before building; otherwise skip it. Because ijkplayer sits on top of FFmpeg, adding startRecord (start recording), stopRecord (stop recording) and the screenshot feature all require changes to the native player sources. Open ff_ffplay.h under ijkplayer-android/ijkmedia/ijkplayer and declare the following functions:
    int       ffp_start_record(FFPlayer *ffp, const char *file_name);
    int       ffp_stop_record(FFPlayer *ffp);
    int       ffp_record_file(FFPlayer *ffp, AVPacket *packet);
    void      ffp_get_current_frame_l(FFPlayer *ffp, uint8_t *frame_buf);
    
    Then implement them in ff_ffplay.c.
    * Start recording: file_name is the path of the output file
    int ffp_start_record(FFPlayer *ffp, const char *file_name)
    {
        assert(ffp);
        
        VideoState *is = ffp->is;
        
        ffp->m_ofmt_ctx = NULL;
        ffp->m_ofmt = NULL;
        ffp->is_record = 0;
        ffp->record_error = 0;
        
        if (!file_name || !strlen(file_name)) { // no output path given
            av_log(ffp, AV_LOG_ERROR, "filename is invalid");
            goto end;
        }
        
        if (!is || !is->ic || is->paused || is->abort_request) { // no input context, or playback is paused/aborted
            av_log(ffp, AV_LOG_ERROR, "is,is->ic,is->paused is invalid");
            goto end;
        }
        
        if (ffp->is_record) { // already recording
            av_log(ffp, AV_LOG_ERROR, "recording has started");
            goto end;
        }
        
        // allocate an AVFormatContext for the output (MP4 muxer)
        avformat_alloc_output_context2(&ffp->m_ofmt_ctx, NULL, "mp4", file_name);
        if (!ffp->m_ofmt_ctx) {
            av_log(ffp, AV_LOG_ERROR, "Could not create output context filename is %s\n", file_name);
            goto end;
        }
        ffp->m_ofmt = ffp->m_ofmt_ctx->oformat;
        
        for (int i = 0; i < is->ic->nb_streams; i++) {
            // create an output stream for each input stream
            AVStream *in_stream = is->ic->streams[i];
            AVStream *out_stream = avformat_new_stream(ffp->m_ofmt_ctx, in_stream->codec->codec);
            if (!out_stream) {
                av_log(ffp, AV_LOG_ERROR, "Failed allocating output stream\n");
                goto end;
            }
            
            // copy the input stream's codec parameters to the output stream's AVCodecContext
            av_log(ffp, AV_LOG_DEBUG, "in_stream->codec: %p\n", in_stream->codec);
            if (avcodec_copy_context(out_stream->codec, in_stream->codec) < 0) {
                av_log(ffp, AV_LOG_ERROR, "Failed to copy context from input to output stream codec context\n");
                goto end;
            }
            
            out_stream->codec->codec_tag = 0;
            if (ffp->m_ofmt_ctx->oformat->flags & AVFMT_GLOBALHEADER) {
                out_stream->codec->flags |= CODEC_FLAG_GLOBAL_HEADER;
            }
        }
        
        av_dump_format(ffp->m_ofmt_ctx, 0, file_name, 1);
        
        // open the output file
        if (!(ffp->m_ofmt->flags & AVFMT_NOFILE)) {
            if (avio_open(&ffp->m_ofmt_ctx->pb, file_name, AVIO_FLAG_WRITE) < 0) {
                av_log(ffp, AV_LOG_ERROR, "Could not open output file '%s'", file_name);
                goto end;
            }
        }
        
        // write the container header
        if (avformat_write_header(ffp->m_ofmt_ctx, NULL) < 0) {
            av_log(ffp, AV_LOG_ERROR, "Error occurred when opening output file\n");
            goto end;
        }
        
        ffp->is_record = 1;
        ffp->record_error = 0;
        pthread_mutex_init(&ffp->record_mutex, NULL);
        
        return 0;
    end:
        ffp->record_error = 1;
        return -1;
    }
    
    * Stop recording: write the trailer and close the output file
    
    int ffp_stop_record(FFPlayer *ffp)
    {
        assert(ffp);
        if (ffp->is_record) {
            ffp->is_record = 0;
            pthread_mutex_lock(&ffp->record_mutex);
            if (ffp->m_ofmt_ctx != NULL) {
                av_write_trailer(ffp->m_ofmt_ctx);
                if (ffp->m_ofmt_ctx && !(ffp->m_ofmt->flags & AVFMT_NOFILE)) {
                    avio_close(ffp->m_ofmt_ctx->pb);
                }
                avformat_free_context(ffp->m_ofmt_ctx);
                ffp->m_ofmt_ctx = NULL;
                ffp->is_first = 0;
            }
            pthread_mutex_unlock(&ffp->record_mutex);
            pthread_mutex_destroy(&ffp->record_mutex);
            av_log(ffp, AV_LOG_DEBUG, "stopRecord ok\n");
        } else {
            av_log(ffp, AV_LOG_ERROR, "don't need stopRecord\n");
        }
        return 0;
    }
    
    * Write each demuxed packet to the recording file (ffp_record_file)
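    The body of ffp_record_file is not shown in the post, so the following is only a minimal sketch of what such a packet-writing function could look like, built around the fields declared below in ff_ffplay_def.h (m_ofmt_ctx, record_mutex, is_first, start_pts, start_dts). Treat it as an illustration, not the author's original code.

    int ffp_record_file(FFPlayer *ffp, AVPacket *packet)
    {
        assert(ffp);
        VideoState *is = ffp->is;
        int ret = 0;

        if (!ffp->is_record || !ffp->m_ofmt_ctx || !packet)
            return 0;

        pthread_mutex_lock(&ffp->record_mutex);

        if (!ffp->is_first) {
            // remember the timestamps of the first recorded packet so the output
            // file starts at zero (a robust version would also guard against
            // AV_NOPTS_VALUE)
            ffp->is_first = 1;
            ffp->start_pts = packet->pts;
            ffp->start_dts = packet->dts;
        }

        AVPacket pkt;
        if (av_packet_ref(&pkt, packet) >= 0) {
            AVStream *in_stream  = is->ic->streams[pkt.stream_index];
            AVStream *out_stream = ffp->m_ofmt_ctx->streams[pkt.stream_index];

            // shift to a zero-based timeline, then rescale from the input
            // stream's time base to the output stream's time base
            pkt.pts = av_rescale_q_rnd(pkt.pts - ffp->start_pts, in_stream->time_base,
                                       out_stream->time_base, AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX);
            pkt.dts = av_rescale_q_rnd(pkt.dts - ffp->start_dts, in_stream->time_base,
                                       out_stream->time_base, AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX);
            pkt.duration = av_rescale_q(pkt.duration, in_stream->time_base, out_stream->time_base);
            pkt.pos = -1;

            ret = av_interleaved_write_frame(ffp->m_ofmt_ctx, &pkt);
            if (ret < 0)
                av_log(ffp, AV_LOG_ERROR, "Error muxing packet\n");
            av_packet_unref(&pkt);
        }

        pthread_mutex_unlock(&ffp->record_mutex);
        return ret;
    }

    A plausible call site (again only a sketch): in read_thread() in ff_ffplay.c, right after av_read_frame() returns a packet and before the packet is queued for decoding:

        /* inside read_thread(), after a successful av_read_frame(ic, pkt) */
        if (ffp->is_record) {
            if (ffp_record_file(ffp, pkt) < 0) {
                ffp->record_error = 1;
                ffp_stop_record(ffp);
            }
        }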
    
    * Screenshot: copy the current video frame into a caller-supplied buffer. The caller must provide width * height * 4 bytes (the Java side passes the pixels of an ARGB_8888 Bitmap of the video size):
    void ffp_get_current_frame_l(FFPlayer *ffp, uint8_t *frame_buf)
    {
      ALOGD("=============>start snapshot\n");
    
      VideoState *is = ffp->is;
      Frame *vp;
      int i = 0, linesize = 0, pixels = 0;
      uint8_t *src;
    
      vp = &is->pictq.queue[is->pictq.rindex];
      int height = vp->bmp->h;
      int width = vp->bmp->w;
    
      ALOGD("=============>%d X %d === %d\n", width, height, vp->bmp->pitches[0]);
    
      // copy data to bitmap in java code
      linesize = vp->bmp->pitches[0];
      src = vp->bmp->pixels[0];
      pixels = width * 4;
      for (i = 0; i < height; i++) {
          memcpy(frame_buf + i * pixels, src + i * linesize, pixels);
      }
      
      ALOGD("=============>end snapshot\n");
    }
    
    Add the state needed for recording to the FFPlayer struct in ff_ffplay_def.h:
        AVFormatContext *m_ofmt_ctx;        // output (muxing) context
        AVOutputFormat *m_ofmt;
        pthread_mutex_t record_mutex;       // protects the muxer across threads
        int is_record;                      // currently recording?
        int record_error;

        int is_first;                       // has the first packet been written?
        int64_t start_pts;                  // pts when recording started
        int64_t start_dts;                  // dts when recording started
    
    Add the corresponding wrapper functions for recording to ijkplayer.c:
    int ijkmp_start_record(IjkMediaPlayer *mp,const char *file_name)
    {
        assert(mp);
        MPTRACE("ijkmp_startRecord()\n");
        pthread_mutex_lock(&mp->mutex);
        int retval = ffp_start_record(mp->ffplayer,file_name);
        pthread_mutex_unlock(&mp->mutex);
        MPTRACE("ijkmp_startRecord()=%d\n", retval);
        return retval;
    }
    
    int ijkmp_stop_record(IjkMediaPlayer *mp)
    {
        assert(mp);
        MPTRACE("ijkmp_stopRecord()\n");
        pthread_mutex_lock(&mp->mutex);
        int retval = ffp_stop_record(mp->ffplayer);
        pthread_mutex_unlock(&mp->mutex);
        MPTRACE("ijkmp_stopRecord()=%d\n", retval);
        return retval;
    }
    
    
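    The JNI glue below also calls ijkmp_get_current_frame(), whose body is not shown in the post. A minimal sketch of such a wrapper in ijkplayer.c, assuming it simply forwards to ffp_get_current_frame_l() under the player mutex, mirroring the two record wrappers above:

    void ijkmp_get_current_frame(IjkMediaPlayer *mp, uint8_t *frame_buf)
    {
        assert(mp);
        MPTRACE("ijkmp_get_current_frame()\n");
        pthread_mutex_lock(&mp->mutex);
        // delegate to the ff_ffplay-level snapshot function added earlier
        ffp_get_current_frame_l(mp->ffplayer, frame_buf);
        pthread_mutex_unlock(&mp->mutex);
        MPTRACE("ijkmp_get_current_frame()=done\n");
    }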
    
    Next, add the corresponding JNI methods in ijkplayer_jni.c under ijkmedia/ijkplayer/android:
    static jboolean
    IjkMediaPlayer_getCurrentFrame(JNIEnv *env, jobject thiz, jobject bitmap)
    {
        jboolean retval = JNI_TRUE;
        IjkMediaPlayer *mp = jni_get_media_player(env, thiz);
        JNI_CHECK_GOTO(mp, env, NULL, "mpjni: getCurrentFrame: null mp", LABEL_RETURN);
    
        uint8_t *frame_buffer = NULL;
    
        if (0 > AndroidBitmap_lockPixels(env, bitmap, (void **)&frame_buffer)) {
            // ThrowNew expects a jclass, not a class-name string
            (*env)->ThrowNew(env, (*env)->FindClass(env, "java/io/IOException"),
                             "Unable to lock pixels.");
            retval = JNI_FALSE;
            goto LABEL_RETURN;
        }
    
        ijkmp_get_current_frame(mp, frame_buffer);
    
        if (0 > AndroidBitmap_unlockPixels(env, bitmap)) {
            (*env)->ThrowNew(env, (*env)->FindClass(env, "java/io/IOException"),
                             "Unable to unlock pixels.");
            retval = JNI_FALSE;
            goto LABEL_RETURN;
        }
    
        LABEL_RETURN:
        ijkmp_dec_ref_p(&mp);
        return retval;
    }
    
    
    static jint
    IjkMediaPlayer_startRecord(JNIEnv *env, jobject thiz,jstring file)
    {
        jint retval = 0;
        IjkMediaPlayer *mp = jni_get_media_player(env, thiz);
        JNI_CHECK_GOTO(mp, env, NULL, "mpjni: startRecord: null mp", LABEL_RETURN);
        const char *nativeString = (*env)->GetStringUTFChars(env, file, 0);
        retval = ijkmp_start_record(mp, nativeString);
        (*env)->ReleaseStringUTFChars(env, file, nativeString); // avoid leaking the UTF chars
    
    LABEL_RETURN:
        ijkmp_dec_ref_p(&mp);
        return retval;
    }
    
    static jint
    IjkMediaPlayer_stopRecord(JNIEnv *env, jobject thiz)
    {
        jint retval = 0;
        IjkMediaPlayer *mp = jni_get_media_player(env, thiz);
        JNI_CHECK_GOTO(mp, env, NULL, "mpjni: stopRecord: null mp", LABEL_RETURN);
    
        retval = ijkmp_stop_record(mp);
    
    LABEL_RETURN:
        ijkmp_dec_ref_p(&mp);
        return retval;
    }
    
    Register the new native methods in the JNINativeMethod g_methods table:
    static JNINativeMethod g_methods[] = {
        {
            "_setDataSource",
            "(Ljava/lang/String;[Ljava/lang/String;[Ljava/lang/String;)V",
            (void *) IjkMediaPlayer_setDataSourceAndHeaders
        },
        { "_setDataSourceFd",       "(I)V",     (void *) IjkMediaPlayer_setDataSourceFd },
        { "_setDataSource",         "(Ltv/danmaku/ijk/media/player/misc/IMediaDataSource;)V", (void *)IjkMediaPlayer_setDataSourceCallback },
        { "_setAndroidIOCallback",  "(Ltv/danmaku/ijk/media/player/misc/IAndroidIO;)V", (void *)IjkMediaPlayer_setAndroidIOCallback },
    
        { "_setVideoSurface",       "(Landroid/view/Surface;)V", (void *) IjkMediaPlayer_setVideoSurface },
        { "_prepareAsync",          "()V",      (void *) IjkMediaPlayer_prepareAsync },
        { "_start",                 "()V",      (void *) IjkMediaPlayer_start },
        { "_stop",                  "()V",      (void *) IjkMediaPlayer_stop },
        { "seekTo",                 "(J)V",     (void *) IjkMediaPlayer_seekTo },
        { "_pause",                 "()V",      (void *) IjkMediaPlayer_pause },
        { "isPlaying",              "()Z",      (void *) IjkMediaPlayer_isPlaying },
        { "getCurrentPosition",     "()J",      (void *) IjkMediaPlayer_getCurrentPosition },
        { "getDuration",            "()J",      (void *) IjkMediaPlayer_getDurations },
        { "startRecord",            "(Ljava/lang/String;)I",      (void *) IjkMediaPlayer_startRecord },
        { "stopRecord",             "()I",      (void *) IjkMediaPlayer_stopRecord },
    ……
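    The Java class below also declares getCurrentFrame(Bitmap), so presumably it needs a registration entry here as well; a sketch of such an entry (not part of the original excerpt):

    { "getCurrentFrame",        "(Landroid/graphics/Bitmap;)Z",      (void *) IjkMediaPlayer_getCurrentFrame },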
    
    

    Time to build. This is for the Android version:

    cd android/contrib
    sh compile-ffmpeg.sh clean
    sh compile-ffmpeg.sh all
    cd ..
    sh compile-ijk.sh all
    

    After the build you will find the .so libraries for each CPU ABI under the android directory. These are the .so files I built:
    Download here (Baidu Cloud, extraction code: mc0l)

    How to use it

    Import the .so libraries above into your own Android project, copy the ijkplayer-java module, and add a few native methods to IjkMediaPlayer:
        @Override
        public native int startRecord(String file);
    
        @Override
        public native int stopRecord();
    
        @Override
        public native boolean getCurrentFrame(Bitmap bitmap);
    
    In my project I started from the open-source example, so I modified IjkVideoView to implement the methods above, as follows:
        // take a snapshot of the current frame and save it as a JPEG
        public boolean snapshotPicture() {
            int width = getVideoWidth();
            int height = getVideoHeight();
            Bitmap srcBitmap = Bitmap.createBitmap(width,
                    height, Bitmap.Config.ARGB_8888);
            boolean flag = getCurrentFrame(srcBitmap);
            if (flag) {
                // save the bitmap to disk
                String path = getInnerSDCardPath() + "/ijkplayer/snapshot";
                File screenshotsDirectory = new File(path);
                if (!screenshotsDirectory.exists()) {
                    screenshotsDirectory.mkdirs();
                }
    
                File savePath = new File(
                        screenshotsDirectory.getPath()
                                + "/"
                                + new SimpleDateFormat("yyyyMMddHHmmss")
                                .format(new Date()) + ".jpg");
                ImageUtils.saveBitmap(savePath.getPath(), srcBitmap);
            }
            return flag;
        }
    
        // start recording to an MP4 file
        public void startRecord() {
            if (mMediaPlayer != null && mIjkPlayer != null) {
                String path = getInnerSDCardPath() + "/"
                        + new SimpleDateFormat("yyyyMMddHHmmss")
                        .format(new Date()) + ".mp4";
                mMediaPlayer.startRecord(path);
               
            }
        }
    
        // stop recording
        public void stopRecord() {
            if (mMediaPlayer != null && mIjkPlayer != null) {
                mMediaPlayer.stopRecord();
    
            }
        }
    

    Finally

    After several days I finally got every requested feature working. Thanks first of all to the iOS team for their approach and design. I'm taking a bit of spare time to write it up and share it here.


      Reader comments

      • 大脸_先森: When will you write up the iOS approach and code? @lebonbill
      • 一个野路子程序员: Heh, it feels like only half the story is told. The key parts, saving each frame and audio/video sync, are missing. Still, thanks for the approach; I have it fully working now, thanks.
        JiaCcc: Could you share it?
      • _乘风破浪_: I came across your write-up by chance and our implementations are almost identical, except that for thread mutual exclusion I used the SDL-wrapped mutex. You are, however, missing one key step: the code that saves each frame is not shown.
        五月槐花香: Hi, could you post the method that saves each frame?
      • d281e4ce66ea: After changing the native code as described, playback works fine, but as soon as I call the screenshot method the screen goes white and the app crashes. Any idea why? :pray:
      • 可乐老爸: Also, does ffp_record_file need to be called from the decoding function decoder_decode_frame? I did not quite follow that part.
      • 可乐老爸: Hi, one of the code blocks in the post is pasted incorrectly; the ffp_record_file part is missing. Could you repost it so I can use it as a reference? Thanks.
      • 可乐老爸: Thanks for sharing your experience. I have been studying ijkplayer recently and would appreciate any pointers.
