美文网首页
ffmpeg-OpenSLES播放音频数据

ffmpeg-OpenSLES播放音频数据

作者: Peakmain | 来源:发表于2019-10-30 11:59 被阅读0次

    前言

    ndk调试命令

    adb logcat | ndk-stack -sym app/build/intermediates/cmake/debug/obj/armeabi
    

    上两篇文章
    FFmpeg - 初探ffmpeg并解码数据
    FFmpeg-多线程解码播放
    下面是基于上面所写的文章系列

    OpenSLES播放音频的流程

    OpenSLES和OpenGLES都是自带的
    xxxes和xxx之间基本上没有区别,主要区别就是xxxes是xxx的精简
    播放音频流程

    • 创建引擎接口
    • 创建混音器和设置参数
    • 创建播放器
    • 设置缓存队列和回调函数
    • 设置播放状态且调用回调函数

    大家可以看google提供的案例

    // PCM playback state shared between the JNI entry point and the
    // buffer-queue callback (set up in Java_..._playPCM below).
    FILE *pcmFile;
    void *pcmBuffer;

    /**
     * OpenSL ES buffer-queue callback: refills the queue with the next
     * chunk of raw PCM (44100 Hz, 2 channels, 16-bit => 44100*2*2 bytes
     * per second of audio) until the file is exhausted, then releases
     * the file and the buffer.
     */
    void bqPlayerCallback(SLAndroidSimpleBufferQueueItf bq, void *context) {
        // fread may return fewer bytes than requested on the last chunk;
        // enqueue only what was actually read. (Fix: the original always
        // enqueued the full buffer size, replaying stale bytes at EOF.)
        size_t bytesRead = fread(pcmBuffer, 1, 44100 * 2 * 2, pcmFile);
        if (bytesRead > 0) {
            (*bq)->Enqueue(bq, pcmBuffer, (SLuint32) bytesRead);
        } else {
            // EOF or read error: stop the refill loop and clean up.
            fclose(pcmFile);
            pcmFile = NULL;
            free(pcmBuffer);
            pcmBuffer = NULL;
        }
    }
    
    /**
     * Google-sample style OpenSL ES pipeline: engine -> output mix ->
     * audio player -> buffer queue; starts playback and primes the queue
     * by invoking the buffer callback once by hand.
     * NOTE(review): none of the SLresult return values are checked; if
     * any step fails, the following (*itf)-> call dereferences an
     * uninitialized interface pointer.
     */
    void initCreateOpenSLES() {

        // 1. Create and realize the engine, then fetch its interface
        SLObjectItf pEngineObject;
        SLEngineItf engineEngine;
        slCreateEngine(&pEngineObject, 0, NULL, 0, NULL, NULL);
        // realize (synchronously allocate resources)
        (*pEngineObject)->Realize(pEngineObject, SL_BOOLEAN_FALSE);
        // obtain the engine interface used to create all other objects
        (*pEngineObject)->GetInterface(pEngineObject, SL_IID_ENGINE, &engineEngine);
        // 2. Create the output mix and configure its (optional) reverb
        SLObjectItf outputMixObject;
        SLEnvironmentalReverbItf outputMixEnvironmentalReverb = NULL;
        const SLInterfaceID ids[1] = {SL_IID_ENVIRONMENTALREVERB};
        const SLboolean req[1] = {SL_BOOLEAN_FALSE};
        const SLEnvironmentalReverbSettings reverbSettings =
                SL_I3DL2_ENVIRONMENT_PRESET_STONECORRIDOR;
        // create
        (*engineEngine)->CreateOutputMix(engineEngine, &outputMixObject, 1, ids, req);
        // realize
        (*outputMixObject)->Realize(outputMixObject, SL_BOOLEAN_FALSE);
        // fetch the reverb interface (requested as optional above)
        (*outputMixObject)->GetInterface(outputMixObject, SL_IID_ENVIRONMENTALREVERB,
                                         &outputMixEnvironmentalReverb);
        // apply the preset
        (*outputMixEnvironmentalReverb)->SetEnvironmentalReverbProperties(
                outputMixEnvironmentalReverb, &reverbSettings);
        // 3. Create the player
        SLObjectItf bqPlayerObject = NULL;
        SLPlayItf bqPlayerPlay;
        // source: an Android simple buffer queue carrying 44.1 kHz
        // stereo 16-bit little-endian PCM
        SLDataLocator_AndroidSimpleBufferQueue simpleBufferQueue = {
                SL_DATALOCATOR_ANDROIDSIMPLEBUFFERQUEUE, 2};
        SLDataFormat_PCM formatPcm = {SL_DATAFORMAT_PCM,
                                      2,
                                      SL_SAMPLINGRATE_44_1,
                                      SL_PCMSAMPLEFORMAT_FIXED_16,
                                      SL_PCMSAMPLEFORMAT_FIXED_16,
                                      SL_SPEAKER_FRONT_LEFT | SL_SPEAKER_FRONT_RIGHT,
                                      SL_BYTEORDER_LITTLEENDIAN//byte order
        };
        SLDataSource pAudioSrc = {&simpleBufferQueue, &formatPcm};

        SLDataLocator_OutputMix loc_outmix = {SL_DATALOCATOR_OUTPUTMIX, outputMixObject};
        SLDataSink audioSnk = {&loc_outmix, NULL};

        const SLInterfaceID pInterfaceIds[3] = {SL_IID_BUFFERQUEUE, SL_IID_VOLUME,
                                                SL_IID_PLAYBACKRATE,//playback-rate (speed) control
                /*SL_IID_MUTESOLO,*/};
        const SLboolean pInterfaceRequired[3] = {SL_BOOLEAN_TRUE, SL_BOOLEAN_TRUE, SL_BOOLEAN_TRUE,
                /*SL_BOOLEAN_TRUE,*/ };
        (*engineEngine)->CreateAudioPlayer(engineEngine, &bqPlayerObject, &pAudioSrc, &audioSnk, 3,
                                           pInterfaceIds, pInterfaceRequired);
        (*bqPlayerObject)->Realize(bqPlayerObject, SL_BOOLEAN_FALSE);
        (*bqPlayerObject)->GetInterface(bqPlayerObject, SL_IID_PLAY, &bqPlayerPlay);
        // 4. Fetch the buffer queue and register the refill callback
        SLAndroidSimpleBufferQueueItf bqPlayerBufferQueue;
        (*bqPlayerObject)->GetInterface(bqPlayerObject, SL_IID_BUFFERQUEUE,
                                        &bqPlayerBufferQueue);
        (*bqPlayerBufferQueue)->RegisterCallback(bqPlayerBufferQueue, bqPlayerCallback, NULL);
        // 5. Start playback
        (*bqPlayerPlay)->SetPlayState(bqPlayerPlay, SL_PLAYSTATE_PLAYING);

        // 6. Prime the queue by invoking the callback manually once
        bqPlayerCallback(bqPlayerBufferQueue, NULL);
    }
    
    /**
     * JNI entry point: opens the raw PCM file at `url_`, allocates a
     * one-second buffer (44100 Hz * 2 ch * 2 bytes) and starts OpenSL ES
     * playback driven by bqPlayerCallback.
     */
    extern "C"
    JNIEXPORT void JNICALL
    Java_com_peakmain_ndkproject_MainActivity_playPCM(JNIEnv *env, jobject instance, jstring url_) {
        const char *url = env->GetStringUTFChars(url_, 0);
        // Fix: PCM is binary data — open in binary mode ("rb"); the
        // original's "r" would corrupt the stream on platforms that
        // distinguish text mode.
        pcmFile = fopen(url, "rb");
        env->ReleaseStringUTFChars(url_, url);
        if (pcmFile == NULL) {
            // Nothing to play; avoid a NULL FILE* crash in the callback.
            return;
        }
        // One second of 44.1 kHz stereo 16-bit PCM.
        pcmBuffer = malloc(44100 * 2 * 2);
        if (pcmBuffer == NULL) {
            fclose(pcmFile);
            pcmFile = NULL;
            return;
        }
        initCreateOpenSLES();
    }
    

    加入OpenSLES和重整项目代码

    • 首先CmakeLists我们需要添加OpenSLES的依赖
    # Link the native target against the prebuilt ffmpeg 3.x libraries,
    # OpenSLES and the NDK system libraries.
    target_link_libraries( # Specifies the target library.
    # extra libraries from the ffmpeg build
            music_play
            opencv_java
            # codecs — the core encode/decode library
            avcodec-57
            # device information
            avdevice-57
            # filters / effects
            avfilter-6
            # container (de)muxing
            avformat-57
            # utilities (most other ffmpeg libs depend on this)
            avutil-55
            # post-processing
            postproc-54
            # audio resampling / sample-format conversion
            swresample-2
            # video pixel-format conversion
            swscale-4
            # NDK system libraries
            android
            jnigraphics
              OpenSLES
            # Links the target library to the log library
            # included in the NDK.
            log)
    
    • music_play的修改
    //
    // Created by admin on 2019/10/24.
    //
    #include <jni.h>
    
    
    #include "PeakmainJNICall.h"
    #include "PeakmainFFmpeg.h"
    
    
    PeakmainJNICall *pJNICall;
    PeakmainFFmpeg *ffmpeg;
    
    JavaVM *pJavaVM = NULL;
    //重写so加载时的一个方法
    // Called by the runtime when this .so is loaded; caches the JavaVM so
    // native worker threads can attach and obtain a JNIEnv later.
    extern "C"
    JNIEXPORT jint JNICALL
    JNI_OnLoad(JavaVM *javaVM, void *reserved) {
        pJavaVM = javaVM;
        JNIEnv *jniEnv = NULL;
        jint envRes = javaVM->GetEnv((void **) &jniEnv, JNI_VERSION_1_4);
        if (envRes != JNI_OK) {
            return -1;
        }
        return JNI_VERSION_1_4;
    }
    // JNI bridge: start playback — a no-op until nPrepare has created
    // the native player.
    extern "C"
    JNIEXPORT void JNICALL
    Java_com_peakmain_mall_ndk_media_MusicPlayer_npay(JNIEnv *env, jobject instance) {
        if (ffmpeg == NULL) {
            return;
        }
        ffmpeg->play();
    }
    // JNI bridge: builds the native player around the given media url and
    // runs the (synchronous) prepare step. While a player instance already
    // exists this is a no-op.
    extern "C"
    JNIEXPORT void JNICALL
    Java_com_peakmain_mall_ndk_media_MusicPlayer_nPrepare(JNIEnv *env, jobject instance, jstring url_) {
        const char *mediaUrl = env->GetStringUTFChars(url_, 0);
        if (ffmpeg == NULL) {
            // PeakmainFFmpeg copies the url internally, so releasing the
            // UTF chars below is safe.
            pJNICall = new PeakmainJNICall(pJavaVM, env, instance);
            ffmpeg = new PeakmainFFmpeg(pJNICall, mediaUrl);
            ffmpeg->prepared();
        }
        env->ReleaseStringUTFChars(url_, mediaUrl);
    }
    
    • PeakmainJNICall.h和PeakmainJNICall.cpp
    //
    // Created by admin on 2019/10/28.
    //
    
    #ifndef NDK_PEAKMAINJNICALL_H
    #define NDK_PEAKMAINJNICALL_H


    #include <jni.h>
    // Which thread a native->Java callback is issued from; THREAD_CHILD
    // callers must attach to the JavaVM to obtain a valid JNIEnv.
    enum ThreadMode{
        THREAD_CHILD,THREAD_MAIN
    };
    // Holder for the JNI handles needed to call back into the Java player
    // object (currently only onError(int, String)).
    class PeakmainJNICall {
    public:
        JavaVM *javaVM;
        // JNIEnv captured on the constructing thread; only valid there —
        // child threads must go through javaVM->AttachCurrentThread.
        JNIEnv *jniEnv;
        // cached method id of the Java-side onError(ILjava/lang/String;)V
        jmethodID jPlayerErrorMid;
        // global reference to the Java player instance
        // NOTE(review): this member is named `jobject`, shadowing the JNI
        // type of the same name — which is why the constructor parameter
        // below must be spelled `_jobject *`. Consider renaming.
        jobject jobject;
    public:
        PeakmainJNICall(JavaVM *javaVM, JNIEnv *jniEnv, _jobject * jobject);
        ~PeakmainJNICall();



    public:
        // report an error to Java's onError; threadMode selects whether
        // the cached env or an attached env is used
        void callPlayerJniError(ThreadMode  threadMode,int code, char *msg);
    };


    #endif //NDK_PEAKMAINJNICALL_H
    
    
    //
    // Created by admin on 2019/10/28.
    //
    
    #include "PeakmainJNICall.h"
    #include "ConstantsDefine.h"
    
    /**
     * Caches the JavaVM, the constructing thread's JNIEnv, a global ref
     * to the Java player object, and the id of its onError method.
     */
    PeakmainJNICall::PeakmainJNICall(JavaVM *javaVM, JNIEnv *jniEnv, _jobject *jobject) {
        this->javaVM = javaVM;
        this->jniEnv = jniEnv;
        // Promote to a global ref: the incoming jobject is a local ref
        // that dies when the calling JNI frame returns.
        this->jobject = jniEnv->NewGlobalRef(jobject);

        jclass jPlayerClass = jniEnv->GetObjectClass(jobject);
        jPlayerErrorMid = jniEnv->GetMethodID(jPlayerClass, "onError", "(ILjava/lang/String;)V");
        // Fix: release the local class reference eagerly instead of
        // leaking it until the JNI frame unwinds.
        jniEnv->DeleteLocalRef(jPlayerClass);
    }
    
    
    // Drops the global reference to the Java player object.
    // NOTE(review): uses the JNIEnv captured at construction time, which
    // is only valid on the constructing thread — confirm the destructor
    // always runs on that thread.
    PeakmainJNICall::~PeakmainJNICall() {
        jniEnv->DeleteGlobalRef(jobject);
    }
    
    /**
     * Invokes the Java-side onError(code, msg). THREAD_MAIN uses the
     * cached JNIEnv; THREAD_CHILD attaches the current thread to the VM
     * for the duration of the call.
     * NOTE(review): the THREAD_CHILD branch detaches unconditionally — if
     * the calling thread was already attached for other reasons, this
     * detaches it too; verify against the threading model.
     */
    void PeakmainJNICall::callPlayerJniError(ThreadMode threadMode, int code, char *msg) {
        LOGE("报错进来了");
        if (threadMode == THREAD_MAIN) {
            jstring jsg = jniEnv->NewStringUTF(msg);
            jniEnv->CallVoidMethod(jobject, jPlayerErrorMid, code, jsg);
            jniEnv->DeleteLocalRef(jsg);
        } else if (threadMode == THREAD_CHILD) {
            // obtain a JNIEnv valid for *this* thread
            JNIEnv *env;
            if (javaVM->AttachCurrentThread(&env, 0) != JNI_OK) {
                LOGE("get child thread jni error");
                return;
            }
            jstring jsg = env->NewStringUTF(msg);
            env->CallVoidMethod(jobject, jPlayerErrorMid, code, jsg);
            env->DeleteLocalRef(jsg);
            javaVM->DetachCurrentThread();
        }

    }
    
    • PeakmainFFmpeg.h和PeakmainFFmpeg.cpp
    //
    // Created by admin on 2019/10/28.
    //
    
    #ifndef NDK_PEAKMAINFFMPEG_H
    #define NDK_PEAKMAINFFMPEG_H
    
    #include "PeakmainJNICall.h"
    #include "PeakmainAudio.h"
    
    extern "C" {
    #include "libavformat/avformat.h"
    #include "libswresample/swresample.h"
    
    }
    
    #include <pthread.h>
    
    
    // Owns the FFmpeg demux/decode/resample state for one media url and
    // drives prepare/play through a PeakmainAudio instance.
    class PeakmainFFmpeg {
    public:
        // demux / decode / resample contexts (freed in release())
        AVFormatContext *pFormatContext = NULL;
        AVCodecContext *pCodecContext = NULL;
        SwrContext *swrContext = NULL;
        // heap-owned copy of the media url (freed in release())
        char* url = NULL;
        PeakmainJNICall *pJniCall = NULL;
        PeakmainAudio *pAudio = NULL;
    public:
        PeakmainFFmpeg(PeakmainJNICall *jniCall, const char *url);

        ~PeakmainFFmpeg();

    public:
        // start playback (no-op before prepared() succeeds)
        void play();

        // synchronous prepare on the calling thread
        void prepared();

        // prepare implementation; threadMode tells error reporting how to
        // obtain a JNIEnv
        void prepared(ThreadMode threadMode);


        // release() everything, then forward the error to the Java layer
        void callPlayerJniError(ThreadMode threadMode, int code, char *msg);

        void release();


        // prepare on a detached worker thread
        void prepareAsync();
    };
    #endif //NDK_PEAKMAINFFMPEG_H
    
    //
    // Created by admin on 2019/10/28.
    //
    
    #include "PeakmainFFmpeg.h"
    #include "ConstantsDefine.h"
    
    
    // Keeps a reference to the JNI helper and takes a private, heap-owned
    // copy of the url (released later in release()).
    PeakmainFFmpeg::PeakmainFFmpeg(PeakmainJNICall *jniCall, const char *url) {
        this->pJniCall = jniCall;
        size_t urlBytes = strlen(url) + 1;   // include the terminating NUL
        this->url = (char *) malloc(urlBytes);
        memcpy(this->url, url, urlBytes);
    }
    
    
    // Frees all native resources (decoder, demuxer, resampler, url copy).
    PeakmainFFmpeg::~PeakmainFFmpeg() {
        release();
    }
    
    
    
    
    // Starts audio playback; does nothing until prepared() has created
    // the PeakmainAudio instance.
    void PeakmainFFmpeg::play() {
        if (pAudio == NULL) {
            return;
        }
        pAudio->play();
    }
    
    // Releases all native resources first (so Java observes the player in
    // a clean state), then forwards the error to Java's onError via the
    // JNI helper. `msg` is not copied — it must outlive the call.
    void PeakmainFFmpeg::callPlayerJniError(ThreadMode mode, int code, char *msg) {

        // free decoder/demuxer/resampler/url before reporting
        release();
        LOGE("进来了0");
        // forward to Java onError(int, String)
        pJniCall->callPlayerJniError(mode, code, msg);

    }
    
    
    // Synchronous prepare: runs on the calling (main/Java) thread.
    void PeakmainFFmpeg::prepared() {
        prepared(THREAD_MAIN);
    }
    
    void *threadPrepare(void *context) {
        PeakmainFFmpeg *pFFmpeg = (PeakmainFFmpeg *) context;
        pFFmpeg->prepared(THREAD_CHILD);
        return 0;
    }
    
    /**
     * Runs prepared() on a detached worker thread so decoding setup does
     * not block the caller.
     */
    void PeakmainFFmpeg::prepareAsync() {
        pthread_t prepareThreadT;
        // Fix: only detach a thread that was actually created — calling
        // pthread_detach on an uninitialized pthread_t after a failed
        // pthread_create is undefined behavior.
        if (pthread_create(&prepareThreadT, NULL, threadPrepare, this) == 0) {
            pthread_detach(prepareThreadT);
        }
    }
    
    
    /**
     * Opens the media at `url`, locates the best audio stream, opens its
     * decoder, and configures a resampler to 44.1 kHz stereo S16 —
     * matching the OpenSL ES player format in PeakmainAudio. On success a
     * PeakmainAudio instance is created; on any failure everything is
     * released and the error is reported to Java via callPlayerJniError
     * (threadMode tells the JNI helper how to obtain a JNIEnv).
     */
    void PeakmainFFmpeg::prepared(ThreadMode threadMode) {
        av_register_all();
        avformat_network_init();
        int formatOpenInputRes = 0;
        int formatFindStreamInfoRes = 0;
        AVCodecParameters *pCodecParameters;
        AVCodec *pCodec = NULL;
        int codecParametersToContextRes = -1;
        int codecOpenRes = -1;

        formatOpenInputRes = avformat_open_input(&pFormatContext, url, NULL, NULL);
        if (formatOpenInputRes != 0) {
            // On failure: report back to the Java layer and release
            // resources (both happen inside callPlayerJniError).
            LOGE("format open input error: %s", av_err2str(formatOpenInputRes));
            callPlayerJniError(threadMode, formatOpenInputRes, av_err2str(formatOpenInputRes));
            return;
        }

        formatFindStreamInfoRes = avformat_find_stream_info(pFormatContext, NULL);
        if (formatFindStreamInfoRes < 0) {
            LOGE("format find stream info error: %s", av_err2str(formatFindStreamInfoRes));
            // reusing the FFmpeg error code as the Java error code — not
            // ideal, but convenient
            callPlayerJniError(threadMode, formatFindStreamInfoRes,
                               av_err2str(formatFindStreamInfoRes));
            return;
        }

        // locate the index of the best audio stream
        int audioStramIndex = av_find_best_stream(pFormatContext, AVMediaType::AVMEDIA_TYPE_AUDIO, -1,
                                                  -1,
                                                  NULL, 0);
        if (audioStramIndex < 0) {
            LOGE("format audio stream error.");
            callPlayerJniError(threadMode, FIND_STREAM_ERROR_CODE, "format audio stream error");
            return;
        }

        // find the decoder for that stream's codec
        pCodecParameters = pFormatContext->streams[audioStramIndex]->codecpar;
        pCodec = avcodec_find_decoder(pCodecParameters->codec_id);
        if (pCodec == NULL) {
            LOGE("codec find audio decoder error");
            callPlayerJniError(threadMode, CODEC_FIND_DECODER_ERROR_CODE,
                               "codec find audio decoder error");
            return;
        }
        // allocate and open the decoder context
        pCodecContext = avcodec_alloc_context3(pCodec);
        if (pCodecContext == NULL) {
            LOGE("codec alloc context error");
            callPlayerJniError(threadMode, CODEC_ALLOC_CONTEXT_ERROR_CODE, "codec alloc context error");
            return;
        }
        codecParametersToContextRes = avcodec_parameters_to_context(pCodecContext, pCodecParameters);
        if (codecParametersToContextRes < 0) {
            LOGE("codec parameters to context error: %s", av_err2str(codecParametersToContextRes));
            callPlayerJniError(threadMode, codecParametersToContextRes,
                               av_err2str(codecParametersToContextRes));
            return;
        }

        codecOpenRes = avcodec_open2(pCodecContext, pCodec, NULL);
        if (codecOpenRes != 0) {
            LOGE("codec audio open error: %s", av_err2str(codecOpenRes));
            callPlayerJniError(threadMode, codecOpenRes, av_err2str(codecOpenRes));
            return;
        }

        // ---------- resampler setup start ----------
        // fixed output: stereo, signed 16-bit, AUDIO_SAMPLE_RATE
        int64_t out_ch_layout = AV_CH_LAYOUT_STEREO;
        enum AVSampleFormat out_sample_fmt = AVSampleFormat::AV_SAMPLE_FMT_S16;
        int out_sample_rate = AUDIO_SAMPLE_RATE;
        int64_t in_ch_layout = pCodecContext->channel_layout;
        enum AVSampleFormat in_sample_fmt = pCodecContext->sample_fmt;
        int in_sample_rate = pCodecContext->sample_rate;
        swrContext = swr_alloc_set_opts(NULL, out_ch_layout, out_sample_fmt,
                                        out_sample_rate, in_ch_layout, in_sample_fmt, in_sample_rate, 0, NULL);
        if (swrContext == NULL) {
            // report the error
            callPlayerJniError(threadMode, SWR_ALLOC_SET_OPTS_ERROR_CODE, "swr alloc set opts error");
            return;
        }
        int swrInitRes = swr_init(swrContext);
        if (swrInitRes < 0) {
            callPlayerJniError(threadMode, SWR_CONTEXT_INIT_ERROR_CODE, "swr context swr init error");
            return;
        }
        pAudio = new PeakmainAudio(audioStramIndex, pJniCall, pCodecContext, pFormatContext,swrContext);
        // ---------- resampler setup end ----------
        // TODO: notify the Java layer that preparation succeeded



    }
    
    /**
     * Frees decoder, demuxer, resampler and the url copy. Safe to call
     * more than once: every pointer is NULL-checked and NULLed again by
     * the FFmpeg free helpers.
     * NOTE(review): pAudio is never deleted anywhere in this file —
     * confirm its intended ownership/lifetime.
     */
    void PeakmainFFmpeg::release() {

        if (pCodecContext != NULL) {
            avcodec_close(pCodecContext);
            // avcodec_free_context frees and NULLs pCodecContext
            avcodec_free_context(&pCodecContext);
        }

        if (pFormatContext != NULL) {
            // avformat_close_input frees the context and NULLs the
            // pointer. (Fix: the original additionally called
            // avformat_free_context on the now-NULL pointer — a
            // confusing no-op.)
            avformat_close_input(&pFormatContext);
        }

        if (swrContext != NULL) {
            // swr_free frees the context and NULLs the pointer. (Fix: the
            // original then called free() on it as well, which only
            // avoided a double free because the pointer was already NULL.)
            swr_free(&swrContext);
        }

        avformat_network_deinit();

        if (url != NULL) {
            free(url);
            url = NULL;
        }
    }
    
    • PeakmainAudio.h和PeakmainAudio.cpp主要用来播放
    //
    // Created by admin on 2019/10/29.
    //
    
    #ifndef NDK_PEAKMAINAUDIO_H
    #define NDK_PEAKMAINAUDIO_H
    
    #include "PeakmainJNICall.h"
    #include "utils.h"
    #include <SLES/OpenSLES.h>
    #include <SLES/OpenSLES_Android.h>
    extern "C" {
    #include "libavformat/avformat.h"
    #include "libswresample/swresample.h"
    
    }
    
    // Decodes and resamples audio packets and plays them through
    // OpenSL ES. All FFmpeg contexts are borrowed from PeakmainFFmpeg.
    class PeakmainAudio {
    public:
        // demux / decode / resample contexts (owned by PeakmainFFmpeg)
        AVFormatContext *pFormatContext;
        AVCodecContext *pCodecContext;
        struct SwrContext *swrContext;
        // output buffer filled by swr_convert in resampleAudio()
        uint8_t *resampleOutBuffer;
        char *url;
        int audioStreamIndex;
        PeakmainJNICall *pJNICall;


    public:
        PeakmainAudio( int audioStreamIndex,PeakmainJNICall *pJNICall
                ,AVCodecContext *pCodecContext,AVFormatContext *pFormatContext,SwrContext *swrContext);

        // spawn the detached playback thread
        void play();
        // build the OpenSL ES player and start the buffer-queue loop
        void initCreateOpenSLES();

        // decode + resample the next audio frame; returns bytes produced
        int resampleAudio();
    };


    #endif //NDK_PEAKMAINAUDIO_H
    
    //
    // Created by admin on 2019/10/29.
    //
    
    #include "PeakmainAudio.h"
    
    
    // Stores the shared FFmpeg state and allocates the resample output
    // buffer: frame_size samples * 2 channels * 2 bytes per sample.
    // NOTE(review): if the source sample rate is below the 44.1 kHz
    // output rate configured in PeakmainFFmpeg, swr_convert can emit more
    // than frame_size samples per frame and overflow this buffer — verify,
    // and consider sizing via av_samples_get_buffer_size with a
    // rate-scaled sample count. frame_size may also be 0 for some codecs,
    // yielding a zero-byte buffer.
    PeakmainAudio::PeakmainAudio(int audioStreamIndex, PeakmainJNICall *pJNICall,
                                 AVCodecContext *pCodecContext, AVFormatContext *pFormatContext,
                                 SwrContext *swrContex) {
        this->audioStreamIndex = audioStreamIndex;
        this->pJNICall = pJNICall;
        this->pCodecContext = pCodecContext;
        this->pFormatContext = pFormatContext;
        resampleOutBuffer = (uint8_t *) malloc(pCodecContext->frame_size * 2 * 2);
        this->swrContext = swrContex;
    }
    
    void *threadPlay(void *context) {
        PeakmainAudio *pFFmepg = (PeakmainAudio *) context;
        pFFmepg->initCreateOpenSLES();
    
        return 0;
    }
    
    /**
     * Starts playback on a detached worker thread so decoding and
     * playback do not block the caller.
     */
    void PeakmainAudio::play() {
        pthread_t playThread;
        // Fix: only detach a thread that was actually created — calling
        // pthread_detach on an uninitialized pthread_t after a failed
        // pthread_create is undefined behavior.
        if (pthread_create(&playThread, NULL, threadPlay, this) == 0) {
            pthread_detach(playThread);
        }
    }
    
    /**
     * Reads packets until one audio frame is decoded, resamples it into
     * resampleOutBuffer and returns the number of bytes produced
     * (samples * 2 channels * 2 bytes — matching the S16 stereo output
     * configured in PeakmainFFmpeg). Returns 0 at end of stream.
     * NOTE(review): swr_convert's out_count argument should be the
     * *capacity* of the output buffer in samples, but the input count
     * pFrame->nb_samples is passed — when upsampling this under-reports
     * capacity and output may be clipped; verify together with the
     * buffer sizing in the constructor.
     * NOTE(review): a packet for which avcodec_receive_frame returns
     * EAGAIN is dropped rather than retried.
     */
    int PeakmainAudio::resampleAudio() {
        int dataSize = 0;
        AVPacket *pPacket = av_packet_alloc();
        AVFrame *pFrame = av_frame_alloc();

        while (av_read_frame(pFormatContext, pPacket) >= 0) {
            if (pPacket->stream_index == audioStreamIndex) {
                // send the compressed packet to the decoder (packet -> PCM)
                int codecSendPacketRes = avcodec_send_packet(pCodecContext, pPacket);
                if (codecSendPacketRes == 0) {
                    int codecReceiveFrameRes = avcodec_receive_frame(pCodecContext, pFrame);
                    if (codecReceiveFrameRes == 0) {
                        // AVPacket -> AVFrame
                        LOGE("解码音频帧");

                        // resample the decoded frame into the output buffer
                        dataSize = swr_convert(swrContext, &resampleOutBuffer, pFrame->nb_samples,
                                               (const uint8_t **) pFrame->data, pFrame->nb_samples);
                        // bytes = samples * 2 channels * 2 bytes (S16)
                        dataSize = dataSize * 2 * 2;
                        LOGE("解码音频帧");
                        // One frame holds pFrame->nb_samples sample points,
                        // not one second (one second of 44.1 kHz stereo
                        // 16-bit PCM would be 44100*2*2 bytes).
                        break;
                    }
                }
            }
            // drop our references before reading the next packet
            av_packet_unref(pPacket);
            av_frame_unref(pFrame);
        }
        // free the packet/frame structs (unrefs remaining data, NULLs ptrs)
        av_packet_free(&pPacket);
        av_frame_free(&pFrame);
        return dataSize;
    }
    
    /**
     * OpenSL ES buffer-queue callback: decodes/resamples the next audio
     * frame and enqueues it. `context` is the PeakmainAudio instance
     * registered in initCreateOpenSLES().
     */
    void playerCallback(SLAndroidSimpleBufferQueueItf bq, void *context) {
        PeakmainAudio *pFfmpeg = (PeakmainAudio *) (context);
        int dataSize = pFfmpeg->resampleAudio();
        // Fix: at end of stream resampleAudio() returns 0 — enqueueing a
        // zero-length buffer is an OpenSL ES parameter error; skip it
        // instead of passing size 0.
        if (dataSize > 0) {
            (*bq)->Enqueue(bq, pFfmpeg->resampleOutBuffer, dataSize);
        }
    }
    
    /**
     * Builds the OpenSL ES chain (engine -> output mix -> player) for
     * 44.1 kHz stereo S16 PCM, registers playerCallback on the buffer
     * queue with `this` as context, starts playback and primes the queue
     * by invoking the callback once.
     * NOTE(review): SLresult return values are not checked anywhere here.
     */
    void PeakmainAudio::initCreateOpenSLES() {

        /* OpenSLES and OpenGLES ship with the platform.
           The "ES" variants are trimmed-down versions of the desktop APIs
           and follow the same naming rules: slXXX(), glXXX3f(). */
        // 3.1 create the engine object and fetch its interface
        SLObjectItf engineObject = NULL;
        SLEngineItf engineEngine;
        slCreateEngine(&engineObject, 0, NULL, 0, NULL, NULL);
        // realize the engine
        (*engineObject)->Realize(engineObject, SL_BOOLEAN_FALSE);
        // get the engine interface, which is needed in order to create other objects
        (*engineObject)->GetInterface(engineObject, SL_IID_ENGINE, &engineEngine);
        // 3.2 set up the output mix (reverb interface is optional)
        static SLObjectItf outputMixObject = NULL;
        const SLInterfaceID ids[1] = {SL_IID_ENVIRONMENTALREVERB};
        const SLboolean req[1] = {SL_BOOLEAN_FALSE};
        (*engineEngine)->CreateOutputMix(engineEngine, &outputMixObject, 1, ids, req);
        (*outputMixObject)->Realize(outputMixObject, SL_BOOLEAN_FALSE);
        SLEnvironmentalReverbItf outputMixEnvironmentalReverb = NULL;
        (*outputMixObject)->GetInterface(outputMixObject, SL_IID_ENVIRONMENTALREVERB,
                                         &outputMixEnvironmentalReverb);
        SLEnvironmentalReverbSettings reverbSettings = SL_I3DL2_ENVIRONMENT_PRESET_STONECORRIDOR;
        (*outputMixEnvironmentalReverb)->SetEnvironmentalReverbProperties(outputMixEnvironmentalReverb,
                                                                          &reverbSettings);
        // 3.3 create the audio player: source is an Android simple buffer
        //     queue carrying 44.1 kHz stereo 16-bit little-endian PCM
        SLObjectItf pPlayer = NULL;
        SLPlayItf pPlayItf = NULL;
        SLDataLocator_AndroidSimpleBufferQueue simpleBufferQueue = {
                SL_DATALOCATOR_ANDROIDSIMPLEBUFFERQUEUE, 2};
        SLDataFormat_PCM formatPcm = {
                SL_DATAFORMAT_PCM,
                2,
                SL_SAMPLINGRATE_44_1,
                SL_PCMSAMPLEFORMAT_FIXED_16,
                SL_PCMSAMPLEFORMAT_FIXED_16,
                SL_SPEAKER_FRONT_LEFT | SL_SPEAKER_FRONT_RIGHT,
                SL_BYTEORDER_LITTLEENDIAN};
        SLDataSource audioSrc = {&simpleBufferQueue, &formatPcm};
        SLDataLocator_OutputMix outputMix = {SL_DATALOCATOR_OUTPUTMIX, outputMixObject};
        SLDataSink audioSnk = {&outputMix, NULL};
        SLInterfaceID interfaceIds[3] = {SL_IID_BUFFERQUEUE, SL_IID_VOLUME, SL_IID_PLAYBACKRATE};
        SLboolean interfaceRequired[3] = {SL_BOOLEAN_TRUE, SL_BOOLEAN_TRUE, SL_BOOLEAN_TRUE};
        (*engineEngine)->CreateAudioPlayer(engineEngine, &pPlayer, &audioSrc, &audioSnk, 3,
                                           interfaceIds, interfaceRequired);
        (*pPlayer)->Realize(pPlayer, SL_BOOLEAN_FALSE);
        (*pPlayer)->GetInterface(pPlayer, SL_IID_PLAY, &pPlayItf);
        // 3.4 fetch the buffer queue and register the refill callback,
        //     passing `this` so the callback can reach resampleAudio()
        SLAndroidSimpleBufferQueueItf playerBufferQueue;
        (*pPlayer)->GetInterface(pPlayer, SL_IID_BUFFERQUEUE, &playerBufferQueue);
        (*playerBufferQueue)->RegisterCallback(playerBufferQueue, playerCallback, this);
        // 3.5 start playback
        (*pPlayItf)->SetPlayState(pPlayItf, SL_PLAYSTATE_PLAYING);
        // 3.6 prime the queue by invoking the callback manually once
        playerCallback(playerBufferQueue, this);
    }
    

    相关文章

      网友评论

          本文标题:ffmpeg-OpenSLES播放音频数据

          本文链接:https://www.haomeiwen.com/subject/diswvctx.html