OpenSL ES stands for Open Sound Library for Embedded Systems. It is a royalty-free, cross-platform, hardware-accelerated audio API optimized for embedded systems. Android exposes it to native code through the NDK, and it drives the same system audio framework that AudioTrack and MediaPlayer sit on. Like AudioTrack, it only accepts raw audio streams and does not encode or decode audio data, so it has to be paired with a third-party decoding library.
Why use OpenSL at all? For most apps, the APIs in the Android SDK are good enough, but for an app whose core job is audio playback they fall short. With AudioTrack, for example, PCM data decoded in native code has to be converted into Java-layer byte arrays before it can be written, and that extra copy across the JNI boundary adds latency that a dedicated audio player cannot afford.
In the previous article we decoded the audio stream with FFmpeg and played it back with AudioTrack in the Java layer; this time we play it directly in the native layer with OpenSL.
Since OpenSL ES ships with the system, all we need to do is link the system library in CMakeLists.txt:
target_link_libraries(
native-lib
avcodec-56
avdevice-56
avfilter-5
avformat-56
avutil-54
postproc-53
swresample-1
swscale-3
android
# OpenSL ES library
OpenSLES
${log-lib})
The general OpenSL workflow is as follows:
1. Create the engine object and obtain the engine interface
2. Create the output mix
3. Create the player (or recorder)
4. Set up the buffer queue and its callback
5. Set the play state
6. Kick off the callback loop (see the raw-API sketch right after this list)
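As a preview, here is a minimal, self-contained sketch of those six steps against the raw OpenSL ES C API. Error handling is omitted, and the callback just keeps re-enqueuing a silent buffer so the flow is visible; bqCallback and openslSketch are illustrative names, not part of this project.
#include <SLES/OpenSLES.h>
#include <SLES/OpenSLES_Android.h>

static short silence[4096];                     // dummy 16-bit stereo PCM, just to keep the queue fed
static SLAndroidSimpleBufferQueueItf queueItf;  // buffer queue interface obtained in step 4

// step 6: the player calls this whenever a buffer has been consumed; refill and enqueue the next one
static void bqCallback(SLAndroidSimpleBufferQueueItf bq, void *context) {
    (*bq)->Enqueue(bq, silence, sizeof(silence));
}

static void openslSketch() {
    SLObjectItf engineObj, mixObj, playerObj;
    SLEngineItf engineItf;
    SLPlayItf playItf;

    // 1. engine object and engine interface
    slCreateEngine(&engineObj, 0, NULL, 0, NULL, NULL);
    (*engineObj)->Realize(engineObj, SL_BOOLEAN_FALSE);
    (*engineObj)->GetInterface(engineObj, SL_IID_ENGINE, &engineItf);

    // 2. output mix
    (*engineItf)->CreateOutputMix(engineItf, &mixObj, 0, NULL, NULL);
    (*mixObj)->Realize(mixObj, SL_BOOLEAN_FALSE);

    // 3. player: source = Android simple buffer queue carrying 44.1 kHz 16-bit stereo PCM, sink = the output mix
    SLDataLocator_AndroidSimpleBufferQueue loc = {SL_DATALOCATOR_ANDROIDSIMPLEBUFFERQUEUE, 2};
    SLDataFormat_PCM fmt = {SL_DATAFORMAT_PCM, 2, SL_SAMPLINGRATE_44_1,
                            SL_PCMSAMPLEFORMAT_FIXED_16, SL_PCMSAMPLEFORMAT_FIXED_16,
                            SL_SPEAKER_FRONT_LEFT | SL_SPEAKER_FRONT_RIGHT,
                            SL_BYTEORDER_LITTLEENDIAN};
    SLDataSource src = {&loc, &fmt};
    SLDataLocator_OutputMix locMix = {SL_DATALOCATOR_OUTPUTMIX, mixObj};
    SLDataSink sink = {&locMix, NULL};
    SLInterfaceID ids[1] = {SL_IID_BUFFERQUEUE};
    SLboolean req[1] = {SL_BOOLEAN_TRUE};
    (*engineItf)->CreateAudioPlayer(engineItf, &playerObj, &src, &sink, 1, ids, req);
    (*playerObj)->Realize(playerObj, SL_BOOLEAN_FALSE);
    (*playerObj)->GetInterface(playerObj, SL_IID_PLAY, &playItf);

    // 4. buffer queue and callback
    (*playerObj)->GetInterface(playerObj, SL_IID_BUFFERQUEUE, &queueItf);
    (*queueItf)->RegisterCallback(queueItf, bqCallback, NULL);

    // 5. switch to the playing state
    (*playItf)->SetPlayState(playItf, SL_PLAYSTATE_PLAYING);

    // 6. enqueue the first buffer to start the callback loop
    (*queueItf)->Enqueue(queueItf, silence, sizeof(silence));
}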
First, the Java side: an activity that declares the corresponding native methods.
package com.aruba.ffmpegapplication;
import android.media.AudioFormat;
import android.media.AudioManager;
import android.media.AudioTrack;
import android.os.Bundle;
import android.os.Environment;
import android.view.View;
import androidx.appcompat.app.AppCompatActivity;
import java.io.File;
public class PcmPlayActivity extends AppCompatActivity {
static {
System.loadLibrary("native-lib");
}
private AudioTrack audioTrack;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_pcm_play);
}
/**
 * Called from the native layer to create and start the AudioTrack
 *
 * @param sampleRateInHz sample rate in Hz
 * @param channelCount   number of channels
 */
private void create(int sampleRateInHz, int channelCount) {
int channelConfig = AudioFormat.CHANNEL_OUT_MONO; // mono
if (channelCount == 2) {
channelConfig = AudioFormat.CHANNEL_OUT_STEREO;
}
int buffSize = AudioTrack.getMinBufferSize(sampleRateInHz, channelConfig, AudioFormat.ENCODING_PCM_16BIT); // minimum buffer size
// @Deprecated
// public AudioTrack(int streamType, int sampleRateInHz, int channelConfig, int audioFormat, int bufferSizeInBytes, int mode) throws IllegalArgumentException {
// throw new RuntimeException("Stub!");
// }
audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRateInHz, channelConfig, AudioFormat.ENCODING_PCM_16BIT, buffSize, AudioTrack.MODE_STREAM);
audioTrack.play();
}
/**
 * Called from the native layer with a chunk of decoded PCM data
 */
private void play(byte[] bytes, int size) {
if (audioTrack != null && audioTrack.getPlayState() == AudioTrack.PLAYSTATE_PLAYING)
audioTrack.write(bytes, 0, size);
}
public void click(View view) {
switch (view.getId()) {
case R.id.btn_audiotrack:
final File input1 = new File(Environment.getExternalStorageDirectory(), "input.mp3");
new Thread() {
@Override
public void run() {
playByAudio(input1.getAbsolutePath());
}
}.start();
break;
case R.id.btn_opensl:
final File input2 = new File(Environment.getExternalStorageDirectory(), "input.mp3");
new Thread() {
@Override
public void run() {
playByOpenSL(input2.getAbsolutePath());
}
}.start();
break;
case R.id.btn_opensl_stop:
stopByOpenSL();
break;
}
}
private native void playByAudio(String inputFilePath);
private native void playByOpenSL(String inputFilePath);
private native void stopByOpenSL();
}
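The create() and play() methods above are never called from Java; they exist so that the native playByAudio implementation (from the previous article) can push decoded PCM back up to an AudioTrack. A rough sketch of how such a JNI call-back might look is shown below; it is only an illustration under the assumption that the native code holds the activity's jobject, and callJavaAudioTrack is a hypothetical helper name, not the article's actual code.
#include <jni.h>
#include <stdint.h>

// Hypothetical helper: "instance" is the jobject passed into the native method,
// sampleRate/channels describe the decoded stream, pcm/size is one chunk of PCM data.
// In a real player create() would be invoked once and play() once per decoded chunk.
void callJavaAudioTrack(JNIEnv *env, jobject instance,
                        int sampleRate, int channels,
                        const uint8_t *pcm, int size) {
    jclass clazz = env->GetObjectClass(instance);

    // private void create(int sampleRateInHz, int channelCount)
    jmethodID createId = env->GetMethodID(clazz, "create", "(II)V");
    env->CallVoidMethod(instance, createId, sampleRate, channels);

    // private void play(byte[] bytes, int size)
    jmethodID playId = env->GetMethodID(clazz, "play", "([BI)V");
    jbyteArray array = env->NewByteArray(size);
    env->SetByteArrayRegion(array, 0, size, (const jbyte *) pcm);
    env->CallVoidMethod(instance, playId, array, size);

    env->DeleteLocalRef(array);
    env->DeleteLocalRef(clazz);
}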
On the native side, we wrap OpenSL in a helper class.
_opensl_helper.h
//
// Created by aruba on 2020/7/6.
//
#ifndef FFMPEGAPPLICATION_OPENSL_HELPER_H
#define FFMPEGAPPLICATION_OPENSL_HELPER_H
#include <SLES/OpenSLES.h>
#include <SLES/OpenSLES_Android.h>
class OpenslHelper {
public:
//result of the most recent OpenSL call
SLresult result;
//OpenSL engine object
SLObjectItf engine = NULL;
//engine interface
SLEngineItf engineInterface = NULL;
//output mix object
SLObjectItf mix = NULL;
//environmental reverb interface of the output mix
SLEnvironmentalReverbItf environmentalReverbItf = NULL;
//environmental reverb preset
const SLEnvironmentalReverbSettings settings = SL_I3DL2_ENVIRONMENT_PRESET_DEFAULT;
//player object
SLObjectItf player = NULL;
//play interface
SLPlayItf playInterface = NULL;
//buffer queue interface
SLAndroidSimpleBufferQueueItf bufferQueueItf = NULL;
//volume interface
SLVolumeItf volumeItf = NULL;
//play state, one of SL_PLAYSTATE_XXX
SLuint32 playState;
/**
 * Create the OpenSL engine and obtain the engine interface
 * @return SLresult
 */
SLresult createEngine();
/**
 * Check whether a call succeeded
 * @param result
 * @return
 */
bool isSuccess(SLresult &result);
/**
 * Create the output mix
 * @return SLresult
 */
SLresult createMix();
/**
 * Create the player
 * @param numChannels   number of channels
 * @param samplesRate   sample rate, one of SL_SAMPLINGRATE_XX
 * @param bitsPerSample bits per sample (quantization format), one of SL_PCMSAMPLEFORMAT_FIXED_XX
 * @param channelMask   speaker mask, e.g. SL_SPEAKER_XX | SL_SPEAKER_XX
 * @return SLresult
 */
SLresult createPlayer(int numChannels, long samplesRate, int bitsPerSample, int channelMask);
//start playback
SLresult play();
//pause playback
SLresult pause();
//stop playback
SLresult stop();
/**
 * Register the buffer-queue callback. The player keeps invoking that callback,
 * and we refill the buffer queue with fresh PCM data inside it.
 * @param callback
 */
SLresult registerCallback(slAndroidSimpleBufferQueueCallback callback);
~OpenslHelper();
};
#endif //FFMPEGAPPLICATION_OPENSL_HELPER_H
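Before looking at the implementation, here is roughly how the helper is meant to be driven. The real call sites appear later in the JNI code; slHelper, fillBuffer and startPlayback are placeholder names used only for this sketch.
#include "_opensl_helper.h"

static OpenslHelper slHelper;

// buffer-queue callback: decode/convert the next PCM chunk here, then enqueue it
static void fillBuffer(SLAndroidSimpleBufferQueueItf bq, void *pContext) {
    // (*bq)->Enqueue(bq, pcmData, pcmSize);
}

static bool startPlayback() {
    SLresult r = slHelper.createEngine();                          // 1. engine
    if (!slHelper.isSuccess(r)) return false;
    r = slHelper.createMix();                                      // 2. output mix
    if (!slHelper.isSuccess(r)) return false;
    r = slHelper.createPlayer(2, SL_SAMPLINGRATE_44_1,             // 3. player: stereo, 44.1 kHz, 16 bit
                              SL_PCMSAMPLEFORMAT_FIXED_16,
                              SL_SPEAKER_FRONT_LEFT | SL_SPEAKER_FRONT_RIGHT);
    if (!slHelper.isSuccess(r)) return false;
    slHelper.registerCallback(fillBuffer);                         // 4. buffer-queue callback
    slHelper.play();                                               // 5. play state
    fillBuffer(slHelper.bufferQueueItf, NULL);                     // 6. enqueue the first buffer
    return true;
}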
_opensl_helper.cpp
//
// Created by aruba on 2020/7/6.
//
#include "_opensl_helper.h"
/**
 * Check whether a call succeeded
 * @param result
 * @return
 */
bool OpenslHelper::isSuccess(SLresult &result) {
if (result == SL_RESULT_SUCCESS) {
return true;
}
return false;
}
/**
 * Create the OpenSL engine and obtain the engine interface
 * @return SLresult
 */
SLresult OpenslHelper::createEngine() {
//create the engine object
result = slCreateEngine(&engine, 0, NULL, 0, NULL, NULL);
if (!isSuccess(result)) {
return result;
}
//realize the engine; the second argument chooses synchronous (SL_BOOLEAN_FALSE) or asynchronous realization
result = (*engine)->Realize(engine, SL_BOOLEAN_FALSE);
if (!isSuccess(result)) {
return result;
}
//get the engine interface
result = (*engine)->GetInterface(engine, SL_IID_ENGINE, &engineInterface);
if (!isSuccess(result)) {
return result;
}
return result;
}
/**
 * Create the output mix
 * @return SLresult
 */
SLresult OpenslHelper::createMix() {
//create the output mix
result = (*engineInterface)->CreateOutputMix(engineInterface, &mix, 0,
0, 0);
if (!isSuccess(result)) {
return result;
}
//realize the output mix
result = (*mix)->Realize(mix, SL_BOOLEAN_FALSE);
if (!isSuccess(result)) {
return result;
}
//try to get the environmental reverb interface (optional, may not be supported)
SLresult environmentalResult = (*mix)->GetInterface(mix, SL_IID_ENVIRONMENTALREVERB,
&environmentalReverbItf);
if (isSuccess(environmentalResult)) {
//apply the reverb preset to the output mix
(*environmentalReverbItf)->SetEnvironmentalReverbProperties(environmentalReverbItf,
&settings);
}
return result;
}
/**
 * Create the player
 * @param numChannels   number of channels
 * @param samplesRate   sample rate, one of SL_SAMPLINGRATE_XX
 * @param bitsPerSample bits per sample (quantization format), one of SL_PCMSAMPLEFORMAT_FIXED_XX
 * @param channelMask   speaker mask, e.g. SL_SPEAKER_XX | SL_SPEAKER_XX
 * @return SLresult
 */
SLresult OpenslHelper::createPlayer(int numChannels, long samplesRate, int bitsPerSample,
int channelMask) {
//1. describe the audio source: an Android simple buffer queue with 2 buffers,
//   so one buffer can be refilled while the other one is playing (reduces gaps/latency)
SLDataLocator_AndroidSimpleBufferQueue buffQueque = {SL_DATALOCATOR_ANDROIDSIMPLEBUFFERQUEUE,
2};
//PCM format of the buffers
// typedef struct SLDataFormat_PCM_ {
// SLuint32 formatType; //format, here PCM
// SLuint32 numChannels; //number of channels
// SLuint32 samplesPerSec; //sample rate
// SLuint32 bitsPerSample; //bits per sample (quantization format)
// SLuint32 containerSize; //container size in bits
// SLuint32 channelMask; //speaker mask
// SLuint32 endianness; //byte order
// } SLDataFormat_PCM;
SLDataFormat_PCM dataFormat_pcm = {SL_DATAFORMAT_PCM, (SLuint32) numChannels,
(SLuint32) samplesRate, (SLuint32) bitsPerSample,
(SLuint32) bitsPerSample, (SLuint32) channelMask,
SL_BYTEORDER_LITTLEENDIAN};
//struct holding the locator (buffer queue) and format pointers
// typedef struct SLDataSource_ {
// void *pLocator; //locator (the buffer queue)
// void *pFormat; //data format
// } SLDataSource;
SLDataSource audioSrc = {&buffQueque, &dataFormat_pcm};
//2. describe the audio sink: the output mix created earlier
// typedef struct SLDataLocator_OutputMix {
// SLuint32 locatorType;
// SLObjectItf outputMix;
// } SLDataLocator_OutputMix;
SLDataLocator_OutputMix dataLocator_outputMix = {SL_DATALOCATOR_OUTPUTMIX, mix};
//sink descriptor
// typedef struct SLDataSink_ {
// void *pLocator;
// void *pFormat;
// } SLDataSink;
SLDataSink audioSnk = {&dataLocator_outputMix, NULL};
//3. create the player through the engine interface
// SLresult (*CreateAudioPlayer) (
// SLEngineItf self,
// SLObjectItf * pPlayer,
// SLDataSource *pAudioSrc,
// SLDataSink *pAudioSnk,
// SLuint32 numInterfaces,
// const SLInterfaceID * pInterfaceIds,
// const SLboolean * pInterfaceRequired
// );
SLInterfaceID ids[3] = {SL_IID_BUFFERQUEUE, SL_IID_EFFECTSEND, SL_IID_VOLUME};
SLboolean required[3] = {SL_BOOLEAN_TRUE, SL_BOOLEAN_TRUE, SL_BOOLEAN_TRUE};
result = (*engineInterface)->CreateAudioPlayer(engineInterface, &player, &audioSrc, &audioSnk,
3, ids,
required);
if (!isSuccess(result)) {
return result;
}
//realize the player
result = (*player)->Realize(player, SL_BOOLEAN_FALSE);
if (!isSuccess(result)) {
return result;
}
//get the play interface
result = (*player)->GetInterface(player, SL_IID_PLAY, &playInterface);
if (!isSuccess(result)) {
return result;
}
result = (*player)->GetInterface(player, SL_IID_VOLUME, &volumeItf);
if (!isSuccess(result)) {
return result;
}
//get the buffer queue interface
result = (*player)->GetInterface(player, SL_IID_BUFFERQUEUE, &bufferQueueItf);
if (!isSuccess(result)) {
return result;
}
return result;
}
/**
 * Register the buffer-queue callback
 * @return SLresult
 */
SLresult OpenslHelper::registerCallback(slAndroidSimpleBufferQueueCallback callback) {
//register the callback on the buffer queue
result = (*bufferQueueItf)->RegisterCallback(bufferQueueItf, callback, NULL);
return result;
}
//start playback
SLresult OpenslHelper::play() {
playState = SL_PLAYSTATE_PLAYING;
result = (*playInterface)->SetPlayState(playInterface, SL_PLAYSTATE_PLAYING);
return result;
}
//pause playback
SLresult OpenslHelper::pause() {
playState = SL_PLAYSTATE_PAUSED;
result = (*playInterface)->SetPlayState(playInterface, SL_PLAYSTATE_PAUSED);
return result;
}
//stop playback
SLresult OpenslHelper::stop() {
playState = SL_PLAYSTATE_STOPPED;
result = (*playInterface)->SetPlayState(playInterface, SL_PLAYSTATE_STOPPED);
return result;
}
//destructor: destroy the OpenSL objects and null out the interfaces obtained from them
OpenslHelper::~OpenslHelper() {
//player
if (player != NULL) {
(*player)->Destroy(player);
player = NULL;
//play interface
playInterface = NULL;
//buffer queue interface
bufferQueueItf = NULL;
//volume interface
volumeItf = NULL;
}
//output mix
if (mix != NULL) {
(*mix)->Destroy(mix);
mix = NULL;
//environmental reverb interface
environmentalReverbItf = NULL;
}
//engine
if (engine != NULL) {
(*engine)->Destroy(engine);
engine = NULL;
//engine interface
engineInterface = NULL;
}
}
Now we reuse our earlier FFmpeg decoding code and feed the decoded PCM to OpenSL for playback:
#include <jni.h>
#include <string>
#include <android/log.h>
#include <android/native_window_jni.h>
#include <unistd.h>
#include <pthread.h>
#include <stdio.h>
extern "C" {
//编码
#include "libavcodec/avcodec.h"
//封装格式处理
#include "libavformat/avformat.h"
#include "libswresample/swresample.h"
//像素处理
#include "libswscale/swscale.h"
#include "_opensl_helper.h"
}
OpenslHelper helper;
uint8_t *out;
int buff_size;
//char *filePath;
AVFormatContext *formatContext;
AVCodecContext *codecContext;
int audio_stream_idx;
AVPacket *pkt;
AVFrame *picture;
SwrContext *swrContext;
int channel_count;
int out_size;
//simple error-log macro (tag string is arbitrary); defined here in case the project does not already provide one
#define LOGE(FORMAT, ...) __android_log_print(ANDROID_LOG_ERROR, "PcmPlay", FORMAT, ##__VA_ARGS__)
void playerCallback(SLAndroidSimpleBufferQueueItf bq, void *pContext);
/**
 * Decode the audio stream to PCM and play it with OpenSL
 */
extern "C"
JNIEXPORT void JNICALL
Java_com_aruba_ffmpegapplication_PcmPlayActivity_playByOpenSL(JNIEnv *env, jobject instance,
jstring inputFilePath_) {
//initialize OpenSL
SLresult result = helper.createEngine();
if (!helper.isSuccess(result)) {
LOGE("createEngine failed");
return;
}
result = helper.createMix();
if (!helper.isSuccess(result)) {
LOGE("createMix failed");
return;
}
const char *inputFilePath = env->GetStringUTFChars(inputFilePath_, 0);
// const int size = sizeof(inputFilePath);
// filePath = static_cast<char *>(malloc(size));
// memcpy(filePath, inputFilePath, size);
//register all FFmpeg components
av_register_all();
//open the input file
formatContext = avformat_alloc_context();
if (avformat_open_input(&formatContext, inputFilePath, NULL, NULL) != 0) {
LOGE("Failed to open input file");
avformat_free_context(formatContext);
env->ReleaseStringUTFChars(inputFilePath_, inputFilePath);
return;
}
//read the stream information into the AVFormatContext
if (avformat_find_stream_info(formatContext, NULL) < 0) {
LOGE("Failed to read stream info");
avformat_free_context(formatContext);
env->ReleaseStringUTFChars(inputFilePath_, inputFilePath);
return;
}
//find the audio stream and its codec context
codecContext = NULL;
audio_stream_idx = -1;
for (int i = 0; i < formatContext->nb_streams; ++i) {
if (formatContext->streams[i]->codec->codec_type == AVMEDIA_TYPE_AUDIO) {//this is the audio stream
codecContext = formatContext->streams[i]->codec;
audio_stream_idx = i;
break;
}
}
if (codecContext == NULL) {
avformat_free_context(formatContext);
env->ReleaseStringUTFChars(inputFilePath_, inputFilePath);
return;
}
//find the decoder matching the codec context's codec id
AVCodec *codec = avcodec_find_decoder(codecContext->codec_id);
//open the decoder
if (avcodec_open2(codecContext, codec, NULL) < 0) {
LOGE("Failed to open decoder");
avformat_free_context(formatContext);
env->ReleaseStringUTFChars(inputFilePath_, inputFilePath);
return;
}
//read the stream frame by frame
//AVPacket holds one packet of compressed data
pkt = (AVPacket *) (av_malloc(sizeof(AVPacket)));
av_init_packet(pkt);
//AVFrame holds the decoded data
picture = av_frame_alloc();
//resampler context
swrContext = swr_alloc();
//AV_CH_LAYOUT_STEREO: two channels, AV_SAMPLE_FMT_S16: 16-bit samples, codecContext->sample_rate: sample rate in Hz
swr_alloc_set_opts(swrContext, AV_CH_LAYOUT_STEREO, AV_SAMPLE_FMT_S16,
codecContext->sample_rate,//keep the output sample rate equal to the input sample rate
codecContext->channel_layout, codecContext->sample_fmt,
codecContext->sample_rate, 0, NULL
);
swr_init(swrContext);
//number of channels after conversion (stereo), used later to compute the real buffer size
channel_count = av_get_channel_layout_nb_channels(AV_CH_LAYOUT_STEREO);
//output buffer: one second of mono data in bytes = sample rate * bits per sample / 8 = 44100 * 16 / 8
out_size = 44100 * 16 / 8;
out = (uint8_t *) (av_malloc(out_size));
//create the player and start playback
//note: the player is created at a fixed 44.1 kHz; a file with a different sample rate would need the matching SL_SAMPLINGRATE_XX constant
result = helper.createPlayer(2, SL_SAMPLINGRATE_44_1, SL_PCMSAMPLEFORMAT_FIXED_16,
SL_SPEAKER_FRONT_LEFT | SL_SPEAKER_FRONT_RIGHT);
if (!helper.isSuccess(result)) {
LOGE("createPlayer failed");
//free resources
av_free(out);
out = NULL;
swr_free(&swrContext);
av_frame_free(&picture);
avcodec_close(codecContext);
avformat_free_context(formatContext);
env->ReleaseStringUTFChars(inputFilePath_, inputFilePath);
return;
}
helper.registerCallback(playerCallback);
helper.play();
//enqueue the first buffer by calling the callback once ourselves; from then on OpenSL keeps calling it
playerCallback(helper.bufferQueueItf, NULL);
env->ReleaseStringUTFChars(inputFilePath_, inputFilePath);
}
void release() {
//free resources
av_free_packet(pkt);
av_free(pkt);
pkt = NULL;
av_freep(&out);
swr_free(&swrContext);
av_frame_free(&picture);
avcodec_close(codecContext);
avformat_free_context(formatContext);
}
void getData(uint8_t **out, int *buff_size) {
if (out == NULL || buff_size == NULL) {
return;
}
int picture_ptr = 0;
while (av_read_frame(formatContext, pkt) == 0) {//read one packet of compressed data into the AVPacket
if (pkt->stream_index == audio_stream_idx) {
//decode
avcodec_decode_audio4(codecContext, picture, &picture_ptr, pkt);
LOGE("picture_ptr %d", picture_ptr);
if (picture_ptr > 0) {
//convert to stereo 16-bit PCM; the third argument is the output capacity in samples per channel (not bytes)
swr_convert(swrContext, out, out_size / 4,
(const uint8_t **) (picture->data), picture->nb_samples);
//actual number of bytes written into the buffer
*buff_size = av_samples_get_buffer_size(NULL, channel_count, picture->nb_samples,
AV_SAMPLE_FMT_S16, 1);
break;
}
}
}
av_free_packet(pkt);
}
/**
 * The player keeps invoking this callback; each time we refill the buffer queue with fresh PCM data
 * @param bq
 * @param pContext
 */
void playerCallback(SLAndroidSimpleBufferQueueItf bq, void *pContext) {
if (helper.playState == SL_PLAYSTATE_PLAYING) {
getData(&out, &buff_size);
if (out != NULL && buff_size != 0) {
(*bq)->Enqueue(bq, out, (SLuint32) (buff_size));
}
} else if (helper.playState == SL_PLAYSTATE_STOPPED) {
release();
//explicitly run the helper's destructor to tear down the OpenSL objects
helper.~OpenslHelper();
}
}
extern "C"
JNIEXPORT void JNICALL
Java_com_aruba_ffmpegapplication_PcmPlayActivity_stopByOpenSL(JNIEnv *env, jobject instance) {
helper.stop();
}
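One caveat about the cleanup path: playerCallback explicitly invokes the destructor of the global helper object, so the destructor runs a second time when the process tears the global down, and the helper cannot simply be reused by a later playByOpenSL call. A possible alternative (a sketch, not part of the original code) is to move the release logic into an ordinary member function, declare it in the header, and call that from the callback instead:
// Hypothetical OpenslHelper::destroy(): same release logic as the destructor, but safe to
// call repeatedly and leaves the object ready for createEngine() to be called again.
// Assumes a "void destroy();" declaration has been added to OpenslHelper in _opensl_helper.h.
void OpenslHelper::destroy() {
    if (player != NULL) {
        (*player)->Destroy(player);
        player = NULL;
        playInterface = NULL;
        bufferQueueItf = NULL;
        volumeItf = NULL;
    }
    if (mix != NULL) {
        (*mix)->Destroy(mix);
        mix = NULL;
        environmentalReverbItf = NULL;
    }
    if (engine != NULL) {
        (*engine)->Destroy(engine);
        engine = NULL;
        engineInterface = NULL;
    }
}
The destructor could then simply delegate to destroy(), and playerCallback would call helper.destroy() instead of helper.~OpenslHelper().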