1. MainActivity
package com.luisliuyi.demo.ffmpeg;

import android.Manifest;
import android.content.pm.PackageManager;
import android.media.AudioFormat;
import android.media.AudioManager;
import android.media.AudioTrack;
import android.os.Build;
import android.os.Bundle;
import android.os.Environment;
import android.util.Log;
import android.view.View;
import android.widget.Button;

import androidx.appcompat.app.AppCompatActivity; // or android.support.v7.app.AppCompatActivity on older support-library projects

import java.io.File;

public class MainActivity extends AppCompatActivity {
    private AudioTrack audioTrack;

    static {
        System.loadLibrary("native-lib");
    }

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        checkPermission();
        Button btn = (Button) findViewById(R.id.btn);
        btn.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                final File file = new File(Environment.getExternalStorageDirectory(), "input.mp3");
                // playSound() decodes the whole file synchronously, so keep it off the main thread.
                new Thread(new Runnable() {
                    @Override
                    public void run() {
                        playSound(file.getAbsolutePath());
                    }
                }).start();
            }
        });
    }
    public boolean checkPermission() {
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M && checkSelfPermission(
                Manifest.permission.WRITE_EXTERNAL_STORAGE) != PackageManager.PERMISSION_GRANTED) {
            requestPermissions(new String[]{
                    Manifest.permission.READ_EXTERNAL_STORAGE,
                    Manifest.permission.WRITE_EXTERNAL_STORAGE,
                    Manifest.permission.CAMERA
            }, 1);
            return false;
        }
        return true;
    }
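    // Not in the original post: a minimal sketch of handling the permission dialog result,
    // assuming the request code 1 used above. Without storage permission, the native code
    // cannot open /sdcard/input.mp3.
    @Override
    public void onRequestPermissionsResult(int requestCode, String[] permissions, int[] grantResults) {
        super.onRequestPermissionsResult(requestCode, permissions, grantResults);
        if (requestCode == 1) {
            boolean granted = grantResults.length > 0
                    && grantResults[0] == PackageManager.PERMISSION_GRANTED;
            Log.d("liuyi", "storage permission granted: " + granted);
        }
    }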
    public native void playSound(String input);

    // Called from native code: create an AudioTrack matching the decoded PCM format.
    public void createTrack(int sampleRateInHz, int nbChannels) {
        Log.e("liuyi", "createTrack");
        int channelConfig;
        if (nbChannels == 1) {
            channelConfig = AudioFormat.CHANNEL_OUT_MONO;
        } else if (nbChannels == 2) {
            channelConfig = AudioFormat.CHANNEL_OUT_STEREO;
        } else {
            channelConfig = AudioFormat.CHANNEL_OUT_MONO;
        }
        int bufferSize = AudioTrack.getMinBufferSize(sampleRateInHz,
                channelConfig, AudioFormat.ENCODING_PCM_16BIT);
        audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRateInHz, channelConfig,
                AudioFormat.ENCODING_PCM_16BIT, bufferSize,
                AudioTrack.MODE_STREAM);
        audioTrack.play();
    }
    // Called from native code: write one converted PCM buffer to the AudioTrack.
    public void playTrack(byte[] buffer, int length) {
        Log.e("liuyi", "playTrack");
        if (audioTrack != null && audioTrack.getPlayState() == AudioTrack.PLAYSTATE_PLAYING) {
            audioTrack.write(buffer, 0, length);
        }
    }
}
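The AudioTrack constructor used in createTrack() is deprecated as of API 26. Below is a minimal sketch, not part of the original project, of a Builder-based drop-in replacement (same method name and JNI signature) plus an onDestroy() hook that releases the track. It assumes minSdkVersion 23 or higher and needs an extra import for android.media.AudioAttributes.

// Hypothetical additions to MainActivity, not in the original post.

// Builder-based replacement for the deprecated AudioTrack constructor (API 23+).
public void createTrack(int sampleRateInHz, int nbChannels) {
    int channelConfig = (nbChannels == 2)
            ? AudioFormat.CHANNEL_OUT_STEREO : AudioFormat.CHANNEL_OUT_MONO;
    int bufferSize = AudioTrack.getMinBufferSize(sampleRateInHz,
            channelConfig, AudioFormat.ENCODING_PCM_16BIT);
    audioTrack = new AudioTrack.Builder()
            .setAudioAttributes(new AudioAttributes.Builder()
                    .setUsage(AudioAttributes.USAGE_MEDIA)
                    .setContentType(AudioAttributes.CONTENT_TYPE_MUSIC)
                    .build())
            .setAudioFormat(new AudioFormat.Builder()
                    .setEncoding(AudioFormat.ENCODING_PCM_16BIT)
                    .setSampleRate(sampleRateInHz)
                    .setChannelMask(channelConfig)
                    .build())
            .setBufferSizeInBytes(bufferSize)
            .setTransferMode(AudioTrack.MODE_STREAM)
            .build();
    audioTrack.play();
}

// Release the track when the Activity goes away.
@Override
protected void onDestroy() {
    super.onDestroy();
    if (audioTrack != null) {
        audioTrack.stop();
        audioTrack.release();
        audioTrack = null;
    }
}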
2. native-lib.cpp
#include <jni.h>
extern "C"
{
#include "libavcodec/avcodec.h"
#include "libavformat/avformat.h"
#include "libswresample/swresample.h"
}
#include <android/log.h>

#define LOGD(...) __android_log_print(ANDROID_LOG_DEBUG,"liuyi",__VA_ARGS__)
#define LOGE(...) __android_log_print(ANDROID_LOG_ERROR, "liuyi", __VA_ARGS__)
extern "C"
JNIEXPORT void JNICALL
Java_com_luisliuyi_demo_ffmpeg_MainActivity_playSound(JNIEnv *env, jobject instance, jstring input_) {
    const char *input = env->GetStringUTFChars(input_, 0);
    av_register_all();
    AVFormatContext *pFormatCtx = avformat_alloc_context();
    if (avformat_open_input(&pFormatCtx, input, NULL, NULL) != 0) {
        LOGE("%s", "failed to open the input audio file");
        env->ReleaseStringUTFChars(input_, input);
        return;
    }
    if (avformat_find_stream_info(pFormatCtx, NULL) < 0) {
        LOGE("%s", "failed to read stream info");
        avformat_close_input(&pFormatCtx);
        env->ReleaseStringUTFChars(input_, input);
        return;
    }
    // find the first audio stream
    int audio_stream_idx = -1;
    for (int i = 0; i < pFormatCtx->nb_streams; ++i) {
        if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_AUDIO) {
            LOGE("found audio stream, index %d", i);
            audio_stream_idx = i;
            break;
        }
    }
    if (audio_stream_idx == -1) {
        LOGE("%s", "no audio stream found");
        avformat_close_input(&pFormatCtx);
        env->ReleaseStringUTFChars(input_, input);
        return;
    }
    // get the decoder (streams[]->codec is deprecated but still available on FFmpeg 3.x)
    AVCodecContext *pCodecCtx = pFormatCtx->streams[audio_stream_idx]->codec;
    AVCodec *pCodex = avcodec_find_decoder(pCodecCtx->codec_id);
    // open the decoder
    if (avcodec_open2(pCodecCtx, pCodex, NULL) < 0) {
        LOGE("%s", "failed to open the decoder");
        avformat_close_input(&pFormatCtx);
        env->ReleaseStringUTFChars(input_, input);
        return;
    }
    AVPacket *packet = av_packet_alloc();
    AVFrame *frame = av_frame_alloc();
    int out_channel_nb = av_get_channel_layout_nb_channels(AV_CH_LAYOUT_STEREO);
    // resampler context: convert whatever the decoder outputs to packed 16-bit stereo PCM
    SwrContext *swrContext = swr_alloc();
    uint64_t out_ch_layout = AV_CH_LAYOUT_STEREO;
    enum AVSampleFormat out_format = AV_SAMPLE_FMT_S16;
    int out_sample_rate = pCodecCtx->sample_rate;
    // some files leave channel_layout unset, so derive it from the channel count if needed
    uint64_t in_ch_layout = pCodecCtx->channel_layout ? pCodecCtx->channel_layout
            : (uint64_t) av_get_default_channel_layout(pCodecCtx->channels);
    swr_alloc_set_opts(swrContext, out_ch_layout, out_format, out_sample_rate,
                       in_ch_layout, pCodecCtx->sample_fmt, pCodecCtx->sample_rate, 0, NULL);
    swr_init(swrContext);
    // room for up to one second of stereo 16-bit output
    int max_out_samples = out_sample_rate;
    uint8_t *out_buffer = (uint8_t *) av_malloc(max_out_samples * out_channel_nb * 2);
    // tell the Java side the actual output sample rate and channel count
    jclass david_player = env->GetObjectClass(instance);
    jmethodID createAudio = env->GetMethodID(david_player, "createTrack", "(II)V");
    env->CallVoidMethod(instance, createAudio, out_sample_rate, out_channel_nb);
    jmethodID audio_write = env->GetMethodID(david_player, "playTrack", "([BI)V");
    while (av_read_frame(pFormatCtx, packet) >= 0) {
        if (packet->stream_index == audio_stream_idx) {
            int ret = avcodec_send_packet(pCodecCtx, packet);
            if (ret < 0 && ret != AVERROR(EAGAIN) && ret != AVERROR_EOF) {
                LOGE("%s", "error sending a packet to the decoder");
                av_packet_unref(packet);
                break;
            }
            // drain every frame the decoder produced for this packet
            while (avcodec_receive_frame(pCodecCtx, frame) == 0) {
                // resample into packed 16-bit stereo
                int out_samples = swr_convert(swrContext, &out_buffer, max_out_samples,
                                              (const uint8_t **) frame->data, frame->nb_samples);
                if (out_samples <= 0) {
                    continue;
                }
                int size = av_samples_get_buffer_size(NULL, out_channel_nb, out_samples,
                                                      AV_SAMPLE_FMT_S16, 1);
                // hand the PCM buffer to AudioTrack on the Java side
                jbyteArray audio_sample_array = env->NewByteArray(size);
                env->SetByteArrayRegion(audio_sample_array, 0, size,
                                        reinterpret_cast<const jbyte *>(out_buffer));
                env->CallVoidMethod(instance, audio_write, audio_sample_array, size);
                env->DeleteLocalRef(audio_sample_array);
            }
        }
        av_packet_unref(packet);
    }
    // release everything
    av_frame_free(&frame);
    av_packet_free(&packet);
    av_free(out_buffer);
    swr_free(&swrContext);
    avcodec_close(pCodecCtx);
    avformat_close_input(&pFormatCtx);
    env->ReleaseStringUTFChars(input_, input);
}
3. Source code
https://gitee.com/luisliuyi/android-ffmpeg02.git