一. 环境说明
平台:v7
ffmpeg版本:4.0.2
二.activity_main.xml
<?xml version="1.0" encoding="utf-8"?>
<!-- Root: vertical column with a status label, the video surface,
     and an (initially hidden) time/seek row. -->
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
    xmlns:tools="http://schemas.android.com/tools"
    android:layout_width="match_parent"
    android:layout_height="match_parent"
    android:orientation="vertical"
    tools:context=".MainActivity">

    <!-- Player state / error messages.
         BUG FIX: textSize uses sp (was "20dp") so text scales with the
         user's font-size preference. -->
    <TextView
        android:id="@+id/tv_state"
        android:layout_width="wrap_content"
        android:layout_height="wrap_content"
        android:layout_marginLeft="20dp"
        android:layout_marginRight="20dp"
        android:layout_marginTop="10dp"
        android:text="-"
        android:textSize="20sp" />

    <!-- Video render target, driven by the native player. -->
    <SurfaceView
        android:id="@+id/surfaceView"
        android:layout_width="match_parent"
        android:layout_height="200dp" />

    <!-- Time + seek row; MainActivity shows it only when the media has a
         known (non-zero) duration. -->
    <LinearLayout
        android:layout_width="match_parent"
        android:layout_height="30dp"
        android:layout_margin="5dp">

        <TextView
            android:id="@+id/tv_time"
            android:layout_width="wrap_content"
            android:layout_height="match_parent"
            android:gravity="center"
            android:text="00:00/00:00"
            android:visibility="gone" />

        <SeekBar
            android:id="@+id/seekBar"
            android:layout_width="0dp"
            android:layout_height="match_parent"
            android:layout_weight="1"
            android:max="100"
            android:visibility="gone" />
    </LinearLayout>
</LinearLayout>
三.MainActivity.java
package com.derry.player;
import androidx.appcompat.app.AppCompatActivity;
import android.annotation.SuppressLint;
import android.graphics.Color;
import android.os.Bundle;
import android.os.Environment;
import android.view.SurfaceView;
import android.view.View;
import android.view.WindowManager;
import android.widget.SeekBar;
import android.widget.TextView;
import java.io.File;
/**
 * Demo playback screen: wires a DerryPlayer (FFmpeg/JNI backend) to a
 * SurfaceView plus a SeekBar, and mirrors player callbacks onto the UI thread.
 */
public class MainActivity extends AppCompatActivity implements SeekBar.OnSeekBarChangeListener {
private DerryPlayer player;
private TextView tv_state;
private SurfaceView surfaceView;
private SeekBar seekBar;
private TextView tv_time;
// True while the user is dragging the SeekBar; player progress callbacks are
// ignored during the drag so the thumb does not jump back.
private boolean isTouch;
// Total media duration in seconds; 0 means live/unknown (seek UI stays hidden).
private int duration;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
// Keep the screen on during playback.
getWindow().setFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON, WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
setContentView(R.layout.activity_main);
tv_state = findViewById(R.id.tv_state);
surfaceView = findViewById(R.id.surfaceView);
tv_time = findViewById(R.id.tv_time);
seekBar = findViewById(R.id.seekBar);
seekBar.setOnSeekBarChangeListener(this);
player = new DerryPlayer();
player.setSurfaceView(surfaceView);
// NOTE(review): reads from external storage root; on API 23+ this needs the
// storage permission granted at runtime - confirm the sample requests it.
player.setDataSource(
new File(Environment.getExternalStorageDirectory() + File.separator + "20220302_130317.flv")
.getAbsolutePath());
// Invoked from a native (non-UI) thread once the media has been opened.
player.setOnPreparedListener(new DerryPlayer.OnPreparedListener() {
@Override
public void onPrepared() {
duration = player.getDuration();
runOnUiThread(new Runnable() {
@Override
public void run() {
// duration == 0 (e.g. live stream): keep time/seek UI hidden.
if (duration != 0) {
tv_time.setText("00:00/" + getMinutes(duration) + ":" + getSeconds(duration));
tv_time.setVisibility(View.VISIBLE);
seekBar.setVisibility(View.VISIBLE);
}
tv_state.setTextColor(Color.GREEN);
tv_state.setText("恭喜init初始化成功");
}
});
player.start();
}
});
// Errors reported by the native side (message already mapped by DerryPlayer).
player.setOnErrorListener(new DerryPlayer.OnErrorListener() {
@Override
public void onError(final String errorInfo) {
runOnUiThread(new Runnable() {
@Override
public void run() {
tv_state.setTextColor(Color.RED);
tv_state.setText("哎呀,错误啦,错误:" + errorInfo);
}
});
}
});
// Playback progress in seconds, reported from a native thread.
player.setOnOnProgressListener(new DerryPlayer.OnProgressListener() {
@Override
public void onProgress(final int progress) {
// Skip UI updates while the user is dragging the SeekBar.
if (!isTouch) {
runOnUiThread(new Runnable() {
@SuppressLint("SetTextI18n")
@Override
public void run() {
if (duration != 0) {
tv_time.setText(getMinutes(progress) + ":" + getSeconds(progress)
+ "/" +
getMinutes(duration) + ":" + getSeconds(duration));
// Map seconds onto the SeekBar's 0-100 range.
seekBar.setProgress(progress * 100 / duration);
}
}
});
}
}
});
}
@Override
protected void onResume() {
super.onResume();
// Kick off the async native prepare flow each time we come to the foreground
// (onStop tears playback down via player.stop()).
player.prepare();
}
@Override
protected void onStop() {
super.onStop();
player.stop();
}
@Override
protected void onDestroy() {
super.onDestroy();
player.release();
}
// Minutes part of a seconds count, zero-padded to two digits.
private String getMinutes(int duration) {
int minutes = duration / 60;
if (minutes <= 9) {
return "0" + minutes;
}
return "" + minutes;
}
// Seconds part of a seconds count, zero-padded to two digits.
private String getSeconds(int duration) {
int seconds = duration % 60;
if (seconds <= 9) {
return "0" + seconds;
}
return "" + seconds;
}
/**
 * Called whenever the SeekBar position changes.
 * @param seekBar the control
 * @param progress 0~100 (the SeekBar's configured range)
 * @param fromUser whether the change came from a user drag
 */
@SuppressLint("SetTextI18n")
@Override
public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) {
if (fromUser) {
// Convert the 0-100 SeekBar progress back into media seconds for display.
tv_time.setText(getMinutes(progress * duration / 100)
+ ":" +
getSeconds(progress * duration / 100) + "/" +
getMinutes(duration) + ":" + getSeconds(duration));
}
}
@Override
public void onStartTrackingTouch(SeekBar seekBar) {
isTouch = true;
}
@Override
public void onStopTrackingTouch(SeekBar seekBar) {
isTouch = false;
// Translate the SeekBar position (0-100) into seconds and seek natively.
int seekBarProgress = seekBar.getProgress();
int playProgress = seekBarProgress * duration / 100;
player.seek(playProgress);
}
}
四.DerryPlayer.java
package com.derry.player;
import android.view.Surface;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
/**
 * Thin Java facade over the native FFmpeg player (libnative-lib.so).
 * Typical flow: setSurfaceView + setDataSource -> prepare() -> onPrepared
 * callback -> start(); stop()/release() tear things down.
 * The on* callback methods are invoked from C++ via JNI reflection, so their
 * names and signatures must not change.
 */
public class DerryPlayer implements SurfaceHolder.Callback {
// Error codes passed to onError(int) from native code; keep in sync with C++.
private static final int FFMPEG_CAN_NOT_OPEN_URL = 1;
private static final int FFMPEG_CAN_NOT_FIND_STREAMS = 2;
private static final int FFMPEG_FIND_DECODER_FAIL = 3;
private static final int FFMPEG_ALLOC_CODEC_CONTEXT_FAIL = 4;
private static final int FFMPEG_CODEC_CONTEXT_PARAMETERS_FAIL = 6;
private static final int FFMPEG_OPEN_DECODER_FAIL = 7;
private static final int FFMPEG_NOMEDIA = 8;
static {
System.loadLibrary("native-lib");
}
private OnPreparedListener onPreparedListener;
private OnErrorListener onErrorListener;
private SurfaceHolder surfaceHolder;
public DerryPlayer() {
}
private String dataSource;
// Path or URL of the media; must be set before prepare().
public void setDataSource(String dataSource) {
this.dataSource = dataSource;
}
/**
 * Opens the media and initializes decoders on the native side.
 * Asynchronous: success is delivered through OnPreparedListener.
 */
public void prepare() {
prepareNative(dataSource);
}
/**
 * Starts playback (native decode/render threads).
 */
public void start() {
startNative();
}
/**
 * Stops playback.
 */
public void stop() {
stopNative();
}
/**
 * Releases all native resources; the instance must not be reused afterwards.
 */
public void release() {
releaseNative();
}
/**
 * Called from JNI (by reflection) when prepare succeeded.
 */
public void onPrepared() {
if (onPreparedListener != null) {
onPreparedListener.onPrepared();
}
}
/**
 * Registers the prepare-success listener.
 */
public void setOnPreparedListener(OnPreparedListener onPreparedListener) {
this.onPreparedListener = onPreparedListener;
}
/**
 * @return total duration in seconds, or 0 when unknown (e.g. live streams).
 */
public int getDuration() {
return getDurationNative();
}
// Seeks to the given playback position, in seconds.
public void seek(int playProgress) {
seekNative(playProgress);
}
/**
 * Listener for the asynchronous prepare() result.
 */
public interface OnPreparedListener {
void onPrepared();
}
/**
 * Called from JNI (by reflection) when the native side hits an error;
 * maps the numeric code to a human-readable message for the listener.
 */
public void onError(int errorCode) {
if (null != this.onErrorListener) {
String msg = null;
switch (errorCode) {
case FFMPEG_CAN_NOT_OPEN_URL:
msg = "打不开视频";
break;
case FFMPEG_CAN_NOT_FIND_STREAMS:
msg = "找不到流媒体";
break;
case FFMPEG_FIND_DECODER_FAIL:
msg = "找不到解码器";
break;
case FFMPEG_ALLOC_CODEC_CONTEXT_FAIL:
msg = "无法根据解码器创建上下文";
break;
case FFMPEG_CODEC_CONTEXT_PARAMETERS_FAIL:
msg = "根据流信息 配置上下文参数失败";
break;
case FFMPEG_OPEN_DECODER_FAIL:
msg = "打开解码器失败";
break;
case FFMPEG_NOMEDIA:
msg = "没有音视频";
break;
}
onErrorListener.onError(msg);
}
}
interface OnErrorListener {
void onError(String errorCode);
}
public void setOnErrorListener(OnErrorListener onErrorListener) {
this.onErrorListener = onErrorListener;
}
/**
 * Binds the SurfaceView whose surface the native renderer draws into.
 * @param surfaceView render target
 */
public void setSurfaceView(SurfaceView surfaceView) {
if (this.surfaceHolder != null) {
surfaceHolder.removeCallback(this); // drop the previous holder's callback
}
surfaceHolder = surfaceView.getHolder();
surfaceHolder.addCallback(this); // watch for surface create/change/destroy
}
@Override
public void surfaceCreated(SurfaceHolder surfaceHolder) {
}
@Override
public void surfaceChanged(SurfaceHolder surfaceHolder, int i, int i1, int i2) {
// Hand the (new) surface to native code whenever it is created or resized.
setSurfaceNative(surfaceHolder.getSurface());
}
@Override
public void surfaceDestroyed(SurfaceHolder surfaceHolder) {
}
/**
 * Called from JNI (by reflection) with the playback progress in seconds.
 */
public void onProgress(int progress) {
if (onProgressListener != null) {
onProgressListener.onProgress(progress);
}
}
private OnProgressListener onProgressListener;
public interface OnProgressListener {
void onProgress(int progress);
}
/**
 * Registers the playback-progress listener.
 */
public void setOnOnProgressListener(OnProgressListener onProgressListener) {
this.onProgressListener = onProgressListener;
}
//>>>>>>>>>>> native methods (implemented in native-lib.cpp)
private native void prepareNative(String dataSource);
private native void startNative();
private native void stopNative();
private native void releaseNative();
private native void setSurfaceNative(Surface surface);
private native int getDurationNative();
private native void seekNative(int playValue);
}
五.build.gradle
// Android application module build script (AGP 3.x era, compileSdk 28).
apply plugin: 'com.android.application'
android {
compileSdkVersion 28
buildToolsVersion "29.0.0"
defaultConfig {
applicationId "com.derry.player"
minSdkVersion 22
targetSdkVersion 28
versionCode 1
versionName "1.0"
testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner"
externalNativeBuild {
cmake {
cppFlags ""
abiFilters "armeabi-v7a" // restrict the CMake (CMakeLists.txt) build to this ABI
}
}
ndk{
abiFilters("armeabi-v7a") // ship only apk/lib/armeabi-v7a/libnative-lib.so
}
}
buildTypes {
release {
minifyEnabled false
proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro'
}
}
externalNativeBuild {
cmake {
path "src/main/cpp/CMakeLists.txt"
version "3.10.2"
}
}
}
dependencies {
implementation fileTree(dir: 'libs', include: ['*.jar'])
implementation 'androidx.appcompat:appcompat:1.1.0'
implementation 'androidx.constraintlayout:constraintlayout:1.1.3'
testImplementation 'junit:junit:4.12'
androidTestImplementation 'androidx.test.ext:junit:1.1.1'
androidTestImplementation 'androidx.test.espresso:espresso-core:3.2.0'
}
五.CMakeLists.txt
cmake_minimum_required(VERSION 3.6.4111459)
set(FFMPEG ${CMAKE_SOURCE_DIR}/ffmpeg) # root of the prebuilt FFmpeg (headers + per-ABI libs)
include_directories(${FFMPEG}/include) # FFmpeg headers
# Point the linker at the FFmpeg library directory for the ABI being built.
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -L${FFMPEG}/libs/${ANDROID_ABI}")
# Collect every C++ source file in this directory.
file(GLOB src_files *.cpp)
add_library(
native-lib
SHARED
${src_files})
# Link FFmpeg components plus NDK system libraries (OpenSLES for audio output,
# android for ANativeWindow, log for __android_log_*, z for zlib).
target_link_libraries(
native-lib
avfilter avformat avcodec avutil swresample swscale rtmp android z
OpenSLES
log )
五.native-lib.cpp
#include <jni.h>
#include <string>
#include "DerryPlayer.h"
#include "log4c.h"
#include "JNICallbakcHelper.h"
#include <android/native_window_jni.h>
extern "C"{
#include <libavutil/avutil.h>
}
DerryPlayer *player = nullptr; // single global player instance, created in prepareNative()
JavaVM *vm = nullptr; // cached in JNI_OnLoad so worker threads can call back into Java
ANativeWindow *window = nullptr; // current render target; guarded by `mutex`
pthread_mutex_t mutex = PTHREAD_MUTEX_INITIALIZER; // protects `window` between render and JNI threads
// Cache the JavaVM so native worker threads can attach and call back into Java.
jint JNI_OnLoad(JavaVM *javaVm, void *args) {
    ::vm = javaVm;
    return JNI_VERSION_1_6;
}
// Copy one decoded RGBA frame into the ANativeWindow back buffer.
// Called from the native video-play thread via VideoChannel's RenderCallback.
void renderFrame(uint8_t * src_data, int width, int height, int src_lineSize) {
    pthread_mutex_lock(&mutex);
    if (!window) {
        // Surface not available (not created yet, or already destroyed).
        pthread_mutex_unlock(&mutex);
        // BUG FIX: the original fell through here and passed the null window
        // to ANativeWindow_setBuffersGeometry / ANativeWindow_lock.
        return;
    }
    ANativeWindow_setBuffersGeometry(window, width, height, WINDOW_FORMAT_RGBA_8888);
    ANativeWindow_Buffer window_buffer;
    if (ANativeWindow_lock(window, &window_buffer, 0)) {
        // Lock failed: drop the window so the next surfaceChanged recreates it.
        ANativeWindow_release(window);
        window = 0;
        pthread_mutex_unlock(&mutex); // unlock before returning to avoid deadlock
        return;
    }
    uint8_t *dst_data = static_cast<uint8_t *>(window_buffer.bits);
    int dst_linesize = window_buffer.stride * 4; // stride is in pixels; RGBA = 4 bytes/pixel
    // Copy row by row. Source and destination line sizes can differ (padding),
    // so copy only as many bytes as both sides actually have - the original
    // always copied dst_linesize bytes and could read past the source row.
    int copy_bytes = dst_linesize < src_lineSize ? dst_linesize : src_lineSize;
    for (int i = 0; i < window_buffer.height; ++i) {
        memcpy(dst_data + i * dst_linesize, src_data + i * src_lineSize, copy_bytes);
    }
    // Publish the buffer to the screen.
    ANativeWindow_unlockAndPost(window);
    pthread_mutex_unlock(&mutex);
}
// Creates the native player for the given path/URL and starts the asynchronous
// prepare flow; results come back to Java through JNICallbakcHelper.
extern "C"
JNIEXPORT void JNICALL
Java_com_derry_player_DerryPlayer_prepareNative(JNIEnv *env, jobject job, jstring data_source) {
const char * data_source_ = env->GetStringUTFChars(data_source, 0);
auto *helper = new JNICallbakcHelper(vm, env, job); // bridges C++ worker/main threads back to Java
player = new DerryPlayer(data_source_, helper);
player->setRenderCallback(renderFrame);
player->prepare();
// NOTE(review): data_source_ is released here while DerryPlayer was built
// from that pointer - safe only if its constructor copies the string (verify
// in DerryPlayer.cpp). Also, calling prepareNative twice leaks the previous
// player and helper.
env->ReleaseStringUTFChars(data_source, data_source_);
}
// Begin playback. No-op until prepareNative() has created the player.
extern "C"
JNIEXPORT void JNICALL
Java_com_derry_player_DerryPlayer_startNative(JNIEnv *env, jobject thiz) {
    if (!player) {
        return;
    }
    player->start();
}
// Stop playback. No-op when no player exists.
extern "C"
JNIEXPORT void JNICALL
Java_com_derry_player_DerryPlayer_stopNative(JNIEnv *env, jobject thiz) {
    if (!player) {
        return;
    }
    player->stop();
}
// Final teardown: free the render window (under the lock, so a concurrent
// renderFrame() cannot draw into a released window) and destroy the player.
extern "C"
JNIEXPORT void JNICALL
Java_com_derry_player_DerryPlayer_releaseNative(JNIEnv *env, jobject thiz) {
    pthread_mutex_lock(&mutex);
    if (window) {
        ANativeWindow_release(window);
        window = nullptr;
    }
    pthread_mutex_unlock(&mutex);
    // BUG FIX: the original also did DELETE(vm) and DELETE(window). The JavaVM
    // is owned by the Android runtime and must never be deleted, and `window`
    // was already released and nulled above - `delete` on an ANativeWindow* is
    // undefined behavior in any case. Only the player is ours to free.
    DELETE(player);
}
// Swaps the render target: releases the previous ANativeWindow (if any) and
// acquires a new one from the Java Surface. Guarded by the same mutex as
// renderFrame() so a frame is never drawn into a half-released window.
extern "C"
JNIEXPORT void JNICALL
Java_com_derry_player_DerryPlayer_setSurfaceNative(JNIEnv *env, jobject thiz, jobject surface) {
pthread_mutex_lock(&mutex);
if (window) {
ANativeWindow_release(window);
window = nullptr;
}
window = ANativeWindow_fromSurface(env, surface);
pthread_mutex_unlock(&mutex);
}
// Total duration in seconds, or 0 when no player has been prepared yet.
extern "C"
JNIEXPORT jint JNICALL
Java_com_derry_player_DerryPlayer_getDurationNative(JNIEnv *env, jobject thiz) {
    return player ? player->getDuration() : 0;
}
// Seek to the given position (seconds). No-op when no player exists.
extern "C"
JNIEXPORT void JNICALL
Java_com_derry_player_DerryPlayer_seekNative(JNIEnv *env, jobject thiz, jint play_value) {
    if (!player) {
        return;
    }
    player->seek(play_value);
}
六.BaseChannel.h
#ifndef DERRYPLAYER_BASECHANNEL_H
#define DERRYPLAYER_BASECHANNEL_H
extern "C" {
#include <libavcodec/avcodec.h>
#include <libavutil/time.h>
};
#include "safe_queue.h"
#include "log4c.h"
#include "JNICallbakcHelper.h"
// Common state shared by the audio and video channels: the packet/frame
// queues, the codec context, and the stream time base used for A/V sync.
class BaseChannel {
public:
    int stream_index;                 // index of this stream in the AVFormatContext
    SafeQueue<AVPacket *> packets;    // compressed packets: demuxer -> decoder
    SafeQueue<AVFrame *> frames;      // decoded frames: decoder -> renderer
    bool isPlaying;
    AVCodecContext *codecContext = 0; // NOTE(review): not freed here - confirm the owner releases it
    AVRational time_base;             // stream time base (A/V sync)
    JNICallbakcHelper *jniCallbakcHelper = 0;

    void setJNICallbakcHelper(JNICallbakcHelper *jniCallbakcHelper) {
        this->jniCallbakcHelper = jniCallbakcHelper;
    }

    BaseChannel(int stream_index, AVCodecContext *codecContext, AVRational time_base)
            : stream_index(stream_index),
              codecContext(codecContext),
              time_base(time_base) {
        // Let the queues free whatever they still hold when cleared.
        packets.setReleaseCallback(releaseAVPacket);
        frames.setReleaseCallback(releaseAVFrame);
    }

    virtual ~BaseChannel() {
        packets.clear();
        frames.clear();
    }

    /**
     * Release one AVPacket* held by the packet queue.
     * BUG FIX: the original line read "av_packet_free(p);=" - the stray '='
     * was a syntax error and the file could not compile.
     */
    static void releaseAVPacket(AVPacket **p) {
        if (p) {
            av_packet_free(p); // frees the packet and nulls *p
            *p = 0;
        }
    }

    /**
     * Release one AVFrame* held by the frame queue.
     * BUG FIX: stray '=' after "av_frame_free(f);" removed here as well.
     */
    static void releaseAVFrame(AVFrame **f) {
        if (f) {
            av_frame_free(f); // frees the frame and nulls *f
            *f = 0;
        }
    }
};
#endif //DERRYPLAYER_BASECHANNEL_H
七.AudioChannel.h
#ifndef DERRYPLAYER_AUDIOCHANNEL_H
#define DERRYPLAYER_AUDIOCHANNEL_H
#include "BaseChannel.h"
#include "JNICallbakcHelper.h"
#include <SLES/OpenSLES.h>
#include <SLES/OpenSLES_Android.h>
extern "C" {
#include <libswresample/swresample.h> // 对音频数据进行转换(重采样)
}
// Audio playback channel: decodes audio packets and renders PCM via OpenSL ES.
// Publishes audio_time, the master clock that VideoChannel syncs against.
class AudioChannel : public BaseChannel {
private:
pthread_t pid_audio_decode; // thread: packets -> frames
pthread_t pid_audio_play; // thread: frames -> resample -> OpenSL ES
public:
// Output (resampled) format description and the PCM staging buffer.
int out_channels;
int out_sample_size;
int out_sample_rate;
int out_buffers_size;
uint8_t *out_buffers = 0;
SwrContext *swr_ctx = 0; // resampler: decoded layout -> out_* layout
public:
// OpenSL ES engine object
SLObjectItf engineObject = 0;
// engine interface
SLEngineItf engineInterface = 0;
// output mix
SLObjectItf outputMixObject = 0;
// player object
SLObjectItf bqPlayerObject = 0;
// player interface
SLPlayItf bqPlayerPlay = 0;
// buffer-queue interface that feeds PCM to the player
SLAndroidSimpleBufferQueueItf bqPlayerBufferQueue = 0;
double audio_time; // current audio clock in seconds (A/V sync master)
public:
AudioChannel(int stream_index, AVCodecContext *codecContext, AVRational rational);
~AudioChannel();
void stop();
void start();
void audio_decode();
void audio_play();
int getPCM();
};
#endif //DERRYPLAYER_AUDIOCHANNEL_H
八.AudioChannel.cpp(注:下面贴出的内容实际上是 VideoChannel.h 的重复,AudioChannel.cpp 的源码在本文中缺失,请从文末的网盘链接获取)
// NOTE(review): this section is headed "AudioChannel.cpp" in the article, but
// the content below is byte-for-byte the VideoChannel.h header again (same
// include guard as section 九). The real AudioChannel.cpp source appears to be
// missing from the post - retrieve it from the linked download instead.
#ifndef DERRYPLAYER_VIDEOCHANNEL_H
#define DERRYPLAYER_VIDEOCHANNEL_H
#include "BaseChannel.h"
#include "AudioChannel.h"
extern "C" {
#include <libswscale/swscale.h>
#include <libavutil/avutil.h>
#include <libavutil/imgutils.h>
};
// Callback handing a finished RGBA frame to the platform layer:
// (pixel data, width, height, line size in bytes).
typedef void(*RenderCallback) (uint8_t *, int, int, int);
class VideoChannel : public BaseChannel {
private:
pthread_t pid_video_decode;
pthread_t pid_video_play;
RenderCallback renderCallback;
int fps;
AudioChannel *audio_channel = 0;
public:
VideoChannel(int stream_index, AVCodecContext *codecContext, AVRational rational, int i);
~VideoChannel();
void start();
void stop();
void video_decode();
void video_play();
void setRenderCallback(RenderCallback renderCallback);
void setAudioChannel(AudioChannel *audio_channel);
};
#endif //DERRYPLAYER_VIDEOCHANNEL_H
九.VideoChannel.h
#ifndef DERRYPLAYER_VIDEOCHANNEL_H
#define DERRYPLAYER_VIDEOCHANNEL_H
#include "BaseChannel.h"
#include "AudioChannel.h"
extern "C" {
#include <libswscale/swscale.h>
#include <libavutil/avutil.h>
#include <libavutil/imgutils.h>
};
// Callback handing a finished RGBA frame to the platform layer:
// (pixel data, width, height, line size in bytes).
typedef void(*RenderCallback) (uint8_t *, int, int, int);
// Video playback channel: decodes video packets, converts frames to RGBA,
// and renders them in sync with the audio clock.
class VideoChannel : public BaseChannel {
private:
pthread_t pid_video_decode; // thread: packets -> frames
pthread_t pid_video_play; // thread: frames -> RGBA -> renderCallback
RenderCallback renderCallback;
int fps; // stream frame rate, used to derive the per-frame delay
AudioChannel *audio_channel = 0; // master-clock provider for A/V sync
public:
VideoChannel(int stream_index, AVCodecContext *codecContext, AVRational rational, int i);
~VideoChannel();
void start();
void stop();
void video_decode();
void video_play();
void setRenderCallback(RenderCallback renderCallback);
void setAudioChannel(AudioChannel *audio_channel);
};
#endif //DERRYPLAYER_VIDEOCHANNEL_H
十.VideoChannel.cpp
#include "VideoChannel.h"
/**
* 丢包 AVFrame * 原始包 很简单,因为不需要考虑 关键帧
* @param q
*/
/**
 * Discard the oldest decoded frame in the queue (A/V sync load-shedding).
 * Raw frames carry no inter-frame dependency, so any one may be dropped.
 */
void dropAVFrame(queue<AVFrame *> &q) {
    if (q.empty()) {
        return;
    }
    AVFrame *oldest = q.front();
    q.pop();
    BaseChannel::releaseAVFrame(&oldest);
}
/**
* 丢包 AVPacket * 压缩包 考虑关键帧
* @param q
*/
/**
 * Discard queued compressed packets up to (but not including) the next key
 * frame. Non-key packets depend on an earlier key frame and are safe to drop;
 * dropping a key frame would corrupt everything that references it.
 * NOTE(review): `flags` is a bitmask - `pkt->flags != AV_PKT_FLAG_KEY` misses
 * key packets that also carry other flag bits; `!(flags & AV_PKT_FLAG_KEY)`
 * would be the robust test (left unchanged here to preserve behavior).
 */
void dropAVPacket(queue<AVPacket *> &q) {
    while (!q.empty()) {
        AVPacket *head = q.front();
        if (head->flags == AV_PKT_FLAG_KEY) {
            break; // keep the key frame and everything after it
        }
        q.pop();
        BaseChannel::releaseAVPacket(&head);
    }
}
// Builds the channel and installs the drop callbacks so the SafeQueues can
// shed load when video falls behind audio (see dropAVFrame/dropAVPacket).
VideoChannel::VideoChannel(int stream_index, AVCodecContext *codecContext, AVRational time_base,
int fps)
: BaseChannel(stream_index, codecContext, time_base),
fps(fps)
{
frames.setSyncCallback(dropAVFrame);
packets.setSyncCallback(dropAVPacket);
}
VideoChannel::~VideoChannel() {
// NOTE(review): audio_channel is injected via setAudioChannel(); deleting it
// here assumes this channel owns it. If DerryPlayer also frees the audio
// channel this is a double delete - confirm ownership in DerryPlayer.cpp.
DELETE(audio_channel);
}
// Stop both worker threads and drain the queues.
void VideoChannel::stop() {
    // BUG FIX: the original called pthread_join *before* clearing isPlaying
    // and disabling the queues, so both workers kept looping (or stayed
    // blocked inside getQueueAndDel) and the joins never returned.
    isPlaying = false;
    // Wake/disable the queues so the decode and play loops can observe
    // isPlaying == false and exit.
    packets.setWork(0);
    frames.setWork(0);
    pthread_join(pid_video_decode, nullptr);
    pthread_join(pid_video_play, nullptr);
    // With both workers gone, free whatever is still queued.
    packets.clear();
    frames.clear();
}
void *task_video_decode(void *args) {
auto *video_channel = static_cast<VideoChannel *>(args);
video_channel->video_decode();
return nullptr;
}
void *task_video_play(void *args) {
auto *video_channel = static_cast<VideoChannel *>(args);
video_channel->video_play();
return nullptr;
}
// 视频:1.解码 2.播放
// Video playback: spin up the decode thread and the play (render) thread.
void VideoChannel::start() {
isPlaying = true;
// Let both queues accept and deliver work.
packets.setWork(1);
frames.setWork(1);
// Thread 1: pop compressed packets, decode, push raw (YUV) frames.
pthread_create(&pid_video_decode, nullptr, task_video_decode, this);
// Thread 2: pop raw frames, convert to RGBA, sync with audio, render.
pthread_create(&pid_video_play, nullptr, task_video_play, this);
}
// Decode worker: pull compressed packets, decode into AVFrames, push frames
// to the render queue.
void VideoChannel::video_decode() {
    AVPacket *pkt = nullptr;
    while (isPlaying) {
        // Back-pressure: don't let the frame queue grow without bound.
        if (isPlaying && frames.size() > 100) {
            av_usleep(10 * 1000); // 10 ms
            continue;
        }
        int ret = packets.getQueueAndDel(pkt);
        if (!isPlaying) {
            break; // stop() was called while we were blocked on the queue
        }
        if (!ret) {
            continue; // queue disabled / nothing taken
        }
        ret = avcodec_send_packet(codecContext, pkt);
        // The codec keeps its own reference to the packet data, so ours can be
        // released now. BUG FIX: the original only freed pkt on the success
        // path and leaked it whenever receive_frame returned EAGAIN or the
        // loop broke early after send_packet.
        av_packet_unref(pkt);
        releaseAVPacket(&pkt);
        pkt = nullptr;
        if (ret) {
            break; // send failed: give up decoding
        }
        AVFrame *frame = av_frame_alloc(); // holder for the decoded frame
        ret = avcodec_receive_frame(codecContext, frame);
        if (ret == AVERROR(EAGAIN)) {
            // Decoder needs more input (e.g. a B-frame waiting on its forward
            // reference). BUG FIX: free the freshly allocated frame instead of
            // leaking one per EAGAIN as the original did.
            releaseAVFrame(&frame);
            continue;
        } else if (ret != 0) {
            if (frame) {
                releaseAVFrame(&frame);
            }
            break;
        }
        // Ownership of `frame` moves to the queue (freed by the play thread).
        frames.insertToQueue(frame);
    }
    // Release a packet still held if the loop exited between take and free.
    if (pkt) {
        av_packet_unref(pkt);
        releaseAVPacket(&pkt);
        pkt = nullptr;
    }
}
void VideoChannel::video_play() {
LOGE("video_play start" )
AVFrame *frame = 0;
uint8_t *dst_data[4]; // RGBA
int dst_linesize[4]; // RGBA
av_image_alloc(dst_data, dst_linesize,
codecContext->width, codecContext->height, AV_PIX_FMT_RGBA, 1);
// yuv -> rgba
SwsContext *sws_ctx = sws_getContext(
// 下面是输入环节
codecContext->width,
codecContext->height,
codecContext->pix_fmt, // 自动获取 xxx.mp4 的像素格式 AV_PIX_FMT_YUV420P // 写死的
// 下面是输出环节
codecContext->width,
codecContext->height,
AV_PIX_FMT_RGBA,
SWS_BILINEAR, NULL, NULL, NULL);
while (isPlaying) {
if (frames.size() <= 1) {
av_usleep(1000 * 1000);
continue;
}
int ret = frames.getQueueAndDel(frame);
if (!isPlaying) {
break;
}
if (!ret) { // ret == 0
continue;
}
// 格式转换 yuv ---> rgba
sws_scale(sws_ctx,
// 下面是输入环节 YUV的数据
frame->data, frame->linesize,
0, codecContext->height,
// 下面是输出环节 成果:RGBA数据
dst_data,
dst_linesize
);
// TODO 音视频同步
// 公式:extra_delay = repeat_pict / (2*fps)
// 经验值 extra_delay:0.0400000
double extra_delay = frame->repeat_pict / (2 * fps); // 在之前的编码时,加入的额外延时时间取出来(可能获取不到)
double fps_delay = 1.0 / fps; // 根据fps得到延时时间(fps25 == 每秒25帧,计算每一帧的延时时间,0.040000)
double real_delay = fps_delay + extra_delay; // 当前帧的延时时间 0.040000
double video_time = frame->best_effort_timestamp * av_q2d(time_base);
double audio_time = audio_channel->audio_time;
// 判断两个时间差值,一个快一个慢(快的等慢的,慢的快点追) == 你追我赶
double time_diff = video_time - audio_time;
if (time_diff > 0) {
// 视频时间 > 音频时间: 要等音频,所以控制视频播放慢一点(等音频) 【睡眠】
if (time_diff > 1)
{ // 说明:音频预视频插件很大,TODO 拖动条 特色场景 音频 和 视频 差值很大,我不能睡眠那么久,否则是大Bug
// av_usleep((real_delay + time_diff) * 1000000);
// 如果 音频 和 视频 差值很大,我不会睡很久,我就是稍微睡一下
av_usleep((real_delay * 2) * 1000000);
}
else
{ // 说明:0~1之间:音频与视频差距不大,所以可以那(当前帧实际延时时间 + 音视频差值)
av_usleep((real_delay + time_diff) * 1000000); // 单位是微妙:所以 * 1000000
}
} if (time_diff < 0) {
// // 视频时间 < 音频时间: 要追音频,所以控制视频播放快一点(追音频) 【丢包】
// // 丢帧:不能睡意丢,I帧是绝对不能丢
// // 丢包:在frames 和 packets 中的队列
//
// // 经验值 0.05
// // -0.234454 fabs == 0.234454
if (fabs(time_diff) <= 0.05) { // fabs对负数的操作(对浮点数取绝对值)
// 多线程(安全 同步丢包)
frames.sync();
continue; // 丢完取下一个包
}
} else {
// 百分百同步,这个基本上很难做的
LOGI("百分百同步了");
}
// 基础:数组被传递会退化成指针,默认就是去1元素
renderCallback(dst_data[0], codecContext->width, codecContext->height, dst_linesize[0]);
if (frame) {
releaseAVFrame(&frame);
av_frame_unref(frame);
frame = NULL;
}
}
if (frame) {
releaseAVFrame(&frame);
av_frame_unref(frame);
frame = NULL;
}
isPlaying =0;
av_free(&dst_data[0]);
sws_freeContext(sws_ctx);
}
// Registers the callback that receives converted RGBA frames (set from JNI).
void VideoChannel::setRenderCallback(RenderCallback renderCallback) {
this->renderCallback = renderCallback;
}
// Injects the audio channel whose clock drives A/V sync in video_play().
void VideoChannel::setAudioChannel(AudioChannel *audio_channel) {
this->audio_channel = audio_channel;
}
十一.代码地址:
链接:https://pan.baidu.com/s/14LmCIyk9LsWZyBoj5j7_Fw
提取码:6mz8
网友评论