一.CMakeLists.txt
# Minimum version required by the Android NDK toolchain integration.
cmake_minimum_required(VERSION 3.4.1)
# Build the bundled librtmp sub-project (provides the `rtmp` target below).
add_subdirectory(librtmp)
# Collect every .cpp in this directory into the shared library's sources.
file(GLOB cpp_source *.cpp)
# Prebuilt x264 headers, one tree per Android ABI.
include_directories(${CMAKE_SOURCE_DIR}/x264/${ANDROID_ABI}/include)
# Library search path for the prebuilt x264 static/shared lib.
# NOTE(review): pushing -L through CMAKE_CXX_FLAGS works but
# link_directories()/target_link_directories() would be the idiomatic
# choice — confirm the project's minimum CMake version before changing.
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -L${CMAKE_SOURCE_DIR}/x264/${ANDROID_ABI}/lib")
# The JNI library loaded from Java via System.loadLibrary("native-lib").
add_library(
native-lib
SHARED
${cpp_source}
)
# Link order: Android log, bundled librtmp, prebuilt x264.
target_link_libraries(
native-lib
log
rtmp
x264)
二.native-lib.cpp
#include <jni.h>
#include <string>
#include <android/log.h>
#include <pthread.h>
#include "VideoChannel.h"
#include "safe_queue.h"
extern "C"{
#include "librtmp/rtmp.h"
}
// Logging shorthand.
// NOTE(review): despite the LOGE name this logs at INFO priority with a
// fixed "liuyi" tag — confirm whether ANDROID_LOG_ERROR was intended.
#define LOGE(...) __android_log_print(ANDROID_LOG_INFO,"liuyi",__VA_ARGS__)
// Encoder channel, created in native_init.
VideoChannel *videoChannel = 0;
// 1 while the push thread is (or is being) started; set in native_start.
int isStart = 0;
// Handle of the RTMP push thread created in native_start.
pthread_t pid;
// Push flag: becomes 1 once the RTMP stream connection is established.
int readyPushing = 0;
// RTMP_GetTime() snapshot taken when pushing starts.
uint32_t start_time;
// Blocking queue handing encoded packets from the encoder to the push thread.
SafeQueue<RTMPPacket *> packets;
// Java callback helper, created in native_init.
JavaCallHelper *helper = 0;
// Cached JavaVM reference, saved in JNI_OnLoad so native threads can attach.
JavaVM *javaVM = 0;
// Called by the Android runtime when this .so is loaded.
// Caches the JavaVM pointer (needed later to attach native threads)
// and reports the JNI version this library requires.
JNIEXPORT jint JNICALL JNI_OnLoad(JavaVM *vm, void *reserved) {
javaVM = vm;
LOGE("保存虚拟机的引用");
return JNI_VERSION_1_4;
}
extern "C"
JNIEXPORT void JNICALL
Java_com_luisliuyi_demo_camera1_LivePusher_native_1init(JNIEnv *env, jobject thiz) {
    // One-time native setup: create the encoding channel and the helper
    // used to call results back into the Java LivePusher instance.
    videoChannel = new VideoChannel;
    helper = new JavaCallHelper(javaVM, env, thiz);
    videoChannel->javaCallHelper = helper;
}
extern "C"
JNIEXPORT void JNICALL
Java_com_luisliuyi_demo_camera1_LivePusher_native_1pushVideo(JNIEnv *env, jobject thiz,
                                                             jbyteArray data_) {
    // Drop camera frames until the channel exists and the RTMP link is up.
    if (!videoChannel || !readyPushing) {
        return;
    }
    jbyte *data = env->GetByteArrayElements(data_, NULL);
    // BUG FIX: GetByteArrayElements may fail (e.g. OOM) and return NULL;
    // the original dereferenced it unconditionally inside encodeData.
    if (!data) {
        return;
    }
    videoChannel->encodeData(data);
    // BUG FIX(perf): the frame is only read, never modified — JNI_ABORT
    // skips the useless copy-back that release mode 0 performs.
    env->ReleaseByteArrayElements(data_, data, JNI_ABORT);
}
// Free one queued packet (body buffer + struct) and null the caller's
// pointer so it cannot be double-freed.
void releasePackets(RTMPPacket *&packet) {
    if (!packet) {
        return;
    }
    RTMPPacket_Free(packet); // releases the packet's internal body allocation
    delete packet;
    packet = 0;
}
// Push-thread entry point. `args` is a heap-allocated URL string whose
// ownership was transferred by native_start (allocated with new char[]).
// Connects to the RTMP server, then drains the packet queue until the
// thread is stopped or a send fails.
void *start(void *args) {
    char *url = static_cast<char *>(args);
    RTMP *rtmp = 0;
    do {
        rtmp = RTMP_Alloc();
        if (!rtmp) {
            LOGE("rtmp创建失败");
            break;
        }
        RTMP_Init(rtmp);
        // 5-second connection timeout.
        rtmp->Link.timeout = 5;
        int ret = RTMP_SetupURL(rtmp, url);
        if (!ret) {
            LOGE("rtmp设置地址失败:%s", url);
            break;
        }
        // Publish (output) mode must be enabled before connecting.
        RTMP_EnableWrite(rtmp);
        ret = RTMP_Connect(rtmp, 0);
        if (!ret) {
            LOGE("rtmp连接地址失败:%s", url);
            break;
        }
        ret = RTMP_ConnectStream(rtmp, 0);
        LOGE("rtmp连接成功----------->:%s", url);
        if (!ret) {
            LOGE("rtmp连接流失败:%s", url);
            break;
        }
        // Connection established: let producers queue packets.
        readyPushing = 1;
        // Remember when pushing started (used for packet timestamps).
        start_time = RTMP_GetTime();
        packets.setWork(1);
        RTMPPacket *packet = 0;
        while (isStart) {
            packets.pop(packet);
            if (!isStart) {
                break;
            }
            if (!packet) {
                continue;
            }
            // Stamp the packet with the stream id of this connection.
            packet->m_nInfoField2 = rtmp->m_stream_id;
            ret = RTMP_SendPacket(rtmp, packet, 1);
            releasePackets(packet);
            if (!ret) {
                LOGE("发送数据失败");
                break;
            }
        }
        releasePackets(packet);
    } while (0);
    // BUG FIX: on any exit path (connect failure or send failure) stop
    // producers from queueing into a queue nobody drains anymore, and
    // stop the queue from blocking future consumers.
    readyPushing = 0;
    packets.setWork(0);
    if (rtmp) {
        RTMP_Close(rtmp);
        RTMP_Free(rtmp);
    }
    // BUG FIX: url was allocated with new char[] in native_start;
    // plain `delete` on it is undefined behavior — must be delete[].
    delete[] url;
    return 0;
}
extern "C"
JNIEXPORT void JNICALL
Java_com_luisliuyi_demo_camera1_LivePusher_native_1start(JNIEnv *env, jobject thiz, jstring path_) {
    // Ignore repeated start requests while a push thread is active.
    if (isStart) {
        return;
    }
    const char *path = env->GetStringUTFChars(path_, 0);
    // Copy the URL onto the heap: the push thread outlives this JNI call,
    // so it must own its own string (freed inside start()).
    char *url = new char[strlen(path) + 1];
    strcpy(url, path);
    env->ReleaseStringUTFChars(path_, path);
    isStart = 1;
    // BUG FIX: the original ignored pthread_create's result — on failure
    // `url` leaked and isStart stayed 1 forever, making start unrepeatable.
    if (pthread_create(&pid, 0, start, url) != 0) {
        isStart = 0;
        delete[] url;
        LOGE("pthread_create failed");
    }
}
extern "C"
JNIEXPORT void JNICALL
Java_com_luisliuyi_demo_camera1_LivePusher_native_1setVideoEncInfo(JNIEnv *env, jobject thiz,
                                                                   jint width, jint height,
                                                                   jint fps, jint bitrate) {
    // Forward the encoder parameters; a no-op until native_init has run.
    if (!videoChannel) {
        return;
    }
    videoChannel->setVideoEncInfo(width, height, fps, bitrate);
}
extern "C"
JNIEXPORT void JNICALL
Java_com_luisliuyi_demo_camera1_LivePusher_native_1stop(JNIEnv *env, jobject thiz) {
    // BUG FIX: this was an empty TODO — stop was never implemented, so the
    // push thread could never be shut down. Signal it to exit and wait.
    if (!isStart) {
        return;
    }
    isStart = 0;
    readyPushing = 0;
    // Unblock a pop() the push thread may be parked in.
    // NOTE(review): assumes SafeQueue::setWork(0) wakes blocked consumers
    // with a null element — confirm against safe_queue.h.
    packets.setWork(0);
    pthread_join(pid, 0);
}
extern "C"
JNIEXPORT void JNICALL
Java_com_luisliuyi_demo_camera1_LivePusher_native_1release(JNIEnv *env, jobject thiz) {
    // BUG FIX: this was an empty TODO — the objects created in native_init
    // leaked. Free them and null the globals so init can run again.
    delete videoChannel;
    videoChannel = 0;
    delete helper;
    helper = 0;
}
三.VideoChannel.cpp
//
// Created by Luis on 2022/4/25.
//
#include "VideoChannel.h"
#include <cstring>
void VideoChannel::setVideoEncInfo(int width, int height, int fps, int bitrate) {
mWidth = width;
mHeight = height;
mFps = fps;
mBitrate = bitrate;
ySize = width * height;
uvSize = ySize / 4;
if (videoCodec) {
x264_encoder_close(videoCodec);
videoCodec = 0;
}
x264_param_t param;
x264_param_default_preset(¶m, "ultrafast", "zerolatency"); //编码器 速度
param.i_level_idc = 32;//编码等级
param.i_csp = X264_CSP_I420;// 选取显示格式
param.i_width = width;
param.i_height = height;
param.i_bframe = 0;//B帧
param.rc.i_rc_method = X264_RC_ABR; //cpu ABR 平均
param.rc.i_bitrate = bitrate / 1024;
param.i_fps_num = fps;
param.i_fps_den = 1; // 帧率 时间
param.i_timebase_den = param.i_fps_num;// 分母
param.i_timebase_num = param.i_fps_den;// 分子
param.b_vfr_input = 0;//用fps而不是时间戳来计算帧间距离
param.i_keyint_max = fps * 2;//I帧间隔
param.b_repeat_headers = 1;// 是否复制sps和pps放在每个关键帧的前面 该参数设置是让每个关键帧(I帧)都附带sps/pps。
param.i_threads = 1;//多线程
x264_param_apply_profile(¶m, "baseline");
//打开编码器
videoCodec = x264_encoder_open(¶m);
pic_in = new x264_picture_t;
x264_picture_alloc(pic_in, X264_CSP_I420, width, height);
}
// Encode one camera frame and post every resulting NAL unit to Java.
// The layout matches NV21: a full Y plane followed by interleaved V,U
// bytes, which are de-interleaved here into the encoder's separate
// U (plane[1]) and V (plane[2]) planes.
// NOTE(review): assumes `data` holds at least ySize + 2*uvSize bytes —
// confirm with the Java caller.
void VideoChannel::encodeData(int8_t *data) {
    // Luma: straight copy.
    memcpy(pic_in->img.plane[0], data, ySize);
    // Chroma: split the interleaved V,U pairs that follow the Y plane.
    int8_t *uvSrc = data + ySize;
    uint8_t *uDst = pic_in->img.plane[1];
    uint8_t *vDst = pic_in->img.plane[2];
    for (int i = 0; i < uvSize; ++i) {
        uDst[i] = uvSrc[i * 2 + 1]; // U
        vDst[i] = uvSrc[i * 2];     // V
    }
    int nalCount;
    x264_nal_t *nals;
    x264_picture_t pic_out; // output metadata for the encoded frame
    x264_encoder_encode(videoCodec, &nals, &nalCount, pic_in, &pic_out);
    for (int i = 0; i < nalCount; ++i) {
        javaCallHelper->postH264(reinterpret_cast<char *>(nals[i].p_payload),
                                 nals[i].i_payload);
    }
}
// Construct an unconfigured channel; the encoder is created lazily by
// setVideoEncInfo.
VideoChannel::VideoChannel() {
    // Defensive: null the lazily-created resources so the `if (videoCodec)`
    // style guards used elsewhere are safe even if the header lacks
    // in-class initializers (NOTE(review): confirm against VideoChannel.h).
    videoCodec = 0;
    pic_in = 0;
    javaCallHelper = 0;
}
// BUG FIX: the original empty destructor leaked the open encoder and the
// allocated input picture. Release them here (RAII).
// NOTE(review): assumes videoCodec/pic_in are null until setVideoEncInfo
// runs — the `if (videoCodec)` guard there implies this; confirm header.
VideoChannel::~VideoChannel() {
    if (videoCodec) {
        x264_encoder_close(videoCodec);
        videoCodec = 0;
    }
    if (pic_in) {
        x264_picture_clean(pic_in); // frees planes from x264_picture_alloc
        delete pic_in;
        pic_in = 0;
    }
}
四.代码地址
https://gitee.com/luisliuyi/android-x264.git
网友评论