1. VideoChanel.java
public class VideoChanel implements Preview.OnPreviewOutputUpdateListener, ImageAnalysis.Analyzer {
private TextureView textureView;
private LivePusher livePusher;
int width = 480;
int height = 640;
private HandlerThread handlerThread;
private CameraX.LensFacing currentFacing = CameraX.LensFacing.BACK;
private boolean isLiving;
private byte[] y;
private byte[] u;
private byte[] v;
private byte[] nv21;
byte[] nv21_rotated;
public VideoChanel(LifecycleOwner lifecycleOwner, TextureView textureView, LivePusher livePusher) {
this.textureView = textureView;
this.livePusher = livePusher;
handlerThread = new HandlerThread("Analyze-thread");
handlerThread.start();
CameraX.bindToLifecycle(lifecycleOwner, getPreView(), getAnalysis());
}
private Preview getPreView() {
PreviewConfig previewConfig = new PreviewConfig.Builder()
.setTargetResolution(new Size(width, height))
.setLensFacing(currentFacing).build();
Preview preview = new Preview(previewConfig);
preview.setOnPreviewOutputUpdateListener(this);
return preview;
}
private ImageAnalysis getAnalysis() {
ImageAnalysisConfig imageAnalysisConfig = new ImageAnalysisConfig.Builder()
.setCallbackHandler(new Handler(handlerThread.getLooper()))
.setLensFacing(currentFacing)
.setImageReaderMode(ImageAnalysis.ImageReaderMode.ACQUIRE_LATEST_IMAGE)
.setTargetResolution(new Size(width, height))
.build();
ImageAnalysis imageAnalysis = new ImageAnalysis(imageAnalysisConfig);
imageAnalysis.setAnalyzer(this);
return imageAnalysis;
}
@Override
public void onUpdated(Preview.PreviewOutput output) {
SurfaceTexture surfaceTexture = output.getSurfaceTexture();
if (textureView.getSurfaceTexture() != surfaceTexture) {
if (textureView.isAvailable()) {
// without re-attaching the TextureView, an error is thrown when switching cameras
ViewGroup parent = (ViewGroup) textureView.getParent();
parent.removeView(textureView);
parent.addView(textureView, 0);
parent.requestLayout();
}
textureView.setSurfaceTexture(surfaceTexture);
}
}
public void startLive() {
isLiving = true;
}
public void stopLive() {
isLiving = false;
}
private ReentrantLock lock = new ReentrantLock();
@Override
public void analyze(ImageProxy image, int rotationDegrees) {
if (!isLiving) {
return;
}
lock.lock();
ImageProxy.PlaneProxy[] planes = image.getPlanes();
if (y == null) {
y = new byte[planes[0].getBuffer().limit() - planes[0].getBuffer().position()];
u = new byte[planes[1].getBuffer().limit() - planes[1].getBuffer().position()];
v = new byte[planes[2].getBuffer().limit() - planes[2].getBuffer().position()];
// width and height are swapped because the frame is rotated 90° before being pushed
livePusher.native_setVideoEncInfo(image.getHeight(), image.getWidth(), 10, 640_000);
}
if (image.getPlanes()[0].getBuffer().remaining() == y.length) {
planes[0].getBuffer().get(y);
planes[1].getBuffer().get(u);
planes[2].getBuffer().get(v);
int imageWidth = image.getWidth();
int imageHeight = image.getHeight();
if (nv21 == null) {
nv21 = new byte[imageWidth * imageHeight * 3 / 2];
nv21_rotated = new byte[imageWidth * imageHeight * 3 / 2];
}
ImageUtil.yuvToNv21(y, u, v, nv21, imageWidth, imageHeight);
ImageUtil.nv21_rotate_to_90(nv21, nv21_rotated, imageWidth, imageHeight);
this.livePusher.native_pushVideo(nv21_rotated);
}
lock.unlock();
}
public void switchCamera() {
}
}
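The analyzer above leans on an ImageUtil helper that the article never lists. A minimal sketch consistent with how it is called might look as follows; the class name and signatures come from the calls above, and it assumes tightly packed planes (row stride equal to width, chroma pixel stride of 2), which is what CameraX typically delivers at this resolution. Production code should read rowStride/pixelStride from the PlaneProxy instead.

public class ImageUtil {
    // merge the three YUV_420_888 planes into one NV21 buffer (Y plane, then interleaved V/U)
    public static void yuvToNv21(byte[] y, byte[] u, byte[] v, byte[] nv21, int width, int height) {
        int ySize = width * height;
        System.arraycopy(y, 0, nv21, 0, ySize);
        for (int i = 0; i < ySize / 4; i++) {
            nv21[ySize + i * 2] = v[i * 2];     // V sample (chroma pixel stride 2)
            nv21[ySize + i * 2 + 1] = u[i * 2]; // U sample
        }
    }

    // rotate an NV21 frame 90° clockwise (landscape sensor frame -> portrait)
    public static void nv21_rotate_to_90(byte[] nv21, byte[] out, int width, int height) {
        int ySize = width * height;
        int i = 0;
        // Y plane: walk source columns top-down, source rows bottom-up
        for (int x = 0; x < width; x++) {
            for (int y = height - 1; y >= 0; y--) {
                out[i++] = nv21[y * width + x];
            }
        }
        // chroma: V/U pairs move together
        i = ySize;
        for (int x = 0; x < width; x += 2) {
            for (int y = height / 2 - 1; y >= 0; y--) {
                out[i++] = nv21[ySize + y * width + x];
                out[i++] = nv21[ySize + y * width + x + 1];
            }
        }
    }
}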
2. AudioChannel.java
public class AudioChannel {
private LivePusher livePusher;
private int sampleRate;
private int channelConfig;
private int minBufferSize;
private byte[] buffer;
private Handler handler;
private HandlerThread handlerThread;
private AudioRecord audioRecord;
public AudioChannel(int sampleRate, int channels, LivePusher livePusher) {
this.livePusher = livePusher;
this.sampleRate = sampleRate;
this.channelConfig = channels == 2 ? AudioFormat.CHANNEL_IN_STEREO : AudioFormat.CHANNEL_IN_MONO;
this.minBufferSize = AudioRecord.getMinBufferSize(sampleRate, channelConfig, AudioFormat.ENCODING_PCM_16BIT);
// the faac encoder reports how many bytes of PCM it wants per call
int inputByteNum = livePusher.initAudioEnc(sampleRate, channels);
this.buffer = new byte[inputByteNum];
this.minBufferSize = Math.max(inputByteNum, minBufferSize);
handlerThread = new HandlerThread("Audio-Record");
handlerThread.start();
handler = new Handler(handlerThread.getLooper());
}
public void start() {
handler.post(new Runnable() {
@Override
public void run() {
audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC,
sampleRate, channelConfig,
AudioFormat.ENCODING_PCM_16BIT, minBufferSize);
audioRecord.startRecording();
while (audioRecord.getRecordingState() == AudioRecord.RECORDSTATE_RECORDING) {
// len is the number of bytes actually read; log it if recording doesn't work
int len = audioRecord.read(buffer, 0, buffer.length);
if (len > 0) {
// faac consumes sample counts, not bytes: 16-bit PCM means len / 2 samples
livePusher.sendAudio(buffer, len / 2);
}
}
}
}
});
}
}
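Note that AudioChannel never stops the recorder, and a stop() posted onto the same Handler would never run while the read loop is spinning. A minimal counterpart (an addition for illustration, not part of the original code) flips the recording state from the caller's thread, which makes the while loop above fall through:

public void stop() {
    if (audioRecord != null) {
        // stop() changes the recording state, so the read loop on the
        // Handler thread exits its while condition; release() can then
        // safely be posted onto that same thread afterwards
        audioRecord.stop();
    }
}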
3. LivePusher.java
public class LivePusher {
static {
System.loadLibrary("native-lib");
}
public LivePusher() {
native_init();
}
public void startLive(String path) {
native_start(path);
}
public void sendAudio(byte[] buffer, int len) {
nativeSendAudio(buffer, len);
}
public native void native_init();
public native void native_start(String path);
public native void native_setVideoEncInfo(int width, int height, int fps, int bitrate);
public native int initAudioEnc(int sampleRate, int channels);
public native void native_pushVideo(byte[] data);
private native void nativeSendAudio(byte[] buffer, int len);
public native void native_stop();
public native void native_release();
}
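The article does not show how the three classes are wired together. A hypothetical Activity (the layout id, stream URL, and audio parameters here are placeholders, not taken from the original project) might connect them like this:

public class MainActivity extends AppCompatActivity {
    private LivePusher livePusher;
    private VideoChanel videoChanel;
    private AudioChannel audioChannel;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        TextureView textureView = findViewById(R.id.textureView); // preview target
        livePusher = new LivePusher();
        videoChanel = new VideoChanel(this, textureView, livePusher);
        audioChannel = new AudioChannel(44100, 2, livePusher);
    }

    // e.g. the onClick handler of a "go live" button
    public void startLive(View view) {
        livePusher.startLive("rtmp://example.com/live/streamKey");
        videoChanel.startLive();
        audioChannel.start();
    }
}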
4. native-lib.cpp
#include <jni.h>
#include <string>
#include <android/log.h>
#include <malloc.h>
extern "C"{
#include "librtmp/rtmp.h"
}
#include "safe_queue.h"
#include "VideoChannel.h"
#include "AudioChannel.h"
#include "maniulog.h"
VideoChannel *videoChannel = 0;
AudioChannel *audioChannel = 0;
int isStart = 0;
// handle of the worker thread
pthread_t pid;
// flag: connected and ready to push
int readyPushing = 0;
// blocking queue of packets waiting to be sent
SafeQueue<RTMPPacket *> packets;
uint32_t start_time;
RTMP *rtmp = 0;
// reference to the JVM
JavaVM *javaVM = 0;
void callBack(RTMPPacket *packet) {
if (packet) {
// drop the backlog if the sender can't keep up
if (packets.size() > 50) {
packets.clear();
}
if (packet->m_packetType == RTMP_PACKET_TYPE_AUDIO) {
LOGE("sending audio");
} else {
LOGE("sending video");
}
packet->m_nTimeStamp = RTMP_GetTime() - start_time;
packets.push(packet);
}
}
JNIEXPORT jint JNICALL JNI_OnLoad(JavaVM *vm, void *reserved) {
javaVM = vm;
LOGE("saved the JavaVM reference");
return JNI_VERSION_1_4;
}
// free an RTMPPacket and null out the caller's pointer
void releasePackets(RTMPPacket *&packet) {
if (packet) {
RTMPPacket_Free(packet);
delete packet;
packet = 0;
}
}
void *start(void *args) {
char *url = static_cast<char *>(args);
do {
rtmp = RTMP_Alloc();
if (!rtmp) {
LOGE("rtmp创建失败");
break;
}
RTMP_Init(rtmp);
// 5 s connect timeout
rtmp->Link.timeout = 5;
int ret = RTMP_SetupURL(rtmp, url);
if (!ret) {
LOGE("rtmp failed to set url: %s", url);
break;
}
// enable output (publish) mode
RTMP_EnableWrite(rtmp);
ret = RTMP_Connect(rtmp, 0);
if (!ret) {
LOGE("rtmp failed to connect: %s", url);
break;
}
ret = RTMP_ConnectStream(rtmp, 0);
if (!ret) {
LOGE("rtmp failed to connect stream: %s", url);
break;
}
LOGE("rtmp connected ----------->: %s", url);
// ready to start pushing
readyPushing = 1;
// remember when pushing started, for relative timestamps
start_time = RTMP_GetTime();
packets.setWork(1);
RTMPPacket *packet = 0;
// send the AAC sequence header first so the player can configure its decoder
RTMPPacket *audioHeader = audioChannel->getAudioConfig();
callBack(audioHeader);
// loop: take packets from the queue and send them
while (isStart) {
packets.pop(packet);
if (!isStart) {
break;
}
if (!packet) {
continue;
}
// attach the rtmp stream id
packet->m_nInfoField2 = rtmp->m_stream_id;
// send the packet; 1 = use librtmp's internal queue
ret = RTMP_SendPacket(rtmp, packet, 1);
releasePackets(packet);
if (!ret) {
LOGE("failed to send packet");
break;
}
}
releasePackets(packet);
} while (0);
if (rtmp) {
RTMP_Close(rtmp);
RTMP_Free(rtmp);
}
delete[] url; // allocated with new char[]
return 0;
}
extern "C"
JNIEXPORT void JNICALL
Java_com_luisliuyi_demo_camera1_LivePusher_native_1init(JNIEnv *env, jobject thiz) {
// instantiate the encoding layer
videoChannel = new VideoChannel;
videoChannel->setVideoCallback(callBack);
}
extern "C"
JNIEXPORT void JNICALL
Java_com_luisliuyi_demo_camera1_LivePusher_native_1pushVideo(JNIEnv *env, jobject thiz,
jbyteArray data_) {
if (!videoChannel || !readyPushing) {
return;
}
jbyte *data = env->GetByteArrayElements(data_, NULL);
videoChannel->encodeData(data);
env->ReleaseByteArrayElements(data_, data, 0);
}
extern "C"
JNIEXPORT void JNICALL
Java_com_luisliuyi_demo_camera1_LivePusher_native_1start(JNIEnv *env, jobject thiz, jstring path_) {
if (isStart) {
return;
}
const char *path = env->GetStringUTFChars(path_, 0);
char *url = new char[strlen(path) + 1];
strcpy(url, path);
// start streaming
isStart = 1;
// spawn a worker thread to connect to the (Bilibili) RTMP server
pthread_create(&pid, 0, start, url);
env->ReleaseStringUTFChars(path_, path);
}
extern "C"
JNIEXPORT void JNICALL
Java_com_luisliuyi_demo_camera1_LivePusher_native_1setVideoEncInfo(JNIEnv *env, jobject thiz,
jint width, jint height,
jint fps, jint bitrate) {
if (videoChannel) {
videoChannel->setVideoEncInfo(width, height, fps, bitrate);
}
}
extern "C"
JNIEXPORT void JNICALL
Java_com_luisliuyi_demo_camera1_LivePusher_native_1stop(JNIEnv *env, jobject thiz) {
// TODO: implement native_stop()
}extern "C"
JNIEXPORT void JNICALL
Java_com_luisliuyi_demo_camera1_LivePusher_native_1release(JNIEnv *env, jobject thiz) {
if(rtmp) {
RTMP_Close(rtmp);
RTMP_Free(rtmp);
rtmp = 0;
}
if (videoChannel) {
delete (videoChannel);
videoChannel = 0;
}
}
extern "C"
JNIEXPORT jint JNICALL
Java_com_luisliuyi_demo_camera1_LivePusher_initAudioEnc(JNIEnv *env, jobject thiz, jint sample_rate,
jint channels) {
audioChannel = new AudioChannel();
audioChannel->setCallback(callBack);
audioChannel->openCodec(sample_rate, channels);
return audioChannel->getInputByteNum();
}
extern "C"
JNIEXPORT void JNICALL
Java_com_luisliuyi_demo_camera1_LivePusher_nativeSendAudio(JNIEnv *env, jobject thiz,
jbyteArray buffer, jint len) {
jbyte *data = env->GetByteArrayElements(buffer, 0);
// len is a sample count: the Java side already divided the byte count by 2
audioChannel->encode(reinterpret_cast<int32_t *>(data), len);
env->ReleaseByteArrayElements(buffer, data, 0);
}
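Two native headers used above are not listed in the article. maniulog.h presumably just wraps __android_log_print, along the lines of (the log tag is a guess):

#define LOGE(...) __android_log_print(ANDROID_LOG_ERROR, "LivePusher", __VA_ARGS__)

safe_queue.h is the blocking queue. Reconstructed from the calls made above (push, pop, setWork, clear, size), a minimal pthread-based sketch could look like this, though the file in the repository may differ:

#include <queue>
#include <pthread.h>

template <typename T>
class SafeQueue {
public:
    SafeQueue() {
        pthread_mutex_init(&mutex, 0);
        pthread_cond_init(&cond, 0);
    }
    ~SafeQueue() {
        pthread_mutex_destroy(&mutex);
        pthread_cond_destroy(&cond);
    }
    // enqueue only while the queue is marked as working
    void push(T value) {
        pthread_mutex_lock(&mutex);
        if (work) {
            q.push(value);
            pthread_cond_signal(&cond);
        }
        pthread_mutex_unlock(&mutex);
    }
    // block until an element arrives or setWork(0) wakes us up
    int pop(T &value) {
        int ret = 0;
        pthread_mutex_lock(&mutex);
        while (work && q.empty()) {
            pthread_cond_wait(&cond, &mutex);
        }
        if (!q.empty()) {
            value = q.front();
            q.pop();
            ret = 1;
        }
        pthread_mutex_unlock(&mutex);
        return ret;
    }
    void setWork(int w) {
        pthread_mutex_lock(&mutex);
        work = w;
        pthread_cond_signal(&cond);
        pthread_mutex_unlock(&mutex);
    }
    void clear() {
        pthread_mutex_lock(&mutex);
        while (!q.empty()) q.pop(); // note: does not free queued pointers
        pthread_mutex_unlock(&mutex);
    }
    int size() {
        pthread_mutex_lock(&mutex);
        int s = (int) q.size();
        pthread_mutex_unlock(&mutex);
        return s;
    }
private:
    std::queue<T> q;
    pthread_mutex_t mutex;
    pthread_cond_t cond;
    int work = 0;
};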
5. VideoChannel.cpp
void VideoChannel::setVideoEncInfo(int width, int height, int fps, int bitrate) {
mWidth = width;
mHeight = height;
mFps = fps;
mBitrate = bitrate;
ySize = width * height;
uvSize = ySize / 4;
if (videoCodec) {
x264_encoder_close(videoCodec);
videoCodec = 0;
}
if (pic_in) {
// free the previous input picture, otherwise reconfiguring leaks it
x264_picture_clean(pic_in);
delete pic_in;
pic_in = 0;
}
x264_param_t param;
// fastest preset, zero-latency tuning
x264_param_default_preset(&param, "ultrafast", "zerolatency");
param.i_level_idc = 32; // encoding level
param.i_csp = X264_CSP_I420; // input color format
param.i_width = width;
param.i_height = height;
param.i_bframe = 0; // no B-frames (they add latency)
param.rc.i_rc_method = X264_RC_ABR; // average bitrate mode
param.rc.i_bitrate = bitrate / 1024; // x264 expects kbps
param.i_fps_num = fps;
param.i_fps_den = 1; // frame rate
param.i_timebase_den = param.i_fps_num; // timebase denominator
param.i_timebase_num = param.i_fps_den; // timebase numerator
param.b_vfr_input = 0; // use fps, not timestamps, for frame spacing
param.i_keyint_max = fps * 2; // I-frame (keyframe) interval
param.b_repeat_headers = 1; // put sps/pps in front of every keyframe
param.i_threads = 1; // single-threaded encode
x264_param_apply_profile(&param, "baseline");
// open the encoder
videoCodec = x264_encoder_open(&param);
pic_in = new x264_picture_t;
x264_picture_alloc(pic_in, X264_CSP_I420, width, height);
}
void VideoChannel::encodeData(int8_t *data) {
memcpy(pic_in->img.plane[0], data, ySize); // Y plane
// NV21 carries chroma as interleaved V,U pairs; split them into I420 planes
for (int i = 0; i < uvSize; ++i) {
*(pic_in->img.plane[1] + i) = *(data + ySize + i * 2 + 1); // U
*(pic_in->img.plane[2] + i) = *(data + ySize + i * 2); // V
}
int pi_nal;
x264_nal_t *pp_nals;
x264_picture_t pic_out; // metadata of the encoded picture
x264_encoder_encode(videoCodec, &pp_nals, &pi_nal, pic_in, &pic_out);
uint8_t sps[100];
uint8_t pps[100];
int sps_len, pps_len;
if (pi_nal > 0) {
for (int i = 0; i < pi_nal; ++i) {
if (pp_nals[i].i_type == NAL_SPS) {
// strip the 00 00 00 01 start code
sps_len = pp_nals[i].i_payload - 4;
memcpy(sps, pp_nals[i].p_payload + 4, sps_len);
} else if (pp_nals[i].i_type == NAL_PPS) {
pps_len = pp_nals[i].i_payload - 4;
memcpy(pps, pp_nals[i].p_payload + 4, pps_len);
// the pps follows the sps, so both are known by now
sendSpsPps(sps, pps, sps_len, pps_len);
} else {
// keyframe / non-keyframe slices
sendFrame(pp_nals[i].i_type, pp_nals[i].i_payload, pp_nals[i].p_payload);
}
}
}
}
void VideoChannel::sendSpsPps(uint8_t *sps, uint8_t *pps, int sps_len, int pps_len) {
RTMPPacket *packet = new RTMPPacket;
int bodysize = 13 + sps_len + 3 + pps_len;
RTMPPacket_Alloc(packet, bodysize);
int i = 0;
// frame type (1 = keyframe) + codec id (7 = AVC)
packet->m_body[i++] = 0x17;
// AVCPacketType: 0 = sequence header
packet->m_body[i++] = 0x00;
// composition time 0x000000
packet->m_body[i++] = 0x00;
packet->m_body[i++] = 0x00;
packet->m_body[i++] = 0x00;
// configurationVersion
packet->m_body[i++] = 0x01;
// profile, compatibility, level copied from the sps
packet->m_body[i++] = sps[1];
packet->m_body[i++] = sps[2];
packet->m_body[i++] = sps[3];
packet->m_body[i++] = 0xFF;
// number of sps (low 5 bits = 1)
packet->m_body[i++] = 0xE1;
// sps length (big endian)
packet->m_body[i++] = (sps_len >> 8) & 0xff;
packet->m_body[i++] = sps_len & 0xff;
memcpy(&packet->m_body[i], sps, sps_len);
i += sps_len;
// number of pps, then pps length and data
packet->m_body[i++] = 0x01;
packet->m_body[i++] = (pps_len >> 8) & 0xff;
packet->m_body[i++] = (pps_len) & 0xff;
memcpy(&packet->m_body[i], pps, pps_len);
// video packet
packet->m_packetType = RTMP_PACKET_TYPE_VIDEO;
packet->m_nBodySize = bodysize;
// pick an arbitrary channel (avoid the ones rtmp.c uses internally)
packet->m_nChannel = 10;
// sps/pps carry no timestamp
packet->m_nTimeStamp = 0;
// relative, not absolute, timestamps
packet->m_hasAbsTimestamp = 0;
packet->m_headerType = RTMP_PACKET_SIZE_MEDIUM;
if (this->callback) {
this->callback(packet);
}
}
void VideoChannel::sendFrame(int type, int payload, uint8_t *p_payload) {
// strip the start code: 00 00 00 01 or 00 00 01
if (p_payload[2] == 0x00) {
payload -= 4;
p_payload += 4;
} else if (p_payload[2] == 0x01) {
payload -= 3;
p_payload += 3;
}
RTMPPacket *packet = new RTMPPacket;
int bodysize = 9 + payload;
RTMPPacket_Alloc(packet, bodysize);
RTMPPacket_Reset(packet);
packet->m_body[0] = 0x27;
// keyframe
if (type == NAL_SLICE_IDR) {
packet->m_body[0] = 0x17;
LOGE("keyframe");
}
// AVCPacketType: 1 = NALU
packet->m_body[1] = 0x01;
// composition time
packet->m_body[2] = 0x00;
packet->m_body[3] = 0x00;
packet->m_body[4] = 0x00;
// NALU length: the int written out as 4 big-endian bytes
packet->m_body[5] = (payload >> 24) & 0xff;
packet->m_body[6] = (payload >> 16) & 0xff;
packet->m_body[7] = (payload >> 8) & 0xff;
packet->m_body[8] = (payload) & 0xff;
// frame data
memcpy(&packet->m_body[9], p_payload, payload);
packet->m_hasAbsTimestamp = 0;
packet->m_nBodySize = bodysize;
packet->m_packetType = RTMP_PACKET_TYPE_VIDEO;
packet->m_nChannel = 0x10;
packet->m_headerType = RTMP_PACKET_SIZE_LARGE;
callback(packet);
}
VideoChannel::VideoChannel() {
}
VideoChannel::~VideoChannel() {
if (videoCodec) {
x264_encoder_close(videoCodec);
videoCodec = 0;
}
}
void VideoChannel::setVideoCallback(VideoChannel::VideoCallback callback) {
this->callback = callback;
}
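VideoChannel.h is not listed either. The members and signatures implied by the .cpp above (a reconstruction for readability; the repository file may differ) are roughly:

#include <cstdint>
#include <x264.h>
#include "librtmp/rtmp.h"

class VideoChannel {
public:
    typedef void (*VideoCallback)(RTMPPacket *packet);

    VideoChannel();
    ~VideoChannel();
    void setVideoEncInfo(int width, int height, int fps, int bitrate);
    void encodeData(int8_t *data);
    void setVideoCallback(VideoCallback callback);

private:
    void sendSpsPps(uint8_t *sps, uint8_t *pps, int sps_len, int pps_len);
    void sendFrame(int type, int payload, uint8_t *p_payload);

    int mWidth = 0, mHeight = 0, mFps = 0, mBitrate = 0;
    int ySize = 0, uvSize = 0;   // sizes of the Y plane and of each chroma plane
    x264_t *videoCodec = 0;      // x264 encoder handle
    x264_picture_t *pic_in = 0;  // reusable input picture
    VideoCallback callback = 0;
};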
6. AudioChannel.cpp
AudioChannel::AudioChannel() {
}
AudioChannel::~AudioChannel() {
}
void AudioChannel::openCodec(int sampleRate, int channels) {
unsigned long inputSamples;
codec = faacEncOpen(sampleRate, channels, &inputSamples, &maxOutputBytes);
// 16-bit samples: inputSamples samples per call means twice as many bytes
inputByteNum = inputSamples * 2;
outputBuffer = static_cast<unsigned char *>(malloc(maxOutputBytes));
faacEncConfigurationPtr configurationPtr = faacEncGetCurrentConfiguration(codec);
configurationPtr->mpegVersion = MPEG4;
configurationPtr->aacObjectType = LOW; // AAC-LC profile
configurationPtr->outputFormat = 0; // raw aac stream (no ADTS headers)
configurationPtr->inputFormat = FAAC_INPUT_16BIT; // input sample width
faacEncSetConfiguration(codec, configurationPtr);
}
void AudioChannel::encode(int32_t *data, int len) {
// encode the pcm buffer into an aac frame
int bytelen = faacEncEncode(codec, data, len, outputBuffer, maxOutputBytes);
if (bytelen > 0) {
RTMPPacket *packet = new RTMPPacket;
RTMPPacket_Alloc(packet, bytelen + 2);
// 0xAF: AAC, 44 kHz, 16-bit, stereo; 0x01: raw aac frame follows
packet->m_body[0] = 0xAF;
packet->m_body[1] = 0x01;
memcpy(&packet->m_body[2], outputBuffer, bytelen);
packet->m_hasAbsTimestamp = 0;
packet->m_nBodySize = bytelen + 2;
packet->m_packetType = RTMP_PACKET_TYPE_AUDIO;
packet->m_nChannel = 0x11;
packet->m_headerType = RTMP_PACKET_SIZE_LARGE;
if (callback) {
callback(packet);
} else {
LOGE("callback == null");
}
}
}
RTMPPacket *AudioChannel::getAudioConfig() {
u_char *buf;
u_long len;
faacEncGetDecoderSpecificInfo(codec, &buf, &len); // the AudioSpecificConfig bytes, e.g. {0x12, 0x08}
RTMPPacket *packet = new RTMPPacket;
RTMPPacket_Alloc(packet, len + 2);
packet->m_body[0] = 0xAF;
packet->m_body[1] = 0x00;
memcpy(&packet->m_body[2], buf, len);
packet->m_hasAbsTimestamp = 0;
packet->m_nBodySize = len + 2;
packet->m_packetType = RTMP_PACKET_TYPE_AUDIO;
packet->m_nChannel = 0x11;
packet->m_headerType = RTMP_PACKET_SIZE_LARGE;
return packet;
}
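Likewise, an AudioChannel.h consistent with the calls made from native-lib.cpp (again a reconstruction) would be approximately:

#include <cstdint>
#include <faac.h>
#include "librtmp/rtmp.h"

class AudioChannel {
public:
    typedef void (*AudioCallback)(RTMPPacket *packet);

    AudioChannel();
    ~AudioChannel();
    void openCodec(int sampleRate, int channels);
    void encode(int32_t *data, int len);          // len = number of 16-bit samples
    RTMPPacket *getAudioConfig();                 // AAC sequence header packet
    void setCallback(AudioCallback callback) { this->callback = callback; }
    int getInputByteNum() { return (int) inputByteNum; }

private:
    faacEncHandle codec = 0;
    unsigned long inputByteNum = 0;   // bytes of PCM the encoder wants per call
    unsigned long maxOutputBytes = 0; // upper bound for one encoded frame
    unsigned char *outputBuffer = 0;
    AudioCallback callback = 0;
};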
7. Source code
https://gitee.com/luisliuyi/android-camerax-x264-faac-rtmp.git