#include <iostream>
#include <glad/glad.h>
#include <GLFW/glfw3.h>
#include <math.h>
#include "stb_image.h"
#include "stb_image_write.h"
#include <linux/soundcard.h>
#include <unistd.h>
#include <fcntl.h>
#include <sys/types.h>
#include <sys/stat.h>
#include <stdlib.h>
#include <stdio.h>
#include <sys/ioctl.h>
extern "C"{
#include <libavcodec/avcodec.h>
#include <libswscale/swscale.h>
#include <libavformat/avformat.h>
#include <libavutil/imgutils.h>
#include <libavutil/opt.h>
}
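// muxer(): remuxing demo. It maps the video and first audio stream of one local MP4 plus the
// first audio stream of a second MP4 into /home/gangzhou/out.mp4, copying packets without re-encoding.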
static void processInput(GLFWwindow *window){
if(NULL != window){
if(GLFW_PRESS == glfwGetMouseButton(window,GLFW_MOUSE_BUTTON_LEFT)){
// Request a clean shutdown instead of calling glfwTerminate() while the render loop is still running.
glfwSetWindowShouldClose(window,GLFW_TRUE);
}
}
}
static void Callback(GLFWwindow*,int x,int y){
glViewport(0, 0, x, y);
}
static void muxer(){
AVFormatContext *videoFormatContext = NULL;
avformat_open_input(&videoFormatContext,"/home/gangzhou/zaojiaoji.mp4",NULL,NULL);
if(NULL == videoFormatContext){
printf("avformat_open_input video error\n");
exit(0);
}
if(avformat_find_stream_info(videoFormatContext,NULL) < 0){
printf("avformat_find_stream_info video error\n");
exit(0);
}
AVFormatContext *audioFormatContext = NULL;
avformat_open_input(&audioFormatContext,"/home/gangzhou/mv.mp4",NULL,NULL);
if(NULL == audioFormatContext){
printf("avformat_open_input audio error\n");
exit(0);
}
if(avformat_find_stream_info(audioFormatContext,NULL) < 0){
printf("avformat_find_stream_info video error\n");
exit(0);
}
AVFormatContext *outAVFormatContext = NULL;
avformat_alloc_output_context2(&outAVFormatContext,NULL,NULL,"/home/gangzhou/out.mp4");
if(NULL == outAVFormatContext){
printf("avformat_alloc_output_context2 error\n");
exit(0);
}
// Stream index mapping between the inputs and the output; -1 means "not found".
int videoStreamIndex = -1;
int outVideoStreamIndex = -1;
int audioStreamIndex1 = -1;
int outAudioStreamIndex1 = -1;
for (int i = 0;i < videoFormatContext->nb_streams;++i){
AVStream *avStream = videoFormatContext->streams[i];
if(AVMEDIA_TYPE_VIDEO == avStream->codecpar->codec_type){
// Passing NULL is enough here: the stream parameters are copied below with avcodec_parameters_copy().
AVStream *outStream = avformat_new_stream(outAVFormatContext,NULL);
videoStreamIndex = i;
if(NULL == outStream){
printf("avformat_new_stream video error\n");
exit(0);
}
outVideoStreamIndex = outStream->index;
if(avcodec_parameters_copy(outStream->codecpar,avStream->codecpar) < 0){
printf("avcodec_parameters_copy video error\n");
exit(0);
}
outStream->time_base = avStream->time_base;
outStream->duration = avStream->duration;
outStream->avg_frame_rate = avStream->avg_frame_rate;
}else if(AVMEDIA_TYPE_AUDIO == avStream->codecpar->codec_type){
AVStream *outStream = avformat_new_stream(outAVFormatContext
,avcodec_find_encoder(avStream->codecpar->codec_id));
audioStreamIndex1 = i;
if(NULL == outStream){
printf("avformat_new_stream audio error\n");
exit(0);
}
outAudioStreamIndex1 = outStream->index;
if(avcodec_parameters_copy(outStream->codecpar,avStream->codecpar) < 0){
printf("avcodec_parameters_copy audio error\n");
exit(0);
}
break;
}
}
int audioStreamIndex = -1;
int outAudioStreamIndex = -1;
for (int i = 0;i < audioFormatContext->nb_streams;++i){
AVStream *avStream = audioFormatContext->streams[i];
if(AVMEDIA_TYPE_AUDIO == avStream->codecpar->codec_type){
AVStream *outStream = avformat_new_stream(outAVFormatContext
,avcodec_find_encoder(avStream->codecpar->codec_id));
audioStreamIndex = i;
if(NULL == outStream){
printf("avformat_new_stream audio error\n");
exit(0);
}
outAudioStreamIndex = outStream->index;
if(avcodec_parameters_copy(outStream->codecpar,avStream->codecpar) < 0){
printf("avcodec_parameters_copy audio error\n");
exit(0);
}
break;
}
}
if(!(outAVFormatContext->oformat->flags & AVFMT_NOFILE)){
if(avio_open(&outAVFormatContext->pb,"/home/gangzhou/out.mp4",AVIO_FLAG_WRITE) < 0){
printf("avio_open error\n");
exit(0);
}
}
if(avformat_write_header(outAVFormatContext,NULL) < 0){
printf("avformat_write_header error\n");
exit(0);
}
AVPacket *avPacket = av_packet_alloc();
while (1){
// Read one packet from each input per iteration; stop as soon as either input is exhausted.
if(!av_read_frame(videoFormatContext,avPacket)){
if(avPacket->stream_index == videoStreamIndex){
avPacket->stream_index = outVideoStreamIndex;
avPacket->pos = -1;
// Rescale timestamps from the input stream time base to the output stream time base.
av_packet_rescale_ts(avPacket,videoFormatContext->streams[videoStreamIndex]->time_base
,outAVFormatContext->streams[outVideoStreamIndex]->time_base);
av_interleaved_write_frame(outAVFormatContext,avPacket);
}else if(avPacket->stream_index == audioStreamIndex1){
avPacket->stream_index = outAudioStreamIndex1;
av_packet_rescale_ts(avPacket,videoFormatContext->streams[audioStreamIndex1]->time_base
,outAVFormatContext->streams[outAudioStreamIndex1]->time_base);
av_interleaved_write_frame(outAVFormatContext, avPacket);
}
// Always unref so packets from unmapped streams do not leak.
av_packet_unref(avPacket);
} else{
break;
}
if(!av_read_frame(audioFormatContext,avPacket)){
if(avPacket->stream_index == audioStreamIndex){
avPacket->stream_index = outAudioStreamIndex;
av_packet_rescale_ts(avPacket,audioFormatContext->streams[audioStreamIndex]->time_base
,outAVFormatContext->streams[outAudioStreamIndex]->time_base);
av_interleaved_write_frame(outAVFormatContext, avPacket);
}
av_packet_unref(avPacket);
} else{
break;
}
}
av_write_trailer(outAVFormatContext);
av_packet_free(&avPacket);
// Close the inputs and the output file handle before freeing the output context.
avformat_close_input(&videoFormatContext);
avformat_close_input(&audioFormatContext);
if(!(outAVFormatContext->oformat->flags & AVFMT_NOFILE)){
avio_closep(&outAVFormatContext->pb);
}
avformat_free_context(outAVFormatContext);
}
int main() {
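// Create an OpenGL 3.3 core-profile window with GLFW and load the GL function pointers with GLAD.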
glfwInit();
glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 3);
glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
GLFWwindow* window = glfwCreateWindow(800, 600, "LearnOpenGL", NULL, NULL);
if (window == NULL)
{
std::cout << "Failed to create GLFW window" << std::endl;
glfwTerminate();
return -1;
}
glfwMakeContextCurrent(window);
if (!gladLoadGLLoader((GLADloadproc)glfwGetProcAddress))
{
std::cout << "Failed to initialize GLAD" << std::endl;
return -1;
}
glfwSetFramebufferSizeCallback(window,Callback);
glViewport(0, 0, 800, 600);
glClearColor(1.0f, 0.3f, 0.3f, 1.0f);
float vertices[] = {-1.0f,-1.0f,0.0f,0.0f,1.0f,
1.0f,-1.0f,0.0f,1.0f,1.0f,
1.0f,1.0f,0.2f,1.0f,0.0f,
-1.0f,1.0f,0.2f,0.0f,0.0f};
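// Compile a small shader pair: the vertex shader forwards position and texture coordinates,
// the fragment shader mixes two samplers and tints the result with the "uniColor" uniform.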
unsigned int vertexShader = glCreateShader(GL_VERTEX_SHADER);
const char *vertexShaderSource = "#version 330 core \n layout(location = 0) in vec3 aPos; layout(location = 1) in vec2 tPos;out vec2 TexCoord;out vec4 color;void main(){gl_Position = vec4(aPos,1.0); color = vec4(1.0,aPos.y,aPos.z,1.0);TexCoord = tPos;} ";
glShaderSource(vertexShader,1,&vertexShaderSource,NULL);
glCompileShader(vertexShader);
int status;
char info[512] = {0};
glGetShaderiv(vertexShader,GL_COMPILE_STATUS,&status);
if(!status){
glGetShaderInfoLog(vertexShader,512,NULL,info);
printf("%s\n",info);
glfwTerminate();
}
unsigned int fragmentShader = glCreateShader(GL_FRAGMENT_SHADER);
const char *fragmentShaderSource = "#version 330 core \n out vec4 FragColor;uniform vec4 uniColor;uniform sampler2D texture1;uniform sampler2D texture2;in vec4 color; in vec2 TexCoord; void main() {FragColor = mix(texture(texture1,TexCoord),texture(texture2,TexCoord),0.2) * uniColor;}";
glShaderSource(fragmentShader,1,&fragmentShaderSource,NULL);
glCompileShader(fragmentShader);
glGetShaderiv(fragmentShader,GL_COMPILE_STATUS,&status);
if(!status){
glGetShaderInfoLog(fragmentShader,512,NULL,info);
printf("%s\n",info);
glfwTerminate();
}
unsigned int shaderProgram = glCreateProgram();
glAttachShader(shaderProgram,vertexShader);
glAttachShader(shaderProgram,fragmentShader);
glLinkProgram(shaderProgram);
glDeleteShader(vertexShader);
glDeleteShader(fragmentShader);
glGetProgramiv(shaderProgram,GL_LINK_STATUS,&status);
if(!status){
glGetProgramInfoLog(shaderProgram,512,NULL,info);
printf("%s\n",info);
glfwTerminate();
}
int uniColorLocation = glGetUniformLocation(shaderProgram,"uniColor");
unsigned int indices[] = {0,1,2,2,3,0};
unsigned int VAO;
glGenVertexArrays(1,&VAO);
glBindVertexArray(VAO);
unsigned int VBO;
glGenBuffers(1,&VBO);
glBindBuffer(GL_ARRAY_BUFFER,VBO);
glBufferData(GL_ARRAY_BUFFER, sizeof(vertices),vertices,GL_STATIC_DRAW);
unsigned int EBO;
glGenBuffers(1,&EBO);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER,EBO);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof(indices),indices,GL_STATIC_DRAW);
glVertexAttribPointer(0,3,GL_FLOAT,GL_FALSE, 5 * sizeof(float),(void*)0);
glEnableVertexAttribArray(0);
glVertexAttribPointer(1,2,GL_FLOAT,GL_FALSE, 5 * sizeof(float),(void*)(3 * sizeof(float)));
glEnableVertexAttribArray(1);
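// Load two JPEG images with stb_image and upload them to texture units 0 and 1.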
int imageW;
int imageH;
int nrChannels;
// stbi_set_flip_vertically_on_load(true);
stbi_uc *uc = stbi_load("/home/gangzhou/CLionProjects/untitled1/1.jpg"
,&imageW,&imageH,&nrChannels,0);
unsigned int texture;
glActiveTexture(GL_TEXTURE0);
glGenTextures(1,&texture);
glBindTexture(GL_TEXTURE_2D,texture);
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_WRAP_S,GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_WRAP_T,GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MIN_FILTER,GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MAG_FILTER,GL_LINEAR);
glTexImage2D(GL_TEXTURE_2D,0,GL_RGB,imageW,imageH,0,GL_RGB,GL_UNSIGNED_BYTE,uc);
glGenerateMipmap(GL_TEXTURE_2D);
stbi_image_free(uc);
uc = stbi_load("/home/gangzhou/CLionProjects/untitled1/2.jpg"
,&imageW,&imageH,&nrChannels,0);
unsigned int texture1;
glActiveTexture(GL_TEXTURE1);
glGenTextures(1,&texture1);
glBindTexture(GL_TEXTURE_2D,texture1);
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_WRAP_S,GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_WRAP_T,GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MIN_FILTER,GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MAG_FILTER,GL_LINEAR);
glTexImage2D(GL_TEXTURE_2D,0,GL_RGB,imageW,imageH,0,GL_RGB,GL_UNSIGNED_BYTE,uc);
glGenerateMipmap(GL_TEXTURE_2D);
stbi_image_free(uc);
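// FFmpeg part: remux the two local files with muxer(), then open a local MP4, decode its video
// stream, convert each frame to RGB24 with swscale and display it through texture unit 1.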
avcodec_register_all();
avformat_network_init();
muxer();
int videoStreamIndex = 0;
{
AVFormatContext *avFormatContext = avformat_alloc_context();
if(avformat_open_input(&avFormatContext,
"/home/gangzhou/桌面/zaojiaoji.mp4",NULL,NULL)){
printf("avformat_open_input error \n");
glfwTerminate();
}
if(avformat_find_stream_info(avFormatContext,NULL) < 0){
printf("avformat_find_stream_info error \n");
glfwTerminate();
}
for (int i = 0; i < avFormatContext->nb_streams; ++i) {
if(AVMEDIA_TYPE_VIDEO == avFormatContext->streams[i]->codecpar->codec_type){
videoStreamIndex = i;
printf("find video stream %d\n"
,videoStreamIndex);
break;
}
}
AVCodec *videoCodec = avcodec_find_decoder(avFormatContext->
streams[videoStreamIndex]->codecpar->codec_id);
if(NULL == videoCodec){
printf("avcodec_find_decoder error\n");
glfwTerminate();
}
AVCodecContext *videoDecodeContext = avcodec_alloc_context3(videoCodec);
if(NULL == videoDecodeContext){
printf("avcodec_alloc_context3 error\n");
glfwTerminate();
}
if(avcodec_parameters_to_context(videoDecodeContext
,avFormatContext->streams[videoStreamIndex]->codecpar) < 0){
printf("avcodec_parameters_to_context error\n");
glfwTerminate();
}
if(avcodec_open2(videoDecodeContext,videoCodec,NULL)){
printf("avcodec_open2 error\n");
glfwTerminate();
}
AVPacket* avPacket = av_packet_alloc();
if(NULL == avPacket){
printf("av_packet_alloc error\n");
glfwTerminate();
}
AVFrame* avFrame = av_frame_alloc();
if(NULL == avFrame){
printf("av_frame_alloc error\n");
glfwTerminate();
}
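// swsContext converts decoded frames (typically YUV) into packed RGB24 that glTexImage2D can consume.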
struct SwsContext *swsContext = sws_getContext(videoDecodeContext->width
,videoDecodeContext->height,videoDecodeContext->pix_fmt,videoDecodeContext->width
,videoDecodeContext->height,AV_PIX_FMT_RGB24,SWS_BICUBIC,NULL,NULL,NULL);
uint8_t *dst[4];
int dstStride[4];
if(av_image_alloc(dst,dstStride,videoDecodeContext->width,videoDecodeContext->height
,AV_PIX_FMT_RGB24,1) < 0){
printf("av_image_alloc error\n");
glfwTerminate();
}
glViewport(0, 0, videoDecodeContext->width,videoDecodeContext->height);
glfwSetWindowSize(window,videoDecodeContext->width,videoDecodeContext->height);
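// Demux the file packet by packet; every decoded video frame is converted, uploaded as a texture and drawn immediately.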
while (!av_read_frame(avFormatContext,avPacket)){
if(avPacket->stream_index == videoStreamIndex){
int ret = avcodec_send_packet(videoDecodeContext,avPacket);
if(ret){
printf("avcodec_send_packet error\n");
glfwTerminate();
}
while (ret >= 0){
ret = avcodec_receive_frame(videoDecodeContext,avFrame);
if(!ret){
sws_scale(swsContext,(const uint8_t * const *)avFrame->data,avFrame->linesize,0
,avFrame->height,dst,dstStride);
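// Crude pacing: sleep ~30 ms per frame instead of honouring the stream's timestamps, then re-create the
// video texture from the RGB frame (reusing it with glTexSubImage2D would avoid the per-frame allocation).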
usleep(30000);
glDeleteTextures(1,&texture1);
glActiveTexture(GL_TEXTURE1);
glGenTextures(1,&texture1);
glBindTexture(GL_TEXTURE_2D,texture1);
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_WRAP_S,GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_WRAP_T,GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MIN_FILTER,GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MAG_FILTER,GL_LINEAR);
glTexImage2D(GL_TEXTURE_2D,0,GL_RGB,avFrame->width
,avFrame->height,0,GL_RGB,GL_UNSIGNED_BYTE,dst[0]);
glGenerateMipmap(GL_TEXTURE_2D);
glClearColor(1.0f, 0.3f, 0.3f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT);
glUseProgram(shaderProgram);
float value = sin(glfwGetTime()) /2 +0.5f;
glUniform1i(glGetUniformLocation(shaderProgram,"texture1"),1);
// Bind the second sampler explicitly (it previously relied on the default value 0).
glUniform1i(glGetUniformLocation(shaderProgram,"texture2"),0);
glUniform4f(uniColorLocation,1.0f,value,1.0f,0.5f);
glBindVertexArray(VAO);
glDrawElements(GL_TRIANGLES,6,GL_UNSIGNED_INT,0);
glBindVertexArray(0);
glfwSwapBuffers(window);
glfwPollEvents();
av_frame_unref(avFrame);
}
}
}
av_packet_unref(avPacket);
}
// Release the FFmpeg resources used for playback.
av_freep(&dst[0]);
sws_freeContext(swsContext);
av_frame_free(&avFrame);
av_packet_free(&avPacket);
avcodec_free_context(&videoDecodeContext);
avformat_close_input(&avFormatContext);
}
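// After playback ends (or if it failed), keep rendering the last uploaded textures until the window is closed.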
while(!glfwWindowShouldClose(window))
{
processInput(window);
glClearColor(1.0f, 0.3f, 0.3f, 1.0f);
glClear(GL_COLOR_BUFFER_BIT);
glUseProgram(shaderProgram);
float value = sin(glfwGetTime()) /2 +0.5f;
glUniform1i(glGetUniformLocation(shaderProgram,"texture1"),1);
glUniform1i(glGetUniformLocation(shaderProgram,"texture2"),0);
glUniform4f(uniColorLocation,1.0f,value,1.0f,0.5f);
glBindVertexArray(VAO);
glDrawElements(GL_TRIANGLES,6,GL_UNSIGNED_INT,0);
glBindVertexArray(0);
glfwSwapBuffers(window);
glfwPollEvents();
}
glfwTerminate();
return 0;
}