add render

master
DESKTOP-4RNDQIC\29019 2021-10-17 00:28:03 +08:00
parent b6e6335b89
commit 09a6fdb6ec
9 changed files with 353 additions and 180 deletions

View File

@ -6,7 +6,34 @@
#include "CPlayWidget.h"
// 顶点着色器源码
// Vertex shader source
// GLSL compatibility shims: these expand to the legacy GLSL 1.x / ES 2.0
// keywords (attribute/varying/gl_FragColor/texture2D); redefining them is
// the hook for retargeting the shader source to a newer GLSL dialect.
#define UNIFORM_2D "uniform sampler2D"
#define VERTEX_SHADER_IN "attribute"
#define VERTEX_SHADER_OUT "varying"
#define FRAGMENT_SHADER_IN "varying"
#define FRAGMENT_SHADER_OUT
#define FRAGMENT_SHADER_COLOR "gl_FragColor"
#define FRAGMENT_SHADER_TEXTURE "texture2D"
// Fragment shader for I420 frames: samples the Y, U and V planes from three
// single-channel textures and converts video-range YUV to RGB. The constants
// are BT.601-style coefficients with the +16 (luma) and +128 (chroma) biases
// folded into the additive terms (e.g. 1.596*0.5 + 1.164*16/255 = 0.8708).
static const char kI420FragmentShaderSource[] =
FRAGMENT_SHADER_IN " vec2 textureOut;\n"
UNIFORM_2D " tex_y;\n"
UNIFORM_2D " tex_u;\n"
UNIFORM_2D " tex_v;\n"
FRAGMENT_SHADER_OUT
"void main() {\n"
" float y, u, v, r, g, b;\n"
" y = " FRAGMENT_SHADER_TEXTURE "(tex_y, textureOut).r;\n"
" u = " FRAGMENT_SHADER_TEXTURE "(tex_u, textureOut).r;\n"
" v = " FRAGMENT_SHADER_TEXTURE "(tex_v, textureOut).r;\n"
" y = y * 1.1643828125;\n"
" r = y + 1.59602734375 * v - 0.87078515625;\n"
" g = y - 0.39176171875 * u - 0.81296875 * v + 0.52959375;\n"
" b = y + 2.017234375 * u - 1.081390625;\n"
" " FRAGMENT_SHADER_COLOR " = vec4(r, g, b, 1.0);\n"
" }\n";
const char *vsrcyuv = "attribute vec4 vertexIn; \
attribute vec2 textureIn; \
@ -18,8 +45,6 @@ void main(void) \
}";
// 片段着色器源码
const char *fsrcyuv = "varying vec2 textureOut; \
uniform sampler2D tex_y; \
uniform sampler2D tex_u; \
@ -37,8 +62,8 @@ void main(void) \
gl_FragColor = vec4(rgb, 1); \
}";
// rgb片段着色器源码
// 注意MEDIASUBTYPE_RGB32 是bgr的所以需要再进行一次转换
// RGB fragment shader source
// Note: MEDIASUBTYPE_RGB32 is actually BGR-ordered, so one more channel swap is required
const char *fsrcrgb = "varying vec2 textureOut; \
@ -48,21 +73,22 @@ const char *fsrcrgb = "varying vec2 textureOut; \
gl_FragColor = texture(rgbdata, textureOut); \
}";
void CPlayWidget::OnUpdateFrame() {
this->PlayOneFrame();
}
void CPlayWidget::OnPaintData(const uint8_t *data, uint32_t len)
void CPlayWidget::OnPaintData(const rtc::scoped_refptr<webrtc::I420BufferInterface>& buffer)
{
/*
if(nullptr == m_pBufYuv420p)
{
m_pBufYuv420p = new unsigned char[len];
qDebug("CPlayWidget::PlayOneFrame new data memory. Len=%d width=%d height=%d\n",
len, m_nVideoW, m_nVideoW);
memcpy(m_pBufYuv420p, data,len);
//刷新界面,触发paintGL接口
//??????,????paintGL???
update();
}
*/
m_buffer = buffer;
update();
}
CPlayWidget::CPlayWidget(QWidget *parent):QOpenGLWidget(parent) {
@ -80,61 +106,36 @@ CPlayWidget::CPlayWidget(QWidget *parent):QOpenGLWidget(parent) {
m_pTextureY = NULL;
m_pTextureU = NULL;
m_pTextureV = NULL;
m_pYuvFile = NULL;
m_nVideoH = 0;
m_nVideoW = 0;
mType = TYPE_YUV420P;
connect(&this->tm,SIGNAL(timeout()),this,SLOT(OnUpdateFrame()));
//tm.start(1000);
}
// Destructor.
// NOTE(review): m_pBufYuv420p (allocated in SetImgSize) and the
// QOpenGLTexture objects created in initShader*() are never released here,
// so they leak with the widget. Freeing the GL textures needs a current GL
// context (e.g. makeCurrent()) - confirm ownership before adding deletes.
CPlayWidget::~CPlayWidget() {
}
void CPlayWidget::PlayOneFrame() {//函数功能读取一张yuv图像数据进行显示,每单击一次,就显示一张图片
if(NULL == m_pYuvFile)
{
//打开yuv视频文件 注意修改文件路径
// m_pYuvFile = fopen("F://OpenglYuvDemo//1920_1080.yuv", "rb");
m_pYuvFile = fopen("F://md_sample_sp420_1080p.yuv", "rb");
//根据yuv视频数据的分辨率设置宽高,demo当中是1080p这个地方要注意跟实际数据分辨率对应上
// m_nVideoW = 1920;
// m_nVideoH = 1080;
}
//申请内存存一帧yuv图像数据,其大小为分辨率的1.5倍
int nLen = m_nVideoW*m_nVideoH*3/2;
if(nullptr == m_pBufYuv420p)
{
m_pBufYuv420p = new unsigned char[nLen];
qDebug("CPlayWidget::PlayOneFrame new data memory. Len=%d width=%d height=%d\n",
nLen, m_nVideoW, m_nVideoW);
}
//将一帧yuv图像读到内存中
if(NULL == m_pYuvFile)
{
qFatal("read yuv file err.may be path is wrong!\n");
return;
}
fread(m_pBufYuv420p, 1, nLen, m_pYuvFile);
//刷新界面,触发paintGL接口
update();
return;
}
// Select the pixel format this widget will render (TYPE_RGB32 /
// TYPE_YUV420P / TYPE_I420). initializeGL() reads this to pick the
// matching fragment shader, so call it before the widget is shown.
// Returns 0.
int CPlayWidget::SetDataType(CPlayWidget::IMG_TYPE type){
    mType = type;
    return 0;
}
int CPlayWidget::OnCameraData(uint8_t *dat, uint32_t size)
int CPlayWidget::OnCameraData( rtc::scoped_refptr<webrtc::I420BufferInterface> &buffer)
{
memcpy(this->m_pBufRgb32,dat,size);
m_buffer = buffer;
memcpy(this->m_pBufYuv420p,buffer->GetI420()->DataY(),640*480);
memcpy(this->m_pBufYuv420p + 640*480 ,buffer->GetI420()->DataU(),640*480/4);
memcpy(this->m_pBufYuv420p+ 640*480 + 640*480/4,buffer->GetI420()->DataV(),640*480/4);
update();
return 0;
}
int CPlayWidget::OnCameraData(uint8_t *p)
{
memcpy(m_pBufYuv420p,p,640*480/2*3);
update();
return 0;
}
int CPlayWidget::SetImgSize(uint32_t width, uint32_t height)
@ -147,6 +148,9 @@ int CPlayWidget::SetImgSize(uint32_t width, uint32_t height)
if(mType == TYPE_YUV420P){
m_pBufYuv420p = new uint8_t[width * height *3/2];
}
if(mType == TYPE_I420){
m_pBufYuv420p = new uint8_t[width * height *3/2];
}
return 0;
}
@ -159,7 +163,7 @@ U = - 0.1687 R - 0.3313 G + 0.5 B + 128
V = 0.5 R - 0.4187 G - 0.0813 B + 128
RGB YUV (256) :
Conversely, RGB values are recovered from (256-level) YUV values as:
R = Y + 1.402 (Cr-128)
@ -172,20 +176,20 @@ void CPlayWidget::initializeGL()
initializeOpenGLFunctions();
glEnable(GL_DEPTH_TEST);
//现代opengl渲染管线依赖着色器来处理传入的数据
//着色器就是使用openGL着色语言(OpenGL Shading Language, GLSL)编写的一个小函数,
// GLSL是构成所有OpenGL着色器的语言,具体的GLSL语言的语法需要读者查找相关资料
//初始化顶点着色器 对象
//???opengl???????????????????????????????
//??????????????openGL???????(OpenGL Shading Language, GLSL)??д?????С????,
// GLSL?ǹ???????OpenGL???????????,?????GLSL????????????????????????
//?????????????? ????
m_pVSHader = new QOpenGLShader(QOpenGLShader::Vertex, this);
//编译顶点着色器程序
//???????????????
bool bCompile = m_pVSHader->compileSourceCode(vsrcyuv);
if(!bCompile)
{
// todo 设置错误状态
// todo ?????????
}
//初始化片段着色器 功能gpu中yuv转换成rgb
//????????????? ????gpu??yuv?????rgb
m_pFSHader = new QOpenGLShader(QOpenGLShader::Fragment, this);
if(mType == TYPE_RGB32){
bCompile = m_pFSHader->compileSourceCode(fsrcrgb);
@ -193,32 +197,35 @@ void CPlayWidget::initializeGL()
if(mType == TYPE_YUV420P){
bCompile = m_pFSHader->compileSourceCode(fsrcyuv);
}
if(mType == TYPE_I420){
bCompile = m_pFSHader->compileSourceCode(kI420FragmentShaderSource);
}
if(!bCompile)
{
// todo 设置错误状态
// todo ?????????
}
#define PROGRAM_VERTEX_ATTRIBUTE 0
#define PROGRAM_TEXCOORD_ATTRIBUTE 1
//创建着色器程序容器
//?????????????????
m_pShaderProgram = new QOpenGLShaderProgram;
//将片段着色器添加到程序容器
//???????????????????????
m_pShaderProgram->addShader(m_pFSHader);
//将顶点着色器添加到程序容器
//????????????????????????
m_pShaderProgram->addShader(m_pVSHader);
//绑定属性vertexIn到指定位置ATTRIB_VERTEX,该属性在顶点着色源码其中有声明
//??????vertexIn?????λ??ATTRIB_VERTEX,???????????????????????????
m_pShaderProgram->bindAttributeLocation("vertexIn", ATTRIB_VERTEX);
//绑定属性textureIn到指定位置ATTRIB_TEXTURE,该属性在顶点着色源码其中有声明
//??????textureIn?????λ??ATTRIB_TEXTURE,???????????????????????????
m_pShaderProgram->bindAttributeLocation("textureIn", ATTRIB_TEXTURE);
//链接所有所有添入到的着色器程序
//???????????????????????????
m_pShaderProgram->link();
//激活所有链接
//????????????
m_pShaderProgram->bind();
@ -228,82 +235,90 @@ void CPlayWidget::initializeGL()
if(this->mType == TYPE_RGB32){
initShaderRgb();
}
glClearColor(0.0,0.0,0.0,0.0);//设置背景色
if(this->mType == TYPE_I420){
initShaderI420();
}
glClearColor(0.0,0.0,0.0,0.0);//????????
}
// Qt callback: keep the GL viewport in sync with the widget size.
void CPlayWidget::resizeGL(int w, int h)
{
    // Clamp a zero height to 1 to avoid division by zero downstream.
    if (h == 0) {
        h = 1;
    }
    // Map the full widget area to the viewport.
    glViewport(0, 0, w, h);
}
// Qt callback: upload the current frame as texture(s) for the configured
// pixel format, then draw the full-screen quad.
void CPlayWidget::paintGL()
{
    switch (mType) {
    case TYPE_YUV420P:
    case TYPE_I420:
        // TYPE_I420 shares the planar-YUV upload path with TYPE_YUV420P.
        loadYuvTexture();
        break;
    case TYPE_RGB32:
        loadRgbTexture();
        break;
    }
    // Vertex/texcoord arrays were set up in the initShader*() helpers.
    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
}
void CPlayWidget::initShaderYuv()
{
//读取着色器中的数据变量tex_y, tex_u, tex_v的位置,这些变量的声明可以在
//片段着色器源码中可以看到
textureUniformY = m_pShaderProgram->uniformLocation("tex_y");
textureUniformU = m_pShaderProgram->uniformLocation("tex_u");
textureUniformV = m_pShaderProgram->uniformLocation("tex_v");
// 顶点矩阵
static const GLfloat vertexVertices[] = {
//?????????е????????tex_y, tex_u, tex_v??λ??,??Щ????????????????
//????????????п??????
textureUniformY = m_pShaderProgram->uniformLocation("tex_y");
textureUniformU = m_pShaderProgram->uniformLocation("tex_u");
textureUniformV = m_pShaderProgram->uniformLocation("tex_v");
// ???????
static const GLfloat vertexVertices[] = {
-1.0f, -1.0f,
1.0f, -1.0f,
-1.0f, 1.0f,
1.0f, 1.0f,
};
//纹理矩阵
static const GLfloat textureVertices[] = {
};
//????????
static const GLfloat textureVertices[] = {
0.0f, 1.0f,
1.0f, 1.0f,
0.0f, 0.0f,
1.0f, 0.0f,
};
//设置属性ATTRIB_VERTEX的顶点矩阵值以及格式
glVertexAttribPointer(ATTRIB_VERTEX, 2, GL_FLOAT, 0, 0, vertexVertices);
//设置属性ATTRIB_TEXTURE的纹理矩阵值以及格式
glVertexAttribPointer(ATTRIB_TEXTURE, 2, GL_FLOAT, 0, 0, textureVertices);
//启用ATTRIB_VERTEX属性的数据,默认是关闭的
glEnableVertexAttribArray(ATTRIB_VERTEX);
//启用ATTRIB_TEXTURE属性的数据,默认是关闭的
glEnableVertexAttribArray(ATTRIB_TEXTURE);
//分别创建y,u,v纹理对象
m_pTextureY = new QOpenGLTexture(QOpenGLTexture::Target2D);
m_pTextureU = new QOpenGLTexture(QOpenGLTexture::Target2D);
m_pTextureV = new QOpenGLTexture(QOpenGLTexture::Target2D);
m_pTextureY->create();
m_pTextureU->create();
m_pTextureV->create();
//获取返回y分量的纹理索引值
id_y = m_pTextureY->textureId();
//获取返回u分量的纹理索引值
id_u = m_pTextureU->textureId();
//获取返回v分量的纹理索引值
id_v = m_pTextureV->textureId();
};
//????????ATTRIB_VERTEX???????????????
glVertexAttribPointer(ATTRIB_VERTEX, 2, GL_FLOAT, 0, 0, vertexVertices);
//????????ATTRIB_TEXTURE?????????????????
glVertexAttribPointer(ATTRIB_TEXTURE, 2, GL_FLOAT, 0, 0, textureVertices);
//????ATTRIB_VERTEX?????????,????ǹ???
glEnableVertexAttribArray(ATTRIB_VERTEX);
//????ATTRIB_TEXTURE?????????,????ǹ???
glEnableVertexAttribArray(ATTRIB_TEXTURE);
//????y,u,v????????
m_pTextureY = new QOpenGLTexture(QOpenGLTexture::Target2D);
m_pTextureU = new QOpenGLTexture(QOpenGLTexture::Target2D);
m_pTextureV = new QOpenGLTexture(QOpenGLTexture::Target2D);
m_pTextureY->create();
m_pTextureU->create();
m_pTextureV->create();
//???????y???????????????
id_y = m_pTextureY->textureId();
//???????u???????????????
id_u = m_pTextureU->textureId();
//???????v???????????????
id_v = m_pTextureV->textureId();
}
void CPlayWidget::initShaderRgb()
{
//读取着色器中的数据变量tex_y, tex_u, tex_v的位置,这些变量的声明可以在
//片段着色器源码中可以看到
//?????????е????????tex_y, tex_u, tex_v??λ??,??Щ????????????????
//????????????п??????
textureUniformRGB = m_pShaderProgram->uniformLocation("rgbdata");
// 顶点矩阵
// ???????
static const GLfloat vertexVertices[] = {
-1.0f, -1.0f,
1.0f, -1.0f,
@ -311,38 +326,133 @@ void CPlayWidget::initShaderRgb()
1.0f, 1.0f,
};
//纹理矩阵
//????????
static const GLfloat textureVertices[] = {
0.0f, 0.0f,
1.0f, 0.0f,
0.0f, 1.0f,
1.0f, 1.0f,
0.0f, 0.0f,
1.0f, 0.0f,
0.0f, 1.0f,
1.0f, 1.0f,
};
//设置属性ATTRIB_VERTEX的顶点矩阵值以及格式
//????????ATTRIB_VERTEX???????????????
glVertexAttribPointer(ATTRIB_VERTEX, 2, GL_FLOAT, 0, 0, vertexVertices);
//设置属性ATTRIB_TEXTURE的纹理矩阵值以及格式
//????????ATTRIB_TEXTURE?????????????????
glVertexAttribPointer(ATTRIB_TEXTURE, 2, GL_FLOAT, 0, 0, textureVertices);
//启用ATTRIB_VERTEX属性的数据,默认是关闭的
//????ATTRIB_VERTEX?????????,????ǹ???
glEnableVertexAttribArray(ATTRIB_VERTEX);
//启用ATTRIB_TEXTURE属性的数据,默认是关闭的
//????ATTRIB_TEXTURE?????????,????ǹ???
glEnableVertexAttribArray(ATTRIB_TEXTURE);
//分别创建y,u,v纹理对象
//????y,u,v????????
m_pTextureRGB = new QOpenGLTexture(QOpenGLTexture::Target2D);
m_pTextureRGB->create();
//获取返回y分量的纹理索引值
//???????y???????????????
id_rgb = m_pTextureRGB->textureId();
}
void CPlayWidget::initShaderI420()
{
textureUniformY = m_pShaderProgram->uniformLocation("tex_y");
textureUniformU = m_pShaderProgram->uniformLocation("tex_u");
textureUniformV = m_pShaderProgram->uniformLocation("tex_v");
static const GLfloat vertexVertices[] = {
-1.0f, -1.0f,
1.0f, -1.0f,
-1.0f, 1.0f,
1.0f, 1.0f,
};
static const GLfloat textureVertices[] = {
0.0f, 1.0f,
1.0f, 1.0f,
0.0f, 0.0f,
1.0f, 0.0f,
};
glVertexAttribPointer(ATTRIB_VERTEX, 2, GL_FLOAT, 0, 0, vertexVertices);
glVertexAttribPointer(ATTRIB_TEXTURE, 2, GL_FLOAT, 0, 0, textureVertices);
glEnableVertexAttribArray(ATTRIB_VERTEX);
glEnableVertexAttribArray(ATTRIB_TEXTURE);
m_pTextureY = new QOpenGLTexture(QOpenGLTexture::Target2D);
m_pTextureU = new QOpenGLTexture(QOpenGLTexture::Target2D);
m_pTextureV = new QOpenGLTexture(QOpenGLTexture::Target2D);
m_pTextureY->create();
m_pTextureU->create();
m_pTextureV->create();
id_y = m_pTextureY->textureId();
id_u = m_pTextureU->textureId();
id_v = m_pTextureV->textureId();
}
// Upload the planar YUV420 frame held in m_pBufYuv420p into the Y/U/V
// textures and point the shader samplers at texture units 0/1/2.
// Always returns 0; does nothing until a frame buffer has been allocated.
int CPlayWidget::loadYuvTexture()
{
// No frame buffer yet (SetImgSize not called / no data received).
if(nullptr == m_pBufYuv420p)
return 0;
// ---- Y plane: m_nVideoW x m_nVideoH, one byte per texel (GL_RED) ----
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, id_y);
// glPixelStorei(GL_UNPACK_ROW_LENGTH, m_buffer->StrideY());
glTexImage2D(GL_TEXTURE_2D,
0,
GL_RED,
m_nVideoW,
m_nVideoH,
0,
GL_RED,
GL_UNSIGNED_BYTE,
m_pBufYuv420p);
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MAG_FILTER,GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MIN_FILTER,GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
// ---- U plane: half resolution, stored right after the Y plane ----
glActiveTexture(GL_TEXTURE1);
glBindTexture(GL_TEXTURE_2D, id_u);
// glPixelStorei(GL_UNPACK_ROW_LENGTH, m_buffer->StrideU());
glTexImage2D(GL_TEXTURE_2D,
0, GL_RED,
m_nVideoW/2,
m_nVideoH/2,
0,
GL_RED,
GL_UNSIGNED_BYTE,
m_pBufYuv420p + m_nVideoW*m_nVideoH);
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MAG_FILTER,GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MIN_FILTER,GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
// ---- V plane: half resolution, stored after the Y and U planes ----
glActiveTexture(GL_TEXTURE2);// activate texture unit GL_TEXTURE2
glBindTexture(GL_TEXTURE_2D, id_v);
// glPixelStorei(GL_UNPACK_ROW_LENGTH, m_buffer->StrideV());
glTexImage2D(GL_TEXTURE_2D,
0, GL_RED,
m_nVideoW/2,
m_nVideoH/2,
0, GL_RED,
GL_UNSIGNED_BYTE,
m_pBufYuv420p + m_nVideoW*m_nVideoH + m_nVideoW*m_nVideoH/4);
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MAG_FILTER,GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MIN_FILTER,GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
// Bind sampler "tex_y" to texture unit 0.
glUniform1i(textureUniformY, 0);
// Bind sampler "tex_u" to texture unit 1.
glUniform1i(textureUniformU, 1);
// Bind sampler "tex_v" to texture unit 2.
glUniform1i(textureUniformV, 2);
return 0;
}
int CPlayWidget::loadRtcI420Texture()
{
if(nullptr == m_buffer)
return 0;
glActiveTexture(GL_TEXTURE0);
//???????y????????????
glBindTexture(GL_TEXTURE_2D, id_y);
glPixelStorei(GL_UNPACK_ROW_LENGTH, m_buffer->StrideY());
glTexImage2D(GL_TEXTURE_2D,
0,
GL_RED,
m_nVideoW,
@ -350,15 +460,16 @@ int CPlayWidget::loadYuvTexture()
0,
GL_RED,
GL_UNSIGNED_BYTE,
m_pBufYuv420p);
m_buffer->DataY());
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MAG_FILTER,GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MIN_FILTER,GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
//加载u数据纹理
glActiveTexture(GL_TEXTURE1);//激活纹理单元GL_TEXTURE1
//????u????????
glActiveTexture(GL_TEXTURE1);
glBindTexture(GL_TEXTURE_2D, id_u);
glPixelStorei(GL_UNPACK_ROW_LENGTH, m_buffer->StrideU());
glTexImage2D(GL_TEXTURE_2D,
0, GL_RED,
m_nVideoW/2,
@ -366,43 +477,39 @@ int CPlayWidget::loadYuvTexture()
0,
GL_RED,
GL_UNSIGNED_BYTE,
(char*)m_pBufYuv420p+m_nVideoW*m_nVideoH);
m_buffer->DataU());
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MAG_FILTER,GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MIN_FILTER,GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
//加载v数据纹理
glActiveTexture(GL_TEXTURE2);//激活纹理单元GL_TEXTURE2
// v分量
glActiveTexture(GL_TEXTURE2);//???????????GL_TEXTURE2
glBindTexture(GL_TEXTURE_2D, id_v);
glPixelStorei(GL_UNPACK_ROW_LENGTH, m_buffer->StrideV());
glTexImage2D(GL_TEXTURE_2D,
0, GL_RED,
m_nVideoW/2,
m_nVideoH/2,
0, GL_RED,
GL_UNSIGNED_BYTE,
(char*)m_pBufYuv420p+m_nVideoW*m_nVideoH*5/4);
m_buffer->DataV());
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MAG_FILTER,GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MIN_FILTER,GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
//指定y纹理要使用新值 只能用0,1,2等表示纹理单元的索引这是opengl不人性化的地方
//0对应纹理单元GL_TEXTURE0 1对应纹理单元GL_TEXTURE1 2对应纹理的单元
glUniform1i(textureUniformY, 0);
//指定u纹理要使用新值
//???u???????????
glUniform1i(textureUniformU, 1);
//指定v纹理要使用新值
//???v???????????
glUniform1i(textureUniformV, 2);
return 0;
}
int CPlayWidget::loadRgbTexture()
{
//加载rgb数据纹理
//激活纹理单元GL_TEXTURE0
glActiveTexture(GL_TEXTURE0);
//使用来自y数据生成纹理
glBindTexture(GL_TEXTURE_2D, id_rgb);
//使用内存中m_pBufYuv420p数据创建真正的y数据纹理
glTexImage2D(GL_TEXTURE_2D,
0,
GL_RGBA,

View File

@ -1,3 +1,11 @@
/*
* @Author: your name
* @Date: 2021-10-05 19:32:04
* @LastEditTime: 2021-10-16 21:34:14
* @LastEditors: your name
* @Description: In User Settings Edit
* @FilePath: \src\cplaywidget.h
*/
#ifndef GLPLAYWIDGET_H
#define GLPLAYWIDGET_H
#include <QOpenGLWidget>
@ -8,6 +16,8 @@
#include <QTimer>
#include "api/video/i420_buffer.h"
#define ATTRIB_VERTEX 3
#define ATTRIB_TEXTURE 4
@ -16,18 +26,19 @@ class CPlayWidget:public QOpenGLWidget,protected QOpenGLFunctions
{
Q_OBJECT
public slots:
void OnUpdateFrame();
void OnPaintData(const uint8_t *data,uint32_t len);
void OnPaintData(const rtc::scoped_refptr<webrtc::I420BufferInterface> &buffer);
public:
typedef enum{
TYPE_YUV420P,
TYPE_RGB32,
TYPE_I420,
}IMG_TYPE;
CPlayWidget(QWidget* parent);
~CPlayWidget();
void PlayOneFrame();
int SetDataType(IMG_TYPE);
int OnCameraData(uint8_t *dat, uint32_t size) ;
int OnCameraData(rtc::scoped_refptr<webrtc::I420BufferInterface> &buffer);
int OnCameraData(uint8_t *);
int SetImgSize(uint32_t width,uint32_t );
protected:
QTimer tm;
@ -35,39 +46,39 @@ protected:
void resizeGL(int w, int h) override;
void paintGL() override;
private:
IMG_TYPE mType; // 目前只支持到RGB32,YUV420P
GLuint textureUniformY; //y纹理数据位置
GLuint textureUniformU; //u纹理数据位置
GLuint textureUniformV; //v纹理数据位置
GLuint textureUniformRGB; //RGB纹理位置
IMG_TYPE mType; //YUV420P
GLuint textureUniformY; //
GLuint textureUniformU; //
GLuint textureUniformV; //
GLuint textureUniformRGB; //
GLuint textureUnifromRGB; //rgb32 的纹理位置
GLuint textureUnifromRGB; //rgb32
GLuint id_rgb;
GLuint id_y;
GLuint id_u;
GLuint id_v; //v纹理对象ID
GLuint id_v; //v<EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD><EFBFBD>ID
QOpenGLTexture* m_pTextureRGB; //RGB 纹理是一整块的
QOpenGLTexture* m_pTextureRGB; //RGB
QOpenGLTexture* m_pTextureY; //y纹理对象
QOpenGLTexture* m_pTextureU; //u纹理对象
QOpenGLTexture* m_pTextureV; //v纹理对象
QOpenGLShader *m_pVSHader; //顶点着色器程序对象
QOpenGLShader *m_pFSHader; //片段着色器对象
QOpenGLShaderProgram *m_pShaderProgram; //着色器程序容器
int m_nVideoW; //视频分辨率宽
int m_nVideoH; //视频分辨率高
QOpenGLTexture* m_pTextureY; //
QOpenGLTexture* m_pTextureU; //
QOpenGLTexture* m_pTextureV; //
QOpenGLShader *m_pVSHader; //
QOpenGLShader *m_pFSHader; //
QOpenGLShaderProgram *m_pShaderProgram; //
int m_nVideoW; //
int m_nVideoH; //
unsigned char *m_pBufYuv420p;
unsigned char* m_pBufRgb32;
FILE* m_pYuvFile;
void initShaderYuv();
void initShaderRgb();
void initShaderI420();
int loadYuvTexture();
int loadRtcI420Texture();
int loadRgbTexture();
rtc::scoped_refptr<webrtc::I420BufferInterface> m_buffer;
};
#endif

View File

@ -10,7 +10,7 @@
#include "rtc_base/logging.h"
#include "video_capture.h"
#include "video_capturer_test.h"
#include <QMetaType>
# pragma comment(lib, "secur32.lib")
# pragma comment(lib, "winmm.lib")
@ -46,8 +46,6 @@ void EnumCapture()
}
}
int main(int argc, char *argv[])
{
const size_t kWidth = 1280;
@ -71,10 +69,17 @@ int main(int argc, char *argv[])
setbuf(stdout, NULL);
qRegisterMetaType<rtc::scoped_refptr<webrtc::I420BufferInterface>>("rtc::scoped_refptr<webrtc::I420BufferInterface>");
qRegisterMetaType<rtc::scoped_refptr<webrtc::I420BufferInterface>>("rtc::scoped_refptr<webrtc::I420BufferInterface>&");
QCoreApplication::setAttribute(Qt::AA_DisableHighDpiScaling);
QApplication a(argc, argv);
MainWindow w;
// QObject::connect((VcmCapturerTest*)capturer.get(),SIGNAL(UpdateFrame(rtc::scoped_refptr<webrtc::I420BufferInterface>&)),&w,
// SLOT(OnUpdateFrame( rtc::scoped_refptr<webrtc::I420BufferInterface>&)),Qt::ConnectionType::QueuedConnection);
QObject::connect((VcmCapturerTest*)capturer.get(),SIGNAL(UpdateFrame1(uint8_t*)),&w,
SLOT(OnUpdateFrame1( uint8_t *)),Qt::ConnectionType::QueuedConnection);
w.show();
return a.exec();
}

View File

@ -6,6 +6,8 @@ MainWindow::MainWindow(QWidget *parent)
, ui(new Ui::MainWindow)
{
ui->setupUi(this);
ui->openGLWidget->SetImgSize(640,480);
ui->openGLWidget->show();
}
MainWindow::~MainWindow()
@ -13,3 +15,15 @@ MainWindow::~MainWindow()
delete ui;
}
// Slot: forward a captured I420 frame buffer to the GL playback widget.
// Fix: removed the leftover qDebug()<<"1234" debug print, which spammed the
// log on every frame.
void MainWindow::OnUpdateFrame( rtc::scoped_refptr<webrtc::I420BufferInterface>& buffer)
{
    ui->openGLWidget->OnCameraData(buffer);
}
// Slot: forward a raw I420 byte buffer (Y then U then V planes) to the GL
// playback widget, then release it.
// Fixes: removed the leftover qDebug()<<"4321" per-frame debug print, and
// freed the buffer - the capturer allocates it with new[] for every frame
// and never deletes it, so ownership transfers through the queued signal;
// OnCameraData() copies the data out, making it safe to delete here.
void MainWindow::OnUpdateFrame1(uint8_t *dat)
{
    ui->openGLWidget->OnCameraData(dat);
    delete [] dat;
}

View File

@ -2,7 +2,7 @@
#define MAINWINDOW_H
#include <QMainWindow>
#include "api/video/i420_buffer.h"
QT_BEGIN_NAMESPACE
namespace Ui { class MainWindow; }
@ -15,6 +15,9 @@ class MainWindow : public QMainWindow
public:
MainWindow(QWidget *parent = nullptr);
~MainWindow();
public slots:
void OnUpdateFrame( rtc::scoped_refptr<webrtc::I420BufferInterface>& buffer);
void OnUpdateFrame1( uint8_t *);
private:
Ui::MainWindow *ui;

View File

@ -53,11 +53,18 @@
</layout>
</item>
<item>
<widget class="QOpenGLWidget" name="openGLWidget"/>
<widget class="CPlayWidget" name="openGLWidget"/>
</item>
</layout>
</widget>
</widget>
<customwidgets>
<customwidget>
<class>CPlayWidget</class>
<extends>QOpenGLWidget</extends>
<header>cplaywidget.h</header>
</customwidget>
</customwidgets>
<resources/>
<connections/>
</ui>

View File

@ -72,19 +72,32 @@ void VcmCapturerTest::Destroy() {
}
void VcmCapturerTest::OnFrame(const webrtc::VideoFrame& frame) {
static auto timestamp = std::chrono::duration_cast<std::chrono::milliseconds>(
static auto timestamp = std::chrono::duration_cast<std::chrono::milliseconds>(
std::chrono::system_clock::now().time_since_epoch()).count();
static size_t cnt = 0;
static size_t cnt = 0;
RTC_LOG(LS_INFO) << "OnFrame "<<frame.width()<<" "<<frame.height()<<" "<<frame.size()<<" "<<frame.timestamp();
VideoCapturerTest::OnFrame(frame);
RTC_LOG(LS_INFO) << "OnFrame "<<frame.width()<<" "<<frame.height()<<" "
<<frame.size()<<" "<<frame.timestamp()<<frame.video_frame_buffer().get()->type()
<<" stride "<<frame.video_frame_buffer().get()->GetI420()->StrideY()<< frame.video_frame_buffer().get()->GetI420()->StrideU() ;
cnt++;
auto timestamp_curr = std::chrono::duration_cast<std::chrono::milliseconds>(
int m_height = frame.height();
int m_width = frame.width();
auto frameBuffer = frame.video_frame_buffer()->ToI420();
uint8_t *dat = new uint8_t[m_width*m_height/2*3];
memcpy(dat,frame.video_frame_buffer()->ToI420()->DataY(),m_height*m_width);
memcpy(dat + m_height*m_width,frame.video_frame_buffer()->ToI420()->DataU(),m_height*m_width/4);
memcpy(dat + m_height*m_width + m_height*m_width/4,
frame.video_frame_buffer()->ToI420()->DataV(),m_height*m_width/4);
emit(this->UpdateFrame1(dat));
VideoCapturerTest::OnFrame(frame);
cnt++;
auto timestamp_curr = std::chrono::duration_cast<std::chrono::milliseconds>(
std::chrono::system_clock::now().time_since_epoch()).count();
if(timestamp_curr - timestamp > 1000) {
RTC_LOG(LS_INFO) << "FPS: " << cnt;
cnt = 0;
timestamp = timestamp_curr;
}
if(timestamp_curr - timestamp > 1000) {
RTC_LOG(LS_INFO) << "FPS: " << cnt;
cnt = 0;
timestamp = timestamp_curr;
}
}

View File

@ -5,14 +5,21 @@
#include <memory>
#include <QObject>
#include "modules/video_capture/video_capture.h"
#include "video_capturer_test.h"
class VcmCapturerTest : public VideoCapturerTest,
public rtc::VideoSinkInterface<webrtc::VideoFrame> {
public:
static VcmCapturerTest* Create(size_t width,
class VcmCapturerTest : public QObject,
public VideoCapturerTest,
public rtc::VideoSinkInterface<webrtc::VideoFrame>
{
Q_OBJECT
public:
VcmCapturerTest();
static VcmCapturerTest* Create(size_t width,
size_t height,
size_t target_fps,
size_t capture_device_index);
@ -20,9 +27,12 @@ class VcmCapturerTest : public VideoCapturerTest,
virtual ~VcmCapturerTest();
void OnFrame(const webrtc::VideoFrame& frame) override;
signals:
void UpdateFrame(rtc::scoped_refptr<webrtc::I420BufferInterface>& buffer);
void UpdateFrame1(uint8_t *dat);
private:
VcmCapturerTest();
bool Init(size_t width,
size_t height,

View File

@ -11,9 +11,12 @@
#include "media/base/video_broadcaster.h"
#include "media/base/video_adapter.h"
#include <mutex>
#include <QObject>
class VideoCapturerTest : public QObject,
public rtc::VideoSourceInterface<webrtc::VideoFrame>{
Q_OBJECT
class VideoCapturerTest : public rtc::VideoSourceInterface<webrtc::VideoFrame> {
public:
class FramePreprocessor {
public: