no message

master
zcy 2021-10-27 23:42:22 +08:00
parent f3fe679e1a
commit 387543ed11
6 changed files with 176 additions and 89 deletions

View File

@@ -32,23 +32,24 @@ absl::optional<bool> MyCapturer::needs_denoising() const {
void MyCapturer::OnCaptureResult(webrtc::DesktopCapturer::Result result,
                                 std::unique_ptr<webrtc::DesktopFrame> frame) {
  if (result != webrtc::DesktopCapturer::Result::SUCCESS)
    return;
  int width = frame->size().width();
  int height = frame->size().height();
  if (!i420_buffer_.get() ||
      i420_buffer_->width() * i420_buffer_->height() < width * height) {
    i420_buffer_ = webrtc::I420Buffer::Create(width, height);
  }
  // libyuv::ConvertToI420(frame->data(), 0, i420_buffer_->MutableDataY(),
  //                       i420_buffer_->StrideY(), i420_buffer_->MutableDataU(),
  //                       i420_buffer_->StrideU(), i420_buffer_->MutableDataV(),
  //                       i420_buffer_->StrideV(), 0, 0, width, height, width,
  //                       height, libyuv::kRotate0, libyuv::FOURCC_ARGB);
  OnFrame(webrtc::VideoFrame(i420_buffer_, 0, 0, webrtc::kVideoRotation_0));
}
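The conversion above is left commented out, so the I420 buffer handed to OnFrame() is never filled from the captured desktop frame. A minimal sketch of enabling it, assuming libyuv is linked and the capturer delivers 32-bit BGRA pixels (which libyuv addresses as FOURCC_ARGB); frame->stride() supplies the source row size in bytes:

  // Sketch only -- convert the captured ARGB pixels into the reusable I420
  // buffer before wrapping it in a webrtc::VideoFrame.
  libyuv::ConvertToI420(frame->data(), frame->stride() * height,
                        i420_buffer_->MutableDataY(), i420_buffer_->StrideY(),
                        i420_buffer_->MutableDataU(), i420_buffer_->StrideU(),
                        i420_buffer_->MutableDataV(), i420_buffer_->StrideV(),
                        0, 0, width, height, width, height,
                        libyuv::kRotate0, libyuv::FOURCC_ARGB);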
void MyCapturer::OnMessage(rtc::Message* msg) {
@@ -59,6 +60,6 @@ void MyCapturer::OnMessage(rtc::Message* msg) {
void MyCapturer::CaptureFrame() {
  capturer_->CaptureFrame();
  // rtc::Location loc(__FUNCTION__, __FILE__);
  // rtc::Thread::Current()->PostDelayed(loc, 33, this, 0);
  rtc::Location loc(__FUNCTION__, __FILE__, __LINE__);
  rtc::Thread::Current()->PostDelayed(loc, 33, this, 0);
}
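The PostDelayed() call above re-arms the capture loop roughly every 33 ms. The body of OnMessage is not part of this diff; a minimal sketch of a matching handler, assuming message id 0 is reserved for this loop, would be:

void MyCapturer::OnMessage(rtc::Message* msg) {
  // Sketch only -- id 0 matches the last argument of PostDelayed() above,
  // so each delayed message triggers the next desktop capture.
  if (msg->message_id == 0)
    CaptureFrame();
}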

View File

@@ -18,6 +18,36 @@
#include "rtc_base/win32_socket_server.h"
#include <QMetaType>
#include "absl/memory/memory.h"
#include "absl/types/optional.h"
#include "api/audio/audio_mixer.h"
#include "api/audio_codecs/audio_decoder_factory.h"
#include "api/audio_codecs/audio_encoder_factory.h"
#include "api/audio_codecs/builtin_audio_decoder_factory.h"
#include "api/audio_codecs/builtin_audio_encoder_factory.h"
#include "api/audio_options.h"
#include "api/create_peerconnection_factory.h"
#include "api/rtp_sender_interface.h"
#include "api/video_codecs/builtin_video_decoder_factory.h"
#include "api/video_codecs/builtin_video_encoder_factory.h"
#include "api/video_codecs/video_decoder_factory.h"
#include "api/video_codecs/video_encoder_factory.h"
#include "modules/audio_device/include/audio_device.h"
#include "modules/audio_processing/include/audio_processing.h"
#include "modules/video_capture/video_capture.h"
#include "modules/video_capture/video_capture_factory.h"
#include "p2p/base/port_allocator.h"
#include "pc/video_track_source.h"
#include "rtc_base/checks.h"
#include "rtc_base/logging.h"
#include "rtc_base/ref_counted_object.h"
#include "rtc_base/rtc_certificate_generator.h"
#include <QMainWindow>
#include "api/video/i420_buffer.h"
#include "signal_client.h"
#include <QStandardItemModel>
#pragma comment (lib,"advapi32.lib")
extern"C"
@@ -64,6 +94,8 @@ void EnumCapture()
void InitCustomMetaType() {
  qRegisterMetaType<rtc::scoped_refptr<webrtc::I420BufferInterface>>("rtc::scoped_refptr<webrtc::I420BufferInterface>");
  qRegisterMetaType<rtc::scoped_refptr<webrtc::I420BufferInterface>>("rtc::scoped_refptr<webrtc::I420BufferInterface>&");
  qRegisterMetaType<webrtc::VideoTrackInterface*>("webrtc::VideoTrackInterface*");
  qRegisterMetaType<webrtc::MediaStreamTrackInterface*>("webrtc::MediaStreamTrackInterface*");
}
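Registering these metatypes is what lets the raw WebRTC pointer types travel through queued signal/slot connections from WebRTC's callback threads into the Qt GUI thread. A minimal usage sketch; the handler and window variable names are illustrative, not from this commit:

  // Sketch only -- with the types registered above, an automatic/queued
  // connection can marshal the pointer through Qt's event loop.
  QObject::connect(webrtc_handler, SIGNAL(OnRemoteTrack(webrtc::MediaStreamTrackInterface*)),
                   main_window, SLOT(on_track_add(webrtc::MediaStreamTrackInterface*)),
                   Qt::QueuedConnection);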
int main(int argc, char *argv[])
@@ -88,7 +120,6 @@ int main(int argc, char *argv[])
    // }
    // }
    rtc::WinsockInitializer winsock_init;
    rtc::Win32SocketServer w32_ss;
    rtc::Win32Thread w32_thread(&w32_ss);

View File

@@ -34,7 +34,11 @@ MainWindow::MainWindow(QWidget *parent)
            this, SLOT(itemClicked(QModelIndex)));
    connect((WebrtcHanlder*)(mHandler.get()), SIGNAL(OnOfferSdp(QString)),
            this, SLOT(on_local_sdp(QString)));
    connect((WebrtcHanlder*)(mHandler.get()), SIGNAL(OnRemoteTrack(webrtc::MediaStreamTrackInterface*)),
            this, SLOT(on_track_add(webrtc::MediaStreamTrackInterface*)));
    connect((WebrtcHanlder*)(mHandler.get()), SIGNAL(OnLocalTrack(webrtc::VideoTrackInterface*)),
            this, SLOT(on_local_track_add(webrtc::VideoTrackInterface*)));
    mHandler.get()->setParent(this);
}
MainWindow::~MainWindow()
@@ -115,7 +119,7 @@ int WebrtcHanlder::InitWebrtc()
    rtc::scoped_refptr<webrtc::VideoTrackInterface> video_track_(
        m_peer_connection_factory_->CreateVideoTrack(kVideoLabel, video_device));
    // main_wnd_->StartLocalRenderer(video_track_);
    // mParent->on_local_track_add(video_track_);
    result_or_error = m_peer_connection_->AddTrack(video_track_, { kStreamId });
    if (!result_or_error.ok()) {
        qDebug() << "Failed to add video track to PeerConnection: "
@@ -128,41 +132,6 @@ int WebrtcHanlder::InitWebrtc()
}
int WebrtcHanlder::AddTrack()
{
    if (!m_peer_connection_->GetSenders().empty()) {
        return -1;  // Already added tracks.
    }
    // add audio tracks
    rtc::scoped_refptr<webrtc::AudioTrackInterface> audio_track(
        m_peer_connection_factory_->CreateAudioTrack(
            kAudioLabel, m_peer_connection_factory_->CreateAudioSource(
                             cricket::AudioOptions())));
    auto result_or_error = m_peer_connection_->AddTrack(audio_track, {kStreamId});
    if (!result_or_error.ok()) {
        RTC_LOG(LS_ERROR) << "Failed to add audio track to PeerConnection: "
                          << result_or_error.error().message();
    }
    //rtc::scoped_refptr<CapturerTrackSource> video_device =
    //    CapturerTrackSource::Create();
    rtc::scoped_refptr<MyCapturer> video_device = new rtc::RefCountedObject<MyCapturer>();
    if (video_device) {
        video_device->startCapturer();
        rtc::scoped_refptr<webrtc::VideoTrackInterface> video_track_(
            m_peer_connection_factory_->CreateVideoTrack(kVideoLabel, video_device));
        result_or_error = m_peer_connection_->AddTrack(video_track_, { kStreamId });
        if (!result_or_error.ok()) {
            RTC_LOG(LS_ERROR) << "Failed to add video track to PeerConnection: "
                              << result_or_error.error().message();
        }
    } else {
        RTC_LOG(LS_ERROR) << "OpenVideoCaptureDevice failed";
    }
}
void WebrtcHanlder::SetSignalClient(SignalClient * cli)
{
@@ -247,6 +216,11 @@ void WebrtcHanlder::SetRemoteCandidate(QString scandidate)
    }
}
void WebrtcHanlder::SetParent(MainWindow *p)
{
    mParent = p;
}
void WebrtcHanlder::CreateAnwer()
{
qDebug()<<"create answer";
@@ -263,11 +237,12 @@ void WebrtcHanlder::OnSignalingChange(webrtc::PeerConnectionInterface::Signaling
}
void WebrtcHanlder::OnAddTrack(rtc::scoped_refptr<webrtc::RtpReceiverInterface> receiver,
                               const std::vector<rtc::scoped_refptr<webrtc::MediaStreamInterface> > &streams)
{
    qDebug() << "OnAddTrack";
    RTC_LOG(INFO) << __FUNCTION__ << " " << receiver->id();
    OnRemoteTrack(receiver->track().release());
}
void WebrtcHanlder::OnRemoveTrack(rtc::scoped_refptr<webrtc::RtpReceiverInterface> receiver)
@@ -413,7 +388,6 @@ void MainWindow::itemClicked(QModelIndex index)
        qDebug() << "请先连接信令服务";  // "Please connect to the signaling server first"
    }
    mHandler->CreateOffer();
    //mSignalClient->SendSDPOffer();
    qDebug() << mModel->item(index.row())->text();
    mRemoteName = mModel->item(index.row())->text();
    mHandler->SetRemotePeerName(mRemoteName);
@@ -426,3 +400,77 @@ void MainWindow::on_local_sdp(QString sdp)
    qDebug() << "sdp size is " << sdp.size();
    mSignalClient->SendSDPOffer(mRemoteName, sdp);
}
void MainWindow::on_track_add(webrtc::MediaStreamTrackInterface *data)
{
    qDebug() << "on_track_add" << data->kind().c_str();
    auto* track = reinterpret_cast<webrtc::MediaStreamTrackInterface*>(data);
    if (track->kind() == webrtc::MediaStreamTrackInterface::kVideoKind) {
        auto* video_track = static_cast<webrtc::VideoTrackInterface*>(track);
        remote_renderer_.reset(new VideoRenderer(1, 1, video_track));
    }
    track->Release();
}
void MainWindow::on_local_track_add(webrtc::VideoTrackInterface* data)
{
    qDebug() << "on_local_track_add";
    local_renderer_.reset(new VideoRenderer(1, 1, data));
}
VideoRenderer::VideoRenderer(int width, int height, webrtc::VideoTrackInterface *track_to_render)
    : rendered_track_(track_to_render)
{
    ::InitializeCriticalSection(&buffer_lock_);
    ZeroMemory(&bmi_, sizeof(bmi_));
    bmi_.bmiHeader.biSize = sizeof(BITMAPINFOHEADER);
    bmi_.bmiHeader.biPlanes = 1;
    bmi_.bmiHeader.biBitCount = 32;
    bmi_.bmiHeader.biCompression = BI_RGB;
    bmi_.bmiHeader.biWidth = width;
    bmi_.bmiHeader.biHeight = -height;
    bmi_.bmiHeader.biSizeImage =
        width * height * (bmi_.bmiHeader.biBitCount >> 3);
    rendered_track_->AddOrUpdateSink(this, rtc::VideoSinkWants());
}
VideoRenderer::~VideoRenderer()
{
    rendered_track_->RemoveSink(this);
    ::DeleteCriticalSection(&buffer_lock_);
}
void VideoRenderer::OnFrame(const webrtc::VideoFrame &video_frame)
{
    qDebug() << "onframe";
    {
        AutoLock<VideoRenderer> lock(this);
        rtc::scoped_refptr<webrtc::I420BufferInterface> buffer(
            video_frame.video_frame_buffer()->ToI420());
        if (video_frame.rotation() != webrtc::kVideoRotation_0) {
            buffer = webrtc::I420Buffer::Rotate(*buffer, video_frame.rotation());
        }
        SetSize(buffer->width(), buffer->height());
        RTC_DCHECK(image_.get() != NULL);
        qDebug() << buffer->width() << buffer->height();
        // libyuv::I420ToARGB(buffer->DataY(), buffer->StrideY(), buffer->DataU(),
        //                    buffer->StrideU(), buffer->DataV(), buffer->StrideV(),
        //                    image_.get(),
        //                    bmi_.bmiHeader.biWidth * bmi_.bmiHeader.biBitCount / 8,
        //                    buffer->width(), buffer->height());
    }
}
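As in the capturer above, the color conversion here is still commented out, so image_ never receives pixels. A minimal sketch of enabling it inside the locked block and handing the result to Qt for painting; the QImage step is an assumption about how the buffer would be displayed, not code from this commit:

        // Sketch only -- fill the ARGB staging buffer described by bmi_.
        libyuv::I420ToARGB(buffer->DataY(), buffer->StrideY(),
                           buffer->DataU(), buffer->StrideU(),
                           buffer->DataV(), buffer->StrideV(),
                           image_.get(),
                           bmi_.bmiHeader.biWidth * bmi_.bmiHeader.biBitCount / 8,
                           buffer->width(), buffer->height());
        // Wrap the buffer without copying; QImage::Format_ARGB32 matches the
        // BGRA byte order libyuv's "ARGB" produces on little-endian Windows.
        QImage img(image_.get(), bmi_.bmiHeader.biWidth, -bmi_.bmiHeader.biHeight,
                   bmi_.bmiHeader.biWidth * 4, QImage::Format_ARGB32);
        // A real renderer would now post a deep copy (img.copy()) to the GUI
        // thread and draw it in a widget's paintEvent().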
void VideoRenderer::SetSize(int width, int height)
{
    if (width == bmi_.bmiHeader.biWidth && height == bmi_.bmiHeader.biHeight) {
        return;
    }
    bmi_.bmiHeader.biWidth = width;
    bmi_.bmiHeader.biHeight = -height;
    bmi_.bmiHeader.biSizeImage =
        width * height * (bmi_.bmiHeader.biBitCount >> 3);
    image_.reset(new uint8_t[bmi_.bmiHeader.biSizeImage]);
}

View File

@@ -38,7 +38,7 @@ QT_END_NAMESPACE
class VideoRenderer : public rtc::VideoSinkInterface<webrtc::VideoFrame> {
public:
    VideoRenderer(HWND wnd,
    VideoRenderer(
        int width,
        int height,
        webrtc::VideoTrackInterface* track_to_render);
@@ -80,7 +80,7 @@ class AutoLock {
protected:
    T* obj_;
};
class MainWindow;
class WebrtcHanlder : public QObject,
                      public webrtc::PeerConnectionObserver,
@@ -89,6 +89,8 @@ class WebrtcHanlder :public QObject,
signals:
    void OnOfferSdp(QString);
    void OnAnswerSdp(QString);
    void OnRemoteTrack(webrtc::MediaStreamTrackInterface*);
    void OnLocalTrack(webrtc::VideoTrackInterface*);
public:
    void SetRemotePeerName(QString);
    int InitWebrtc();
@ -97,7 +99,7 @@ public:
    void CreateOffer();
    void SetRemoteSdp(QString);
    void SetRemoteCandidate(QString);
    void SetParent(MainWindow *p);
    void CreateAnwer();
protected:
    ~WebrtcHanlder();
@ -130,6 +132,7 @@ private:
    m_peer_connection_factory_;
    SignalClient *mClient;
    QString mRemoteName;
    MainWindow *mParent;
};
@ -145,7 +148,6 @@ public slots:
    void OnUpdateFrame(rtc::scoped_refptr<webrtc::I420BufferInterface>& buffer);
    void OnUpdateFrame1(uint8_t *);
private slots:
    void on_pushButton_clicked();
    void signal_conneted();
    void on_pushButton_2_clicked();
@ -153,6 +155,9 @@ private slots:
    void signal_response(int, QJsonObject);
    void itemClicked(QModelIndex);
    void on_local_sdp(QString);
    void on_track_add(webrtc::MediaStreamTrackInterface*);
    void on_local_track_add(webrtc::VideoTrackInterface*);
private:
    Ui::MainWindow *ui;
    rtc::scoped_refptr<WebrtcHanlder> mHandler;
@ -162,6 +167,9 @@ private:
    QString mRemoteName;
    QString mRemoteNameCalled;
    bool mCalling;
    std::unique_ptr<VideoRenderer> local_renderer_;
    std::unique_ptr<VideoRenderer> remote_renderer_;
};
#endif // MAINWINDOW_H

View File

@@ -15,36 +15,37 @@ bool VcmCapturerTest::Init(size_t width,
                           size_t height,
                           size_t target_fps,
                           size_t capture_device_index)
{
  std::unique_ptr<webrtc::VideoCaptureModule::DeviceInfo> device_info(
      webrtc::VideoCaptureFactory::CreateDeviceInfo());
  char device_name[256];
  char unique_name[256];
  if (device_info->GetDeviceName(static_cast<uint32_t>(capture_device_index),
                                 device_name, sizeof(device_name), unique_name,
                                 sizeof(unique_name)) != 0) {
    Destroy();
    return false;
  }
  vcm_ = webrtc::VideoCaptureFactory::Create(unique_name);
  if (!vcm_) {
    return false;
  }
  vcm_->RegisterCaptureDataCallback(this);
  device_info->GetCapability(vcm_->CurrentDeviceName(), 0, capability_);
  capability_.width = static_cast<int32_t>(width);
  capability_.height = static_cast<int32_t>(height);
  capability_.maxFPS = static_cast<int32_t>(target_fps);
  capability_.videoType = webrtc::VideoType::kI420;
  if (vcm_->StartCapture(capability_) != 0) {
    Destroy();
    return false;
  }
  RTC_CHECK(vcm_->CaptureStarted());
  return true;
}
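The header below declares an OnFrame override and two Qt signals but their bodies are not part of this diff; a minimal sketch of how OnFrame could forward captured frames to the Qt side (an assumption about intent, not code from this commit):

void VcmCapturerTest::OnFrame(const webrtc::VideoFrame& frame) {
  // Sketch only -- normalize whatever the camera delivered to I420 and let a
  // connected slot (e.g. MainWindow::OnUpdateFrame) render it.
  rtc::scoped_refptr<webrtc::I420BufferInterface> buffer =
      frame.video_frame_buffer()->ToI420();
  emit UpdateFrame(buffer);
}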
VcmCapturerTest* VcmCapturerTest::Create(size_t width,

View File

@ -24,25 +24,23 @@ public:
                  size_t target_fps,
                  size_t capture_device_index);
  virtual ~VcmCapturerTest();
  void OnFrame(const webrtc::VideoFrame& frame) override;
signals:
  void UpdateFrame(rtc::scoped_refptr<webrtc::I420BufferInterface>& buffer);
  void UpdateFrame1(uint8_t *dat);
private:
  bool Init(size_t width,
            size_t height,
            size_t target_fps,
            size_t capture_device_index);
  void Destroy();
  rtc::scoped_refptr<webrtc::VideoCaptureModule> vcm_;
  webrtc::VideoCaptureCapability capability_;
};