1. Debugging the WebRTC open-source library


2. error C2220: warning treated as error - no 'object' file generated
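The project compiles with warnings-as-errors (/WX), so any warning aborts compilation before an object file is produced. The coarse fix is to set "Treat Warnings As Errors" to No under C/C++ → General. A narrower, hedged workaround is to silence just the offending warning around the include that triggers it; C4996 and the header name below are only placeholders, substitute the number and file from your build log:

// Sketch: suppress a single warning locally instead of disabling /WX globally.
#pragma warning(push)
#pragma warning(disable: 4996)   // example warning number; use the one reported
#include "offending_header.h"    // hypothetical header that trips the warning
#pragma warning(pop)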


3. error C2024: the 'alignas' attribute applies only to variables, data members and tag types
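A hypothetical minimal repro of this diagnostic class (not necessarily the exact WebRTC construct that triggered it): MSVC rejects alignas on a typedef, because an alias is neither a variable, a data member, nor a tag type. Moving the specifier onto the type or variable itself resolves it:

// error C2024: alignas may not qualify a typedef
typedef alignas(16) float Vec4[4];

// Accepted placements: tag type, variable, or data member.
struct alignas(16) AlignedVec4 { float v[4]; };
alignas(16) static float scratch[4];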

4. program files (x86)\windows kits\8.1\include\um\gdiplustypes.h(476): error C3861: 'min': identifier not found.

Reference: http://blog.sina.com.cn/s/blog_4a082449010138jh.html
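The usual cause: the build defines NOMINMAX, so <windows.h> no longer provides the min/max macros that gdiplustypes.h relies on. A minimal sketch in the spirit of the linked post's fix is to make min and max visible before pulling in the GDI+ headers:

// Bring min/max into scope before GDI+, which assumes the old macros exist.
#include <algorithm>
using std::min;
using std::max;
#include <gdiplus.h>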

5. libcpmtd.lib(raisehan.obj) : error LNK2038: mismatch detected for 'RuntimeLibrary': value 'MTd_StaticDebug' doesn't match value 'MDd_DynamicDebug'. Fix: make the Runtime Library setting (C/C++ → Code Generation) consistent across every project in the solution, e.g. /MDd for all Debug configurations.

Reference: https://www.cnblogs.com/lzpong/p/4631784.html

6. afxver_.h(77): fatal error C1189: #error: Please use the /MD switch for _AFXDLL builds. Fix: when "Use of MFC" is set to shared DLL (_AFXDLL), compile with /MD (/MDd in Debug), or switch MFC to a static library instead.

Reference: https://www.cnblogs.com/zwh0214/p/6048360.html

7. Bypassing the decoding inside the WebRTC sources and doing your own H.264 decoding. The demo in item 14 shows the approach used here: register a pre_decode_callback on the receive stream so the still-encoded frames can be handed to an external FFmpeg decoder.


8. Runtime error: caused by mismatched FFmpeg .lib and .dll files. The import libraries linked at build time must come from the same FFmpeg build as the DLLs loaded at run time.


9. 'max' error inside the WebRTC sources — likely the same <windows.h> min/max macro clash as issue 4.


10. Capturing packets and parsing RTP and RTCP
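When inspecting what a packet capture (or the UdpTransportData callbacks in the demo below) delivers, the 12-byte RTP fixed header from RFC 3550 is easy to decode by hand. A self-contained sketch — struct and function names are mine; the demo's payload type 121 would show up in the PT field:

// Minimal RTP fixed-header parser (RFC 3550). Names are illustrative.
#include <cstddef>
#include <cstdint>

struct RtpFixedHeader {
	uint8_t  version;         // V: always 2 for RTP
	bool     padding;         // P
	bool     extension;       // X
	uint8_t  csrcCount;       // CC
	bool     marker;          // M
	uint8_t  payloadType;     // PT: 121 for the H264 stream in the demo below
	uint16_t sequenceNumber;  // big-endian on the wire
	uint32_t timestamp;
	uint32_t ssrc;
};

bool ParseRtpHeader(const uint8_t* buf, size_t len, RtpFixedHeader* h)
{
	if (len < 12) return false;   // fixed header is 12 bytes
	h->version        = buf[0] >> 6;
	h->padding        = ((buf[0] >> 5) & 0x01) != 0;
	h->extension      = ((buf[0] >> 4) & 0x01) != 0;
	h->csrcCount      = buf[0] & 0x0F;
	h->marker         = ((buf[1] >> 7) & 0x01) != 0;
	h->payloadType    = buf[1] & 0x7F;
	h->sequenceNumber = (uint16_t)((buf[2] << 8) | buf[3]);
	h->timestamp      = ((uint32_t)buf[4] << 24) | ((uint32_t)buf[5] << 16) | ((uint32_t)buf[6] << 8) | buf[7];
	h->ssrc           = ((uint32_t)buf[8] << 24) | ((uint32_t)buf[9] << 16) | ((uint32_t)buf[10] << 8) | buf[11];
	return h->version == 2;
}

RTCP packets share the same first-byte layout, but the second byte carries the RTCP packet type instead (200 = SR, 201 = RR, 202 = SDES, 203 = BYE, 204 = APP).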


11. Renderer flags when creating the SDL renderer

LocalVideorenderer = SDL_CreateRenderer(LocalVideowin, -1, SDL_RENDERER_ACCELERATED); //SDL_RENDERER_SOFTWARE
Passing 0 for the flags defaults to hardware rendering (SDL_RENDERER_ACCELERATED), but in the current environment 0 crashes outright, so software rendering has to be used instead.
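A defensive pattern (a minimal sketch, SDL2 assumed; win stands for whatever window was just created) is to ask for the accelerated renderer first and fall back to software when creation fails:

// Sketch: prefer hardware rendering, fall back to software when it fails.
SDL_Renderer* renderer = SDL_CreateRenderer(win, -1, SDL_RENDERER_ACCELERATED);
if (!renderer) {
	SDL_Log("Accelerated renderer failed (%s); falling back to software", SDL_GetError());
	renderer = SDL_CreateRenderer(win, -1, SDL_RENDERER_SOFTWARE);
}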

12. If the SDL texture is created on one thread and the actual rendering happens on another, the picture does not update.

13. With SDL software rendering, local playback exits outright after roughly one minute.

Solution: do not render inside the callback; store the frames in a queue and let a separate thread do the rendering.
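A minimal sketch of that queue-plus-render-thread pattern (all names here are illustrative, not from the demo below); it also sidesteps issue 12, because the texture is created and used on the same thread:

// Illustrative sketch: the callback thread only enqueues; one render thread
// owns all SDL objects and does every draw call.
#include <atomic>
#include <chrono>
#include <cstdint>
#include <mutex>
#include <queue>
#include <thread>
#include <vector>

struct FrameData { std::vector<uint8_t> bytes; };   // hypothetical frame holder

static std::queue<FrameData> g_frameQueue;
static std::mutex g_frameMutex;
static std::atomic<bool> g_running{ true };

// Called from the capture/decode callback: never touch SDL here.
void OnFrame(const uint8_t* data, size_t size)
{
	std::lock_guard<std::mutex> lock(g_frameMutex);
	g_frameQueue.push(FrameData{ std::vector<uint8_t>(data, data + size) });
}

// Runs on a dedicated thread that created the SDL renderer and texture.
void RenderLoop()
{
	while (g_running) {
		FrameData frame;
		bool haveFrame = false;
		{
			std::lock_guard<std::mutex> lock(g_frameMutex);
			if (!g_frameQueue.empty()) {
				frame = std::move(g_frameQueue.front());
				g_frameQueue.pop();
				haveFrame = true;
			}
		}
		if (!haveFrame) {
			std::this_thread::sleep_for(std::chrono::milliseconds(2));
			continue;
		}
		// ... SDL_UpdateYUVTexture / SDL_RenderCopy / SDL_RenderPresent with frame.bytes ...
	}
}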

14. LAN audio/video demo: capture the camera with WebRTC, send H.264 over RTP/UDP, receive it back, decode with FFmpeg and render with SDL.

#include "CaptureVideo.h"
#include "stdafx.h"
#include "CaptureVideo.h"
#include <webrtc/base/scoped_ref_ptr.h>
#include <webrtc/video_decoder.h>
#include <webrtc/base/task_queue.h>
#include <webrtc/base/logging.h>
#include <webrtc/base/logsinks.h>
#include <webrtc/voice_engine/voice_engine_impl.h>
#include <webrtc/call.h>
#include <webrtc/modules/audio_coding/codecs/builtin_audio_decoder_factory.h>
#include <webrtc/system_wrappers/include/critical_section_wrapper.h>
#include <webrtc/system_wrappers/include/rw_lock_wrapper.h>
#include <webrtc/system_wrappers/include/event_wrapper.h>
#include <webrtc/config.h>
#include <webrtc/media/engine/webrtcvoe.h>
#include <webrtc/base/asyncpacketsocket.h>
#include <webrtc/video_encoder.h>
#include <webrtc/modules/video_coding/codec_database.h>
#include <webrtc/test/frame_generator_capturer.h>
#include <webrtc/modules/video_capture/video_capture_factory.h>
#include <webrtc/modules/video_capture/video_capture.h>
#include <webrtc/test/channel_transport/channel_transport.h>
#include "webrtc/modules/video_capture/video_capture.h"
#include "webrtc/modules/video_capture/video_capture_defines.h"
#include "webrtc/modules/video_capture/video_capture_factory.h"
#include "webrtc/media/engine/webrtcvideoframefactory.h"
#include "webrtc/media/engine/webrtcvideocapturer.h"
#include "webrtc/system_wrappers/include/clock.h"
#include "SDL.h"
#include "SDL_thread.h"
#include <string>
#include <queue>	// for the quVideoBuff frame queue below
#include <thread>
#include "MFCShowVideoDlg.h"
#ifdef __cplusplus
extern "C"
{
#endif // !__cplusplus
#include "libavutil/opt.h"
#include "libavutil/imgutils.h"
#include "libavformat/avformat.h"
#include "libavformat/avio.h"
#include "libavcodec/avcodec.h"
#include "libswscale/swscale.h"
#include "libswresample/swresample.h"//包含头文件
#include "libavutil/imgutils.h"
#include "libavutil/time.h"
#include "libavutil/channel_layout.h"
#include "libavutil/common.h"
#include "libavutil/frame.h"
#include "libavutil/samplefmt.h"
#ifdef __cplusplus
}
#endif // !__cplusplus
using namespace webrtc;
using namespace test;
using namespace std;


#pragma comment(lib,"legacy_stdio_definitions.lib")

// Shim for prebuilt libraries (e.g. FFmpeg/SDL binaries) that still reference
// the legacy VC runtime symbol __iob_func, removed from VS2015's UCRT.
extern "C" { FILE __iob_func[3] = { *stdin,*stdout,*stderr }; }

CMFCShowVideoDlg *dlg = NULL;

VoiceEngine* m_voe = NULL;
VoEBase* base1 = NULL;
VoECodec* codec = NULL;
VoEVolumeControl* volume = NULL;
VoERTP_RTCP* rtp_rtcp = NULL;
VoEAudioProcessing* apm = NULL;
VoENetwork* netw = NULL;
VoEFile* file = NULL;
VoEVideoSync* vsync = NULL;
VoEHardware* hardware = NULL;
VoEExternalMedia* xmedia = NULL;
VoENetEqStats* neteqst = NULL;
Call* _call = nullptr;

int cameraId = 0;
string strCameraUniqueName = "";

VideoSendStream *videoSendStream = NULL;
VideoCaptureInput*input = NULL;

VideoReceiveStream *videoReceiveStream = NULL;

Clock* clock_ = Clock::GetRealTimeClock();

// Offset between the NTP clock and the internal clock, used below to stamp captured frames.
int64_t delta_ntp_internal_ms_(clock_->CurrentNtpInMilliseconds() - clock_->TimeInMilliseconds());

string utf8ToGbk(const char *src_str)
{
	int len = MultiByteToWideChar(CP_UTF8, 0, src_str, -1, NULL, 0);
	wchar_t *wszGBK = new wchar_t[len + 1];
	memset(wszGBK, 0, len * 2 + 2);
	MultiByteToWideChar(CP_UTF8, 0, src_str, -1, wszGBK, len);
	len = WideCharToMultiByte(CP_ACP, 0, wszGBK, -1, NULL, 0, NULL, NULL);
	char *szGBK = new char[len + 1];
	memset(szGBK, 0, len + 1);
	WideCharToMultiByte(CP_ACP, 0, wszGBK, -1, szGBK, len, NULL, NULL);
	string strTemp(szGBK);
	if (wszGBK) delete[] wszGBK;
	if (szGBK) delete[] szGBK;
	return strTemp;
}

// SDL objects for the local and remote video windows
SDL_Texture* Localtexture;
int LocalVideowidth, LocalVideoheight;
SDL_Window *LocalVideowin = NULL;
SDL_Renderer *LocalVideorenderer = NULL;
SDL_Rect LocalVideorect;

SDL_Texture* Remotetexture;
int RemoteVideowidth, RemoteVideoheight;
SDL_Window *RemoteVideowin = NULL;
SDL_Renderer *RemoteVideorenderer = NULL;
SDL_Rect RemoteVideorect;

const AVCodec *h264codec;
AVCodecParserContext *parser;
AVCodecContext *c = NULL;
AVFrame *frame;
AVPacket *pkt;
queue<uint8_t *> quVideoBuff;
void close()
{
	if (frame)
	{
		av_frame_free(&frame);
	}
	if (pkt)
	{
		av_packet_free(&pkt);
	}
	// Close the codec
	if (c)
	{
		avcodec_close(c);
	}
	if (parser)
	{
		av_parser_close(parser);
	}
	// Destroy in reverse creation order: texture, renderer, then window.
	if (Remotetexture)
	{
		SDL_DestroyTexture(Remotetexture);
	}
	if (RemoteVideorenderer)
	{
		SDL_DestroyRenderer(RemoteVideorenderer);
	}
	if (RemoteVideowin)
	{
		SDL_DestroyWindow(RemoteVideowin);
	}

	if (Localtexture)
	{
		SDL_DestroyTexture(Localtexture);
	}
	if (LocalVideorenderer)
	{
		SDL_DestroyRenderer(LocalVideorenderer);
	}
	if (LocalVideowin)
	{
		SDL_DestroyWindow(LocalVideowin);
	}

	SDL_Quit();
}
FILE *fp_yuv;
bool IsFirstReceive = true;
static void decode(AVCodecContext *dec_ctx, AVFrame *frame1, AVPacket *pkt)
{
	int ret;

	ret = avcodec_send_packet(dec_ctx, pkt);
	if (ret < 0) {
		fprintf(stderr, "Error sending a packet for decoding\n");
		exit(1);
	}

	while (ret >= 0) {
		ret = avcodec_receive_frame(dec_ctx, frame1);
		if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF)
			return;
		else if (ret < 0) {
			fprintf(stderr, "Error during decoding\n");
			exit(1);
		}
		//int y_size = frame1->width*frame1->height;
		//fwrite(frame1->data[0], 1, y_size, fp_yuv);    //Y   
		//fwrite(frame1->data[1], 1, y_size / 4, fp_yuv);  //U  
		//fwrite(frame1->data[2], 1, y_size / 4, fp_yuv);  //V 	
		// (Re)create the window, renderer and texture the first time a frame
		// arrives or whenever the frame size changes.
		if ((RemoteVideoheight != frame1->height) || (RemoteVideowidth != frame1->width))
		{
			IsFirstReceive = false;
			// 2.2 Create the window from the dialog's picture control
			RemoteVideowidth = frame1->width;
			RemoteVideoheight = frame1->height;
			RemoteVideowin = SDL_CreateWindowFrom(dlg->GetDlgItem(IDC_STATIC1)->GetSafeHwnd());
			if (!RemoteVideowin) {
				SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, "Failed to create window by SDL");
				return;
			}
			// 2.3 Create the renderer
			RemoteVideorenderer = SDL_CreateRenderer(RemoteVideowin, -1, 0);
			if (!RemoteVideorenderer) {
				SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, "Failed to create Renderer by SDL");
				// FFmpeg resources should be released here as well
				return;
			}
			// 2.4 Create the texture
			Remotetexture = SDL_CreateTexture(RemoteVideorenderer,
				SDL_PIXELFORMAT_IYUV,
				SDL_TEXTUREACCESS_STREAMING,
				RemoteVideowidth,
				RemoteVideoheight);
			RemoteVideorect.x = 0;
			RemoteVideorect.y = 0;
			RemoteVideorect.w = RemoteVideowidth;
			RemoteVideorect.h = RemoteVideoheight;
		}

		SDL_UpdateYUVTexture(Remotetexture, NULL,
			frame1->data[0], frame1->linesize[0],
			frame1->data[1], frame1->linesize[1],
			frame1->data[2], frame1->linesize[2]);
		SDL_RenderClear(RemoteVideorenderer);
		SDL_RenderCopy(RemoteVideorenderer, Remotetexture, NULL, &RemoteVideorect);
		SDL_RenderPresent(RemoteVideorenderer);
	}
}
void H264De(const uint8_t *data, size_t data_size)
{
	int ret;

	while (data_size > 0) {
		/*pkt = av_packet_alloc();
		frame = av_frame_alloc();*/

		ret = av_parser_parse2(parser, c, &pkt->data, &pkt->size,
			data, data_size, AV_NOPTS_VALUE, AV_NOPTS_VALUE, 0);
		if (ret < 0) {
			fprintf(stderr, "Error while parsing\n");
			exit(1);
		}
		data += ret;
		data_size -= ret;

		if (pkt->size)
			decode(c, frame, pkt);
		/*av_packet_free(&pkt);
		av_frame_free(&frame);*/
	}
}

std::thread			*m_pVideoThread = NULL;
int g_videobuffSize;
bool bIsStartDecode = true;
void VideoReadThread(void *param)
{
	uint8_t *data;
	int64_t firsttime;
	int64_t lasttime;
	while (bIsStartDecode)
	{
		firsttime = av_gettime();
		if (quVideoBuff.empty())
		{
			Sleep(2);
			continue;
		}
		data = quVideoBuff.front();
		H264De(data, g_videobuffSize);
		quVideoBuff.pop();
		free(data);
		lasttime = av_gettime();

		// Pace the loop to roughly one frame per 63333 us: sleep away whatever
		// is left of the interval when the frame was handled faster than that.
		int64_t elapsed = lasttime - firsttime;
		if (elapsed < 63333)
		{
			av_usleep(63333 - elapsed);
		}
	}
}

/*
Captured video frames are delivered through the VideoCaptureDataCallback::OnIncomingCapturedFrame callback.
*/
class CameraCaptureCallback : public VideoCaptureDataCallback {
public:
	CameraCaptureCallback()
	{
	}

	~CameraCaptureCallback()
	{

	}

	virtual void OnIncomingCapturedFrame(const int32_t id,
		const VideoFrame& videoFrame)
	{
		//printf("width:%d height:%d ntp_time_ms:%u render_time_ms:%u rotation:%d  %d \n", videoFrame.width(), videoFrame.height(), videoFrame.ntp_time_ms(), videoFrame.render_time_ms(), videoFrame.rotation());

		VideoFrame incoming_frame = videoFrame;

		int64_t current_time = clock_->TimeInMilliseconds();
		incoming_frame.set_render_time_ms(current_time);

		int64_t capture_ntp_time_ms;
		if (videoFrame.ntp_time_ms() != 0) {
			capture_ntp_time_ms = videoFrame.ntp_time_ms();
		}
		else if (videoFrame.render_time_ms() != 0) {
			capture_ntp_time_ms = videoFrame.render_time_ms() + delta_ntp_internal_ms_;
		}
		else {
			capture_ntp_time_ms = current_time + delta_ntp_internal_ms_;
		}
		incoming_frame.set_ntp_time_ms(capture_ntp_time_ms);

		incoming_frame.set_timestamp(
			90 * static_cast<uint32_t>(incoming_frame.ntp_time_ms()));

		rtc::scoped_refptr<webrtc::VideoFrameBuffer> vfb = videoFrame.video_frame_buffer();
		// Copy the captured frame into the SDL texture
		SDL_UpdateYUVTexture(Localtexture, NULL,
			vfb.get()->DataY(), vfb.get()->StrideY(),
			vfb.get()->DataU(), vfb.get()->StrideU(),
			vfb.get()->DataV(), vfb.get()->StrideV());

		// Refresh the SDL display
		SDL_RenderClear(LocalVideorenderer);
		SDL_RenderCopy(LocalVideorenderer, Localtexture, NULL, &LocalVideorect);
		SDL_RenderPresent(LocalVideorenderer);

		// Forward the captured frame to the video send stream
		if (input)
			input->IncomingCapturedFrame(incoming_frame);
	}

	virtual void OnCaptureDelayChanged(const int32_t id,
		const int32_t delay)
	{

	}

};

class MyUdpTransport : public UdpTransport {
public:
	MyUdpTransport() {}
	~MyUdpTransport() {}

	virtual int32_t InitializeSendSockets(const char* ipAddr,
		const uint16_t rtpPort,
		const uint16_t rtcpPort = 0)
	{
		return 0;
	}


};

class MyTransport : public Transport {
public:
	MyTransport()
	{

	}

	~MyTransport()
	{

	}

	virtual bool SendRtp(const uint8_t* packet,
		size_t length,
		const PacketOptions& options)
	{
		return true;
	}

	virtual bool SendRtcp(const uint8_t* packet, size_t length)
	{
		return true;
	}
};

class MySendUdpTransportData : public UdpTransportData {
public:
	virtual ~MySendUdpTransportData() {};

	virtual void IncomingRTPPacket(const int8_t* incomingRtpPacket,
		const size_t rtpPacketLength,
		const char* fromIP,
		const uint16_t fromPort)
	{

	}

	virtual void IncomingRTCPPacket(const int8_t* incomingRtcpPacket,
		const size_t rtcpPacketLength,
		const char* fromIP,
		const uint16_t fromPort)
	{

	}
};

/*
All RTP and RTCP packets received for the call should be fed into this PacketReceiver.
*/
class MyRecvUdpTransportData : public UdpTransportData {
public:
	virtual ~MyRecvUdpTransportData() {};

	virtual void IncomingRTPPacket(const int8_t* incomingRtpPacket,
		const size_t rtpPacketLength,
		const char* fromIP,
		const uint16_t fromPort)
	{
		if (_call)
		{
			webrtc::PacketTime pt;
			_call->Receiver()->DeliverPacket(MediaType::VIDEO, (const uint8_t *)incomingRtpPacket, rtpPacketLength, pt);
		}
	}

	virtual void IncomingRTCPPacket(const int8_t* incomingRtcpPacket,
		const size_t rtcpPacketLength,
		const char* fromIP,
		const uint16_t fromPort)
	{
		if (_call)
		{
			webrtc::PacketTime pt;
			_call->Receiver()->DeliverPacket(MediaType::VIDEO, (const uint8_t *)incomingRtcpPacket, rtcpPacketLength, pt);
		}
	}
};

class MyEncodedFrameCallback : public  EncodedFrameObserver {
public:
	virtual ~MyEncodedFrameCallback() {}

	virtual void EncodedFrameCallback(const EncodedFrame& encoded_frame)
	{
		// Decode the still-encoded frame right here. Per item 13 above this is
		// fragile; the commented lines show the queue-based alternative where a
		// separate thread does the decoding and rendering.
		H264De(encoded_frame.data_, encoded_frame.length_);
		//uint8_t *g_videoBuff = (uint8_t*)malloc(encoded_frame.length_);
		//g_videobuffSize = encoded_frame.length_;
		//memcpy(g_videoBuff, encoded_frame.data_, encoded_frame.length_);
		//quVideoBuff.push(g_videoBuff);
		printf("EncodedFrameCallback length:%d type:%d \n", (int)encoded_frame.length_, (int)encoded_frame.frame_type_);
	}

	virtual void OnEncodeTiming(int64_t capture_ntp_ms, int encode_duration_ms) {}
};

uint8_t* m_uBuffer = NULL;
// Receives the decoded YUV frames (pre_render callback)
class MyI420FrameCallback : public I420FrameCallback {
public:
	virtual ~MyI420FrameCallback() {}
	// This function is called with a I420 frame allowing the user to modify the
	// frame content.
	virtual void FrameCallback(VideoFrame* video_frame)
	{
		// Disabled alternative: render the decoded frame here (pre_render path).
		//if (IsFirstReceive)
		//{
		//	IsFirstReceive = false;
		//	// 2.2 Create the window
		//	RemoteVideowidth = video_frame->width();
		//	RemoteVideoheight = video_frame->height();
		//	RemoteVideowin = SDL_CreateWindowFrom(dlg->GetDlgItem(IDC_STATIC1)->GetSafeHwnd());
		//	if (!RemoteVideowin) {
		//		SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, "Failed to create window by SDL");
		//		return;
		//	}
		//	// 2.3 Create the renderer
		//	RemoteVideorenderer = SDL_CreateRenderer(RemoteVideowin, -1, 0);
		//	if (!RemoteVideorenderer) {
		//		SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, "Failed to create Renderer by SDL");
		//		// FFmpeg resources should be released here as well
		//		return;
		//	}
		//	// 2.4 Create the texture
		//	Remotetexture = SDL_CreateTexture(RemoteVideorenderer,
		//		SDL_PIXELFORMAT_IYUV,
		//		SDL_TEXTUREACCESS_STREAMING,
		//		RemoteVideowidth,
		//		RemoteVideoheight);
		//	RemoteVideorect.x = 0;
		//	RemoteVideorect.y = 0;
		//	RemoteVideorect.w = RemoteVideowidth;
		//	RemoteVideorect.h = RemoteVideoheight;

		//	//m_uBuffer = (uint8_t*)malloc(RemoteVideowidth*RemoteVideoheight * 3 / 4);
		//}
		// SDL rendering of the decoded frame
		//rtc::scoped_refptr<webrtc::VideoFrameBuffer> vfb = video_frame->video_frame_buffer();

		// Copy the frame into the SDL texture
		//SDL_UpdateYUVTexture(Remotetexture, NULL,
		//	vfb.get()->DataY(), vfb.get()->StrideY(),
		//	vfb.get()->DataU(), vfb.get()->StrideU(),
		//	vfb.get()->DataV(), vfb.get()->StrideV());
		//SDL_UpdateTexture(Remotetexture, NULL, m_uBuffer, RemoteVideowidth*RemoteVideoheight * 3 / 4);
		// Refresh the SDL display
		//SDL_RenderClear(RemoteVideorenderer);
		//SDL_RenderCopy(RemoteVideorenderer, Remotetexture, NULL, &RemoteVideorect);
		//SDL_RenderPresent(RemoteVideorenderer);
		//printf("FrameCallback width:%d height:%d timestamp:%u\n ", video_frame->width(), video_frame->height(), video_frame->timestamp());
	}
};

CameraCaptureCallback *callback = new CameraCaptureCallback();
rtc::scoped_refptr<webrtc::VideoCaptureModule> module = NULL;
webrtc::VideoCodec _videoCodec;

MyTransport myTransport;
MySendUdpTransportData mySendUdpTransportData;
MyRecvUdpTransportData myRecvUdpTransportData;
MyEncodedFrameCallback myEncodeFrameCallback;
MyI420FrameCallback myI420FrameCallback;

void initdecode()
{
	av_register_all();
	avcodec_register_all();
	h264codec = avcodec_find_decoder(AV_CODEC_ID_H264);
	if (!h264codec) {
		fprintf(stderr, "Codec not found\n");
		exit(1);
	}
	parser = av_parser_init(h264codec->id);
	if (!parser) {
		fprintf(stderr, "parser not found\n");
		exit(1);
	}
	c = avcodec_alloc_context3(h264codec);
	if (!c) {
		fprintf(stderr, "Could not allocate video codec context\n");
		exit(1);
	}
	if (avcodec_open2(c, h264codec, NULL) < 0) {
		fprintf(stderr, "Could not open codec\n");
		exit(1);
	}
	pkt = av_packet_alloc();
	frame = av_frame_alloc();
}

void CaptureVideo::testVideoCap()
{
	initdecode();

	//m_pVideoThread = new std::thread(VideoReadThread, this);

	VideoCaptureModule::DeviceInfo* deviceInfo = VideoCaptureFactory::CreateDeviceInfo(0);

	int nNum = deviceInfo->NumberOfDevices();

	char deviceNameUTF8[128] = { 0 };
	char deviceUniqueIdUTF8[128] = { 0 };

	for (int i = 0; i < 1; i++)
	{
		if (deviceInfo->GetDeviceName(i, deviceNameUTF8, 128, deviceUniqueIdUTF8, 128, NULL, 0) == 0)
		{
			printf("camera: id:%d name:%s guid:%s \n", i, deviceNameUTF8, deviceUniqueIdUTF8);
		}
	}

	int32_t width;
	int32_t height;
	int32_t maxFPS;
	int32_t expectedCaptureDelay;
	RawVideoType rawType;
	VideoCodecType codecType;
	VideoCaptureCapability capability;

	int numOfCapabilyty = deviceInfo->NumberOfCapabilities(deviceUniqueIdUTF8);
	for (int i = 0; i < 1; i++)
	{
		deviceInfo->GetCapability(deviceUniqueIdUTF8, i, capability);
		printf("       capabilityId:%d  width:%d height:%d maxFPS:%d expectedCaptureDelay:%d rawType:%d codecType:%d \n",
			i, capability.width, capability.height, capability.maxFPS, capability.expectedCaptureDelay, capability.rawType, capability.codecType);
	}

	delete deviceInfo;

	LocalVideowidth = capability.width;
	LocalVideoheight = capability.height;

	// 2. SDL initialization
	// 2.1 Init
	if (SDL_Init(SDL_INIT_VIDEO | SDL_INIT_AUDIO | SDL_INIT_TIMER)) {
		SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, "Could not initialize SDL - %s\n", SDL_GetError());
		return;
	}
	// 2.2 Create the window from the dialog's picture control
	LocalVideowin = SDL_CreateWindowFrom(dlg->GetDlgItem(IDC_STATIC2)->GetSafeHwnd());
	if (!LocalVideowin) {
		SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, "Failed to create window by SDL");
		return;
	}
	// 2.3 Create the renderer
	LocalVideorenderer = SDL_CreateRenderer(LocalVideowin, -1, 0);
	if (!LocalVideorenderer) {
		SDL_LogError(SDL_LOG_CATEGORY_APPLICATION, "Failed to create Renderer by SDL");
		// FFmpeg resources should be released here as well
		return;
	}
	// 2.4 Create the texture
	Localtexture = SDL_CreateTexture(LocalVideorenderer,
		SDL_PIXELFORMAT_IYUV,
		SDL_TEXTUREACCESS_STREAMING,
		LocalVideowidth,
		LocalVideoheight);
	LocalVideorect.x = 0;
	LocalVideorect.y = 0;
	LocalVideorect.w = LocalVideowidth;
	LocalVideorect.h = LocalVideoheight;

	module = VideoCaptureFactory::Create(0, deviceUniqueIdUTF8);
	if (module)
	{
		module->RegisterCaptureDataCallback(*callback);
		module->StartCapture(capability);// start capturing frames from the camera
	}

	// Transport module
	Call::Config callConfig;
	_call = Call::Create(callConfig);

	// Initialize the send-side transport. The UDP sockets come in pairs: a
	// local receive port on this side plus the remote endpoint to send to.
	uint8_t socket_threads = 1;
	int return_value;
	UdpTransport*sendSocket_transport_ = UdpTransport::Create(1, socket_threads);
	static const int kNumReceiveSocketBuffers = 500;
	return_value = sendSocket_transport_->InitializeReceiveSockets(&mySendUdpTransportData, 5008);// local RTP receive port
	if (return_value == 0) {
		sendSocket_transport_->StartReceiving(kNumReceiveSocketBuffers);
	}
	sendSocket_transport_->InitializeSendSockets("192.168.36.98", 4008);// remote IP and RTP port

	// Video send stream configuration
	VideoSendStream::Config streamConfig(sendSocket_transport_);
	streamConfig.encoder_settings.payload_name = "H264";// "H264" or "VP8"
	streamConfig.encoder_settings.payload_type = 121;
	streamConfig.encoder_settings.encoder = webrtc::VideoEncoder::Create(VideoEncoder::kH264);// kH264 or kVp8
	streamConfig.rtp.ssrcs.push_back(1);
	//streamConfig.rtp.ssrcs.push_back(2);
	//streamConfig.rtp.ssrcs.push_back(3);

	// Video encoder configuration
	webrtc::VideoEncoderConfig encodeConfig;
	VCMCodecDataBase::Codec(webrtc::kVideoCodecH264, &_videoCodec);// kVideoCodecVP8 or kVideoCodecH264
	//encodeConfig.encoder_specific_settings = new rtc::RefCountedObject<webrtc::VideoEncoderConfig::Vp8EncoderSpecificSettings>
	encodeConfig.content_type = VideoEncoderConfig::ContentType::kRealtimeVideo;

	VideoStream videoStream;

	// Lower-resolution streams (disabled):
	//videoStream.width = 320;
	//videoStream.height = 180;
	//videoStream.max_framerate = 30;
	//videoStream.min_bitrate_bps = 50000;
	//videoStream.target_bitrate_bps = videoStream.max_bitrate_bps = 150000;
	//videoStream.max_qp = 56;
	//encodeConfig.streams.push_back(videoStream);

	//videoStream.width = 640;
	//videoStream.height = 360;
	//videoStream.max_framerate = 30;
	//videoStream.min_bitrate_bps = 200000;
	//videoStream.target_bitrate_bps = videoStream.max_bitrate_bps = 450000;
	//videoStream.max_qp = 56;
	//encodeConfig.streams.push_back(videoStream);

	videoStream.width = 1280;
	videoStream.height = 720;
	videoStream.max_framerate = 30;
	videoStream.min_bitrate_bps = 700000;
	videoStream.target_bitrate_bps = videoStream.max_bitrate_bps = 1500000;
	videoStream.max_qp = 56;
	encodeConfig.streams.push_back(videoStream);

	// Create the video send stream from the transport and start it
	videoSendStream = _call->CreateVideoSendStream(streamConfig, encodeConfig);
	input = videoSendStream->Input();
	videoSendStream->Start();

	// Receive side
	UdpTransport*receiveSocket_transport_ = UdpTransport::Create(1, socket_threads);
	return_value = receiveSocket_transport_->InitializeReceiveSockets(&myRecvUdpTransportData, 4008);
	if (return_value == 0) {
		receiveSocket_transport_->StartReceiving(kNumReceiveSocketBuffers);
	}
	receiveSocket_transport_->InitializeSendSockets("192.168.36.98", 5008);

	// Receive stream configuration
	webrtc::VideoReceiveStream::Config receiveStreamConfig(receiveSocket_transport_);
	receiveStreamConfig.rtp.remote_ssrc = 1;// SSRC (stream identifier) to receive
	receiveStreamConfig.rtp.local_ssrc = 2;// local SSRC used when sending RTCP (e.g. receiver reports)

	// Decoder for the received video
	webrtc::VideoReceiveStream::Decoder decoder;
	decoder.payload_name = "H264";// "H264" or "VP8"
	decoder.payload_type = 121;// custom payload type; must match the sender
	decoder.decoder = VideoDecoder::Create(webrtc::VideoDecoder::DecoderType::kH264);// kH264 or kVp8
	receiveStreamConfig.decoders.push_back(decoder);
	receiveStreamConfig.rtp.rtcp_xr.receiver_reference_time_report = true;
	receiveStreamConfig.rtp.nack.rtp_history_ms = 1000;
	receiveStreamConfig.pre_decode_callback = &myEncodeFrameCallback;// receives encoded frames before WebRTC's decoder (see item 7)
	//receiveStreamConfig.pre_render_callback = &myI420FrameCallback;// callback invoked for every decoded frame

	// Create the video receive stream from the transport and start it
	videoReceiveStream = _call->CreateVideoReceiveStream(std::move(receiveStreamConfig));
	videoReceiveStream->Start();
}
void CaptureVideo::ReciveVideoCap()
{

}
CaptureVideo::CaptureVideo(LPVOID lpParam)
{
	dlg = (CMFCShowVideoDlg *)lpParam;
}


CaptureVideo::~CaptureVideo()
{
}