本文展示如何使用 WebRTC、GStreamer 和 C++ 将摄像头流式传输到 HTML 页面。我们将使用 Boost 来处理信令。本教程结束时,您应该对 WebRTC 和 GStreamer 有一个基本的了解。
要求
- GStreamer 及其开发库
- Boost 库
- 用于构建项目的 CMake
- C++ 编译器
- 基本 C++ 知识
创建项目
首先,创建一个新目录存放项目文件:
mkdir webrtc-stream && cd webrtc-stream
创建一个名为“CMakeLists.txt”的新文件来构建已完成的项目,在其中填入以下内容:
cmake_minimum_required(VERSION 3.10)

# Set the project name and version
project(webrtc_server VERSION 1.0)

# Specify the C++ standard
set(CMAKE_CXX_STANDARD 14)
set(CMAKE_CXX_STANDARD_REQUIRED True)

# Find required packages
find_package(PkgConfig REQUIRED)
pkg_check_modules(GST REQUIRED gstreamer-1.0 gstreamer-webrtc-1.0 gstreamer-sdp-1.0)
# Boost.Json first shipped in Boost 1.75, so that is the real minimum version
# (requesting 1.65 with COMPONENTS json would fail confusingly on older Boost).
find_package(Boost 1.75 REQUIRED COMPONENTS system filesystem json)

# Include directories
include_directories(${GST_INCLUDE_DIRS} ${Boost_INCLUDE_DIRS})

# Linker search paths must be declared BEFORE the target is created —
# link_directories() only affects targets added after the call.
link_directories(${GST_LIBRARY_DIRS})

# Add the executable
add_executable(webrtc_server main.cpp)

# Link libraries
target_link_libraries(webrtc_server ${GST_LIBRARIES} Boost::system Boost::filesystem Boost::json)

# Set properties
set_target_properties(webrtc_server PROPERTIES
CXX_STANDARD 14
CXX_STANDARD_REQUIRED ON
)

# Print project info
message(STATUS "Project: ${PROJECT_NAME}")
message(STATUS "Version: ${PROJECT_VERSION}")
message(STATUS "C++ Standard: ${CMAKE_CXX_STANDARD}")
message(STATUS "Boost Libraries: ${Boost_LIBRARIES}")
message(STATUS "GStreamer Libraries: ${GST_LIBRARIES}")
上述步骤将所有必要的库链接在一起,以便将代码构建成可执行文件。
项目编码
开始为项目编写源代码,创建一个名为“main.cpp”的新文件,首先导入 GStreamer、WebRTC、Boost 和标准库所需的头文件:
#define GST_USE_UNSTABLE_API
#include <gst/gst.h>
#include <gst/webrtc/webrtc.h>
#include <boost/beast.hpp>
#include <boost/asio.hpp>
#include <boost/json.hpp>
#include <iostream>
#include <thread>
namespace beast = boost::beast;
namespace http = beast::http;
namespace websocket = beast::websocket;
namespace net = boost::asio;
using tcp = net::ip::tcp;
using namespace boost::json;
接下来,定义以后会用到的常量,主要是 STUN 服务器和服务器监听的端口:
// Public Google STUN server used for NAT traversal during ICE gathering.
// NOTE(review): this constant is never applied to webrtcbin anywhere in this
// file (no "stun-server" property is set) — only the browser side uses STUN.
#define STUN_SERVER "stun://stun.l.google.com:19302"
// TCP port the WebSocket signalling server listens on.
#define SERVER_PORT 8000
现在,我们将为 GStreamer loop 和 pipeline声明全局变量:
// Global GLib main loop plus the GStreamer pipeline and its webrtcbin element.
// Shared between the signalling thread and GStreamer signal callbacks;
// NOTE(review): being globals, concurrent WebSocket clients would overwrite
// each other's pipeline — this design assumes a single client at a time.
GMainLoop *loop;
GstElement *pipeline, *webrtcbin;
接下来创建处理每个事件的函数。第一个是将 ICE 候选发送到 WebSocket 客户端的函数:
// Wrap a locally-gathered ICE candidate in the JSON signalling format
// ({"type":"candidate","ice":{...}}) and push it to the WebSocket peer.
void send_ice_candidate_message(websocket::stream<tcp::socket>& ws, guint mlineindex, gchar *candidate)
{
    std::cout << "Sending ICE candidate: mlineindex=" << mlineindex << ", candidate=" << candidate << std::endl;

    // Build the nested payload in one expression each; boost::json::object
    // keeps insertion order, so the wire format is unchanged.
    object ice_json{
        {"candidate", candidate},
        {"sdpMLineIndex", mlineindex}
    };
    object msg_json{
        {"type", "candidate"},
        {"ice", ice_json}
    };

    const std::string payload = serialize(msg_json);
    ws.write(net::buffer(payload));
    std::cout << "ICE candidate sent" << std::endl;
}
下一个“on_answer_created”函数处理 WebRTC answer 的创建,并将其发送回客户端:
// Promise callback fired when webrtcbin has produced the SDP answer.
// Extracts the answer, applies it as the local description, and sends it
// back to the browser as {"type":"answer","sdp":...} over the WebSocket.
void on_answer_created(GstPromise *promise, gpointer user_data)
{
    std::cout << "Answer created" << std::endl;
    websocket::stream<tcp::socket>* ws = static_cast<websocket::stream<tcp::socket>*>(user_data);

    GstWebRTCSessionDescription *answer = NULL;
    const GstStructure *reply = gst_promise_get_reply(promise);
    gst_structure_get(reply, "answer", GST_TYPE_WEBRTC_SESSION_DESCRIPTION, &answer, NULL);
    // The answer has been copied out of the reply; release the promise
    // (it was leaked in the original code).
    gst_promise_unref(promise);

    // We don't need to wait on the set-local-description result, so interrupt
    // and drop the promise instead of leaking it.
    GstPromise *local_promise = gst_promise_new();
    g_signal_emit_by_name(webrtcbin, "set-local-description", answer, local_promise);
    gst_promise_interrupt(local_promise);
    gst_promise_unref(local_promise);

    // gst_sdp_message_as_text() returns a newly allocated string that the
    // caller must g_free() — previously leaked.
    gchar *sdp_text = gst_sdp_message_as_text(answer->sdp);
    object sdp_json;
    sdp_json["type"] = "answer";
    sdp_json["sdp"] = sdp_text;
    g_free(sdp_text);

    std::string text = serialize(sdp_json);
    ws->write(net::buffer(text));
    std::cout << "Local description set and answer sent: " << text << std::endl;
    gst_webrtc_session_description_free(answer);
}
下一个函数只是处理协商事件的占位符,此示例中不需要此事件:
// Handler for webrtcbin's "on-negotiation-needed" signal. The browser drives
// the offer/answer exchange in this example, so this is a logging-only stub.
void on_negotiation_needed(GstElement *webrtc, gpointer user_data)
{
    (void)webrtc;     // unused — negotiation is initiated by the remote peer
    (void)user_data;  // unused
    std::cout << "Negotiation needed" << std::endl;
}
“on_set_remote_description”函数设置远程描述并创建 answer:
// Promise callback fired once the remote offer has been applied.
// Kicks off answer creation; on_answer_created completes the handshake.
void on_set_remote_description(GstPromise *promise, gpointer user_data)
{
    std::cout << "Remote description set, creating answer" << std::endl;
    websocket::stream<tcp::socket>* ws = static_cast<websocket::stream<tcp::socket>*>(user_data);
    // The fulfilled promise carries no data we need — release it
    // (it was leaked in the original code).
    gst_promise_unref(promise);

    GstPromise *answer_promise = gst_promise_new_with_change_func(on_answer_created, ws, NULL);
    g_signal_emit_by_name(webrtcbin, "create-answer", NULL, answer_promise);
}
“on_ice_candidate”函数处理ICE候选事件并将其发送到WebSocket客户端:
// Handler for webrtcbin's "on-ice-candidate" signal: forwards each freshly
// gathered candidate to the browser via the WebSocket stored in user_data.
void on_ice_candidate(GstElement *webrtc, guint mlineindex, gchar *candidate, gpointer user_data)
{
    (void)webrtc;  // unused — the global webrtcbin is the only sender
    std::cout << "ICE candidate generated: mlineindex=" << mlineindex << ", candidate=" << candidate << std::endl;
    auto *ws = static_cast<websocket::stream<tcp::socket>*>(user_data);
    send_ice_candidate_message(*ws, mlineindex, candidate);
}
“handle_websocket_session”函数管理 WebSocket 连接,设置 GStreamer 管道并处理 SDP 和 ICE 消息:
// Run one WebSocket signalling session end-to-end: perform the WebSocket
// handshake, build a v4l2 -> VP8 -> RTP -> webrtcbin pipeline, start it, then
// loop reading JSON messages ("offer" and "candidate") until the client
// disconnects (the read throws, landing in the catch blocks).
//
// NOTE(review): `pipeline`/`webrtcbin` are globals and are never torn down
// here, so each new connection leaks the previous pipeline and concurrent
// clients clobber each other — this assumes one client at a time.
// NOTE(review): `ws` lives on this thread's stack and is handed to GStreamer
// callbacks by pointer; that is safe only while this function blocks in the
// read loop below.
void handle_websocket_session(tcp::socket socket)
{
try
{
// Take ownership of the accepted TCP socket and upgrade it to WebSocket.
websocket::stream<tcp::socket> ws{std::move(socket)};
ws.accept();
std::cout << "WebSocket connection accepted" << std::endl;
GstStateChangeReturn ret;
GError *error = NULL;  // NOTE(review): declared but never used
// Build the sending pipeline: webcam -> colorspace convert -> queue ->
// VP8 encode -> RTP payload -> webrtcbin.
pipeline = gst_pipeline_new("pipeline");
GstElement *v4l2src = gst_element_factory_make("v4l2src", "source");
GstElement *videoconvert = gst_element_factory_make("videoconvert", "convert");
GstElement *queue = gst_element_factory_make("queue", "queue");
GstElement *vp8enc = gst_element_factory_make("vp8enc", "encoder");
GstElement *rtpvp8pay = gst_element_factory_make("rtpvp8pay", "pay");
webrtcbin = gst_element_factory_make("webrtcbin", "sendrecv");
if (!pipeline || !v4l2src || !videoconvert || !queue || !vp8enc || !rtpvp8pay || !webrtcbin)
{
g_printerr("Not all elements could be created.\n");
return;
}
// Capture from the first video device; deadline=1 puts vp8enc in its
// fastest (real-time) encoding mode.
g_object_set(v4l2src, "device", "/dev/video0", NULL);
g_object_set(vp8enc, "deadline", 1, NULL);
gst_bin_add_many(GST_BIN(pipeline), v4l2src, videoconvert, queue, vp8enc, rtpvp8pay, webrtcbin, NULL);
if (!gst_element_link_many(v4l2src, videoconvert, queue, vp8enc, rtpvp8pay, NULL))
{
g_printerr("Elements could not be linked.\n");
gst_object_unref(pipeline);
return;
}
// webrtcbin exposes request pads ("sink_%u"); link the RTP payloader's
// static src pad to a freshly requested sink pad.
// NOTE(review): gst_element_get_request_pad() is deprecated in favor of
// gst_element_request_pad_simple(), and a request pad should normally be
// returned with gst_element_release_request_pad(), not just unreffed.
GstPad *rtp_src_pad = gst_element_get_static_pad(rtpvp8pay, "src");
GstPad *webrtc_sink_pad = gst_element_get_request_pad(webrtcbin, "sink_%u");
gst_pad_link(rtp_src_pad, webrtc_sink_pad);
gst_object_unref(rtp_src_pad);
gst_object_unref(webrtc_sink_pad);
// Hand the stack-allocated WebSocket to the signal handlers (see note above).
g_signal_connect(webrtcbin, "on-negotiation-needed", G_CALLBACK(on_negotiation_needed), &ws);
g_signal_connect(webrtcbin, "on-ice-candidate", G_CALLBACK(on_ice_candidate), &ws);
ret = gst_element_set_state(pipeline, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE)
{
g_printerr("Unable to set the pipeline to the playing state.\n");
gst_object_unref(pipeline);
return;
}
std::cout << "GStreamer pipeline set to playing" << std::endl;
// Signalling loop: block on reads until the peer closes the connection.
for (;;)
{
beast::flat_buffer buffer;
ws.read(buffer);
auto text = beast::buffers_to_string(buffer.data());
value jv = parse(text);
object obj = jv.as_object();
std::string type = obj["type"].as_string().c_str();
if (type == "offer")
{
std::cout << "Received offer: " << text << std::endl;
std::string sdp = obj["sdp"].as_string().c_str();
// Parse the browser's SDP offer and apply it as the remote description;
// on_set_remote_description then triggers answer creation.
// NOTE(review): the return of gst_sdp_message_new_from_text is unchecked.
GstSDPMessage *sdp_message;
gst_sdp_message_new_from_text(sdp.c_str(), &sdp_message);
GstWebRTCSessionDescription *offer = gst_webrtc_session_description_new(GST_WEBRTC_SDP_TYPE_OFFER, sdp_message);
GstPromise *promise = gst_promise_new_with_change_func(on_set_remote_description, &ws, NULL);
g_signal_emit_by_name(webrtcbin, "set-remote-description", offer, promise);
// Safe to free here: set-remote-description took its own copy.
gst_webrtc_session_description_free(offer);
std::cout << "Setting remote description" << std::endl;
}
else if (type == "candidate")
{
std::cout << "Received ICE candidate: " << text << std::endl;
// Feed the browser's ICE candidate into webrtcbin.
object ice = obj["ice"].as_object();
std::string candidate = ice["candidate"].as_string().c_str();
guint sdpMLineIndex = ice["sdpMLineIndex"].as_int64();
g_signal_emit_by_name(webrtcbin, "add-ice-candidate", sdpMLineIndex, candidate.c_str());
std::cout << "Added ICE candidate" << std::endl;
}
}
}
catch (beast::system_error const& se)
{
// A normal client disconnect surfaces as websocket::error::closed — not an error.
if (se.code() != websocket::error::closed)
{
std::cerr << "Error: " << se.code().message() << std::endl;
}
}
catch (std::exception const& e)
{
std::cerr << "Exception: " << e.what() << std::endl;
}
}
下一个“start_server”函数初始化服务器,接受 TCP 连接并产生新线程来处理每个连接:
// Blocking accept loop: listen on SERVER_PORT and hand each incoming TCP
// connection to a detached worker thread running the signalling session.
void start_server()
{
    try
    {
        net::io_context ioc{1};
        tcp::acceptor acceptor{ioc, tcp::endpoint{tcp::v4(), SERVER_PORT}};
        while (true)
        {
            tcp::socket socket{ioc};
            acceptor.accept(socket);
            std::cout << "Accepted new TCP connection" << std::endl;
            // Each session owns its socket; detach so the acceptor never blocks on it.
            std::thread{handle_websocket_session, std::move(socket)}.detach();
        }
    }
    catch (std::exception const& e)
    {
        std::cerr << "Exception: " << e.what() << std::endl;
    }
}
最后我们只需要创建最终的主函数来初始化 GStreamer,启动服务器并运行主循环:
// Entry point: initialize GStreamer, run the signalling server on a worker
// thread, and drive the GLib main loop (needed by GStreamer's bus/signals).
int main(int argc, char *argv[])
{
    gst_init(&argc, &argv);
    loop = g_main_loop_new(NULL, FALSE);

    std::cout << "Starting WebRTC server" << std::endl;
    std::thread server_thread(start_server);

    // Blocks until g_main_loop_quit() is called elsewhere.
    g_main_loop_run(loop);
    server_thread.join();

    // The pipeline is only created once a WebSocket client connects; guard
    // against tearing down a NULL handle (previously caused GLib criticals
    // when the loop quit before any client arrived).
    if (pipeline)
    {
        gst_element_set_state(pipeline, GST_STATE_NULL);
        gst_object_unref(pipeline);
    }
    g_main_loop_unref(loop);
    std::cout << "WebRTC server stopped" << std::endl;
    return 0;
}
构建项目
要将上述源代码构建为可执行文件,首先创建一个名为 build 的新目录:
mkdir build && cd build
构建项目:
cmake ..
make
如果一切顺利,该项目应该可以成功构建,并且有一个可执行文件。
接下来需要创建一个页面来查看流。
创建前端
创建一个名为“public”的新目录,并在其中创建一个名为“index.html”的新 html 文件,并使用以下代码填充它:
<!DOCTYPE html>
<html>
<head>
<title>WebRTC Stream</title>
</head>
<body>
<!-- muted + playsinline are required for autoplay to work in most browsers -->
<video id="video" autoplay playsinline muted></video>
<script>
const video = document.getElementById('video');
const signaling = new WebSocket('ws://localhost:8000/ws');
let pc = new RTCPeerConnection({
    iceServers: [{urls: 'stun:stun.l.google.com:19302'}]
});

// Handle signalling messages from the server: the SDP answer and
// server-side ICE candidates.
signaling.onmessage = async (event) => {
    const data = JSON.parse(event.data);
    console.log('Received signaling message:', data);
    if (data.type === 'answer') {
        console.log('Setting remote description with answer');
        await pc.setRemoteDescription(new RTCSessionDescription(data));
    } else if (data.type === 'candidate') {
        console.log('Adding ICE candidate:', data.ice);
        await pc.addIceCandidate(new RTCIceCandidate(data.ice));
    }
};

// Forward locally-gathered ICE candidates to the server.
pc.onicecandidate = (event) => {
    if (event.candidate) {
        console.log('Sending ICE candidate:', event.candidate);
        signaling.send(JSON.stringify({
            type: 'candidate',
            ice: event.candidate
        }));
    }
};

// Attach the incoming video track to the <video> element.
// FIX: the original also called video.load() after play(); load() resets the
// media element and can abort the playback that was just started.
pc.ontrack = (event) => {
    console.log('Received track:', event);
    if (event.track.kind === 'video') {
        console.log('Attaching video track to video element');
        video.srcObject = event.streams[0];
        video.play().catch(error => {
            console.error('Error playing video:', error);
        });
    }
};

pc.oniceconnectionstatechange = () => {
    console.log('ICE connection state:', pc.iceConnectionState);
};
pc.onicegatheringstatechange = () => {
    console.log('ICE gathering state:', pc.iceGatheringState);
};
pc.onsignalingstatechange = () => {
    console.log('Signaling state:', pc.signalingState);
};

// Create a receive-only offer and send it to the server.
async function start() {
    pc.addTransceiver('video', {direction: 'recvonly'});
    const offer = await pc.createOffer();
    console.log('Created offer:', offer);
    await pc.setLocalDescription(offer);
    console.log('Set local description with offer');
    signaling.send(JSON.stringify({type: 'offer', sdp: pc.localDescription.sdp}));
}

// FIX: the original called start() immediately, which could throw
// InvalidStateError by sending while the WebSocket was still CONNECTING.
// Wait for the connection to open before sending the offer.
signaling.onopen = start;
</script>
</body>
</html>
以上代码是与信令服务器进行通信,并在接收到远程流时在视频 HTML 元素中播放视频。
现在可以实际运行该项目了!
运行项目
执行以下命令运行项目:
./webrtc_server
使用一个 python 模块运行 html 页面:
python3 -m http.server 9999
在浏览器输入 http://localhost:9999,页面加载后,即可在视频元素中看到摄像头画面。
以上教程向大家展示了如何使用原生 C++ 和 GStreamer 对摄像头进行流式处理,并在 HTML 页面中查看流。
源代码:https://github.com/ethand91/webrtc-gstreamer
作者:Ethan
本文来自作者投稿,版权归原作者所有。如需转载,请注明出处:https://www.nxrte.com/jishu/webrtc/49914.html