WebRTC-Based Remote Control

I'd wanted to build a remote-control tool for a while, and recently a small group chat I'm in was discussing it too, so I spent four or five days putting a demo together. The principle is shown in the figure below:

(Figure: WebRTC-based remote control)

The remote desktop is mapped onto the local desktop, so the remote machine can be operated directly from the local one. PC1 is the local machine and PC2 is the remote machine.

(Figure: WebRTC remote-control implementation principle)

1 Desktop Capture

Desktop capture calls the D3D11/DXGI desktop-duplication API to grab the remote desktop in real time; the frames are then encoded as H.265 and sent (an encoder sketch follows the capture listing below).


#include "d3d11_capture.h"
#include "video_input.h"
#include <iostream>
#include <stdio.h>
#include <assert.h>
#include <mmreg.h>
#include <windows.h>
#include <vector>
#include <d3d11.h>
#include <wrl.h>
#include <d3dcompiler.h>
#include <fstream> 
#include "av_register.h"
#include <iostream>
#include <chrono>
#include <tchar.h>

#pragma comment(lib, "d3d11.lib")
#pragma comment(lib, "dxgi.lib")
#pragma comment(lib, "d3dcompiler.lib")

static bool create_texture(void* ai)
{
    struct d3d11_capture_t* dc;
    dc = (struct d3d11_capture_t*)ai;
    // 1) Shareable default-usage texture, exported to other devices via GetSharedHandle().
    D3D11_TEXTURE2D_DESC desc = { 0 };
    desc.Width = dc->out_desc.ModeDesc.Width;
    desc.Height = dc->out_desc.ModeDesc.Height;
    desc.MipLevels = 1;
    desc.ArraySize = 1;
    desc.Format = DXGI_FORMAT_B8G8R8A8_UNORM;
    //desc.Format = DXGI_FORMAT_NV12;
    desc.BindFlags = D3D11_BIND_SHADER_RESOURCE;
    desc.SampleDesc.Count = 1;
    desc.Usage = D3D11_USAGE_DEFAULT;
    desc.CPUAccessFlags = 0;
    desc.MiscFlags = D3D11_RESOURCE_MISC_SHARED;

    HRESULT hr = dc->device->CreateTexture2D(&desc, nullptr, dc->shared_texture.GetAddressOf());
    if (FAILED(hr)) {
        printf("[D3D11ScreenCapture] Failed to create texture.\n");
        return false;
    }
   
    // 2) Staging copy the CPU can Map() to read back the BGRA pixels.
    desc.BindFlags = 0;
    desc.Usage = D3D11_USAGE_STAGING;
    desc.CPUAccessFlags = D3D11_CPU_ACCESS_READ;
    desc.MiscFlags = 0;

    hr = dc->device->CreateTexture2D(&desc, nullptr, dc->rgba_texture.GetAddressOf());
    if (FAILED(hr)) {
        printf("[D3D11ScreenCapture] Failed to create texture.\n");
        return false;
    }

    // 3) GDI-compatible render target, so the cursor/time overlay can be drawn with GDI.
    desc.Usage = D3D11_USAGE_DEFAULT;
    desc.CPUAccessFlags = 0;
    desc.BindFlags = D3D11_BIND_RENDER_TARGET;
    desc.MiscFlags = D3D11_RESOURCE_MISC_GDI_COMPATIBLE;

    hr = dc->device->CreateTexture2D(&desc, nullptr, dc->gdi_texture.GetAddressOf());
    if (FAILED(hr)) {
        printf("[D3D11ScreenCapture] Failed to create texture.\n");
        return false;
    }

    Microsoft::WRL::ComPtr<IDXGIResource> dxgi_resource;
    hr = dc->shared_texture->QueryInterface(__uuidof(IDXGIResource), reinterpret_cast<void**>(dxgi_resource.GetAddressOf()));
    if (FAILED(hr)) {
        printf("[D3D11ScreenCapture] Failed to query IDXGIResource interface from texture.\n");
        return false;
    }

    hr = dxgi_resource->GetSharedHandle(&dc->shared_handle_);
    if (FAILED(hr)) {
        printf("[D3D11ScreenCapture] Failed to get shared handle.\n");
        return false;
    }

    return true;
}
static bool initD3D11(void* ai) {
    struct d3d11_capture_t* dc;
    dc = (struct d3d11_capture_t*)ai;
    HRESULT hr = S_OK;
    D3D_FEATURE_LEVEL feature_level;
    int index = 0;
    Microsoft::WRL::ComPtr<IDXGIFactory> dxgi_factory;
    Microsoft::WRL::ComPtr<IDXGIAdapter> dxgi_adapter;
    Microsoft::WRL::ComPtr<IDXGIOutput>  dxgi_output;
    Microsoft::WRL::ComPtr<IDXGIOutput1> dxgi_output1;

    hr = D3D11CreateDevice(nullptr, D3D_DRIVER_TYPE_HARDWARE, nullptr, 0, nullptr, 0, D3D11_SDK_VERSION,
        dc->device.GetAddressOf(), &feature_level, dc->context.GetAddressOf());
    if (FAILED(hr)) {
        printf("[D3D11ScreenCapture] Failed to create d3d11 device.\n");
        return false;
    }

    hr = CreateDXGIFactory1(__uuidof(IDXGIFactory), (void**)dxgi_factory.GetAddressOf());
    if (FAILED(hr)) {
        printf("[D3D11ScreenCapture] Failed to create dxgi factory.\n");
        return false;
    }

    // Walk the adapters until one exposes the requested display output.
    while (dxgi_factory->EnumAdapters(index, dxgi_adapter.ReleaseAndGetAddressOf()) != DXGI_ERROR_NOT_FOUND)
    {
        if (dxgi_adapter->EnumOutputs(dc->display_index, dxgi_output.ReleaseAndGetAddressOf()) != DXGI_ERROR_NOT_FOUND
            && dxgi_output.Get() != nullptr) {
            break;
        }
        ++index;
    }

    if (dxgi_adapter.Get() == nullptr) {
        printf("[D3D11ScreenCapture] DXGI adapter not found.\n");
        return false;
    }

    if (dxgi_output.Get() == nullptr) {
        printf("[D3D11ScreenCapture] DXGI output not found.\n");
        return false;
    }


    hr = dxgi_output.Get()->QueryInterface(__uuidof(IDXGIOutput1), reinterpret_cast<void**>(dxgi_output1.GetAddressOf()));
    if (FAILED(hr)) {
        printf("[D3D11ScreenCapture] Failed to query interface dxgiOutput1.\n");
        return false;
    }

    hr = dxgi_output1->DuplicateOutput(dc->device.Get(), dc->output_duplication.GetAddressOf());
    if (FAILED(hr)) {
        printf("[D3D11ScreenCapture] Failed to get duplicate output.\n");
        return false;
    }
    dc->output_duplication->GetDesc(&dc->out_desc);

    if (!create_texture(dc)) {
        return false;
    }

    dc->width = dc->out_desc.ModeDesc.Width;
    dc->height = dc->out_desc.ModeDesc.Height;

    return true;
}
void cleanupD3D11(void* ai);

static int d3d11_capture_close(void* ai)
{
    struct d3d11_capture_t* dc;
    dc = (struct d3d11_capture_t*)ai;
    cleanupD3D11(dc);  // drop any COM references before destroying the struct
    delete dc;         // matches the value-initializing `new` in d3d11_capture_open
    return 0;
}

static void* d3d11_capture_open(void* window, int format, int width, int height, video_input_callback cb, void* param)
{
    // Value-initializing `new` (instead of malloc + memset) keeps the ComPtr and
    // std::thread members of d3d11_capture_t valid C++ objects.
    struct d3d11_capture_t* dc = new (std::nothrow) d3d11_capture_t();
    if (dc == NULL)
        return NULL;

    dc->cb = cb;
    dc->param = param;

    if (!initD3D11(dc)) {
        d3d11_capture_close(dc);
        return NULL;
    }
    return dc;
}


static void GetAdapters(std::vector<Microsoft::WRL::ComPtr<IDXGIAdapter1>>& a)
{
    Microsoft::WRL::ComPtr<IDXGIFactory1> df;
    CreateDXGIFactory1(__uuidof(IDXGIFactory1), (void**)&df);
    a.clear();
    if (!df)
        return;
    int L = 0;
    for (;;)
    {
        Microsoft::WRL::ComPtr<IDXGIAdapter1> lDxgiAdapter;
        df->EnumAdapters1(L, &lDxgiAdapter);
        if (!lDxgiAdapter)
            break;
        L++;
        a.push_back(lDxgiAdapter);
    }
    return;
}


void cleanupD3D11(void* ai)
{
    struct d3d11_capture_t* dc;
    dc = (struct d3d11_capture_t*)ai;
    dc->rgba_texture.Reset();
    dc->gdi_texture.Reset();
    dc->shared_texture.Reset();
    dc->output_duplication.Reset();
    dc->device.Reset();
    dc->context.Reset();
    dc->shared_handle_ = nullptr;
    memset(&dc->out_desc, 0, sizeof(dc->out_desc));
}


static void capture_frame(void* ai)
{
    struct d3d11_capture_t* dc;
    dc = (struct d3d11_capture_t*)ai;
    D3D11_MAPPED_SUBRESOURCE dsec = { 0 };

    HRESULT hr = dc->context->Map(dc->rgba_texture.Get(), 0, D3D11_MAP_READ, 0, &dsec);
    if (!FAILED(hr)) {
        if (dsec.pData != NULL) {
            int imagewidth = (int)dc->out_desc.ModeDesc.Width;
            int imageheight = (int)dc->out_desc.ModeDesc.Height;
            dc->imagesize = imagewidth * imageheight * 4;

            auto now = std::chrono::system_clock::now();
            auto duration = now.time_since_epoch();
            // Convert the timestamp to milliseconds (64-bit, so epoch millis don't overflow a 32-bit long)
            long long millis = std::chrono::duration_cast<std::chrono::milliseconds>(duration).count();

            dc->cb(dc->param, dsec.pData, dc->imagesize, imagewidth, imageheight, millis);
        }
        dc->context->Unmap(dc->rgba_texture.Get(), 0);
    }


    // Refresh the shared texture so consumers of shared_handle_ (e.g. a GPU encoder) see the new frame.
    dc->context->CopyResource(dc->shared_texture.Get(), dc->gdi_texture.Get());
}
static bool gdi_capture_rgb32(void* ai)
{
    struct d3d11_capture_t* dc;
    dc = (struct d3d11_capture_t*)ai;
    HDC hdcScreen = GetDC(HWND_DESKTOP);

   /* HWND hwnd = GetDesktopWindow();
    HDC hdcScreen = GetDC(hwnd);*/

    int width = GetDeviceCaps(hdcScreen, HORZRES);
    int height = GetDeviceCaps(hdcScreen, VERTRES);

    int rgb32Size = width * height * 4;

    std::vector<unsigned char> rgbBuffer(rgb32Size);

    HDC hdcMemory = CreateCompatibleDC(hdcScreen);
    HBITMAP bitmap = CreateCompatibleBitmap(hdcScreen, width, height);

    HGDIOBJ oldBitmap = SelectObject(hdcMemory, bitmap);
    BitBlt(hdcMemory, 0, 0, width, height, hdcScreen, 0, 0, SRCCOPY);

    GetBitmapBits(bitmap, rgb32Size, &rgbBuffer[0]);

    // Restore and free the GDI objects. hdcMemory came from CreateCompatibleDC,
    // so it is destroyed with DeleteDC (ReleaseDC is only for DCs from GetDC).
    SelectObject(hdcMemory, oldBitmap);
    DeleteDC(hdcMemory);
    ReleaseDC(HWND_DESKTOP, hdcScreen);
    DeleteObject(bitmap);

    auto now = std::chrono::system_clock::now();
    auto duration = now.time_since_epoch();
    // Convert the timestamp to milliseconds
    long long millis = std::chrono::duration_cast<std::chrono::milliseconds>(duration).count();

    dc->cb(dc->param, &rgbBuffer[0], rgb32Size, width, height, millis);
    return true;
}

static int direct_capture_acquire_frame(void* ai)
{
    struct d3d11_capture_t* dc;
    dc = (struct d3d11_capture_t*)ai;
    Microsoft::WRL::ComPtr<IDXGIResource> dxgi_resource;
    DXGI_OUTDUPL_FRAME_INFO frame_info;
    memset(&frame_info, 0, sizeof(DXGI_OUTDUPL_FRAME_INFO));
    HRESULT hr = DXGI_ERROR_INVALID_CALL;
    if (dc->output_duplication)
    {
        dc->output_duplication->ReleaseFrame();
        hr = dc->output_duplication->AcquireNextFrame(10, &frame_info, dxgi_resource.GetAddressOf());
    }

    if (FAILED(hr)) {
        if (hr == DXGI_ERROR_WAIT_TIMEOUT) {
            return -1;
        }else if (hr == DXGI_ERROR_INVALID_CALL
            || hr == DXGI_ERROR_ACCESS_LOST) {
        
            cleanupD3D11(dc);
            initD3D11(dc);
            gdi_capture_rgb32(dc);
            return -2;
        }
        return -3;
    }
    if (frame_info.AccumulatedFrames == 0 ||
        frame_info.LastPresentTime.QuadPart == 0) {
        // No image update, only cursor moved.
    }

    if (!dxgi_resource.Get()) {
        return -1;
    }

    Microsoft::WRL::ComPtr<ID3D11Texture2D> output_texture;
    hr = dxgi_resource->QueryInterface(__uuidof(ID3D11Texture2D), reinterpret_cast<void**>(output_texture.GetAddressOf()));
    if (FAILED(hr)) {
        return -1;
    }
    dc->context->CopyResource(dc->gdi_texture.Get(), output_texture.Get());
    Microsoft::WRL::ComPtr<IDXGISurface1> surface1;
    hr = dc->gdi_texture->QueryInterface(__uuidof(IDXGISurface1), reinterpret_cast<void**>(surface1.GetAddressOf()));
    if (FAILED(hr)) {
        return -1;
    }

    CURSORINFO cursor_info = { 0 };
    cursor_info.cbSize = sizeof(CURSORINFO);
    if (GetCursorInfo(&cursor_info) == TRUE) {
        if (cursor_info.flags & CURSOR_SHOWING) {
            auto cursor_position = cursor_info.ptScreenPos;
            HDC hdc;
            surface1->GetDC(FALSE, &hdc);
            // Draw the cursor into the GDI-compatible texture.
            DrawIconEx(hdc, cursor_position.x - dc->monitor.left, cursor_position.y - dc->monitor.top,
                cursor_info.hCursor, 0, 0, 0, 0, DI_NORMAL | DI_DEFAULTSIZE);

            // Draw the current local time onto the frame as an overlay.
            SYSTEMTIME sysTime;
            GetLocalTime(&sysTime);
            TCHAR timeString[9]; // buffer for "HH:MM:SS"
            swprintf(timeString, 9, _T("%02d:%02d:%02d"), sysTime.wHour, sysTime.wMinute, sysTime.wSecond);

            // Take the stock GUI font and enlarge it.
            HFONT hFont, hOldFont;
            LOGFONT lf;
            hFont = (HFONT)GetStockObject(DEFAULT_GUI_FONT); // any installed font works
            GetObject(hFont, sizeof(LOGFONT), &lf);
            lf.lfHeight = -24;  // e.g. set the size to 24
            HFONT hNewFont = CreateFontIndirect(&lf);

            // Select the new font, draw, then restore and free it.
            hOldFont = (HFONT)SelectObject(hdc, hNewFont);
            TextOut(hdc, 10, 10, timeString, (int)wcslen(timeString));
            SelectObject(hdc, hOldFont);
            DeleteObject(hNewFont);

            surface1->ReleaseDC(nullptr);
        }
    }

    dc->context->CopyResource(dc->rgba_texture.Get(), dc->gdi_texture.Get());
    capture_frame(dc);
    return 0;
}
static int d3d11_capture_start(void* ai)
{
    struct d3d11_capture_t* dc;
    dc = (struct d3d11_capture_t*)ai;
    dc->started = true;
    dc->thread.reset(new std::thread([dc] {
        while (dc->started) {
            std::this_thread::sleep_for(std::chrono::milliseconds(10));
            direct_capture_acquire_frame(dc);
        }
        }));
    return 0;
}

static int d3d11_capture_stop(void* ai)
{
    struct d3d11_capture_t* dc;
    dc = (struct d3d11_capture_t*)ai;

    dc->started = false;
    if (dc->thread) {
        dc->thread->join();
        dc->thread.reset();
    }
    cleanupD3D11(dc);
    return 0;
}
extern "C" int d3d11_capture_register()
{
    HMODULE hD3d11 = LoadLibraryEx(L"d3d11.dll", NULL, 0);
    static video_input_t ai;
    memset(&ai, 0, sizeof(ai));
    ai.open = d3d11_capture_open;
    ai.close = d3d11_capture_close;
    ai.start = d3d11_capture_start;
    ai.stop = d3d11_capture_stop;
    return av_set_class(AV_VIDEO_CAPTURE, "directcapture", &ai);
    return 0;
}
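
The capture callback above hands out raw BGRA frames plus a millisecond timestamp; the H.265 encoder itself is not shown in the article. As a minimal sketch, assuming FFmpeg's libavcodec/libswscale with libx265 enabled (the FFmpeg calls are real, but wiring them to the capture callback this way is an illustration, not the author's actual encoder):

extern "C" {
#include <libavcodec/avcodec.h>
#include <libswscale/swscale.h>
}

struct h265_encoder_t {
    AVCodecContext* ctx = nullptr;
    SwsContext* sws = nullptr;
    AVFrame* frame = nullptr;
    int64_t pts = 0;
};

static h265_encoder_t* h265_encoder_open(int width, int height, int fps)
{
    const AVCodec* codec = avcodec_find_encoder_by_name("libx265");
    if (!codec) return nullptr;

    h265_encoder_t* e = new h265_encoder_t();
    e->ctx = avcodec_alloc_context3(codec);
    e->ctx->width = width;
    e->ctx->height = height;
    e->ctx->time_base = { 1, fps };
    e->ctx->pix_fmt = AV_PIX_FMT_YUV420P;  // libx265 consumes planar YUV, not BGRA
    e->ctx->bit_rate = 4000000;
    if (avcodec_open2(e->ctx, codec, nullptr) < 0) { delete e; return nullptr; }

    // The duplication API delivers BGRA, so each frame is converted to YUV420P first.
    e->sws = sws_getContext(width, height, AV_PIX_FMT_BGRA,
                            width, height, AV_PIX_FMT_YUV420P,
                            SWS_BILINEAR, nullptr, nullptr, nullptr);
    e->frame = av_frame_alloc();
    e->frame->format = AV_PIX_FMT_YUV420P;
    e->frame->width = width;
    e->frame->height = height;
    av_frame_get_buffer(e->frame, 0);
    return e;
}

// Called from the video_input callback with one mapped BGRA frame.
static void h265_encode_bgra(h265_encoder_t* e, const uint8_t* bgra, int width, int height)
{
    const uint8_t* src[1] = { bgra };
    const int src_stride[1] = { width * 4 };
    sws_scale(e->sws, src, src_stride, 0, height, e->frame->data, e->frame->linesize);
    e->frame->pts = e->pts++;

    avcodec_send_frame(e->ctx, e->frame);
    AVPacket* pkt = av_packet_alloc();
    while (avcodec_receive_packet(e->ctx, pkt) == 0) {
        // pkt->data / pkt->size is one H.265 access unit; hand it to the WebRTC sender here.
        av_packet_unref(pkt);
    }
    av_packet_free(&pkt);
}

In production one would rather keep the frame on the GPU (e.g. hevc_nvenc reading from the shared texture created above) to avoid the staging copy, which is presumably what shared_handle_ is for.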

2 Mouse and Keyboard Events

On the local machine everything runs in the browser. Two designs were considered for carrying the input events:

  •  Wrap mouse/keyboard events in RTCP packets
  •  Wrap mouse/keyboard events in a private string format

The first is ruled out because native WebRTC exposes no interface for sending custom RTCP data; the only general-purpose transport it offers is the DataChannel, so the second approach is used (a sketch of the native-side channel setup follows this list).
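
For reference, a minimal sketch of opening such a channel on the native side (classic libwebrtc API; newer checkouts use CreateDataChannelOrError instead). The pc object and the offer/answer handshake are assumed to exist already, and the "KeyMouseCtrl" label simply mirrors the event name used in the JavaScript class below:

#include <api/peer_connection_interface.h>

// Assumes `pc` was created during the normal WebRTC signaling handshake.
rtc::scoped_refptr<webrtc::DataChannelInterface>
open_input_channel(rtc::scoped_refptr<webrtc::PeerConnectionInterface> pc)
{
    webrtc::DataChannelInit config;
    config.ordered = true;  // input events must replay in order
    return pc->CreateDataChannel("KeyMouseCtrl", &config);
}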

class KeyMouseCtrl {
    constructor(master, dom) {
        this.keyboardenable = true;
        this.mouseenable = true;
        this.bmousedown = 0;
        this.videoDom = dom;
        this.master_ = master;
        this.addKeyboardListeners();
        this.addGlobalMouseListeners();
        this.addVideoListeners();
    }

    addKeyboardListeners() {
        if (this.keyboardenable) {
            document.addEventListener("keydown", this.logKey.bind(this));
        }
    }

    addGlobalMouseListeners() {
        // "mousewheel"/"DOMMouseScroll" are legacy events kept for older browsers;
        // the video element itself uses the standard "wheel" event below.
        document.addEventListener("mousewheel", this.handleMouseWheelGlobal.bind(this));
        document.addEventListener("contextmenu", event => event.preventDefault());
        document.addEventListener("DOMMouseScroll", this.handleMouseWheelGlobal.bind(this));
    }

    addVideoListeners() {
        if (this.mouseenable) {
            const myPics = this.videoDom;
            myPics.addEventListener("mousemove", e => this.handleMouseMove(e));
            myPics.addEventListener("mouseup", e => this.handleMouseUp(e));
            myPics.addEventListener("mousedown", e => this.handleMouseDown(e));
            myPics.addEventListener("wheel", e => this.handleMouseWheel(e));
            myPics.addEventListener("click", e => e.preventDefault());
        }
    }

    logKey(e) {
        console.log("keyboard event", e.key, e.keyCode);
     
        const data = {
            type: "keyboard",
            data: {
                key: e.key,
                code: e.keyCode,
                shiftKey: e.shiftKey ? 1 : 0,
                ctrlKey: e.ctrlKey ? 1 : 0,
                altKey: e.altKey ? 1 : 0,
                metaKey: e.metaKey ? 1 : 0,
                isDown: this.bmousedown
            }
        };
        this.send(JSON.stringify(data));

        this.preventEventDefault(e);
    }

    handleMouseMove(e) {
        const boundRect = this.videoDom.getBoundingClientRect();
        console.log("mouse move", this.bmousedown, e.offsetX, e.offsetY);
        this.sendMouseEvent("MOUSEMOVE", e, boundRect);
    }

    handleMouseUp(e) {
        const boundRect = this.videoDom.getBoundingClientRect();
        this.bmousedown = 0;
        console.log("mouseup", this.bmousedown, e.offsetX, e.offsetY);
        this.sendMouseEvent("MOUSEUP", e, boundRect);
    }
    
    handleMouseDown(e) {
        const boundRect = this.videoDom.getBoundingClientRect();
        console.log("mousedown", e.offsetX, e.offsetY);
        this.bmousedown = 1;
        this.sendMouseEvent("MOUSEDOWN", e, boundRect);
    }

    handleMouseWheelGlobal(e) {
        console.log("mouse wheel global", e.wheelDelta || -e.detail * 40);
        this.preventEventDefault(e);
    }

    handleMouseWheel(e) {
        console.log("wheel", e.wheelDelta || -e.detail * 40);
        console.log(e.offsetX, e.offsetY);
        const boundRect = this.videoDom.getBoundingClientRect();
        this.sendMouseWheelEvent(e, boundRect);
        this.preventEventDefault(e);
    }
    sendMouseEvent(type, e, boundRect) {
        const data = {
            type,
            data: {
                isLeft: e.button === 0 ? 1 : 0,
                isMiddle: e.button === 1 ? 1 : 0,
                isRight: e.button === 2 ? 1 : 0,
                isDown: this.bmousedown,
                x: e.offsetX,
                y: e.offsetY,
                width: Math.round(boundRect.width),
                height: Math.round(boundRect.height)
            }
        };
        this.send(JSON.stringify(data));
    }

    sendMouseWheelEvent(e, boundRect) {
        const data = {
            type: "MOUSEWHEEL",
            data: {
                wheelDelta: e.wheelDelta || -e.detail * 40, // normalize legacy Firefox "detail" units to wheelDelta
                x: e.offsetX,
                y: e.offsetY,
                width: Math.round(boundRect.width),
                height: Math.round(boundRect.height)
            }
        };
        this.send(JSON.stringify(data));
    }

    preventEventDefault(event) {
        event.preventDefault();
        if (event.stopPropagation) event.stopPropagation();
        event.cancelBubble = true;
        event.returnValue = false;
        return false;
    }

    send(data) {
        // Forward the JSON string to the transport (the DataChannel) via the master's event bus.
        this.master_.events.emit("KeyMouseCtrl", data);
    }
}
export default KeyMouseCtrl;

3 Handling Keyboard and Mouse Events on the Receiving End
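
The dispatch code below assumes the DataChannel payload has already been parsed into a JSON object named root; the is_null()/is_string()/is_number() calls match nlohmann::json's API. A sketch of that missing step, hooking a native webrtc::DataChannelObserver to the parser (handle_input_event is a hypothetical wrapper around the code below), might look like:

#include <nlohmann/json.hpp>
#include <api/data_channel_interface.h>

// Hypothetical entry point into the dispatch code below.
void handle_input_event(const nlohmann::json& root);

class InputChannelObserver : public webrtc::DataChannelObserver {
public:
    void OnMessage(const webrtc::DataBuffer& buffer) override {
        std::string payload(buffer.data.data<char>(), buffer.data.size());
        // Parse without exceptions; malformed messages are dropped.
        nlohmann::json root = nlohmann::json::parse(payload, nullptr, false);
        if (!root.is_discarded())
            handle_input_event(root);
    }
    void OnStateChange() override {}
};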


InputEvent event;
std::string type = root["type"];
event.type = (char*)type.c_str();  // borrows `type`'s buffer; only valid within this handler
if (strcasecmp(event.type, "keyboard") == 0) {
  auto data = root["data"];
  if (!root["data"]["key"].is_null() && root["data"]["key"].is_string()) {

    std::string key = root["data"]["key"];
    // Copy into the fixed-size key buffer, leaving room for the terminator.
    strncpy(event.u.keyboardEvent.key, key.c_str(), sizeof(event.u.keyboardEvent.key) - 1);
    event.u.keyboardEvent.key[sizeof(event.u.keyboardEvent.key) - 1] = '\0';
  }
  if (!root["data"]["code"].is_null() && root["data"]["code"].is_number()) {
    event.u.keyboardEvent.code = root["data"]["code"];
  }
  if (!root["data"]["shiftKey"].is_null() && root["data"]["shiftKey"].is_number()) {
    event.u.keyboardEvent.shiftKey = root["data"]["shiftKey"];
  }
  if (!root["data"]["ctrlKey"].is_null() && root["data"]["ctrlKey"].is_number()) {
    event.u.keyboardEvent.ctrlKey = root["data"]["ctrlKey"];
  }
  if (!root["data"]["altKey"].is_null() && root["data"]["altKey"].is_number()) {
    event.u.keyboardEvent.altKey = root["data"]["altKey"];
  }
  if (!root["data"]["metaKey"].is_null() && root["data"]["metaKey"].is_number()) {
    event.u.keyboardEvent.metaKey = root["data"]["metaKey"];
  }
  if (!root["data"]["isDown"].is_null() && root["data"]["isDown"].is_number()) {
    event.u.keyboardEvent.isDown = root["data"]["isDown"];
  }
}
else
{
  if (!root["data"]["isLeft"].is_null() && root["data"]["isLeft"].is_number()) {
    event.u.mouseEvent.isLeft = root["data"]["isLeft"];
  }

  // Likewise, validate the remaining fields the same way
  if (!root["data"]["isMiddle"].is_null() && root["data"]["isMiddle"].is_number()) {
    event.u.mouseEvent.isMiddle = root["data"]["isMiddle"];
  }

  if (!root["data"]["isRight"].is_null() && root["data"]["isRight"].is_number()) {
    event.u.mouseEvent.isRight = root["data"]["isRight"];
  }

  if (!root["data"]["isDown"].is_null() && root["data"]["isDown"].is_number()) {
    event.u.mouseEvent.isDown = root["data"]["isDown"];
  }

  if (!root["data"]["x"].is_null() && root["data"]["x"].is_number()) {
    event.u.mouseEvent.x = root["data"]["x"];
  }

  if (!root["data"]["y"].is_null() && root["data"]["y"].is_number()) {
    event.u.mouseEvent.y = root["data"]["y"];
  }

  if (!root["data"]["width"].is_null() && root["data"]["width"].is_number()) {
    event.u.mouseEvent.width = root["data"]["width"];
  }

  if (!root["data"]["height"].is_null() && root["data"]["height"].is_number()) {
    event.u.mouseEvent.height = root["data"]["height"];
  }

  if (!root["data"]["wheelDelta"].is_null() && root["data"]["wheelDelta"].is_number()) {
    event.u.mouseEvent.wheelDelta = root["data"]["wheelDelta"];
  }
}

// Hand the parsed event to the injector; keyboard events reach it through this call as well.
send_mouse_event(&event);
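
The body of send_mouse_event is not shown in the article. On a Windows receiver, the usual approach is to rescale the browser-relative coordinates into the 0..65535 absolute range and inject them with the Win32 SendInput API. A minimal sketch under that assumption (inject_mouse is a hypothetical name; the field names follow the parsing code above):

#include <windows.h>
#include <string.h>

static void inject_mouse(const InputEvent* ev)
{
    const auto& m = ev->u.mouseEvent;
    if (m.width <= 0 || m.height <= 0)
        return;

    INPUT in = { 0 };
    in.type = INPUT_MOUSE;
    // Browser coordinates are relative to the <video> element; SendInput wants
    // absolute coordinates normalized to 0..65535 across the primary display.
    in.mi.dx = (LONG)((long long)m.x * 65535 / m.width);
    in.mi.dy = (LONG)((long long)m.y * 65535 / m.height);
    in.mi.dwFlags = MOUSEEVENTF_ABSOLUTE | MOUSEEVENTF_MOVE;

    if (strcmp(ev->type, "MOUSEDOWN") == 0) {
        if (m.isLeft)   in.mi.dwFlags |= MOUSEEVENTF_LEFTDOWN;
        if (m.isRight)  in.mi.dwFlags |= MOUSEEVENTF_RIGHTDOWN;
        if (m.isMiddle) in.mi.dwFlags |= MOUSEEVENTF_MIDDLEDOWN;
    } else if (strcmp(ev->type, "MOUSEUP") == 0) {
        if (m.isLeft)   in.mi.dwFlags |= MOUSEEVENTF_LEFTUP;
        if (m.isRight)  in.mi.dwFlags |= MOUSEEVENTF_RIGHTUP;
        if (m.isMiddle) in.mi.dwFlags |= MOUSEEVENTF_MIDDLEUP;
    } else if (strcmp(ev->type, "MOUSEWHEEL") == 0) {
        in.mi.dwFlags |= MOUSEEVENTF_WHEEL;
        in.mi.mouseData = (DWORD)m.wheelDelta;
    }
    SendInput(1, &in, sizeof(INPUT));
}

Keyboard events would go the same route with in.type = INPUT_KEYBOARD and ki.wVk derived from the JavaScript keyCode, which largely coincides with the Windows virtual-key table.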

Demo Prototype

(Figure: demo of WebRTC-based remote control)

Author: Aliveyun
Original article (Chinese): https://mp.weixin.qq.com/s/zdchtl_k3ZY78OUm2cYsXA
