Repo created

This commit is contained in:
Fr4nz D13trich 2025-11-22 14:04:28 +01:00
parent 81b91f4139
commit f8c34fa5ee
22732 changed files with 4815320 additions and 2 deletions


@@ -0,0 +1,244 @@
/*
* Copyright 2018 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "modules/desktop_capture/linux/wayland/base_capturer_pipewire.h"
#include "modules/desktop_capture/desktop_capture_options.h"
#include "modules/desktop_capture/desktop_capturer.h"
#include "modules/desktop_capture/linux/wayland/restore_token_manager.h"
#include "modules/portal/pipewire_utils.h"
#include "modules/portal/xdg_desktop_portal_utils.h"
#include "rtc_base/checks.h"
#include "rtc_base/logging.h"
#include "rtc_base/time_utils.h"
#include "rtc_base/trace_event.h"
namespace webrtc {
namespace {
using xdg_portal::RequestResponse;
using xdg_portal::ScreenCapturePortalInterface;
using xdg_portal::SessionDetails;
} // namespace
// static
bool BaseCapturerPipeWire::IsSupported() {
// Unfortunately, the best way we have to check if PipeWire is available is
// to try to initialize it.
// InitializePipeWire should prevent us from repeatedly initializing PipeWire,
// but we also don't really expect support to change without the application
// restarting.
static bool supported =
DesktopCapturer::IsRunningUnderWayland() && InitializePipeWire();
return supported;
}
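// A minimal usage sketch: callers typically gate creation of this capturer on
// IsSupported() and fall back to an X11-based capturer otherwise.
// `MaybeCreatePipeWireScreenCapturer` is a hypothetical helper used only for
// illustration; it is not part of this class.
std::unique_ptr<DesktopCapturer> MaybeCreatePipeWireScreenCapturer(
    const DesktopCaptureOptions& options) {
  if (!BaseCapturerPipeWire::IsSupported()) {
    return nullptr;  // Caller selects a different capturer implementation.
  }
  return std::make_unique<BaseCapturerPipeWire>(options, CaptureType::kScreen);
}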
BaseCapturerPipeWire::BaseCapturerPipeWire(const DesktopCaptureOptions& options,
CaptureType type)
: BaseCapturerPipeWire(options,
std::make_unique<ScreenCastPortal>(type, this)) {
is_screencast_portal_ = true;
}
BaseCapturerPipeWire::BaseCapturerPipeWire(
const DesktopCaptureOptions& options,
std::unique_ptr<ScreenCapturePortalInterface> portal)
: options_(options),
is_screencast_portal_(false),
portal_(std::move(portal)) {
source_id_ = RestoreTokenManager::GetInstance().GetUnusedId();
options_.screencast_stream()->SetUseDamageRegion(
options_.pipewire_use_damage_region());
}
BaseCapturerPipeWire::~BaseCapturerPipeWire() {
options_.screencast_stream()->StopScreenCastStream();
}
void BaseCapturerPipeWire::OnScreenCastRequestResult(RequestResponse result,
uint32_t stream_node_id,
int fd) {
is_portal_open_ = false;
// Reset the value of capturer_failed_ in case we succeed below. If we fail,
// then it'll set it to the right value again soon enough.
capturer_failed_ = false;
if (result != RequestResponse::kSuccess ||
!options_.screencast_stream()->StartScreenCastStream(
stream_node_id, fd, options_.get_width(), options_.get_height(),
options_.prefer_cursor_embedded(),
send_frames_immediately_ ? callback_ : nullptr)) {
capturer_failed_ = true;
RTC_LOG(LS_ERROR) << "ScreenCastPortal failed: "
<< static_cast<uint32_t>(result);
} else if (ScreenCastPortal* screencast_portal = GetScreenCastPortal()) {
if (!screencast_portal->RestoreToken().empty()) {
const SourceId token_id =
selected_source_id_ ? selected_source_id_ : source_id_;
RestoreTokenManager::GetInstance().AddToken(
token_id, screencast_portal->RestoreToken());
}
}
if (!delegated_source_list_observer_)
return;
switch (result) {
case RequestResponse::kUnknown:
RTC_DCHECK_NOTREACHED();
break;
case RequestResponse::kSuccess:
delegated_source_list_observer_->OnSelection();
break;
case RequestResponse::kUserCancelled:
delegated_source_list_observer_->OnCancelled();
break;
case RequestResponse::kError:
delegated_source_list_observer_->OnError();
break;
}
}
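// A sketch of the receiving side of the notifications dispatched above.
// `PortalDialogObserver` is a hypothetical observer used only for
// illustration; the Observer interface comes from
// DelegatedSourceListController and is registered through Observe().
class PortalDialogObserver : public DelegatedSourceListController::Observer {
 public:
  // The user picked a screen/window in the xdg-desktop-portal dialog.
  void OnSelection() override { RTC_LOG(LS_INFO) << "Source selected."; }
  // The user dismissed the dialog without picking anything.
  void OnCancelled() override { RTC_LOG(LS_INFO) << "Selection cancelled."; }
  // The portal request itself failed.
  void OnError() override { RTC_LOG(LS_WARNING) << "Portal request failed."; }
};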
void BaseCapturerPipeWire::OnScreenCastSessionClosed() {
if (!capturer_failed_) {
options_.screencast_stream()->StopScreenCastStream();
}
}
void BaseCapturerPipeWire::UpdateResolution(uint32_t width, uint32_t height) {
if (!capturer_failed_) {
options_.screencast_stream()->UpdateScreenCastStreamResolution(width,
height);
}
}
void BaseCapturerPipeWire::SetMaxFrameRate(uint32_t max_frame_rate) {
if (!capturer_failed_) {
options_.screencast_stream()->UpdateScreenCastStreamFrameRate(
max_frame_rate);
}
}
void BaseCapturerPipeWire::Start(Callback* callback) {
RTC_DCHECK(!callback_);
RTC_DCHECK(callback);
callback_ = callback;
if (ScreenCastPortal* screencast_portal = GetScreenCastPortal()) {
screencast_portal->SetPersistMode(
ScreenCastPortal::PersistMode::kTransient);
if (selected_source_id_) {
screencast_portal->SetRestoreToken(
RestoreTokenManager::GetInstance().GetToken(selected_source_id_));
}
}
is_portal_open_ = true;
portal_->Start();
}
void BaseCapturerPipeWire::CaptureFrame() {
TRACE_EVENT0("webrtc", "BaseCapturerPipeWire::CaptureFrame");
if (capturer_failed_) {
// This could be recoverable if the source list is re-summoned; but for our
// purposes this is fine, since it requires intervention to resolve and
// essentially starts a new capture.
callback_->OnCaptureResult(Result::ERROR_PERMANENT, nullptr);
return;
}
int64_t capture_start_time_nanos = rtc::TimeNanos();
std::unique_ptr<DesktopFrame> frame =
options_.screencast_stream()->CaptureFrame();
if (!frame || !frame->data()) {
callback_->OnCaptureResult(Result::ERROR_TEMPORARY, nullptr);
return;
}
// TODO(julien.isorce): http://crbug.com/945468. Set the icc profile on
// the frame, see ScreenCapturerX11::CaptureFrame.
frame->set_capturer_id(DesktopCapturerId::kWaylandCapturerLinux);
frame->set_capture_time_ms((rtc::TimeNanos() - capture_start_time_nanos) /
rtc::kNumNanosecsPerMillisec);
callback_->OnCaptureResult(Result::SUCCESS, std::move(frame));
}
bool BaseCapturerPipeWire::GetSourceList(SourceList* sources) {
RTC_DCHECK(sources->size() == 0);
// The list of available screens is already presented by the xdg-desktop-portal,
// so we just need a (valid) source id for any callers to pass around, even
// though it doesn't mean anything to us. Until the user selects a source in
// xdg-desktop-portal we'll just end up returning empty frames. Note that "0"
// is often treated as a null/placeholder id, so we shouldn't use that.
// TODO(https://crbug.com/1297671): Reconsider type of ID when plumbing
// token that will enable stream re-use.
sources->push_back({source_id_});
return true;
}
bool BaseCapturerPipeWire::SelectSource(SourceId id) {
// Screen selection is handled by the xdg-desktop-portal.
selected_source_id_ = id;
return true;
}
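// A sketch of the enumeration flow from the caller's point of view: the list
// always holds one synthetic source, and it is Start() that opens the
// xdg-desktop-portal dialog where the real selection happens.
// `StartDelegatedCapture` is a hypothetical helper used only for illustration.
void StartDelegatedCapture(DesktopCapturer* capturer,
                           DesktopCapturer::Callback* callback) {
  DesktopCapturer::SourceList sources;
  capturer->GetSourceList(&sources);  // Exactly one placeholder entry.
  if (!sources.empty()) {
    // The id is only meaningful as a key for a future restore token.
    capturer->SelectSource(sources[0].id);
  }
  capturer->Start(callback);  // Triggers the portal request.
}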
DelegatedSourceListController*
BaseCapturerPipeWire::GetDelegatedSourceListController() {
return this;
}
void BaseCapturerPipeWire::Observe(Observer* observer) {
RTC_DCHECK(!delegated_source_list_observer_ || !observer);
delegated_source_list_observer_ = observer;
}
void BaseCapturerPipeWire::EnsureVisible() {
RTC_DCHECK(callback_);
if (is_portal_open_)
return;
// Clear any previously selected state/capture
portal_->Stop();
options_.screencast_stream()->StopScreenCastStream();
// Get a new source id to reflect that the source has changed.
source_id_ = RestoreTokenManager::GetInstance().GetUnusedId();
is_portal_open_ = true;
portal_->Start();
}
void BaseCapturerPipeWire::EnsureHidden() {
if (!is_portal_open_)
return;
is_portal_open_ = false;
portal_->Stop();
}
SessionDetails BaseCapturerPipeWire::GetSessionDetails() {
return portal_->GetSessionDetails();
}
ScreenCastPortal* BaseCapturerPipeWire::GetScreenCastPortal() {
return is_screencast_portal_ ? static_cast<ScreenCastPortal*>(portal_.get())
: nullptr;
}
void BaseCapturerPipeWire::SendFramesImmediately(bool send_frames_immediately) {
send_frames_immediately_ = send_frames_immediately;
}
} // namespace webrtc


@@ -0,0 +1,98 @@
/*
* Copyright 2018 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef MODULES_DESKTOP_CAPTURE_LINUX_WAYLAND_BASE_CAPTURER_PIPEWIRE_H_
#define MODULES_DESKTOP_CAPTURE_LINUX_WAYLAND_BASE_CAPTURER_PIPEWIRE_H_
#include "modules/desktop_capture/delegated_source_list_controller.h"
#include "modules/desktop_capture/desktop_capture_options.h"
#include "modules/desktop_capture/desktop_capturer.h"
#include "modules/desktop_capture/linux/wayland/screen_capture_portal_interface.h"
#include "modules/desktop_capture/linux/wayland/screencast_portal.h"
#include "modules/desktop_capture/linux/wayland/shared_screencast_stream.h"
#include "modules/portal/portal_request_response.h"
#include "modules/portal/xdg_desktop_portal_utils.h"
#include "modules/portal/xdg_session_details.h"
namespace webrtc {
class RTC_EXPORT BaseCapturerPipeWire
: public DesktopCapturer,
public DelegatedSourceListController,
public ScreenCastPortal::PortalNotifier {
public:
// Returns whether or not the current system can support capture via PipeWire.
// This will only be true on Wayland systems that also have PipeWire
// available, and thus may require dlopening PipeWire to determine if it is
// available.
static bool IsSupported();
BaseCapturerPipeWire(const DesktopCaptureOptions& options, CaptureType type);
BaseCapturerPipeWire(
const DesktopCaptureOptions& options,
std::unique_ptr<xdg_portal::ScreenCapturePortalInterface> portal);
~BaseCapturerPipeWire() override;
BaseCapturerPipeWire(const BaseCapturerPipeWire&) = delete;
BaseCapturerPipeWire& operator=(const BaseCapturerPipeWire&) = delete;
// DesktopCapturer interface.
void Start(Callback* delegate) override;
void CaptureFrame() override;
bool GetSourceList(SourceList* sources) override;
bool SelectSource(SourceId id) override;
DelegatedSourceListController* GetDelegatedSourceListController() override;
void SetMaxFrameRate(uint32_t max_frame_rate) override;
// DelegatedSourceListController
void Observe(Observer* observer) override;
void EnsureVisible() override;
void EnsureHidden() override;
// ScreenCastPortal::PortalNotifier interface.
void OnScreenCastRequestResult(xdg_portal::RequestResponse result,
uint32_t stream_node_id,
int fd) override;
void OnScreenCastSessionClosed() override;
void UpdateResolution(uint32_t width, uint32_t height) override;
xdg_portal::SessionDetails GetSessionDetails();
// Notifies the callback about the available frames as soon as a frame is
// received.
void SendFramesImmediately(bool send_frames_immediately);
private:
ScreenCastPortal* GetScreenCastPortal();
DesktopCaptureOptions options_ = {};
Callback* callback_ = nullptr;
bool send_frames_immediately_ = false;
bool capturer_failed_ = false;
bool is_screencast_portal_ = false;
bool is_portal_open_ = false;
Observer* delegated_source_list_observer_ = nullptr;
// SourceId that is selected using SelectSource() and that we previously
// returned in GetSourceList(). This should be a SourceId that has a restore
// token associated with it and that can be restored if we have the required
// version of xdg-desktop-portal.
SourceId selected_source_id_ = 0;
// SourceId we randomly generate and that is returned in GetSourceList() as an
// available source; it will later get a restore token assigned to it so that
// the source can be restored via SelectSource().
SourceId source_id_ = 0;
std::unique_ptr<xdg_portal::ScreenCapturePortalInterface> portal_;
};
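// A minimal sketch of a DesktopCapturer::Callback for this capturer.
// `ExampleCaptureCallback` is a hypothetical name used only for illustration.
// After Start(), CaptureFrame() is typically invoked periodically; results are
// ERROR_TEMPORARY until the user picks a source in the portal dialog, and
// ERROR_PERMANENT means the capturer has to be recreated.
class ExampleCaptureCallback : public DesktopCapturer::Callback {
 public:
  void OnCaptureResult(DesktopCapturer::Result result,
                       std::unique_ptr<DesktopFrame> frame) override {
    if (result != DesktopCapturer::Result::SUCCESS) {
      return;
    }
    // Consume `frame` here (encode, upload, ...).
  }
};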
} // namespace webrtc
#endif // MODULES_DESKTOP_CAPTURE_LINUX_WAYLAND_BASE_CAPTURER_PIPEWIRE_H_


@@ -0,0 +1,755 @@
/*
* Copyright 2021 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "modules/desktop_capture/linux/wayland/egl_dmabuf.h"
#include <asm/ioctl.h>
#include <dlfcn.h>
#include <fcntl.h>
#include <libdrm/drm_fourcc.h>
#include <linux/types.h>
#include <spa/param/video/format-utils.h>
#include <unistd.h>
#include <xf86drm.h>
#include "absl/memory/memory.h"
#include "absl/types/optional.h"
#include "rtc_base/checks.h"
#include "rtc_base/logging.h"
#include "rtc_base/sanitizer.h"
#include "rtc_base/string_encode.h"
namespace webrtc {
// EGL
typedef EGLBoolean (*eglBindAPI_func)(EGLenum api);
typedef EGLContext (*eglCreateContext_func)(EGLDisplay dpy,
EGLConfig config,
EGLContext share_context,
const EGLint* attrib_list);
typedef EGLBoolean (*eglDestroyContext_func)(EGLDisplay display,
EGLContext context);
typedef EGLBoolean (*eglTerminate_func)(EGLDisplay display);
typedef EGLImageKHR (*eglCreateImageKHR_func)(EGLDisplay dpy,
EGLContext ctx,
EGLenum target,
EGLClientBuffer buffer,
const EGLint* attrib_list);
typedef EGLBoolean (*eglDestroyImageKHR_func)(EGLDisplay dpy,
EGLImageKHR image);
typedef EGLint (*eglGetError_func)(void);
typedef void* (*eglGetProcAddress_func)(const char*);
typedef EGLDisplay (*eglGetPlatformDisplayEXT_func)(EGLenum platform,
void* native_display,
const EGLint* attrib_list);
typedef EGLDisplay (*eglGetPlatformDisplay_func)(EGLenum platform,
void* native_display,
const EGLAttrib* attrib_list);
typedef EGLBoolean (*eglInitialize_func)(EGLDisplay dpy,
EGLint* major,
EGLint* minor);
typedef EGLBoolean (*eglMakeCurrent_func)(EGLDisplay dpy,
EGLSurface draw,
EGLSurface read,
EGLContext ctx);
typedef EGLBoolean (*eglQueryDmaBufFormatsEXT_func)(EGLDisplay dpy,
EGLint max_formats,
EGLint* formats,
EGLint* num_formats);
typedef EGLBoolean (*eglQueryDmaBufModifiersEXT_func)(EGLDisplay dpy,
EGLint format,
EGLint max_modifiers,
EGLuint64KHR* modifiers,
EGLBoolean* external_only,
EGLint* num_modifiers);
typedef const char* (*eglQueryString_func)(EGLDisplay dpy, EGLint name);
typedef void (*glEGLImageTargetTexture2DOES_func)(GLenum target,
GLeglImageOES image);
// These names don't follow WebRTC naming conventions, which would call for
// e.g. egl_bind_api instead of EglBindAPI; instead they are named after the
// exported functions they map to, for consistency.
eglBindAPI_func EglBindAPI = nullptr;
eglCreateContext_func EglCreateContext = nullptr;
eglDestroyContext_func EglDestroyContext = nullptr;
eglTerminate_func EglTerminate = nullptr;
eglCreateImageKHR_func EglCreateImageKHR = nullptr;
eglDestroyImageKHR_func EglDestroyImageKHR = nullptr;
eglGetError_func EglGetError = nullptr;
eglGetProcAddress_func EglGetProcAddress = nullptr;
eglGetPlatformDisplayEXT_func EglGetPlatformDisplayEXT = nullptr;
eglGetPlatformDisplay_func EglGetPlatformDisplay = nullptr;
eglInitialize_func EglInitialize = nullptr;
eglMakeCurrent_func EglMakeCurrent = nullptr;
eglQueryDmaBufFormatsEXT_func EglQueryDmaBufFormatsEXT = nullptr;
eglQueryDmaBufModifiersEXT_func EglQueryDmaBufModifiersEXT = nullptr;
eglQueryString_func EglQueryString = nullptr;
glEGLImageTargetTexture2DOES_func GlEGLImageTargetTexture2DOES = nullptr;
// GL
typedef void (*glBindTexture_func)(GLenum target, GLuint texture);
typedef void (*glDeleteTextures_func)(GLsizei n, const GLuint* textures);
typedef void (*glGenTextures_func)(GLsizei n, GLuint* textures);
typedef GLenum (*glGetError_func)(void);
typedef const GLubyte* (*glGetString_func)(GLenum name);
typedef void (*glReadPixels_func)(GLint x,
GLint y,
GLsizei width,
GLsizei height,
GLenum format,
GLenum type,
void* data);
typedef void (*glGenFramebuffers_func)(GLsizei n, GLuint* ids);
typedef void (*glDeleteFramebuffers_func)(GLsizei n,
const GLuint* framebuffers);
typedef void (*glBindFramebuffer_func)(GLenum target, GLuint framebuffer);
typedef void (*glFramebufferTexture2D_func)(GLenum target,
GLenum attachment,
GLenum textarget,
GLuint texture,
GLint level);
typedef GLenum (*glCheckFramebufferStatus_func)(GLenum target);
typedef void (*glTexParameteri_func)(GLenum target, GLenum pname, GLint param);
typedef void* (*glXGetProcAddressARB_func)(const char*);
// These names don't follow WebRTC naming conventions, which would call for
// e.g. egl_bind_api instead of EglBindAPI; instead they are named after the
// exported functions they map to, for consistency.
glBindTexture_func GlBindTexture = nullptr;
glDeleteTextures_func GlDeleteTextures = nullptr;
glGenTextures_func GlGenTextures = nullptr;
glGetError_func GlGetError = nullptr;
glGetString_func GlGetString = nullptr;
glReadPixels_func GlReadPixels = nullptr;
glGenFramebuffers_func GlGenFramebuffers = nullptr;
glDeleteFramebuffers_func GlDeleteFramebuffers = nullptr;
glBindFramebuffer_func GlBindFramebuffer = nullptr;
glFramebufferTexture2D_func GlFramebufferTexture2D = nullptr;
glCheckFramebufferStatus_func GlCheckFramebufferStatus = nullptr;
glTexParameteri_func GlTexParameteri = nullptr;
glXGetProcAddressARB_func GlXGetProcAddressARB = nullptr;
static const std::string FormatGLError(GLenum err) {
switch (err) {
case GL_NO_ERROR:
return "GL_NO_ERROR";
case GL_INVALID_ENUM:
return "GL_INVALID_ENUM";
case GL_INVALID_VALUE:
return "GL_INVALID_VALUE";
case GL_INVALID_OPERATION:
return "GL_INVALID_OPERATION";
case GL_STACK_OVERFLOW:
return "GL_STACK_OVERFLOW";
case GL_STACK_UNDERFLOW:
return "GL_STACK_UNDERFLOW";
case GL_OUT_OF_MEMORY:
return "GL_OUT_OF_MEMORY";
default:
return "GL error code: " + std::to_string(err);
}
}
static const std::string FormatEGLError(EGLint err) {
switch (err) {
case EGL_NOT_INITIALIZED:
return "EGL_NOT_INITIALIZED";
case EGL_BAD_ACCESS:
return "EGL_BAD_ACCESS";
case EGL_BAD_ALLOC:
return "EGL_BAD_ALLOC";
case EGL_BAD_ATTRIBUTE:
return "EGL_BAD_ATTRIBUTE";
case EGL_BAD_CONTEXT:
return "EGL_BAD_CONTEXT";
case EGL_BAD_CONFIG:
return "EGL_BAD_CONFIG";
case EGL_BAD_CURRENT_SURFACE:
return "EGL_BAD_CURRENT_SURFACE";
case EGL_BAD_DISPLAY:
return "EGL_BAD_DISPLAY";
case EGL_BAD_SURFACE:
return "EGL_BAD_SURFACE";
case EGL_BAD_MATCH:
return "EGL_BAD_MATCH";
case EGL_BAD_PARAMETER:
return "EGL_BAD_PARAMETER";
case EGL_BAD_NATIVE_PIXMAP:
return "EGL_BAD_NATIVE_PIXMAP";
case EGL_BAD_NATIVE_WINDOW:
return "EGL_BAD_NATIVE_WINDOW";
case EGL_CONTEXT_LOST:
return "EGL_CONTEXT_LOST";
default:
return "EGL error code: " + std::to_string(err);
}
}
static uint32_t SpaPixelFormatToDrmFormat(uint32_t spa_format) {
switch (spa_format) {
case SPA_VIDEO_FORMAT_RGBA:
return DRM_FORMAT_ABGR8888;
case SPA_VIDEO_FORMAT_RGBx:
return DRM_FORMAT_XBGR8888;
case SPA_VIDEO_FORMAT_BGRA:
return DRM_FORMAT_ARGB8888;
case SPA_VIDEO_FORMAT_BGRx:
return DRM_FORMAT_XRGB8888;
default:
return DRM_FORMAT_INVALID;
}
}
static void CloseLibrary(void* library) {
if (library) {
dlclose(library);
library = nullptr;
}
}
static void* g_lib_egl = nullptr;
RTC_NO_SANITIZE("cfi-icall")
static bool OpenEGL() {
g_lib_egl = dlopen("libEGL.so.1", RTLD_NOW | RTLD_GLOBAL);
if (g_lib_egl) {
EglGetProcAddress =
(eglGetProcAddress_func)dlsym(g_lib_egl, "eglGetProcAddress");
return EglGetProcAddress;
}
return false;
}
RTC_NO_SANITIZE("cfi-icall")
static bool LoadEGL() {
if (OpenEGL()) {
EglBindAPI = (eglBindAPI_func)EglGetProcAddress("eglBindAPI");
EglCreateContext =
(eglCreateContext_func)EglGetProcAddress("eglCreateContext");
EglDestroyContext =
(eglDestroyContext_func)EglGetProcAddress("eglDestroyContext");
EglTerminate = (eglTerminate_func)EglGetProcAddress("eglTerminate");
EglCreateImageKHR =
(eglCreateImageKHR_func)EglGetProcAddress("eglCreateImageKHR");
EglDestroyImageKHR =
(eglDestroyImageKHR_func)EglGetProcAddress("eglDestroyImageKHR");
EglGetError = (eglGetError_func)EglGetProcAddress("eglGetError");
EglGetPlatformDisplayEXT = (eglGetPlatformDisplayEXT_func)EglGetProcAddress(
"eglGetPlatformDisplayEXT");
EglGetPlatformDisplay =
(eglGetPlatformDisplay_func)EglGetProcAddress("eglGetPlatformDisplay");
EglInitialize = (eglInitialize_func)EglGetProcAddress("eglInitialize");
EglMakeCurrent = (eglMakeCurrent_func)EglGetProcAddress("eglMakeCurrent");
EglQueryString = (eglQueryString_func)EglGetProcAddress("eglQueryString");
GlEGLImageTargetTexture2DOES =
(glEGLImageTargetTexture2DOES_func)EglGetProcAddress(
"glEGLImageTargetTexture2DOES");
return EglBindAPI && EglCreateContext && EglCreateImageKHR &&
EglTerminate && EglDestroyContext && EglDestroyImageKHR &&
EglGetError && EglGetPlatformDisplayEXT && EglGetPlatformDisplay &&
EglInitialize && EglMakeCurrent && EglQueryString &&
GlEGLImageTargetTexture2DOES;
}
return false;
}
static void* g_lib_gl = nullptr;
RTC_NO_SANITIZE("cfi-icall")
static bool OpenGL() {
std::vector<std::string> names = {"libGL.so.1", "libGL.so"};
for (const std::string& name : names) {
g_lib_gl = dlopen(name.c_str(), RTLD_NOW | RTLD_GLOBAL);
if (g_lib_gl) {
GlXGetProcAddressARB =
(glXGetProcAddressARB_func)dlsym(g_lib_gl, "glXGetProcAddressARB");
return GlXGetProcAddressARB;
}
}
return false;
}
RTC_NO_SANITIZE("cfi-icall")
static bool LoadGL() {
if (OpenGL()) {
GlGetString = (glGetString_func)GlXGetProcAddressARB("glGetString");
if (!GlGetString) {
return false;
}
GlBindTexture = (glBindTexture_func)GlXGetProcAddressARB("glBindTexture");
GlDeleteTextures =
(glDeleteTextures_func)GlXGetProcAddressARB("glDeleteTextures");
GlGenTextures = (glGenTextures_func)GlXGetProcAddressARB("glGenTextures");
GlGetError = (glGetError_func)GlXGetProcAddressARB("glGetError");
GlReadPixels = (glReadPixels_func)GlXGetProcAddressARB("glReadPixels");
GlGenFramebuffers =
(glGenFramebuffers_func)GlXGetProcAddressARB("glGenFramebuffers");
GlDeleteFramebuffers =
(glDeleteFramebuffers_func)GlXGetProcAddressARB("glDeleteFramebuffers");
GlBindFramebuffer =
(glBindFramebuffer_func)GlXGetProcAddressARB("glBindFramebuffer");
GlFramebufferTexture2D = (glFramebufferTexture2D_func)GlXGetProcAddressARB(
"glFramebufferTexture2D");
GlCheckFramebufferStatus =
(glCheckFramebufferStatus_func)GlXGetProcAddressARB(
"glCheckFramebufferStatus");
GlTexParameteri =
(glTexParameteri_func)GlXGetProcAddressARB("glTexParameteri");
return GlBindTexture && GlDeleteTextures && GlGenTextures && GlGetError &&
GlReadPixels && GlGenFramebuffers && GlDeleteFramebuffers &&
GlBindFramebuffer && GlFramebufferTexture2D &&
GlCheckFramebufferStatus && GlTexParameteri;
}
return false;
}
RTC_NO_SANITIZE("cfi-icall")
EglDmaBuf::EglDmaBuf() {
if (!LoadEGL()) {
RTC_LOG(LS_ERROR) << "Unable to load EGL entry functions.";
CloseLibrary(g_lib_egl);
return;
}
if (!LoadGL()) {
RTC_LOG(LS_ERROR) << "Failed to load OpenGL entry functions.";
CloseLibrary(g_lib_gl);
return;
}
if (!GetClientExtensions(EGL_NO_DISPLAY, EGL_EXTENSIONS)) {
return;
}
bool has_platform_base_ext = false;
bool has_platform_gbm_ext = false;
bool has_khr_platform_gbm_ext = false;
for (const auto& extension : egl_.extensions) {
if (extension == "EGL_EXT_platform_base") {
has_platform_base_ext = true;
continue;
} else if (extension == "EGL_MESA_platform_gbm") {
has_platform_gbm_ext = true;
continue;
} else if (extension == "EGL_KHR_platform_gbm") {
has_khr_platform_gbm_ext = true;
continue;
}
}
if (!has_platform_base_ext || !has_platform_gbm_ext ||
!has_khr_platform_gbm_ext) {
RTC_LOG(LS_ERROR) << "One of required EGL extensions is missing";
return;
}
egl_.display = EglGetPlatformDisplay(EGL_PLATFORM_WAYLAND_KHR,
(void*)EGL_DEFAULT_DISPLAY, nullptr);
if (egl_.display == EGL_NO_DISPLAY) {
RTC_LOG(LS_ERROR) << "Failed to obtain default EGL display: "
<< FormatEGLError(EglGetError()) << "\n"
<< "Defaulting to using first available render node";
absl::optional<std::string> render_node = GetRenderNode();
if (!render_node) {
return;
}
drm_fd_ = open(render_node->c_str(), O_RDWR);
if (drm_fd_ < 0) {
RTC_LOG(LS_ERROR) << "Failed to open drm render node: "
<< strerror(errno);
return;
}
gbm_device_ = gbm_create_device(drm_fd_);
if (!gbm_device_) {
RTC_LOG(LS_ERROR) << "Cannot create GBM device: " << strerror(errno);
close(drm_fd_);
return;
}
// Use eglGetPlatformDisplayEXT() to get the display pointer
// if the implementation supports it.
egl_.display =
EglGetPlatformDisplayEXT(EGL_PLATFORM_GBM_KHR, gbm_device_, nullptr);
}
if (egl_.display == EGL_NO_DISPLAY) {
RTC_LOG(LS_ERROR) << "Error during obtaining EGL display: "
<< FormatEGLError(EglGetError());
return;
}
EGLint major, minor;
if (EglInitialize(egl_.display, &major, &minor) == EGL_FALSE) {
RTC_LOG(LS_ERROR) << "Error during eglInitialize: "
<< FormatEGLError(EglGetError());
return;
}
if (EglBindAPI(EGL_OPENGL_API) == EGL_FALSE) {
RTC_LOG(LS_ERROR) << "bind OpenGL API failed";
return;
}
egl_.context =
EglCreateContext(egl_.display, nullptr, EGL_NO_CONTEXT, nullptr);
if (egl_.context == EGL_NO_CONTEXT) {
RTC_LOG(LS_ERROR) << "Couldn't create EGL context: "
<< FormatGLError(EglGetError());
return;
}
if (!GetClientExtensions(egl_.display, EGL_EXTENSIONS)) {
return;
}
bool has_image_dma_buf_import_modifiers_ext = false;
for (const auto& extension : egl_.extensions) {
if (extension == "EGL_EXT_image_dma_buf_import") {
has_image_dma_buf_import_ext_ = true;
continue;
} else if (extension == "EGL_EXT_image_dma_buf_import_modifiers") {
has_image_dma_buf_import_modifiers_ext = true;
continue;
}
}
if (has_image_dma_buf_import_ext_ && has_image_dma_buf_import_modifiers_ext) {
EglQueryDmaBufFormatsEXT = (eglQueryDmaBufFormatsEXT_func)EglGetProcAddress(
"eglQueryDmaBufFormatsEXT");
EglQueryDmaBufModifiersEXT =
(eglQueryDmaBufModifiersEXT_func)EglGetProcAddress(
"eglQueryDmaBufModifiersEXT");
}
RTC_LOG(LS_INFO) << "Egl initialization succeeded";
egl_initialized_ = true;
}
RTC_NO_SANITIZE("cfi-icall")
EglDmaBuf::~EglDmaBuf() {
if (gbm_device_) {
gbm_device_destroy(gbm_device_);
close(drm_fd_);
}
if (egl_.context != EGL_NO_CONTEXT) {
EglDestroyContext(egl_.display, egl_.context);
}
if (egl_.display != EGL_NO_DISPLAY) {
EglTerminate(egl_.display);
}
if (fbo_) {
GlDeleteFramebuffers(1, &fbo_);
}
if (texture_) {
GlDeleteTextures(1, &texture_);
}
// BUG: crbug.com/1290566
// Closing libEGL.so.1 when using NVIDIA drivers causes a crash when
// EglGetPlatformDisplayEXT() is used; calling just that one function is
// enough to trigger it. libepoxy and glad don't appear to dlclose it either.
// CloseLibrary(g_lib_egl);
// CloseLibrary(g_lib_gl);
}
RTC_NO_SANITIZE("cfi-icall")
bool EglDmaBuf::GetClientExtensions(EGLDisplay dpy, EGLint name) {
// Get the list of client extensions
const char* client_extensions_cstring = EglQueryString(dpy, name);
if (!client_extensions_cstring) {
// If eglQueryString() returned NULL, the implementation doesn't support
// EGL_EXT_client_extensions. Expect an EGL_BAD_DISPLAY error.
RTC_LOG(LS_ERROR) << "No client extensions defined! "
<< FormatEGLError(EglGetError());
return false;
}
std::vector<absl::string_view> client_extensions =
rtc::split(client_extensions_cstring, ' ');
for (const auto& extension : client_extensions) {
egl_.extensions.push_back(std::string(extension));
}
return true;
}
RTC_NO_SANITIZE("cfi-icall")
bool EglDmaBuf::ImageFromDmaBuf(const DesktopSize& size,
uint32_t format,
const std::vector<PlaneData>& plane_datas,
uint64_t modifier,
const DesktopVector& offset,
const DesktopSize& buffer_size,
uint8_t* data) {
if (!egl_initialized_) {
return false;
}
if (plane_datas.empty()) {
RTC_LOG(LS_ERROR) << "Failed to process buffer: invalid number of planes";
return false;
}
EGLint attribs[47];
int atti = 0;
attribs[atti++] = EGL_WIDTH;
attribs[atti++] = static_cast<EGLint>(size.width());
attribs[atti++] = EGL_HEIGHT;
attribs[atti++] = static_cast<EGLint>(size.height());
attribs[atti++] = EGL_LINUX_DRM_FOURCC_EXT;
attribs[atti++] = SpaPixelFormatToDrmFormat(format);
if (plane_datas.size() > 0) {
attribs[atti++] = EGL_DMA_BUF_PLANE0_FD_EXT;
attribs[atti++] = plane_datas[0].fd;
attribs[atti++] = EGL_DMA_BUF_PLANE0_OFFSET_EXT;
attribs[atti++] = plane_datas[0].offset;
attribs[atti++] = EGL_DMA_BUF_PLANE0_PITCH_EXT;
attribs[atti++] = plane_datas[0].stride;
if (modifier != DRM_FORMAT_MOD_INVALID) {
attribs[atti++] = EGL_DMA_BUF_PLANE0_MODIFIER_LO_EXT;
attribs[atti++] = modifier & 0xFFFFFFFF;
attribs[atti++] = EGL_DMA_BUF_PLANE0_MODIFIER_HI_EXT;
attribs[atti++] = modifier >> 32;
}
}
if (plane_datas.size() > 1) {
attribs[atti++] = EGL_DMA_BUF_PLANE1_FD_EXT;
attribs[atti++] = plane_datas[1].fd;
attribs[atti++] = EGL_DMA_BUF_PLANE1_OFFSET_EXT;
attribs[atti++] = plane_datas[1].offset;
attribs[atti++] = EGL_DMA_BUF_PLANE1_PITCH_EXT;
attribs[atti++] = plane_datas[1].stride;
if (modifier != DRM_FORMAT_MOD_INVALID) {
attribs[atti++] = EGL_DMA_BUF_PLANE1_MODIFIER_LO_EXT;
attribs[atti++] = modifier & 0xFFFFFFFF;
attribs[atti++] = EGL_DMA_BUF_PLANE1_MODIFIER_HI_EXT;
attribs[atti++] = modifier >> 32;
}
}
if (plane_datas.size() > 2) {
attribs[atti++] = EGL_DMA_BUF_PLANE2_FD_EXT;
attribs[atti++] = plane_datas[2].fd;
attribs[atti++] = EGL_DMA_BUF_PLANE2_OFFSET_EXT;
attribs[atti++] = plane_datas[2].offset;
attribs[atti++] = EGL_DMA_BUF_PLANE2_PITCH_EXT;
attribs[atti++] = plane_datas[2].stride;
if (modifier != DRM_FORMAT_MOD_INVALID) {
attribs[atti++] = EGL_DMA_BUF_PLANE2_MODIFIER_LO_EXT;
attribs[atti++] = modifier & 0xFFFFFFFF;
attribs[atti++] = EGL_DMA_BUF_PLANE2_MODIFIER_HI_EXT;
attribs[atti++] = modifier >> 32;
}
}
if (plane_datas.size() > 3) {
attribs[atti++] = EGL_DMA_BUF_PLANE3_FD_EXT;
attribs[atti++] = plane_datas[3].fd;
attribs[atti++] = EGL_DMA_BUF_PLANE3_OFFSET_EXT;
attribs[atti++] = plane_datas[3].offset;
attribs[atti++] = EGL_DMA_BUF_PLANE3_PITCH_EXT;
attribs[atti++] = plane_datas[3].stride;
if (modifier != DRM_FORMAT_MOD_INVALID) {
attribs[atti++] = EGL_DMA_BUF_PLANE3_MODIFIER_LO_EXT;
attribs[atti++] = modifier & 0xFFFFFFFF;
attribs[atti++] = EGL_DMA_BUF_PLANE3_MODIFIER_HI_EXT;
attribs[atti++] = modifier >> 32;
}
}
attribs[atti++] = EGL_NONE;
// bind context to render thread
EglMakeCurrent(egl_.display, EGL_NO_SURFACE, EGL_NO_SURFACE, egl_.context);
// create EGL image from attribute list
EGLImageKHR image = EglCreateImageKHR(
egl_.display, EGL_NO_CONTEXT, EGL_LINUX_DMA_BUF_EXT, nullptr, attribs);
if (image == EGL_NO_IMAGE) {
RTC_LOG(LS_ERROR) << "Failed to record frame: Error creating EGLImage - "
<< FormatEGLError(EglGetError());
return false;
}
// create GL 2D texture for framebuffer
if (!texture_) {
GlGenTextures(1, &texture_);
GlTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
GlTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
GlTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
GlTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
}
GlBindTexture(GL_TEXTURE_2D, texture_);
GlEGLImageTargetTexture2DOES(GL_TEXTURE_2D, image);
if (!fbo_) {
GlGenFramebuffers(1, &fbo_);
}
GlBindFramebuffer(GL_FRAMEBUFFER, fbo_);
GlFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D,
texture_, 0);
if (GlCheckFramebufferStatus(GL_FRAMEBUFFER) != GL_FRAMEBUFFER_COMPLETE) {
RTC_LOG(LS_ERROR) << "Failed to bind DMA buf framebuffer";
EglDestroyImageKHR(egl_.display, image);
return false;
}
GLenum gl_format = GL_BGRA;
switch (format) {
case SPA_VIDEO_FORMAT_RGBx:
gl_format = GL_RGBA;
break;
case SPA_VIDEO_FORMAT_RGBA:
gl_format = GL_RGBA;
break;
case SPA_VIDEO_FORMAT_BGRx:
gl_format = GL_BGRA;
break;
default:
gl_format = GL_BGRA;
break;
}
GlReadPixels(offset.x(), offset.y(), buffer_size.width(),
buffer_size.height(), gl_format, GL_UNSIGNED_BYTE, data);
const GLenum error = GlGetError();
if (error) {
RTC_LOG(LS_ERROR) << "Failed to get image from DMA buffer.";
}
EglDestroyImageKHR(egl_.display, image);
return !error;
}
RTC_NO_SANITIZE("cfi-icall")
std::vector<uint64_t> EglDmaBuf::QueryDmaBufModifiers(uint32_t format) {
if (!egl_initialized_) {
return {};
}
// Explicit modifiers are not supported; return just DRM_FORMAT_MOD_INVALID,
// as we can still use modifier-less DMA-BUFs if we have the required
// extension.
if (EglQueryDmaBufFormatsEXT == nullptr ||
EglQueryDmaBufModifiersEXT == nullptr) {
return has_image_dma_buf_import_ext_
? std::vector<uint64_t>{DRM_FORMAT_MOD_INVALID}
: std::vector<uint64_t>{};
}
uint32_t drm_format = SpaPixelFormatToDrmFormat(format);
// Should never happen, as we control the list of supported formats.
RTC_DCHECK(drm_format != DRM_FORMAT_INVALID);
EGLint count = 0;
EGLBoolean success =
EglQueryDmaBufFormatsEXT(egl_.display, 0, nullptr, &count);
if (!success || !count) {
RTC_LOG(LS_WARNING) << "Cannot query the number of formats.";
return {DRM_FORMAT_MOD_INVALID};
}
std::vector<uint32_t> formats(count);
if (!EglQueryDmaBufFormatsEXT(egl_.display, count,
reinterpret_cast<EGLint*>(formats.data()),
&count)) {
RTC_LOG(LS_WARNING) << "Cannot query a list of formats.";
return {DRM_FORMAT_MOD_INVALID};
}
if (std::find(formats.begin(), formats.end(), drm_format) == formats.end()) {
RTC_LOG(LS_WARNING) << "Format " << drm_format
<< " not supported for modifiers.";
return {DRM_FORMAT_MOD_INVALID};
}
success = EglQueryDmaBufModifiersEXT(egl_.display, drm_format, 0, nullptr,
nullptr, &count);
if (!success || !count) {
RTC_LOG(LS_WARNING) << "Cannot query the number of modifiers.";
return {DRM_FORMAT_MOD_INVALID};
}
std::vector<uint64_t> modifiers(count);
if (!EglQueryDmaBufModifiersEXT(egl_.display, drm_format, count,
modifiers.data(), nullptr, &count)) {
RTC_LOG(LS_WARNING) << "Cannot query a list of modifiers.";
}
// Support modifier-less buffers
modifiers.push_back(DRM_FORMAT_MOD_INVALID);
return modifiers;
}
absl::optional<std::string> EglDmaBuf::GetRenderNode() {
int max_devices = drmGetDevices2(0, nullptr, 0);
if (max_devices <= 0) {
RTC_LOG(LS_ERROR) << "drmGetDevices2() has not found any devices (errno="
<< -max_devices << ")";
return absl::nullopt;
}
std::vector<drmDevicePtr> devices(max_devices);
int ret = drmGetDevices2(0, devices.data(), max_devices);
if (ret < 0) {
RTC_LOG(LS_ERROR) << "drmGetDevices2() returned an error " << ret;
return absl::nullopt;
}
std::string render_node;
for (const drmDevicePtr& device : devices) {
if (device->available_nodes & (1 << DRM_NODE_RENDER)) {
render_node = device->nodes[DRM_NODE_RENDER];
break;
}
}
drmFreeDevices(devices.data(), ret);
return render_node;
}
} // namespace webrtc


@@ -0,0 +1,74 @@
/*
* Copyright 2021 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef MODULES_DESKTOP_CAPTURE_LINUX_WAYLAND_EGL_DMABUF_H_
#define MODULES_DESKTOP_CAPTURE_LINUX_WAYLAND_EGL_DMABUF_H_
#include <epoxy/egl.h>
#include <epoxy/gl.h>
#include <gbm.h>
#include <memory>
#include <string>
#include <vector>
#include "absl/types/optional.h"
#include "modules/desktop_capture/desktop_geometry.h"
namespace webrtc {
class EglDmaBuf {
public:
struct EGLStruct {
std::vector<std::string> extensions;
EGLDisplay display = EGL_NO_DISPLAY;
EGLContext context = EGL_NO_CONTEXT;
};
struct PlaneData {
int32_t fd;
uint32_t stride;
uint32_t offset;
};
EglDmaBuf();
~EglDmaBuf();
// Returns whether the image was successfully imported from the given DMA-BUF
// and its parameters.
bool ImageFromDmaBuf(const DesktopSize& size,
uint32_t format,
const std::vector<PlaneData>& plane_datas,
uint64_t modifiers,
const DesktopVector& offset,
const DesktopSize& buffer_size,
uint8_t* data);
std::vector<uint64_t> QueryDmaBufModifiers(uint32_t format);
bool IsEglInitialized() const { return egl_initialized_; }
private:
bool GetClientExtensions(EGLDisplay dpy, EGLint name);
bool egl_initialized_ = false;
bool has_image_dma_buf_import_ext_ = false;
int32_t drm_fd_ = -1; // for GBM buffer mmap
gbm_device* gbm_device_ = nullptr; // for passed GBM buffer retrieval
GLuint fbo_ = 0;
GLuint texture_ = 0;
EGLStruct egl_;
absl::optional<std::string> GetRenderNode();
};
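// A usage sketch, assuming the caller has already mapped a PipeWire DMA-BUF
// frame into `planes` (one fd/stride/offset triple per plane, as in PlaneData
// above). `CopyDmaBufFrame` is a hypothetical helper used only for
// illustration; in WebRTC the real caller is the shared screencast stream.
inline bool CopyDmaBufFrame(EglDmaBuf& egl,
                            const DesktopSize& frame_size,
                            uint32_t spa_format,
                            uint64_t modifier,
                            const std::vector<EglDmaBuf::PlaneData>& planes,
                            uint8_t* out_pixels) {
  if (!egl.IsEglInitialized()) {
    return false;  // Fall back to shared-memory buffers instead of DMA-BUF.
  }
  // Import the buffer and read it back starting at the top-left corner.
  return egl.ImageFromDmaBuf(frame_size, spa_format, planes, modifier,
                             DesktopVector(0, 0), frame_size, out_pixels);
}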
} // namespace webrtc
#endif // MODULES_DESKTOP_CAPTURE_LINUX_WAYLAND_EGL_DMABUF_H_


@@ -0,0 +1,59 @@
/*
* Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "modules/desktop_capture/linux/wayland/mouse_cursor_monitor_pipewire.h"
#include <utility>
#include "modules/desktop_capture/desktop_capture_options.h"
#include "modules/desktop_capture/desktop_capturer.h"
#include "rtc_base/checks.h"
#include "rtc_base/logging.h"
namespace webrtc {
MouseCursorMonitorPipeWire::MouseCursorMonitorPipeWire(
const DesktopCaptureOptions& options)
: options_(options) {
sequence_checker_.Detach();
}
MouseCursorMonitorPipeWire::~MouseCursorMonitorPipeWire() {}
void MouseCursorMonitorPipeWire::Init(Callback* callback, Mode mode) {
RTC_DCHECK_RUN_ON(&sequence_checker_);
RTC_DCHECK(!callback_);
RTC_DCHECK(callback);
callback_ = callback;
mode_ = mode;
}
void MouseCursorMonitorPipeWire::Capture() {
RTC_DCHECK_RUN_ON(&sequence_checker_);
RTC_DCHECK(callback_);
std::unique_ptr<MouseCursor> mouse_cursor =
options_.screencast_stream()->CaptureCursor();
if (mouse_cursor && mouse_cursor->image()->data()) {
callback_->OnMouseCursor(mouse_cursor.release());
}
if (mode_ == SHAPE_AND_POSITION) {
absl::optional<DesktopVector> mouse_cursor_position =
options_.screencast_stream()->CaptureCursorPosition();
if (mouse_cursor_position) {
callback_->OnMouseCursorPosition(mouse_cursor_position.value());
}
}
}
} // namespace webrtc


@@ -0,0 +1,44 @@
/*
* Copyright 2022 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef MODULES_DESKTOP_CAPTURE_LINUX_WAYLAND_MOUSE_CURSOR_MONITOR_PIPEWIRE_H_
#define MODULES_DESKTOP_CAPTURE_LINUX_WAYLAND_MOUSE_CURSOR_MONITOR_PIPEWIRE_H_
#include <memory>
#include "api/scoped_refptr.h"
#include "api/sequence_checker.h"
#include "modules/desktop_capture/desktop_capture_options.h"
#include "modules/desktop_capture/desktop_capture_types.h"
#include "modules/desktop_capture/linux/wayland/shared_screencast_stream.h"
#include "modules/desktop_capture/mouse_cursor.h"
#include "modules/desktop_capture/mouse_cursor_monitor.h"
#include "rtc_base/system/no_unique_address.h"
namespace webrtc {
class MouseCursorMonitorPipeWire : public MouseCursorMonitor {
public:
explicit MouseCursorMonitorPipeWire(const DesktopCaptureOptions& options);
~MouseCursorMonitorPipeWire() override;
// MouseCursorMonitor:
void Init(Callback* callback, Mode mode) override;
void Capture() override;
DesktopCaptureOptions options_ RTC_GUARDED_BY(sequence_checker_);
Callback* callback_ RTC_GUARDED_BY(sequence_checker_) = nullptr;
Mode mode_ RTC_GUARDED_BY(sequence_checker_) = SHAPE_AND_POSITION;
RTC_NO_UNIQUE_ADDRESS SequenceChecker sequence_checker_;
};
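// A sketch of a MouseCursorMonitor::Callback consuming the data produced by
// Capture(). `CursorSink` is a hypothetical class used only for illustration.
// Ownership of the MouseCursor passed to OnMouseCursor() is transferred to the
// callback, which is why Capture() hands it over with release().
class CursorSink : public MouseCursorMonitor::Callback {
 public:
  void OnMouseCursor(MouseCursor* cursor) override {
    cursor_.reset(cursor);  // Take ownership of the latest cursor shape.
  }
  void OnMouseCursorPosition(const DesktopVector& position) override {
    position_ = position;  // Only delivered in SHAPE_AND_POSITION mode.
  }

 private:
  std::unique_ptr<MouseCursor> cursor_;
  DesktopVector position_;
};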
} // namespace webrtc
#endif // MODULES_DESKTOP_CAPTURE_LINUX_WAYLAND_MOUSE_CURSOR_MONITOR_PIPEWIRE_H_


@@ -0,0 +1,17 @@
/*
* Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef MODULES_DESKTOP_CAPTURE_LINUX_WAYLAND_PORTAL_REQUEST_RESPONSE_H_
#define MODULES_DESKTOP_CAPTURE_LINUX_WAYLAND_PORTAL_REQUEST_RESPONSE_H_
// TODO(bugs.webrtc.org/14187): remove when all users are gone
#include "modules/portal/portal_request_response.h"
#endif // MODULES_DESKTOP_CAPTURE_LINUX_WAYLAND_PORTAL_REQUEST_RESPONSE_H_


@@ -0,0 +1,35 @@
/*
* Copyright 2022 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "modules/desktop_capture/linux/wayland/restore_token_manager.h"
namespace webrtc {
// static
RestoreTokenManager& RestoreTokenManager::GetInstance() {
static webrtc::RestoreTokenManager* manager = new RestoreTokenManager();
return *manager;
}
void RestoreTokenManager::AddToken(DesktopCapturer::SourceId id,
const std::string& token) {
restore_tokens_.insert({id, token});
}
std::string RestoreTokenManager::GetToken(DesktopCapturer::SourceId id) {
const std::string token = restore_tokens_[id];
return token;
}
DesktopCapturer::SourceId RestoreTokenManager::GetUnusedId() {
return ++last_source_id_;
}
} // namespace webrtc


@@ -0,0 +1,46 @@
/*
* Copyright 2022 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef MODULES_DESKTOP_CAPTURE_LINUX_WAYLAND_RESTORE_TOKEN_MANAGER_H_
#define MODULES_DESKTOP_CAPTURE_LINUX_WAYLAND_RESTORE_TOKEN_MANAGER_H_
#include <mutex>
#include <string>
#include <unordered_map>
#include "modules/desktop_capture/desktop_capturer.h"
namespace webrtc {
class RestoreTokenManager {
public:
RestoreTokenManager(const RestoreTokenManager& manager) = delete;
RestoreTokenManager& operator=(const RestoreTokenManager& manager) = delete;
static RestoreTokenManager& GetInstance();
void AddToken(DesktopCapturer::SourceId id, const std::string& token);
std::string GetToken(DesktopCapturer::SourceId id);
// Returns a source ID which does not have any token associated with it yet.
DesktopCapturer::SourceId GetUnusedId();
private:
RestoreTokenManager() = default;
~RestoreTokenManager() = default;
DesktopCapturer::SourceId last_source_id_ = 0;
std::unordered_map<DesktopCapturer::SourceId, std::string> restore_tokens_;
};
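// A sketch of the restore-token round trip driven by BaseCapturerPipeWire.
// The literal token below is a placeholder; real tokens are handed out by
// xdg-desktop-portal when a ScreenCast session is created with a persist mode.
inline void RestoreTokenRoundTripExample() {
  RestoreTokenManager& manager = RestoreTokenManager::GetInstance();
  // 1. A capturer reserves a fresh id to report from GetSourceList().
  DesktopCapturer::SourceId id = manager.GetUnusedId();
  // 2. Once the portal grants the session, the returned token is recorded.
  manager.AddToken(id, "placeholder-restore-token");
  // 3. A later capture of the same source fetches the token again and passes
  //    it back to the portal (ScreenCastPortal::SetRestoreToken) so the user
  //    is not prompted a second time.
  std::string token = manager.GetToken(id);
  (void)token;
}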
} // namespace webrtc
#endif // MODULES_DESKTOP_CAPTURE_LINUX_WAYLAND_RESTORE_TOKEN_MANAGER_H_


@@ -0,0 +1,17 @@
/*
* Copyright 2022 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef MODULES_DESKTOP_CAPTURE_LINUX_WAYLAND_SCOPED_GLIB_H_
#define MODULES_DESKTOP_CAPTURE_LINUX_WAYLAND_SCOPED_GLIB_H_
// TODO(bugs.webrtc.org/14187): remove when all users are gone
#include "modules/portal/scoped_glib.h"
#endif // MODULES_DESKTOP_CAPTURE_LINUX_WAYLAND_SCOPED_GLIB_H_


@@ -0,0 +1,127 @@
/*
* Copyright 2022 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "modules/desktop_capture/linux/wayland/screen_capture_portal_interface.h"
#include <string>
#include "modules/portal/xdg_desktop_portal_utils.h"
#include "rtc_base/logging.h"
namespace webrtc {
namespace xdg_portal {
void ScreenCapturePortalInterface::RequestSessionUsingProxy(
GAsyncResult* result) {
Scoped<GError> error;
GDBusProxy* proxy = g_dbus_proxy_new_finish(result, error.receive());
if (!proxy) {
// Ignore the error caused by the user cancelling the request via `cancellable_`.
if (g_error_matches(error.get(), G_IO_ERROR, G_IO_ERROR_CANCELLED))
return;
RTC_LOG(LS_ERROR) << "Failed to get a proxy for the portal: "
<< error->message;
OnPortalDone(RequestResponse::kError);
return;
}
RTC_LOG(LS_INFO) << "Successfully created proxy for the portal.";
RequestSession(proxy);
}
void ScreenCapturePortalInterface::OnSessionRequestResult(
GDBusProxy* proxy,
GAsyncResult* result) {
Scoped<GError> error;
Scoped<GVariant> variant(
g_dbus_proxy_call_finish(proxy, result, error.receive()));
if (!variant) {
// Ignore the error caused by the user cancelling the request via `cancellable_`.
if (g_error_matches(error.get(), G_IO_ERROR, G_IO_ERROR_CANCELLED))
return;
RTC_LOG(LS_ERROR) << "Failed to request session: " << error->message;
OnPortalDone(RequestResponse::kError);
return;
}
RTC_LOG(LS_INFO) << "Initializing the session.";
Scoped<char> handle;
g_variant_get_child(variant.get(), /*index=*/0, /*format_string=*/"o",
handle.receive());
if (!handle) {
RTC_LOG(LS_ERROR) << "Failed to initialize the session.";
OnPortalDone(RequestResponse::kError);
return;
}
}
void ScreenCapturePortalInterface::RegisterSessionClosedSignalHandler(
const SessionClosedSignalHandler session_close_signal_handler,
GVariant* parameters,
GDBusConnection* connection,
std::string& session_handle,
guint& session_closed_signal_id) {
// Default to 2 ("ended in some other way"), per the org.freedesktop.portal
// Request::Response convention, in case g_variant_get() does not set it.
uint32_t portal_response = 2;
Scoped<GVariant> response_data;
g_variant_get(parameters, /*format_string=*/"(u@a{sv})", &portal_response,
response_data.receive());
if (RequestResponseFromPortalResponse(portal_response) !=
RequestResponse::kSuccess) {
RTC_LOG(LS_ERROR) << "Failed to request the session subscription.";
OnPortalDone(RequestResponse::kError);
return;
}
Scoped<GVariant> g_session_handle(
g_variant_lookup_value(response_data.get(), /*key=*/"session_handle",
/*expected_type=*/nullptr));
session_handle = g_variant_get_string(
/*value=*/g_session_handle.get(), /*length=*/nullptr);
if (session_handle.empty()) {
RTC_LOG(LS_ERROR) << "Could not get session handle despite valid response";
OnPortalDone(RequestResponse::kError);
return;
}
session_closed_signal_id = g_dbus_connection_signal_subscribe(
connection, kDesktopBusName, kSessionInterfaceName, /*member=*/"Closed",
session_handle.c_str(), /*arg0=*/nullptr, G_DBUS_SIGNAL_FLAGS_NONE,
session_close_signal_handler, this, /*user_data_free_func=*/nullptr);
}
void ScreenCapturePortalInterface::OnStartRequestResult(GDBusProxy* proxy,
GAsyncResult* result) {
Scoped<GError> error;
Scoped<GVariant> variant(
g_dbus_proxy_call_finish(proxy, result, error.receive()));
if (!variant) {
if (g_error_matches(error.get(), G_IO_ERROR, G_IO_ERROR_CANCELLED))
return;
RTC_LOG(LS_ERROR) << "Failed to start the portal session: "
<< error->message;
OnPortalDone(RequestResponse::kError);
return;
}
Scoped<char> handle;
g_variant_get_child(variant.get(), 0, "o", handle.receive());
if (!handle) {
RTC_LOG(LS_ERROR) << "Failed to initialize the start portal session.";
OnPortalDone(RequestResponse::kError);
return;
}
RTC_LOG(LS_INFO) << "Subscribed to the start signal.";
}
} // namespace xdg_portal
} // namespace webrtc


@@ -0,0 +1,76 @@
/*
* Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef MODULES_DESKTOP_CAPTURE_LINUX_WAYLAND_SCREEN_CAPTURE_PORTAL_INTERFACE_H_
#define MODULES_DESKTOP_CAPTURE_LINUX_WAYLAND_SCREEN_CAPTURE_PORTAL_INTERFACE_H_
#include <gio/gio.h>
#include <string>
#include "modules/portal/portal_request_response.h"
#include "modules/portal/scoped_glib.h"
#include "modules/portal/xdg_desktop_portal_utils.h"
#include "modules/portal/xdg_session_details.h"
namespace webrtc {
namespace xdg_portal {
using SessionClosedSignalHandler = void (*)(GDBusConnection*,
const char*,
const char*,
const char*,
const char*,
GVariant*,
gpointer);
// A base class for XDG desktop portals that can capture desktop/screen.
// Note: downstream clients inherit from this class so it is advisable to
// provide a default implementation of any new virtual methods that may be added
// to this class.
class RTC_EXPORT ScreenCapturePortalInterface {
public:
virtual ~ScreenCapturePortalInterface() {}
// Gets details about the session such as session handle.
virtual xdg_portal::SessionDetails GetSessionDetails() { return {}; }
// Starts the portal setup.
virtual void Start() {}
// Stops and cleans up the portal.
virtual void Stop() {}
// Notifies observers about the success/fail state of the portal
// request/response.
virtual void OnPortalDone(xdg_portal::RequestResponse result) {}
// Sends a create session request to the portal.
virtual void RequestSession(GDBusProxy* proxy) {}
// The following methods should not be made virtual, as they share a common
// implementation across portals.
// Requests portal session using the proxy object.
void RequestSessionUsingProxy(GAsyncResult* result);
// Handles the session request result.
void OnSessionRequestResult(GDBusProxy* proxy, GAsyncResult* result);
// Subscribes to session close signal and sets up a handler for it.
void RegisterSessionClosedSignalHandler(
const SessionClosedSignalHandler session_close_signal_handler,
GVariant* parameters,
GDBusConnection* connection,
std::string& session_handle,
guint& session_closed_signal_id);
// Handles the result of session start request.
void OnStartRequestResult(GDBusProxy* proxy, GAsyncResult* result);
};
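// A skeletal sketch of a concrete portal built on the helpers above; the
// class and its bodies are hypothetical and only outline which hooks a
// subclass overrides. ScreenCastPortal (screencast_portal.cc in this commit)
// is the real in-tree implementation of this pattern.
class ExamplePortal : public ScreenCapturePortalInterface {
 public:
  void Start() override {
    // Request a D-Bus proxy for the portal interface asynchronously and feed
    // the result into RequestSessionUsingProxy() from the ready callback.
  }
  void RequestSession(GDBusProxy* proxy) override {
    // Keep the proxy/connection, then issue CreateSession; the reply is
    // routed through OnSessionRequestResult().
  }
  void OnPortalDone(RequestResponse result) override {
    // Propagate success/cancel/error to whoever owns this portal and clean
    // up on failure.
  }
};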
} // namespace xdg_portal
} // namespace webrtc
#endif // MODULES_DESKTOP_CAPTURE_LINUX_WAYLAND_SCREEN_CAPTURE_PORTAL_INTERFACE_H_


@@ -0,0 +1,471 @@
/*
* Copyright 2022 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "modules/desktop_capture/linux/wayland/screencast_portal.h"
#include <gio/gunixfdlist.h>
#include <glib-object.h>
#include "modules/portal/scoped_glib.h"
#include "modules/portal/xdg_desktop_portal_utils.h"
#include "rtc_base/checks.h"
#include "rtc_base/logging.h"
namespace webrtc {
namespace {
using xdg_portal::kScreenCastInterfaceName;
using xdg_portal::PrepareSignalHandle;
using xdg_portal::RequestResponse;
using xdg_portal::RequestResponseFromPortalResponse;
using xdg_portal::RequestSessionProxy;
using xdg_portal::SetupRequestResponseSignal;
using xdg_portal::SetupSessionRequestHandlers;
using xdg_portal::StartSessionRequest;
using xdg_portal::TearDownSession;
} // namespace
// static
ScreenCastPortal::CaptureSourceType ScreenCastPortal::ToCaptureSourceType(
CaptureType type) {
switch (type) {
case CaptureType::kScreen:
return ScreenCastPortal::CaptureSourceType::kScreen;
case CaptureType::kWindow:
return ScreenCastPortal::CaptureSourceType::kWindow;
case CaptureType::kAnyScreenContent:
return ScreenCastPortal::CaptureSourceType::kAnyScreenContent;
}
}
ScreenCastPortal::ScreenCastPortal(CaptureType type, PortalNotifier* notifier)
: ScreenCastPortal(type,
notifier,
OnProxyRequested,
OnSourcesRequestResponseSignal,
this) {}
ScreenCastPortal::ScreenCastPortal(
CaptureType type,
PortalNotifier* notifier,
ProxyRequestResponseHandler proxy_request_response_handler,
SourcesRequestResponseSignalHandler sources_request_response_signal_handler,
gpointer user_data,
bool prefer_cursor_embedded)
: notifier_(notifier),
capture_source_type_(ToCaptureSourceType(type)),
cursor_mode_(prefer_cursor_embedded ? CursorMode::kEmbedded
: CursorMode::kMetadata),
proxy_request_response_handler_(proxy_request_response_handler),
sources_request_response_signal_handler_(
sources_request_response_signal_handler),
user_data_(user_data) {}
ScreenCastPortal::~ScreenCastPortal() {
Stop();
}
void ScreenCastPortal::Stop() {
UnsubscribeSignalHandlers();
TearDownSession(std::move(session_handle_), proxy_, cancellable_,
connection_);
session_handle_ = "";
cancellable_ = nullptr;
proxy_ = nullptr;
restore_token_ = "";
if (pw_fd_ != kInvalidPipeWireFd) {
close(pw_fd_);
pw_fd_ = kInvalidPipeWireFd;
}
}
void ScreenCastPortal::UnsubscribeSignalHandlers() {
if (start_request_signal_id_) {
g_dbus_connection_signal_unsubscribe(connection_, start_request_signal_id_);
start_request_signal_id_ = 0;
}
if (sources_request_signal_id_) {
g_dbus_connection_signal_unsubscribe(connection_,
sources_request_signal_id_);
sources_request_signal_id_ = 0;
}
if (session_request_signal_id_) {
g_dbus_connection_signal_unsubscribe(connection_,
session_request_signal_id_);
session_request_signal_id_ = 0;
}
}
void ScreenCastPortal::SetSessionDetails(
const xdg_portal::SessionDetails& session_details) {
if (session_details.proxy) {
proxy_ = session_details.proxy;
connection_ = g_dbus_proxy_get_connection(proxy_);
}
if (session_details.cancellable) {
cancellable_ = session_details.cancellable;
}
if (!session_details.session_handle.empty()) {
session_handle_ = session_details.session_handle;
}
if (session_details.pipewire_stream_node_id) {
pw_stream_node_id_ = session_details.pipewire_stream_node_id;
}
}
void ScreenCastPortal::Start() {
cancellable_ = g_cancellable_new();
RequestSessionProxy(kScreenCastInterfaceName, proxy_request_response_handler_,
cancellable_, this);
}
xdg_portal::SessionDetails ScreenCastPortal::GetSessionDetails() {
return {}; // No-op
}
void ScreenCastPortal::OnPortalDone(RequestResponse result) {
notifier_->OnScreenCastRequestResult(result, pw_stream_node_id_, pw_fd_);
if (result != RequestResponse::kSuccess) {
Stop();
}
}
// static
void ScreenCastPortal::OnProxyRequested(GObject* gobject,
GAsyncResult* result,
gpointer user_data) {
static_cast<ScreenCastPortal*>(user_data)->RequestSessionUsingProxy(result);
}
void ScreenCastPortal::RequestSession(GDBusProxy* proxy) {
proxy_ = proxy;
connection_ = g_dbus_proxy_get_connection(proxy_);
SetupSessionRequestHandlers(
"webrtc", OnSessionRequested, OnSessionRequestResponseSignal, connection_,
proxy_, cancellable_, portal_handle_, session_request_signal_id_, this);
}
// static
void ScreenCastPortal::OnSessionRequested(GDBusProxy* proxy,
GAsyncResult* result,
gpointer user_data) {
static_cast<ScreenCastPortal*>(user_data)->OnSessionRequestResult(proxy,
result);
}
// static
void ScreenCastPortal::OnSessionRequestResponseSignal(
GDBusConnection* connection,
const char* sender_name,
const char* object_path,
const char* interface_name,
const char* signal_name,
GVariant* parameters,
gpointer user_data) {
ScreenCastPortal* that = static_cast<ScreenCastPortal*>(user_data);
RTC_DCHECK(that);
that->RegisterSessionClosedSignalHandler(
OnSessionClosedSignal, parameters, that->connection_,
that->session_handle_, that->session_closed_signal_id_);
// Do not continue if we don't get a session_handle back. The call above has
// already notified the capturer of the failure, but we would otherwise go on
// to make the following request and crash on it.
if (!that->session_handle_.empty()) {
that->SourcesRequest();
}
}
// static
void ScreenCastPortal::OnSessionClosedSignal(GDBusConnection* connection,
const char* sender_name,
const char* object_path,
const char* interface_name,
const char* signal_name,
GVariant* parameters,
gpointer user_data) {
ScreenCastPortal* that = static_cast<ScreenCastPortal*>(user_data);
RTC_DCHECK(that);
RTC_LOG(LS_INFO) << "Received closed signal from session.";
that->notifier_->OnScreenCastSessionClosed();
// Unsubscribe from the signal and free the session handle to avoid calling
// Session::Close from the destructor since it's already closed
g_dbus_connection_signal_unsubscribe(that->connection_,
that->session_closed_signal_id_);
}
void ScreenCastPortal::SourcesRequest() {
GVariantBuilder builder;
Scoped<char> variant_string;
g_variant_builder_init(&builder, G_VARIANT_TYPE_VARDICT);
// We want to record monitor content.
g_variant_builder_add(
&builder, "{sv}", "types",
g_variant_new_uint32(static_cast<uint32_t>(capture_source_type_)));
// We don't want to allow selection of multiple sources.
g_variant_builder_add(&builder, "{sv}", "multiple",
g_variant_new_boolean(false));
Scoped<GVariant> cursorModesVariant(
g_dbus_proxy_get_cached_property(proxy_, "AvailableCursorModes"));
if (cursorModesVariant.get()) {
uint32_t modes = 0;
g_variant_get(cursorModesVariant.get(), "u", &modes);
// Only make the request if this mode is advertised by the portal
// implementation.
if (modes & static_cast<uint32_t>(cursor_mode_)) {
g_variant_builder_add(
&builder, "{sv}", "cursor_mode",
g_variant_new_uint32(static_cast<uint32_t>(cursor_mode_)));
}
}
Scoped<GVariant> versionVariant(
g_dbus_proxy_get_cached_property(proxy_, "version"));
if (versionVariant.get()) {
uint32_t version = 0;
g_variant_get(versionVariant.get(), "u", &version);
// Make request only if xdg-desktop-portal has required API version
if (version >= 4) {
g_variant_builder_add(
&builder, "{sv}", "persist_mode",
g_variant_new_uint32(static_cast<uint32_t>(persist_mode_)));
if (!restore_token_.empty()) {
g_variant_builder_add(&builder, "{sv}", "restore_token",
g_variant_new_string(restore_token_.c_str()));
}
}
}
variant_string = g_strdup_printf("webrtc%d", g_random_int_range(0, G_MAXINT));
g_variant_builder_add(&builder, "{sv}", "handle_token",
g_variant_new_string(variant_string.get()));
sources_handle_ = PrepareSignalHandle(variant_string.get(), connection_);
sources_request_signal_id_ = SetupRequestResponseSignal(
sources_handle_.c_str(), sources_request_response_signal_handler_,
user_data_, connection_);
RTC_LOG(LS_INFO) << "Requesting sources from the screen cast session.";
g_dbus_proxy_call(
proxy_, "SelectSources",
g_variant_new("(oa{sv})", session_handle_.c_str(), &builder),
G_DBUS_CALL_FLAGS_NONE, /*timeout=*/-1, cancellable_,
reinterpret_cast<GAsyncReadyCallback>(OnSourcesRequested), this);
}
// static
void ScreenCastPortal::OnSourcesRequested(GDBusProxy* proxy,
GAsyncResult* result,
gpointer user_data) {
ScreenCastPortal* that = static_cast<ScreenCastPortal*>(user_data);
RTC_DCHECK(that);
Scoped<GError> error;
Scoped<GVariant> variant(
g_dbus_proxy_call_finish(proxy, result, error.receive()));
if (!variant) {
if (g_error_matches(error.get(), G_IO_ERROR, G_IO_ERROR_CANCELLED))
return;
RTC_LOG(LS_ERROR) << "Failed to request the sources: " << error->message;
that->OnPortalDone(RequestResponse::kError);
return;
}
RTC_LOG(LS_INFO) << "Sources requested from the screen cast session.";
Scoped<char> handle;
g_variant_get_child(variant.get(), 0, "o", handle.receive());
if (!handle) {
RTC_LOG(LS_ERROR) << "Failed to initialize the screen cast session.";
if (that->sources_request_signal_id_) {
g_dbus_connection_signal_unsubscribe(that->connection_,
that->sources_request_signal_id_);
that->sources_request_signal_id_ = 0;
}
that->OnPortalDone(RequestResponse::kError);
return;
}
RTC_LOG(LS_INFO) << "Subscribed to sources signal.";
}
// static
void ScreenCastPortal::OnSourcesRequestResponseSignal(
GDBusConnection* connection,
const char* sender_name,
const char* object_path,
const char* interface_name,
const char* signal_name,
GVariant* parameters,
gpointer user_data) {
ScreenCastPortal* that = static_cast<ScreenCastPortal*>(user_data);
RTC_DCHECK(that);
RTC_LOG(LS_INFO) << "Received sources signal from session.";
uint32_t portal_response;
g_variant_get(parameters, "(u@a{sv})", &portal_response, nullptr);
if (portal_response) {
RTC_LOG(LS_ERROR)
<< "Failed to select sources for the screen cast session.";
that->OnPortalDone(RequestResponse::kError);
return;
}
that->StartRequest();
}
void ScreenCastPortal::StartRequest() {
StartSessionRequest("webrtc", session_handle_, OnStartRequestResponseSignal,
OnStartRequested, proxy_, connection_, cancellable_,
start_request_signal_id_, start_handle_, this);
}
// static
void ScreenCastPortal::OnStartRequested(GDBusProxy* proxy,
GAsyncResult* result,
gpointer user_data) {
static_cast<ScreenCastPortal*>(user_data)->OnStartRequestResult(proxy,
result);
}
// static
void ScreenCastPortal::OnStartRequestResponseSignal(GDBusConnection* connection,
const char* sender_name,
const char* object_path,
const char* interface_name,
const char* signal_name,
GVariant* parameters,
gpointer user_data) {
ScreenCastPortal* that = static_cast<ScreenCastPortal*>(user_data);
RTC_DCHECK(that);
RTC_LOG(LS_INFO) << "Start signal received.";
uint32_t portal_response;
Scoped<GVariant> response_data;
Scoped<GVariantIter> iter;
Scoped<char> restore_token;
g_variant_get(parameters, "(u@a{sv})", &portal_response,
response_data.receive());
if (portal_response || !response_data) {
RTC_LOG(LS_ERROR) << "Failed to start the screen cast session.";
that->OnPortalDone(RequestResponseFromPortalResponse(portal_response));
return;
}
// Array of PipeWire streams. See
// https://github.com/flatpak/xdg-desktop-portal/blob/main/data/org.freedesktop.portal.ScreenCast.xml
// documentation for <method name="Start">.
if (g_variant_lookup(response_data.get(), "streams", "a(ua{sv})",
iter.receive())) {
Scoped<GVariant> variant;
while (g_variant_iter_next(iter.get(), "@(ua{sv})", variant.receive())) {
uint32_t stream_id;
uint32_t type;
Scoped<GVariant> options;
g_variant_get(variant.get(), "(u@a{sv})", &stream_id, options.receive());
RTC_DCHECK(options.get());
if (g_variant_lookup(options.get(), "source_type", "u", &type)) {
that->capture_source_type_ =
static_cast<ScreenCastPortal::CaptureSourceType>(type);
}
that->pw_stream_node_id_ = stream_id;
break;
}
}
if (g_variant_lookup(response_data.get(), "restore_token", "s",
restore_token.receive())) {
that->restore_token_ = restore_token.get();
}
that->OpenPipeWireRemote();
}
uint32_t ScreenCastPortal::pipewire_stream_node_id() {
return pw_stream_node_id_;
}
void ScreenCastPortal::SetPersistMode(ScreenCastPortal::PersistMode mode) {
persist_mode_ = mode;
}
void ScreenCastPortal::SetRestoreToken(const std::string& token) {
restore_token_ = token;
}
std::string ScreenCastPortal::RestoreToken() const {
return restore_token_;
}
void ScreenCastPortal::OpenPipeWireRemote() {
GVariantBuilder builder;
g_variant_builder_init(&builder, G_VARIANT_TYPE_VARDICT);
RTC_LOG(LS_INFO) << "Opening the PipeWire remote.";
g_dbus_proxy_call_with_unix_fd_list(
proxy_, "OpenPipeWireRemote",
g_variant_new("(oa{sv})", session_handle_.c_str(), &builder),
G_DBUS_CALL_FLAGS_NONE, /*timeout=*/-1, /*fd_list=*/nullptr, cancellable_,
reinterpret_cast<GAsyncReadyCallback>(OnOpenPipeWireRemoteRequested),
this);
}
// static
void ScreenCastPortal::OnOpenPipeWireRemoteRequested(GDBusProxy* proxy,
GAsyncResult* result,
gpointer user_data) {
ScreenCastPortal* that = static_cast<ScreenCastPortal*>(user_data);
RTC_DCHECK(that);
Scoped<GError> error;
Scoped<GUnixFDList> outlist;
Scoped<GVariant> variant(g_dbus_proxy_call_with_unix_fd_list_finish(
proxy, outlist.receive(), result, error.receive()));
if (!variant) {
if (g_error_matches(error.get(), G_IO_ERROR, G_IO_ERROR_CANCELLED))
return;
RTC_LOG(LS_ERROR) << "Failed to open the PipeWire remote: "
<< error->message;
that->OnPortalDone(RequestResponse::kError);
return;
}
int32_t index;
g_variant_get(variant.get(), "(h)", &index);
that->pw_fd_ = g_unix_fd_list_get(outlist.get(), index, error.receive());
if (that->pw_fd_ == kInvalidPipeWireFd) {
RTC_LOG(LS_ERROR) << "Failed to get file descriptor from the list: "
<< error->message;
that->OnPortalDone(RequestResponse::kError);
return;
}
that->OnPortalDone(RequestResponse::kSuccess);
}
} // namespace webrtc
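
A minimal sketch of the Request/Response convention that every portal call above relies on: a method such as SelectSources or Start returns a request object path immediately, and the real result arrives later as a Response(u, a{sv}) signal on that path. The helper names below (SubscribeToResponse, OnResponseSignal) are hypothetical; the interface and signal names follow the xdg-desktop-portal specification.

#include <gio/gio.h>

// Parses the Response(u, a{sv}) payload emitted on the request object path.
static void OnResponseSignal(GDBusConnection* connection,
                             const char* sender_name,
                             const char* object_path,
                             const char* interface_name,
                             const char* signal_name,
                             GVariant* parameters,
                             gpointer user_data) {
  guint32 response = 0;
  GVariant* results = nullptr;
  // Per the portal spec: 0 = success, 1 = cancelled by the user, 2 = other.
  g_variant_get(parameters, "(u@a{sv})", &response, &results);
  // ... inspect |results| (e.g. "streams", "restore_token") ...
  g_variant_unref(results);
}

// |request_handle| is the object path returned by the portal method call.
static guint SubscribeToResponse(GDBusConnection* connection,
                                 const char* request_handle) {
  return g_dbus_connection_signal_subscribe(
      connection, "org.freedesktop.portal.Desktop",
      "org.freedesktop.portal.Request", "Response", request_handle,
      /*arg0=*/nullptr, G_DBUS_SIGNAL_FLAGS_NO_MATCH_RULE, OnResponseSignal,
      /*user_data=*/nullptr, /*user_data_free_func=*/nullptr);
}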


@ -0,0 +1,219 @@
/*
* Copyright 2022 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef MODULES_DESKTOP_CAPTURE_LINUX_WAYLAND_SCREENCAST_PORTAL_H_
#define MODULES_DESKTOP_CAPTURE_LINUX_WAYLAND_SCREENCAST_PORTAL_H_
#include <gio/gio.h>
#include <string>
#include "modules/desktop_capture/desktop_capture_types.h"
#include "modules/desktop_capture/linux/wayland/screen_capture_portal_interface.h"
#include "modules/portal/pipewire_utils.h"
#include "modules/portal/portal_request_response.h"
#include "modules/portal/xdg_desktop_portal_utils.h"
#include "modules/portal/xdg_session_details.h"
namespace webrtc {
class RTC_EXPORT ScreenCastPortal
: public xdg_portal::ScreenCapturePortalInterface {
public:
using ProxyRequestResponseHandler = void (*)(GObject* object,
GAsyncResult* result,
gpointer user_data);
using SourcesRequestResponseSignalHandler =
void (*)(GDBusConnection* connection,
const char* sender_name,
const char* object_path,
const char* interface_name,
const char* signal_name,
GVariant* parameters,
gpointer user_data);
// Values are set based on cursor mode property in
// xdg-desktop-portal/screencast
// https://github.com/flatpak/xdg-desktop-portal/blob/main/data/org.freedesktop.portal.ScreenCast.xml
enum class CursorMode : uint32_t {
// Mouse cursor will not be included in any form
kHidden = 0b01,
// Mouse cursor will be part of the screen content
kEmbedded = 0b10,
    // Mouse cursor information will be sent separately in the form of metadata
kMetadata = 0b100
};
// Values are set based on persist mode property in
// xdg-desktop-portal/screencast
// https://github.com/flatpak/xdg-desktop-portal/blob/main/data/org.freedesktop.portal.ScreenCast.xml
enum class PersistMode : uint32_t {
    // Do not allow the stream to be restored
kDoNotPersist = 0b00,
// The restore token is valid as long as the application is alive. It's
// stored in memory and revoked when the application closes its DBus
// connection
kTransient = 0b01,
    // The restore token is stored on disk and is valid until the user manually
// revokes it
kPersistent = 0b10
};
// Interface that must be implemented by the ScreenCastPortal consumers.
class PortalNotifier {
public:
virtual void OnScreenCastRequestResult(xdg_portal::RequestResponse result,
uint32_t stream_node_id,
int fd) = 0;
virtual void OnScreenCastSessionClosed() = 0;
protected:
PortalNotifier() = default;
virtual ~PortalNotifier() = default;
};
ScreenCastPortal(CaptureType type, PortalNotifier* notifier);
ScreenCastPortal(CaptureType type,
PortalNotifier* notifier,
ProxyRequestResponseHandler proxy_request_response_handler,
SourcesRequestResponseSignalHandler
sources_request_response_signal_handler,
gpointer user_data,
// TODO(chromium:1291247): Remove the default option once
// downstream has been adjusted.
bool prefer_cursor_embedded = false);
~ScreenCastPortal();
  // Initialize ScreenCastPortal with a series of DBus calls where we try to
  // obtain all the required information, such as the PipeWire file descriptor
  // and the PipeWire stream node ID.
  //
  // The notifier is told whether the communication with xdg-desktop-portal
  // succeeded; only after a successful result is all the information required
  // to continue working with PipeWire available.
void Start() override;
void Stop() override;
xdg_portal::SessionDetails GetSessionDetails() override;
// Method to notify the reason for failure of a portal request.
void OnPortalDone(xdg_portal::RequestResponse result) override;
// Sends a create session request to the portal.
void RequestSession(GDBusProxy* proxy) override;
  // Methods used by the remote desktop portal to set up a common session with
  // the screen cast portal.
void SetSessionDetails(const xdg_portal::SessionDetails& session_details);
uint32_t pipewire_stream_node_id();
void SourcesRequest();
void OpenPipeWireRemote();
// ScreenCast specific methods for stream restoration
void SetPersistMode(ScreenCastPortal::PersistMode mode);
void SetRestoreToken(const std::string& token);
std::string RestoreToken() const;
private:
// Values are set based on source type property in
// xdg-desktop-portal/screencast
// https://github.com/flatpak/xdg-desktop-portal/blob/main/data/org.freedesktop.portal.ScreenCast.xml
enum class CaptureSourceType : uint32_t {
kScreen = 0b01,
kWindow = 0b10,
kAnyScreenContent = kScreen | kWindow
};
static CaptureSourceType ToCaptureSourceType(CaptureType type);
PortalNotifier* notifier_;
  // ID of the PipeWire stream we will be connecting to
  uint32_t pw_stream_node_id_ = 0;
  // File descriptor of the PipeWire socket
  int pw_fd_ = kInvalidPipeWireFd;
  // Restore token that can be used to restore a previous session
  std::string restore_token_;
CaptureSourceType capture_source_type_ =
ScreenCastPortal::CaptureSourceType::kScreen;
CursorMode cursor_mode_ = CursorMode::kMetadata;
PersistMode persist_mode_ = ScreenCastPortal::PersistMode::kDoNotPersist;
ProxyRequestResponseHandler proxy_request_response_handler_;
SourcesRequestResponseSignalHandler sources_request_response_signal_handler_;
gpointer user_data_;
GDBusConnection* connection_ = nullptr;
GDBusProxy* proxy_ = nullptr;
GCancellable* cancellable_ = nullptr;
std::string portal_handle_;
std::string session_handle_;
std::string sources_handle_;
std::string start_handle_;
guint session_request_signal_id_ = 0;
guint sources_request_signal_id_ = 0;
guint start_request_signal_id_ = 0;
guint session_closed_signal_id_ = 0;
void UnsubscribeSignalHandlers();
static void OnProxyRequested(GObject* object,
GAsyncResult* result,
gpointer user_data);
static void OnSessionRequested(GDBusProxy* proxy,
GAsyncResult* result,
gpointer user_data);
static void OnSessionRequestResponseSignal(GDBusConnection* connection,
const char* sender_name,
const char* object_path,
const char* interface_name,
const char* signal_name,
GVariant* parameters,
gpointer user_data);
static void OnSessionClosedSignal(GDBusConnection* connection,
const char* sender_name,
const char* object_path,
const char* interface_name,
const char* signal_name,
GVariant* parameters,
gpointer user_data);
static void OnSourcesRequested(GDBusProxy* proxy,
GAsyncResult* result,
gpointer user_data);
static void OnSourcesRequestResponseSignal(GDBusConnection* connection,
const char* sender_name,
const char* object_path,
const char* interface_name,
const char* signal_name,
GVariant* parameters,
gpointer user_data);
void StartRequest();
static void OnStartRequested(GDBusProxy* proxy,
GAsyncResult* result,
gpointer user_data);
static void OnStartRequestResponseSignal(GDBusConnection* connection,
const char* sender_name,
const char* object_path,
const char* interface_name,
const char* signal_name,
GVariant* parameters,
gpointer user_data);
static void OnOpenPipeWireRemoteRequested(GDBusProxy* proxy,
GAsyncResult* result,
gpointer user_data);
};
} // namespace webrtc
#endif // MODULES_DESKTOP_CAPTURE_LINUX_WAYLAND_SCREENCAST_PORTAL_H_
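
A minimal consumer sketch, assuming the header above lives at the path implied by its include guard and that CaptureType::kScreen (declared in desktop_capture_types.h) selects monitor capture; ExampleScreenCapture is a hypothetical class, not part of the tree.

#include <memory>

#include "modules/desktop_capture/linux/wayland/screencast_portal.h"

class ExampleScreenCapture : public webrtc::ScreenCastPortal::PortalNotifier {
 public:
  ExampleScreenCapture()
      : portal_(std::make_unique<webrtc::ScreenCastPortal>(
            webrtc::CaptureType::kScreen, this)) {
    portal_->SetPersistMode(webrtc::ScreenCastPortal::PersistMode::kTransient);
    portal_->Start();  // Result arrives asynchronously via the overrides below.
  }

  void OnScreenCastRequestResult(webrtc::xdg_portal::RequestResponse result,
                                 uint32_t stream_node_id,
                                 int fd) override {
    // On success, hand |stream_node_id| and |fd| to a PipeWire consumer such
    // as SharedScreenCastStream, and keep portal_->RestoreToken() if the
    // session should be restorable later.
  }
  void OnScreenCastSessionClosed() override {}

 private:
  std::unique_ptr<webrtc::ScreenCastPortal> portal_;
};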


@ -0,0 +1,133 @@
/*
* Copyright 2022 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "modules/desktop_capture/linux/wayland/screencast_stream_utils.h"
#include <libdrm/drm_fourcc.h>
#include <pipewire/pipewire.h>
#include <spa/param/video/format-utils.h>
#include <string>
#include "rtc_base/string_to_number.h"
#if !PW_CHECK_VERSION(0, 3, 29)
#define SPA_POD_PROP_FLAG_MANDATORY (1u << 3)
#endif
#if !PW_CHECK_VERSION(0, 3, 33)
#define SPA_POD_PROP_FLAG_DONT_FIXATE (1u << 4)
#endif
namespace webrtc {
PipeWireVersion PipeWireVersion::Parse(const absl::string_view& version) {
std::vector<absl::string_view> parsed_version = rtc::split(version, '.');
if (parsed_version.size() != 3) {
return {};
}
absl::optional<int> major = rtc::StringToNumber<int>(parsed_version.at(0));
absl::optional<int> minor = rtc::StringToNumber<int>(parsed_version.at(1));
absl::optional<int> micro = rtc::StringToNumber<int>(parsed_version.at(2));
// Return invalid version if we failed to parse it
if (!major || !minor || !micro) {
return {};
}
return {major.value(), minor.value(), micro.value()};
}
bool PipeWireVersion::operator>=(const PipeWireVersion& other) {
if (!major && !minor && !micro) {
return false;
}
return std::tie(major, minor, micro) >=
std::tie(other.major, other.minor, other.micro);
}
bool PipeWireVersion::operator<=(const PipeWireVersion& other) {
if (!major && !minor && !micro) {
return false;
}
return std::tie(major, minor, micro) <=
std::tie(other.major, other.minor, other.micro);
}
spa_pod* BuildFormat(spa_pod_builder* builder,
uint32_t format,
const std::vector<uint64_t>& modifiers,
const struct spa_rectangle* resolution,
const struct spa_fraction* frame_rate) {
spa_pod_frame frames[2];
spa_rectangle pw_min_screen_bounds = spa_rectangle{1, 1};
spa_rectangle pw_max_screen_bounds = spa_rectangle{UINT32_MAX, UINT32_MAX};
spa_pod_builder_push_object(builder, &frames[0], SPA_TYPE_OBJECT_Format,
SPA_PARAM_EnumFormat);
spa_pod_builder_add(builder, SPA_FORMAT_mediaType,
SPA_POD_Id(SPA_MEDIA_TYPE_video), 0);
spa_pod_builder_add(builder, SPA_FORMAT_mediaSubtype,
SPA_POD_Id(SPA_MEDIA_SUBTYPE_raw), 0);
spa_pod_builder_add(builder, SPA_FORMAT_VIDEO_format, SPA_POD_Id(format), 0);
if (modifiers.size()) {
if (modifiers.size() == 1 && modifiers[0] == DRM_FORMAT_MOD_INVALID) {
spa_pod_builder_prop(builder, SPA_FORMAT_VIDEO_modifier,
SPA_POD_PROP_FLAG_MANDATORY);
spa_pod_builder_long(builder, modifiers[0]);
} else {
spa_pod_builder_prop(
builder, SPA_FORMAT_VIDEO_modifier,
SPA_POD_PROP_FLAG_MANDATORY | SPA_POD_PROP_FLAG_DONT_FIXATE);
spa_pod_builder_push_choice(builder, &frames[1], SPA_CHOICE_Enum, 0);
// modifiers from the array
bool first = true;
for (int64_t val : modifiers) {
spa_pod_builder_long(builder, val);
// Add the first modifier twice as the very first value is the default
// option
if (first) {
spa_pod_builder_long(builder, val);
first = false;
}
}
spa_pod_builder_pop(builder, &frames[1]);
}
}
if (resolution) {
spa_pod_builder_add(builder, SPA_FORMAT_VIDEO_size,
SPA_POD_Rectangle(resolution), 0);
} else {
spa_pod_builder_add(builder, SPA_FORMAT_VIDEO_size,
SPA_POD_CHOICE_RANGE_Rectangle(&pw_min_screen_bounds,
&pw_min_screen_bounds,
&pw_max_screen_bounds),
0);
}
if (frame_rate) {
static const spa_fraction pw_min_frame_rate = spa_fraction{0, 1};
spa_pod_builder_add(builder, SPA_FORMAT_VIDEO_framerate,
SPA_POD_CHOICE_RANGE_Fraction(
frame_rate, &pw_min_frame_rate, frame_rate),
0);
spa_pod_builder_add(builder, SPA_FORMAT_VIDEO_maxFramerate,
SPA_POD_CHOICE_RANGE_Fraction(
frame_rate, &pw_min_frame_rate, frame_rate),
0);
}
return static_cast<spa_pod*>(spa_pod_builder_pop(builder, &frames[0]));
}
} // namespace webrtc
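
A sketch of how PipeWireVersion might gate a feature on the linked PipeWire library, assuming pw_get_library_version() as the version source; SupportsDmaBufModifiers is a hypothetical helper, and the 0.3.33 threshold simply mirrors the DONT_FIXATE guard above.

#include <pipewire/pipewire.h>

#include "modules/desktop_capture/linux/wayland/screencast_stream_utils.h"

bool SupportsDmaBufModifiers() {
  webrtc::PipeWireVersion version =
      webrtc::PipeWireVersion::Parse(pw_get_library_version());
  // Parse() returns {0, 0, 0} on failure, for which operator>= is defined to
  // return false, so an unknown version is treated as "too old".
  const webrtc::PipeWireVersion kMinVersion = {0, 3, 33};
  return version >= kMinVersion;
}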


@ -0,0 +1,53 @@
/*
* Copyright 2022 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef MODULES_DESKTOP_CAPTURE_LINUX_WAYLAND_SCREENCAST_STREAM_UTILS_H_
#define MODULES_DESKTOP_CAPTURE_LINUX_WAYLAND_SCREENCAST_STREAM_UTILS_H_
#include <stdint.h>
#include <string>
#include <vector>
#include "rtc_base/string_encode.h"
struct spa_pod;
struct spa_pod_builder;
struct spa_rectangle;
struct spa_fraction;
namespace webrtc {
struct PipeWireVersion {
static PipeWireVersion Parse(const absl::string_view& version);
// Returns whether current version is newer or same as required version
bool operator>=(const PipeWireVersion& other);
// Returns whether current version is older or same as required version
bool operator<=(const PipeWireVersion& other);
int major = 0;
int minor = 0;
int micro = 0;
};
// Returns a spa_pod used to build the PipeWire stream format from the given
// arguments. Modifiers are optional; when present they are added with the
// SPA_POD_PROP_FLAG_MANDATORY and SPA_POD_PROP_FLAG_DONT_FIXATE flags.
spa_pod* BuildFormat(spa_pod_builder* builder,
uint32_t format,
const std::vector<uint64_t>& modifiers,
const struct spa_rectangle* resolution,
const struct spa_fraction* frame_rate);
} // namespace webrtc
#endif // MODULES_DESKTOP_CAPTURE_LINUX_WAYLAND_SCREENCAST_STREAM_UTILS_H_
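
A sketch of feeding BuildFormat() output into pw_stream_connect(), assuming the include path used elsewhere in this change; MakeBgrxFormat and the commented call site are hypothetical.

#include <pipewire/pipewire.h>
#include <spa/param/video/format-utils.h>

#include "modules/desktop_capture/linux/wayland/screencast_stream_utils.h"

const spa_pod* MakeBgrxFormat(spa_pod_builder* builder,
                              uint32_t width,
                              uint32_t height) {
  spa_rectangle resolution = SPA_RECTANGLE(width, height);
  // No modifiers: negotiate plain shared-memory buffers only.
  return webrtc::BuildFormat(builder, SPA_VIDEO_FORMAT_BGRx,
                             /*modifiers=*/{}, &resolution,
                             /*frame_rate=*/nullptr);
}

// Typical call site (consumer side):
//   uint8_t buffer[2048] = {};
//   spa_pod_builder builder = spa_pod_builder{buffer, sizeof(buffer)};
//   const spa_pod* params[] = {MakeBgrxFormat(&builder, 1920, 1080)};
//   pw_stream_connect(stream, PW_DIRECTION_INPUT, node_id,
//                     PW_STREAM_FLAG_AUTOCONNECT, params, 1);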


@ -0,0 +1,99 @@
/*
* Copyright 2022 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef MODULES_DESKTOP_CAPTURE_LINUX_WAYLAND_SHARED_SCREENCAST_STREAM_H_
#define MODULES_DESKTOP_CAPTURE_LINUX_WAYLAND_SHARED_SCREENCAST_STREAM_H_
#include <memory>
#include "absl/types/optional.h"
#include "api/ref_counted_base.h"
#include "api/scoped_refptr.h"
#include "modules/desktop_capture/desktop_capturer.h"
#include "modules/desktop_capture/mouse_cursor.h"
#include "modules/desktop_capture/screen_capture_frame_queue.h"
#include "modules/desktop_capture/shared_desktop_frame.h"
#include "rtc_base/system/rtc_export.h"
namespace webrtc {
class SharedScreenCastStreamPrivate;
class RTC_EXPORT SharedScreenCastStream
: public rtc::RefCountedNonVirtual<SharedScreenCastStream> {
public:
class Observer {
public:
virtual void OnCursorPositionChanged() = 0;
virtual void OnCursorShapeChanged() = 0;
virtual void OnDesktopFrameChanged() = 0;
virtual void OnFailedToProcessBuffer() = 0;
virtual void OnStreamConfigured() = 0;
virtual void OnFrameRateChanged(uint32_t frame_rate) = 0;
protected:
Observer() = default;
virtual ~Observer() = default;
};
static rtc::scoped_refptr<SharedScreenCastStream> CreateDefault();
bool StartScreenCastStream(uint32_t stream_node_id);
bool StartScreenCastStream(uint32_t stream_node_id,
int fd,
uint32_t width = 0,
uint32_t height = 0,
bool is_cursor_embedded = false,
DesktopCapturer::Callback* callback = nullptr);
void UpdateScreenCastStreamResolution(uint32_t width, uint32_t height);
void UpdateScreenCastStreamFrameRate(uint32_t frame_rate);
void SetUseDamageRegion(bool use_damage_region);
void SetObserver(SharedScreenCastStream::Observer* observer);
void StopScreenCastStream();
// Below functions return the most recent information we get from a
// PipeWire buffer on each Process() callback. This assumes that we
  // managed to successfully connect to a PipeWire stream provided by the
  // compositor (based on stream parameters). The cursor data are obtained
  // from spa_meta_cursor stream metadata and therefore the cursor is not
  // part of the actual screen/window frame.
// Returns the most recent screen/window frame we obtained from PipeWire
// buffer. Will return an empty frame in case we didn't manage to get a frame
// from PipeWire buffer.
std::unique_ptr<SharedDesktopFrame> CaptureFrame();
  // Returns the most recent mouse cursor image. Will return a nullptr cursor
  // in case we didn't manage to get a cursor from the PipeWire buffer. NOTE:
  // the cursor image might not be updated on every cursor location change,
  // but actually only when its shape changes.
std::unique_ptr<MouseCursor> CaptureCursor();
// Returns the most recent mouse cursor position. Will not return a value in
// case we didn't manage to get it from PipeWire buffer.
absl::optional<DesktopVector> CaptureCursorPosition();
~SharedScreenCastStream();
protected:
SharedScreenCastStream();
private:
friend class SharedScreenCastStreamPrivate;
SharedScreenCastStream(const SharedScreenCastStream&) = delete;
SharedScreenCastStream& operator=(const SharedScreenCastStream&) = delete;
std::unique_ptr<SharedScreenCastStreamPrivate> private_;
};
} // namespace webrtc
#endif // MODULES_DESKTOP_CAPTURE_LINUX_WAYLAND_SHARED_SCREENCAST_STREAM_H_
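
A minimal sketch of the capture path once the portal has delivered a stream node id and PipeWire file descriptor, assuming the header above lives at the path implied by its include guard; CaptureOneFrame is a hypothetical helper.

#include <memory>

#include "modules/desktop_capture/linux/wayland/shared_screencast_stream.h"

std::unique_ptr<webrtc::SharedDesktopFrame> CaptureOneFrame(
    uint32_t stream_node_id, int pipewire_fd) {
  rtc::scoped_refptr<webrtc::SharedScreenCastStream> stream =
      webrtc::SharedScreenCastStream::CreateDefault();
  if (!stream->StartScreenCastStream(stream_node_id, pipewire_fd)) {
    return nullptr;
  }
  // A real capturer pulls a frame on every DesktopCapturer::CaptureFrame()
  // call; this just grabs whatever the stream has produced so far (possibly
  // an empty frame if nothing has arrived yet).
  std::unique_ptr<webrtc::SharedDesktopFrame> frame = stream->CaptureFrame();
  stream->StopScreenCastStream();
  return frame;
}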


@ -0,0 +1,363 @@
/*
* Copyright 2022 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "modules/desktop_capture/linux/wayland/test/test_screencast_stream_provider.h"
#include <fcntl.h>
#include <sys/mman.h>
#include <sys/types.h>
#include <unistd.h>
#include <string>
#include <utility>
#include <vector>
#include "modules/portal/pipewire_utils.h"
#include "rtc_base/logging.h"
namespace webrtc {
constexpr int kBytesPerPixel = 4;
TestScreenCastStreamProvider::TestScreenCastStreamProvider(Observer* observer,
uint32_t width,
uint32_t height)
: observer_(observer), width_(width), height_(height) {
if (!InitializePipeWire()) {
RTC_LOG(LS_ERROR) << "Unable to open PipeWire";
return;
}
  pw_init(/*argc=*/nullptr, /*argv=*/nullptr);
pw_main_loop_ = pw_thread_loop_new("pipewire-test-main-loop", nullptr);
pw_context_ =
pw_context_new(pw_thread_loop_get_loop(pw_main_loop_), nullptr, 0);
if (!pw_context_) {
RTC_LOG(LS_ERROR) << "PipeWire test: Failed to create PipeWire context";
return;
}
if (pw_thread_loop_start(pw_main_loop_) < 0) {
RTC_LOG(LS_ERROR) << "PipeWire test: Failed to start main PipeWire loop";
return;
}
  // Initialize the event handlers for the remote end (core) and the stream.
pw_core_events_.version = PW_VERSION_CORE_EVENTS;
pw_core_events_.error = &OnCoreError;
pw_stream_events_.version = PW_VERSION_STREAM_EVENTS;
pw_stream_events_.add_buffer = &OnStreamAddBuffer;
pw_stream_events_.remove_buffer = &OnStreamRemoveBuffer;
pw_stream_events_.state_changed = &OnStreamStateChanged;
pw_stream_events_.param_changed = &OnStreamParamChanged;
{
PipeWireThreadLoopLock thread_loop_lock(pw_main_loop_);
pw_core_ = pw_context_connect(pw_context_, nullptr, 0);
if (!pw_core_) {
RTC_LOG(LS_ERROR) << "PipeWire test: Failed to connect PipeWire context";
return;
}
pw_core_add_listener(pw_core_, &spa_core_listener_, &pw_core_events_, this);
pw_stream_ = pw_stream_new(pw_core_, "webrtc-test-stream", nullptr);
if (!pw_stream_) {
RTC_LOG(LS_ERROR) << "PipeWire test: Failed to create PipeWire stream";
return;
}
pw_stream_add_listener(pw_stream_, &spa_stream_listener_,
&pw_stream_events_, this);
uint8_t buffer[2048] = {};
spa_pod_builder builder = spa_pod_builder{buffer, sizeof(buffer)};
std::vector<const spa_pod*> params;
spa_rectangle resolution =
SPA_RECTANGLE(uint32_t(width_), uint32_t(height_));
struct spa_fraction default_frame_rate = SPA_FRACTION(60, 1);
params.push_back(BuildFormat(&builder, SPA_VIDEO_FORMAT_BGRx,
/*modifiers=*/{}, &resolution,
&default_frame_rate));
auto flags =
pw_stream_flags(PW_STREAM_FLAG_DRIVER | PW_STREAM_FLAG_ALLOC_BUFFERS);
if (pw_stream_connect(pw_stream_, PW_DIRECTION_OUTPUT, SPA_ID_INVALID,
flags, params.data(), params.size()) != 0) {
RTC_LOG(LS_ERROR) << "PipeWire test: Could not connect receiving stream.";
pw_stream_destroy(pw_stream_);
pw_stream_ = nullptr;
return;
}
}
return;
}
TestScreenCastStreamProvider::~TestScreenCastStreamProvider() {
if (pw_main_loop_) {
pw_thread_loop_stop(pw_main_loop_);
}
if (pw_stream_) {
pw_stream_destroy(pw_stream_);
}
if (pw_core_) {
pw_core_disconnect(pw_core_);
}
if (pw_context_) {
pw_context_destroy(pw_context_);
}
if (pw_main_loop_) {
pw_thread_loop_destroy(pw_main_loop_);
}
}
void TestScreenCastStreamProvider::RecordFrame(RgbaColor rgba_color) {
const char* error;
if (pw_stream_get_state(pw_stream_, &error) != PW_STREAM_STATE_STREAMING) {
if (error) {
RTC_LOG(LS_ERROR)
<< "PipeWire test: Failed to record frame: stream is not active: "
<< error;
}
}
struct pw_buffer* buffer = pw_stream_dequeue_buffer(pw_stream_);
if (!buffer) {
RTC_LOG(LS_ERROR) << "PipeWire test: No available buffer";
return;
}
struct spa_buffer* spa_buffer = buffer->buffer;
struct spa_data* spa_data = spa_buffer->datas;
uint8_t* data = static_cast<uint8_t*>(spa_data->data);
if (!data) {
RTC_LOG(LS_ERROR)
<< "PipeWire test: Failed to record frame: invalid buffer data";
pw_stream_queue_buffer(pw_stream_, buffer);
return;
}
const int stride = SPA_ROUND_UP_N(width_ * kBytesPerPixel, 4);
spa_data->chunk->offset = 0;
spa_data->chunk->size = height_ * stride;
spa_data->chunk->stride = stride;
uint32_t color = rgba_color.ToUInt32();
for (uint32_t i = 0; i < height_; i++) {
uint32_t* column = reinterpret_cast<uint32_t*>(data);
for (uint32_t j = 0; j < width_; j++) {
column[j] = color;
}
data += stride;
}
pw_stream_queue_buffer(pw_stream_, buffer);
if (observer_) {
observer_->OnFrameRecorded();
}
}
void TestScreenCastStreamProvider::StartStreaming() {
if (pw_stream_ && pw_node_id_ != 0) {
pw_stream_set_active(pw_stream_, true);
}
}
void TestScreenCastStreamProvider::StopStreaming() {
if (pw_stream_ && pw_node_id_ != 0) {
pw_stream_set_active(pw_stream_, false);
}
}
// static
void TestScreenCastStreamProvider::OnCoreError(void* data,
uint32_t id,
int seq,
int res,
const char* message) {
TestScreenCastStreamProvider* that =
static_cast<TestScreenCastStreamProvider*>(data);
RTC_DCHECK(that);
RTC_LOG(LS_ERROR) << "PipeWire test: PipeWire remote error: " << message;
}
// static
void TestScreenCastStreamProvider::OnStreamStateChanged(
void* data,
pw_stream_state old_state,
pw_stream_state state,
const char* error_message) {
TestScreenCastStreamProvider* that =
static_cast<TestScreenCastStreamProvider*>(data);
RTC_DCHECK(that);
switch (state) {
case PW_STREAM_STATE_ERROR:
RTC_LOG(LS_ERROR) << "PipeWire test: PipeWire stream state error: "
<< error_message;
break;
case PW_STREAM_STATE_PAUSED:
if (that->pw_node_id_ == 0 && that->pw_stream_) {
that->pw_node_id_ = pw_stream_get_node_id(that->pw_stream_);
that->observer_->OnStreamReady(that->pw_node_id_);
} else {
// Stop streaming
that->is_streaming_ = false;
that->observer_->OnStopStreaming();
}
break;
case PW_STREAM_STATE_STREAMING:
// Start streaming
that->is_streaming_ = true;
that->observer_->OnStartStreaming();
break;
case PW_STREAM_STATE_CONNECTING:
break;
case PW_STREAM_STATE_UNCONNECTED:
if (that->is_streaming_) {
// Stop streaming
that->is_streaming_ = false;
that->observer_->OnStopStreaming();
}
break;
}
}
// static
void TestScreenCastStreamProvider::OnStreamParamChanged(
void* data,
uint32_t id,
const struct spa_pod* format) {
TestScreenCastStreamProvider* that =
static_cast<TestScreenCastStreamProvider*>(data);
RTC_DCHECK(that);
RTC_LOG(LS_INFO) << "PipeWire test: PipeWire stream format changed.";
if (!format || id != SPA_PARAM_Format) {
return;
}
spa_format_video_raw_parse(format, &that->spa_video_format_);
auto stride = SPA_ROUND_UP_N(that->width_ * kBytesPerPixel, 4);
uint8_t buffer[1024] = {};
auto builder = spa_pod_builder{buffer, sizeof(buffer)};
// Setup buffers and meta header for new format.
std::vector<const spa_pod*> params;
const int buffer_types = (1 << SPA_DATA_MemFd);
spa_rectangle resolution = SPA_RECTANGLE(that->width_, that->height_);
params.push_back(reinterpret_cast<spa_pod*>(spa_pod_builder_add_object(
&builder, SPA_TYPE_OBJECT_ParamBuffers, SPA_PARAM_Buffers,
SPA_FORMAT_VIDEO_size, SPA_POD_Rectangle(&resolution),
SPA_PARAM_BUFFERS_buffers, SPA_POD_CHOICE_RANGE_Int(16, 2, 16),
SPA_PARAM_BUFFERS_blocks, SPA_POD_Int(1), SPA_PARAM_BUFFERS_stride,
SPA_POD_Int(stride), SPA_PARAM_BUFFERS_size,
SPA_POD_Int(stride * that->height_), SPA_PARAM_BUFFERS_align,
SPA_POD_Int(16), SPA_PARAM_BUFFERS_dataType,
SPA_POD_CHOICE_FLAGS_Int(buffer_types))));
params.push_back(reinterpret_cast<spa_pod*>(spa_pod_builder_add_object(
&builder, SPA_TYPE_OBJECT_ParamMeta, SPA_PARAM_Meta, SPA_PARAM_META_type,
SPA_POD_Id(SPA_META_Header), SPA_PARAM_META_size,
SPA_POD_Int(sizeof(struct spa_meta_header)))));
pw_stream_update_params(that->pw_stream_, params.data(), params.size());
}
// static
void TestScreenCastStreamProvider::OnStreamAddBuffer(void* data,
pw_buffer* buffer) {
TestScreenCastStreamProvider* that =
static_cast<TestScreenCastStreamProvider*>(data);
RTC_DCHECK(that);
struct spa_data* spa_data = buffer->buffer->datas;
spa_data->mapoffset = 0;
spa_data->flags = SPA_DATA_FLAG_READWRITE;
if (!(spa_data[0].type & (1 << SPA_DATA_MemFd))) {
RTC_LOG(LS_ERROR)
<< "PipeWire test: Client doesn't support memfd buffer data type";
return;
}
const int stride = SPA_ROUND_UP_N(that->width_ * kBytesPerPixel, 4);
spa_data->maxsize = stride * that->height_;
spa_data->type = SPA_DATA_MemFd;
spa_data->fd =
memfd_create("pipewire-test-memfd", MFD_CLOEXEC | MFD_ALLOW_SEALING);
if (spa_data->fd == kInvalidPipeWireFd) {
RTC_LOG(LS_ERROR) << "PipeWire test: Can't create memfd";
return;
}
spa_data->mapoffset = 0;
if (ftruncate(spa_data->fd, spa_data->maxsize) < 0) {
RTC_LOG(LS_ERROR) << "PipeWire test: Can't truncate to"
<< spa_data->maxsize;
return;
}
unsigned int seals = F_SEAL_GROW | F_SEAL_SHRINK | F_SEAL_SEAL;
if (fcntl(spa_data->fd, F_ADD_SEALS, seals) == -1) {
RTC_LOG(LS_ERROR) << "PipeWire test: Failed to add seals";
}
spa_data->data = mmap(nullptr, spa_data->maxsize, PROT_READ | PROT_WRITE,
MAP_SHARED, spa_data->fd, spa_data->mapoffset);
if (spa_data->data == MAP_FAILED) {
RTC_LOG(LS_ERROR) << "PipeWire test: Failed to mmap memory";
} else {
that->observer_->OnBufferAdded();
RTC_LOG(LS_INFO) << "PipeWire test: Memfd created successfully: "
                     << spa_data->data << " of size " << spa_data->maxsize;
}
}
// static
void TestScreenCastStreamProvider::OnStreamRemoveBuffer(void* data,
pw_buffer* buffer) {
TestScreenCastStreamProvider* that =
static_cast<TestScreenCastStreamProvider*>(data);
RTC_DCHECK(that);
struct spa_buffer* spa_buffer = buffer->buffer;
struct spa_data* spa_data = spa_buffer->datas;
if (spa_data && spa_data->type == SPA_DATA_MemFd) {
munmap(spa_data->data, spa_data->maxsize);
close(spa_data->fd);
}
}
uint32_t TestScreenCastStreamProvider::PipeWireNodeId() {
return pw_node_id_;
}
} // namespace webrtc
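
A sketch of how a test might drive the fake stream above; ExampleObserver, the resolution, and the color literal are hypothetical, and a real test would synchronize on the observer callbacks (which fire on the PipeWire thread loop) before recording frames.

#include "modules/desktop_capture/linux/wayland/test/test_screencast_stream_provider.h"

class ExampleObserver : public webrtc::TestScreenCastStreamProvider::Observer {
 public:
  void OnBufferAdded() override {}
  void OnFrameRecorded() override {}
  void OnStreamReady(uint32_t stream_node_id) override {
    stream_node_id_ = stream_node_id;
  }
  void OnStartStreaming() override { streaming_ = true; }
  void OnStopStreaming() override { streaming_ = false; }

  uint32_t stream_node_id_ = 0;
  bool streaming_ = false;
};

void RunFakeStream() {
  ExampleObserver observer;
  webrtc::TestScreenCastStreamProvider provider(&observer, /*width=*/640,
                                                /*height=*/480);
  // After OnStreamReady() a consumer connects to provider.PipeWireNodeId();
  // once that consumer is streaming, solid-color frames can be pushed:
  provider.StartStreaming();
  provider.RecordFrame(webrtc::RgbaColor(0, 0, 255));  // Channel order per rgba_color.h.
  provider.StopStreaming();
}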


@ -0,0 +1,93 @@
/*
* Copyright 2022 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef MODULES_DESKTOP_CAPTURE_LINUX_WAYLAND_TEST_TEST_SCREENCAST_STREAM_PROVIDER_H_
#define MODULES_DESKTOP_CAPTURE_LINUX_WAYLAND_TEST_TEST_SCREENCAST_STREAM_PROVIDER_H_
#include <pipewire/pipewire.h>
#include <spa/param/video/format-utils.h>
#include "modules/desktop_capture/linux/wayland/screencast_stream_utils.h"
#include "modules/desktop_capture/rgba_color.h"
#include "rtc_base/random.h"
namespace webrtc {
class TestScreenCastStreamProvider {
public:
class Observer {
public:
virtual void OnBufferAdded() = 0;
virtual void OnFrameRecorded() = 0;
virtual void OnStreamReady(uint32_t stream_node_id) = 0;
virtual void OnStartStreaming() = 0;
virtual void OnStopStreaming() = 0;
protected:
Observer() = default;
virtual ~Observer() = default;
};
explicit TestScreenCastStreamProvider(Observer* observer,
uint32_t width,
uint32_t height);
~TestScreenCastStreamProvider();
uint32_t PipeWireNodeId();
void RecordFrame(RgbaColor rgba_color);
void StartStreaming();
void StopStreaming();
private:
Observer* observer_;
// Resolution parameters.
uint32_t width_ = 0;
uint32_t height_ = 0;
bool is_streaming_ = false;
uint32_t pw_node_id_ = 0;
// PipeWire types
struct pw_context* pw_context_ = nullptr;
struct pw_core* pw_core_ = nullptr;
struct pw_stream* pw_stream_ = nullptr;
struct pw_thread_loop* pw_main_loop_ = nullptr;
spa_hook spa_core_listener_;
spa_hook spa_stream_listener_;
// event handlers
pw_core_events pw_core_events_ = {};
pw_stream_events pw_stream_events_ = {};
struct spa_video_info_raw spa_video_format_;
// PipeWire callbacks
static void OnCoreError(void* data,
uint32_t id,
int seq,
int res,
const char* message);
static void OnStreamAddBuffer(void* data, pw_buffer* buffer);
static void OnStreamRemoveBuffer(void* data, pw_buffer* buffer);
static void OnStreamParamChanged(void* data,
uint32_t id,
const struct spa_pod* format);
static void OnStreamStateChanged(void* data,
pw_stream_state old_state,
pw_stream_state state,
const char* error_message);
};
} // namespace webrtc
#endif // MODULES_DESKTOP_CAPTURE_LINUX_WAYLAND_TEST_TEST_SCREENCAST_STREAM_PROVIDER_H_


@ -0,0 +1,17 @@
/*
* Copyright 2022 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef MODULES_DESKTOP_CAPTURE_LINUX_WAYLAND_XDG_DESKTOP_PORTAL_UTILS_H_
#define MODULES_DESKTOP_CAPTURE_LINUX_WAYLAND_XDG_DESKTOP_PORTAL_UTILS_H_
// TODO(bugs.webrtc.org/14187): remove when all users are gone
#include "modules/portal/xdg_desktop_portal_utils.h"
#endif // MODULES_DESKTOP_CAPTURE_LINUX_WAYLAND_XDG_DESKTOP_PORTAL_UTILS_H_


@ -0,0 +1,17 @@
/*
* Copyright 2022 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef MODULES_DESKTOP_CAPTURE_LINUX_WAYLAND_XDG_SESSION_DETAILS_H_
#define MODULES_DESKTOP_CAPTURE_LINUX_WAYLAND_XDG_SESSION_DETAILS_H_
// TODO(bugs.webrtc.org/14187): remove when all users are gone
#include "modules/portal/xdg_session_details.h"
#endif // MODULES_DESKTOP_CAPTURE_LINUX_WAYLAND_XDG_SESSION_DETAILS_H_