Repo created

This commit is contained in:
Fr4nz D13trich 2025-11-22 14:04:28 +01:00
parent 81b91f4139
commit f8c34fa5ee
22732 changed files with 4815320 additions and 2 deletions

View file

@ -0,0 +1,647 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "modules/video_capture/windows/device_info_ds.h"
#include <dvdmedia.h>
#include "modules/video_capture/video_capture_config.h"
#include "modules/video_capture/windows/help_functions_ds.h"
#include "rtc_base/logging.h"
#include "rtc_base/string_utils.h"
namespace webrtc {
namespace videocapturemodule {
// static
DeviceInfoDS* DeviceInfoDS::Create() {
DeviceInfoDS* dsInfo = new DeviceInfoDS();
if (!dsInfo || dsInfo->Init() != 0) {
delete dsInfo;
dsInfo = NULL;
}
return dsInfo;
}
DeviceInfoDS::DeviceInfoDS()
    : _dsDevEnum(NULL),
      _dsMonikerDevEnum(NULL),
      _CoUninitializeIsRequired(true) {
  // Initialize the COM library (make Windows load the DLLs).
  //
  // CoInitializeEx must be called at least once, and is usually called only
  // once, for each thread that uses the COM library. Multiple calls to
  // CoInitializeEx by the same thread are allowed as long as they pass the
  // same concurrency flag, but subsequent valid calls return S_FALSE. To close
  // the COM library gracefully on a thread, each successful call to
  // CoInitializeEx, including any call that returns S_FALSE, must be balanced
  // by a corresponding call to CoUninitialize (done in the destructor).
  //
  // COINIT_MULTITHREADED is used since Voice Engine uses
  // COINIT_MULTITHREADED.
  HRESULT hr = CoInitializeEx(NULL, COINIT_MULTITHREADED);
  if (FAILED(hr)) {
    // Avoid calling CoUninitialize() since CoInitializeEx() failed.
    // Fix: assign `false` to the bool member instead of the Windows FALSE
    // macro.
    _CoUninitializeIsRequired = false;
    if (hr == RPC_E_CHANGED_MODE) {
      // The calling thread has already initialized COM with a different
      // concurrency model (e.g. STA), and the model cannot be changed once
      // set. hr = 0x80010106 <=> "Cannot change thread mode after it is set".
      // Fix: the log line used to claim COINIT_APARTMENTTHREADED although the
      // call above passes COINIT_MULTITHREADED.
      RTC_DLOG(LS_INFO) << __FUNCTION__
                        << ": CoInitializeEx(NULL, COINIT_MULTITHREADED)"
                           " => RPC_E_CHANGED_MODE, error 0x"
                        << rtc::ToHex(hr);
    }
  }
}
DeviceInfoDS::~DeviceInfoDS() {
  // Release the COM enumerators before tearing COM down.
  RELEASE_AND_CLEAR(_dsMonikerDevEnum);
  RELEASE_AND_CLEAR(_dsDevEnum);
  // Balance the constructor's CoInitializeEx() only if it succeeded there.
  if (_CoUninitializeIsRequired) {
    CoUninitialize();
  }
}
int32_t DeviceInfoDS::Init() {
  // Create the DirectShow system device enumerator used by all subsequent
  // device queries; released in the destructor. Returns 0 on success, -1 on
  // failure.
  HRESULT hr = CoCreateInstance(CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC,
                                IID_ICreateDevEnum, (void**)&_dsDevEnum);
  if (hr != NOERROR) {
    RTC_LOG(LS_INFO) << "Failed to create CLSID_SystemDeviceEnum, error 0x"
                     << rtc::ToHex(hr);
    return -1;
  }
  return 0;
}
uint32_t DeviceInfoDS::NumberOfDevices() {
  MutexLock lock(&_apiLock);
  // GetDeviceInfo() with null output buffers only counts the devices.
  return GetDeviceInfo(0, 0, 0, 0, 0, 0, 0);
}
// Fills the output buffers with the name/ids of device `deviceNumber`.
// Returns 0 on success, -1 if the device index does not exist.
int32_t DeviceInfoDS::GetDeviceName(uint32_t deviceNumber,
                                    char* deviceNameUTF8,
                                    uint32_t deviceNameLength,
                                    char* deviceUniqueIdUTF8,
                                    uint32_t deviceUniqueIdUTF8Length,
                                    char* productUniqueIdUTF8,
                                    uint32_t productUniqueIdUTF8Length) {
  MutexLock lock(&_apiLock);
  // GetDeviceInfo() returns the total number of enumerated devices; the
  // requested index was valid only if it is smaller than that count.
  const int32_t devices_found = GetDeviceInfo(
      deviceNumber, deviceNameUTF8, deviceNameLength, deviceUniqueIdUTF8,
      deviceUniqueIdUTF8Length, productUniqueIdUTF8, productUniqueIdUTF8Length);
  if (devices_found > static_cast<int32_t>(deviceNumber))
    return 0;
  return -1;
}
// Enumerates CLSID_VideoInputDeviceCategory devices, skipping VFW drivers and
// "Google Camera Adapter" entries. When `deviceNumber` matches a valid device
// and the output buffers are non-empty, fills in the UTF-8 name, unique id
// (DevicePath when available, otherwise the name) and product id.
// Returns the number of valid devices enumerated, 0 if none, or -1 on a
// conversion error.
int32_t DeviceInfoDS::GetDeviceInfo(uint32_t deviceNumber,
                                    char* deviceNameUTF8,
                                    uint32_t deviceNameLength,
                                    char* deviceUniqueIdUTF8,
                                    uint32_t deviceUniqueIdUTF8Length,
                                    char* productUniqueIdUTF8,
                                    uint32_t productUniqueIdUTF8Length) {
  // Enumerate all video capture devices.
  RELEASE_AND_CLEAR(_dsMonikerDevEnum);
  HRESULT hr = _dsDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory,
                                                 &_dsMonikerDevEnum, 0);
  if (hr != NOERROR) {
    RTC_LOG(LS_INFO) << "Failed to enumerate CLSID_SystemDeviceEnum, error 0x"
                     << rtc::ToHex(hr) << ". No webcam exist?";
    return 0;
  }
  _dsMonikerDevEnum->Reset();
  ULONG cFetched;
  IMoniker* pM;
  int index = 0;
  while (S_OK == _dsMonikerDevEnum->Next(1, &pM, &cFetched)) {
    IPropertyBag* pBag;
    hr = pM->BindToStorage(0, 0, IID_IPropertyBag, (void**)&pBag);
    if (S_OK == hr) {
      // Find the description or friendly name.
      VARIANT varName;
      VariantInit(&varName);
      hr = pBag->Read(L"Description", &varName, 0);
      if (FAILED(hr)) {
        hr = pBag->Read(L"FriendlyName", &varName, 0);
      }
      if (SUCCEEDED(hr)) {
        // Ignore all VFW drivers and the Google Camera Adapter.
        if ((wcsstr(varName.bstrVal, (L"(VFW)")) == NULL) &&
            (_wcsnicmp(varName.bstrVal, (L"Google Camera Adapter"), 21) != 0)) {
          // Found a valid device.
          if (index == static_cast<int>(deviceNumber)) {
            int convResult = 0;
            if (deviceNameLength > 0) {
              convResult = WideCharToMultiByte(CP_UTF8, 0, varName.bstrVal, -1,
                                               (char*)deviceNameUTF8,
                                               deviceNameLength, NULL, NULL);
              if (convResult == 0) {
                RTC_LOG(LS_INFO) << "Failed to convert device name to UTF8, "
                                    "error = "
                                 << GetLastError();
                // Fix: release COM/VARIANT resources before the early return;
                // these used to leak here.
                VariantClear(&varName);
                pBag->Release();
                pM->Release();
                return -1;
              }
            }
            if (deviceUniqueIdUTF8Length > 0) {
              // Fix: free the BSTR held from the name read before reusing
              // varName; IPropertyBag::Read does not release the previous
              // contents, so this used to leak.
              VariantClear(&varName);
              hr = pBag->Read(L"DevicePath", &varName, 0);
              if (FAILED(hr)) {
                // No DevicePath (e.g. non-PnP device): fall back to using the
                // (already converted) device name as the unique id.
                strncpy_s((char*)deviceUniqueIdUTF8, deviceUniqueIdUTF8Length,
                          (char*)deviceNameUTF8, convResult);
                RTC_LOG(LS_INFO) << "Failed to get "
                                    "deviceUniqueIdUTF8 using "
                                    "deviceNameUTF8";
              } else {
                convResult = WideCharToMultiByte(
                    CP_UTF8, 0, varName.bstrVal, -1, (char*)deviceUniqueIdUTF8,
                    deviceUniqueIdUTF8Length, NULL, NULL);
                if (convResult == 0) {
                  RTC_LOG(LS_INFO) << "Failed to convert device "
                                      "name to UTF8, error = "
                                   << GetLastError();
                  // Fix: release COM/VARIANT resources before the early
                  // return; these used to leak here.
                  VariantClear(&varName);
                  pBag->Release();
                  pM->Release();
                  return -1;
                }
                if (productUniqueIdUTF8 && productUniqueIdUTF8Length > 0) {
                  GetProductId(deviceUniqueIdUTF8, productUniqueIdUTF8,
                               productUniqueIdUTF8Length);
                }
              }
            }
          }
          ++index;  // Increase the number of valid devices.
        }
      }
      VariantClear(&varName);
      pBag->Release();
    }
    // Fix: release the moniker even when BindToStorage() fails; it used to be
    // released only on success, leaking one reference per failing device.
    pM->Release();
  }
  if (deviceNameLength) {
    RTC_DLOG(LS_INFO) << __FUNCTION__ << " " << deviceNameUTF8;
  }
  return index;
}
// Returns an AddRef'ed IBaseFilter for the capture device whose DevicePath
// (or, failing that, Description/FriendlyName) matches `deviceUniqueIdUTF8`,
// optionally copying the product id into `productUniqueIdUTF8`.
// Returns NULL if the id is too long, enumeration fails, or no device
// matches. The caller must Release() the returned filter.
IBaseFilter* DeviceInfoDS::GetDeviceFilter(const char* deviceUniqueIdUTF8,
                                           char* productUniqueIdUTF8,
                                           uint32_t productUniqueIdUTF8Length) {
  const int32_t deviceUniqueIdUTF8Length = (int32_t)strlen(
      (char*)deviceUniqueIdUTF8);  // UTF8 is also NULL terminated
  if (deviceUniqueIdUTF8Length >= kVideoCaptureUniqueNameLength) {
    RTC_LOG(LS_INFO) << "Device name too long";
    return NULL;
  }
  // Enumerate all video capture devices.
  RELEASE_AND_CLEAR(_dsMonikerDevEnum);
  HRESULT hr = _dsDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory,
                                                 &_dsMonikerDevEnum, 0);
  if (hr != NOERROR) {
    RTC_LOG(LS_INFO) << "Failed to enumerate CLSID_SystemDeviceEnum, error 0x"
                     << rtc::ToHex(hr) << ". No webcam exist?";
    return 0;
  }
  _dsMonikerDevEnum->Reset();
  ULONG cFetched;
  IMoniker* pM;
  IBaseFilter* captureFilter = NULL;
  bool deviceFound = false;
  while (S_OK == _dsMonikerDevEnum->Next(1, &pM, &cFetched) && !deviceFound) {
    IPropertyBag* pBag;
    hr = pM->BindToStorage(0, 0, IID_IPropertyBag, (void**)&pBag);
    if (S_OK == hr) {
      // Find the device path, or fall back to description/friendly name.
      VARIANT varName;
      VariantInit(&varName);
      if (deviceUniqueIdUTF8Length > 0) {
        hr = pBag->Read(L"DevicePath", &varName, 0);
        if (FAILED(hr)) {
          hr = pBag->Read(L"Description", &varName, 0);
          if (FAILED(hr)) {
            hr = pBag->Read(L"FriendlyName", &varName, 0);
          }
        }
        if (SUCCEEDED(hr)) {
          // Convert to UTF-8 and compare only the first
          // deviceUniqueIdUTF8Length bytes (prefix match).
          char tempDevicePathUTF8[256];
          tempDevicePathUTF8[0] = 0;
          WideCharToMultiByte(CP_UTF8, 0, varName.bstrVal, -1,
                              tempDevicePathUTF8, sizeof(tempDevicePathUTF8),
                              NULL, NULL);
          if (strncmp(tempDevicePathUTF8, (const char*)deviceUniqueIdUTF8,
                      deviceUniqueIdUTF8Length) == 0) {
            // We have found the requested device.
            deviceFound = true;
            hr =
                pM->BindToObject(0, 0, IID_IBaseFilter, (void**)&captureFilter);
            if FAILED (hr) {
              RTC_LOG(LS_ERROR) << "Failed to bind to the selected "
                                   "capture device "
                                << hr;
            }
            if (productUniqueIdUTF8 &&
                productUniqueIdUTF8Length > 0)  // Get the device name
            {
              GetProductId(deviceUniqueIdUTF8, productUniqueIdUTF8,
                           productUniqueIdUTF8Length);
            }
          }
        }
      }
      VariantClear(&varName);
      pBag->Release();
    }
    pM->Release();
  }
  // NULL when no device matched or BindToObject failed.
  return captureFilter;
}
// Copies the Windows-specific capability at `capabilityIndex` (as built by
// CreateCapabilityMap) into `windowsCapability`.
// Returns 0 on success, -1 if the index is out of range.
int32_t DeviceInfoDS::GetWindowsCapability(
    const int32_t capabilityIndex,
    VideoCaptureCapabilityWindows& windowsCapability) {
  MutexLock lock(&_apiLock);
  const bool index_in_range =
      capabilityIndex >= 0 &&
      static_cast<size_t>(capabilityIndex) < _captureCapabilitiesWindows.size();
  if (!index_in_range)
    return -1;
  windowsCapability = _captureCapabilitiesWindows[capabilityIndex];
  return 0;
}
// Enumerates every media type exposed by the device identified by
// `deviceUniqueIdUTF8` and rebuilds _captureCapabilities and
// _captureCapabilitiesWindows with one entry per supported
// resolution/frame-rate/pixel-format combination.
// Returns the number of capabilities found, or -1 on failure.
int32_t DeviceInfoDS::CreateCapabilityMap(const char* deviceUniqueIdUTF8) {
  // Reset the old capability lists. Fix: _captureCapabilitiesWindows was not
  // cleared while _captureCapabilities was, so repeated calls appended stale
  // entries and GetWindowsCapability() (which indexes the Windows vector with
  // the same index as _captureCapabilities) returned the wrong capability.
  _captureCapabilities.clear();
  _captureCapabilitiesWindows.clear();
  const int32_t deviceUniqueIdUTF8Length =
      (int32_t)strlen((char*)deviceUniqueIdUTF8);
  if (deviceUniqueIdUTF8Length >= kVideoCaptureUniqueNameLength) {
    RTC_LOG(LS_INFO) << "Device name too long";
    return -1;
  }
  RTC_LOG(LS_INFO) << "CreateCapabilityMap called for device "
                   << deviceUniqueIdUTF8;

  char productId[kVideoCaptureProductIdLength];
  IBaseFilter* captureDevice = DeviceInfoDS::GetDeviceFilter(
      deviceUniqueIdUTF8, productId, kVideoCaptureProductIdLength);
  if (!captureDevice)
    return -1;
  IPin* outputCapturePin = GetOutputPin(captureDevice, GUID_NULL);
  if (!outputCapturePin) {
    RTC_LOG(LS_INFO) << "Failed to get capture device output pin";
    RELEASE_AND_CLEAR(captureDevice);
    return -1;
  }
  IAMExtDevice* extDevice = NULL;
  HRESULT hr =
      captureDevice->QueryInterface(IID_IAMExtDevice, (void**)&extDevice);
  if (SUCCEEDED(hr) && extDevice) {
    RTC_LOG(LS_INFO) << "This is an external device";
    extDevice->Release();
  }
  IAMStreamConfig* streamConfig = NULL;
  hr = outputCapturePin->QueryInterface(IID_IAMStreamConfig,
                                        (void**)&streamConfig);
  if (FAILED(hr)) {
    RTC_LOG(LS_INFO) << "Failed to get IID_IAMStreamConfig interface "
                        "from capture device";
    // Fix: this early return used to leak both the output pin and the
    // capture filter.
    RELEASE_AND_CLEAR(outputCapturePin);
    RELEASE_AND_CLEAR(captureDevice);
    return -1;
  }
  // This gets the FPS.
  IAMVideoControl* videoControlConfig = NULL;
  HRESULT hrVC = captureDevice->QueryInterface(IID_IAMVideoControl,
                                               (void**)&videoControlConfig);
  if (FAILED(hrVC)) {
    RTC_LOG(LS_INFO) << "IID_IAMVideoControl Interface NOT SUPPORTED";
  }
  AM_MEDIA_TYPE* pmt = NULL;
  VIDEO_STREAM_CONFIG_CAPS caps;
  int count, size;
  hr = streamConfig->GetNumberOfCapabilities(&count, &size);
  if (FAILED(hr)) {
    RTC_LOG(LS_INFO) << "Failed to GetNumberOfCapabilities";
    RELEASE_AND_CLEAR(videoControlConfig);
    RELEASE_AND_CLEAR(streamConfig);
    RELEASE_AND_CLEAR(outputCapturePin);
    RELEASE_AND_CLEAR(captureDevice);
    return -1;
  }
  // Check if the device supports formattype == FORMAT_VideoInfo2 and
  // FORMAT_VideoInfo. Prefer FORMAT_VideoInfo since some cameras (ZureCam)
  // have been seen having problems with MJPEG and FORMAT_VideoInfo2. The
  // interlace flag is only supported in FORMAT_VideoInfo2.
  bool supportFORMAT_VideoInfo2 = false;
  bool supportFORMAT_VideoInfo = false;
  bool foundInterlacedFormat = false;
  GUID preferedVideoFormat = FORMAT_VideoInfo;
  for (int32_t tmp = 0; tmp < count; ++tmp) {
    hr = streamConfig->GetStreamCaps(tmp, &pmt, reinterpret_cast<BYTE*>(&caps));
    if (hr == S_OK) {
      if (pmt->majortype == MEDIATYPE_Video &&
          pmt->formattype == FORMAT_VideoInfo2) {
        RTC_LOG(LS_INFO) << "Device support FORMAT_VideoInfo2";
        supportFORMAT_VideoInfo2 = true;
        VIDEOINFOHEADER2* h =
            reinterpret_cast<VIDEOINFOHEADER2*>(pmt->pbFormat);
        RTC_DCHECK(h);
        foundInterlacedFormat |=
            h->dwInterlaceFlags &
            (AMINTERLACE_IsInterlaced | AMINTERLACE_DisplayModeBobOnly);
      }
      if (pmt->majortype == MEDIATYPE_Video &&
          pmt->formattype == FORMAT_VideoInfo) {
        // Fix: this branch used to log "FORMAT_VideoInfo2".
        RTC_LOG(LS_INFO) << "Device support FORMAT_VideoInfo";
        supportFORMAT_VideoInfo = true;
      }
      FreeMediaType(pmt);
      pmt = NULL;
    }
  }
  if (supportFORMAT_VideoInfo2) {
    if (supportFORMAT_VideoInfo && !foundInterlacedFormat) {
      preferedVideoFormat = FORMAT_VideoInfo;
    } else {
      preferedVideoFormat = FORMAT_VideoInfo2;
    }
  }

  // Build one capability entry per stream cap of the preferred format type.
  for (int32_t tmp = 0; tmp < count; ++tmp) {
    hr = streamConfig->GetStreamCaps(tmp, &pmt, reinterpret_cast<BYTE*>(&caps));
    if (hr != S_OK) {
      RTC_LOG(LS_INFO) << "Failed to GetStreamCaps";
      RELEASE_AND_CLEAR(videoControlConfig);
      RELEASE_AND_CLEAR(streamConfig);
      RELEASE_AND_CLEAR(outputCapturePin);
      RELEASE_AND_CLEAR(captureDevice);
      return -1;
    }
    if (pmt->majortype == MEDIATYPE_Video &&
        pmt->formattype == preferedVideoFormat) {
      VideoCaptureCapabilityWindows capability;
      int64_t avgTimePerFrame = 0;
      if (pmt->formattype == FORMAT_VideoInfo) {
        VIDEOINFOHEADER* h = reinterpret_cast<VIDEOINFOHEADER*>(pmt->pbFormat);
        RTC_DCHECK(h);
        capability.directShowCapabilityIndex = tmp;
        capability.width = h->bmiHeader.biWidth;
        capability.height = h->bmiHeader.biHeight;
        avgTimePerFrame = h->AvgTimePerFrame;
      }
      if (pmt->formattype == FORMAT_VideoInfo2) {
        VIDEOINFOHEADER2* h =
            reinterpret_cast<VIDEOINFOHEADER2*>(pmt->pbFormat);
        RTC_DCHECK(h);
        capability.directShowCapabilityIndex = tmp;
        capability.width = h->bmiHeader.biWidth;
        capability.height = h->bmiHeader.biHeight;
        capability.interlaced =
            h->dwInterlaceFlags &
            (AMINTERLACE_IsInterlaced | AMINTERLACE_DisplayModeBobOnly);
        avgTimePerFrame = h->AvgTimePerFrame;
      }
      if (hrVC == S_OK) {
        LONGLONG* frameDurationList = NULL;
        LONGLONG maxFPS = 0;
        long listSize = 0;
        SIZE size;
        size.cx = capability.width;
        size.cy = capability.height;
        // GetMaxAvailableFrameRate doesn't return max frame rate always
        // eg: Logitech Notebook. This may be due to a bug in that API
        // because GetFrameRateList array is reversed in the above camera. So
        // a util method written. Can't assume the first value will return
        // the max fps.
        hrVC = videoControlConfig->GetFrameRateList(
            outputCapturePin, tmp, size, &listSize, &frameDurationList);
        if (hrVC == S_OK) {
          maxFPS = GetMaxOfFrameArray(frameDurationList, listSize);
        }
        CoTaskMemFree(frameDurationList);
        frameDurationList = NULL;
        listSize = 0;
        // On some odd cameras, you may get a 0 for duration. Some others may
        // not update the out vars. GetMaxOfFrameArray returns the lowest
        // duration (highest FPS), or 0 if there was no list with elements.
        if (0 != maxFPS) {
          capability.maxFPS = static_cast<int>(10000000 / maxFPS);
          capability.supportFrameRateControl = true;
        } else  // use existing method
        {
          RTC_LOG(LS_INFO) << "GetMaxAvailableFrameRate NOT SUPPORTED";
          if (avgTimePerFrame > 0)
            capability.maxFPS = static_cast<int>(10000000 / avgTimePerFrame);
          else
            capability.maxFPS = 0;
        }
      } else  // use existing method in case IAMVideoControl is not supported
      {
        if (avgTimePerFrame > 0)
          capability.maxFPS = static_cast<int>(10000000 / avgTimePerFrame);
        else
          capability.maxFPS = 0;
      }
      // Map the DirectShow media subtype to the internal pixel format.
      // (can't switch on a MEDIATYPE GUID.)
      if (pmt->subtype == MEDIASUBTYPE_I420) {
        capability.videoType = VideoType::kI420;
      } else if (pmt->subtype == MEDIASUBTYPE_IYUV) {
        capability.videoType = VideoType::kIYUV;
      } else if (pmt->subtype == MEDIASUBTYPE_RGB24) {
        capability.videoType = VideoType::kRGB24;
      } else if (pmt->subtype == MEDIASUBTYPE_YUY2) {
        capability.videoType = VideoType::kYUY2;
      } else if (pmt->subtype == MEDIASUBTYPE_RGB565) {
        capability.videoType = VideoType::kRGB565;
      } else if (pmt->subtype == MEDIASUBTYPE_MJPG) {
        capability.videoType = VideoType::kMJPEG;
      } else if (pmt->subtype == MEDIASUBTYPE_dvsl ||
                 pmt->subtype == MEDIASUBTYPE_dvsd ||
                 pmt->subtype ==
                     MEDIASUBTYPE_dvhd)  // If this is an external DV camera
      {
        capability.videoType =
            VideoType::kYUY2;  // MS DV filter seems to create this type
      } else if (pmt->subtype ==
                 MEDIASUBTYPE_UYVY)  // Seen used by Declink capture cards
      {
        capability.videoType = VideoType::kUYVY;
      } else if (pmt->subtype ==
                 MEDIASUBTYPE_HDYC)  // Seen used by Declink capture cards. Uses
                                     // BT. 709 color. Not entirely correct to
                                     // use UYVY. http://en.wikipedia.org/wiki/YCbCr
      {
        RTC_LOG(LS_INFO) << "Device support HDYC.";
        capability.videoType = VideoType::kUYVY;
      } else {
        WCHAR strGuid[39];
        StringFromGUID2(pmt->subtype, strGuid, 39);
        RTC_LOG(LS_WARNING)
            << "Device support unknown media type " << strGuid << ", width "
            << capability.width << ", height " << capability.height;
        continue;
      }
      _captureCapabilities.push_back(capability);
      _captureCapabilitiesWindows.push_back(capability);
      RTC_LOG(LS_INFO) << "Camera capability, width:" << capability.width
                       << " height:" << capability.height
                       << " type:" << static_cast<int>(capability.videoType)
                       << " fps:" << capability.maxFPS;
    }
    FreeMediaType(pmt);
    pmt = NULL;
  }
  RELEASE_AND_CLEAR(streamConfig);
  RELEASE_AND_CLEAR(videoControlConfig);
  RELEASE_AND_CLEAR(outputCapturePin);
  RELEASE_AND_CLEAR(captureDevice);  // Release the capture device
  // Store the name of the last used device. Fix: guard against realloc()
  // failure instead of copying through a null pointer; on failure the old
  // buffer/length pair is kept intact.
  char* lastUsedDeviceName =
      (char*)realloc(_lastUsedDeviceName, deviceUniqueIdUTF8Length + 1);
  if (lastUsedDeviceName) {
    _lastUsedDeviceName = lastUsedDeviceName;
    _lastUsedDeviceNameLength = deviceUniqueIdUTF8Length;
    memcpy(_lastUsedDeviceName, deviceUniqueIdUTF8,
           _lastUsedDeviceNameLength + 1);
  }
  RTC_LOG(LS_INFO) << "CreateCapabilityMap " << _captureCapabilities.size();
  return static_cast<int32_t>(_captureCapabilities.size());
}
// Constructs a product ID from the Windows DevicePath. on a USB device the
// devicePath contains product id and vendor id. This seems to work for firewire
// as well.
// Example of device path:
// "\\?\usb#vid_0408&pid_2010&mi_00#7&258e7aaf&0&0000#{65e8773d-8f56-11d0-a3b9-00a0c9223196}\global"
// "\\?\avc#sony&dv-vcr&camcorder&dv#65b2d50301460008#{65e8773d-8f56-11d0-a3b9-00a0c9223196}\global"
void DeviceInfoDS::GetProductId(const char* devicePath,
char* productUniqueIdUTF8,
uint32_t productUniqueIdUTF8Length) {
*productUniqueIdUTF8 = '\0';
char* startPos = strstr((char*)devicePath, "\\\\?\\");
if (!startPos) {
strncpy_s((char*)productUniqueIdUTF8, productUniqueIdUTF8Length, "", 1);
RTC_LOG(LS_INFO) << "Failed to get the product Id";
return;
}
startPos += 4;
char* pos = strchr(startPos, '&');
if (!pos || pos >= (char*)devicePath + strlen((char*)devicePath)) {
strncpy_s((char*)productUniqueIdUTF8, productUniqueIdUTF8Length, "", 1);
RTC_LOG(LS_INFO) << "Failed to get the product Id";
return;
}
// Find the second occurrence.
pos = strchr(pos + 1, '&');
uint32_t bytesToCopy = (uint32_t)(pos - startPos);
if (pos && (bytesToCopy < productUniqueIdUTF8Length) &&
bytesToCopy <= kVideoCaptureProductIdLength) {
strncpy_s((char*)productUniqueIdUTF8, productUniqueIdUTF8Length,
(char*)startPos, bytesToCopy);
} else {
strncpy_s((char*)productUniqueIdUTF8, productUniqueIdUTF8Length, "", 1);
RTC_LOG(LS_INFO) << "Failed to get the product Id";
}
}
// Shows the capture device's own property-page dialog (via
// ISpecifyPropertyPages/OleCreatePropertyFrame) at the given position.
// Returns 0 on success, -1 if the device or its property pages are
// unavailable.
int32_t DeviceInfoDS::DisplayCaptureSettingsDialogBox(
    const char* deviceUniqueIdUTF8,
    const char* dialogTitleUTF8,
    void* parentWindow,
    uint32_t positionX,
    uint32_t positionY) {
  MutexLock lock(&_apiLock);
  HWND window = (HWND)parentWindow;
  IBaseFilter* filter = GetDeviceFilter(deviceUniqueIdUTF8, NULL, 0);
  if (!filter)
    return -1;
  ISpecifyPropertyPages* pPages = NULL;
  CAUUID uuid;
  HRESULT hr =
      filter->QueryInterface(IID_ISpecifyPropertyPages, (LPVOID*)&pPages);
  if (!SUCCEEDED(hr)) {
    filter->Release();
    return -1;
  }
  hr = pPages->GetPages(&uuid);
  if (!SUCCEEDED(hr)) {
    // Fix: pPages was leaked on this path (and on success) — it was never
    // Released anywhere in this function.
    pPages->Release();
    filter->Release();
    return -1;
  }
  WCHAR tempDialogTitleWide[256];
  tempDialogTitleWide[0] = 0;
  int size = 255;
  // UTF-8 to wide char.
  MultiByteToWideChar(CP_UTF8, 0, (char*)dialogTitleUTF8, -1,
                      tempDialogTitleWide, size);
  // Invoke a dialog box to display.
  hr = OleCreatePropertyFrame(
      window,               // You must create the parent window.
      positionX,            // Horizontal position for the dialog box.
      positionY,            // Vertical position for the dialog box.
      tempDialogTitleWide,  // String used for the dialog box caption.
      1,                    // Number of pointers passed in pPlugin.
      (LPUNKNOWN*)&filter,  // Pointer to the filter.
      uuid.cElems,          // Number of property pages.
      uuid.pElems,          // Array of property page CLSIDs.
      LOCALE_USER_DEFAULT,  // Locale ID for the dialog box.
      0, NULL);             // Reserved
  // Release memory.
  if (uuid.pElems) {
    CoTaskMemFree(uuid.pElems);
  }
  pPages->Release();  // Fix: balance the QueryInterface() above.
  filter->Release();
  return 0;
}
} // namespace videocapturemodule
} // namespace webrtc

View file

@ -0,0 +1,99 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef MODULES_VIDEO_CAPTURE_MAIN_SOURCE_WINDOWS_DEVICE_INFO_DS_H_
#define MODULES_VIDEO_CAPTURE_MAIN_SOURCE_WINDOWS_DEVICE_INFO_DS_H_
#include <dshow.h>
#include "modules/video_capture/device_info_impl.h"
#include "modules/video_capture/video_capture_impl.h"
namespace webrtc {
namespace videocapturemodule {
// VideoCaptureCapability extended with the DirectShow stream-caps index the
// capability was read from and whether the device supports setting the frame
// rate for it.
struct VideoCaptureCapabilityWindows : public VideoCaptureCapability {
  uint32_t directShowCapabilityIndex = 0;
  bool supportFrameRateControl = false;
};
// DirectShow-based DeviceInfo implementation for Windows. Enumerates video
// capture devices through ICreateDevEnum and exposes their capabilities.
class DeviceInfoDS : public DeviceInfoImpl {
 public:
  // Factory function. Returns NULL if initialization fails.
  static DeviceInfoDS* Create();
  DeviceInfoDS();
  ~DeviceInfoDS() override;
  // Creates the DirectShow system device enumerator. Returns 0 on success.
  int32_t Init() override;
  uint32_t NumberOfDevices() override;
  /*
   * Returns the available capture devices.
   */
  int32_t GetDeviceName(uint32_t deviceNumber,
                        char* deviceNameUTF8,
                        uint32_t deviceNameLength,
                        char* deviceUniqueIdUTF8,
                        uint32_t deviceUniqueIdUTF8Length,
                        char* productUniqueIdUTF8,
                        uint32_t productUniqueIdUTF8Length) override;
  /*
   * Display OS /capture device specific settings dialog
   */
  int32_t DisplayCaptureSettingsDialogBox(const char* deviceUniqueIdUTF8,
                                          const char* dialogTitleUTF8,
                                          void* parentWindow,
                                          uint32_t positionX,
                                          uint32_t positionY) override;
  // Windows specific
  /* Gets a capture device filter
     The user of this API is responsible for releasing the filter when it not
     needed.
  */
  IBaseFilter* GetDeviceFilter(const char* deviceUniqueIdUTF8,
                               char* productUniqueIdUTF8 = NULL,
                               uint32_t productUniqueIdUTF8Length = 0);
  // Copies the capability at capabilityIndex (as built by
  // CreateCapabilityMap) into windowsCapability. Returns -1 if out of range.
  int32_t GetWindowsCapability(
      int32_t capabilityIndex,
      VideoCaptureCapabilityWindows& windowsCapability);
  // Extracts a product id substring from a Windows DevicePath string.
  static void GetProductId(const char* devicePath,
                           char* productUniqueIdUTF8,
                           uint32_t productUniqueIdUTF8Length);

 protected:
  // Shared worker for NumberOfDevices()/GetDeviceName(); called with null
  // buffers it only counts devices. Returns the device count or -1 on error.
  int32_t GetDeviceInfo(uint32_t deviceNumber,
                        char* deviceNameUTF8,
                        uint32_t deviceNameLength,
                        char* deviceUniqueIdUTF8,
                        uint32_t deviceUniqueIdUTF8Length,
                        char* productUniqueIdUTF8,
                        uint32_t productUniqueIdUTF8Length);
  int32_t CreateCapabilityMap(const char* deviceUniqueIdUTF8) override
      RTC_EXCLUSIVE_LOCKS_REQUIRED(_apiLock);

 private:
  ICreateDevEnum* _dsDevEnum;       // System device enumerator (from Init()).
  IEnumMoniker* _dsMonikerDevEnum;  // Current device enumeration, if any.
  bool _CoUninitializeIsRequired;   // True if the ctor's CoInitializeEx
                                    // succeeded and must be balanced.
  std::vector<VideoCaptureCapabilityWindows> _captureCapabilitiesWindows;
};
} // namespace videocapturemodule
} // namespace webrtc
#endif // MODULES_VIDEO_CAPTURE_MAIN_SOURCE_WINDOWS_DEVICE_INFO_DS_H_

View file

@ -0,0 +1,158 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include <initguid.h> // Must come before the help_functions_ds.h include so
// that DEFINE_GUID() entries will be defined in this
// object file.
#include <cguid.h>
#include "modules/video_capture/windows/help_functions_ds.h"
#include "rtc_base/logging.h"
namespace webrtc {
namespace videocapturemodule {
// Despite the name, this returns the smallest element of `maxFps`. The list
// holds frame durations, so the minimum duration corresponds to the maximum
// frame rate. Returns 0 for a NULL or empty array.
LONGLONG GetMaxOfFrameArray(LONGLONG* maxFps, long size) {
  if (maxFps == NULL || size <= 0) {
    return 0;
  }
  LONGLONG smallest = maxFps[0];
  for (long i = 1; i < size; ++i) {
    if (maxFps[i] < smallest) {
      smallest = maxFps[i];
    }
  }
  return smallest;
}
// Returns the first unconnected input pin of `filter`, or NULL if there is
// none. The returned pin carries the reference obtained from Next(); the
// caller must Release() it.
IPin* GetInputPin(IBaseFilter* filter) {
  IPin* pin = NULL;
  IEnumPins* pPinEnum = NULL;
  filter->EnumPins(&pPinEnum);
  if (pPinEnum == NULL) {
    return NULL;
  }
  // get first unconnected pin
  pPinEnum->Reset();  // set to first pin
  while (S_OK == pPinEnum->Next(1, &pin, NULL)) {
    PIN_DIRECTION pPinDir;
    pin->QueryDirection(&pPinDir);
    if (PINDIR_INPUT == pPinDir)  // This is an input pin
    {
      IPin* tempPin = NULL;
      if (S_OK != pin->ConnectedTo(&tempPin))  // The pin is not connected
      {
        pPinEnum->Release();
        // Transfer the Next() reference to the caller.
        return pin;
      }
    }
    // Not an unconnected input pin; drop it and keep enumerating.
    pin->Release();
  }
  pPinEnum->Release();
  return NULL;
}
// Returns the first output pin of `filter` that matches `Category`
// (GUID_NULL matches any category), or NULL if there is none. The returned
// pin carries the reference obtained from Next(); the caller must Release()
// it.
IPin* GetOutputPin(IBaseFilter* filter, REFGUID Category) {
  IPin* pin = NULL;
  IEnumPins* pPinEnum = NULL;
  filter->EnumPins(&pPinEnum);
  if (pPinEnum == NULL) {
    return NULL;
  }
  // get first unconnected pin
  pPinEnum->Reset();  // set to first pin
  while (S_OK == pPinEnum->Next(1, &pin, NULL)) {
    PIN_DIRECTION pPinDir;
    pin->QueryDirection(&pPinDir);
    if (PINDIR_OUTPUT == pPinDir)  // This is an output pin
    {
      if (Category == GUID_NULL || PinMatchesCategory(pin, Category)) {
        pPinEnum->Release();
        // Transfer the Next() reference to the caller.
        return pin;
      }
    }
    // Wrong direction/category; drop this pin and keep enumerating.
    pin->Release();
    pin = NULL;
  }
  pPinEnum->Release();
  return NULL;
}
// Returns TRUE if `pPin` advertises pin category `Category` through the
// IKsPropertySet AMPROPSETID_Pin property set; FALSE if the pin does not
// expose IKsPropertySet or reports a different category.
BOOL PinMatchesCategory(IPin* pPin, REFGUID Category) {
  BOOL bFound = FALSE;
  IKsPropertySet* pKs = NULL;
  HRESULT hr = pPin->QueryInterface(IID_PPV_ARGS(&pKs));
  if (SUCCEEDED(hr)) {
    GUID PinCategory;
    DWORD cbReturned;
    hr = pKs->Get(AMPROPSETID_Pin, AMPROPERTY_PIN_CATEGORY, NULL, 0,
                  &PinCategory, sizeof(GUID), &cbReturned);
    // Only trust the result if a full GUID was returned.
    if (SUCCEEDED(hr) && (cbReturned == sizeof(GUID))) {
      bFound = (PinCategory == Category);
    }
    pKs->Release();
  }
  return bFound;
}
// Frees the format block and releases the pUnk member of `media_type`,
// leaving the struct itself intact. Safe to call with nullptr.
void ResetMediaType(AM_MEDIA_TYPE* media_type) {
  if (media_type == nullptr)
    return;
  if (media_type->cbFormat != 0) {
    CoTaskMemFree(media_type->pbFormat);
    media_type->pbFormat = nullptr;
    media_type->cbFormat = 0;
  }
  if (media_type->pUnk != nullptr) {
    media_type->pUnk->Release();
    media_type->pUnk = nullptr;
  }
}
// Frees the contents of `media_type` (format block, pUnk) and then the
// struct itself, which must have been allocated with CoTaskMemAlloc.
// Safe to call with nullptr.
void FreeMediaType(AM_MEDIA_TYPE* media_type) {
  if (!media_type)
    return;
  ResetMediaType(media_type);
  CoTaskMemFree(media_type);
}
// Deep-copies `source` into `target`: the struct is copied member-wise, the
// format block is duplicated with CoTaskMemAlloc, and the pUnk member gets an
// extra reference. `target` is assumed to hold no owned resources on entry.
// Returns E_OUTOFMEMORY (with target->cbFormat zeroed) if the format block
// cannot be allocated.
HRESULT CopyMediaType(AM_MEDIA_TYPE* target, const AM_MEDIA_TYPE* source) {
  RTC_DCHECK_NE(source, target);
  *target = *source;
  if (source->cbFormat != 0) {
    RTC_DCHECK(source->pbFormat);
    target->pbFormat =
        reinterpret_cast<BYTE*>(CoTaskMemAlloc(source->cbFormat));
    if (target->pbFormat == nullptr) {
      target->cbFormat = 0;
      return E_OUTOFMEMORY;
    } else {
      CopyMemory(target->pbFormat, source->pbFormat, target->cbFormat);
    }
  }
  // The shallow copy above duplicated the pointer; add the matching ref.
  if (target->pUnk != nullptr)
    target->pUnk->AddRef();
  return S_OK;
}
// Returns a copy of `str` allocated with CoTaskMemAlloc. The caller owns the
// returned buffer and must free it with CoTaskMemFree. Returns nullptr if
// the allocation fails.
wchar_t* DuplicateWideString(const wchar_t* str) {
  size_t len = lstrlenW(str);
  wchar_t* ret =
      reinterpret_cast<LPWSTR>(CoTaskMemAlloc((len + 1) * sizeof(wchar_t)));
  // Fix: lstrcpyW used to be called unconditionally, writing through a null
  // pointer when CoTaskMemAlloc failed.
  if (ret != nullptr) {
    lstrcpyW(ret, str);
  }
  return ret;
}
} // namespace videocapturemodule
} // namespace webrtc

View file

@ -0,0 +1,118 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef MODULES_VIDEO_CAPTURE_MAIN_SOURCE_WINDOWS_HELP_FUNCTIONS_DS_H_
#define MODULES_VIDEO_CAPTURE_MAIN_SOURCE_WINDOWS_HELP_FUNCTIONS_DS_H_
#include <dshow.h>
#include <type_traits>
#include <utility>
#include "api/scoped_refptr.h"
#include "rtc_base/ref_counter.h"
// Media subtype GUIDs not provided by the DirectShow headers. Both follow the
// FourCC-based GUID layout: Data1 holds the FourCC code and the remaining
// bytes are the common {....-0000-0010-8000-00AA00389B71} suffix.
// FourCC "I420" (0x30323449): planar YUV 4:2:0.
DEFINE_GUID(MEDIASUBTYPE_I420,
            0x30323449,
            0x0000,
            0x0010,
            0x80,
            0x00,
            0x00,
            0xAA,
            0x00,
            0x38,
            0x9B,
            0x71);
// FourCC "HDYC" (0x43594448): UYVY-like packed format with BT.709 color,
// used by Blackmagic/DeckLink capture cards.
DEFINE_GUID(MEDIASUBTYPE_HDYC,
            0x43594448,
            0x0000,
            0x0010,
            0x80,
            0x00,
            0x00,
            0xAA,
            0x00,
            0x38,
            0x9B,
            0x71);
// Releases a COM interface pointer if non-null and resets it to NULL.
#define RELEASE_AND_CLEAR(p) \
  if (p) {                   \
    (p)->Release();          \
    (p) = NULL;              \
  }
namespace webrtc {
namespace videocapturemodule {
// Returns the minimum element of `maxFps` (the list holds frame durations,
// so the minimum is the highest frame rate), or 0 for a NULL/empty array.
LONGLONG GetMaxOfFrameArray(LONGLONG* maxFps, long size);
// Pin lookup helpers. The returned pins are AddRef'ed; callers must
// Release() them. Both return NULL when no matching pin exists.
IPin* GetInputPin(IBaseFilter* filter);
IPin* GetOutputPin(IBaseFilter* filter, REFGUID Category);
// True if `pPin` reports pin category `Category` via IKsPropertySet.
BOOL PinMatchesCategory(IPin* pPin, REFGUID Category);
// AM_MEDIA_TYPE helpers: free the contents, free contents + struct, and
// deep-copy respectively.
void ResetMediaType(AM_MEDIA_TYPE* media_type);
void FreeMediaType(AM_MEDIA_TYPE* media_type);
HRESULT CopyMediaType(AM_MEDIA_TYPE* target, const AM_MEDIA_TYPE* source);
// Helper function to make using scoped_refptr with COM interface pointers
// a little less awkward. rtc::scoped_refptr doesn't support the & operator
// or a way to receive values via an out ptr.
// The function is intentionally not called QueryInterface to make things less
// confusing for the compiler to figure out what the caller wants to do when
// called from within the context of a class that also implements COM
// interfaces.
template <class T>
HRESULT GetComInterface(IUnknown* object, rtc::scoped_refptr<T>* ptr) {
  // This helper function is not meant to magically free ptr. If we do that
  // we add code bloat to most places where it's not needed and make the code
  // less readable since it's not clear at the call site that the pointer
  // would get freed even if QI() fails.
  RTC_DCHECK(!ptr->get());
  void* new_ptr = nullptr;
  HRESULT hr = object->QueryInterface(__uuidof(T), &new_ptr);
  // On success, adopt the QI reference into the scoped_refptr without an
  // extra AddRef by swapping the raw pointer in.
  if (SUCCEEDED(hr))
    ptr->swap(reinterpret_cast<T**>(&new_ptr));
  return hr;
}
// Provides a reference count implementation for COM (IUnknown derived)
// classes. The implementation uses atomics for managing the ref count.
// Note that AddRef()/Release() do not return the actual reference count,
// only 1 (and 0 from Release() when the object is destroyed).
template <class T>
class ComRefCount : public T {
 public:
  ComRefCount() {}

  // Forwards a single constructor argument to the wrapped class.
  template <class P0>
  explicit ComRefCount(P0&& p0) : T(std::forward<P0>(p0)) {}

  STDMETHOD_(ULONG, AddRef)() override {
    ref_count_.IncRef();
    return 1;
  }

  STDMETHOD_(ULONG, Release)() override {
    const auto status = ref_count_.DecRef();
    if (status == rtc::RefCountReleaseStatus::kDroppedLastRef) {
      delete this;
      return 0;
    }
    return 1;
  }

 protected:
  // Non-virtual by design; deletion happens via `delete this` above.
  ~ComRefCount() {}

 private:
  webrtc::webrtc_impl::RefCounter ref_count_{0};
};
} // namespace videocapturemodule
} // namespace webrtc
#endif // MODULES_VIDEO_CAPTURE_MAIN_SOURCE_WINDOWS_HELP_FUNCTIONS_DS_H_

View file

@ -0,0 +1,961 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "modules/video_capture/windows/sink_filter_ds.h"
#include <dvdmedia.h> // VIDEOINFOHEADER2
#include <initguid.h>
#include <algorithm>
#include <list>
#include "rtc_base/arraysize.h"
#include "rtc_base/checks.h"
#include "rtc_base/logging.h"
#include "rtc_base/platform_thread.h"
#include "rtc_base/string_utils.h"
// Class id of the capture sink filter defined in this file:
// {88cdbbdc-a73b-4afa-acbf-15d5e2ce12c3}.
DEFINE_GUID(CLSID_SINKFILTER,
            0x88cdbbdc,
            0xa73b,
            0x4afa,
            0xac,
            0xbf,
            0x15,
            0xd5,
            0xe2,
            0xce,
            0x12,
            0xc3);
namespace webrtc {
namespace videocapturemodule {
namespace {
// Simple enumeration implementation that enumerates over a single pin :-/
// IEnumPins implementation that enumerates exactly one pin.
// NOTE(review): AddRef()/Release() are not implemented here, so the class is
// abstract on its own — presumably it is instantiated through the
// ComRefCount<> wrapper; confirm at the instantiation site.
class EnumPins : public IEnumPins {
 public:
  EnumPins(IPin* pin) : pin_(pin) {}

 protected:
  virtual ~EnumPins() {}

 private:
  STDMETHOD(QueryInterface)(REFIID riid, void** ppv) override {
    if (riid == IID_IUnknown || riid == IID_IEnumPins) {
      *ppv = static_cast<IEnumPins*>(this);
      AddRef();
      return S_OK;
    }
    return E_NOINTERFACE;
  }
  // Cloning the enumerator is not needed by the sink filter.
  STDMETHOD(Clone)(IEnumPins** pins) {
    RTC_DCHECK_NOTREACHED();
    return E_NOTIMPL;
  }
  // Hands out the single pin (AddRef'ed) on the first call; afterwards
  // reports S_FALSE with zero pins fetched until Reset() is called.
  STDMETHOD(Next)(ULONG count, IPin** pins, ULONG* fetched) {
    RTC_DCHECK(count > 0);
    RTC_DCHECK(pins);
    // fetched may be NULL.
    if (pos_ > 0) {
      if (fetched)
        *fetched = 0;
      return S_FALSE;
    }
    ++pos_;
    pins[0] = pin_.get();
    pins[0]->AddRef();
    if (fetched)
      *fetched = 1;
    // S_FALSE when fewer pins than requested were returned.
    return count == 1 ? S_OK : S_FALSE;
  }
  STDMETHOD(Skip)(ULONG count) {
    RTC_DCHECK_NOTREACHED();
    return E_NOTIMPL;
  }
  STDMETHOD(Reset)() {
    pos_ = 0;
    return S_OK;
  }

  rtc::scoped_refptr<IPin> pin_;
  // 0 before the pin has been handed out, >0 afterwards.
  int pos_ = 0;
};
// Returns true when `a` matches `b`, treating GUID_NULL fields of `b` as
// wildcards. When `b` specifies a format type, the format block of `a` must
// match it byte-for-byte.
bool IsMediaTypePartialMatch(const AM_MEDIA_TYPE& a, const AM_MEDIA_TYPE& b) {
  const bool major_matches =
      b.majortype == GUID_NULL || a.majortype == b.majortype;
  const bool sub_matches = b.subtype == GUID_NULL || a.subtype == b.subtype;
  if (!major_matches || !sub_matches)
    return false;

  if (b.formattype == GUID_NULL)
    return true;

  // If the format block is specified then it must match exactly.
  if (a.formattype != b.formattype || a.cbFormat != b.cbFormat)
    return false;

  return a.cbFormat == 0 || memcmp(a.pbFormat, b.pbFormat, a.cbFormat) == 0;
}
// A media type is fully specified when both its major type and format type
// are concrete, i.e. neither is the GUID_NULL wildcard.
bool IsMediaTypeFullySpecified(const AM_MEDIA_TYPE& type) {
  if (type.majortype == GUID_NULL)
    return false;
  return type.formattype != GUID_NULL;
}
// Ensures `media_type` owns a format buffer of exactly `length` bytes and
// returns it. If the current buffer already has the right size it is reused.
// Otherwise a new CoTaskMem buffer is allocated, the old one freed, and
// cbFormat/pbFormat updated. Returns nullptr if allocation fails, in which
// case `media_type` is left untouched.
BYTE* AllocMediaTypeFormatBuffer(AM_MEDIA_TYPE* media_type, ULONG length) {
  RTC_DCHECK(length);
  if (media_type->cbFormat == length)
    return media_type->pbFormat;

  // Allocate the new buffer before freeing the old one so that failure
  // leaves the media type in its original, valid state.
  BYTE* buffer = static_cast<BYTE*>(CoTaskMemAlloc(length));
  if (!buffer)
    return nullptr;

  if (media_type->pbFormat) {
    RTC_DCHECK(media_type->cbFormat);
    CoTaskMemFree(media_type->pbFormat);
    media_type->pbFormat = nullptr;
  }

  media_type->cbFormat = length;
  media_type->pbFormat = buffer;
  return buffer;
}
// Fills `props` from `sample`. Uses the IMediaSample2 fast path when the
// sample supports it (a single GetProperties call); otherwise reconstructs
// the properties field by field through the IMediaSample getters.
void GetSampleProperties(IMediaSample* sample, AM_SAMPLE2_PROPERTIES* props) {
  rtc::scoped_refptr<IMediaSample2> sample2;
  if (SUCCEEDED(GetComInterface(sample, &sample2))) {
    sample2->GetProperties(sizeof(*props), reinterpret_cast<BYTE*>(props));
    return;
  }

  // Get the properties the hard way.
  props->cbData = sizeof(*props);
  props->dwTypeSpecificFlags = 0;
  props->dwStreamId = AM_STREAM_MEDIA;
  props->dwSampleFlags = 0;

  if (sample->IsDiscontinuity() == S_OK)
    props->dwSampleFlags |= AM_SAMPLE_DATADISCONTINUITY;

  if (sample->IsPreroll() == S_OK)
    props->dwSampleFlags |= AM_SAMPLE_PREROLL;

  if (sample->IsSyncPoint() == S_OK)
    props->dwSampleFlags |= AM_SAMPLE_SPLICEPOINT;

  if (SUCCEEDED(sample->GetTime(&props->tStart, &props->tStop)))
    props->dwSampleFlags |= AM_SAMPLE_TIMEVALID | AM_SAMPLE_STOPVALID;

  // GetMediaType() returns S_OK only when the sample carries a (changed)
  // media type; pMediaType stays untouched otherwise.
  if (sample->GetMediaType(&props->pMediaType) == S_OK)
    props->dwSampleFlags |= AM_SAMPLE_TYPECHANGED;

  sample->GetPointer(&props->pbBuffer);
  props->lActual = sample->GetActualDataLength();
  props->cbBuffer = sample->GetSize();
}
// Returns true if the media type is supported, false otherwise.
// For supported types, the `capability` will be populated accordingly.
//
// Supported combinations are a video major type carrying either a
// VIDEOINFOHEADER or VIDEOINFOHEADER2 format block, with one of the
// MJPG/I420/YUY2/UYVY/HDYC/RGB24 subtypes.
bool TranslateMediaTypeToVideoCaptureCapability(
    const AM_MEDIA_TYPE* media_type,
    VideoCaptureCapability* capability) {
  RTC_DCHECK(capability);
  if (!media_type || media_type->majortype != MEDIATYPE_Video ||
      !media_type->pbFormat) {
    return false;
  }

  const BITMAPINFOHEADER* bih = nullptr;
  if (media_type->formattype == FORMAT_VideoInfo) {
    bih = &reinterpret_cast<VIDEOINFOHEADER*>(media_type->pbFormat)->bmiHeader;
  } else if (media_type->formattype == FORMAT_VideoInfo2) {
    // Bug fix: this branch previously tested `!= FORMAT_VideoInfo2`, which
    // reinterpreted arbitrary format blocks as VIDEOINFOHEADER2 and rejected
    // genuine FORMAT_VideoInfo2 types.
    bih = &reinterpret_cast<VIDEOINFOHEADER2*>(media_type->pbFormat)->bmiHeader;
  } else {
    return false;
  }

  RTC_LOG(LS_INFO) << "TranslateMediaTypeToVideoCaptureCapability width:"
                   << bih->biWidth << " height:" << bih->biHeight
                   << " Compression:0x" << rtc::ToHex(bih->biCompression);

  const GUID& sub_type = media_type->subtype;
  if (sub_type == MEDIASUBTYPE_MJPG &&
      bih->biCompression == MAKEFOURCC('M', 'J', 'P', 'G')) {
    capability->videoType = VideoType::kMJPEG;
  } else if (sub_type == MEDIASUBTYPE_I420 &&
             bih->biCompression == MAKEFOURCC('I', '4', '2', '0')) {
    capability->videoType = VideoType::kI420;
  } else if (sub_type == MEDIASUBTYPE_YUY2 &&
             bih->biCompression == MAKEFOURCC('Y', 'U', 'Y', '2')) {
    capability->videoType = VideoType::kYUY2;
  } else if (sub_type == MEDIASUBTYPE_UYVY &&
             bih->biCompression == MAKEFOURCC('U', 'Y', 'V', 'Y')) {
    capability->videoType = VideoType::kUYVY;
  } else if (sub_type == MEDIASUBTYPE_HDYC) {
    capability->videoType = VideoType::kUYVY;
  } else if (sub_type == MEDIASUBTYPE_RGB24 && bih->biCompression == BI_RGB) {
    capability->videoType = VideoType::kRGB24;
  } else {
    return false;
  }

  // Store the incoming width and height
  capability->width = bih->biWidth;

  // Store the incoming height,
  // for RGB24 we assume the frame to be upside down
  if (sub_type == MEDIASUBTYPE_RGB24 && bih->biHeight > 0) {
    capability->height = -(bih->biHeight);
  } else {
    capability->height = abs(bih->biHeight);
  }

  return true;
}
// IEnumMediaTypes implementation that offers one fully populated
// VIDEOINFOHEADER-based media type per supported VideoType, in preference
// order. The preferred type from `capability` (if supported) is moved to the
// front of the list so it is offered first during pin negotiation.
class MediaTypesEnum : public IEnumMediaTypes {
 public:
  MediaTypesEnum(const VideoCaptureCapability& capability)
      : capability_(capability),
        format_preference_order_(
            {// Default preferences, sorted by cost-to-convert-to-i420.
             VideoType::kI420, VideoType::kYUY2, VideoType::kRGB24,
             VideoType::kUYVY, VideoType::kMJPEG}) {
    // Use the preferred video type, if supported.
    auto it = std::find(format_preference_order_.begin(),
                        format_preference_order_.end(), capability_.videoType);
    if (it != format_preference_order_.end()) {
      RTC_LOG(LS_INFO) << "Selected video type: " << *it;
      // Move it to the front of the list, if it isn't already there.
      if (it != format_preference_order_.begin()) {
        format_preference_order_.splice(format_preference_order_.begin(),
                                        format_preference_order_, it,
                                        std::next(it));
      }
    } else {
      RTC_LOG(LS_WARNING) << "Unsupported video type: "
                          << rtc::ToString(
                                 static_cast<int>(capability_.videoType))
                          << ", using default preference list.";
    }
  }

 protected:
  virtual ~MediaTypesEnum() {}

 private:
  STDMETHOD(QueryInterface)(REFIID riid, void** ppv) override {
    if (riid == IID_IUnknown || riid == IID_IEnumMediaTypes) {
      *ppv = static_cast<IEnumMediaTypes*>(this);
      AddRef();
      return S_OK;
    }
    return E_NOINTERFACE;
  }

  // IEnumMediaTypes
  STDMETHOD(Clone)(IEnumMediaTypes** pins) {
    RTC_DCHECK_NOTREACHED();
    return E_NOTIMPL;
  }

  // Returns up to `count` freshly CoTaskMem-allocated AM_MEDIA_TYPEs, one per
  // remaining entry in the preference list. The caller owns the returned
  // types (frees them with FreeMediaType/DeleteMediaType).
  STDMETHOD(Next)(ULONG count, AM_MEDIA_TYPE** types, ULONG* fetched) {
    RTC_DCHECK(count > 0);
    RTC_DCHECK(types);
    // fetched may be NULL.
    if (fetched)
      *fetched = 0;

    for (ULONG i = 0;
         i < count && pos_ < static_cast<int>(format_preference_order_.size());
         ++i) {
      AM_MEDIA_TYPE* media_type = reinterpret_cast<AM_MEDIA_TYPE*>(
          CoTaskMemAlloc(sizeof(AM_MEDIA_TYPE)));
      ZeroMemory(media_type, sizeof(*media_type));
      types[i] = media_type;
      VIDEOINFOHEADER* vih = reinterpret_cast<VIDEOINFOHEADER*>(
          AllocMediaTypeFormatBuffer(media_type, sizeof(VIDEOINFOHEADER)));
      ZeroMemory(vih, sizeof(*vih));
      vih->bmiHeader.biSize = sizeof(BITMAPINFOHEADER);
      vih->bmiHeader.biPlanes = 1;
      vih->bmiHeader.biClrImportant = 0;
      vih->bmiHeader.biClrUsed = 0;
      // AvgTimePerFrame is in 100 ns units.
      if (capability_.maxFPS != 0)
        vih->AvgTimePerFrame = 10000000 / capability_.maxFPS;

      SetRectEmpty(&vih->rcSource);  // we want the whole image area rendered.
      SetRectEmpty(&vih->rcTarget);  // no particular destination rectangle

      media_type->majortype = MEDIATYPE_Video;
      media_type->formattype = FORMAT_VideoInfo;
      media_type->bTemporalCompression = FALSE;

      // Set format information.
      auto format_it = std::next(format_preference_order_.begin(), pos_++);
      SetMediaInfoFromVideoType(*format_it, &vih->bmiHeader, media_type);

      // biSizeImage = width * height * bits-per-pixel / 8, written to stay
      // in integer arithmetic for the 12-bpp (YUV 4:2:0) case.
      vih->bmiHeader.biWidth = capability_.width;
      vih->bmiHeader.biHeight = capability_.height;
      vih->bmiHeader.biSizeImage = ((vih->bmiHeader.biBitCount / 4) *
                                    capability_.height * capability_.width) /
                                   2;

      RTC_DCHECK(vih->bmiHeader.biSizeImage);
      media_type->lSampleSize = vih->bmiHeader.biSizeImage;
      media_type->bFixedSizeSamples = true;
      if (fetched)
        ++(*fetched);
    }
    return pos_ == static_cast<int>(format_preference_order_.size()) ? S_FALSE
                                                                     : S_OK;
  }

  // Writes the FOURCC, bit depth and subtype for `video_type` into the
  // bitmap header and media type.
  static void SetMediaInfoFromVideoType(VideoType video_type,
                                        BITMAPINFOHEADER* bitmap_header,
                                        AM_MEDIA_TYPE* media_type) {
    switch (video_type) {
      case VideoType::kI420:
        bitmap_header->biCompression = MAKEFOURCC('I', '4', '2', '0');
        bitmap_header->biBitCount = 12;  // bit per pixel
        media_type->subtype = MEDIASUBTYPE_I420;
        break;
      case VideoType::kYUY2:
        bitmap_header->biCompression = MAKEFOURCC('Y', 'U', 'Y', '2');
        bitmap_header->biBitCount = 16;  // bit per pixel
        media_type->subtype = MEDIASUBTYPE_YUY2;
        break;
      case VideoType::kRGB24:
        bitmap_header->biCompression = BI_RGB;
        bitmap_header->biBitCount = 24;  // bit per pixel
        media_type->subtype = MEDIASUBTYPE_RGB24;
        break;
      case VideoType::kUYVY:
        bitmap_header->biCompression = MAKEFOURCC('U', 'Y', 'V', 'Y');
        bitmap_header->biBitCount = 16;  // bit per pixel
        media_type->subtype = MEDIASUBTYPE_UYVY;
        break;
      case VideoType::kMJPEG:
        bitmap_header->biCompression = MAKEFOURCC('M', 'J', 'P', 'G');
        bitmap_header->biBitCount = 12;  // bit per pixel
        media_type->subtype = MEDIASUBTYPE_MJPG;
        break;
      default:
        RTC_DCHECK_NOTREACHED();
    }
  }

  STDMETHOD(Skip)(ULONG count) {
    RTC_DCHECK_NOTREACHED();
    return E_NOTIMPL;
  }

  STDMETHOD(Reset)() {
    pos_ = 0;
    return S_OK;
  }

  int pos_ = 0;  // Index of the next preference-list entry to hand out.
  const VideoCaptureCapability capability_;
  std::list<VideoType> format_preference_order_;
};
} // namespace
CaptureInputPin::CaptureInputPin(CaptureSinkFilter* filter) {
  capture_checker_.Detach();
  // No reference held to avoid circular references.
  info_.pFilter = filter;
  info_.dir = PINDIR_INPUT;
}

CaptureInputPin::~CaptureInputPin() {
  RTC_DCHECK_RUN_ON(&main_checker_);
  ResetMediaType(&media_type_);
}

// Records the capability the client wants to capture with and clears any
// previously negotiated capability. Must be called while the filter is
// stopped (no capture thread running).
HRESULT CaptureInputPin::SetRequestedCapability(
    const VideoCaptureCapability& capability) {
  RTC_DCHECK_RUN_ON(&main_checker_);
  RTC_DCHECK(Filter()->IsStopped());
  requested_capability_ = capability;
  resulting_capability_ = VideoCaptureCapability();
  return S_OK;
}

// Called when the filter leaves the stopped state. Resets per-run flags and
// detaches the capture-thread checker so a new capture thread can attach.
void CaptureInputPin::OnFilterActivated() {
  RTC_DCHECK_RUN_ON(&main_checker_);
  runtime_error_ = false;
  flushing_ = false;
  capture_checker_.Detach();
  capture_thread_id_ = 0;
}

void CaptureInputPin::OnFilterDeactivated() {
  RTC_DCHECK_RUN_ON(&main_checker_);
  // Expedite shutdown by raising the flushing flag so no further processing
  // on the capture thread occurs. When the graph is stopped and all filters
  // have been told to stop, the media controller (graph) will wait for the
  // capture thread to stop.
  flushing_ = true;
  if (allocator_)
    allocator_->Decommit();
}

// The owning filter. Held without a reference (see the constructor), so no
// Release() is needed.
CaptureSinkFilter* CaptureInputPin::Filter() const {
  return static_cast<CaptureSinkFilter*>(info_.pFilter);
}
// Tries to connect this pin to `receive_pin` using the (fully specified)
// `media_type`. On success the negotiated capability and media type are
// stored; on failure the allocator is cleared and an error is returned.
HRESULT CaptureInputPin::AttemptConnection(IPin* receive_pin,
                                           const AM_MEDIA_TYPE* media_type) {
  RTC_DCHECK_RUN_ON(&main_checker_);
  RTC_DCHECK(Filter()->IsStopped());

  // Check that the connection is valid -- need to do this for every
  // connect attempt since BreakConnect will undo it.
  HRESULT hr = CheckDirection(receive_pin);
  if (FAILED(hr))
    return hr;

  if (!TranslateMediaTypeToVideoCaptureCapability(media_type,
                                                  &resulting_capability_)) {
    ClearAllocator(true);
    return VFW_E_TYPE_NOT_ACCEPTED;
  }

  // See if the other pin will accept this type.
  hr = receive_pin->ReceiveConnection(static_cast<IPin*>(this), media_type);
  if (FAILED(hr)) {
    receive_pin_ = nullptr;  // Should already be null, but just in case.
    return hr;
  }

  // Should have been set as part of the connect process.
  RTC_DCHECK_EQ(receive_pin_, receive_pin);

  ResetMediaType(&media_type_);
  CopyMediaType(&media_type_, media_type);

  return S_OK;
}
// Collects media types that partially match `media_type`, first from
// `receive_pin`'s enumerator and then from our own. The caller owns the
// returned AM_MEDIA_TYPEs and must free each with FreeMediaType().
std::vector<AM_MEDIA_TYPE*> CaptureInputPin::DetermineCandidateFormats(
    IPin* receive_pin,
    const AM_MEDIA_TYPE* media_type) {
  RTC_DCHECK_RUN_ON(&main_checker_);
  RTC_DCHECK(receive_pin);
  RTC_DCHECK(media_type);

  std::vector<AM_MEDIA_TYPE*> ret;

  for (int i = 0; i < 2; i++) {
    IEnumMediaTypes* types = nullptr;
    if (i == 0) {
      // First time around, try types from receive_pin.
      receive_pin->EnumMediaTypes(&types);
    } else {
      // Then try ours.
      EnumMediaTypes(&types);
    }

    if (types) {
      while (true) {
        ULONG fetched = 0;
        AM_MEDIA_TYPE* this_type = nullptr;
        if (types->Next(1, &this_type, &fetched) != S_OK)
          break;

        if (IsMediaTypePartialMatch(*this_type, *media_type)) {
          ret.push_back(this_type);
        } else {
          FreeMediaType(this_type);
        }
      }
      types->Release();
    }
  }

  return ret;
}
// Drops our reference to the sample allocator, optionally decommitting it
// first so its buffers are released.
void CaptureInputPin::ClearAllocator(bool decommit) {
  RTC_DCHECK_RUN_ON(&main_checker_);
  if (!allocator_)
    return;
  if (decommit)
    allocator_->Decommit();
  allocator_ = nullptr;
}

HRESULT CaptureInputPin::CheckDirection(IPin* pin) const {
  RTC_DCHECK_RUN_ON(&main_checker_);
  PIN_DIRECTION pd;
  pin->QueryDirection(&pd);
  // Fairly basic check, make sure we don't pair input with input etc.
  return pd == info_.dir ? VFW_E_INVALID_DIRECTION : S_OK;
}

COM_DECLSPEC_NOTHROW STDMETHODIMP CaptureInputPin::QueryInterface(REFIID riid,
                                                                  void** ppv) {
  (*ppv) = nullptr;
  // This pin exposes IUnknown/IMemInputPin via the IMemInputPin vtable and
  // IPin via its own vtable; both AddRef through the same object.
  if (riid == IID_IUnknown || riid == IID_IMemInputPin) {
    *ppv = static_cast<IMemInputPin*>(this);
  } else if (riid == IID_IPin) {
    *ppv = static_cast<IPin*>(this);
  }

  if (!(*ppv))
    return E_NOINTERFACE;

  static_cast<IMemInputPin*>(this)->AddRef();
  return S_OK;
}
// IPin::Connect. If `media_type` is fully specified, attempt the connection
// with it directly; otherwise enumerate candidate types that partially match
// it (theirs first, then ours) and try each until one connects.
COM_DECLSPEC_NOTHROW STDMETHODIMP
CaptureInputPin::Connect(IPin* receive_pin, const AM_MEDIA_TYPE* media_type) {
  RTC_DCHECK_RUN_ON(&main_checker_);
  if (!media_type || !receive_pin)
    return E_POINTER;

  if (!Filter()->IsStopped())
    return VFW_E_NOT_STOPPED;

  if (receive_pin_) {
    RTC_DCHECK_NOTREACHED();
    return VFW_E_ALREADY_CONNECTED;
  }

  if (IsMediaTypeFullySpecified(*media_type))
    return AttemptConnection(receive_pin, media_type);

  auto types = DetermineCandidateFormats(receive_pin, media_type);
  bool connected = false;
  for (auto* type : types) {
    // Bug fix: previously this passed `media_type` (the partial template)
    // instead of the enumerated candidate `type`, so none of the candidate
    // formats was ever actually tried.
    if (!connected && AttemptConnection(receive_pin, type) == S_OK)
      connected = true;

    // Free every candidate, tried or not.
    FreeMediaType(type);
  }

  return connected ? S_OK : VFW_E_NO_ACCEPTABLE_TYPES;
}
// IPin::ReceiveConnection. Called by the upstream (output) pin during its
// connect sequence; accepts the connection if the media type translates to
// a capability we support.
COM_DECLSPEC_NOTHROW STDMETHODIMP
CaptureInputPin::ReceiveConnection(IPin* connector,
                                   const AM_MEDIA_TYPE* media_type) {
  RTC_DCHECK_RUN_ON(&main_checker_);
  RTC_DCHECK(Filter()->IsStopped());

  if (receive_pin_) {
    RTC_DCHECK_NOTREACHED();
    return VFW_E_ALREADY_CONNECTED;
  }

  HRESULT hr = CheckDirection(connector);
  if (FAILED(hr))
    return hr;

  if (!TranslateMediaTypeToVideoCaptureCapability(media_type,
                                                  &resulting_capability_))
    return VFW_E_TYPE_NOT_ACCEPTED;

  // Complete the connection

  receive_pin_ = connector;
  ResetMediaType(&media_type_);
  CopyMediaType(&media_type_, media_type);

  return S_OK;
}

// IPin::Disconnect. Only valid while the filter is stopped; releases the
// allocator and drops the peer pin.
COM_DECLSPEC_NOTHROW STDMETHODIMP CaptureInputPin::Disconnect() {
  RTC_DCHECK_RUN_ON(&main_checker_);
  if (!Filter()->IsStopped())
    return VFW_E_NOT_STOPPED;

  if (!receive_pin_)
    return S_FALSE;

  ClearAllocator(true);
  receive_pin_ = nullptr;

  return S_OK;
}

// IPin::ConnectedTo. Returns an AddRef'd pointer to the peer pin, if any.
COM_DECLSPEC_NOTHROW STDMETHODIMP CaptureInputPin::ConnectedTo(IPin** pin) {
  RTC_DCHECK_RUN_ON(&main_checker_);
  if (!receive_pin_)
    return VFW_E_NOT_CONNECTED;

  *pin = receive_pin_.get();
  receive_pin_->AddRef();

  return S_OK;
}

// IPin::ConnectionMediaType. Copies the negotiated media type; the caller
// frees the copy.
COM_DECLSPEC_NOTHROW STDMETHODIMP
CaptureInputPin::ConnectionMediaType(AM_MEDIA_TYPE* media_type) {
  RTC_DCHECK_RUN_ON(&main_checker_);
  if (!receive_pin_)
    return VFW_E_NOT_CONNECTED;

  CopyMediaType(media_type, &media_type_);

  return S_OK;
}

// IPin::QueryPinInfo. The returned PIN_INFO carries an AddRef'd filter
// pointer per COM convention.
COM_DECLSPEC_NOTHROW STDMETHODIMP
CaptureInputPin::QueryPinInfo(PIN_INFO* info) {
  RTC_DCHECK_RUN_ON(&main_checker_);
  *info = info_;
  if (info_.pFilter)
    info_.pFilter->AddRef();
  return S_OK;
}

COM_DECLSPEC_NOTHROW STDMETHODIMP
CaptureInputPin::QueryDirection(PIN_DIRECTION* pin_dir) {
  RTC_DCHECK_RUN_ON(&main_checker_);
  *pin_dir = info_.dir;
  return S_OK;
}
// IPin::QueryId. Returns a CoTaskMem-allocated copy of the pin name; the
// caller frees it with CoTaskMemFree.
COM_DECLSPEC_NOTHROW STDMETHODIMP CaptureInputPin::QueryId(LPWSTR* id) {
  RTC_DCHECK_RUN_ON(&main_checker_);
  size_t len = lstrlenW(info_.achName);
  *id = reinterpret_cast<LPWSTR>(CoTaskMemAlloc((len + 1) * sizeof(wchar_t)));
  // Robustness fix: CoTaskMemAlloc can fail; previously the result was
  // dereferenced unconditionally by lstrcpyW.
  if (!*id)
    return E_OUTOFMEMORY;
  lstrcpyW(*id, info_.achName);
  return S_OK;
}
// IPin::QueryAccept. Per DirectShow convention this returns S_OK when the
// pin accepts the proposed type and S_FALSE when it does not.
COM_DECLSPEC_NOTHROW STDMETHODIMP
CaptureInputPin::QueryAccept(const AM_MEDIA_TYPE* media_type) {
  RTC_DCHECK_RUN_ON(&main_checker_);
  RTC_DCHECK(Filter()->IsStopped());
  // Probe with a scratch copy so resulting_capability_ is not modified.
  VideoCaptureCapability capability(resulting_capability_);
  // Bug fix: the S_OK/S_FALSE results were previously swapped, reporting
  // acceptable types as rejected and vice versa.
  return TranslateMediaTypeToVideoCaptureCapability(media_type, &capability)
             ? S_OK
             : S_FALSE;
}
// IPin::EnumMediaTypes. Hands out a new enumerator over the formats we can
// accept, seeded with the client's requested capability.
COM_DECLSPEC_NOTHROW STDMETHODIMP
CaptureInputPin::EnumMediaTypes(IEnumMediaTypes** types) {
  RTC_DCHECK_RUN_ON(&main_checker_);
  *types = new ComRefCount<MediaTypesEnum>(requested_capability_);
  (*types)->AddRef();
  return S_OK;
}

COM_DECLSPEC_NOTHROW STDMETHODIMP
CaptureInputPin::QueryInternalConnections(IPin** pins, ULONG* count) {
  return E_NOTIMPL;
}

COM_DECLSPEC_NOTHROW STDMETHODIMP CaptureInputPin::EndOfStream() {
  return S_OK;
}

// IPin::BeginFlush. Raising `flushing_` makes Receive() reject samples until
// EndFlush() is called.
COM_DECLSPEC_NOTHROW STDMETHODIMP CaptureInputPin::BeginFlush() {
  RTC_DCHECK_RUN_ON(&main_checker_);
  flushing_ = true;
  return S_OK;
}

COM_DECLSPEC_NOTHROW STDMETHODIMP CaptureInputPin::EndFlush() {
  RTC_DCHECK_RUN_ON(&main_checker_);
  flushing_ = false;
  runtime_error_ = false;
  return S_OK;
}

COM_DECLSPEC_NOTHROW STDMETHODIMP
CaptureInputPin::NewSegment(REFERENCE_TIME start,
                            REFERENCE_TIME stop,
                            double rate) {
  RTC_DCHECK_RUN_ON(&main_checker_);
  return S_OK;
}

// IMemInputPin::GetAllocator. Lazily creates the default DirectShow memory
// allocator on first use and returns an AddRef'd pointer to it.
COM_DECLSPEC_NOTHROW STDMETHODIMP
CaptureInputPin::GetAllocator(IMemAllocator** allocator) {
  RTC_DCHECK_RUN_ON(&main_checker_);
  if (allocator_ == nullptr) {
    HRESULT hr = CoCreateInstance(CLSID_MemoryAllocator, 0,
                                  CLSCTX_INPROC_SERVER, IID_IMemAllocator,
                                  reinterpret_cast<void**>(allocator));
    if (FAILED(hr))
      return hr;
    // Take over the reference returned by CoCreateInstance.
    allocator_.swap(allocator);
  }
  *allocator = allocator_.get();
  allocator_->AddRef();
  return S_OK;
}

// IMemInputPin::NotifyAllocator. Adopts the allocator the upstream pin has
// chosen; the swap/AddRef/Release dance releases any previously held
// allocator while taking a reference on the new one.
COM_DECLSPEC_NOTHROW STDMETHODIMP
CaptureInputPin::NotifyAllocator(IMemAllocator* allocator, BOOL read_only) {
  RTC_DCHECK_RUN_ON(&main_checker_);
  allocator_.swap(&allocator);
  if (allocator_)
    allocator_->AddRef();
  if (allocator)
    allocator->Release();
  return S_OK;
}

COM_DECLSPEC_NOTHROW STDMETHODIMP
CaptureInputPin::GetAllocatorRequirements(ALLOCATOR_PROPERTIES* props) {
  return E_NOTIMPL;
}
// IMemInputPin::Receive. Runs on the capture thread. Rejects samples while
// flushing or after a runtime error, handles in-band format changes, and
// forwards the frame bytes to the filter's observer.
COM_DECLSPEC_NOTHROW STDMETHODIMP
CaptureInputPin::Receive(IMediaSample* media_sample) {
  RTC_DCHECK_RUN_ON(&capture_checker_);

  CaptureSinkFilter* const filter = static_cast<CaptureSinkFilter*>(Filter());

  if (flushing_.load(std::memory_order_relaxed))
    return S_FALSE;

  if (runtime_error_.load(std::memory_order_relaxed))
    return VFW_E_RUNTIME_ERROR;

  if (!capture_thread_id_) {
    // Make sure we set the thread name only once.
    capture_thread_id_ = GetCurrentThreadId();
    rtc::SetCurrentThreadName("webrtc_video_capture");
  }

  AM_SAMPLE2_PROPERTIES sample_props = {};
  GetSampleProperties(media_sample, &sample_props);

  // Has the format changed in this sample?
  if (sample_props.dwSampleFlags & AM_SAMPLE_TYPECHANGED) {
    // Check the derived class accepts the new format.
    // This shouldn't fail as the source must call QueryAccept first.

    // Note: This will modify resulting_capability_.
    // That should be OK as long as resulting_capability_ is only modified
    // on this thread while it is running (filter is not stopped), and only
    // modified on the main thread when the filter is stopped (i.e. this thread
    // is not running).
    if (!TranslateMediaTypeToVideoCaptureCapability(sample_props.pMediaType,
                                                    &resulting_capability_)) {
      // Raise a runtime error if we fail the media type
      runtime_error_ = true;
      EndOfStream();
      Filter()->NotifyEvent(EC_ERRORABORT, VFW_E_TYPE_NOT_ACCEPTED, 0);
      return VFW_E_INVALIDMEDIATYPE;
    }
  }

  filter->ProcessCapturedFrame(sample_props.pbBuffer, sample_props.lActual,
                               resulting_capability_);

  return S_OK;
}
// IMemInputPin::ReceiveMultiple. Delivers a batch of samples one at a time
// via Receive(), stopping at the first sample that is not accepted.
// `processed` reports how many samples were successfully handled.
COM_DECLSPEC_NOTHROW STDMETHODIMP
CaptureInputPin::ReceiveMultiple(IMediaSample** samples,
                                 long count,
                                 long* processed) {
  *processed = 0;
  HRESULT result = S_OK;
  for (long i = 0; i < count; ++i) {
    result = Receive(samples[i]);
    if (result != S_OK)
      break;
    *processed = i + 1;
  }
  return result;
}
// IMemInputPin::ReceiveCanBlock. S_FALSE tells the upstream filter that
// Receive() will not block.
COM_DECLSPEC_NOTHROW STDMETHODIMP CaptureInputPin::ReceiveCanBlock() {
  return S_FALSE;
}
// ----------------------------------------------------------------------------
CaptureSinkFilter::CaptureSinkFilter(VideoCaptureImpl* capture_observer)
    : input_pin_(new ComRefCount<CaptureInputPin>(this)),
      capture_observer_(capture_observer) {}

CaptureSinkFilter::~CaptureSinkFilter() {
  RTC_DCHECK_RUN_ON(&main_checker_);
}

// Forwards the client's requested capability to the input pin. Called on the
// same thread as capture is started on.
HRESULT CaptureSinkFilter::SetRequestedCapability(
    const VideoCaptureCapability& capability) {
  RTC_DCHECK_RUN_ON(&main_checker_);
  // Called on the same thread as capture is started on.
  return input_pin_->SetRequestedCapability(capability);
}

// IMediaFilter::GetState. State transitions here are synchronous, so the
// timeout is ignored.
COM_DECLSPEC_NOTHROW STDMETHODIMP
CaptureSinkFilter::GetState(DWORD msecs, FILTER_STATE* state) {
  RTC_DCHECK_RUN_ON(&main_checker_);
  *state = state_;
  return S_OK;
}

// IMediaFilter::SetSyncSource. We don't use a reference clock; accept and
// ignore it.
COM_DECLSPEC_NOTHROW STDMETHODIMP
CaptureSinkFilter::SetSyncSource(IReferenceClock* clock) {
  RTC_DCHECK_RUN_ON(&main_checker_);
  return S_OK;
}

COM_DECLSPEC_NOTHROW STDMETHODIMP
CaptureSinkFilter::GetSyncSource(IReferenceClock** clock) {
  RTC_DCHECK_RUN_ON(&main_checker_);
  return E_NOTIMPL;
}

COM_DECLSPEC_NOTHROW STDMETHODIMP CaptureSinkFilter::Pause() {
  RTC_DCHECK_RUN_ON(&main_checker_);
  state_ = State_Paused;
  return S_OK;
}

// IMediaFilter::Run. Moves through Paused if currently Stopped, then tells
// the pin the filter is active so it accepts samples.
COM_DECLSPEC_NOTHROW STDMETHODIMP
CaptureSinkFilter::Run(REFERENCE_TIME tStart) {
  RTC_DCHECK_RUN_ON(&main_checker_);
  if (state_ == State_Stopped)
    Pause();

  state_ = State_Running;
  input_pin_->OnFilterActivated();

  return S_OK;
}

COM_DECLSPEC_NOTHROW STDMETHODIMP CaptureSinkFilter::Stop() {
  RTC_DCHECK_RUN_ON(&main_checker_);
  if (state_ == State_Stopped)
    return S_OK;

  state_ = State_Stopped;
  input_pin_->OnFilterDeactivated();

  return S_OK;
}
// IBaseFilter::EnumPins. Returns an enumerator over our single input pin.
COM_DECLSPEC_NOTHROW STDMETHODIMP
CaptureSinkFilter::EnumPins(IEnumPins** pins) {
  RTC_DCHECK_RUN_ON(&main_checker_);
  *pins = new ComRefCount<class EnumPins>(input_pin_.get());
  (*pins)->AddRef();
  return S_OK;
}

COM_DECLSPEC_NOTHROW STDMETHODIMP CaptureSinkFilter::FindPin(LPCWSTR id,
                                                             IPin** pin) {
  RTC_DCHECK_RUN_ON(&main_checker_);
  // There's no ID assigned to our input pin, so looking it up based on one
  // is pointless (and in practice, this method isn't being used).
  return VFW_E_NOT_FOUND;
}

// IBaseFilter::QueryFilterInfo. The returned FILTER_INFO carries an AddRef'd
// graph pointer per COM convention.
COM_DECLSPEC_NOTHROW STDMETHODIMP
CaptureSinkFilter::QueryFilterInfo(FILTER_INFO* info) {
  RTC_DCHECK_RUN_ON(&main_checker_);
  *info = info_;
  if (info->pGraph)
    info->pGraph->AddRef();
  return S_OK;
}

COM_DECLSPEC_NOTHROW STDMETHODIMP
CaptureSinkFilter::JoinFilterGraph(IFilterGraph* graph, LPCWSTR name) {
  RTC_DCHECK_RUN_ON(&main_checker_);
  RTC_DCHECK(IsStopped());

  // Note, since a reference to the filter is held by the graph manager,
  // filters must not hold a reference to the graph. If they would, we'd have
  // a circular reference. Instead, a pointer to the graph can be held without
  // reference. See documentation for IBaseFilter::JoinFilterGraph for more.
  info_.pGraph = graph;  // No AddRef().
  sink_ = nullptr;

  if (info_.pGraph) {
    // make sure we don't hold on to the reference we may receive.
    // Note that this assumes the same object identity, but so be it.
    rtc::scoped_refptr<IMediaEventSink> sink;
    GetComInterface(info_.pGraph, &sink);
    sink_ = sink.get();
  }

  info_.achName[0] = L'\0';
  if (name)
    lstrcpynW(info_.achName, name, arraysize(info_.achName));

  return S_OK;
}

COM_DECLSPEC_NOTHROW STDMETHODIMP
CaptureSinkFilter::QueryVendorInfo(LPWSTR* vendor_info) {
  return E_NOTIMPL;
}
// Hands a captured frame to the observer. Called on the capture thread.
void CaptureSinkFilter::ProcessCapturedFrame(
    unsigned char* buffer,
    size_t length,
    const VideoCaptureCapability& frame_info) {
  // Called on the capture thread.
  capture_observer_->IncomingFrame(buffer, length, frame_info);
}

// Forwards a graph event to the graph's event sink, if we have one.
// Called on the capture thread.
void CaptureSinkFilter::NotifyEvent(long code,
                                    LONG_PTR param1,
                                    LONG_PTR param2) {
  // Called on the capture thread.
  if (!sink_)
    return;

  if (EC_COMPLETE == code)
    param2 = reinterpret_cast<LONG_PTR>(static_cast<IBaseFilter*>(this));
  sink_->Notify(code, param1, param2);
}

bool CaptureSinkFilter::IsStopped() const {
  RTC_DCHECK_RUN_ON(&main_checker_);
  return state_ == State_Stopped;
}

COM_DECLSPEC_NOTHROW STDMETHODIMP
CaptureSinkFilter::QueryInterface(REFIID riid, void** ppv) {
  if (riid == IID_IUnknown || riid == IID_IPersist || riid == IID_IBaseFilter) {
    *ppv = static_cast<IBaseFilter*>(this);
    AddRef();
    return S_OK;
  }
  return E_NOINTERFACE;
}

// IPersist::GetClassID. Reports the GUID defined at the top of this file.
COM_DECLSPEC_NOTHROW STDMETHODIMP CaptureSinkFilter::GetClassID(CLSID* clsid) {
  *clsid = CLSID_SINKFILTER;
  return S_OK;
}
} // namespace videocapturemodule
} // namespace webrtc

View file

@ -0,0 +1,162 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef MODULES_VIDEO_CAPTURE_MAIN_SOURCE_WINDOWS_SINK_FILTER_DS_H_
#define MODULES_VIDEO_CAPTURE_MAIN_SOURCE_WINDOWS_SINK_FILTER_DS_H_
#include <dshow.h>
#include <atomic>
#include <memory>
#include <vector>
#include "api/sequence_checker.h"
#include "modules/video_capture/video_capture_impl.h"
#include "modules/video_capture/windows/help_functions_ds.h"
#include "rtc_base/thread_annotations.h"
namespace webrtc {
namespace videocapturemodule {
// forward declarations
class CaptureSinkFilter;
// Input pin for camera input
// Implements IMemInputPin, IPin.
// Input pin of the capture sink filter. Receives video samples from the
// camera's output pin and forwards them to the owning CaptureSinkFilter.
// Implements IMemInputPin and IPin.
class CaptureInputPin : public IMemInputPin, public IPin {
 public:
  CaptureInputPin(CaptureSinkFilter* filter);

  // Stores the capability the client wants; must be called while stopped.
  HRESULT SetRequestedCapability(const VideoCaptureCapability& capability);

  // Notifications from the filter.
  void OnFilterActivated();
  void OnFilterDeactivated();

 protected:
  virtual ~CaptureInputPin();

 private:
  // The owning filter (held without a reference in info_.pFilter).
  CaptureSinkFilter* Filter() const;
  HRESULT AttemptConnection(IPin* receive_pin, const AM_MEDIA_TYPE* media_type);
  std::vector<AM_MEDIA_TYPE*> DetermineCandidateFormats(
      IPin* receive_pin,
      const AM_MEDIA_TYPE* media_type);
  void ClearAllocator(bool decommit);
  HRESULT CheckDirection(IPin* pin) const;

  // IUnknown
  STDMETHOD(QueryInterface)(REFIID riid, void** ppv) override;

  // clang-format off
  // clang isn't sure what to do with the longer STDMETHOD() function
  // declarations.

  // IPin
  STDMETHOD(Connect)(IPin* receive_pin,
                     const AM_MEDIA_TYPE* media_type) override;
  STDMETHOD(ReceiveConnection)(IPin* connector,
                               const AM_MEDIA_TYPE* media_type) override;
  STDMETHOD(Disconnect)() override;
  STDMETHOD(ConnectedTo)(IPin** pin) override;
  STDMETHOD(ConnectionMediaType)(AM_MEDIA_TYPE* media_type) override;
  STDMETHOD(QueryPinInfo)(PIN_INFO* info) override;
  STDMETHOD(QueryDirection)(PIN_DIRECTION* pin_dir) override;
  STDMETHOD(QueryId)(LPWSTR* id) override;
  STDMETHOD(QueryAccept)(const AM_MEDIA_TYPE* media_type) override;
  STDMETHOD(EnumMediaTypes)(IEnumMediaTypes** types) override;
  STDMETHOD(QueryInternalConnections)(IPin** pins, ULONG* count) override;
  STDMETHOD(EndOfStream)() override;
  STDMETHOD(BeginFlush)() override;
  STDMETHOD(EndFlush)() override;
  STDMETHOD(NewSegment)(REFERENCE_TIME start, REFERENCE_TIME stop,
                        double rate) override;

  // IMemInputPin
  STDMETHOD(GetAllocator)(IMemAllocator** allocator) override;
  STDMETHOD(NotifyAllocator)(IMemAllocator* allocator, BOOL read_only) override;
  STDMETHOD(GetAllocatorRequirements)(ALLOCATOR_PROPERTIES* props) override;
  STDMETHOD(Receive)(IMediaSample* sample) override;
  STDMETHOD(ReceiveMultiple)(IMediaSample** samples, long count,
                             long* processed) override;
  STDMETHOD(ReceiveCanBlock)() override;
  // clang-format on

  // Thread on which the pin is created/connected/configured.
  SequenceChecker main_checker_;
  // Thread on which samples arrive (detached while stopped).
  SequenceChecker capture_checker_;

  VideoCaptureCapability requested_capability_ RTC_GUARDED_BY(main_checker_);
  // Accessed on the main thread when Filter()->IsStopped() (capture thread not
  // running), otherwise accessed on the capture thread.
  VideoCaptureCapability resulting_capability_;
  DWORD capture_thread_id_ = 0;
  rtc::scoped_refptr<IMemAllocator> allocator_ RTC_GUARDED_BY(main_checker_);
  rtc::scoped_refptr<IPin> receive_pin_ RTC_GUARDED_BY(main_checker_);
  // Set in BeginFlush()/OnFilterDeactivated(); read on the capture thread.
  std::atomic_bool flushing_{false};
  // Set when an in-band media-type change is rejected in Receive().
  std::atomic_bool runtime_error_{false};
  // Holds a referenceless pointer to the owning filter, the name and
  // direction of the pin. The filter pointer can be considered const.
  PIN_INFO info_ = {};
  AM_MEDIA_TYPE media_type_ RTC_GUARDED_BY(main_checker_) = {};
};
// Implement IBaseFilter (including IPersist and IMediaFilter).
// Renderer-style sink filter that delivers captured frames to a
// VideoCaptureImpl observer through its single CaptureInputPin.
class CaptureSinkFilter : public IBaseFilter {
 public:
  CaptureSinkFilter(VideoCaptureImpl* capture_observer);

  // Forwards the requested capability to the input pin.
  HRESULT SetRequestedCapability(const VideoCaptureCapability& capability);

  // Called on the capture thread.
  void ProcessCapturedFrame(unsigned char* buffer,
                            size_t length,
                            const VideoCaptureCapability& frame_info);

  // Forwards graph events (e.g. EC_ERRORABORT) to the graph's event sink.
  void NotifyEvent(long code, LONG_PTR param1, LONG_PTR param2);
  bool IsStopped() const;

  //  IUnknown
  STDMETHOD(QueryInterface)(REFIID riid, void** ppv) override;

  // IPersist
  STDMETHOD(GetClassID)(CLSID* clsid) override;

  // IMediaFilter.
  STDMETHOD(GetState)(DWORD msecs, FILTER_STATE* state) override;
  STDMETHOD(SetSyncSource)(IReferenceClock* clock) override;
  STDMETHOD(GetSyncSource)(IReferenceClock** clock) override;
  STDMETHOD(Pause)() override;
  STDMETHOD(Run)(REFERENCE_TIME start) override;
  STDMETHOD(Stop)() override;

  // IBaseFilter
  STDMETHOD(EnumPins)(IEnumPins** pins) override;
  STDMETHOD(FindPin)(LPCWSTR id, IPin** pin) override;
  STDMETHOD(QueryFilterInfo)(FILTER_INFO* info) override;
  STDMETHOD(JoinFilterGraph)(IFilterGraph* graph, LPCWSTR name) override;
  STDMETHOD(QueryVendorInfo)(LPWSTR* vendor_info) override;

 protected:
  virtual ~CaptureSinkFilter();

 private:
  SequenceChecker main_checker_;
  const rtc::scoped_refptr<ComRefCount<CaptureInputPin>> input_pin_;
  // Receives each captured frame; not owned.
  VideoCaptureImpl* const capture_observer_;
  FILTER_INFO info_ RTC_GUARDED_BY(main_checker_) = {};
  // Set/cleared in JoinFilterGraph. The filter must be stopped (no capture)
  // at that time, so no lock is required. While the state is not stopped,
  // the sink will be used from the capture thread.
  IMediaEventSink* sink_ = nullptr;
  FILTER_STATE state_ RTC_GUARDED_BY(main_checker_) = State_Stopped;
};
} // namespace videocapturemodule
} // namespace webrtc
#endif // MODULES_VIDEO_CAPTURE_MAIN_SOURCE_WINDOWS_SINK_FILTER_DS_H_

View file

@ -0,0 +1,337 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "modules/video_capture/windows/video_capture_ds.h"
#include <dvdmedia.h> // VIDEOINFOHEADER2
#include "modules/video_capture/video_capture_config.h"
#include "modules/video_capture/windows/help_functions_ds.h"
#include "modules/video_capture/windows/sink_filter_ds.h"
#include "rtc_base/logging.h"
namespace webrtc {
namespace videocapturemodule {
VideoCaptureDS::VideoCaptureDS()
    : _captureFilter(NULL),
      _graphBuilder(NULL),
      _mediaControl(NULL),
      _inputSendPin(NULL),
      _outputCapturePin(NULL),
      _dvFilter(NULL),
      _inputDvPin(NULL),
      _outputDvPin(NULL) {}

VideoCaptureDS::~VideoCaptureDS() {
  // Stop the graph before tearing it down so no filter is streaming while
  // being removed.
  if (_mediaControl) {
    _mediaControl->Stop();
  }
  if (_graphBuilder) {
    if (sink_filter_)
      _graphBuilder->RemoveFilter(sink_filter_.get());
    if (_captureFilter)
      _graphBuilder->RemoveFilter(_captureFilter);
    if (_dvFilter)
      _graphBuilder->RemoveFilter(_dvFilter);
  }
  // Release pins before their filters, filters before the graph.
  RELEASE_AND_CLEAR(_inputSendPin);
  RELEASE_AND_CLEAR(_outputCapturePin);

  RELEASE_AND_CLEAR(_captureFilter);  // release the capture device
  RELEASE_AND_CLEAR(_dvFilter);

  RELEASE_AND_CLEAR(_mediaControl);

  RELEASE_AND_CLEAR(_inputDvPin);
  RELEASE_AND_CLEAR(_outputDvPin);

  RELEASE_AND_CLEAR(_graphBuilder);
}
// Builds the capture graph for the device identified by `deviceUniqueIdUTF8`:
// capture filter -> (sink filter), wires the pins, and applies the default
// requested capability. Returns 0 on success, -1 on any failure.
int32_t VideoCaptureDS::Init(const char* deviceUniqueIdUTF8) {
  RTC_DCHECK_RUN_ON(&api_checker_);

  const int32_t nameLength = (int32_t)strlen((char*)deviceUniqueIdUTF8);
  if (nameLength >= kVideoCaptureUniqueNameLength)
    return -1;

  // Store the device name
  _deviceUniqueId = new (std::nothrow) char[nameLength + 1];
  memcpy(_deviceUniqueId, deviceUniqueIdUTF8, nameLength + 1);

  if (_dsInfo.Init() != 0)
    return -1;

  _captureFilter = _dsInfo.GetDeviceFilter(deviceUniqueIdUTF8);
  if (!_captureFilter) {
    RTC_LOG(LS_INFO) << "Failed to create capture filter.";
    return -1;
  }

  // Get the interface for DirectShow's GraphBuilder
  HRESULT hr = CoCreateInstance(CLSID_FilterGraph, NULL, CLSCTX_INPROC_SERVER,
                                IID_IGraphBuilder, (void**)&_graphBuilder);
  if (FAILED(hr)) {
    RTC_LOG(LS_INFO) << "Failed to create graph builder.";
    return -1;
  }

  hr = _graphBuilder->QueryInterface(IID_IMediaControl, (void**)&_mediaControl);
  if (FAILED(hr)) {
    RTC_LOG(LS_INFO) << "Failed to create media control builder.";
    return -1;
  }
  hr = _graphBuilder->AddFilter(_captureFilter, CAPTURE_FILTER_NAME);
  if (FAILED(hr)) {
    RTC_LOG(LS_INFO) << "Failed to add the capture device to the graph.";
    return -1;
  }

  _outputCapturePin = GetOutputPin(_captureFilter, PIN_CATEGORY_CAPTURE);
  if (!_outputCapturePin) {
    RTC_LOG(LS_INFO) << "Failed to get output capture pin";
    return -1;
  }

  // Create the sink filter used for receiving captured frames.
  sink_filter_ = new ComRefCount<CaptureSinkFilter>(this);

  hr = _graphBuilder->AddFilter(sink_filter_.get(), SINK_FILTER_NAME);
  if (FAILED(hr)) {
    RTC_LOG(LS_INFO) << "Failed to add the send filter to the graph.";
    return -1;
  }

  _inputSendPin = GetInputPin(sink_filter_.get());
  if (!_inputSendPin) {
    RTC_LOG(LS_INFO) << "Failed to get input send pin";
    return -1;
  }

  if (SetCameraOutput(_requestedCapability) != 0) {
    return -1;
  }

  RTC_LOG(LS_INFO) << "Capture device '" << deviceUniqueIdUTF8
                   << "' initialized.";
  return 0;
}
// Starts streaming with `capability`. If the requested capability differs
// from the one the graph was built for, the graph is disconnected and
// reconfigured first. Returns 0 on success, -1 on failure.
int32_t VideoCaptureDS::StartCapture(const VideoCaptureCapability& capability) {
  RTC_DCHECK_RUN_ON(&api_checker_);

  // A format change requires rebuilding the connections before running.
  if (capability != _requestedCapability) {
    DisconnectGraph();
    if (SetCameraOutput(capability) != 0) {
      return -1;
    }
  }

  // Pause first; a failure here typically means the device is in use.
  const HRESULT pause_result = _mediaControl->Pause();
  if (FAILED(pause_result)) {
    RTC_LOG(LS_INFO)
        << "Failed to Pause the Capture device. Is it already occupied? "
        << pause_result;
    return -1;
  }

  const HRESULT run_result = _mediaControl->Run();
  if (FAILED(run_result)) {
    RTC_LOG(LS_INFO) << "Failed to start the Capture device.";
    return -1;
  }
  return 0;
}
// Stops the capture graph, letting queued data drain first.
// Returns 0 on success, -1 on failure.
int32_t VideoCaptureDS::StopCapture() {
  RTC_DCHECK_RUN_ON(&api_checker_);
  // StopWhenReady pauses the graph and waits for pending samples before the
  // full stop.
  const HRESULT result = _mediaControl->StopWhenReady();
  if (FAILED(result)) {
    RTC_LOG(LS_INFO) << "Failed to stop the capture graph. " << result;
    return -1;
  }
  return 0;
}
bool VideoCaptureDS::CaptureStarted() {
RTC_DCHECK_RUN_ON(&api_checker_);
OAFilterState state = 0;
HRESULT hr = _mediaControl->GetState(1000, &state);
if (hr != S_OK && hr != VFW_S_CANT_CUE) {
RTC_LOG(LS_INFO) << "Failed to get the CaptureStarted status";
}
RTC_LOG(LS_INFO) << "CaptureStarted " << state;
return state == State_Running;
}
// Reports the capability most recently requested via StartCapture /
// SetCameraOutput. NOTE(review): this echoes the *requested* capability, not
// necessarily the exact format the driver negotiated.
int32_t VideoCaptureDS::CaptureSettings(VideoCaptureCapability& settings) {
  RTC_DCHECK_RUN_ON(&api_checker_);
  settings = _requestedCapability;
  return 0;
}
// Configures the capture pin's output format to the supported capability that
// best matches `requestedCapability`, then connects the graph (inserting the
// MS DV decoder for DV cameras). Returns 0 on success, -1 on failure.
int32_t VideoCaptureDS::SetCameraOutput(
    const VideoCaptureCapability& requestedCapability) {
  RTC_DCHECK_RUN_ON(&api_checker_);
  // Get the best matching capability
  VideoCaptureCapability capability;
  int32_t capabilityIndex;
  // Store the new requested size
  _requestedCapability = requestedCapability;
  // Match the requested capability with the supported.
  if ((capabilityIndex = _dsInfo.GetBestMatchedCapability(
           _deviceUniqueId, _requestedCapability, capability)) < 0) {
    return -1;
  }
  // Reduce the frame rate if possible; fall back to 30 fps if the matched
  // capability reports a non-positive rate.
  if (capability.maxFPS > requestedCapability.maxFPS) {
    capability.maxFPS = requestedCapability.maxFPS;
  } else if (capability.maxFPS <= 0) {
    capability.maxFPS = 30;
  }
  // Convert it to the windows capability index since they are not necessarily
  // the same.
  VideoCaptureCapabilityWindows windowsCapability;
  if (_dsInfo.GetWindowsCapability(capabilityIndex, windowsCapability) != 0) {
    return -1;
  }
  IAMStreamConfig* streamConfig = NULL;
  AM_MEDIA_TYPE* pmt = NULL;
  VIDEO_STREAM_CONFIG_CAPS caps;
  HRESULT hr = _outputCapturePin->QueryInterface(IID_IAMStreamConfig,
                                                 (void**)&streamConfig);
  if (hr) {
    RTC_LOG(LS_INFO) << "Can't get the Capture format settings.";
    return -1;
  }
  // Get the windows capability from the capture device
  bool isDVCamera = false;
  hr = streamConfig->GetStreamCaps(windowsCapability.directShowCapabilityIndex,
                                   &pmt, reinterpret_cast<BYTE*>(&caps));
  if (hr == S_OK) {
    // Override the frame duration in the returned media type when the driver
    // supports frame-rate control. AvgTimePerFrame is in 100 ns units.
    if (pmt->formattype == FORMAT_VideoInfo2) {
      VIDEOINFOHEADER2* h = reinterpret_cast<VIDEOINFOHEADER2*>(pmt->pbFormat);
      if (capability.maxFPS > 0 && windowsCapability.supportFrameRateControl) {
        h->AvgTimePerFrame = REFERENCE_TIME(10000000.0 / capability.maxFPS);
      }
    } else {
      VIDEOINFOHEADER* h = reinterpret_cast<VIDEOINFOHEADER*>(pmt->pbFormat);
      if (capability.maxFPS > 0 && windowsCapability.supportFrameRateControl) {
        h->AvgTimePerFrame = REFERENCE_TIME(10000000.0 / capability.maxFPS);
      }
    }
    // Set the sink filter to request this capability
    sink_filter_->SetRequestedCapability(capability);
    // Order the capture device to use this capability.
    // NOTE(review): accumulating HRESULTs with += is fragile (two failure
    // codes could in principle sum to a success value); kept as-is to
    // preserve behavior.
    hr += streamConfig->SetFormat(pmt);
    // Check if this is a DV camera and we need to add MS DV Filter
    if (pmt->subtype == MEDIASUBTYPE_dvsl ||
        pmt->subtype == MEDIASUBTYPE_dvsd ||
        pmt->subtype == MEDIASUBTYPE_dvhd) {
      isDVCamera = true;  // This is a DV camera. Use MS DV filter
    }
    // GetStreamCaps transferred ownership of pmt to us; free it here.
    FreeMediaType(pmt);
    pmt = NULL;
  }
  RELEASE_AND_CLEAR(streamConfig);
  if (FAILED(hr)) {
    RTC_LOG(LS_INFO) << "Failed to set capture device output format";
    return -1;
  }
  if (isDVCamera) {
    hr = ConnectDVCamera();
  } else {
    hr = _graphBuilder->ConnectDirect(_outputCapturePin, _inputSendPin, NULL);
  }
  if (hr != S_OK) {
    RTC_LOG(LS_INFO) << "Failed to connect the Capture graph " << hr;
    return -1;
  }
  return 0;
}
// Stops the graph and disconnects the capture and sink pins (and the DV
// decoder pins when present) so the graph can be rebuilt with a new format.
// Returns 0 on success, -1 on failure.
int32_t VideoCaptureDS::DisconnectGraph() {
  RTC_DCHECK_RUN_ON(&api_checker_);
  // NOTE(review): HRESULTs are accumulated with +=; any nonzero sum is
  // treated as failure below. Fragile but preserved as-is.
  HRESULT hr = _mediaControl->Stop();
  hr += _graphBuilder->Disconnect(_outputCapturePin);
  hr += _graphBuilder->Disconnect(_inputSendPin);
  // if the DV camera filter exists, disconnect its pins as well (best effort;
  // these results are intentionally not checked).
  if (_dvFilter) {
    _graphBuilder->Disconnect(_inputDvPin);
    _graphBuilder->Disconnect(_outputDvPin);
  }
  if (hr != S_OK) {
    RTC_LOG(LS_ERROR)
        << "Failed to Stop the Capture device for reconfiguration " << hr;
    return -1;
  }
  return 0;
}
// Inserts the Microsoft DV decoder filter between the capture device and the
// sink filter and connects all three. The decoder filter and its pins are
// created lazily on first use and reused on subsequent reconnects.
// Returns S_OK on success or a failure HRESULT otherwise.
HRESULT VideoCaptureDS::ConnectDVCamera() {
  RTC_DCHECK_RUN_ON(&api_checker_);
  HRESULT hr = S_OK;
  if (!_dvFilter) {
    hr = CoCreateInstance(CLSID_DVVideoCodec, NULL, CLSCTX_INPROC,
                          IID_IBaseFilter, (void**)&_dvFilter);
    if (hr != S_OK) {
      RTC_LOG(LS_INFO) << "Failed to create the dv decoder: " << hr;
      return hr;
    }
    hr = _graphBuilder->AddFilter(_dvFilter, L"VideoDecoderDV");
    if (hr != S_OK) {
      RTC_LOG(LS_INFO) << "Failed to add the dv decoder to the graph: " << hr;
      return hr;
    }
    _inputDvPin = GetInputPin(_dvFilter);
    if (_inputDvPin == NULL) {
      RTC_LOG(LS_INFO) << "Failed to get input pin from DV decoder";
      // Return a proper failure HRESULT rather than -1; the original returned
      // -1, which is not a well-defined HRESULT code.
      return E_FAIL;
    }
    _outputDvPin = GetOutputPin(_dvFilter, GUID_NULL);
    if (_outputDvPin == NULL) {
      RTC_LOG(LS_INFO) << "Failed to get output pin from DV decoder";
      return E_FAIL;
    }
  }
  hr = _graphBuilder->ConnectDirect(_outputCapturePin, _inputDvPin, NULL);
  if (hr != S_OK) {
    // Fixed log typo: "devoder" -> "decoder".
    RTC_LOG(LS_INFO) << "Failed to connect capture device to the dv decoder: "
                     << hr;
    return hr;
  }
  hr = _graphBuilder->ConnectDirect(_outputDvPin, _inputSendPin, NULL);
  if (hr != S_OK) {
    if (hr == HRESULT_FROM_WIN32(ERROR_TOO_MANY_OPEN_FILES)) {
      RTC_LOG(LS_INFO) << "Failed to connect the capture device, busy";
    } else {
      RTC_LOG(LS_INFO) << "Failed to connect capture device to the send graph: "
                       << hr;
    }
  }
  return hr;
}
} // namespace videocapturemodule
} // namespace webrtc

View file

@ -0,0 +1,75 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef MODULES_VIDEO_CAPTURE_MAIN_SOURCE_WINDOWS_VIDEO_CAPTURE_DS_H_
#define MODULES_VIDEO_CAPTURE_MAIN_SOURCE_WINDOWS_VIDEO_CAPTURE_DS_H_
#include "api/scoped_refptr.h"
#include "modules/video_capture/video_capture_impl.h"
#include "modules/video_capture/windows/device_info_ds.h"
#define CAPTURE_FILTER_NAME L"VideoCaptureFilter"
#define SINK_FILTER_NAME L"SinkFilter"
namespace webrtc {
namespace videocapturemodule {
// Forward declaration
class CaptureSinkFilter;
// DirectShow-based video capture module for Windows. Owns a DirectShow filter
// graph of the form: capture filter -> (optional MS DV decoder) -> sink
// filter. Members are guarded by api_checker_ (single-thread access).
class VideoCaptureDS : public VideoCaptureImpl {
 public:
  VideoCaptureDS();
  // Opens the device identified by `deviceUniqueIdUTF8`, builds the filter
  // graph and applies the requested capability. Returns 0 on success.
  virtual int32_t Init(const char* deviceUniqueIdUTF8);
  /*************************************************************************
   *
   *   Start/Stop
   *
   *************************************************************************/
  int32_t StartCapture(const VideoCaptureCapability& capability) override;
  int32_t StopCapture() override;
  /**************************************************************************
   *
   *   Properties of the set device
   *
   **************************************************************************/
  bool CaptureStarted() override;
  int32_t CaptureSettings(VideoCaptureCapability& settings) override;
 protected:
  ~VideoCaptureDS() override;
  // Help functions
  // Configures the capture pin for `requestedCapability` and (re)connects the
  // graph. Returns 0 on success.
  int32_t SetCameraOutput(const VideoCaptureCapability& requestedCapability);
  // Stops the graph and disconnects all pins prior to reconfiguration.
  int32_t DisconnectGraph();
  // Inserts and connects the Microsoft DV decoder for DV cameras.
  HRESULT ConnectDVCamera();
  DeviceInfoDS _dsInfo RTC_GUARDED_BY(api_checker_);
  IBaseFilter* _captureFilter RTC_GUARDED_BY(api_checker_);
  IGraphBuilder* _graphBuilder RTC_GUARDED_BY(api_checker_);
  IMediaControl* _mediaControl RTC_GUARDED_BY(api_checker_);
  // Sink filter that receives the captured frames.
  rtc::scoped_refptr<CaptureSinkFilter> sink_filter_
      RTC_GUARDED_BY(api_checker_);
  IPin* _inputSendPin RTC_GUARDED_BY(api_checker_);
  IPin* _outputCapturePin RTC_GUARDED_BY(api_checker_);
  // Microsoft DV interface (external DV cameras)
  IBaseFilter* _dvFilter RTC_GUARDED_BY(api_checker_);
  IPin* _inputDvPin RTC_GUARDED_BY(api_checker_);
  IPin* _outputDvPin RTC_GUARDED_BY(api_checker_);
};
} // namespace videocapturemodule
} // namespace webrtc
#endif // MODULES_VIDEO_CAPTURE_MAIN_SOURCE_WINDOWS_VIDEO_CAPTURE_DS_H_

View file

@ -0,0 +1,38 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "api/scoped_refptr.h"
#include "modules/video_capture/windows/video_capture_ds.h"
namespace webrtc {
namespace videocapturemodule {
// static
// static
// Returns a new DirectShow-backed DeviceInfo, or null when its initialization
// fails (see DeviceInfoDS::Create). Caller takes ownership.
VideoCaptureModule::DeviceInfo* VideoCaptureImpl::CreateDeviceInfo() {
  // TODO(tommi): Use the Media Foundation version on Vista and up.
  return DeviceInfoDS::Create();
}
// Creates and initializes a DirectShow-based capture module for `device_id`.
// Returns nullptr when the id is null or the device cannot be initialized.
rtc::scoped_refptr<VideoCaptureModule> VideoCaptureImpl::Create(
    const char* device_id) {
  if (!device_id)
    return nullptr;

  // TODO(tommi): Use Media Foundation implementation for Vista and up.
  auto module = rtc::make_ref_counted<VideoCaptureDS>();
  if (module->Init(device_id) == 0)
    return module;
  return nullptr;
}
} // namespace videocapturemodule
} // namespace webrtc