
Commit 1082ec6

New APIs in Device utils

1 parent f697bf6 · commit 1082ec6

8 files changed (+185, -28 lines)

talk/owt/sdk/base/deviceutils.cc

Lines changed: 67 additions & 3 deletions
@@ -62,7 +62,8 @@ std::vector<Resolution> DeviceUtils::VideoCapturerSupportedResolutions(
   } else {
     for (int32_t i = 0; i < info->NumberOfCapabilities(id.c_str()); i++) {
       if (info->GetCapability(id.c_str(), i, capability) == 0) {
-        resolutions.push_back(Resolution(capability.width, capability.height));
+        resolutions.push_back(
+            Resolution(capability.width, capability.height));
       } else {
         RTC_LOG(LS_WARNING) << "Failed to get capability.";
       }
@@ -89,8 +90,8 @@ std::vector<Resolution> DeviceUtils::VideoCapturerSupportedResolutions(
     if (found) {
       for (int32_t i = 0; i < info->NumberOfCapabilities(vcm_id); i++) {
         if (info->GetCapability(vcm_id, i, capability) == 0) {
-          resolutions.push_back(
-              Resolution(capability.width, capability.height));
+          resolutions.push_back(Resolution(
+              capability.width, capability.height));
        } else {
           RTC_LOG(LS_WARNING) << "Failed to get capability.";
        }
@@ -100,5 +101,68 @@ std::vector<Resolution> DeviceUtils::VideoCapturerSupportedResolutions(
   }
   return resolutions;
 }
+
+std::vector<CameraCapability> DeviceUtils::VideoCapturerSupportedCapabilities(
+    const std::string& id) {
+  std::vector<CameraCapability> resolutions;
+  webrtc::VideoCaptureCapability capability;
+  std::unique_ptr<webrtc::VideoCaptureModule::DeviceInfo> info(
+      webrtc::VideoCaptureFactory::CreateDeviceInfo());
+  if (!info) {
+    RTC_LOG(LS_ERROR) << "CreateDeviceInfo failed";
+  } else {
+    for (int32_t i = 0; i < info->NumberOfCapabilities(id.c_str()); i++) {
+      if (info->GetCapability(id.c_str(), i, capability) == 0) {
+        resolutions.push_back(CameraCapability(
+            capability.width, capability.height, capability.maxFPS));
+      } else {
+        RTC_LOG(LS_WARNING) << "Failed to get capability.";
+      }
+    }
+    // Try to get capabilities by device name if getting capabilities by ID
+    // failed.
+    // TODO(jianjun): Remove this when creating stream by device name is no
+    // longer supported.
+    if (resolutions.size() == 0) {
+      // Get device ID by name.
+      int num_cams = info->NumberOfDevices();
+      char vcm_id[256] = "";
+      bool found = false;
+      for (int index = 0; index < num_cams; ++index) {
+        char vcm_name[256] = "";
+        if (info->GetDeviceName(index, vcm_name, arraysize(vcm_name), vcm_id,
+                                arraysize(vcm_id)) != -1) {
+          if (id == reinterpret_cast<char*>(vcm_name)) {
+            found = true;
+            break;
+          }
+        }
+      }
+      if (found) {
+        for (int32_t i = 0; i < info->NumberOfCapabilities(vcm_id); i++) {
+          if (info->GetCapability(vcm_id, i, capability) == 0) {
+            resolutions.push_back(CameraCapability(
+                capability.width, capability.height, capability.maxFPS));
+          } else {
+            RTC_LOG(LS_WARNING) << "Failed to get capability.";
+          }
+        }
+      }
+    }
+  }
+  return resolutions;
+}
+
+std::string DeviceUtils::GetDeviceNameByIndex(int index) {
+  std::unique_ptr<webrtc::VideoCaptureModule::DeviceInfo> info(
+      webrtc::VideoCaptureFactory::CreateDeviceInfo());
+  char device_name[256];
+  char unique_name[256];
+  info->GetDeviceName(static_cast<uint32_t>(index), device_name,
+                      sizeof(device_name), unique_name, sizeof(unique_name));
+  std::string name(device_name);
+  return name;
+}
+
 }
 }
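A caller-side sketch of the two new APIs (not part of this commit). It assumes the existing DeviceUtils::VideoCapturerIds() enumerator already declared in deviceutils.h; the ListCameras() helper is hypothetical.

// Hypothetical usage sketch, not from this commit.
#include <iostream>
#include <string>
#include <vector>

#include "owt/base/deviceutils.h"

using owt::base::CameraCapability;
using owt::base::DeviceUtils;

void ListCameras() {
  for (const std::string& id : DeviceUtils::VideoCapturerIds()) {
    int index = DeviceUtils::GetVideoCaptureDeviceIndex(id);
    // New API: user-friendly name by index.
    std::cout << "Camera " << index << ": "
              << DeviceUtils::GetDeviceNameByIndex(index) << std::endl;
    // New API: capabilities carry the frame rate in addition to the resolution.
    for (const CameraCapability& cap :
         DeviceUtils::VideoCapturerSupportedCapabilities(id)) {
      std::cout << "  " << cap.width << "x" << cap.height << "@" << cap.fps
                << "fps" << std::endl;
    }
  }
}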

talk/owt/sdk/base/peerconnectionchannel.cc

Lines changed: 1 addition & 0 deletions
@@ -29,6 +29,7 @@ bool PeerConnectionChannel::InitializePeerConnection() {
   video_transceiver_direction_ = webrtc::RtpTransceiverDirection::kSendRecv;
   configuration_.enable_dtls_srtp = true;
   configuration_.sdp_semantics = webrtc::SdpSemantics::kUnifiedPlan;
+  configuration_.media_config.enable_dscp = true;
   peer_connection_ =
       (factory_->CreatePeerConnection(configuration_, this)).get();
   if (!peer_connection_.get()) {
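For context, enable_dscp is a stock WebRTC media-config flag: with it enabled, the send side tags outgoing RTP packets with DSCP values derived from each sender's network priority, which is what makes the per-encoding priority mapping added in conferencepeerconnectionchannel.cc (further below) take effect on the wire. A minimal standalone sketch, assuming upstream WebRTC header and field names rather than anything from this commit:

// Sketch only; names assumed from upstream WebRTC, not from this commit.
#include "api/peer_connection_interface.h"

webrtc::PeerConnectionInterface::RTCConfiguration MakeConfig() {
  webrtc::PeerConnectionInterface::RTCConfiguration config;
  config.sdp_semantics = webrtc::SdpSemantics::kUnifiedPlan;
  // Tag outgoing RTP packets with DSCP values derived from each sender's
  // network priority.
  config.media_config.enable_dscp = true;
  return config;
}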

talk/owt/sdk/base/win/msdkvideoencoderfactory.cc

Lines changed: 10 additions & 3 deletions
@@ -38,7 +38,7 @@ MSDKVideoEncoderFactory::MSDKVideoEncoderFactory() {
   codecs_to_check.push_back(owt::base::VideoCodec::kVp8);
   std::vector<VideoEncoderCapability> capabilities =
       media_capability->SupportedCapabilitiesForVideoEncoder(codecs_to_check);
-
+#if 0
   for (auto& capability : capabilities) {
     if (capability.codec_type == owt::base::VideoCodec::kH264 &&
         !is_h264_hw_supported) {
@@ -65,6 +65,12 @@ MSDKVideoEncoderFactory::MSDKVideoEncoderFactory() {
     }
 #endif
   }
+#endif
+  supported_codec_types.push_back(webrtc::kVideoCodecH264);
+  supported_codec_types.push_back(webrtc::kVideoCodecVP9);
+  supported_codec_types.push_back(webrtc::kVideoCodecVP8);
+  supported_codec_types.push_back(webrtc::kVideoCodecH265);
+  supported_codec_types.push_back(webrtc::kVideoCodecAV1);
 }
 
 std::unique_ptr<webrtc::VideoEncoder> MSDKVideoEncoderFactory::CreateVideoEncoder(
@@ -100,8 +106,9 @@ std::unique_ptr<webrtc::VideoEncoder> MSDKVideoEncoderFactory::CreateVideoEncode
     return webrtc::CreateLibaomAv1Encoder();
 #ifndef DISABLE_H265
   else if (absl::EqualsIgnoreCase(format.name, cricket::kH265CodecName) &&
-           !h265_hw)
-    return nullptr;
+           !h265_hw) {
+  }
+  // return nullptr;
 #endif
   return MSDKVideoEncoder::Create(cricket::VideoCodec(format));
 }
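The constructor now registers a fixed codec list instead of deriving it from the MediaCapabilities probe, which is compiled out with #if 0. For illustration only, the sketch below shows the general shape in which a webrtc::VideoEncoderFactory turns such a codec-type list into SDP formats; the helper name and the actual contents of MSDKVideoEncoderFactory::GetSupportedFormats() are assumptions, not shown in this commit.

// Illustrative sketch, not from this commit: map VideoCodecType values to
// SdpVideoFormat entries, as a GetSupportedFormats() override typically does.
#include <vector>

#include "api/video/video_codec_type.h"
#include "api/video_codecs/sdp_video_format.h"
#include "media/base/media_constants.h"

std::vector<webrtc::SdpVideoFormat> FormatsForCodecTypes(
    const std::vector<webrtc::VideoCodecType>& types) {
  std::vector<webrtc::SdpVideoFormat> formats;
  for (webrtc::VideoCodecType type : types) {
    switch (type) {
      case webrtc::kVideoCodecVP8:
        formats.push_back(webrtc::SdpVideoFormat(cricket::kVp8CodecName));
        break;
      case webrtc::kVideoCodecVP9:
        formats.push_back(webrtc::SdpVideoFormat(cricket::kVp9CodecName));
        break;
      case webrtc::kVideoCodecH264:
        // Real factories usually add profile-level-id/packetization-mode
        // parameters here; omitted for brevity.
        formats.push_back(webrtc::SdpVideoFormat(cricket::kH264CodecName));
        break;
      case webrtc::kVideoCodecAV1:
        formats.push_back(webrtc::SdpVideoFormat(cricket::kAv1CodecName));
        break;
      default:
        break;  // H.265 is only available behind OWT's DISABLE_H265 guard.
    }
  }
  return formats;
}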

talk/owt/sdk/base/win/videorendererd3d11.cc

Lines changed: 52 additions & 20 deletions
@@ -79,13 +79,24 @@ void WebrtcVideoRendererD3D11Impl::OnFrame(
   else
     return;
 
-  width_ = width;
-  height_ = height;
+  if (width_ != width || height_ != height) {
+    width_ = width;
+    height_ = height;
+  }
+
+  RECT rect;
+  GetClientRect(wnd_, &rect);
 
+  if (window_width != rect.right - rect.left ||
+      window_height != rect.bottom - rect.top) {
+    need_swapchain_recreate_ = true;
+    window_width = rect.right - rect.left;
+    window_height = rect.bottom - rect.top;
+  }
 
   if (render_device != d3d11_device_) {
-    d3d11_device_ = render_device;
     need_swapchain_recreate_ = true;
+    d3d11_device_ = render_device;
   }
 
   if (need_swapchain_recreate_) {
@@ -113,7 +124,23 @@ void WebrtcVideoRendererD3D11Impl::OnFrame(
     D3DPRESENT_PARAMETERS d3d_params = {};
 
     d3d_params.Windowed = true;
-    d3d_params.SwapEffect = D3DSWAPEFFECT_COPY;
+    d3d_params.SwapEffect = D3DSWAPEFFECT_DISCARD;
+
+    // Check anti-alias support.
+    static D3DMULTISAMPLE_TYPE multisample_types[] = {
+        D3DMULTISAMPLE_5_SAMPLES, D3DMULTISAMPLE_4_SAMPLES,
+        D3DMULTISAMPLE_2_SAMPLES, D3DMULTISAMPLE_NONE};
+    DWORD multisample_quality = 0;
+    for (int i = 0; i < 4; i++) {
+      HRESULT hr = m_d3d_->CheckDeviceMultiSampleType(
+          D3DADAPTER_DEFAULT, D3DDEVTYPE_HAL, D3DFMT_A8R8G8B8, true,
+          multisample_types[i], &multisample_quality);
+      if (SUCCEEDED(hr)) {
+        d3d_params.MultiSampleType = multisample_types[i];
+        d3d_params.MultiSampleQuality = multisample_quality - 1;
+        break;
+      }
+    }
 
     IDirect3DDevice9* d3d_device;
     if (m_d3d_->CreateDevice(D3DADAPTER_DEFAULT, D3DDEVTYPE_HAL, wnd_,
@@ -138,6 +165,7 @@ void WebrtcVideoRendererD3D11Impl::OnFrame(
 
     m_d3d_device_->SetRenderState(D3DRS_CULLMODE, D3DCULL_NONE);
     m_d3d_device_->SetRenderState(D3DRS_LIGHTING, FALSE);
+    m_d3d_device_->SetRenderState(D3DRS_MULTISAMPLEANTIALIAS, TRUE);
     ResizeD3D9RenderPipeline(video_frame.width(), video_frame.height());
     d3d9_inited_for_raw_ = true;
   } else {
@@ -186,6 +214,7 @@ void WebrtcVideoRendererD3D11Impl::ResizeD3D9RenderPipeline(size_t width, size_t
   width_ = width;
   height_ = height;
   IDirect3DTexture9* texture;
+  // Texture should always be created with the size of the frame.
   m_d3d_device_->CreateTexture(static_cast<UINT>(width_),
                                static_cast<UINT>(height_), 1, 0, D3DFMT_A8R8G8B8,
                                D3DPOOL_MANAGED, &texture, nullptr);
@@ -208,6 +237,8 @@ void WebrtcVideoRendererD3D11Impl::ResizeD3D9RenderPipeline(size_t width, size_t
   m_vertex_buffer_->Unlock();
 }
 
+// TODO: Use D3D11 for rendering of the I420 view as well.
+#if 0
 HRESULT WebrtcVideoRendererD3D11Impl::CreateD3D11Device() {
   HRESULT hr = S_OK;
 
@@ -228,23 +259,21 @@ HRESULT WebrtcVideoRendererD3D11Impl::CreateD3D11Device() {
   // Create the surface to copy NV12 image to.
   D3D11_TEXTURE2D_DESC texture_desc;
   memset(&texture_desc, 0, sizeof(texture_desc));
-  texture_desc.Format = DXGI_FORMAT_NV12;
-  texture_desc.Width = width_;
-  texture_desc.Height = height_;
+  texture_desc.Format = DXGI_FORMAT_R8_UNORM;
+  texture_desc.Width = width_;    // same as video width
+  texture_desc.Height = height_;  // same as video height
   texture_desc.ArraySize = 1;
   texture_desc.MipLevels = 1;
-  texture_desc.Usage = D3D11_USAGE_DYNAMIC;
-  texture_desc.BindFlags = D3D11_BIND_SHADER_RESOURCE;
-  texture_desc.CPUAccessFlags = D3D11_CPU_ACCESS_WRITE;
+  texture_desc.Usage = D3D11_USAGE_DEFAULT;  // We use UpdateSubresource to update the texture,
+                                             // so it is not set to D3D11_USAGE_DYNAMIC.
 
   hr = d3d11_device_->CreateTexture2D(&texture_desc, nullptr,
-                                      &sw_shared_texture_);
-  if (FAILED(hr)) {
-    RTC_LOG(LS_ERROR)
-        << "Failed to create shared texture to copy nv12 frame to.";
-    return hr;
-  }
+                                      &texture_planes_[0]);
+
+  texture_desc.Width = width_ / 2;
+  texture_desc.Height = height_ / 2;
 }
+#endif
 
 // Method to create the necessary 3D pipeline objects to render a textured 3D
 // QUAD
@@ -471,12 +500,15 @@ void WebrtcVideoRendererD3D11Impl::RenderToBackbuffer(int array_slice) {
   FLOAT clear_color[4] = {0.0f, 0.0f, 0.0f, 0.0f};
   d3d11_device_context_->ClearRenderTargetView(render_target_view_, clear_color);
 
+  RECT rect;
+  GetClientRect(wnd_, &rect);
+
   // Set viewport
   D3D11_VIEWPORT viewport = {0};
-  viewport.TopLeftX = x_offset_;
-  viewport.TopLeftY = y_offset_;
-  viewport.Width = width_;
-  viewport.Height = height_;
+  viewport.TopLeftX = 0;
+  viewport.TopLeftY = 0;
+  viewport.Width = rect.right - rect.left;
+  viewport.Height = rect.bottom - rect.top;
   d3d11_device_context_->RSSetViewports(1, &viewport);
 
   // Bind the textures
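The disabled CreateD3D11Device() block switches from a single DXGI_FORMAT_NV12 texture to per-plane textures (texture_planes_), with the second plane at half the width and height. For background, the conventional NV12-to-D3D11 plane mapping looks like the following sketch; the function and texture names are hypothetical and not taken from this file.

// Background sketch (not from this commit): NV12 split into two plane textures.
#include <d3d11.h>

HRESULT CreateNv12PlaneTextures(ID3D11Device* device, UINT width, UINT height,
                                ID3D11Texture2D** y_plane,
                                ID3D11Texture2D** uv_plane) {
  D3D11_TEXTURE2D_DESC desc = {};
  desc.ArraySize = 1;
  desc.MipLevels = 1;
  desc.SampleDesc.Count = 1;
  desc.Usage = D3D11_USAGE_DEFAULT;  // updated via UpdateSubresource
  desc.BindFlags = D3D11_BIND_SHADER_RESOURCE;

  // Y plane: one 8-bit sample per pixel at full resolution.
  desc.Format = DXGI_FORMAT_R8_UNORM;
  desc.Width = width;
  desc.Height = height;
  HRESULT hr = device->CreateTexture2D(&desc, nullptr, y_plane);
  if (FAILED(hr))
    return hr;

  // UV plane: interleaved 8-bit U and V samples, half resolution in both axes.
  desc.Format = DXGI_FORMAT_R8G8_UNORM;
  desc.Width = width / 2;
  desc.Height = height / 2;
  return device->CreateTexture2D(&desc, nullptr, uv_plane);
}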

talk/owt/sdk/base/win/videorendererd3d11.h

Lines changed: 5 additions & 1 deletion
@@ -60,8 +60,10 @@ class WebrtcVideoRendererD3D11Impl
   void Destroy();
   void FillSwapChainDesc(DXGI_SWAP_CHAIN_DESC1& scd);
   void ResetTextureViews();
+#if 0
   HRESULT CreateD3D11Device();
   void WriteNV12ToTexture();
+#endif
   HRESULT CreateRenderPipeline();
   HRESULT ResizeRenderPipeline();
   void ResizeD3D9RenderPipeline(size_t width, size_t height);
@@ -81,6 +83,8 @@ class WebrtcVideoRendererD3D11Impl
   uint16_t y_offset_ = 0;
   uint16_t width_ = 0;
   uint16_t height_ = 0;
+  int window_width = 0;
+  int window_height = 0;
   bool need_swapchain_recreate_ = true;
 
   // Owner of the d3d11 device/context is decoder.
@@ -101,7 +105,6 @@ class WebrtcVideoRendererD3D11Impl
   ID3D11PixelShader* pixel_shader_ = nullptr;
   ID3D11InputLayout* input_layout_ = nullptr;
   ID3D11Buffer* vertex_buffer_ = nullptr;
-  ID3D11Texture2D* sw_shared_texture_ = nullptr;
 
   // Using D3D9 for rendering SW frames.
   bool d3d9_inited_for_raw_ = false;
@@ -110,6 +113,7 @@ class WebrtcVideoRendererD3D11Impl
   rtc::scoped_refptr<IDirect3DTexture9> m_texture_;
   rtc::scoped_refptr<IDirect3DVertexBuffer9> m_vertex_buffer_;
   UINT views_count = 0;
+
 };
 } // namespace base
 } // namespace owt

talk/owt/sdk/conference/conferencepeerconnectionchannel.cc

Lines changed: 18 additions & 0 deletions
@@ -1007,6 +1007,24 @@ void ConferencePeerConnectionChannel::SendPublishMessage(
           encoding.num_temporal_layers <= 4) {
         param.num_temporal_layers = encoding.num_temporal_layers;
       }
+      if (encoding.priority != owt::base::NetworkPriority::kDefault) {
+        switch (encoding.priority) {
+          case owt::base::NetworkPriority::kVeryLow:
+            param.network_priority = webrtc::Priority::kVeryLow;
+            break;
+          case owt::base::NetworkPriority::kLow:
+            param.network_priority = webrtc::Priority::kLow;
+            break;
+          case owt::base::NetworkPriority::kMedium:
+            param.network_priority = webrtc::Priority::kMedium;
+            break;
+          case owt::base::NetworkPriority::kHigh:
+            param.network_priority = webrtc::Priority::kHigh;
+            break;
+          default:
+            break;
+        }
+      }
       param.active = encoding.active;
       transceiver_init.send_encodings.push_back(param);
     }

talk/owt/sdk/include/cpp/owt/base/commontypes.h

Lines changed: 28 additions & 1 deletion
@@ -31,12 +31,20 @@ enum class VideoCodec : int {
   kUnknown
 };
 /// Track kind
-enum class TrackKind : int{
+enum class TrackKind : int {
   kAudio = 1,
   kVideo,
   kAudioAndVideo,
   kUnknown
 };
+/// Network priority
+enum class NetworkPriority : int {
+  kVeryLow,
+  kLow,
+  kMedium,
+  kHigh,
+  kDefault
+};
 /// This class represents a resolution value.
 struct Resolution {
   /// Construct an instance with width and height equal to 0.
@@ -49,6 +57,23 @@ struct Resolution {
   unsigned long width;
   unsigned long height;
 };
+
+/// This class represents a camera capability.
+struct CameraCapability {
+  /// Construct an instance with width, height and fps equal to 0.
+  explicit CameraCapability() : width(0), height(0), fps(0) {}
+  /// Construct an instance with the specified width, height and fps.
+  CameraCapability(unsigned long w, unsigned long h, int fps)
+      : width(w), height(h), fps(fps) {}
+  bool operator==(const CameraCapability& rhs) const {
+    return this->width == rhs.width && this->height == rhs.height &&
+           this->fps == rhs.fps;
+  }
+  unsigned long width;
+  unsigned long height;
+  int32_t fps;
+};
+
 /// Audio codec parameters for an audio track.
 struct AudioCodecParameters {
   /// Construct an instance of AudioCodecParameters with default param.
@@ -93,6 +118,8 @@ struct RtpEncodingParameters {
   // Value to use for RID RTP header extension.
   // Called "encodingId" in ORTC.
   std::string rid = "";
+
+  NetworkPriority priority = NetworkPriority::kDefault;
 };
 
 /// Audio encoding parameters.
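On the application side the new field is opt-in: anything other than kDefault is forwarded to webrtc::RtpEncodingParameters::network_priority by the mapping added in conferencepeerconnectionchannel.cc above. A small hypothetical sketch (the RID value and helper name are made up for illustration):

// Hypothetical sketch, not from this commit.
#include "owt/base/commontypes.h"

owt::base::RtpEncodingParameters MakeHighPriorityEncoding() {
  owt::base::RtpEncodingParameters encoding;
  encoding.rid = "high";  // illustrative RID
  encoding.active = true;
  // kDefault leaves WebRTC's default untouched; any other value is mapped
  // to the corresponding webrtc::Priority.
  encoding.priority = owt::base::NetworkPriority::kHigh;
  return encoding;
}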

talk/owt/sdk/include/cpp/owt/base/deviceutils.h

Lines changed: 4 additions & 0 deletions
@@ -22,6 +22,10 @@ class DeviceUtils {
       const std::string& id);
   /// Get the camera device index by its device id.
   static int GetVideoCaptureDeviceIndex(const std::string& id);
+  /// Get camera device's user-friendly name by index.
+  static std::string GetDeviceNameByIndex(int index);
+  static std::vector<CameraCapability> VideoCapturerSupportedCapabilities(
+      const std::string& id);
 };
 }
 }
